code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def add_to_document(self, parent):
    """Append this argument as an ``<arg>`` element under *parent*.

    Adds an <arg> subelement (typically under <args>) and populates its
    subelements: optional title/description/validation, plus the always
    present data_type/required_on_edit/required_on_create flags rendered
    as lowercase strings.

    :param parent: an ``ET.Element`` to become the parent of the new <arg>
    :returns: the created ``ET.Element`` for this argument
    """
    arg = ET.SubElement(parent, "arg")
    arg.set("name", self.name)
    # Optional text subelements: emitted only when a value is present.
    for tag, text in (("title", self.title),
                      ("description", self.description),
                      ("validation", self.validation)):
        if text is not None:
            ET.SubElement(arg, tag).text = text
    # Mandatory flags, serialized as lowercase strings (e.g. "true").
    for tag, flag in (("data_type", self.data_type),
                      ("required_on_edit", self.required_on_edit),
                      ("required_on_create", self.required_on_create)):
        ET.SubElement(arg, tag).text = str(flag).lower()
    return arg
Adds an ``Argument`` object to this ElementTree document. Adds an <arg> subelement to the parent element, typically <args> and sets up its subelements with their respective text. :param parent: An ``ET.Element`` to be the parent of a new <arg> subelement :returns: An ``ET.Element`` object representing this argument.
def root(self):
    """The root node of the tree this node is in.

    Walks up the parent chain (recursively via the parent's own ``root``)
    under this node's mutex; a node without a parent is its own root.
    """
    with self._mutex:
        if self._parent:
            # Recurse through the parent; attribute access, so presumably
            # ``root`` is a property on the node class.
            return self._parent.root
        else:
            return self
The root node of the tree this node is in.
def registration_backend(backend=None, namespace=None):
    """Instantiate the requested registration backend.

    :param backend: dotted path to the backend class; falls back to
        ORGS_REGISTRATION_BACKEND when not given.
    :param namespace: URL namespace passed to the backend constructor.
    :returns: an instance of the resolved RegistrationBackend class.
    """
    dotted_path = backend if backend else ORGS_REGISTRATION_BACKEND
    module_path, cls_name = dotted_path.rsplit(".", 1)
    backend_cls = getattr(import_module(module_path), cls_name)
    return backend_cls(namespace=namespace)
Returns a specified registration backend Args: backend: dotted path to the registration backend class namespace: URL namespace to use Returns: an instance of an RegistrationBackend
def RunInstaller():
    """Run all registered installers, then exit the process.

    Sets up file logging, re-initializes the config with the installer
    context, and exits with -1 on any installer failure, 0 on success.
    """
    try:
        os.makedirs(os.path.dirname(config.CONFIG["Installer.logfile"]))
    except OSError:
        pass  # best-effort: log directory probably already exists
    # NOTE(review): mode="wb" opens the log binary while logging emits str
    # on Python 3 -- confirm this runs under Python 2 or change to "w".
    handler = logging.FileHandler(config.CONFIG["Installer.logfile"], mode="wb")
    handler.setLevel(logging.DEBUG)
    logging.getLogger().addHandler(handler)
    config.CONFIG.Initialize(filename=flags.FLAGS.config, reset=True)
    config.CONFIG.AddContext(contexts.INSTALLER_CONTEXT,
                             "Context applied when we run the client installer.")
    logging.warning("Starting installation procedure for GRR client.")
    try:
        Installer().Init()
    except Exception as e:
        # Broad catch is deliberate: any failure aborts the install.
        logging.exception("Installation failed: %s", e)
        sys.exit(-1)
    sys.exit(0)
Run all registered installers. Run all the current installers and then exit the process.
def connection_ok():
    """Check web connection.

    :returns: True if the web connection is OK, False otherwise.
    """
    try:
        urlopen(Dataset.base_url, timeout=1)
        return True
    except HTTPError:
        # An HTTP error status still proves the server is reachable.
        return True
    except URLError:
        return False
Check web connection. Returns True if web connection is OK, False otherwise.
def _get_values(self):
    """Get the unique values for the group/category/stack dimensions.

    :returns: (values, dimensions) where *values* maps each dimension role
        in ``self._dimensions`` to its unique values, and *dimensions* is
        the matching list of dimension objects (None when not present).
    """
    # Indices of the group/category/stack dims in the last element.
    (gi, _), (ci, _), (si, _) = self._get_dims(self.hmap.last)
    ndims = self.hmap.last.ndims
    dims = self.hmap.last.kdims
    dimensions = []
    values = {}
    for vidx, vtype in zip([gi, ci, si], self._dimensions):
        if vidx < ndims:
            dim = dims[vidx]
            dimensions.append(dim)
            vals = self.hmap.dimension_values(dim.name)
        else:
            # Role not mapped to a key dimension: placeholder entry.
            dimensions.append(None)
            vals = [None]
        values[vtype] = list(unique_iterator(vals))
    return values, dimensions
Get unique index value for each bar
def sendSignal(self, p, member, signature=None, body=None, path='/org/freedesktop/DBus', interface='org.freedesktop.DBus'):
    """Send a DBus signal over a specific connection.

    :param p: BusProtocol instance to send the signal to.
    :param member: name of the signal to send.
    :param signature: optional DBus signature string for *body*.
    :param body: signal arguments; non-list/tuple values are wrapped.
    :param path: object path emitting the signal.
    :param interface: interface containing the signal.
    """
    if not isinstance(body, (list, tuple)):
        # NOTE(review): body=None is wrapped as [None] and sent as an
        # argument list -- confirm callers never pass None meaning "no body".
        body = [body]
    s = message.SignalMessage(path, member, interface, p.uniqueName, signature, body)
    p.sendMessage(s)
Sends a signal to a specific connection @type p: L{BusProtocol} @param p: L{BusProtocol} instance to send a signal to @type member: C{string} @param member: Name of the signal to send @type path: C{string} @param path: Path of the object emitting the signal. Defaults to '/org/freedesktop/DBus' @type interface: C{string} @param interface: If specified, this specifies the interface containing the desired method. Defaults to 'org.freedesktop.DBus' @type body: None or C{list} @param body: If supplied, this is a list of signal arguments. The contents of the list must match the signature. @type signature: None or C{string} @param signature: If specified, this specifies the DBus signature of the body of the DBus Signal message. This string must be a valid Signature string as defined by the DBus specification. If the body argument is supplied, this parameter must be provided.
def _pidgin_status(status, message):
    """Update status and message for the Pidgin IM application via DBus.

    Best-effort: DBus failures (e.g. Pidgin not running) are swallowed.

    :param status: status type; must be a key of PIDGIN_CODE_MAP.
    :param message: status message text.
    """
    try:
        iface = _dbus_get_interface('im.pidgin.purple.PurpleService',
                                    '/im/pidgin/purple/PurpleObject',
                                    'im.pidgin.purple.PurpleInterface')
        if iface:
            code = PIDGIN_CODE_MAP[status]
            # Create a new saved status, attach the message, then activate.
            saved_status = iface.PurpleSavedstatusNew('', code)
            iface.PurpleSavedstatusSetMessage(saved_status, message)
            iface.PurpleSavedstatusActivate(saved_status)
    except dbus.exceptions.DBusException:
        pass
Updates status and message for Pidgin IM application. `status` Status type. `message` Status message.
def extract_message_info():
    """Yield MessageInfo for each message of interest.

    Parses the ``enum Type`` block of ProtocolMessage.proto and yields a
    MessageInfo(module, title, accessor, constant) for every constant whose
    matching ``<Title>.proto`` file exists.
    """
    base_path = BASE_PACKAGE.replace('.', '/')
    filename = os.path.join(base_path, 'ProtocolMessage.proto')
    with open(filename, 'r') as file:
        types_found = False
        for line in file:
            stripped = line.lstrip().rstrip()
            # Only process lines inside the "enum Type { ... }" block.
            if stripped == 'enum Type {':
                types_found = True
                continue
            elif types_found and stripped == '}':
                break
            elif not types_found:
                continue
            constant = stripped.split(' ')[0]
            # SOME_NAME -> SomeName, keeping HID capitalized.
            title = constant.title().replace(
                '_', '').replace('Hid', 'HID')
            accessor = title[0].lower() + title[1:]
            # Skip messages without a dedicated .proto definition.
            if not os.path.exists(os.path.join(base_path, title + '.proto')):
                continue
            yield MessageInfo(
                title + '_pb2', title, accessor, constant)
Get information about all messages of interest.
def check_usrmove(self, pkgs):
    """Report whether the running system implements UsrMove.

    When the 'filesystem' package is present, its major version digit is
    compared against '3'. Otherwise, UsrMove is assumed if both '/bin' and
    '/sbin' are symbolic links.

    :param pkgs: a packages dictionary
    """
    if 'filesystem' in pkgs:
        # Only the first character of the version string is inspected.
        return pkgs['filesystem']['version'][0] == '3'
    return os.path.islink('/bin') and os.path.islink('/sbin')
Test whether the running system implements UsrMove. If the 'filesystem' package is present, it will check that the version is greater than 3. If the package is not present the '/bin' and '/sbin' paths are checked and UsrMove is assumed if both are symbolic links. :param pkgs: a packages dictionary
def simultaneous_listen(self):
    """Register this passive simultaneous node with the Rendezvous Server.

    Opens a connection to the server to monitor for new hole-punching
    requests.

    :returns: 1 on success, 0 on failure.
    """
    # Drop any previous server connection and its derived state first.
    if self.server_con is not None:
        self.server_con.s.close()
        self.server_con = None
        self.mappings = None
        self.predictions = None
    parts = self.sequential_connect()
    if parts is None:
        return 0
    con, mappings, predictions = parts
    # Make the socket fully non-blocking for the monitoring loop.
    con.blocking = 0
    con.timeout = 0
    con.s.settimeout(0)
    self.server_con = con
    self.mappings = mappings
    self.predictions = predictions
    msg = "SIMULTANEOUS READY 0 0"
    ret = self.server_con.send_line(msg)
    if not ret:
        return 0
    return 1
This function is called by passive simultaneous nodes who wish to establish themselves as such. It sets up a connection to the Rendezvous Server to monitor for new hole punching requests.
def _set_object_view(self, session):
    """Set the underlying object views on *session* to match the current view.

    For every registered object name, calls either the plenary or the
    comparative view setter; sessions lacking a setter are silently skipped.
    """
    for obj_name in self._object_views:
        if self._object_views[obj_name] == PLENARY:
            try:
                getattr(session, 'use_plenary_' + obj_name + '_view')()
            except AttributeError:
                pass  # session does not support this view; ignore
        else:
            try:
                getattr(session, 'use_comparative_' + obj_name + '_view')()
            except AttributeError:
                pass
Sets the underlying object views to match current view
def uint32_to_uint8(cls, img):
    """Reinterpret a uint32 image as 4 uint8 channels, flipped vertically.

    :param img: uint32 ndarray; each element is unpacked into 4 bytes.
    :returns: a uint8 array of shape ``img.shape + (4,)``, flipped along
        the first axis.
    """
    as_bytes = img.view(dtype=np.uint8)
    channels = as_bytes.reshape(img.shape + (4,))
    return np.flipud(channels)
Cast uint32 RGB image to 4 uint8 channels.
async def get_endpoint_for_did(wallet_handle: int, pool_handle: int, did: str) -> (str, Optional[str]):
    """Return endpoint information for the given DID.

    :param wallet_handle: Wallet handle (created by open_wallet).
    :param pool_handle: Pool handle (created by open_pool).
    :param did: The DID whose endpoint should be resolved.
    :return: (endpoint, transport_vk); transport_vk may be None.
    """
    logger = logging.getLogger(__name__)
    logger.debug("get_endpoint_for_did: >>> wallet_handle: %r, pool_handle: %r, did: %r",
                 wallet_handle, pool_handle, did)
    # The C callback is created once and cached on the function object.
    if not hasattr(get_endpoint_for_did, "cb"):
        logger.debug("get_endpoint_for_did: Creating callback")
        get_endpoint_for_did.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p, c_char_p))
    c_wallet_handle = c_int32(wallet_handle)
    c_pool_handle = c_int32(pool_handle)
    c_did = c_char_p(did.encode('utf-8'))
    endpoint, transport_vk = await do_call('indy_get_endpoint_for_did',
                                           c_wallet_handle,
                                           c_pool_handle,
                                           c_did,
                                           get_endpoint_for_did.cb)
    endpoint = endpoint.decode()
    transport_vk = transport_vk.decode() if transport_vk is not None else None
    res = (endpoint, transport_vk)
    logger.debug("get_endpoint_for_did: <<< res: %r", res)
    return res
Returns endpoint information for the given DID. :param wallet_handle: Wallet handle (created by open_wallet). :param pool_handle: Pool handle (created by open_pool). :param did: The DID to resolve endpoint. :return: (endpoint, transport_vk)
def upgrade(self, conn, skip_versions=()):
    """Upgrade the database to the maximum version in the upgrade scripts.

    :param conn: a DBAPI 2 connection
    :param skip_versions: the versions to skip
    :returns: the scripts applied (empty list if already up to date)
    """
    db_versions = self.get_db_versions(conn)
    self.starting_version = max(db_versions)
    # Skip everything already applied plus the explicitly excluded versions.
    to_skip = sorted(db_versions | set(skip_versions))
    scripts = self.read_scripts(None, None, to_skip)
    if not scripts:
        return []
    self.ending_version = max(s['version'] for s in scripts)
    return self._upgrade(conn, scripts)
Upgrade the database from the current version to the maximum version in the upgrade scripts. :param conn: a DBAPI 2 connection :param skip_versions: the versions to skip
def tparse(instring, lenout=_default_len_out):
    """Parse a time string and return seconds past the J2000 epoch.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tparse_c.html

    :param instring: Input time string, UTC.
    :param lenout: Available space in the output error message string.
    :return: (UTC seconds past J2000, descriptive error message)
    """
    # Pre-allocate the error-message output buffer of size lenout.
    errmsg = stypes.stringToCharP(lenout)
    lenout = ctypes.c_int(lenout)
    instring = stypes.stringToCharP(instring)
    sp2000 = ctypes.c_double()
    libspice.tparse_c(instring, lenout, ctypes.byref(sp2000), errmsg)
    return sp2000.value, stypes.toPythonString(errmsg)
Parse a time string and return seconds past the J2000 epoch on a formal calendar. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tparse_c.html :param instring: Input time string, UTC. :type instring: str :param lenout: Available space in output error message string. :type lenout: int :return: Equivalent UTC seconds past J2000, Descriptive error message. :rtype: tuple
def guess_mode(self, data):
    """Guess the image mode string represented by *data*.

    2-D arrays are grayscale ("L"); otherwise the last axis length selects
    "RGB" (3) or "RGBA" (4).

    :raises ValueError: for any other shape.
    """
    if data.ndim == 2:
        return "L"
    channel_count = data.shape[-1]
    if channel_count == 3:
        return "RGB"
    if channel_count == 4:
        return "RGBA"
    raise ValueError(
        "Un-supported shape for image conversion %s" % list(data.shape))
Guess what type of image the np.array is representing
def create_endpoint(service_name: str, *, ipv4: OptStr = None, ipv6: OptStr = None, port: OptInt = None) -> Endpoint:
    """Factory function to create an Endpoint object.

    :param service_name: name of the service for this endpoint.
    :param ipv4: optional IPv4 address.
    :param ipv6: optional IPv6 address.
    :param port: optional port number.
    """
    endpoint = Endpoint(service_name, ipv4, ipv6, port)
    return endpoint
Factory function to create Endpoint object.
def validate_card_issue_modes(issue_mode: int, cards: list) -> list:
    """Validate cards against the deck's issue-mode bitmask.

    Runs the parser registered for every set mode bit over *cards*,
    chaining each parser's output into the next.

    :param issue_mode: bitmask of IssueMode flags.
    :param cards: candidate cards.
    :returns: the surviving cards, or [] when no supported mode is set or
        any parser rejects everything.
    """
    supported_mask = 63  # bits of all currently handled issue modes
    if not bool(issue_mode & supported_mask):
        return []
    # Walk each single-bit mode flag in order.
    for i in [1 << x for x in range(len(IssueMode))]:
        if bool(i & issue_mode):
            try:
                parser_fn = cast(
                    Callable[[list], Optional[list]],
                    parsers[IssueMode(i).name]
                )
            except ValueError:
                # Bit does not correspond to a known IssueMode.
                continue
            parsed_cards = parser_fn(cards)
            if not parsed_cards:
                return []
            cards = parsed_cards
    return cards
validate cards against deck_issue modes
def update(self, attributes=None):
    """Update the resource with *attributes* via HTTP PUT.

    :param attributes: mapping of attributes to send; defaults to {}.
    :returns: self, refreshed from the server's response.
    """
    if attributes is None:
        attributes = {}
    headers = self.__class__.create_headers(attributes)
    headers.update(self._update_headers())
    result = self._client._put(
        self._update_url(),
        self.__class__.create_attributes(attributes, self),
        headers=headers
    )
    # Refresh local state from the server response.
    self._update_from_resource(result)
    return self
Updates the resource with attributes.
def compat_py2_py3():
    """Return (iteritems, itervalues) helpers working on Python 2 and 3.

    Both helpers take a dictionary and return its item/value iterator using
    whichever method the running interpreter provides.
    """
    is_py3 = sys.version_info > (3, 0)

    def iteritems(dictionary):
        return dictionary.items() if is_py3 else dictionary.iteritems()

    def itervalues(dictionary):
        return dictionary.values() if is_py3 else dictionary.itervalues()

    return iteritems, itervalues
For Python 2, 3 compatibility.
def read_meminfo():
    """Return system memory usage parsed from /proc/meminfo.

    :returns: dict mapping field names (e.g. 'MemTotal') to sizes in bytes.
    """
    with open("/proc/meminfo", "rb") as meminfo_file:
        rows = (line.split() for line in meminfo_file)
        # Keys look like b"MemTotal:"; strip the colon and scale kB -> bytes.
        return {parts[0].decode("ascii")[:-1]: int(parts[1]) * 1024
                for parts in rows}
Returns system memory usage information. :returns: The system memory usage. :rtype: dict
def cookie_to_state(cookie_str, name, encryption_key):
    """Load a state from a cookie.

    :param cookie_str: string representation of the cookie(s)
    :param name: name identifier of the cookie
    :param encryption_key: key used to decrypt the state information
    :raises SATOSAStateError: when the cookie is missing or unreadable
    :return: the loaded state
    """
    try:
        cookie = SimpleCookie(cookie_str)
        state = State(cookie[name].value, encryption_key)
    except KeyError as e:
        # Cookie with the requested name is absent.
        msg_tmpl = 'No cookie named {name} in {data}'
        msg = msg_tmpl.format(name=name, data=cookie_str)
        logger.exception(msg)
        raise SATOSAStateError(msg) from e
    except ValueError as e:
        # Cookie present but its value could not be decoded.
        msg_tmpl = 'Failed to process {name} from {data}'
        msg = msg_tmpl.format(name=name, data=cookie_str)
        logger.exception(msg)
        raise SATOSAStateError(msg) from e
    else:
        msg_tmpl = 'Loading state from cookie {data}'
        msg = msg_tmpl.format(data=cookie_str)
        satosa_logging(logger, logging.DEBUG, msg, state)
        return state
Loads a state from a cookie :type cookie_str: str :type name: str :type encryption_key: str :rtype: satosa.state.State :param cookie_str: string representation of cookie/s :param name: Name identifier of the cookie :param encryption_key: Key to encrypt the state information :return: A state
def register_archive_format(name, function, extra_args=None, description=''):
    """Register an archive format.

    :param name: name of the format.
    :param function: callable used to create archives.
    :param extra_args: optional sequence of (arg_name, value) tuples passed
        as extra arguments to *function*.
    :param description: description returned by get_archive_formats().
    :raises TypeError: if *function* is not callable or *extra_args* is not
        a sequence of 2-item sequences.
    """
    if extra_args is None:
        extra_args = []
    # BUGFIX: collections.Callable was removed in Python 3.10 (it lives in
    # collections.abc); the builtin callable() works on every version.
    if not callable(function):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')
    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
Registers an archive format. name is the name of the format. function is the callable that will be used to create archives. If provided, extra_args is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_archive_formats() function.
def gen_sites(path): " Seek sites by path. " for root, _, _ in walklevel(path, 2): try: yield Site(root) except AssertionError: continue
Seek sites by path.
def dictionize(fields: Sequence, records: Sequence) -> Generator:
    """Lazily map each record to a dict keyed by *fields*.

    :param fields: key names, positionally matched to record values.
    :param records: sequence of record tuples/sequences.
    :returns: a generator of dictionaries, one per record.
    """
    for record in records:
        yield dict(zip(fields, record))
Create dictionaries mapping fields to record data.
def rate(self, currency):
    """Return the quotation between the base and another currency.

    :raises ExchangeBackendNotInstalled: if no backend is configured.
    """
    if not self._backend:
        raise ExchangeBackendNotInstalled()
    return self._backend.rate(currency)
Return quotation between the base and another currency
def subjects(self):
    """Return identifiers for all subjects present in the cache.

    :return: list of distinct subject identifiers (unordered).
    """
    unique_ids = {record["subject_id"] for record in self._cache.find()}
    return list(unique_ids)
Return identifiers for all the subjects that are in the cache. :return: list of subject identifiers
def delete_auto_scaling_group(self, name, force_delete=False):
    """Delete the named auto scaling group.

    Succeeds only when the group has no instances and no scaling activities
    in progress, unless *force_delete* is set.

    :param name: auto scaling group name.
    :param force_delete: when truthy, request forced deletion.
    """
    params = {'AutoScalingGroupName': name}
    if force_delete:
        params['ForceDelete'] = 'true'
    return self.get_object('DeleteAutoScalingGroup', params, Request)
Deletes the specified auto scaling group if the group has no instances and no scaling activities in progress.
def set_size(self, size):
    """Set the size of the map in pixels.

    This is an expensive operation (reallocates the zoom buffers); do only
    when absolutely needed.

    :param size: (width, height) pixel size of camera/view of the group
    """
    buffer_size = self._calculate_zoom_buffer_size(size, self._zoom_level)
    self._size = size
    self._initialize_buffers(buffer_size)
Set the size of the map in pixels This is an expensive operation, do only when absolutely needed. :param size: (width, height) pixel size of camera/view of the group
def cleanup_tail(options):
    """Write the closing delimiter for a recovery output stream.

    The terminator depends on the output mode: bare newline for csv,
    closing tag for xml, closing bracket for everything else (json).
    """
    terminators = {"csv": "\n", "xml": "\n</results>\n"}
    omode = options.kwargs['omode']
    options.kwargs['fd'].write(terminators.get(omode, "\n]\n"))
cleanup the tail of a recovery
def get_profile(session):
    """Get the complete profile for the logged-in session.

    :raises MoparError: when the response indicates we are not logged in.
    """
    try:
        profile = session.get(PROFILE_URL).json()
        # A '403' errorCode in the payload means the session expired.
        if 'errorCode' in profile and profile['errorCode'] == '403':
            raise MoparError("not logged in")
        return profile
    except JSONDecodeError:
        # Non-JSON response: typically a login redirect page.
        raise MoparError("not logged in")
Get complete profile.
async def hset(self, name, key, value):
    """Set ``key`` to ``value`` within hash ``name``.

    :returns: 1 if HSET created a new field, otherwise 0.
    """
    return await self.execute_command('HSET', name, key, value)
Set ``key`` to ``value`` within hash ``name`` Returns 1 if HSET created a new field, otherwise 0
def add_module_plugin_filters(self, module_plugin_filters):
    """Add *module_plugin_filters* to the internal module filters.

    May be a single object or an iterable. Every module filter must be a
    callable taking a list of plugins and their associated names.
    """
    # Normalize a single filter to a one-element list.
    module_plugin_filters = util.return_list(module_plugin_filters)
    self.module_plugin_filters.extend(module_plugin_filters)
Adds `module_plugin_filters` to the internal module filters. May be a single object or an iterable. Every module filter must be a callable and take in a list of plugins and their associated names.
def normalize_arxiv_category(category):
    """Normalize an arXiv category to be schema compliant.

    Capitalizes the category canonically and replaces a dash by a dot when
    needed. Obsolete categories are first mapped to their current
    equivalent via _NEW_CATEGORIES.
    """
    category = _NEW_CATEGORIES.get(category.lower(), category)
    for valid_category in valid_arxiv_categories():
        if (category.lower() == valid_category.lower()
                or category.lower().replace('-', '.') == valid_category.lower()):
            return valid_category
    return category  # unknown categories pass through unchanged
Normalize arXiv category to be schema compliant. This properly capitalizes the category and replaces the dash by a dot if needed. If the category is obsolete, it also gets converted it to its current equivalent. Example: >>> from inspire_schemas.utils import normalize_arxiv_category >>> normalize_arxiv_category('funct-an') # doctest: +SKIP u'math.FA'
def kitchen_delete(backend, kitchen):
    """Delete the named kitchen.

    The 'master' kitchen is protected and cannot be deleted.

    :raises click.ClickException: when asked to delete master.
    """
    click.secho('%s - Deleting kitchen %s' % (get_datetime(), kitchen), fg='green')
    master = 'master'
    if kitchen.lower() != master.lower():
        check_and_print(DKCloudCommandRunner.delete_kitchen(backend.dki, kitchen))
    else:
        raise click.ClickException('Cannot delete the kitchen called %s' % master)
Provide the name of the kitchen to delete
def verify(self):
    """Check all rows for invalidating conditions.

    :returns: the first non-zero result from verify_row (an error message),
        or 0 when every row passes.
    """
    row_results = (self.verify_row(index) for index in range(self.nrows()))
    return next((outcome for outcome in row_results if outcome != 0), 0)
Checks all parameters for invalidating conditions :returns: str -- message if error, 0 otherwise
def default_value(self):
    """Get the default value of the field.

    Coded attributes use their table-defined defaults; integer fields
    default to -1, everything else to the empty string.
    """
    if self.name in tsdb_coded_attributes:
        return tsdb_coded_attributes[self.name]
    elif self.datatype == ':integer':
        return -1
    else:
        return ''
Get the default value of the field.
def init_chain(self):
    """Autodetect the devices attached to the Controller and initialize a
    JTAGDevice for each.

    Required before device-specific primitives can be used. Safe to call
    repeatedly; only the first call scans the chain.
    """
    if not self._hasinit:
        self._hasinit = True
        self._devices = []
        self.jtag_enable()
        while True:
            # Shift out a 32-bit IDCODE; a null code marks end of chain.
            idcode = self.rw_dr(bitcount=32, read=True, lastbit=False)()
            if idcode in NULL_ID_CODES:
                break
            dev = self.initialize_device_from_id(self, idcode)
            if self._debug:
                print(dev)
            self._devices.append(dev)
            if len(self._devices) >= 128:
                raise JTAGTooManyDevicesError("This is an arbitrary "
                    "limit to deal with breaking infinite loops. If "
                    "you have more devices, please open a bug")
        self.jtag_disable()
        # NOTE(review): list reversed after the scan -- presumably to match
        # physical chain order; confirm against consumers of _devices.
        self._devices.reverse()
Autodetect the devices attached to the Controller, and initialize a JTAGDevice for each. This is a required call before device specific Primitives can be used.
def genes_with_peak(self, peaks, transform_func=None, split=False, intersect_kwargs=None, id_attribute='ID', *args, **kwargs):
    """Return a boolean index of genes that have a peak nearby.

    :param peaks: filename or pybedtools.BedTool of intervals.
    :param transform_func: applied to each feature from self.features();
        may return a single Interval or an iterable of them. Extra
        *args/**kwargs are forwarded to it.
    :param split: route transform_func through BedTool.split (for
        functions yielding multiple intervals) instead of BedTool.each.
    :param intersect_kwargs: kwargs for pybedtools.BedTool.intersect.
    :param id_attribute: GTF/GFF attribute holding the gene id; must match
        the dataframe index for meaningful results.
    """
    def _transform_func(x):
        # Wrapper used with BedTool.split: normalize a single Interval
        # into a list and only emit when the item is truthy.
        result = transform_func(x)
        if isinstance(result, pybedtools.Interval):
            result = [result]
        for i in result:
            if i:
                # NOTE(review): yields the whole *result* once per truthy
                # item -- looks like it should be `yield i`; confirm.
                yield result
    intersect_kwargs = intersect_kwargs or {}
    if not self._cached_features:
        # Materialize the feature set once; reused on later calls.
        self._cached_features = pybedtools\
            .BedTool(self.features())\
            .saveas()
    if transform_func:
        if split:
            features = self._cached_features\
                .split(_transform_func, *args, **kwargs)
        else:
            features = self._cached_features\
                .each(transform_func, *args, **kwargs)
    else:
        features = self._cached_features
    # Unique gene ids whose features intersect the peaks.
    hits = list(set([i[id_attribute] for i in features.intersect(
        peaks, **intersect_kwargs)]))
    return self.data.index.isin(hits)
Returns a boolean index of genes that have a peak nearby. Parameters ---------- peaks : string or pybedtools.BedTool If string, then assume it's a filename to a BED/GFF/GTF file of intervals; otherwise use the pybedtools.BedTool object directly. transform_func : callable This function will be applied to each gene object returned by self.features(). Additional args and kwargs are passed to `transform_func`. For example, if you're looking for peaks within 1kb upstream of TSSs, then pybedtools.featurefuncs.TSS would be a useful `transform_func`, and you could supply additional kwargs of `upstream=1000` and `downstream=0`. This function can return iterables of features, too. For example, you might want to look for peaks falling within the exons of a gene. In this case, `transform_func` should return an iterable of pybedtools.Interval objects. The only requirement is that the `name` field of any feature matches the index of the dataframe. intersect_kwargs : dict kwargs passed to pybedtools.BedTool.intersect. id_attribute : str The attribute in the GTF or GFF file that contains the id of the gene. For meaningful results to be returned, a gene's ID be also found in the index of the dataframe. For GFF files, typically you'd use `id_attribute="ID"`. For GTF files, you'd typically use `id_attribute="gene_id"`.
def _get_state():
    """Return the connman connection state (e.g. 'online', 'offline').

    :raises salt.exceptions.CommandExecutionError: on a DBus failure.
    """
    try:
        return pyconnman.ConnManager().get_property('State')
    except KeyError:
        # State property unavailable: treat as offline.
        return 'offline'
    except dbus.DBusException as exc:
        raise salt.exceptions.CommandExecutionError('Connman daemon error: {0}'.format(exc))
Returns the state of connman
def remove_tag(self, task, params=None, **options):
    """Remove a tag from the task. Returns an empty data block.

    :param task: id of the task to remove a tag from.
    :param params: request data; expects a 'tag' key with the tag id.
    :param options: extra request options forwarded to the client.
    """
    # BUGFIX: the former mutable default ({}) was shared across calls;
    # use the None sentinel instead.
    if params is None:
        params = {}
    path = "/tasks/%s/removeTag" % (task)
    return self.client.post(path, params, **options)
Removes a tag from the task. Returns an empty data block. Parameters ---------- task : {Id} The task to remove a tag from. [data] : {Object} Data for the request - tag : {Id} The tag to remove from the task.
def set_number(self, key, value):
    """Set *key* to an integer *value* in storage, under the global lock.

    :returns: the stored value, or None (implicit) when *value* is not an
        integer (an error is logged instead).
    """
    storage = self.storage
    if not isinstance(value, int):
        logger.error("set_number: Value must be an integer")
        return
    try:
        lock.acquire()
        storage[key] = value
    finally:
        # Mark the persistent mapping dirty before releasing the lock.
        self.storage._p_changed = True
        lock.release()
    return storage[key]
set a key's value
def on_load(target: "EncryptableMixin", context):
    """Intercept SQLAlchemy's instance load event.

    Decrypts the loaded instance when applicable and stores the resulting
    plaintext back on it.
    """
    decrypt, plaintext = decrypt_instance(target)
    if decrypt:
        target.plaintext = plaintext
Intercept SQLAlchemy's instance load event.
def removeCallback(cls, eventType, func, record=None):
    """Remove the first callback matching (func, record) for *eventType*.

    :param eventType: <str> event key in the callbacks registry.
    :param func: <callable> previously registered callback.
    :param record: optional record the callback was registered with.
    """
    callbacks = cls.callbacks()
    callbacks.setdefault(eventType, [])
    # BUGFIX: xrange is Python-2-only; enumerate works everywhere and
    # avoids the manual index arithmetic. Deleting then breaking
    # immediately keeps the iteration safe.
    for i, (my_func, my_record, _) in enumerate(callbacks[eventType]):
        if func == my_func and record == my_record:
            del callbacks[eventType][i]
            break
Removes a callback from the model's event callbacks. :param eventType: <str> :param func: <callable>
def describe_api_stages(restApiId, deploymentId, region=None, key=None, keyid=None, profile=None):
    """Get all API stages for a given apiID and deploymentID.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_apigateway.describe_api_stages restApiId deploymentId
    """
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        stages = conn.get_stages(restApiId=restApiId, deploymentId=deploymentId)
        # Datetimes are stringified so the result is serializable.
        return {'stages': [_convert_datetime_str(stage) for stage in stages['item']]}
    except ClientError as e:
        return {'error': __utils__['boto3.get_error'](e)}
Get all API stages for a given apiID and deploymentID CLI Example: .. code-block:: bash salt myminion boto_apigateway.describe_api_stages restApiId deploymentId
def log_parameter_and_gradient_statistics(self, model: Model, batch_grad_norm: float) -> None:
    """Send the mean and std of all parameters and gradients to tensorboard,
    as well as logging the average gradient norm.
    """
    if self._should_log_parameter_statistics:
        for name, param in model.named_parameters():
            self.add_train_scalar("parameter_mean/" + name, param.data.mean())
            self.add_train_scalar("parameter_std/" + name, param.data.std())
            if param.grad is not None:
                if param.grad.is_sparse:
                    # Sparse gradients: statistics over the stored values.
                    grad_data = param.grad.data._values()
                else:
                    grad_data = param.grad.data
                # Skip empty gradients -- mean/std are undefined there.
                if torch.prod(torch.tensor(grad_data.shape)).item() > 0:
                    self.add_train_scalar("gradient_mean/" + name, grad_data.mean())
                    self.add_train_scalar("gradient_std/" + name, grad_data.std())
                else:
                    logger.info("No gradient for %s, skipping tensorboard logging.", name)
        if batch_grad_norm is not None:
            self.add_train_scalar("gradient_norm", batch_grad_norm)
Send the mean and std of all parameters and gradients to tensorboard, as well as logging the average gradient norm.
def has_isotropic_cells(self):
    """``True`` if the partition is uniform and all cell sides are equal.

    Always ``True`` for 1D partitions (the pairwise comparison is empty).
    """
    if not self.is_uniform:
        return False
    sides = self.cell_sides
    return np.allclose(sides[:-1], sides[1:])
``True`` if `grid` is uniform and `cell_sides` are all equal. Always ``True`` for 1D partitions. Examples -------- >>> part = uniform_partition([0, -1], [1, 1], (5, 10)) >>> part.has_isotropic_cells True >>> part = uniform_partition([0, -1], [1, 1], (5, 5)) >>> part.has_isotropic_cells False
def _set_proxy_filter(warningstuple):
    """Store over-long warning filters in a side map, returning a proxy.

    Filter tuples with more than 5 fields cannot be used directly; the
    original is stashed in _proxy_map, keyed by an integer carried in the
    proxy filter's last (lineno) slot.
    """
    if len(warningstuple) > 5:
        key = len(_proxy_map)+1
        _proxy_map[key] = warningstuple
        # Proxy entry: always-match pattern, ProxyWarning category,
        # with the map key smuggled in the lineno position.
        return ('always', re_matchall, ProxyWarning, re_matchall, key)
    else:
        return warningstuple
set up a proxy that store too long warnings in a separate map
def matches_pattern(self, other):
    """Test whether this message matches *other* as a pattern/template.

    A None messageType or extended on either side acts as a wildcard.
    Objects lacking a ``messageType`` attribute never match.
    """
    if not hasattr(other, 'messageType'):
        return False
    type_matches = (self.messageType is None
                    or other.messageType is None
                    or self.messageType == other.messageType)
    extended_matches = (self.extended is None
                        or other.extended is None
                        or self.extended == other.extended)
    return type_matches and extended_matches
Test if current message match a patterns or template.
def Execute(self, *params, **kw):
    """Synchronously execute the specified GP task.

    Parameters are passed either positionally or as keywords.
    """
    fp = self.__expandparamstodict(params, kw)
    return self._get_subfolder('execute/', GPExecutionResult, fp)
Synchronously execute the specified GP task. Parameters are passed in either in order or as keywords.
def add_bond(self, name, ifaces, bond_mode=None, lacp=None):
    """Create a bonded port.

    :param name: Port name to be created
    :param ifaces: List of interfaces containing at least 2 interfaces
    :param bond_mode: Bonding mode (active-backup, balance-tcp or balance-slb)
    :param lacp: LACP mode (active, passive or off)
    """
    assert len(ifaces) >= 2
    # BUGFIX: the options were concatenated without any separator, so
    # supplying both produced e.g. 'bond_mode=active-backuplacp=active'.
    # Build them individually and join with a space instead.
    opts = []
    if bond_mode:
        opts.append('bond_mode=%s' % bond_mode)
    if lacp:
        opts.append('lacp=%s' % lacp)
    options = ' '.join(opts)
    command_add = ovs_vsctl.VSCtlCommand(
        'add-bond', (self.br_name, name, ifaces), options)
    self.run_command([command_add])
Creates a bonded port. :param name: Port name to be created :param ifaces: List of interfaces containing at least 2 interfaces :param bond_mode: Bonding mode (active-backup, balance-tcp or balance-slb) :param lacp: LACP mode (active, passive or off)
def _build_install_args(options): install_args = [] if options.user_install: if sys.version_info < (2, 6): log.warn('--user requires Python 2.6 or later') raise SystemExit(1) install_args.append('--user') return install_args
Build the arguments to 'python setup.py install' on the setuptools package
def delete_content(self, max_chars=100):
    """Delete input-field content by repeatedly typing HOME, then DELETE.

    :param max_chars: safety cap on the number of delete keystrokes.
    :return: self, for chaining.
    """
    def delete_content_element():
        # Keep deleting from the start of the field until it is empty
        # or the keystroke cap is reached.
        chars_deleted = 0
        while len(self.get_attribute('value')) > 0 and chars_deleted < max_chars:
            self.click()
            self.send_keys(Keys.HOME)
            self.send_keys(Keys.DELETE)
            chars_deleted += 1
    self.execute_and_handle_webelement_exceptions(delete_content_element, 'delete input contents')
    return self
Deletes content in the input field by repeatedly typing HOME, then DELETE @rtype: WebElementWrapper @return: Returns itself
def async_from_options(options):
    """Deserialize an Async (or Async subclass) from an options dict.

    The '_type' key selects the concrete class (dotted path), defaulting
    to furious.async.Async.
    """
    _type = options.pop('_type', 'furious.async.Async')
    _type = path_to_reference(_type)
    return _type.from_dict(options)
Deserialize an Async or Async subclass from an options dict.
def chunks(iterable, size=100):
    """Split *iterable* into lazy chunks of *size* elements.

    Default is 100 since that's what Telegram uses the most. Each yielded
    chunk is itself an iterator; consume each chunk before advancing.
    """
    iterator = iter(iterable)
    tail_length = size - 1
    for first in iterator:
        # The chunk is the element just pulled plus up to size-1 more.
        yield itertools.chain([first], itertools.islice(iterator, tail_length))
Turns the given iterable into chunks of the specified size, which is 100 by default since that's what Telegram uses the most.
def write(self, p_todos):
    """Write todo items to the todo.txt file.

    :param p_todos: either a list of todo items (each written via str())
        or a single string written verbatim. A trailing newline is always
        appended.
    """
    # BUGFIX: the original tested `p_todos is list`, comparing the value
    # against the *type object* -- always False, so list input crashed in
    # the else branch. isinstance() is the correct check. Also use a
    # context manager so the file is closed on errors.
    with codecs.open(self.path, 'w', encoding="utf-8") as todofile:
        if isinstance(p_todos, list):
            for todo in p_todos:
                todofile.write(str(todo))
        else:
            todofile.write(p_todos)
        todofile.write("\n")
Writes all the todo items to the todo.txt file. p_todos can be a list of todo items, or a string that is just written to the file.
def createSuperimposedSensorySDRs(sequenceSensations, objectSensations):
    """Union the sensory SDRs of two sensation lists, element-wise.

    The location SDR of each result comes from the object sensation; the
    feature SDR is the union of both feature SDRs. Assumes one cortical
    column (key 0) and equally long input lists.
    """
    assert len(sequenceSensations) == len(objectSensations)
    combined = []
    for seqSensation, objSensation in zip(sequenceSensations, objectSensations):
        location, objFeature = objSensation[0]
        seqFeature = seqSensation[0][1]
        combined.append({0: (location, seqFeature.union(objFeature))})
    return combined
Given two lists of sensations, create a new list where the sensory SDRs are the union of the individual sensory SDRs. Keep the location SDRs from the object. A list of sensations has the following format: [ { 0: (set([1, 5, 10]), set([6, 12, 52])), # location, feature for CC0 }, { 0: (set([5, 46, 50]), set([8, 10, 11])), # location, feature for CC0 }, ] We assume there is only one cortical column, and that the two input lists have identical length.
def regions():
    """Get all available regions for the RDS service.

    :rtype: list
    :return: A list of :class:`boto.rds.regioninfo.RDSRegionInfo`
    """
    # Every RDS endpoint follows the same rds.<region>.amazonaws.com shape.
    region_names = ['us-east-1', 'eu-west-1', 'us-west-1', 'us-west-2',
                    'sa-east-1', 'ap-northeast-1', 'ap-southeast-1']
    return [RDSRegionInfo(name=region,
                          endpoint='rds.%s.amazonaws.com' % region)
            for region in region_names]
Get all available regions for the RDS service. :rtype: list :return: A list of :class:`boto.rds.regioninfo.RDSRegionInfo`
def render(self, namespace):
    """Render this template part.

    The namespace dictionary is consulted only when this part of the
    template actually used variables.
    """
    if self._need_format:
        return self._text.format_map(namespace.dictionary)
    return self._text
Render template lines. Note: we only need to parse the namespace if we used variables in this part of the template.
def isoformat(self):
    """Generate an ISO 8601 formatted time stamp.

    Naive times (no tzinfo) get an explicit '+00:00' offset.

    :returns: ISO 8601 formatted time stamp string.
    """
    stamp = self.strftime('%Y-%m-%dT%H:%M:%S')
    offset = self.tzinfo.as_timezone() if self.tzinfo else '+00:00'
    return stamp + offset
Generate an ISO 8601 formatted time stamp. Returns: str: `ISO 8601`_ formatted time stamp .. _ISO 8601: http://www.cl.cam.ac.uk/~mgk25/iso-time.html
def delete_room(room, reason=''):
    """Delete a MUC room from the XMPP server, plus its logs.

    Rooms hosted on a custom server are left untouched.
    """
    if room.custom_server:
        return
    def _delete_room(xmpp):
        # Executed with an active XMPP session by _execute_xmpp.
        muc = xmpp.plugin['xep_0045']
        muc.destroy(room.jid, reason=reason)
    current_plugin.logger.info('Deleting room %s', room.jid)
    _execute_xmpp(_delete_room)
    delete_logs(room)
Deletes a MUC room from the XMPP server.
def get_files(dirname):
    """Yield directory entries: regular files as-is, directories with a
    trailing '/'. Symbolic links are skipped entirely.
    """
    for entry in os.listdir(dirname):
        full_path = os.path.join(dirname, entry)
        if os.path.islink(full_path):
            continue  # symlinks are deliberately excluded
        if os.path.isdir(full_path):
            yield entry + "/"
        elif os.path.isfile(full_path):
            yield entry
Get iterator of entries in directory. Only allows regular files and directories, no symlinks.
def _getLayer(self, name, **kwargs): for glyph in self.layers: if glyph.layer.name == name: return glyph raise ValueError("No layer named '%s' in glyph '%s'." % (name, self.name))
name will be a string, but there may not be a layer with a name matching the string. If not, a ``ValueError`` must be raised. Subclasses may override this method.
def _checkResponseNumberOfRegisters(payload, numberOfRegisters):
    """Check that the number of written registers in the response is correct.

    Bytes 2 and 3 (zero-based) of the payload hold the value.

    Args:
        * payload (string): The payload
        * numberOfRegisters (int): Number of registers that have been written

    Raises:
        TypeError, ValueError
    """
    _checkString(payload, minlength=4, description='payload')
    _checkInt(numberOfRegisters, minvalue=1, maxvalue=0xFFFF, description='numberOfRegisters')
    BYTERANGE_FOR_NUMBER_OF_REGISTERS = slice(2, 4)
    bytesForNumberOfRegisters = payload[BYTERANGE_FOR_NUMBER_OF_REGISTERS]
    receivedNumberOfWrittenReisters = _twoByteStringToNum(bytesForNumberOfRegisters)
    if receivedNumberOfWrittenReisters != numberOfRegisters:
        raise ValueError('Wrong number of registers to write in the response: {0}, but commanded is {1}. The data payload is: {2!r}'.format( \
            receivedNumberOfWrittenReisters, numberOfRegisters, payload))
Check that the number of written registers as given in the response is correct. The bytes 2 and 3 (zero based counting) in the payload holds the value. Args: * payload (string): The payload * numberOfRegisters (int): Number of registers that have been written Raises: TypeError, ValueError
def fetch(url, dest, force=False):
    """Retrieve data from *url* and store it into *dest*.

    :param url: link to the remote data.
    :param dest: path where the file must be stored.
    :param force: overwrite if the file exists.
    :return: (cached, dest) -- cached is True if the file already existed.
    """
    cached = True
    if force or not os.path.exists(dest):
        cached = False
        r = requests.get(url, stream=True)
        # NOTE(review): a non-200 status silently returns without creating
        # dest -- confirm callers handle the missing file.
        if r.status_code == 200:
            with open(dest, 'wb') as f:
                for chunk in r.iter_content(1024):
                    f.write(chunk)
    return cached, dest
Retrieve data from an url and store it into dest. Parameters ---------- url: str Link to the remote data dest: str Path where the file must be stored force: bool (default=False) Overwrite if the file exists Returns ------- cached: bool True if the file already exists dest: str The same string of the parameter
def on_connection_open(self, connection):
    """Called by pika once the connection to RabbitMQ has been established.

    Registers flow-control callbacks on the connection, then opens the
    channel.

    :type connection: pika.TornadoConnection
    """
    LOGGER.debug('Connection opened')
    connection.add_on_connection_blocked_callback(
        self.on_connection_blocked)
    connection.add_on_connection_unblocked_callback(
        self.on_connection_unblocked)
    connection.add_backpressure_callback(self.on_back_pressure_detected)
    self.channel = self._open_channel()
This method is called by pika once the connection to RabbitMQ has been established. :type connection: pika.TornadoConnection
def _call(self, x):
    """Return values at the sampling indices, possibly weighted.

    The 'point_eval' variant uses unit weights; 'integrate' scales by the
    domain's cell volume when it defines one.

    :raises RuntimeError: on an unknown variant.
    """
    out = x.asarray().ravel()[self._indices_flat]
    if self.variant == 'point_eval':
        weights = 1.0
    elif self.variant == 'integrate':
        # Fall back to 1.0 when the domain has no cell_volume attribute.
        weights = getattr(self.domain, 'cell_volume', 1.0)
    else:
        raise RuntimeError('bad variant {!r}'.format(self.variant))
    if weights != 1.0:
        out *= weights
    return out
Return values at indices, possibly weighted.
def _next(self, **kwargs):
    """Get the next item in any particular category"""
    spec = self._pagination_default_spec(kwargs)
    spec.update(kwargs)

    # Restrict the query to records strictly after the current one.
    query = queries.where_after_entry(queries.build_query(spec), self._record)
    ordered = query.order_by(model.Entry.local_date, model.Entry.id)

    # Only the first match is needed; slicing keeps the query lazy.
    for rec in ordered[:1]:
        return Entry(rec)
    return None
Get the next item in any particular category
async def _connect(self, hostname, port, reconnect=False, channels=None,
                   encoding=protocol.DEFAULT_ENCODING, source_address=None):
    """Connect to IRC host.

    :param hostname: Server host to connect to.
    :param port: Server port.
    :param reconnect: If True and a connection already exists, reuse it.
    :param channels: Channels to auto-join after connecting (default: none).
    :param encoding: Wire encoding to use.
    :param source_address: Optional local address to bind to.
    """
    # BUG FIX: the original used a mutable default (channels=[]), which is
    # shared across calls; use a None sentinel and build a fresh list.
    if channels is None:
        channels = []

    if not reconnect or not self.connection:
        # Fresh connect (or no existing connection object): remember the
        # channels to auto-join and create a new Connection.
        self._autojoin_channels = channels
        self.connection = connection.Connection(hostname, port,
                                                source_address=source_address,
                                                eventloop=self.eventloop)
        self.encoding = encoding

    await self.connection.connect()
Connect to IRC host.
def add_patchs_to_build_without_pkg_config(self, lib_dir, include_dir):
    """Add patches to remove the pkg-config command and the rpm.pc part.

    Replace them with the given library path (lib_dir) and include path
    (include_dir), so the build works without an rpm.pc file.
    """
    lib_patch = {
        'src': r"pkgconfig\('--libs-only-L'\)",
        'dest': "['{0}']".format(lib_dir),
    }
    link_patch = {
        'src': r"pkgconfig\('--libs(-only-l)?'\)",
        'dest': "['rpm', 'rpmio']",
        'required': True,
    }
    cflags_patch = {
        'src': r"pkgconfig\('--cflags'\)",
        'dest': "['{0}']".format(include_dir),
        'required': True,
    }
    self.patches.extend([lib_patch, link_patch, cflags_patch])
Add patches to remove pkg-config command and rpm.pc part. Replace with given library_path: lib_dir and include_path: include_dir without rpm.pc file.
def _join_results(self, results, coarse, join): rval = OrderedDict() i_keys = set() for res in results: i_keys.update( res.keys() ) for i_key in sorted(i_keys): if coarse: rval[i_key] = join( [res.get(i_key) for res in results] ) else: rval[i_key] = OrderedDict() r_keys = set() for res in results: r_keys.update( res.get(i_key,{}).keys() ) for r_key in sorted(r_keys): rval[i_key][r_key] = join( [res.get(i_key,{}).get(r_key) for res in results] ) return rval
Join a list of results. Supports both get and series.
def apply_groups(cls, obj, options=None, backend=None, clone=True, **kwargs):
    """Apply nested options (grouped by type) to obj, returning the result.

    ``options`` may be a raw specification string (parsed with OptsSpec)
    or a dict indexed by type[.group][.label]; keyword arguments supply
    options grouped by option type (e.g. 'plot', 'style', 'norm').
    """
    if isinstance(options, basestring):
        from ..util.parser import OptsSpec
        try:
            options = OptsSpec.parse(options)
        except SyntaxError:
            # Bare option strings with no target are qualified with the
            # object's own class name and re-parsed.
            options = OptsSpec.parse(
                '{clsname} {options}'.format(clsname=obj.__class__.__name__,
                                             options=options))
    if kwargs:
        # NOTE(review): keyword options replace any `options` argument
        # instead of merging with it -- confirm this is intended.
        options = cls._group_kwargs_to_options(obj, kwargs)
    # Apply the per-backend option groups; obj is rebound each iteration
    # (a clone unless clone=False).
    for backend, backend_opts in cls._grouped_backends(options, backend):
        obj = cls._apply_groups_to_backend(obj, backend_opts, backend, clone)
    return obj
Applies nested options definition grouped by type. Applies options on an object or nested group of objects, returning a new object with the options applied. This method accepts the separate option namespaces explicitly (i.e 'plot', 'style' and 'norm'). If the options are to be set directly on the object a simple format may be used, e.g.: opts.apply_groups(obj, style={'cmap': 'viridis'}, plot={'show_title': False}) If the object is nested the options must be qualified using a type[.group][.label] specification, e.g.: opts.apply_groups(obj, {'Image': {'plot': {'show_title': False}, 'style': {'cmap': 'viridis}}}) If no opts are supplied all options on the object will be reset. Args: options (dict): Options specification Options specification should be indexed by type[.group][.label] or option type ('plot', 'style', 'norm'). backend (optional): Backend to apply options to Defaults to current selected backend clone (bool, optional): Whether to clone object Options can be applied inplace with clone=False **kwargs: Keywords of options by type Applies options directly to the object by type (e.g. 'plot', 'style', 'norm') specified as dictionaries. Returns: Returns the object or a clone with the options applied
def flags(flags):
    """Return the names of the class description flags set in the given integer.

    :param flags: A class description flag entry
    :return: The flag names joined into a single string
    """
    matching = [
        descr
        for key, descr in OpCodeDebug.STREAM_CONSTANT.items()
        if key & flags
    ]
    matching.sort()
    return ", ".join(matching)
Returns the names of the class description flags found in the given integer :param flags: A class description flag entry :return: The flags names as a single string
def log_warn(message, args):
    """Logs a warning message using the default logger.

    :param message: Format string for the log record.
    :param args: Sequence of arguments interpolated into the message.
    """
    logger = get_logger(DEFAULT_LOGGER, log_creation=False)
    logger.log(logging.WARNING, message, *args)
Logs a warning message using the default logger.
def delete_database(client, db_name, username=None, password=None):
    """Delete Arangodb database

    :param client: Arango client instance.
    :param db_name: Name of the database to delete.
    :param username: Optional username, resolved via get_user_creds.
    :param password: Optional password, resolved via get_user_creds.
    :return: Result of the delete call, or None if it failed.
    """
    (username, password) = get_user_creds(username, password)
    # Database administration goes through the special _system database.
    sys_db = client.db("_system", username=username, password=password)
    try:
        return sys_db.delete_database(db_name)
    except Exception:
        # BUG FIX: the message previously logged a literal "{db_name}"
        # because the string was never formatted.
        log.warn("No arango database {db_name} to delete, does not exist".format(
            db_name=db_name))
Delete Arangodb database
def addCmdClass(self, ctor, **opts):
    """Add a Cmd subclass to this cli."""
    cmd = ctor(self, **opts)
    self.cmds[cmd.getCmdName()] = cmd
Add a Cmd subclass to this cli.
def to_python(value, seen=None):
    """Reify values to their Python equivalents.

    Does recursion detection, failing when that happens.
    """
    seen = seen or set()

    if isinstance(value, framework.TupleLike):
        if value.ident in seen:
            raise RecursionException('to_python: infinite recursion while evaluating %r' % value)
        # Record this tuple's identity before descending into its values.
        branch_seen = seen.union([value.ident])
        return {key: to_python(value[key], seen=branch_seen)
                for key in value.exportable_keys()}

    if isinstance(value, dict):
        return {key: to_python(value[key], seen=seen) for key in value.keys()}

    if isinstance(value, list):
        return [to_python(item, seen=seen) for item in value]

    return value
Reify values to their Python equivalents. Does recursion detection, failing when that happens.
def rvs(df, gamma, n):
    """Generates random variables from a Skew t distribution

    Parameters
    ----------
    df : float
        degrees of freedom parameter
    gamma : float
        skewness parameter
    n : int or list
        Number of simulations to perform; if list input, produces array

    Returns
    -------
    np.ndarray
        Simulated draws; shaped (n[0], n[1]) for list input.
    """
    # Inverse-transform sampling: push uniforms through the quantile function.
    # FIX: use isinstance instead of type(...) == list so subclasses work.
    if isinstance(n, list):
        u = np.random.uniform(size=n[0]*n[1])
        result = Skewt.ppf(q=u, df=df, gamma=gamma)
        result = np.split(result, n[0])
        return np.array(result)
    else:
        u = np.random.uniform(size=n)
        if isinstance(df, np.ndarray) or isinstance(gamma, np.ndarray):
            # Element-wise parameters: evaluate the quantile per draw.
            return np.array([Skewt.ppf(q=np.array([u[i]]), df=df[i], gamma=gamma[i])[0] for i in range(n)])
        else:
            return Skewt.ppf(q=u, df=df, gamma=gamma)
Generates random variables from a Skew t distribution Parameters ---------- df : float degrees of freedom parameter gamma : float skewness parameter n : int or list Number of simulations to perform; if list input, produces array
async def unload_by_path(self, path):
    """Unload a module by full path.

    Dependencies are automatically unloaded if they are marked to be
    services.
    """
    _, module = findModule(path, False)
    if module is None:
        raise ModuleLoadException('Cannot find module: ' + repr(path))
    return await self.unloadmodule(module)
Unload a module by full path. Dependencies are automatically unloaded if they are marked to be services.
def _parse_text(self, element_name, namespace=''): try: text = self._channel.find('.//' + namespace + element_name).text except AttributeError: raise Exception( 'Element, {0} not found in RSS feed'.format(element_name) ) return text
Returns the text, as a string, of the specified element in the specified namespace of the RSS feed. Takes element_name and namespace as strings.
def ensure_namespace(self, name):
    """Only creates a namespace if it hasn't yet been defined.

    :param str name: Name of the namespace.
    :return ApiNamespace:
    """
    try:
        return self.namespaces[name]
    except KeyError:
        namespace = ApiNamespace(name)
        self.namespaces[name] = namespace
        return namespace
Only creates a namespace if it hasn't yet been defined. :param str name: Name of the namespace. :return ApiNamespace:
def reorder_distance(p_atoms, q_atoms, p_coord, q_coord):
    """Match q-structure atoms to p-structure atoms by distance rank.

    Within each atom type, both atom groups are ranked by their distance
    from the origin; the k-th closest q atom is paired with the k-th
    closest p atom.

    Parameters
    ----------
    p_atoms : array
        (N,) atom identifiers of the reference structure.
    q_atoms : array
        (N,) atom identifiers of the structure to reorder.
    p_coord : array
        (N,D) coordinates of the reference structure.
    q_coord : array
        (N,D) coordinates of the structure to reorder.

    Returns
    -------
    view_reorder : array
        (N,) integer index array such that ``q_coord[view_reorder]``
        follows the ordering of ``p_coord``.
    """
    mapping = np.zeros(q_atoms.shape, dtype=int)

    for atom_type in np.unique(p_atoms):
        p_idx, = np.where(p_atoms == atom_type)
        q_idx, = np.where(q_atoms == atom_type)

        # Rank both groups by distance from the origin.
        p_rank = np.argsort(np.linalg.norm(p_coord[p_idx], axis=1))
        q_rank = np.argsort(np.linalg.norm(q_coord[q_idx], axis=1))

        # For each p position, select the q atom sharing its distance rank.
        mapping[p_idx] = q_idx[q_rank[np.argsort(p_rank)]]

    return mapping
Matches atoms of the same type between two structures by their distance from the origin, and returns the index array that re-orders the q-structure to follow the p-structure. Parameters ---------- p_atoms : array (N,1) matrix, where N is points holding the atoms' names for the reference structure q_atoms : array (N,1) matrix holding the atoms' names for the structure to reorder p_coord : array (N,D) matrix, where N is points and D is dimension, for the reference structure q_coord : array (N,D) matrix for the structure to reorder Returns ------- view_reorder : array (N,) integer index array such that q_coord[view_reorder] follows the ordering of p_coord
def by_phone(self, phone, cc=None):
    """Perform a Yelp Phone API Search based on phone number given.

    Args:
        phone - Phone number to search by
        cc - ISO 3166-1 alpha-2 country code. (Optional)
    """
    _, body = self._http_request(self.BASE_URL, phone=phone, cc=cc)
    return json.loads(body)
Perform a Yelp Phone API Search based on phone number given. Args: phone - Phone number to search by cc - ISO 3166-1 alpha-2 country code. (Optional)
def reload(self):
    """Refreshes the file with the data from the server."""
    try:
        # Prefer the resource's own href when available.
        data = self._api.get(self.href, append_base=False).json()
        resource = File(api=self._api, **data)
    except Exception:
        try:
            # Fall back to fetching by id through the standard endpoint.
            data = self._api.get(
                self._URL['get'].format(id=self.id)).json()
            resource = File(api=self._api, **data)
        except Exception:
            raise SbgError('Resource can not be refreshed!')

    # Adopt the freshly fetched state and reset the dirty-tracking baseline.
    self._data = resource._data
    self._dirty = resource._dirty
    self._old = copy.deepcopy(self._data.data)

    # Drop a stale '_method' attribute if present; absence is fine.
    try:
        delattr(self, '_method')
    except AttributeError:
        pass
Refreshes the file with the data from the server.
def save(self, path, group=None):
    """Save array to a Numpy .npy, hdf, or text file.

    When saving a complex array as text, the real and imaginary parts are
    saved as the first and second column respectively. When using hdf
    format, the data is stored as a single vector, along with relevant
    attributes.

    Parameters
    ----------
    path: string
        Destination file path. Must end with either .hdf, .npy or .txt.
    group: string
        Additional name for internal storage use. Ex. hdf storage uses
        this as the key value.

    Raises
    ------
    ValueError
        If path does not end in .npy, .txt, or .hdf.
    """
    ext = _os.path.splitext(path)[1]
    if ext == '.npy':
        _numpy.save(path, self.numpy())
    elif ext == '.txt':
        if self.kind == 'real':
            _numpy.savetxt(path, self.numpy())
        elif self.kind == 'complex':
            # Two columns: real part, imaginary part.
            values = self.numpy()
            output = _numpy.vstack((values.real, values.imag)).T
            _numpy.savetxt(path, output)
    elif ext == '.hdf':
        key = 'data' if group is None else group
        # FIX: use a context manager (with explicit append mode, matching
        # the old default) so the HDF5 handle is always closed -- the
        # previous code leaked the open file object.
        with h5py.File(path, 'a') as f:
            f.create_dataset(key, data=self.numpy(), compression='gzip',
                             compression_opts=9, shuffle=True)
    else:
        raise ValueError('Path must end with .npy, .txt, or .hdf')
Save array to a Numpy .npy, hdf, or text file. When saving a complex array as text, the real and imaginary parts are saved as the first and second column respectively. When using hdf format, the data is stored as a single vector, along with relevant attributes. Parameters ---------- path: string Destination file path. Must end with either .hdf, .npy or .txt. group: string Additional name for internal storage use. Ex. hdf storage uses this as the key value. Raises ------ ValueError If path does not end in .npy or .txt.
def missed_statements(self, filename):
    """Return a list of uncovered line numbers for each of the missed
    statements found for the file `filename`.
    """
    class_el = self._get_class_element_by_filename(filename)
    # Lines with zero hits are the uncovered statements.
    uncovered = class_el.xpath('./lines/line[@hits=0]')
    return [int(line.attrib['number']) for line in uncovered]
Return a list of uncovered line numbers for each of the missed statements found for the file `filename`.
def identical(self, a, b):
    """This should return whether `a` is identical to `b`. Of course, this
    isn't always clear. True should mean that it is definitely identical.
    False means that, conservatively, it might not be.

    :param a: an AST
    :param b: another AST
    """
    converted_a = self.convert(a)
    converted_b = self.convert(b)
    return self._identical(converted_a, converted_b)
This should return whether `a` is identical to `b`. Of course, this isn't always clear. True should mean that it is definitely identical. False means that, conservatively, it might not be. :param a: an AST :param b: another AST
def _get_rev(self, fpath):
    """Get an SCM version number. Try svn and git.

    :param fpath: Path inside the working copy / repository.
    :return: Revision string, or None if neither git nor svn produced one.
    """
    rev = None

    try:
        cmd = ["git", "log", "-n1", "--pretty=format:\"%h\"", fpath]
        rev = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()[0]
    except Exception:
        # FIX: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt. git missing / not a git checkout: try svn.
        pass

    if not rev:
        try:
            cmd = ["svn", "info", fpath]
            svninfo = Popen(cmd, stdout=PIPE, stderr=PIPE).stdout.readlines()
            for info in svninfo:
                tokens = info.split(":")
                if tokens[0].strip() == "Last Changed Rev":
                    rev = tokens[1].strip()
        except Exception:
            # svn missing / not an svn checkout: give up and return None.
            pass
    return rev
Get an SCM version number. Try svn and git.
def get_path(self, path, query=None):
    """Make a GET request, optionally including a query, to a relative path.

    The path of the request includes a path on top of the base URL
    assigned to the endpoint.

    Parameters
    ----------
    path : str
        The path to request, relative to the endpoint
    query : DataQuery, optional
        The query to pass when making the request

    Returns
    -------
    resp : requests.Response
        The server's response to the request

    See Also
    --------
    get_query, get, url_path
    """
    full_path = self.url_path(path)
    return self.get(full_path, query)
Make a GET request, optionally including a query, to a relative path. The path of the request includes a path on top of the base URL assigned to the endpoint. Parameters ---------- path : str The path to request, relative to the endpoint query : DataQuery, optional The query to pass when making the request Returns ------- resp : requests.Response The server's response to the request See Also -------- get_query, get, url_path
def data(self, data, part=False, dataset=''):
    """Parse data and update links.

    Parameters
    ----------
    data
        Data to parse.
    part : `bool`, optional
        True if data is partial (default: `False`).
    dataset : `str`, optional
        Dataset key prefix (default: '').
    """
    tokens = self.scanner(data, part)
    links = self.parser(tokens, part, dataset)
    self.storage.add_links(links)
Parse data and update links. Parameters ---------- data Data to parse. part : `bool`, optional True if data is partial (default: `False`). dataset : `str`, optional Dataset key prefix (default: '').
def get_prebuilt_targets(build_context):
    """Return set of target names that are contained within cached base images

    These targets may be considered "pre-built", and skipped during build.
    """
    logger.info('Scanning for cached base images')
    # Targets baked into some cached image.
    contained_deps = set()
    # Targets that some not-cached image still requires to be built.
    required_deps = set()
    # Memoized descendant lookups over the target graph.
    cached_descendants = CachedDescendants(build_context.target_graph)
    for target_name, target in build_context.targets.items():
        # Only image-producing targets participate in image caching.
        if 'image_caching_behavior' not in target.props:
            continue
        image_name = get_image_name(target)
        image_tag = target.props.image_tag
        icb = ImageCachingBehavior(image_name, image_tag, target.props.image_caching_behavior)
        # Sets target.image_id when a usable cached image exists.
        target.image_id = handle_build_cache(build_context.conf, image_name, image_tag, icb)
        if target.image_id:
            # Cache hit: the target and all its descendants are prebuilt.
            image_deps = cached_descendants.get(target_name)
            contained_deps.update(image_deps)
            contained_deps.add(target.name)
        else:
            # Cache miss: its descendants, beyond what the base image
            # already provides, still need building.
            image_deps = cached_descendants.get(target_name)
            base_image_deps = cached_descendants.get(target.props.base_image)
            required_deps.update(image_deps - base_image_deps)
    # Prebuilt = contained in some cached image and not required elsewhere.
    return contained_deps - required_deps
Return set of target names that are contained within cached base images These targets may be considered "pre-built", and skipped during build.
def runWizard( self ):
    """Runs the current wizard."""
    plugin = self.currentPlugin()
    if not plugin:
        return
    # Accept the dialog only when the plugin's wizard reports success.
    if plugin.runWizard(self):
        self.accept()
Runs the current wizard.
def validate_session(self, client, session):
    """Validate this session before use with client.

    Raises error if this session is logged in as a different user or
    the client is not the one that created the session.
    """
    if not session:
        return
    if session._client is not client:
        raise InvalidOperation(
            'Can only use session with the MongoClient that'
            ' started it')
    if session._authset != self.authset:
        raise InvalidOperation(
            'Cannot use session after authenticating with different'
            ' credentials')
Validate this session before use with client. Raises error if this session is logged in as a different user or the client is not the one that created the session.
def _amplitude_bounds(counts, bkg, model): if isinstance(counts, list): counts = np.concatenate([t.flat for t in counts]) bkg = np.concatenate([t.flat for t in bkg]) model = np.concatenate([t.flat for t in model]) s_model = np.sum(model) s_counts = np.sum(counts) sn = bkg / model imin = np.argmin(sn) sn_min = sn[imin] c_min = counts[imin] b_min = c_min / s_model - sn_min b_max = s_counts / s_model - sn_min return max(b_min, 0), b_max
Compute bounds for the root of `_f_cash_root_cython`. Parameters ---------- counts : `~numpy.ndarray` Count map. bkg : `~numpy.ndarray` Background map. model : `~numpy.ndarray` Source template (multiplied with exposure).
def backward(self):
    """Backward recursion.

    Upon completion, the following list of length T is available:
    * smth: marginal smoothing probabilities

    Note
    ----
    Performs the forward step in case it has not been performed before.
    """
    if not self.filt:
        self.forward()
    # The last smoothing marginal equals the last filtering marginal.
    self.smth = [self.filt[-1]]
    log_trans = np.log(self.hmm.trans_mat)
    # Backward "cost-to-go" terms in log space, initialised to log(1) = 0.
    ctg = np.zeros(self.hmm.dim)
    # Walk backwards in time, pairing filt[t] with logft[t+1].
    for filt, next_ft in reversed(list(zip(self.filt[:-1], self.logft[1:]))):
        new_ctg = np.empty(self.hmm.dim)
        for k in range(self.hmm.dim):
            # Log-sum-exp over next states for numerical stability.
            new_ctg[k] = rs.log_sum_exp(log_trans[k, :] + next_ft + ctg)
        ctg = new_ctg
        smth = rs.exp_and_normalise(np.log(filt) + ctg)
        self.smth.append(smth)
    # Built in reverse time order; flip back to chronological order.
    self.smth.reverse()
Backward recursion. Upon completion, the following list of length T is available: * smth: marginal smoothing probabilities Note ---- Performs the forward step in case it has not been performed before.
def returner(ret):
    """Check highstate return information and possibly fire off an email
    or save a file.
    """
    options = _get_options(ret)
    log.debug('highstate setup %s', options)

    report, failed = _generate_report(ret, options)
    # Only produce output when the report is non-empty.
    if report:
        _produce_output(report, failed, options)
Check highstate return information and possibly fire off an email or save a file.
def _Dhcpcd(self, interfaces, logger):
    """Use dhcpcd to activate the interfaces.

    Args:
      interfaces: list of string, the output device names to enable.
      logger: logger object, used to write to SysLog and serial port.
    """
    for interface in interfaces:
        # Release any existing lease first; failure just means dhcpcd
        # was not yet running for this interface.
        try:
            subprocess.check_call(['/sbin/dhcpcd', '-x', interface])
        except subprocess.CalledProcessError:
            logger.info('Dhcpcd not yet running for interface %s.', interface)
        try:
            subprocess.check_call(['/sbin/dhcpcd', interface])
        except subprocess.CalledProcessError:
            logger.warning('Could not activate interface %s.', interface)
Use dhcpcd to activate the interfaces. Args: interfaces: list of string, the output device names to enable. logger: logger object, used to write to SysLog and serial port.
def _decode_datetime(obj): if '__datetime__' in obj: obj = datetime.datetime.strptime(obj['as_str'].decode(), "%Y%m%dT%H:%M:%S.%f") return obj
Decode a msgpack'ed datetime.
def _upd_unused(self, what):
    """Make sure to have exactly one copy of every valid function in the
    "unused" pile on the right.

    Doesn't read from the database.

    :param what: a string, 'trigger', 'prereq', or 'action'
    """
    builder = getattr(self, '_{}_builder'.format(what))
    updtrig = getattr(self, '_trigger_upd_unused_{}s'.format(what))
    # Detach the update trigger so our own deck edits below don't
    # re-enter this method.
    builder.unbind(decks=updtrig)
    funcs = OrderedDict()
    # NOTE(review): the scan below always reads _action_builder's decks,
    # even when `what` selects a different builder -- confirm intended.
    cards = list(self._action_builder.decks[1])
    # Reverse so that, for duplicate funcnames, the earliest card wins
    # the dict slot.
    cards.reverse()
    for card in cards:
        funcs[card.ud['funcname']] = card
    # Copy in any function from the used pile (deck 0) not already seen.
    for card in self._action_builder.decks[0]:
        if card.ud['funcname'] not in funcs:
            funcs[card.ud['funcname']] = card.copy()
    unused = list(funcs.values())
    unused.reverse()
    # Replace the unused pile wholesale, then re-attach the trigger.
    builder.decks[1] = unused
    builder.bind(decks=updtrig)
Make sure to have exactly one copy of every valid function in the "unused" pile on the right. Doesn't read from the database. :param what: a string, 'trigger', 'prereq', or 'action'