code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def getAddr(self, ifname):
    """Return the inet address for the interface *ifname*.

    On macOS the address is read via ``ifconfig_inet``; elsewhere it is
    fetched with the SIOCGIFADDR ioctl through ``self._getaddr``.
    """
    on_darwin = sys.platform == 'darwin'
    if not on_darwin:
        return self._getaddr(ifname, self.SIOCGIFADDR)
    return ifconfig_inet(ifname).get('address')
Get the inet addr for an interface. @param ifname: interface name @type ifname: string
def additive_coupling(name, x, mid_channels=512, reverse=False, activation="relu", dropout=0.0): with tf.variable_scope(name, reuse=tf.AUTO_REUSE): output_channels = common_layers.shape_list(x)[-1] // 2 x1, x2 = tf.split(x, num_or_size_splits=2, axis=-1) z1 = x1 shift = conv_stac...
Reversible additive coupling layer. Args: name: variable scope. x: 4-D Tensor, shape=(NHWC). mid_channels: number of channels in the coupling layer. reverse: Forward or reverse operation. activation: "relu" or "gatu" dropout: default, 0.0 Returns: output: 4-D Tensor, shape=(NHWC) ob...
def peak_templates(self): peak_templates = [] for peak_descr in self: expanded_dims = [dim_group.dimensions for dim_group in peak_descr] templates = product(*expanded_dims) for template in templates: peak_templates.append(PeakTemplate(template)) ...
Create a list of concrete peak templates from a list of general peak descriptions. :return: List of peak templates. :rtype: :py:class:`list`
def _clean(self): found_ids = {} nodes = [self._nodes[_node.Root.ID]] while nodes: node = nodes.pop() found_ids[node.id] = None nodes = nodes + node.children for node_id in self._nodes: if node_id in found_ids: continue ...
Recursively check that all nodes are reachable.
def _cell_scalar(self, name=None): if name is None: field, name = self.active_scalar_info if field != CELL_DATA_FIELD: raise RuntimeError('Must specify an array to fetch.') vtkarr = self.GetCellData().GetArray(name) if vtkarr is None: raise Ass...
Returns the cell scalars of a vtk object Parameters ---------- name : str Name of cell scalars to retrive. Returns ------- scalars : np.ndarray Numpy array of scalars
def set_boot_arch(arch='default'): if arch not in ['i386', 'x86_64', 'default']: msg = 'Invalid value passed for arch.\n' \ 'Must be i386, x86_64, or default.\n' \ 'Passed: {0}'.format(arch) raise SaltInvocationError(msg) cmd = 'systemsetup -setkernelbootarchitecture ...
Set the kernel to boot in 32 or 64 bit mode on next boot. .. note:: When this function fails with the error ``changes to kernel architecture failed to save!``, then the boot arch is not updated. This is either an Apple bug, not available on the test system, or a result of system fil...
def inv_matrix(self) -> np.ndarray:
    """Inverse of the lattice matrix, computed once and cached read-only."""
    cached = self._inv_matrix
    if cached is None:
        cached = inv(self._matrix)
        # Freeze the cached array so callers cannot mutate the cache.
        cached.setflags(write=False)
        self._inv_matrix = cached
    return cached
Inverse of lattice matrix.
def _getImageSize(filename): result = None file = open(filename, 'rb') if file.read(8) == b'\x89PNG\r\n\x1a\n': while 1: length, = _struct.unpack('>i', file.read(4)) chunkID = file.read(4) if chunkID == '': break if chunkID == b'IHDR': ...
Try to get the width and height of a bmp of png image file
def setup_file_logger(filename, formatting, log_level): logger = logging.getLogger() if logger.handlers: logger.removeHandler(logger.handlers[0]) handler = logging.FileHandler(filename) logger.addHandler(handler) formatter = logging.Formatter(*formatting) handler.setFormatter(formatter) ...
A helper function for creating a file logger. Accepts arguments, as it is used in Status and LoggingWriter.
def djrepo_path(self):
    """Path of the ``.djrepo`` file derived from ``self.filepath``."""
    stem, _unused_ext = os.path.splitext(self.filepath)
    return stem + ".djrepo"
The path of the djrepo file. None if file does not exist.
def _setup(self): default_settings.reload() environment_variable = self._kwargs.get( "ENVVAR_FOR_DYNACONF", default_settings.ENVVAR_FOR_DYNACONF ) settings_module = os.environ.get(environment_variable) self._wrapped = Settings( settings_module=settings_mod...
Initial setup, run once.
def _compile(pattern, flags):
    """Compile the wildcard *pattern* (with masked *flags*) into a regex object."""
    masked = flags & FLAG_MASK
    regex_source = WcParse(pattern, masked).parse()
    return re.compile(regex_source)
Compile the pattern to regex.
def badnick(self, me=None, nick=None, **kw):
    """Fall back to an alternate nick after a nick error.

    When the error applies to us (``me == '*'``), append ``_`` to the
    current nick and schedule an attempt to regain the original nick
    in 30 seconds.
    """
    if me != '*':
        return
    self.bot.set_nick(self.bot.nick + '_')
    self.bot.log.debug('Trying to regain nickname in 30s...')
    self.nick_handle = self.bot.loop.call_later(
        30, self.bot.set_nick, self.bot.original_nick)
Use alt nick on nick error
def get_assessment_section_mdata(): return { 'assessment_taken': { 'element_label': { 'text': 'assessment taken', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FO...
Return default mdata map for AssessmentSection
def _process_unresolved_indirect_jumps(self): l.info("%d indirect jumps to resolve.", len(self._indirect_jumps_to_resolve)) all_targets = set() for idx, jump in enumerate(self._indirect_jumps_to_resolve): if self._low_priority: self._release_gil(idx, 20, 0.0001) ...
Resolve all unresolved indirect jumps found in previous scanning. Currently we support resolving the following types of indirect jumps: - Ijk_Call: indirect calls where the function address is passed in from a proceeding basic block - Ijk_Boring: jump tables - For an up-to-date list, se...
def list_previous_page(self):
    """Return the previous page of domain results, using the same limit.

    Raises NoMoreResults when no previous page exists.
    """
    prev_uri = self._paging.get("domain", {}).get("prev_uri")
    if prev_uri is None:
        raise exc.NoMoreResults("There are no previous pages of domains "
                                "to list.")
    return self._list(prev_uri)
When paging through results, this will return the previous page, using the same limit. If there are no more results, a NoMoreResults exception will be raised.
def flatten_all_paths(self, group_filter=lambda x: True,
                      path_filter=lambda x: True,
                      path_conversions=CONVERSIONS):
    """Forward this document's SVG tree to the module-level
    ``flatten_all_paths`` function and return its result."""
    root = self.tree.getroot()
    return flatten_all_paths(root, group_filter, path_filter,
                             path_conversions)
Forward the tree of this document into the more general flatten_all_paths function and return the result.
def __get_package_manager(self): package_manager = "" args = "" sudo_required = True if system.is_osx(): package_manager = "brew" sudo_required = False args = " install" elif system.is_debian(): package_manager = "apt-get" ...
Installs and verifies package manager
def get_column_for_modelfield(model_field):
    """Return the built-in Column class registered for *model_field*'s class.

    Relation fields are first resolved down to the primary-key field of the
    model they point at.  Returns None when no registered class matches.
    """
    while model_field.related_model:
        model_field = model_field.related_model._meta.pk
    return next((column_cls
                 for column_cls, field_classes in COLUMN_CLASSES
                 if isinstance(model_field, tuple(field_classes))),
                None)
Return the built-in Column class for a model field class.
def get_hash_as_int(*args, group: cmod.PairingGroup = None): group = group if group else cmod.PairingGroup(PAIRING_GROUP) h_challenge = sha256() serialedArgs = [group.serialize(arg) if isGroupElement(arg) else cmod.Conversion.IP2OS(arg) for arg in args] for arg in...
Enumerate over the input tuple and generate a hash using the tuple values :param args: sequence of either group or integer elements :param group: pairing group if an element is a group element :return:
def _set(self, **kwargs): for param, value in kwargs.items(): p = getattr(self, param) if value is not None: try: value = p.typeConverter(value) except TypeError as e: raise TypeError('Invalid param value given for p...
Sets user-supplied params.
def find_best_candidate(self): self.fill_percent_done() i_b = np.argmax(self.percent_done.ravel()) if self.percent_done.ravel()[i_b] <= 0: return None I = self.percent_done.ravel() == self.percent_done.ravel()[i_b] if I.sum() == 1: return i_b else:...
Determine which tile, when processed, would complete the largest percentage of unresolved edge pixels. This is a heuristic function and does not give the optimal tile.
def get(self): opts = current_app.config['RECORDS_REST_SORT_OPTIONS'].get( self.search_index) sort_fields = [] if opts: for key, item in sorted(opts.items(), key=lambda x: x[1]['order']): sort_fields.append( {key: dict( ...
Get options.
def _ExtractGMailSearchQuery(self, url):
    """Extract a search query from a GMail search URL.

    GMail: https://mail.google.com/mail/u/0/#search/query[/?]

    Args:
      url (str): URL.

    Returns:
      str: search query, or None if no query was found.
    """
    if 'search/' not in url:
        return None
    query = url.partition('search/')[2]
    query = query.partition('/')[0]
    query = query.partition('?')[0]
    return query.replace('+', ' ')
Extracts a search query from a GMail search URL. GMail: https://mail.google.com/mail/u/0/#search/query[/?] Args: url (str): URL. Returns: str: search query or None if no query was found.
def Range(start, limit, delta):
    """Range op: one-element tuple wrapping ``np.arange(start, limit, delta)`` as int32."""
    values = np.arange(start, limit, delta, dtype=np.int32)
    return (values,)
Range op.
def _generate_key_map(entity_list, key, entity_class):
    """Build a map from ``obj[key]`` to ``entity_class(**obj)``.

    Args:
      entity_list: list of dicts.
      key: dict key whose value becomes the map key.
      entity_class: class constructed from each dict's items.

    Returns:
      Dict mapping key values to entity objects.
    """
    return {entry[key]: entity_class(**entry) for entry in entity_list}
Helper method to generate map from key to entity object for given list of dicts. Args: entity_list: List consisting of dict. key: Key in each dict which will be key in the map. entity_class: Class representing the entity. Returns: Map mapping key to entity object.
def escape_latex_str_if_str(value):
    """Escape LaTeX special characters in *value* when it is a string.

    Non-string values are returned unchanged.
    """
    if not isinstance(value, str):
        return value
    for pattern, replacement in REGEX_ESCAPE_CHARS:
        value = re.sub(pattern, replacement, value)
    return re.sub(REGEX_BACKSLASH, r'\\\\', value)
Escape a latex string
def lookup_host(self, name):
    """Look up a host object by *name* and return its ip, mac and hostname.

    @type name: str
    @rtype: dict
    @raises OmapiErrorAttributeNotFound: when the host record lacks one of
        the expected attributes.
    """
    record = self.lookup_by_host(name=name)
    try:
        return dict(ip=record["ip-address"],
                    mac=record["hardware-address"],
                    hostname=record["name"].decode('utf-8'))
    except KeyError:
        raise OmapiErrorAttributeNotFound()
Look for a host object with given name and return the name, mac, and ip address @type name: str @rtype: dict or None @raises ValueError: @raises OmapiError: @raises OmapiErrorNotFound: if no host object with the given name could be found @raises OmapiErrorAttributeNotFound: if lease could be found, but o...
def clear(self):
    """Remove all nodes from the list.

    Every node's links are broken so detached nodes cannot keep the list
    (or each other) alive; size is reset to zero.
    """
    current = self._first
    while current is not None:
        following = current._next
        current._list = current._prev = current._next = None
        current = following
    self._size = 0
Remove all nodes from the list.
def populate_user(self, request, sociallogin, data): username = data.get('username') first_name = data.get('first_name') last_name = data.get('last_name') email = data.get('email') name = data.get('name') u...
Hook that can be used to further populate the user instance. For convenience, we populate several common fields. Note that the user instance being populated represents a suggested User instance that represents the social user that is in the process of being logged in. The User...
def key_press_event(self, event): if event.key() == QtCore.Qt.Key_Return: cursor = self.edit.textCursor() cursor.movePosition(cursor.EndOfBlock) self.edit.setTextCursor(cursor) code = _qkey_to_ascii(event) if code: self.process.writeData(code) ...
Directly writes the ascii code of the key to the process' stdin. :retuns: False to prevent the event from being propagated to the parent widget.
def delete(self, block, name):
    """Remove the stored value for field *name* of *block* from the key-value store."""
    key = self._key(block, name)
    self._kvs.delete(key)
Reset the value of the field named `name` to the default
def open(self):
    """Obtain a device-information-set handle for present USB device interfaces.

    Calls SetupDiGetClassDevs with DIGCF.PRESENT | DIGCF.DEVICEINTERFACE,
    stores the opaque handle on ``self.h_info`` and returns it.
    """
    flags = DIGCF.PRESENT | DIGCF.DEVICEINTERFACE
    self.h_info = SetupDiGetClassDevs(byref(self.guid), None, None, flags)
    return self.h_info
Calls SetupDiGetClassDevs to obtain a handle to an opaque device information set that describes the device interfaces supported by all the USB collections currently installed in the system. The application should specify DIGCF.PRESENT and DIGCF.INTERFACEDEVICE in the Flags parameter ...
def string_to_integer(value, strict=False):
    """Return the integer represented by the string *value*.

    @param value: a string representation of an integer number.
    @param strict: when True, a null value raises instead of returning None.
    @return: the integer value, or None for a null value when not strict.
    @raises ValueError: on a null value in strict mode, or on a string that
        does not represent an integer.
    """
    if is_undefined(value):
        if not strict:
            return None
        raise ValueError('The value cannot be null')
    try:
        return int(value)
    except ValueError:
        raise ValueError(
            'The specified string "%s" does not represent an integer' % value)
Return an integer corresponding to the string representation of a number. @param value: a string representation of an integer number. @param strict: indicate whether the specified string MUST be of a valid integer number representation. @return: the integer value represented by the string. ...
def df(self, version=None, tags=None, ext=None, **kwargs): ext = self._find_extension(version=version, tags=tags) if ext is None: attribs = "{}{}".format( "version={} and ".format(version) if version else "", "tags={}".format(tags) if tags else "", ...
Loads an instance of this dataset into a dataframe. Parameters ---------- version: str, optional The version of the instance of this dataset. tags : list of str, optional The tags associated with the desired instance of this dataset. ext : str, optional ...
def connect(port=8813, numRetries=10, host="localhost", proc=None): for wait in range(1, numRetries + 2): try: return Connection(host, port, proc) except socket.error as e: print("Could not connect to TraCI server at %s:%s" % (host, port), e) if ...
Establish a connection to a TraCI-Server and return the connection object. The connection is not saved in the pool and not accessible via traci.switch. It should be safe to use different connections established by this method in different threads.
async def ltrim(self, name, start, end):
    """Trim list ``name`` to the slice between ``start`` and ``end``.

    ``start`` and ``end`` can be negative, following Python slicing notation.
    """
    command = ('LTRIM', name, start, end)
    return await self.execute_command(*command)
Trim the list ``name``, removing all values not within the slice between ``start`` and ``end`` ``start`` and ``end`` can be negative numbers just like Python slicing notation
def generate(env): java_file = SCons.Tool.CreateJavaFileBuilder(env) java_class = SCons.Tool.CreateJavaClassFileBuilder(env) java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env) java_class.add_emitter(None, emit_java_classes) java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes) ...
Add Builders and construction variables for javac to an Environment.
def get_post_fields(request):
    """Return the POST form fields of *request* as a plain dictionary."""
    return {name: value for name, value in request.form.items()}
parse through a request, and return fields from post in a dictionary
def nan_circmean(samples, high=2.0*np.pi, low=0.0, axis=None): samples = np.asarray(samples) samples = samples[~np.isnan(samples)] if samples.size == 0: return np.nan ang = (samples - low) * 2.0 * np.pi / (high - low) ssum = np.sin(ang).sum(axis=axis) csum = np.cos(ang).sum(axis=axis) ...
NaN insensitive version of scipy's circular mean routine Parameters ----------- samples : array_like Input array low : float or int Lower boundary for circular standard deviation range (default=0) high: float or int Upper boundary for circular standard deviation range (defau...
def node_add_label(node_name, label_name, label_value, **kwargs): cfg = _setup_conn(**kwargs) try: api_instance = kubernetes.client.CoreV1Api() body = { 'metadata': { 'labels': { label_name: label_value} } } api_resp...
Set the value of the label identified by `label_name` to `label_value` on the node identified by the name `node_name`. Creates the lable if not present. CLI Examples:: salt '*' kubernetes.node_add_label node_name="minikube" \ label_name="foo" label_value="bar"
def _set_default_resource_names(self):
    """Derive ip-config, nic and public-ip resource names from running_instance_id."""
    base = self.running_instance_id
    self.ip_config_name = base + '-ip-config'
    self.nic_name = base + '-nic'
    self.public_ip_name = base + '-public-ip'
Generate names for resources based on the running_instance_id.
def _set_digraph_b(self, char):
    """Record *char* as the second part of a digraph and cache its lookup info."""
    self.has_digraph_b = True
    entry = di_b_lt[char]
    self.active_vowel_ro = entry[0]
    self.active_dgr_b_info = entry
Sets the second part of a digraph.
def build_connection(url):
    """Build an Elasticsearch connection for *url*.

    Elastic.co's Heroku addon does not embed cluster credentials in the URL
    it provides, so when both ELASTICSEARCH_USERNAME and
    ELASTICSEARCH_PASSWORD are present in the environment they are supplied
    as HTTP auth.
    """
    username = os.environ.get('ELASTICSEARCH_USERNAME')
    password = os.environ.get('ELASTICSEARCH_PASSWORD')
    if not (username and password):
        return Elasticsearch(url)
    return Elasticsearch(url, http_auth=(username, password))
Build an Elasticsearch connection with the given url Elastic.co's Heroku addon doesn't create credientials with access to the cluster by default so they aren't exposed in the URL they provide either. This function works around the situation by grabbing our credentials from the environment via Django se...
def _head_length(self, port):
    """Distance from the center of the port to the perpendicular waypoint."""
    if not port:
        return 0.
    if self.get_parent_state_v() is port.parent:
        return port.port_size[1]
    return max(port.port_size[1] * 1.5, self._calc_line_width() / 1.3)
Distance from the center of the port to the perpendicular waypoint
def _evalAndDer(self,x): m = len(x) fx = np.zeros((m,self.funcCount)) for j in range(self.funcCount): fx[:,j] = self.functions[j](x) fx[np.isnan(fx)] = np.inf i = np.argmin(fx,axis=1) y = fx[np.arange(m),i] dydx = np.zeros_like(y) for j in rang...
Returns the level and first derivative of the function at each value in x. Only called internally by HARKinterpolator1D.eval_and_der.
def _validate_timeout(cls, value, name): if value is _Default: return cls.DEFAULT_TIMEOUT if value is None or value is cls.DEFAULT_TIMEOUT: return value if isinstance(value, bool): raise ValueError("Timeout cannot be a boolean value. It must " ...
Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If it...
def unit(self, unit):
    """Set the unit of this Dimensions.

    :param unit: The unit of this Dimensions; one of cm, inch, foot or None.
    :type: str
    :raises ValueError: when *unit* is neither None nor an allowed value.
    """
    allowed_values = ["cm", "inch", "foot"]
    is_valid = unit is None or unit in allowed_values
    if not is_valid:
        raise ValueError(
            "Invalid value for `unit` ({0}), must be one of {1}"
            .format(unit, allowed_values)
        )
    self._unit = unit
Sets the unit of this Dimensions. :param unit: The unit of this Dimensions. :type: str
def _call(self, method, *args, **kwargs): assert self.session if not kwargs.get('verify'): kwargs['verify'] = self.SSL_VERIFY response = self.session.request(method, *args, **kwargs) response_json = response.text and response.json() or {} if response.status_code < 200...
Call the remote service and return the response data.
def setup_logging(args): handler = logging.StreamHandler() handler.setLevel(args.log_level) formatter = logging.Formatter(('%(asctime)s - ' '%(name)s - ' '%(levelname)s - ' '%(message)s')) handler.se...
This sets up the logging. Needs the args to get the log level supplied :param args: The command line arguments
def parser(input_file_path='config.json'): try: with open(input_file_path, 'r') as config_file: config_new = json.load(config_file) config_file.close() except: raise Exception('Config file "'+input_file_path+'" not loaded properly. Please check it an try again.') impo...
Parser for the .json file containing the configuration of the method.
def get_config_files(): apps_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), APPS_DIR) custom_apps_dir = os.path.join(os.environ['HOME'], CUSTOM_APPS_DIR) config_files = set() custom_files = set() if os.path.isdir(custom_apps_dir): ...
Return the application configuration files. Return a list of configuration files describing the apps supported by Mackup. The files return are absolute full path to those files. e.g. /usr/lib/mackup/applications/bash.cfg Only one config file per application should be returned, custom c...
def usage(path): out = __salt__['cmd.run_all']("btrfs filesystem usage {0}".format(path)) salt.utils.fsutils._verify_run(out) ret = {} for section in out['stdout'].split("\n\n"): if section.startswith("Overall:\n"): ret['overall'] = _usage_overall(section) elif section.starts...
Show in which disk the chunks are allocated. CLI Example: .. code-block:: bash salt '*' btrfs.usage /your/mountpoint
def multiple_domains(self):
    """Return True when a (non-None) domain appears more than once in the jar.

    :rtype: bool
    """
    seen = []
    for cookie in iter(self):
        domain = cookie.domain
        if domain is not None and domain in seen:
            return True
        seen.append(domain)
    return False
Returns True if there are multiple domains in the jar. Returns False otherwise. :rtype: bool
def _renderBlockDevice(self, block_device, build): rendered_block_device = yield build.render(block_device) if rendered_block_device['volume_size'] is None: source_type = rendered_block_device['source_type'] source_uuid = rendered_block_device['uuid'] volume_size = se...
Render all of the block device's values.
def get_isolated_cpus(): path = sysfs_path('devices/system/cpu/isolated') isolated = read_first_line(path) if isolated: return parse_cpu_list(isolated) cmdline = read_first_line(proc_path('cmdline')) if cmdline: match = re.search(r'\bisolcpus=([^ ]+)', cmdline) if match: ...
Get the list of isolated CPUs. Return a sorted list of CPU identifiers, or return None if no CPU is isolated.
def subscribe(self, topic, callback, qos): if topic in self.topics: return def _message_callback(mqttc, userdata, msg): callback(msg.topic, msg.payload.decode('utf-8'), msg.qos) self._mqttc.subscribe(topic, qos) self._mqttc.message_callback_add(topic, _message_cal...
Subscribe to an MQTT topic.
def make_flatten(decl_or_decls): def proceed_single(decl): answer = [decl] if not isinstance(decl, scopedef_t): return answer for elem in decl.declarations: if isinstance(elem, scopedef_t): answer.extend(proceed_single(elem)) else: ...
Converts tree representation of declarations to flatten one. :param decl_or_decls: reference to list of declaration's or single declaration :type decl_or_decls: :class:`declaration_t` or [ :class:`declaration_t` ] :rtype: [ all internal declarations ]
def setup_paths(self, environ, coll, record=False): if not coll or not self.warcserver.root_dir: return if coll != '$root': pop_path_info(environ) if record: pop_path_info(environ) paths = [self.warcserver.root_dir] if coll != '$root': ...
Populates the WSGI environment dictionary with the path information necessary to perform a response for content or record. :param dict environ: The WSGI environment dictionary for the request :param str coll: The name of the collection the record is to be served from :param bool record:...
def get_wrapper_class(backend_name): try: return _WRAPPERS[backend_name] except KeyError: if backend_name == 'ni': from .ctwrapper import NIVisaLibrary _WRAPPERS['ni'] = NIVisaLibrary return NIVisaLibrary try: pkg = __import__('pyvisa-' + backend_n...
Return the WRAPPER_CLASS for a given backend. :rtype: pyvisa.highlevel.VisaLibraryBase
def tokenize_words(self): if not self.is_tagged(SENTENCES): self.tokenize_sentences() tok = self.__word_tokenizer text = self.text dicts = [] for sentence in self[SENTENCES]: sent_start, sent_end = sentence[START], sentence[END] sent_text = tex...
Apply word tokenization and create ``words`` layer. Automatically creates ``paragraphs`` and ``sentences`` layers.
def checkout(self, revision, options):
    """Check out a specific revision in the underlying git repository.

    :param revision: The revision identifier; its ``key`` is passed to git.
    :param options: Additional options (unused by the git backend).
    """
    self.repo.git.checkout(revision.key)
Checkout a specific revision. :param revision: The revision identifier. :type revision: :class:`Revision` :param options: Any additional options. :type options: ``dict``
def _sort(self, session_groups): session_groups.sort(key=operator.attrgetter('name')) for col_param, extractor in reversed(list(zip(self._request.col_params, self._extractors))): if col_param.order == api_pb2.ORDER_UNSPECIFIED: continue if co...
Sorts 'session_groups' in place according to _request.col_params.
def get_next(self, skip=1): r if super(Reader, self)._next(skip): entry = super(Reader, self)._get_all() if entry: entry['__REALTIME_TIMESTAMP'] = self._get_realtime() entry['__MONOTONIC_TIMESTAMP'] = self._get_monotonic() entry['__...
r"""Return the next log entry as a dictionary. Entries will be processed with converters specified during Reader creation. Optional `skip` value will return the `skip`-th log entry. Currently a standard dictionary of fields is returned, but in the future this might be changed ...
def process_request_thread(self, request, client_address): try: self.finish_request(request, client_address) self.shutdown_request(request) except Exception as e: self.logger.error(e) self.handle_error(request, client_address) self.shutdown_req...
Process the request.
def current_memory_usage():
    """Return this program's current memory usage (resident set size) in bytes."""
    # Local import keeps psutil an optional dependency of this helper only.
    import psutil
    proc = psutil.Process(os.getpid())
    # memory_info() is a named tuple; field 0 is rss (the virtual size at
    # index 1 was previously read into an unused local and is dropped).
    return proc.memory_info()[0]
Returns this programs current memory usage in bytes
def match_date(self, value, strict=False):
    """Check that *value* parses as a date; report via ``self.shout`` when it does not."""
    value = stringify(value)
    try:
        parse(value)
    except Exception:
        # Any failure to parse means the value is not a usable date.
        self.shout('Value %r is not a valid date', strict, value)
if value is a date
def add_data(self, request, pk=None):
    """Add data to the Entity and to every collection it belongs to."""
    response = super().add_data(request, pk)
    entity = self.get_object()
    for collection in entity.collections.all():
        collection.data.add(*request.data['ids'])
    return response
Add data to Entity and it's collection.
def word_ends(self):
    """End positions of the ``words`` layer elements, tokenizing first if needed."""
    if not self.is_tagged(WORDS):
        self.tokenize_words()
    return self.ends(WORDS)
The list of end positions representing ``words`` layer elements.
def _login(self, user, password, restrict_login=None):
    """Backend login method for Bugzilla3: send credentials to User.login."""
    credentials = dict(login=user, password=password)
    if restrict_login:
        credentials['restrict_login'] = True
    return self._proxy.User.login(credentials)
Backend login method for Bugzilla3
def network_lpf(network, snapshots=None, skip_pre=False):
    """Linear power flow for a generic network.

    Parameters
    ----------
    snapshots : list-like | single snapshot
        Subset (or element) of network.snapshots on which to run the power
        flow; defaults to network.snapshots.
    skip_pre : bool, default False
        Skip the preliminary preparation steps.
    """
    _network_prepare_and_run_pf(network, snapshots, skip_pre, linear=True)
Linear power flow for generic network. Parameters ---------- snapshots : list-like|single snapshot A subset or an elements of network.snapshots on which to run the power flow, defaults to network.snapshots skip_pre: bool, default False Skip the preliminary steps of computing top...
def not_modified(cls, errors=None):
    """Shortcut API for an HTTP 304 `Not Modified` response.

    Args:
        errors (list): Response key/value data.

    Returns:
        The instance's ``to_json`` result.
    """
    if cls.expose_status:
        response = cls.response
        response.content_type = 'application/json'
        response._status_line = '304 Not Modified'
    return cls(304, None, errors).to_json
Shortcut API for HTTP 304 `Not Modified` response. Args: errors (list): Response key/value data. Returns: WSResponse Instance.
def get_indic_syllabic_category_property(value, is_bytes=False): obj = unidata.ascii_indic_syllabic_category if is_bytes else unidata.unicode_indic_syllabic_category if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['indicsyllabiccategory'].get(negated, negated) ...
Get `INDIC SYLLABIC CATEGORY` property.
def to_meta(self, md5=None, file=None): if not md5: if not file: raise ValueError('Must specify either file or md5') md5 = md5_for_file(file) size = os.stat(file).st_size else: size = None return { 'id': self.id_, ...
Return a dictionary of metadata, for use in the Remote api.
def set_working_directory(working_directory):
    """Add working_directory to sys.path.

    This allows dynamic loading of arbitrary python modules in cwd.

    Args:
        working_directory: string. path to add to sys.path.
    """
    logger.debug("starting")
    logger.debug(f"adding {working_directory} to sys.paths")
    sys.path.append(working_directory)
    logger.debug("done")
Add working_directory to sys.paths. This allows dynamic loading of arbitrary python modules in cwd. Args: working_directory: string. path to add to sys.paths
def read(fobj, **kwargs):
    """Read a WAV file into a `TimeSeries`.

    Parameters
    ----------
    fobj : `file`, `str`
        Open file-like object or filename to read from.
    **kwargs
        All keyword arguments are passed onto :func:`scipy.io.wavfile.read`.
    """
    fsamp, arr = wavfile.read(fobj, **kwargs)
    return TimeSeries(arr, sample_rate=fsamp)
Read a WAV file into a `TimeSeries` Parameters ---------- fobj : `file`, `str` open file-like object or filename to read from **kwargs all keyword arguments are passed onto :func:`scipy.io.wavfile.read` See also -------- scipy.io.wavfile.read for details on how the...
def linkify_hostgroups_hosts(self, hosts): for hostgroup in self: members = hostgroup.get_hosts() new_members = [] for member in members: member = member.strip() if not member: continue if member == '*': ...
We just search for each hostgroup the id of the hosts and replace the names by the found identifiers :param hosts: object Hosts :type hosts: alignak.objects.host.Hosts :return: None
def encode_int(n): global ENCODED_INT_CACHE try: return ENCODED_INT_CACHE[n] except KeyError: pass if n < MIN_29B_INT or n > MAX_29B_INT: raise OverflowError("Out of range") if n < 0: n += 0x20000000 bytes = '' real_value = None if n > 0x1fffff: re...
Encodes an int as a variable length signed 29-bit integer as defined by the spec. @param n: The integer to be encoded @return: The encoded string @rtype: C{str} @raise OverflowError: Out of range.
def enable_shuffle(self, value=None):
    """Enable shuffle mode (or toggle the current state when *value* is None)."""
    desired = (not self.shuffled) if value is None else value
    spotifyconnect.Error.maybe_raise(lib.SpPlaybackEnableShuffle(desired))
Enable shuffle mode
def entry_snapshots(self, space_id, environment_id, entry_id):
    """Provide access to entry snapshot management methods.

    API reference:
    https://www.contentful.com/developers/docs/references/content-management-api/#/reference/snapshots

    :return: :class:`SnapshotsProxy` scoped to the given space, environment
        and entry.
    """
    return SnapshotsProxy(self, space_id, environment_id, entry_id, 'entries')
Provides access to entry snapshot management methods. API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/snapshots :return: :class:`SnapshotsProxy <contentful_management.snapshots_proxy.SnapshotsProxy>` object. :rtype: contentful.snapshots_p...
def job_file(self):
    """Path to the submit description file representing this job.

    The path is also cached on ``self._job_file``.
    """
    filename = '%s.job' % (self.name)
    self._job_file = os.path.join(self.initial_dir, filename)
    return self._job_file
The path to the submit description file representing this job.
def transcribe_to_modern(self, text) : phoneme_words = self.transcribe(text, as_phonemes = True) words = [''.join([self.to_modern[0][phoneme.ipa] for phoneme in word]) for word in phoneme_words] modern_text = ' '.join(words) for regexp, replacement in self.to_modern[1]: modern_text = re.sub(regexp, rep...
A very first attempt at trancribing from IPA to some modern orthography. The method is intended to provide the student with clues to the pronounciation of old orthographies.
def _treat_devices_removed(self):
    """Process the removed devices.

    Each removed port is handled via eventlet.spawn_n; iterating a copy
    lets the underlying set be mutated while handlers are being spawned.
    """
    for device in self._removed_ports.copy():
        eventlet.spawn_n(self._process_removed_port, device)
Process the removed devices.
def _dump(f, mesh):
    """Write *mesh* to collada file format.

    *f* must be a file-like object exposing a ``name`` path, which is where
    the COLLADA document is written.
    """
    dae = mesh_to_collada(mesh)
    dae.write(f.name)
Writes a mesh to collada file format.
def list_rules(region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: ret = [] NextToken = '' while NextToken is not None: args = {'NextToken': NextToken} if NextToken else {} r = conn.list_rul...
List, with details, all Cloudwatch Event rules visible in the current scope. CLI example:: salt myminion boto_cloudwatch_event.list_rules region=us-east-1
def build_indentation_list(parser: str = 'github'): r indentation_list = list() if (parser == 'github' or parser == 'cmark' or parser == 'gitlab' or parser == 'commonmarker' or parser == 'redcarpet'): for i in range(0, md_parser[parser]['header']['max_levels']): indentation_l...
r"""Create a data structure that holds the state of indentations. :parameter parser: decides the length of the list. Defaults to ``github``. :type parser: str :returns: indentation_list, a list that contains the state of indentations given a header type. :rtype: list :raises: a bu...
def map(self, func):
    """Return a dictionary mapping each key in self to ``func`` applied to its value."""
    return {key: func(value) for key, value in self.iteritems()}
Return a dictionary of the results of func applied to each of the segmentlist objects in self. Example: >>> x = segmentlistdict() >>> x["H1"] = segmentlist([segment(0, 10)]) >>> x["H2"] = segmentlist([segment(5, 15)]) >>> x.map(lambda l: 12 in l) {'H2': True, 'H1': False}
def local_manager_is_default(self, adm_gid, gid):
    """Check whether *gid* is a default group for local manager group *adm_gid*.

    Raises Exception when *gid* is not managed by *adm_gid* at all.
    """
    settings = self.root['settings']['ugm_localmanager'].attrs
    rule = settings[adm_gid]
    if gid not in rule['target']:
        raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
    return gid in rule['default']
Check whether gid is default group for local manager group.
def terminal_sexy_to_wal(data):
    """Convert a terminal.sexy JSON payload to the wal schema.

    Mutates *data* in place (adding "colors" and "special") and returns it.
    """
    data["colors"] = {"color%s" % index: value
                      for index, value in enumerate(data["color"])}
    data["special"] = {
        "foreground": data["foreground"],
        "background": data["background"],
        # terminal.sexy has no cursor color; reuse palette slot 9.
        "cursor": data["color"][9],
    }
    return data
Convert terminal.sexy json schema to wal.
def initalize(self, physics_dta):
    """Prepare this particle for use.

    physics_dta describes the velocity, coordinates and acceleration of the
    particle.  (Method name spelling kept for API compatibility.)
    """
    low, high = self.rotation_range
    self.rotation = random.randint(low, high)
    self.current_time = 0.0
    self.color = self.start_color
    self.scale = self.start_scale
    self.physics = physics_dta
Prepare our particle for use. physics_dta describes the velocity, coordinates, and acceleration of the particle.
def draw_lines(self, *points):
    """Draw a series of connected lines on the current rendering target.

    Args:
        *points (Point): The points along the lines.

    Raises:
        SDLError: If an error is encountered.
    """
    point_array = ffi.new('SDL_Point[]', len(points))
    for i, p in enumerate(points):
        # Copy each point's underlying SDL_Point struct into the C array.
        point_array[i] = p._ptr[0]
    check_int_err(lib.SDL_RenderDrawLines(self._ptr, point_array, len(points)))
Draw a series of connected lines on the current rendering target. Args: *points (Point): The points along the lines. Raises: SDLError: If an error is encountered.
def my_on_connect(client): client.send('You connected from %s\n' % client.addrport()) if CLIENTS: client.send('Also connected are:\n') for neighbor in CLIENTS: client.send('%s\n' % neighbor.addrport()) else: client.send('Sadly, you are alone.\n') CLIENTS.append(client...
Example on_connect handler.
def bods2c(name):
    """Translate a string containing a body name or ID code to an integer code.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bods2c_c.html

    :param name: String to be translated to an ID code.
    :type name: str
    :return: tuple of (integer ID code, whether the name was resolved).
    """
    name_p = stypes.stringToCharP(name)
    code = ctypes.c_int(0)
    found = ctypes.c_int(0)
    libspice.bods2c_c(name_p, ctypes.byref(code), ctypes.byref(found))
    return code.value, bool(found.value)
Translate a string containing a body name or ID code to an integer code. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bods2c_c.html :param name: String to be translated to an ID code. :type name: str :return: Integer ID code corresponding to name. :rtype: int
def add_constant(self, stream, value):
    """Store a constant *value* for *stream*, refusing to overwrite one.

    Args:
        stream (DataStream): The constant stream to assign the value to.
        value: The value to store.

    Raises:
        ArgumentError: if a constant is already recorded for *stream*.
    """
    existing = self.constant_database.get(stream)
    if stream in self.constant_database:
        raise ArgumentError("Attempted to set the same constant twice",
                            stream=stream,
                            old_value=existing,
                            new_value=value)
    self.constant_database[stream] = value
Store a constant value for use in this sensor graph. Constant assignments occur after all sensor graph nodes have been allocated since they must be propogated to all appropriate virtual stream walkers. Args: stream (DataStream): The constant stream to assign the value to ...
def transpose(self, trans, scale="C"): if not isinstance(trans, int): raise TypeError("Expected integers, not {}".format(type(trans))) self._root = transpose_note(self._root, trans, scale) if self._on: self._on = transpose_note(self._on, trans, scale) self._reconf...
Transpose the chord :param int trans: Transpose key :param str scale: key scale :return:
def create(self, ticket, payload=None, expires=None):
    """Create a session entry in memcache keyed by ``str(ticket)``.

    A falsy payload is replaced by True so the key always holds a value.
    """
    stored = payload or True
    self._client.set(str(ticket), stored, expires)
Create a session identifier in memcache associated with ``ticket``.
def _eval_meta_as_summary(meta):
    """Crude heuristic deciding whether *meta* is usable as a page summary.

    Rejects empty strings, strings longer than 500 characters, and anything
    mentioning 'login' (case-insensitive).
    """
    if meta == '':
        return False
    if len(meta) > 500:
        return False
    if 'login' in meta.lower():
        return False
    return True
some crude heuristics for now most are implemented on bot-side with domain whitelists
def children(self, unroll=False, skip_not_present=True): for child_inst in self.inst.children: if skip_not_present: if not child_inst.properties.get('ispresent', True): continue if unroll and isinstance(child_inst, comp.AddressableComponent) and child_...
Returns an iterator that provides nodes for all immediate children of this component. Parameters ---------- unroll : bool If True, any children that are arrays are unrolled. skip_not_present : bool If True, skips children whose 'ispresent' property is se...
def doLayout(self, width): self.width = width font_sizes = [0] + [frag.get("fontSize", 0) for frag in self] self.fontSize = max(font_sizes) self.height = self.lineHeight = max(frag * self.LINEHEIGHT for frag in font_sizes) y = (self.lineHeight - self.fontSize) for frag in...
Align words in previous line.
def _create_adapter_type(network_adapter, adapter_type, network_adapter_label=''): log.trace('Configuring virtual machine network ' 'adapter adapter_type=%s', adapter_type) if adapter_type in ['vmxnet', 'vmxnet2', 'vmxnet3', 'e1000', 'e1000e']: edited_network_adapt...
Returns a vim.vm.device.VirtualEthernetCard object specifying a virtual ethernet card information network_adapter None or VirtualEthernet object adapter_type String, type of adapter network_adapter_label string, network adapter name