code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def set_grade_system(self, grade_system_id):
    """Set the grading system backing this form.

    arg: grade_system_id (osid.id.Id): the grade system
    raise: InvalidArgument -- ``grade_system_id`` is invalid
    raise: NoAccess -- ``Metadata.isReadOnly()`` is ``true``
    """
    metadata = self.get_grade_system_metadata()
    if metadata.is_read_only():
        raise errors.NoAccess()
    if not self._is_valid_id(grade_system_id):
        raise errors.InvalidArgument()
    self._my_map['gradeSystemId'] = str(grade_system_id)
Sets the grading system. arg: grade_system_id (osid.id.Id): the grade system raise: InvalidArgument - ``grade_system_id`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
def _get_columns(self, blueprint): columns = [] for column in blueprint.get_added_columns(): sql = self.wrap(column) + ' ' + self._get_type(column) columns.append(self._add_modifiers(sql, blueprint, column)) return columns
Get the blueprint's columns definitions. :param blueprint: The blueprint :type blueprint: Blueprint :rtype: list
def on_delivery(self, channel, method, properties, body):
    """Invoked by pika when RabbitMQ delivers a message from a queue.

    :param channel: the channel the message was delivered on
    :param method: the AMQP method frame
    :param properties: the AMQP message properties
    :param body: the message body (bytes)
    """
    # Delegate to the registered callback set, tagged with this consumer's
    # name so the handler can tell sources apart.
    self.callbacks.on_delivery(
        self.name, channel, method, properties, body)
Invoked by pika when RabbitMQ delivers a message from a queue. :param channel: The channel the message was delivered on :type channel: pika.channel.Channel :param method: The AMQP method frame :type method: pika.frame.Frame :param properties: The AMQP message properties :type properties: pika.spec.Basic.Properties :param bytes body: The message body
def remove(self):
    """Remove the alarm from the Sonos system.

    There is no need to call `save`. The Python instance is not deleted,
    and can be saved back to Sonos again if desired.
    """
    self.zone.alarmClock.DestroyAlarm([('ID', self._alarm_id)])
    # Drop the cached entry; pop() tolerates an id that was never cached.
    Alarm._all_alarms.pop(self._alarm_id, None)
    self._alarm_id = None
Remove the alarm from the Sonos system. There is no need to call `save`. The Python instance is not deleted, and can be saved back to Sonos again if desired.
def user_can_edit_newsitem(user, NewsItem):
    """Check if the user has permission to edit a particular NewsItem type."""
    return any(
        user.has_perm(perm)
        for perm in format_perms(NewsItem, ['add', 'change', 'delete'])
    )
Check if the user has permission to edit a particular NewsItem type.
def clean(self):
    """Fill metrics with catalog defaults when empty, then defer to super."""
    if not self.metrics:
        spec_map = metric_catalog.get(self.__class__, {})
        self.metrics = {name: spec.default for name, spec in spec_map.items()}
    return super(WithMetrics, self).clean()
Fill metrics with defaults on create
def readQuotes(self, start, end):
    """Read quotes from Yahoo Financial between `start` and `end`.

    Returns an empty list when no symbol is configured.
    """
    if self.symbol is not None:
        return self.__yf.getQuotes(self.symbol, start, end)
    LOG.debug('Symbol is None')
    return []
read quotes from Yahoo Financial
def entails(self, entailed_independencies):
    """Return True if `entailed_independencies` are implied by this
    `Independencies` object under the semi-graphoid axioms, else False.

    Might be very slow if more than six variables are involved.
    """
    if not isinstance(entailed_independencies, Independencies):
        return False
    implied = self.closure().get_assertions()
    return all(assertion in implied
               for assertion in entailed_independencies.get_assertions())
Returns `True` if the `entailed_independencies` are implied by this `Independencies`-object, otherwise `False`. Entailment is checked using the semi-graphoid axioms. Might be very slow if more than six variables are involved. Parameters ---------- entailed_independencies: Independencies()-object Examples -------- >>> from pgmpy.independencies import Independencies >>> ind1 = Independencies([['A', 'B'], ['C', 'D'], 'E']) >>> ind2 = Independencies(['A', 'C', 'E']) >>> ind1.entails(ind2) True >>> ind2.entails(ind1) False
def add_nexusnve_binding(vni, switch_ip, device_id, mcast_group):
    """Add a nexus nve binding and return the persisted object.

    :param vni: VXLAN network identifier
    :param switch_ip: IP address of the Nexus switch
    :param device_id: ID of the bound device
    :param mcast_group: multicast group for the VNI
    """
    LOG.debug("add_nexusnve_binding() called")
    session = bc.get_writer_session()
    binding = nexus_models_v2.NexusNVEBinding(vni=vni,
                                              switch_ip=switch_ip,
                                              device_id=device_id,
                                              mcast_group=mcast_group)
    session.add(binding)
    # flush() pushes the INSERT so the caller sees DB-generated fields,
    # without committing the enclosing transaction.
    session.flush()
    return binding
Adds a nexus nve binding.
def put(self, *items) -> "AttrIndexedDict":
    "Add items to the dict that will be indexed by self.attr."
    key_attr = self.attr
    for entry in items:
        self.data[getattr(entry, key_attr)] = entry
    return self
Add items to the dict that will be indexed by self.attr.
def send(self, sender, recipients, cc=None, bcc=None, subject='', body='',
         attachments=None, content='text'):
    """Send an email through the connected SMTP server.

    :param sender: the sender of the message
    :param recipients: the To: recipients
    :param cc: the CC recipients
    :param bcc: the BCC recipients
    :param subject: the subject of the message
    :param body: the body of the message
    :param attachments: the attachments of the message
    :param content: the content type of the message [text/html]
    :return: True on success, False otherwise
    """
    if not self.connected:
        self._logger.error(('Server not connected, cannot send message, '
                            'please connect() first and disconnect() when '
                            'the connection is not needed any more'))
        return False
    try:
        message = Message(sender, recipients, cc, bcc, subject, body,
                          attachments, content)
        # NOTE(review): `message.as_string` is passed without being called —
        # verify whether Message.as_string is a property or a method.
        self._smtp.sendmail(message.sender, message.recipients,
                            message.as_string)
        result = True
        # NOTE(review): marking the session disconnected after a single send
        # looks suspicious — confirm send() is meant to be one-shot.
        self._connected = False
        self._logger.debug('Done')
    except Exception:
        self._logger.exception('Something went wrong!')
        result = False
    return result
Sends the email :param sender: The server of the message :param recipients: The recipients (To:) of the message :param cc: The CC recipients of the message :param bcc: The BCC recipients of the message :param subject: The subject of the message :param body: The body of the message :param attachments: The attachments of the message :param content: The type of content the message [text/html] :return: True on success, False otherwise
def hazard_at_times(self, times, label=None):
    """Return a Pandas series of the predicted hazard at specific times.

    Parameters
    ----------
    times: iterable or float
        values to return the hazard at.
    label: string, optional
        Rename the series returned. Useful for plotting.

    Returns
    -------
    pd.Series
    """
    # Fall back to the fitter's own label when none is supplied.
    label = coalesce(label, self._label)
    return pd.Series(self._hazard(self._fitted_parameters_, times),
                     index=_to_array(times), name=label)
Return a Pandas series of the predicted hazard at specific times. Parameters ----------- times: iterable or float values to return the hazard at. label: string, optional Rename the series returned. Useful for plotting. Returns -------- pd.Series
def rate_overall(self):
    """Return the overall average rate based on the start time.

    Falls back to the current ``rate`` when nothing has elapsed yet,
    avoiding a division by zero.
    """
    elapsed = self.elapsed
    if not elapsed:
        return self.rate
    # Fixed: divide by the locally captured value. The original re-read
    # ``self.elapsed``, so a time-derived property would be sampled twice
    # and the division used a later value than the guard tested.
    return self.numerator / elapsed
Returns the overall average rate based on the start time.
def start_numbered_list(self):
    """Start a numbered (ordered) list container."""
    self._ordered = True
    self.start_container(List, stylename='_numbered_list')
    # Top-level items and nested sub-lists use different paragraph styles.
    self.set_next_paragraph_style(
        'numbered-list-paragraph' if self._item_level <= 0
        else 'sublist-paragraph')
Start a numbered list.
def new_instance(settings):
    """MAKE A PYTHON INSTANCE

    `settings` HAS ALL THE `kwargs`, PLUS `class` ATTRIBUTE TO INDICATE THE
    CLASS TO CREATE.
    """
    settings = set_default({}, settings)
    if not settings["class"]:
        Log.error("Expecting 'class' attribute with fully qualified class name")

    # Split the dotted path into module path and class name.
    path = settings["class"].split(".")
    class_name = path[-1]
    path = ".".join(path[:-1])

    constructor = None
    try:
        temp = __import__(path, globals(), locals(), [class_name], 0)
        constructor = object.__getattribute__(temp, class_name)
    except Exception as e:
        Log.error("Can not find class {{class}}", {"class": path}, cause=e)

    settings['class'] = None
    # First try the convention of a single `kwargs` parameter; if the
    # constructor rejects it, fall back to plain keyword expansion below.
    try:
        return constructor(kwargs=settings)
    except Exception as e:
        pass  # deliberate: fall through to the **settings attempt

    try:
        return constructor(**settings)
    except Exception as e:
        Log.error("Can not create instance of {{name}}",
                  name=".".join(path), cause=e)
MAKE A PYTHON INSTANCE `settings` HAS ALL THE `kwargs`, PLUS `class` ATTRIBUTE TO INDICATE THE CLASS TO CREATE
def plot_eigh2(self, colorbar=True, cb_orientation='vertical',
               cb_label=None, ax=None, show=True, fname=None,
               **kwargs):
    """Plot the second eigenvalue of the horizontal tensor.

    :param colorbar: if True, plot a colorbar
    :param cb_orientation: 'vertical' or 'horizontal'
    :param cb_label: colorbar text label (defaults to the class label)
    :param ax: existing matplotlib axes to draw into
    :param show: if True (and no `ax`), display the figure
    :param fname: if set (and no `ax`), save the figure to this file
    :param kwargs: forwarded to SHGrid.plot() / plt.imshow()
    :return: (fig, axes) when `ax` is None, otherwise None
    """
    if cb_label is None:
        cb_label = self._eigh2_label

    # Lazily compute the eigenvalues on first use.
    if self.eigh2 is None:
        self.compute_eigh()

    if ax is None:
        fig, axes = self.eigh2.plot(colorbar=colorbar,
                                    cb_orientation=cb_orientation,
                                    cb_label=cb_label, show=False,
                                    **kwargs)
        if show:
            fig.show()

        if fname is not None:
            fig.savefig(fname)
        return fig, axes
    else:
        self.eigh2.plot(colorbar=colorbar,
                        cb_orientation=cb_orientation,
                        cb_label=cb_label, ax=ax, **kwargs)
Plot the second eigenvalue of the horizontal tensor. Usage ----- x.plot_eigh2([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation, cb_label, show, fname]) Parameters ---------- tick_interval : list or tuple, optional, default = [30, 30] Intervals to use when plotting the x and y ticks. If set to None, ticks will not be plotted. xlabel : str, optional, default = 'longitude' Label for the longitude axis. ylabel : str, optional, default = 'latitude' Label for the latitude axis. ax : matplotlib axes object, optional, default = None A single matplotlib axes object where the plot will appear. colorbar : bool, optional, default = True If True, plot a colorbar. cb_orientation : str, optional, default = 'vertical' Orientation of the colorbar: either 'vertical' or 'horizontal'. cb_label : str, optional, default = '$\lambda_{h2}$, Eotvos$^{-1}$' Text label for the colorbar. show : bool, optional, default = True If True, plot the image to the screen. fname : str, optional, default = None If present, and if axes is not specified, save the image to the specified file. kwargs : optional Keyword arguements that will be sent to the SHGrid.plot() and plt.imshow() methods.
def add_new_host(self, hostname, ipv4addr, comment=None):
    """Add or update a host in the infoblox, overwriting any IP address
    entries.

    :param str hostname: Hostname to add/set
    :param str ipv4addr: IP Address to add/set
    :param str comment: The comment for the record
    """
    host = Host(self.session, name=hostname)
    # Clear any pre-existing addresses so the new one replaces them.
    if host.ipv4addrs:
        host.ipv4addrs = []
    host.add_ipv4addr(ipv4addr)
    # NOTE(review): a pre-existing comment is overwritten with None when the
    # caller does not pass one — confirm that is intended.
    host.comment = comment
    return host.save()
Add or update a host in the infoblox, overwriting any IP address entries. :param str hostname: Hostname to add/set :param str ipv4addr: IP Address to add/set :param str comment: The comment for the record
def find_classes(folder:Path)->FilePathList:
    "List of label subdirectories in imagenet-style `folder`."
    classes = sorted(
        (entry for entry in folder.iterdir()
         if entry.is_dir() and not entry.name.startswith('.')),
        key=lambda entry: entry.name)
    assert(len(classes)>0)
    return classes
List of label subdirectories in imagenet-style `folder`.
def parse_compound(s, global_compartment=None):
    """Parse a compound specification.

    If no compartment is specified in the string, the global compartment
    will be used.
    """
    pipes = re.match(r'^\|(.*)\|$', s)
    if pipes:
        s = pipes.group(1)
    bracketed = re.match(r'^(.+)\[(\S+)\]$', s)
    if bracketed:
        compound_id, compartment = bracketed.group(1), bracketed.group(2)
    else:
        compound_id, compartment = s, global_compartment
    return Compound(compound_id, compartment=compartment)
Parse a compound specification. If no compartment is specified in the string, the global compartment will be used.
def _init_topics(self):
    """Set up initial subscription of mysensors MQTT topics."""
    _LOGGER.info('Setting up initial MQTT topic subscription')
    # Always listen for message types 0 and 3 from any sensor/child.
    init_topics = [
        '{}/+/+/0/+/+'.format(self._in_prefix),
        '{}/+/+/3/+/+'.format(self._in_prefix),
    ]
    self._handle_subscription(init_topics)
    if not self.persistence:
        return
    # With persistence, also subscribe to set/req topics for every known
    # sensor child, plus stream topics per sensor.
    topics = [
        '{}/{}/{}/{}/+/+'.format(
            self._in_prefix, str(sensor.sensor_id), str(child.id),
            msg_type) for sensor in self.sensors.values()
        for child in sensor.children.values()
        for msg_type in (int(self.const.MessageType.set),
                         int(self.const.MessageType.req))
    ]
    topics.extend([
        '{}/{}/+/{}/+/+'.format(
            self._in_prefix, str(sensor.sensor_id),
            int(self.const.MessageType.stream))
        for sensor in self.sensors.values()])
    self._handle_subscription(topics)
Set up initial subscription of mysensors topics.
def _stop_lock_renewer(self):
    """Stop the lock renewer.

    This signals the renewal thread and waits for its exit.
    """
    if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
        return
    logger.debug("Signalling the lock refresher to stop")
    self._lock_renewal_stop.set()
    # join() before dropping the reference so the thread has fully exited.
    self._lock_renewal_thread.join()
    self._lock_renewal_thread = None
    logger.debug("Lock refresher has stopped")
Stop the lock renewer. This signals the renewal thread and waits for its exit.
def accept_dict(match, include_rejected=False, include_denied=False):
    """Accept keys based on a dict of keys. Returns a dictionary.

    match
        The dictionary of keys to accept.

    include_rejected
        Include rejected keys in the match along with pending keys.
        Defaults to ``False``.

    include_denied
        Include denied keys in the match along with pending keys.
        Defaults to ``False``.
    """
    skey = get_key(__opts__)
    return skey.accept(match_dict=match,
                       include_rejected=include_rejected,
                       include_denied=include_denied)
Accept keys based on a dict of keys. Returns a dictionary. match The dictionary of keys to accept. include_rejected To include rejected keys in the match along with pending keys, set this to ``True``. Defaults to ``False``. .. versionadded:: 2016.3.4 include_denied To include denied keys in the match along with pending keys, set this to ``True``. Defaults to ``False``. .. versionadded:: 2016.3.4 Example to move a list of keys from the ``minions_pre`` (pending) directory to the ``minions`` (accepted) directory: .. code-block:: python >>> wheel.cmd('key.accept_dict', { 'minions_pre': [ 'jerry', 'stuart', 'bob', ], }) {'minions': ['jerry', 'stuart', 'bob']}
def get_single_stack(self):
    """Get the correct single stack item to use.

    Drains the stack; the value returned is the last item popped, or
    None when the stack is already empty.
    """
    result = None
    stack = self.single_stack
    while stack:
        result = stack.pop()
    return result
Get the correct single stack item to use.
def get_model_by_name(model_name):
    """Get model by its name.

    :param str model_name: dotted name of the model, e.g. ``'auth.User'``
    :return django.db.models.Model: the model class
    :raises ValueError: when `model_name` is not a dotted model path
    """
    if isinstance(model_name, six.string_types) and \
            len(model_name.split('.')) == 2:
        app_name, model_name = model_name.split('.')
        # models.get_model was removed in Django 1.8 in favour of the
        # app registry.
        if django.VERSION[:2] < (1, 8):
            model = models.get_model(app_name, model_name)
        else:
            from django.apps import apps
            model = apps.get_model(app_name, model_name)
    else:
        raise ValueError("{0} is not a Django model".format(model_name))
    return model
Get model by its name. :param str model_name: name of model. :return django.db.models.Model: Example: get_model_by_name('auth.User') django.contrib.auth.models.User
def send_message(self, chat_id, text, **options):
    """Send a text message to chat.

    :param int chat_id: ID of the chat to send the message to
    :param str text: text to send
    :param options: additional sendMessage options
        (see https://core.telegram.org/bots/api#sendmessage)
    """
    return self.api_call("sendMessage", chat_id=chat_id, text=text, **options)
Send a text message to chat :param int chat_id: ID of the chat to send the message to :param str text: Text to send :param options: Additional sendMessage options (see https://core.telegram.org/bots/api#sendmessage)
def during(rrule, duration=None, timestamp=None, **kwargs):
    """Check if the input timestamp is in the rrule+duration period.

    :param rrule: rrule to check
    :type rrule: str or dict (freq, dtstart, interval, count, ...)
    :param dict duration: time duration from the rrule step,
        e.g. ``{'minutes': 60}``
    :param float timestamp: timestamp to check; defaults to now
    """
    result = False

    if isinstance(rrule, string_types):
        rrule_object = rrule_class.rrulestr(rrule)
    else:
        rrule_object = rrule_class(**rrule)

    if timestamp is None:
        timestamp = time()

    now = datetime.fromtimestamp(timestamp)
    # NOTE(review): when duration is None this becomes datetime + datetime
    # below, which raises TypeError — confirm callers always pass a duration.
    duration_delta = now if duration is None else relativedelta(**duration)

    # Most recent occurrence at or before `now`.
    last_date = rrule_object.before(now, inc=True)
    if last_date is not None:
        next_date = last_date + duration_delta
        result = last_date <= now <= next_date

    return result
Check if input timestamp is in rrule+duration period :param rrule: rrule to check :type rrule: str or dict (freq, dtstart, interval, count, wkst, until, bymonth, byminute, etc.) :param dict duration: time duration from rrule step. Ex:{'minutes': 60} :param float timestamp: timestamp to check between rrule+duration. If None, use now
def _validate_charset(data, charset): if len(charset) > 1: charset_data_length = 0 for symbol_charset in charset: if symbol_charset not in ('A', 'B', 'C'): raise Code128.CharsetError charset_data_length += 2 if symbol_charset is 'C' else 1 if charset_data_length != len(data): raise Code128.CharsetLengthError elif len(charset) == 1: if charset not in ('A', 'B', 'C'): raise Code128.CharsetError elif charset is not None: raise Code128.CharsetError
Validate that the charset is correct and throw an error if it isn't.
def description(self):
    """Attribute that returns the plugin description from its docstring.

    Joins the non-empty stripped lines after the first two into one string.
    """
    stripped = (raw.strip() for raw in self.__doc__.split('\n')[2:])
    return ' '.join(text for text in stripped if text)
Attribute that returns the plugin description from its docstring.
def generate_pattern_list(self):
    """Generate a dict of patterns mapped to their support counts."""
    items = self.frequent.keys()

    # Suffix is empty for the tree root, else the root's own value.
    suffix_value = [] if self.root.value is None else [self.root.value]
    patterns = {tuple(suffix_value): self.root.count}

    for size in range(1, len(items) + 1):
        for subset in itertools.combinations(items, size):
            pattern = tuple(sorted(list(subset) + suffix_value))
            # Support of a pattern is the minimum support of its members.
            patterns[pattern] = min([self.frequent[x] for x in subset])

    return patterns
Generate a list of patterns with support counts.
def get_point_model(cls):
    """Return the plugin point model instance.

    Only used from plugin classes; raises when called on a plugin point.
    """
    if is_plugin_point(cls):
        raise Exception(_('This method is only available to plugin '
                          'classes.'))
    else:
        return PluginPointModel.objects.\
            get(plugin__pythonpath=cls.get_pythonpath())
Returns plugin point model instance. Only used from plugin classes.
def _expand_independent_outputs(fvar, full_cov, full_output_cov):
    """Reshape `fvar` to the shape implied by the two flags.

    :param fvar: has shape N x P (full_cov = False) or
        P x N x N (full_cov = True).
    :return:
        1. full_cov and full_output_cov:  N x P x N x P
        2. full_cov only:                 P x N x N (unchanged)
        3. full_output_cov only:          N x P x P
        4. neither:                       N x P (unchanged)
    """
    if full_cov and full_output_cov:
        fvar = tf.matrix_diag(tf.transpose(fvar))
        fvar = tf.transpose(fvar, [0, 2, 1, 3])
    if not full_cov and full_output_cov:
        fvar = tf.matrix_diag(fvar)
    if full_cov and not full_output_cov:
        pass  # already in the target shape
    if not full_cov and not full_output_cov:
        pass  # already in the target shape
    return fvar
Reshapes fvar to the correct shape, specified by `full_cov` and `full_output_cov`. :param fvar: has shape N x P (full_cov = False) or P x N x N (full_cov = True). :return: 1. full_cov: True and full_output_cov: True fvar N x P x N x P 2. full_cov: True and full_output_cov: False fvar P x N x N 3. full_cov: False and full_output_cov: True fvar N x P x P 4. full_cov: False and full_output_cov: False fvar N x P
def dallinger():
    """Dallinger command-line utility entry point."""
    from logging.config import fileConfig
    # Configure logging from the package-local logging.ini without
    # disabling loggers other modules have already created.
    fileConfig(
        os.path.join(os.path.dirname(__file__), "logging.ini"),
        disable_existing_loggers=False,
    )
Dallinger command-line utility.
def user_agent(style=None) -> _UserAgent:
    """Return an apparently legit user-agent, optionally of a specific
    style. Defaults to a Chrome-style User-Agent.
    """
    global useragent
    # Lazily instantiate the shared UserAgent source only when a specific
    # style is requested.
    if (not useragent) and style:
        useragent = UserAgent()
    return useragent[style] if style else DEFAULT_USER_AGENT
Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent.
def merge_from_obj(self, obj, lists_only=False):
    """Merge a configuration object into this one.

    See :meth:`ConfigurationObject.merge` for details.

    :param obj: Values to update the ConfigurationObject with.
    :type obj: ConfigurationObject
    :param lists_only: Only merge list-typed attributes.
    :type lists_only: bool
    """
    self.clean()
    obj.clean()
    obj_config = obj._config
    all_props = self.__class__.CONFIG_PROPERTIES
    for key, value in six.iteritems(obj_config):
        attr_config = all_props[key]
        attr_type, default, __, merge_func = attr_config[:4]
        # Skip unmergeable attributes, untouched defaults, and (in
        # lists_only mode) anything that is not list-typed.
        if (merge_func is not False and value != default and
                (not lists_only or (attr_type and issubclass(attr_type, list)))):
            self._merge_value(attr_type, merge_func, key, value)
Merges a configuration object into this one. See :meth:`ConfigurationObject.merge` for details. :param obj: Values to update the ConfigurationObject with. :type obj: ConfigurationObject :param lists_only: Ignore single-value attributes and update dictionary options. :type lists_only: bool
def c2f(r, i, ctype_name):
    """Convert real/imaginary strings to a complex number instance of the
    numpy type named by `ctype_name`.

    :param r: real part (string or number)
    :param i: imaginary part (string or number)
    :param ctype_name: numpy complex type name, e.g. 'complex128'
    """
    ftype = c2f_dict[ctype_name]
    # Fixed: np.typeDict was a deprecated alias of np.sctypeDict and was
    # removed in numpy 1.24; use the supported name.
    return np.sctypeDict[ctype_name](ftype(r) + 1j * ftype(i))
Convert strings to complex number instance with specified numpy type.
def describe_api_resource(restApiId, path, region=None, key=None, keyid=None,
                          profile=None):
    """Given a rest api id and an absolute resource path, return the
    resource for the given path (``{'resource': None}`` when absent).

    CLI Example:

    .. code-block:: bash

        salt myminion boto_apigateway.describe_api_resource myapi_id resource_path
    """
    r = describe_api_resources(restApiId, region=region, key=key,
                               keyid=keyid, profile=profile)
    resources = r.get('resources')
    if resources is None:
        # Propagate the error response unchanged.
        return r
    matched = next((res for res in resources if res['path'] == path), None)
    return {'resource': matched}
Given rest api id, and an absolute resource path, returns the resource id for the given path. CLI Example: .. code-block:: bash salt myminion boto_apigateway.describe_api_resource myapi_id resource_path
def is_switched_on(self, refresh=False):
    """Get dimmer state: True when brightness is above zero.

    Refresh data from Vera first if `refresh` is True; otherwise the
    local cache is used. Refresh is only needed without subscriptions.
    """
    if refresh:
        self.refresh()
    brightness = self.get_brightness(refresh)
    return brightness > 0
Get dimmer state. Refresh data from Vera if refresh is True, otherwise use local cache. Refresh is only needed if you're not using subscriptions.
def _normalize_joliet_path(self, joliet_path): tmp_path = b'' if self.joliet_vd is not None: if not joliet_path: raise pycdlibexception.PyCdlibInvalidInput('A Joliet path must be passed for a Joliet ISO') tmp_path = utils.normpath(joliet_path) else: if joliet_path: raise pycdlibexception.PyCdlibInvalidInput('A Joliet path can only be specified for a Joliet ISO') return tmp_path
An internal method to check whether this ISO does or does not require a Joliet path. If a Joliet path is required, the path is normalized and returned. Parameters: joliet_path - The joliet_path to normalize (if necessary). Returns: The normalized joliet_path if this ISO has Joliet, None otherwise.
def send_message(self, message, sign=True):
    """Send the given message to the connection.

    @type message: OmapiMessage
    @type sign: bool
    @param sign: whether the message needs to be signed
    @raises OmapiError:
    @raises socket.error:
    """
    self.check_connected()
    self.protocol.send_message(message, sign)
Sends the given message to the connection. @type message: OmapiMessage @type sign: bool @param sign: whether the message needs to be signed @raises OmapiError: @raises socket.error:
def lssitepackages_cmd(argv):
    """Show the content of the site-packages directory of the current
    virtualenv, including any extra paths registered in
    _virtualenv_path_extensions.pth."""
    site = sitepackages_dir()
    print(*sorted(site.iterdir()), sep=os.linesep)
    extra_paths = site / '_virtualenv_path_extensions.pth'
    if extra_paths.exists():
        print('from _virtualenv_path_extensions.pth:')
        with extra_paths.open() as extra:
            print(''.join(extra.readlines()))
Show the content of the site-packages directory of the current virtualenv.
def removeService(self, service):
    """Remove a service from the gateway.

    @param service: either the name of the service or the service object
        to remove from the gateway.
    @raise NameError: Service not found.
    """
    # NOTE: iteritems() is Python 2 only; this module predates Python 3.
    for name, wrapper in self.services.iteritems():
        if service in (name, wrapper.service):
            # Returning immediately makes delete-while-iterating safe here.
            del self.services[name]
            return

    raise NameError("Service %r not found" % (service,))
Removes a service from the gateway. @param service: Either the name of the service or the service object to remove from the gateway. @type service: C{str}, C{callable} or a class instance @raise NameError: Service not found.
def describe(self): counts = self.value_counts(dropna=False) freqs = counts / float(counts.sum()) from pandas.core.reshape.concat import concat result = concat([counts, freqs], axis=1) result.columns = ['counts', 'freqs'] result.index.name = 'categories' return result
Describes this Categorical Returns ------- description: `DataFrame` A dataframe with frequency and counts by category.
def _format_with_same_year_and_month(format_specifier):
    """Return a version of `format_specifier` that renders a date assuming
    it has the same year and month as another date — usually by omitting
    the year and month.

    Can be overridden by defining a format named
    ``<format_specifier>_SAME_YEAR_SAME_MONTH`` in the project's
    `formats` spec.
    """
    test_format_specifier = format_specifier + "_SAME_YEAR_SAME_MONTH"
    test_format = get_format(test_format_specifier, use_l10n=True)
    if test_format == test_format_specifier:
        # get_format() echoes the name back when no such format exists;
        # fall back to stripping the year and month tokens ourselves.
        no_year = re.sub(YEAR_RE, '', get_format(format_specifier))
        return re.sub(MONTH_RE, '', no_year)
    else:
        return test_format
Return a version of `format_specifier` that renders a date assuming it has the same year and month as another date. Usually this means ommitting the year and month. This can be overridden by specifying a format that has `_SAME_YEAR_SAME_MONTH` appended to the name in the project's `formats` spec.
def clear(self, color: Tuple[int, int, int]) -> None:
    """Fill this entire Image with color.

    Args:
        color (Union[Tuple[int, int, int], Sequence[int]]):
            An (r, g, b) sequence or Color instance.
    """
    lib.TCOD_image_clear(self.image_c, color)
Fill this entire Image with color. Args: color (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b) sequence or Color instance.
def yunit(self):
    """Unit of the Y-axis index (`~astropy.units.Unit`).

    Prefers the dy unit, then the y0 unit, then the class default.
    """
    for attr in ('_dy', '_y0'):
        try:
            return getattr(self, attr).unit
        except AttributeError:
            continue
    return self._default_yunit
Unit of Y-axis index :type: `~astropy.units.Unit`
def _load_form_data(self):
    """Method used internally to retrieve submitted data.

    After calling this, `form` and `files` on the request object are
    multi dicts filled with the incoming form data, and the input stream
    will be empty afterwards. Can also be called to force parsing of the
    form data.

    .. versionadded:: 0.8
    """
    # Abort early if already parsed ("form" is cached in __dict__).
    if "form" in self.__dict__:
        return

    _assert_not_shallow(self)

    if self.want_form_data_parsed:
        content_type = self.environ.get("CONTENT_TYPE", "")
        content_length = get_content_length(self.environ)
        mimetype, options = parse_options_header(content_type)
        parser = self.make_form_data_parser()
        data = parser.parse(
            self._get_stream_for_parsing(), mimetype, content_length, options
        )
    else:
        # Parsing disabled: expose the raw stream and empty containers.
        data = (
            self.stream,
            self.parameter_storage_class(),
            self.parameter_storage_class(),
        )

    # Assign directly into __dict__ so cached descriptors see the values
    # without re-triggering this method.
    d = self.__dict__
    d["stream"], d["form"], d["files"] = data
Method used internally to retrieve submitted data. After calling this sets `form` and `files` on the request object to multi dicts filled with the incoming form data. As a matter of fact the input stream will be empty afterwards. You can also call this method to force the parsing of the form data. .. versionadded:: 0.8
def get(cls, key, section=None, **kwargs):
    """Retrieve a config value, falling back to the default section.

    :param key: config entry name
    :param section: section to read from (default section when None)
    :param kwargs: may contain ``default``, returned when the key is absent
    :raises InvalidConfigException: key not found and no default given
    """
    section = section or cls._default_sect
    # Lazily load the section on first access.
    if section not in cls._conf:
        cls._load(section=section)
    value = cls._conf[section].get(key)
    # Fall back to the default section when the key is absent (or falsy).
    if not value and section != cls._default_sect:
        value = cls._conf[cls._default_sect].get(key) if cls._default_sect in cls._conf else None
    if value is None:
        if 'default' in kwargs:
            _def_value = kwargs['default']
            logger.warn("Static configuration [{}] was not found. Using the default value [{}].".format(key, _def_value))
            return _def_value
        else:
            raise InvalidConfigException(u'Not found entry: {}'.format(key))
    try:
        # Values may be stored as JSON; fall back to the raw string.
        value = from_json(value)
    except (TypeError, ValueError):
        pass
    return value
Retrieves a config value from the dict. If not found, throws an InvalidConfigException.
def add_alternative (self, target_instance):
    """Add new target alternative.

    :raises IllegalOperation: when main targets were already created for
        this project.
    """
    assert isinstance(target_instance, AbstractTarget)

    if self.built_main_targets_:
        raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())

    self.alternatives_.append (target_instance)
Add new target alternative.
def connect_sftp_with_cb(sftp_cb, *args, **kwargs):
    """A "managed" SFTP session.

    When the SSH session and an additional SFTP session are ready, invoke
    the `sftp_cb` callback with both; teardown is handled by the context
    manager.
    """
    with _connect_sftp(*args, **kwargs) as (ssh, sftp):
        sftp_cb(ssh, sftp)
A "managed" SFTP session. When the SSH session and an additional SFTP session are ready, invoke the sftp_cb callback.
def _get_urls(self, version, cluster_stats): pshard_stats_url = "/_stats" health_url = "/_cluster/health" if version >= [0, 90, 10]: pending_tasks_url = "/_cluster/pending_tasks" stats_url = "/_nodes/stats" if cluster_stats else "/_nodes/_local/stats" if version < [5, 0, 0]: stats_url += "?all=true" else: pending_tasks_url = None stats_url = "/_cluster/nodes/stats?all=true" if cluster_stats else "/_cluster/nodes/_local/stats?all=true" return health_url, stats_url, pshard_stats_url, pending_tasks_url
Compute the URLs we need to hit depending on the running ES version
def list_objects(self, prefix=None, delimiter=None):
    """List the objects for this bucket.

    :param str prefix: If specified, only objects that start with this
        prefix are listed.
    :param str delimiter: If specified, return only objects whose name do
        not contain the delimiter after the prefix; other names are
        truncated after the delimiter, duplicates omitted.
    """
    return self._client.list_objects(
        instance=self._instance, bucket_name=self.name, prefix=prefix,
        delimiter=delimiter)
List the objects for this bucket. :param str prefix: If specified, only objects that start with this prefix are listed. :param str delimiter: If specified, return only objects whose name do not contain the delimiter after the prefix. For the other objects, the response contains (in the prefix response parameter) the name truncated after the delimiter. Duplicates are omitted.
def _CheckIsLink(self, file_entry):
  """Checks the is_link find specification.

  Args:
    file_entry (FileEntry): file entry.

  Returns:
    bool: True if the file entry matches the find specification,
        False if not.
  """
  if definitions.FILE_ENTRY_TYPE_LINK in self._file_entry_types:
    return file_entry.IsLink()
  return False
Checks the is_link find specification. Args: file_entry (FileEntry): file entry. Returns: bool: True if the file entry matches the find specification, False if not.
def add_geo_facet(self, *args, **kwargs):
    """Add a geo distance facet to this query's facet list."""
    self.facets.append(GeoDistanceFacet(*args, **kwargs))
Add a geo factory facet
def match_any_char(self, chars, offset=0):
    """Match and return the current SourceString char if it is in `chars`.

    Returns '' on no match or when there is no remaining input; the
    position is never advanced.
    """
    if not self.has_space(offset=offset):
        return ''
    candidate = self.string[self.pos + offset]
    if candidate in chars:
        return candidate
    return ''
Match and return the current SourceString char if its in chars.
def get_transition_m(self, transition_id):
    """Search for and return the transition model with the given id.

    :param transition_id: The transition id to be searched
    :return: The model of the transition or None if it is not found
    """
    return next(
        (model for model in self.transitions
         if model.transition.transition_id == transition_id),
        None)
Searches and return the transition model with the given in the given container state model :param transition_id: The transition id to be searched :return: The model of the transition or None if it is not found
def iter_target_siblings_and_ancestors(self, target):
    """Produce an iterator over a target's siblings and ancestor lineage.

    :returns: an iterator yielding the target and its siblings, then its
        ancestors' siblings from nearest to furthest removed.
    """
    def iter_targets_in_spec_path(spec_path):
        # All addressable targets declared in this one spec path.
        try:
            siblings = SiblingAddresses(spec_path)
            for address in self._build_graph.inject_specs_closure([siblings]):
                yield self._build_graph.get_target(address)
        except AddressLookupError:
            # No targets at this path — treat as empty.
            pass

    def iter_siblings_and_ancestors(spec_path):
        for sibling in iter_targets_in_spec_path(spec_path):
            yield sibling
        parent_spec_path = os.path.dirname(spec_path)
        # dirname() is a fixed point at the root, which ends the recursion.
        if parent_spec_path != spec_path:
            for parent in iter_siblings_and_ancestors(parent_spec_path):
                yield parent

    for target in iter_siblings_and_ancestors(target.address.spec_path):
        yield target
Produces an iterator over a target's siblings and ancestor lineage. :returns: A target iterator yielding the target and its siblings and then it ancestors from nearest to furthest removed.
def bytes2str(bs):
    """For cross compatibility between Python 2 and Python 3 strings:
    decode bytes to str on Python 3, pass through otherwise."""
    if PY_MAJOR_VERSION > 2 and isinstance(bs, type(b'')):
        return bs.decode('latin1')
    return bs
For cross compatibility between Python 2 and Python 3 strings.
def parse(self, line):
    """Parse a line, return a Message.

    Parameters
    ----------
    line : str
        The line to parse (should not contain the terminating newline
        or carriage return).

    Returns
    -------
    msg : Message object
        The resulting Message.
    """
    if not line:
        raise KatcpSyntaxError("Empty message received.")

    # The first character encodes the message type.
    type_char = line[0]
    if type_char not in self.TYPE_SYMBOL_LOOKUP:
        raise KatcpSyntaxError("Bad type character %r." % (type_char,))

    mtype = self.TYPE_SYMBOL_LOOKUP[type_char]
    parts = self.WHITESPACE_RE.split(line)

    # Trailing whitespace produces one empty trailing part; drop it.
    if not parts[-1]:
        del parts[-1]

    name = parts[0][1:]
    arguments = [self._parse_arg(x) for x in parts[1:]]

    # NAME_RE splits a possible message id out of the name portion.
    match = self.NAME_RE.match(name)
    if match:
        name = match.group('name')
        mid = match.group('id')
    else:
        raise KatcpSyntaxError("Bad message name (and possibly id) %r."
                               % (name,))

    return Message(mtype, name, arguments, mid)
Parse a line, return a Message. Parameters ---------- line : str The line to parse (should not contain the terminating newline or carriage return). Returns ------- msg : Message object The resulting Message.
def validate_keys(dict_, expected, funcname):
    """Validate that a dictionary has an expected set of keys.

    :param dict_: mapping whose keys are checked
    :param expected: iterable of required key names
    :param funcname: name used in the error message
    :raises ValueError: when keys are missing or unexpected
    """
    expected = set(expected)
    received = set(dict_)

    if expected - received:
        raise ValueError(
            "Missing keys in {}:\n"
            "Expected Keys: {}\n"
            "Received Keys: {}".format(
                funcname,
                sorted(expected),
                sorted(received),
            )
        )

    if received - expected:
        raise ValueError(
            "Unexpected keys in {}:\n"
            "Expected Keys: {}\n"
            "Received Keys: {}".format(
                funcname,
                sorted(expected),
                sorted(received),
            )
        )
Validate that a dictionary has an expected set of keys.
def get_parser():
    """Return argument parser."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument('--host', default='localhost')
    parser.add_argument('--port', type=int, default=6379)
    parser.add_argument('--db', type=int, default=0)
    # str() wrappers kept for Python 2 unicode_literals compatibility.
    parser.add_argument(
        'command',
        choices=(str('follow'), str('lock'), str('reset'), str('status')))
    parser.add_argument('resources', nargs='*', metavar='RESOURCE')
    return parser
Return argument parser.
def shared_dataset_ids(self):
    """Dataset IDs shared by all children."""
    shared = set(self.scenes[0].keys())
    for scene in self.scenes[1:]:
        shared.intersection_update(scene.keys())
    return shared
Dataset IDs shared by all children.
def queue_path(cls, project, location, queue):
    """Return a fully-qualified queue resource string."""
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/queues/{queue}",
        project=project,
        location=location,
        queue=queue,
    )
Return a fully-qualified queue string.
def finish(self):
    "End this session"
    log.debug("Session disconnected.")
    try:
        self.sock.shutdown(socket.SHUT_RDWR)
    except:
        # Best effort: the socket may already be closed or disconnected.
        pass
    self.session_end()
End this session
def common_start(*args):
    """Return the stripped longest common prefix of the given strings,
    followed (comma-separated) by any arguments that do not start with it.
    """
    prefix_chars = []
    for chars in zip(*args):
        if len(set(chars)) < len(args):
            prefix_chars.append(chars[0])
        else:
            break
    out = "".join(prefix_chars).strip()
    remainder = [text for text in args if not text.startswith(out)]
    return ', '.join([out] + remainder)
returns the longest common substring from the beginning of sa and sb
def stash(self, payload):
    """Stash the specified processes.

    Marks each queued entry named in `payload['keys']` as 'stashed';
    keys that are absent or not in 'queued' state are reported as
    failures in the returned answer dict.
    """
    succeeded = []
    failed = []
    for key in payload['keys']:
        entry = self.queue.get(key)
        if entry is not None and entry['status'] == 'queued':
            entry['status'] = 'stashed'
            succeeded.append(str(key))
        else:
            failed.append(str(key))

    message = ''
    if len(succeeded) > 0:
        message += 'Stashed entries: {}.'.format(', '.join(succeeded))
        status = 'success'
    if len(failed) > 0:
        message += '\nNo queued entry for keys: {}'.format(', '.join(failed))
        status = 'error'

    answer = {'message': message.strip(), 'status': status}
    return answer
Stash the specified processes.
def add_child(self, tree):
    """Append `tree` to this tree's children; this tree becomes the
    added tree's parent.

    :return: the added child tree
    """
    self.children.append(tree)
    tree.parent = self
    return tree
Add a child to the list of this tree's children This tree becomes the added tree's parent
def add_one(self, url: str, url_properties: Optional[URLProperties]=None,
            url_data: Optional[URLData]=None):
    """Add a single URL to the table.

    Args:
        url: The URL to be added
        url_properties: Additional values to be saved
        url_data: Additional data to be saved
    """
    # Thin convenience wrapper over the batch insert.
    self.add_many([AddURLInfo(url, url_properties, url_data)])
Add a single URL to the table. Args: url: The URL to be added url_properties: Additional values to be saved url_data: Additional data to be saved
def unit_action(self, cmd, pos, shift):
    """Return a `sc_pb.Action` filled with the cmd and appropriate target.

    :param cmd: the ability command to issue
    :param pos: click position (with its surface), or None for untargeted
    :param shift: whether to queue the command behind current orders
    """
    action = sc_pb.Action()
    if pos:
        action_spatial = pos.action_spatial(action)
        unit_command = action_spatial.unit_command
        unit_command.ability_id = cmd.ability_id
        unit_command.queue_command = shift
        # Target coordinates go to whichever surface was clicked.
        if pos.surf.surf_type & SurfType.SCREEN:
            pos.obs_pos.assign_to(unit_command.target_screen_coord)
        elif pos.surf.surf_type & SurfType.MINIMAP:
            pos.obs_pos.assign_to(unit_command.target_minimap_coord)
    else:
        # Untargeted command: pick the action space matching the render mode.
        if self._feature_screen_px:
            action.action_feature_layer.unit_command.ability_id = cmd.ability_id
        else:
            action.action_render.unit_command.ability_id = cmd.ability_id

    self.clear_queued_action()
    return action
Return a `sc_pb.Action` filled with the cmd and appropriate target.
def _get_line(self, search_string, search_file, return_string=True, case_sens=True): if os.path.isfile(search_file): if type(search_string) == type(''): search_string = [search_string] if not case_sens: search_string = [i.lower() for i in search_string] with open(search_file) as fp: for line in fp: query_line = line if case_sens else line.lower() if all([i in query_line for i in search_string]): return line if return_string else True if return_string: raise Exception('%s not found in %s'%(' & '.join(search_string), search_file)) else: return False else: raise Exception('%s file does not exist'%search_file)
Return the first line containing a set of strings in a file. If return_string is False, we just return whether such a line was found. If case_sens is False, the search is case insensitive.
def config_namespace(config_file=None, auto_find=False, verify=True,
                     **cfg_options):
    """Return configuration options as a Namespace.

    .. code:: python

        reusables.config_namespace(os.path.join("test", "data",
                                                "test_config.ini"))
        # <Namespace: {'General': {'example': 'A regul...>

    :param config_file: path or paths to the files location
    :param auto_find: look for a config type file at this location or below
    :param verify: make sure the file exists before trying to read
    :param cfg_options: options to pass to the parser
    :return: Namespace of the config files
    """
    return ConfigNamespace(**config_dict(config_file, auto_find,
                                         verify, **cfg_options))
Return configuration options as a Namespace. .. code:: python reusables.config_namespace(os.path.join("test", "data", "test_config.ini")) # <Namespace: {'General': {'example': 'A regul...> :param config_file: path or paths to the files location :param auto_find: look for a config type file at this location or below :param verify: make sure the file exists before trying to read :param cfg_options: options to pass to the parser :return: Namespace of the config files
def index(self, item):
    """Return the position of `item` in this OrderedSet.

    Raises:
        ValueError: if `item` is not a member.
    """
    for position, member in enumerate(self):
        if member == item:
            return position
    raise ValueError('%r is not in OrderedSet' % (item,))
Find the index of `item` in the OrderedSet Example: >>> # ENABLE_DOCTEST >>> import utool as ut >>> self = ut.oset([1, 2, 3]) >>> assert self.index(1) == 0 >>> assert self.index(2) == 1 >>> assert self.index(3) == 2 >>> ut.assert_raises(ValueError, self.index, 4)
def del_port(self, port_name):
    """Delete a port from this OVS bridge.

    Equivalent to the following ovs-vsctl command::

        $ ovs-vsctl del-port <bridge> <port>
    """
    self.run_command(
        [ovs_vsctl.VSCtlCommand('del-port', (self.br_name, port_name))])
Deletes a port on OVS instance. This method is corresponding to the following ovs-vsctl command:: $ ovs-vsctl del-port <bridge> <port>
def regularrun(
    shell,
    prompt_template="default",
    aliases=None,
    envvars=None,
    extra_commands=None,
    speed=1,
    test_mode=False,
    commentecho=False,
):
    """Let the user type and run live commands until CTRL-Z is pressed.

    Returns False (stop looping) when the user types TAB, True otherwise.
    """
    command_string = regulartype(prompt_template)
    # TAB is the sentinel that ends the interactive loop.
    if command_string == TAB:
        return False
    run_command(
        command_string,
        shell,
        aliases=aliases,
        envvars=envvars,
        extra_commands=extra_commands,
        test_mode=test_mode,
    )
    return True
Allow user to run their own live commands until CTRL-Z is pressed again.
def list_datastore_clusters(kwargs=None, call=None):
    """List all the datastore clusters for this VMware environment.

    CLI Example:

    .. code-block:: bash

        salt-cloud -f list_datastore_clusters my-vmware-config
    """
    if call != 'function':
        raise SaltCloudSystemExit(
            'The list_datastore_clusters function must be called with '
            '-f or --function.'
        )
    clusters = salt.utils.vmware.list_datastore_clusters(_get_si())
    return {'Datastore Clusters': clusters}
List all the datastore clusters for this VMware environment CLI Example: .. code-block:: bash salt-cloud -f list_datastore_clusters my-vmware-config
def play_mode(self, play_mode):
    """Set the play mode; see `playmode`.

    Raises:
        KeyError: if the upper-cased mode is not one of PLAY_MODES.
    """
    mode = play_mode.upper()
    if mode not in PLAY_MODES:
        raise KeyError("'%s' is not a valid play mode" % mode)
    self._play_mode = mode
See `playmode`.
def weighted_n(self):
    """float count of returned rows, adjusted for weighting."""
    if self.is_weighted:
        counts = self._cube_dict["result"]["measures"]["count"]["data"]
        return float(sum(counts))
    return float(self.unweighted_n)
float count of returned rows adjusted for weighting.
def set_coverage_placeName(self):
    """Build the prefill form properties for the placeName coverage field."""
    response = self.solr_response
    # Only use the facet list when we actually got a good Solr response.
    if response and response != 'error' and response.response != 'error':
        location_list = response.get_location_list_facet().facet_list
    else:
        location_list = []
    return {
        'view_type': 'prefill',
        'value_json': json.dumps(location_list, ensure_ascii=False),
        'value_py': location_list,
    }
Determine the properties for the placeName coverage field.
def delete_dscp_marking_rule(self, rule, policy):
    """Delete the DSCP marking rule `rule` from QoS policy `policy`."""
    path = self.qos_dscp_marking_rule_path % (policy, rule)
    return self.delete(path)
Deletes a DSCP marking rule.
def _write_pidfile(self):
    """Record the current process id in the configured pid file."""
    LOGGER.debug('Writing pidfile: %s', self.pidfile_path)
    with open(self.pidfile_path, "w") as pidfile:
        pidfile.write(str(os.getpid()))
Write the pid file out with the process number in the pid file
def set_register(self, reg, data):
    """Set a single core register from a GDB hexadecimal string.

    Args:
        reg: index of the register in the target XML sent to GDB.
            Negative indices and indices past the end of the register
            list are silently ignored.
        data: the register value as a GDB hex string, 8 or 16 digits
            depending on register width.
    """
    if reg < 0:
        return
    elif reg < len(self._register_list):
        regName = self._register_list[reg].name
        regBits = self._register_list[reg].bitsize
        # Pick the converter matching the register width; the *_be helpers
        # presumably byte-swap GDB's little-endian hex — TODO confirm
        # against the conversion module.
        if regBits == 64:
            value = conversion.hex16_to_u64be(data)
        else:
            value = conversion.hex8_to_u32be(data)
        logging.debug("GDB: write reg %s: 0x%X", regName, value)
        self._context.write_core_register_raw(regName, value)
Set single register from GDB hexadecimal string. reg parameter is the index of register in targetXML sent to GDB.
def exists(name, path=None):
    """Return whether the named container exists.

    path
        path to the container parent directory (default: /var/lib/lxc)

    .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' lxc.exists name
    """
    if name in ls_(path=path):
        return True
    # Not in the (possibly stale) cached listing; retry uncached.
    return name in ls_(cache=False, path=path)
Returns whether the named container exists. path path to the container parent directory (default: /var/lib/lxc) .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' lxc.exists name
def create_csr(cls, common_name, private_key=None, params=None):
    """Create a CSR for `common_name` and return the .csr file path.

    Returns None (after echoing diagnostics) when the openssl command
    fails.
    """
    filtered = [(key, val) for key, val in (params or []) if val]
    subj = '/' + '/'.join('='.join(pair) for pair in filtered)
    cmd, private_key = cls.gen_pk(common_name, private_key)
    # Derive the CSR filename from the private key path.
    if private_key.endswith(('.crt', '.key')):
        csr_file = re.sub(r'\.(crt|key)$', '.csr', private_key)
    else:
        csr_file = private_key + '.csr'
    cmd = cmd % {'csr': csr_file, 'key': private_key, 'subj': subj}
    if not cls.execute(cmd):
        cls.echo('CSR creation failed')
        cls.echo(cmd)
        return
    return csr_file
Create CSR.
def createBlendedFolders():
    """Create the standard folder layout for a Blended website."""
    layout = (
        ("templates",),
        ("templates", "assets"),
        ("templates", "assets", "css"),
        ("templates", "assets", "js"),
        ("templates", "assets", "img"),
        ("content",),
    )
    for parts in layout:
        create_folder(os.path.join(cwd, *parts))
Creates the standard folders for a Blended website
def unstruct_strat(self):
    """The default way of unstructuring ``attrs`` classes."""
    if self._unstructure_attrs == self.unstructure_attrs_asdict:
        return UnstructureStrategy.AS_DICT
    return UnstructureStrategy.AS_TUPLE
The default way of unstructuring ``attrs`` classes.
def IsCloud(self, request, bios_version, services):
    """Test to see if we're on a cloud machine.

    Returns True on the first regex hit: the BIOS version matched from
    the start, or the service list searched anywhere.
    """
    if (request.bios_version_regex and bios_version
            and re.match(request.bios_version_regex, bios_version)):
        return True
    if (request.service_name_regex and services
            and re.search(request.service_name_regex, services)):
        return True
    return False
Test to see if we're on a cloud machine.
def write_single_xso(x, dest):
    """Write a single XSO `x` to a binary file-like object `dest`."""
    generator = XMPPXMLGenerator(dest,
                                 short_empty_elements=True,
                                 sorted_attributes=True)
    x.unparse_to_sax(generator)
Write a single XSO `x` to a binary file-like object `dest`.
def repopulateWinowMenu(self, actionGroup):
    """Clear the window menu and refill it from `actionGroup`'s actions."""
    menu = self.windowMenu
    for stale_action in list(menu.actions()):
        menu.removeAction(stale_action)
    for fresh_action in actionGroup.actions():
        menu.addAction(fresh_action)
Clears the window menu and fills it with the actions of the actionGroup.
def tofile(self, fileobj):
    """Write this cache to `fileobj` as a LAL cache file, then close it.

    Each entry is written on its own line via str(entry). Note the file
    object is closed before returning.
    """
    for entry in self:
        # Fix: the original used the Python-2-only `print >>fileobj`
        # statement (a SyntaxError under Python 3); this write produces
        # byte-identical output on both.
        fileobj.write('%s\n' % entry)
    fileobj.close()
Write a cache object to the fileobj as a LAL cache file.
def findElementsWithId(node, elems=None):
    """Return a dict of id value -> element for every element with an id.

    Recurses through `node`'s element children; `elems` accumulates the
    results across recursive calls.
    """
    elems = {} if elems is None else elems
    element_id = node.getAttribute('id')
    if element_id:
        elems[element_id] = node
    for child in node.childNodes:
        if child.nodeType == Node.ELEMENT_NODE:
            findElementsWithId(child, elems)
    return elems
Returns all elements with id attributes
def describe_features(self, traj):
    """Return a list of dictionaries describing the atom pair features.

    Parameters
    ----------
    traj : mdtraj.Trajectory
        The trajectory to describe; only its topology is used.

    Returns
    -------
    feature_descs : list of dict
        One dict per pair in ``self.atom_indices``, carrying the residue
        names, residue seq ids, residue indices, atom indices and
        featurizer metadata (AtomPairs / Distance / exponent) for the pair.
    """
    feature_descs = []
    top = traj.topology
    # Map each atom pair to the pair of residue indices owning those atoms.
    residue_indices = [[top.atom(i[0]).residue.index, top.atom(i[1]).residue.index] \
                       for i in self.atom_indices]
    aind = []
    resseqs = []
    resnames = []
    # Collect, per pair: the raw atom indices, the residue seq ids, and
    # the residue names (aligned element-wise with residue_indices).
    for ind,resid_ids in enumerate(residue_indices):
        aind += [[i for i in self.atom_indices[ind]]]
        resseqs += [[top.residue(ri).resSeq for ri in resid_ids]]
        resnames += [[top.residue(ri).name for ri in resid_ids]]
    # itertools.product pairs the constant featurizer metadata with each
    # per-pair tuple; dict_maker turns the tuples into description dicts.
    zippy = itertools.product(["AtomPairs"], ["Distance"],
                              ["Exponent {}".format(self.exponent)],
                              zip(aind, resseqs, residue_indices, resnames))
    feature_descs.extend(dict_maker(zippy))
    return feature_descs
Return a list of dictionaries describing the atom pair features. Parameters ---------- traj : mdtraj.Trajectory The trajectory to describe Returns ------- feature_descs : list of dict Dictionary describing each feature with the following information about the atoms participating in each atom pair - resnames: unique names of residues - atominds: the two atom inds - resseqs: unique residue sequence ids (not necessarily 0-indexed) - resids: unique residue ids (0-indexed) - featurizer: AtomPairsFeaturizer - featuregroup: Distance. - other info : Value of the exponent
def alerts(self, alert_level='High'):
    """Get a filtered list of alerts at the given alert level, and sorted
    by alert level (most severe first)."""
    minimum = self.alert_levels[alert_level]
    filtered = [a for a in self.zap.core.alerts()
                if self.alert_levels[a['risk']] >= minimum]
    # Stable sort, highest severity first (matches sorted(..., reverse=True)).
    filtered.sort(key=lambda alert: self.alert_levels[alert['risk']],
                  reverse=True)
    return filtered
Get a filtered list of alerts at the given alert level, and sorted by alert level.
def connect(dbapi_connection, connection_record):
    """Called once by SQLAlchemy for each new SQLite DB-API connection.

    Turns foreign-key enforcement on and verifies it took effect. On any
    failure the raw connection is closed and sqlite3.Error is raised.

    @param dbapi_connection: A newly connected raw SQLite DB-API connection.
    @param connection_record: Unused by this method.
    """
    try:
        cursor = dbapi_connection.cursor()
        try:
            cursor.execute("PRAGMA foreign_keys = ON;")
            cursor.execute("PRAGMA foreign_keys;")
            (enabled,) = cursor.fetchone()
            if enabled != 1:
                raise Exception()
        finally:
            cursor.close()
    except Exception:
        dbapi_connection.close()
        raise sqlite3.Error()
Called once by SQLAlchemy for each new SQLite DB-API connection. Here is where we issue some PRAGMA statements to configure how we're going to access the SQLite database. @param dbapi_connection: A newly connected raw SQLite DB-API connection. @param connection_record: Unused by this method.
def _parse_ranking(self, field, boxscore):
    """Extract the team's poll ranking from the scores header, if any.

    Returns the rank as an int, or None when the team is unranked or the
    boxscore section is empty.
    """
    index = BOXSCORE_ELEMENT_INDEX[field]
    teams_boxscore = boxscore(BOXSCORE_SCHEME[field])
    if str(teams_boxscore) == '':
        return None
    team = pq(teams_boxscore[index])
    # A ranked team carries a 'pollrank' attribute with "(N)" after its name.
    if 'pollrank' not in str(team):
        return None
    matches = re.findall(r'\(\d+\)', str(team))
    if len(matches) != 1:
        return None
    return int(matches[0].replace('(', '').replace(')', ''))
Parse each team's rank if applicable. Retrieve the team's rank according to the rankings published each week. The ranking for the week is only located in the scores section at the top of the page and not in the actual boxscore information. The rank is after the team name inside a parenthesis with a special 'pollrank' attribute. If this is not in the team's boxscore information, the team is assumed to not have a rank and will return a value of None. Parameters ---------- field : string The name of the attribute to parse. boxscore : PyQuery object A PyQuery object containing all of the HTML data from the boxscore. Returns ------- int An int representing the team's ranking or None if the team is not ranked.
def shuffle(self):
    """Whether shuffle is enabled (None when no command info is available)."""
    info = self._get_command_info(CommandInfo_pb2.ChangeShuffleMode)
    if info is None:
        return None
    return info.shuffleMode
If shuffle is enabled or not.
def contains(self, token: str) -> bool:
    """Return whether the (validated) token is in the list."""
    self._validate_token(token)
    is_member = token in self
    return is_member
Return if the token is in the list or not.
def worker(job):
    """Run a single download job; return True only on full success."""
    succeeded = False
    try:
        if job.full_url is not None:
            response = requests.get(job.full_url, stream=True)
            succeeded = save_and_check(response, job.local_file,
                                       job.expected_checksum)
            if not succeeded:
                return succeeded
            succeeded = create_symlink(job.local_file, job.symlink_path)
    except KeyboardInterrupt:
        # A Ctrl-C mid-download just marks this job as not done.
        logging.debug("Ignoring keyboard interrupt.")
    return succeeded
Run a single download job.
def get_next_entry(self, method, info, request):
    """Cycle through the registered responses for `method`, but only once.

    After the last registered entry has been served, every subsequent
    request keeps receiving that last entry.

    Raises:
        ValueError: if no entries are registered for `method`.
    """
    if method not in self.current_entries:
        self.current_entries[method] = 0
    entries_for_method = [e for e in self.entries if e.method == method]
    if self.current_entries[method] >= len(entries_for_method):
        # Exhausted: pin the cursor at -1 so index -1 (the last entry)
        # is served from here on without advancing further.
        self.current_entries[method] = -1
    if not self.entries or not entries_for_method:
        raise ValueError('I have no entries for method %s: %s' % (method, self))
    entry = entries_for_method[self.current_entries[method]]
    # Only advance while we are still walking forward through the list.
    if self.current_entries[method] != -1:
        self.current_entries[method] += 1
    # Stash the request context on the entry for later inspection.
    entry.info = info
    entry.request = request
    return entry
Cycle through available responses, but only once. Any subsequent requests will receive the last response
def __precision(y_true, y_pred):
    """Precision metric tolerant to unlabeled data in y_true.

    NaN positions in y_true are zeroed in both arrays (on copies, so the
    inputs are untouched) before computing sklearn's precision_score.
    """
    labels = np.copy(y_true)
    predictions = np.copy(y_pred)
    unlabeled = np.isnan(labels)
    labels[unlabeled] = 0
    predictions[unlabeled] = 0
    return precision_score(labels, predictions)
Precision metric tolerant to unlabeled data in y_true, NA values are ignored for the precision calculation
def slice(self, start, until):
    """Return the subsequence from `start` (inclusive) to `until` (exclusive).

    >>> seq([1, 2, 3, 4]).slice(1, 2)
    [2]
    >>> seq([1, 2, 3, 4]).slice(1, 3)
    [2, 3]

    :param start: starting index
    :param until: ending index (excluded)
    :return: the sliced sequence
    """
    transformation = transformations.slice_t(start, until)
    return self._transform(transformation)
Takes a slice of the sequence starting at start and until but not including until. >>> seq([1, 2, 3, 4]).slice(1, 2) [2] >>> seq([1, 2, 3, 4]).slice(1, 3) [2, 3] :param start: starting index :param until: ending index :return: slice including start until but not including until
def _merge_defaults(self, data, method_params, defaults):
    """Fill `data` with default values for missing optional arguments.

    `method_params` is the inspected parameter-name list of the method to
    be called, and `defaults` its default values (aligned to the tail of
    `method_params`). Any optional argument absent from `data` gets its
    default. Returns the (mutated) `data` dict.
    """
    if defaults:
        optional_args = method_params[-len(defaults):]
        for key, value in zip(optional_args, defaults):
            # setdefault-style: only fill in what the caller omitted.
            if key not in data:  # idiomatic form of `not key in data`
                data[key] = value
    return data
Helper method for adding default values to the data dictionary. The `defaults` are the default values inspected from the method that will be called. For any values that are not present in the incoming data, the default value is added.