code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def deactivate_object(brain_or_object):
    """Deactivate the given object.

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Nothing
    :rtype: None
    """
    obj = get_object(brain_or_object)
    # The portal root must never be deactivated.
    if is_root(obj):
        fail(401, "Deactivating the Portal is not allowed")
    try:
        do_transition_for(brain_or_object, "deactivate")
    except Unauthorized:
        fail(401, "Not allowed to deactivate object '%s'" % obj.getId())
def get_offers_per_page(self, per_page=1000, page=1, params=None):
    """Get offers per page.

    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :param params: Search parameters. Default: {}
    :return: list
    """
    return self._get_resource_per_page(
        resource=OFFERS,
        per_page=per_page,
        page=page,
        params=params,
    )
def get_pex_python_paths():
    """Return interpreter paths from PEX_PYTHON_PATH in a pexrc file.

    PEX_PYTHON_PATH (provided by '/etc/pexrc' or '~/.pexrc') defines a
    colon-separated list of paths to interpreters that a pex can be
    built and run against. Returns [] when the variable is unset.
    """
    ppp = Variables.from_rc().get('PEX_PYTHON_PATH')
    return ppp.split(os.pathsep) if ppp else []
def uuid1mc_from_datetime(dt):
    """Return a UUID1 with a random multicast MAC and a timestamp
    matching the given datetime object or timestamp value.

    .. warning::
        Does not consider the timezone and is not guaranteed to return
        a unique UUID. Use under controlled conditions only.
    """
    if isinstance(dt, datetime):
        timeval = time.mktime(dt.timetuple()) + dt.microsecond / 1e6
    else:
        timeval = dt
    # UUID timestamps count 100-ns intervals since 1582-10-15.
    nanoseconds = int(timeval * 1e9)
    timestamp = int(nanoseconds // 100) + 0x01b21dd213814000
    fields = list(uuid1mc().fields)
    fields[0] = timestamp & 0xffffffff            # time_low
    fields[1] = (timestamp >> 32) & 0xffff        # time_mid
    fields[2] = (timestamp >> 48) & 0x0fff        # time_hi_version
    return uuid.UUID(fields=tuple(fields))
def create(self, unique_name=values.unset, data=values.unset):
    """Create a new DocumentInstance.

    :param unicode unique_name: The unique_name
    :param dict data: The data
    :returns: Newly created DocumentInstance
    :rtype: twilio.rest.preview.sync.service.document.DocumentInstance
    """
    post_data = values.of({
        'UniqueName': unique_name,
        'Data': serialize.object(data),
    })
    payload = self._version.create('POST', self._uri, data=post_data)
    return DocumentInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
    )
def _GetRoutingMap(self, router):
    """Return a routing map for a given router instance.

    Maps are cached per router class; a cache miss builds and stores
    the map before returning it.
    """
    router_cls = router.__class__
    try:
        return self._routing_maps_cache.Get(router_cls)
    except KeyError:
        routing_map = self._BuildHttpRoutingMap(router_cls)
        self._routing_maps_cache.Put(router_cls, routing_map)
        return routing_map
def primitive(self, primitive):
    """Record from Python primitive.

    Stores the full primitive on a new entry, then stores a copy with
    the entry's fields removed on a new item.
    """
    self.entry = Entry()
    self.entry.primitive = primitive
    remainder = copy(primitive)
    # NOTE(review): assumes every entry field is a key of the primitive;
    # a missing key would raise KeyError here — confirm against callers.
    for field in self.entry.fields:
        del remainder[field]
    self.item = Item()
    self.item.primitive = remainder
def info():
    """Show the 'box'-type attributes and their content in the
    cellpy.prms module."""
    print("convenience function for listing prms")
    print(type(prms))
    print(prms.__name__)
    print(f"prm file: {_get_prm_file()}")
    for key, value in prms.__dict__.items():
        if not isinstance(value, box.Box):
            continue
        print()
        print(80 * "=")
        print(f"prms.{key}:")
        print(80 * "-")
        for subkey in value:
            print(f"prms.{key}.{subkey} = ", f"{value[subkey]}")
        print(80 * "=")
def line_is_interesting(self, line):
    """Return True, False, or None.

    True means always output, False means never output, None means
    output only if there are interesting lines.
    """
    # Coverage-report boilerplate: header, separator, totals.
    for boring_prefix in ('Name', '--------', 'TOTAL'):
        if line.startswith(boring_prefix):
            return None
    if '100%' in line:
        return False
    if line == '\n':
        # A blank line is only worth keeping after a printable line.
        return None if self._last_line_was_printable else False
    return True
def center(self, axis=1):
    """Subtract the mean either within or across records.

    Parameters
    ----------
    axis : int, optional, default = 1
        Which axis to center along, within (1) or across (0) records.
    """
    if axis == 1:
        # Center each record by its own mean.
        return self.map(lambda x: x - mean(x))
    if axis == 0:
        # Center by the element-wise mean across all records.
        meanval = self.mean().toarray()
        return self.map(lambda x: x - meanval)
    raise Exception('Axis must be 0 or 1')
def update(self):
    """Update uptime stat using the input method."""
    stats = self.get_init_value()
    if self.input_method == 'local':
        self.uptime = datetime.now() - datetime.fromtimestamp(psutil.boot_time())
        # Drop the microseconds ("1 day, 2:03:04.567" -> "1 day, 2:03:04").
        stats = str(self.uptime).split('.')[0]
    elif self.input_method == 'snmp':
        uptime = self.get_stats_snmp(snmp_oid=snmp_oid)['_uptime']
        try:
            # SNMP sysUpTime is expressed in hundredths of a second.
            stats = str(timedelta(seconds=int(uptime) / 100))
        except Exception:
            pass
    self.stats = stats
    return self.stats
def process_email(ctx, param, value):
    """Return the user with the given email if it exists.

    :param ctx: click context (unused)
    :param param: click parameter (unused)
    :param value: email address to look up
    :raises click.BadParameter: when no user matches *value*
    """
    user = User.query.filter(User.email == value).first()
    if not user:
        # BUG FIX: the message was passed with ', value' as a second
        # positional argument (click's ``param_hint``), so the '%s'
        # placeholder was never interpolated. Format the message here.
        raise click.BadParameter("User with email '%s' not found." % value)
    return user
async def delete(self, query, *, dc=None):
    """Delete existing prepared query.

    Parameters:
        query (ObjectID): Query ID
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
    Results:
        bool: ``True`` on success
    """
    query_id = extract_attr(query, keys=["ID"])
    response = await self._api.delete("/v1/query", query_id, params={"dc": dc})
    return response.status == 200
def find_repo_type(self):
    """Check for a git or hg repository and record it in ``self.vc_name``.

    Exits with status 1 when the working directory is neither.
    """
    is_git = self.call(['git', 'rev-parse', '--is-inside-work-tree'],
                       devnull=True)
    if is_git == 0:
        # BUG FIX: the git branch never recorded the detected VCS,
        # leaving self.vc_name unset for git repositories.
        self.vc_name = 'git'
        return
    if self.debug:
        click.echo('not git')
    is_hg = self.call(['hg', '-q', 'stat'], devnull=True)
    if is_hg != 0:
        if self.debug:
            click.echo('not hg')
        exit(1)
    self.vc_name = 'hg'
def call(self, method, *args, **kwargs):
    """Call an RPC function, reconnecting once on connection errors.

    Raises FailedToReconnectException when the retry (or the reconnect
    itself) fails; re-raises the original error when automatic
    reconnection is disabled.
    """
    tried_reconnect = False
    for _ in range(2):
        try:
            self._send_call(self.deluge_version,
                            self.deluge_protocol_version,
                            method, *args, **kwargs)
            return self._receive_response(self.deluge_version,
                                          self.deluge_protocol_version)
        except (socket.error, ConnectionLostException, CallTimeoutException):
            if not self.automatic_reconnect:
                raise
            if tried_reconnect:
                raise FailedToReconnectException()
            try:
                self.reconnect()
            except (socket.error, ConnectionLostException,
                    CallTimeoutException):
                raise FailedToReconnectException()
            tried_reconnect = True
def _get(self, url: str) -> str:
    """A small wrapper method which makes a quick GET request.

    Parameters
    ----------
    url : str
        The URL to get.

    Returns
    -------
    str
        The raw html of the requested page.

    Raises
    ------
    RuneConnectionError
        If the GET response status is not 200.
    """
    resp = self.session.get(url, headers=self.HEADERS)
    # BUG FIX: ``status_code is 200`` relied on CPython's small-int
    # interning (an identity check); compare by value instead.
    if resp.status_code == 200:
        return resp.text
    raise RuneConnectionError(resp.status_code)
def get_all_bandwidth_groups(self):
    """Get all managed bandwidth groups.

    :returns: list of :class:`IBandwidthGroup` — the managed groups.
    """
    raw_groups = self._call("getAllBandwidthGroups")
    return [IBandwidthGroup(group) for group in raw_groups]
def get_relation_kwargs(field_name, relation_info):
    """Create default kwargs for a flat relational field."""
    model_field, related_model = relation_info
    kwargs = {}
    # Embedded documents have no standalone queryset to relate to.
    if related_model and not issubclass(related_model, EmbeddedDocument):
        kwargs['queryset'] = related_model.objects
    if model_field:
        if hasattr(model_field, 'verbose_name') and needs_label(model_field,
                                                                field_name):
            kwargs['label'] = capfirst(model_field.verbose_name)
        if hasattr(model_field, 'help_text'):
            kwargs['help_text'] = model_field.help_text
        kwargs['required'] = model_field.required
        if model_field.null:
            kwargs['allow_null'] = True
        if getattr(model_field, 'unique', False):
            validator = UniqueValidator(queryset=related_model.objects)
            kwargs['validators'] = [validator]
    return kwargs
def pivot_bin(self, pivot_columns, value_column, bins=None, **vargs):
    """Form a table of per-bin counts of *value_column*, with one column
    per unique tuple in *pivot_columns*.

    By default, bins are chosen to contain all values in value_column.
    ``bins`` (int or sequence of edges) and the other numpy.histogram
    named arguments can be applied to specialize bin widths.
    """
    pivot_columns = _as_labels(pivot_columns)
    selected = self.select(pivot_columns + [value_column])
    grouped = selected.groups(pivot_columns, collect=lambda x: x)
    if bins is not None:
        vargs['bins'] = bins
    # Fix the bin edges once over the whole column so every group is
    # binned consistently.
    _, edges = np.histogram(self[value_column], **vargs)
    vargs['bins'] = edges
    binned = type(self)().with_column('bin', edges)
    for group in grouped.rows:
        col_label = "-".join(map(str, group[0:-1]))
        counts, _ = np.histogram(group[-1], **vargs)
        # Pad with 0 so the counts align with the len(edges) bin column.
        binned[col_label] = np.append(counts, 0)
    return binned
def to_user_agent(self):
    """Return the user-agent string for this client info."""
    parts = []
    if self.user_agent is not None:
        parts.append("{user_agent}")
    parts.append("gl-python/{python_version}")
    if self.grpc_version is not None:
        parts.append("grpc/{grpc_version}")
    parts.append("gax/{api_core_version}")
    if self.gapic_version is not None:
        parts.append("gapic/{gapic_version}")
    if self.client_library_version is not None:
        parts.append("gccl/{client_library_version}")
    # Placeholders are filled from the instance attributes.
    return " ".join(parts).format(**self.__dict__)
def num_adjacent(self, i, j):
    """Count the number of adjacent above-threshold pixels to a pixel.

    Parameters
    ----------
    i : int
        row index of query pixel
    j : int
        col index of query pixel

    Returns
    -------
    int
        number of 4-connected neighbors whose value exceeds the threshold

    Raises
    ------
    ValueError
        If (i, j) is on the border or outside the image.
    """
    # BUG FIX: the original condition used ``j < 1 and j > width - 2``,
    # which is always False, so the column index was never range-checked.
    if i < 1 or i > self.height - 2 or j < 1 or j > self.width - 2:
        raise ValueError('Pixels out of bounds')
    count = 0
    # 4-connected neighborhood: up, down, left, right.
    for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        if self.data[i + di][j + dj] > self._threshold:
            count += 1
    return count
def _inhibitColumnsWithLateral(self, overlaps, lateralConnections):
    """Perform experimental local inhibition, column by column.

    Columns are visited in descending overlap order; a column becomes
    active unless the lateral inhibition signal it has accumulated has
    reached the sparsity threshold.
    """
    n, _ = self.shape
    y = np.zeros(n)
    inhSignal = np.zeros(n)
    currentWeight = 0
    # Stable sort keeps ties in their original order.
    for i in np.argsort(overlaps, kind='mergesort')[::-1]:
        if overlaps[i] < self._stimulusThreshold:
            break
        if inhSignal[i] < self.sparsity:
            y[i] = 1.
            currentWeight += 1
            inhSignal[:] += lateralConnections[i, :]
            if self.enforceDesiredWeight and currentWeight == self.codeWeight:
                break
    return np.where(y == 1.0)[0]
def rollback(self, date):
    """Roll *date* backward to the nearest end of year."""
    if self.onOffset(date):
        return date
    return date - YearEnd(month=self.month)
def set_link(self, prop, value):
    """Set given link in CTS Namespace.

    .. example::
        collection.set_link(NAMESPACES.CTS.about,
                            "urn:cts:latinLit:phi1294.phi002")

    :param prop: Property to set (without namespace)
    :param value: Value to set for given property
    """
    # Coerce plain strings to URIRef before storing.
    if not isinstance(value, URIRef):
        value = URIRef(value)
    self.metadata.add(prop, value)
def _feature_first_back(self, results):
    """Get the countries of the first two geonames results.

    Parameters
    ----------
    results : dict
        elasticsearch results

    Returns
    -------
    tuple
        first and second results' country codes (ISO3); '' when missing
    """
    codes = []
    for rank in (0, 1):
        try:
            codes.append(results['hits']['hits'][rank]['country_code3'])
        except (TypeError, IndexError):
            # Absent or malformed hit list: record an empty code.
            codes.append("")
    return tuple(codes)
def lincon(self, x, theta=0.01):
    """Ridge-like linear function with one linear constraint.

    Returns ``theta * x[1] + x[0]``, or NaN when the constraint
    ``x[0] >= 0`` is violated.
    """
    if x[0] < 0:
        # FIX: ``np.NaN`` was removed in NumPy 2.0; float('nan') is the
        # same IEEE-754 quiet NaN value and needs no dependency.
        return float('nan')
    return theta * x[1] + x[0]
def make_block_same_class(self, values, placement=None, ndim=None, dtype=None):
    """Wrap given values in a block of same type as self."""
    if dtype is not None:
        # Deprecated escape hatch kept for backwards compatibility.
        warnings.warn("dtype argument is deprecated, will be removed "
                      "in a future release.", DeprecationWarning)
    if placement is None:
        placement = self.mgr_locs
    return make_block(values, placement=placement, ndim=ndim,
                      klass=self.__class__, dtype=dtype)
def DbDeleteClassProperty(self, argin):
    """Delete class properties from database.

    :param argin: Str[0] = Tango class name
                  Str[1..n] = Property names
    :type: tango.DevVarStringArray
    :return: nothing
    :rtype: tango.DevVoid
    """
    self._log.debug("In DbDeleteClassProperty()")
    klass_name = argin[0]
    # NOTE(review): klass_name is read but never passed to the delete
    # call, so properties are deleted without class scoping — confirm
    # this matches db.delete_class_property's expected signature.
    for prop_name in argin[1:]:
        self.db.delete_class_property(prop_name)
def plotChIds(self, maptype=None, modout=False):
    """Print the channel numbers on the plotting display.

    Note:
        Behaves poorly in mixed projections: the channel vertex
        polygons are already projected using self.defaultMap, so
        applying this while plotting in a different reference frame
        may cause trouble.
    """
    if maptype is None:
        maptype = self.defaultMap
    for polygon in self.getAllChannelsAsPolygons(maptype):
        polygon.identifyModule(modout=modout)
def _dispatch_gen(self):
    """Process the generate subset of commands."""
    if not os.path.isdir(self._args.output):
        raise exception.Base("%s is not a writeable directory"
                             % self._args.output)
    if not os.path.isfile(self._args.models_definition):
        # Fall back to a package lookup before giving up.
        if not self.check_package_exists(self._args.models_definition):
            raise exception.Base(
                "failed to locate package or models definitions file at: %s"
                % self._args.models_definition)
    # Imported lazily to keep CLI startup light.
    from prestans.devel.gen import Preplate
    preplate = Preplate(
        template_type=self._args.template,
        models_definition=self._args.models_definition,
        namespace=self._args.namespace,
        filter_namespace=self._args.filter_namespace,
        output_directory=self._args.output)
    preplate.run()
def get_fields(self, serializer_fields):
    """Get fields metadata, skipping the 'tags' field and empty infos."""
    fields = OrderedDict()
    for field_name, field in serializer_fields.items():
        if field_name == 'tags':
            continue
        info = self.get_field_info(field, field_name)
        if info:
            fields[field_name] = info
    return fields
def _handle_function_call(tokens, tokens_len, index):
    """Handle function calls, which could include a control statement.

    In CMake, all control flow statements are also function calls, so
    the call is parsed first and then tree construction is directed to
    the matching control flow constructor in
    _FUNCTION_CALL_DISAMBIGUATE.
    """
    def _end_function_call(token_index, tokens):
        # The call body ends at the matching right parenthesis.
        return tokens[token_index].type == TokenType.RightParen

    # Skip the function name and the opening parenthesis.
    next_index, call_body = _ast_worker(tokens, tokens_len, index + 2,
                                        _end_function_call)
    function_call = FunctionCall(name=tokens[index].content,
                                 arguments=call_body.arguments,
                                 line=tokens[index].line,
                                 col=tokens[index].col,
                                 index=index)
    handler = _FUNCTION_CALL_DISAMBIGUATE.get(tokens[index].content.lower())
    if handler:
        return handler(tokens, tokens_len, next_index, function_call)
    return (next_index, function_call)
def serialize_tag(tag, *, indent=None, compact=False, quote=None):
    """Serialize an nbt tag to its literal representation."""
    return Serializer(indent=indent, compact=compact,
                      quote=quote).serialize(tag)
def conj_phrase(list_, cond='or'):
    """Join a list of words using English conjunction rules.

    Args:
        list_ (list): of strings
        cond (str): a conjunction (or, and, but)

    Returns:
        str: the joined conjunction phrase

    References:
        http://en.wikipedia.org/wiki/Conjunction_(grammar)

    Example:
        >>> conj_phrase(['a', 'b', 'c'], 'or')
        'a, b, or c'
        >>> conj_phrase(['a', 'b'], 'and')
        'a and b'
    """
    count = len(list_)
    if count == 0:
        return ''
    if count == 1:
        return list_[0]
    if count == 2:
        return ' '.join((list_[0], cond, list_[1]))
    # Oxford comma before the conjunction for three or more items.
    condstr = ''.join((', ' + cond, ' '))
    return ', '.join((', '.join(list_[:-2]), condstr.join(list_[-2:])))
def header(self, method, client='htmlshark'):
    """Generate a Grooveshark API JSON header for *method*."""
    return {
        'token': self._request_token(method, client),
        'privacy': 0,
        'uuid': self.session.user,
        'clientRevision': grooveshark.const.CLIENTS[client]['version'],
        'session': self.session.session,
        'client': client,
        'country': self.session.country,
    }
def serialize(self, raw=False):
    """Encode the private part of the key.

    Base64-encoded bytes by default; when *raw* is True, the key's
    plain byte encoding is returned instead.

    @return: bytes
    """
    if raw:
        return self._key.encode()
    return self._key.encode(nacl.encoding.Base64Encoder)
def random_line(file_path: str, encoding: str = FORCED_ENCODING) -> str:
    """Get a uniformly random line from a file.

    Uses reservoir sampling so the whole file is never held in memory.
    Returns '' for an empty file.
    """
    seen = 0
    chosen = ""
    with open(file_path, encoding=encoding) as stream:
        for line in stream:
            seen += 1
            # Replace the choice with probability 1/seen, which keeps
            # the selection uniform over all lines seen so far.
            if random.uniform(0, seen) < 1:
                chosen = line
    return chosen.strip()
def setspan(self, *args):
    """Set the span of the span element anew, erasing all data inside.

    Arguments:
        *args: Instances of :class:`Word`, :class:`Morpheme` or
            :class:`Phoneme`
    """
    # Reset the contents, then re-append every given child in order.
    self.data = []
    for child in args:
        self.append(child)
def channelModeModifyAcknowledge():
    """CHANNEL MODE MODIFY ACKNOWLEDGE, Section 9.1.6."""
    # Layered packet: protocol discriminator / message type /
    # channel description / channel mode.
    return (TpPd(pd=0x6) / MessageType(mesType=0x17) /
            ChannelDescription2() / ChannelMode())
def make_monitoring_log(level, message, timestamp=None, to_logger=False):
    """Build a monitoring log event.

    When *to_logger* is True, emit *message* at *level* on the
    monitoring logger and return True; otherwise return a Brok typed as
    'monitoring_log' carrying the level and message. Returns False for
    an unknown level.

    :param level: log level as defined in logging
    :type level: str
    :param message: message to send to the monitoring log logger
    :type message: str
    :param timestamp: if set, force the log event timestamp
    :param to_logger: when set, send to the logger, else return a brok
    :type to_logger: bool
    :return: True/False, or a monitoring_log Brok
    :rtype: bool or alignak.brok.Brok
    """
    level = level.lower()
    if level not in ['debug', 'info', 'warning', 'error', 'critical']:
        return False

    if to_logger:
        # BUG FIX: the logger-emission code sat after an unconditional
        # ``return True`` followed by ``return Brok(...)``, making the
        # Brok return unreachable; the emission belongs inside this
        # branch only, with the Brok returned otherwise.
        logging.getLogger(ALIGNAK_LOGGER_NAME).debug(
            "Monitoring log: %s / %s", level, message)
        # Escape newlines so the event stays on one log line.
        message = message.replace('\r', '\\r')
        message = message.replace('\n', '\\n')
        logger_ = logging.getLogger(MONITORING_LOGGER_NAME)
        logging_function = getattr(logger_, level)
        try:
            message = message.decode('utf8', 'ignore')
        except UnicodeEncodeError:
            pass
        except AttributeError:
            # Python 3 str has no decode().
            pass
        if timestamp:
            st = datetime.datetime.fromtimestamp(timestamp).strftime(
                '%Y-%m-%d %H:%M:%S')
            logging_function(message, extra={'my_date': st})
        else:
            logging_function(message)
        return True

    return Brok({'type': 'monitoring_log',
                 'data': {'level': level, 'message': message}})
def build_graph(self):
    """Read lazy dependency list and build graph, then verify it is
    acyclic."""
    for child, parents in self.dependencies.items():
        if child not in self.nodes:
            raise NodeNotFoundError(
                "App %s SQL item dependencies reference nonexistent child node %r" % (
                    child[0], child),
                child
            )
        for parent in parents:
            if parent not in self.nodes:
                raise NodeNotFoundError(
                    "App %s SQL item dependencies reference nonexistent parent node %r" % (
                        child[0], parent),
                    parent
                )
            # Wire the edge in both directions.
            self.node_map[child].add_parent(self.node_map[parent])
            self.node_map[parent].add_child(self.node_map[child])
    for node in self.nodes:
        self.ensure_not_cyclic(
            node,
            lambda x: (parent.key for parent in self.node_map[x].parents))
def get_killer(args):
    """Return a KillerBase instance subclassed based on the OS."""
    if POSIX:
        log.debug('Platform: POSIX')
        from killer.killer_posix import KillerPosix
        return KillerPosix(config_path=args.config, debug=args.debug)
    if WINDOWS:
        log.debug('Platform: Windows')
        from killer.killer_windows import KillerWindows
        return KillerWindows(config_path=args.config, debug=args.debug)
    raise NotImplementedError("Your platform is not currently supported."
                              "If you would like support to be added, or "
                              "if your platform is supported and this is "
                              "a bug, please open an issue on GitHub!")
def from_string(cls, string, *, default_func=None):
    """Construct a Service from a string.

    If default_func is provided and any ServicePart is missing, it is
    called with default_func(protocol, part) to obtain the missing
    part.
    """
    if not isinstance(string, str):
        raise TypeError(f'service must be a string: {string}')

    parts = string.split('://', 1)
    if len(parts) == 2:
        protocol, address = parts
    else:
        item, = parts
        protocol = None
        if default_func:
            # A bare item is either a host/port (protocol defaulted)
            # or a protocol (address defaulted).
            if default_func(item, ServicePart.HOST) and default_func(item, ServicePart.PORT):
                protocol, address = item, ''
            else:
                protocol, address = default_func(None, ServicePart.PROTOCOL), item
    if not protocol:
        raise ValueError(f'invalid service string: {string}')

    if default_func:
        default_func = partial(default_func, protocol.lower())
    address = NetAddress.from_string(address, default_func=default_func)
    return cls(protocol, address)
def volume_create(self, label, region=None, linode=None, size=20, **kwargs):
    """Create a new Block Storage Volume, either in the given Region or
    attached to the given Instance.

    :param label: The label for the new Volume.
    :type label: str
    :param region: The Region to create this Volume in. Not required if
        `linode` is provided.
    :type region: Region or str
    :param linode: The Instance to attach this Volume to. If not given,
        the new Volume will not be attached to anything.
    :type linode: Instance or int
    :param size: The size, in GB, of the new Volume. Defaults to 20.
    :type size: int
    :returns: The new Volume.
    :rtype: Volume
    """
    if not (region or linode):
        raise ValueError('region or linode required!')
    params = {
        "label": label,
        "size": size,
        # IDIOM FIX: accept API objects or plain ids via isinstance
        # (was ``issubclass(type(x), Base)``).
        "region": region.id if isinstance(region, Base) else region,
        "linode_id": linode.id if isinstance(linode, Base) else linode,
    }
    params.update(kwargs)
    result = self.post('/volumes', data=params)
    if 'id' not in result:
        raise UnexpectedResponseError(
            'Unexpected response when creating volume!', json=result)
    return Volume(self, result['id'], result)
def make_path(base_uri, path, filename, path_dimensions, split_length):
    """Generate a path as base location for file instance.

    :param base_uri: The base URI.
    :param path: The relative path.
    :param filename: The file name.
    :param path_dimensions: Number of chunks the path should be split into.
    :param split_length: The length of any chunk.
    :returns: A string representing the full path.
    """
    # The path must be strictly longer than the chunked prefix.
    assert len(path) > path_dimensions * split_length
    chunks = [path[i * split_length:(i + 1) * split_length]
              for i in range(path_dimensions)]
    remainder = path[path_dimensions * split_length:]
    return os.path.join(base_uri, *chunks, remainder, filename)
def draw(self):
    """Draw the Plot to screen.

    If a continuous node datatype produced ``self.sm`` (constructed in
    `compute_node_colors`), a colorbar is added and the figure is
    adjusted to make room for it.
    """
    self.draw_nodes()
    self.draw_edges()
    if hasattr(self, "groups") and self.groups:
        self.draw_group_labels()
    logging.debug("DRAW: {0}".format(self.sm))
    if self.sm:
        self.figure.subplots_adjust(right=0.8)
        cax = self.figure.add_axes([0.85, 0.2, 0.05, 0.6])
        self.figure.colorbar(self.sm, cax=cax)
    self.ax.relim()
    self.ax.autoscale_view()
    self.ax.set_aspect("equal")
def distance_landscape_as_3d_data(self, x_axis, y_axis):
    """Return the distance landscape as three-dimensional data for the
    specified projection.

    :param x_axis: variable to be plotted on the x axis of projection
    :param y_axis: variable to be plotted on the y axis of projection
    :return: a 3-tuple (x, y, z) where x and y are the coordinate lists
        and z the list of distances at the respective coordinates
    """
    if not self.distance_landscape:
        raise Exception('No distance landscape returned. '
                        'Re-run inference with return_distance_landscape=True')
    index_x = self.parameter_index(x_axis)
    index_y = self.parameter_index(y_axis)
    xs, ys, zs = [], [], []
    for parameters, initial_conditions, distance in self.distance_landscape:
        # Parameters and initial conditions share one flat index space.
        coords = list(parameters) + list(initial_conditions)
        xs.append(coords[index_x])
        ys.append(coords[index_y])
        zs.append(distance)
    return xs, ys, zs
def get_variant_slice(self, package_name, range_):
    """Get a slice of variants from the cache.

    Args:
        package_name (str): Name of package.
        range_ (`VersionRange`): Package version range.

    Returns:
        `_PackageVariantSlice` object, or None when no variants
        intersect the range.
    """
    variant_list = self.variant_lists.get(package_name)
    if variant_list is None:
        variant_list = _PackageVariantList(package_name, self.solver)
        self.variant_lists[package_name] = variant_list
    entries = variant_list.get_intersection(range_)
    if not entries:
        return None
    return _PackageVariantSlice(package_name, entries=entries,
                                solver=self.solver)
def stop_refreshing_token(self):
    """Cancel the token-refresh timer.

    Must be called when the application terminates; otherwise the
    timer keeps running.
    """
    with self.lock:
        self.timer_stopped = True
        self.timer.cancel()
def intToID(idnum, prefix):
    """Return the ID name for the given ID number, spreadsheet-style.

    Counts a..z, then aa..az, ba..bz, ..., up to zz, prefixed with
    *prefix*.
    """
    suffix = ''
    while idnum > 0:
        idnum -= 1  # shift to 0-based for this radix-26 digit
        suffix = chr(idnum % 26 + ord('a')) + suffix
        idnum //= 26
    return prefix + suffix
def remove_role_from_user(user, role):
    """Remove a role from a user, after interactive confirmation."""
    user = _query_to_user(user)
    role = _query_to_role(role)
    if not click.confirm(f'Are you sure you want to remove {role!r} from {user!r}?'):
        click.echo('Cancelled.')
        return
    user.roles.remove(role)
    user_manager.save(user, commit=True)
    click.echo(f'Successfully removed {role!r} from {user!r}')
def signMsg(self, msg: Dict, identifier: Identifier = None,
            otherIdentifier: Identifier = None):
    """Create a signature for *msg* using the specified signer.

    :param msg: message to sign
    :param identifier: signer identifier
    :param otherIdentifier: fallback identifier used when *identifier*
        is not given
    :return: signature that can then be assigned to a request
    """
    idr = self.requiredIdr(idr=identifier or otherIdentifier)
    return self._signerById(idr).sign(msg)
def create(self, to, media_url, quality=values.unset,
           status_callback=values.unset, from_=values.unset,
           sip_auth_username=values.unset, sip_auth_password=values.unset,
           store_media=values.unset, ttl=values.unset):
    """Create a new FaxInstance.

    :param unicode to: The phone number to receive the fax
    :param unicode media_url: The Twilio-hosted URL of the PDF that
        contains the fax
    :param FaxInstance.Quality quality: The quality of this fax
    :param unicode status_callback: The URL we should call to send
        status information to your application
    :param unicode from_: The number the fax was sent from
    :param unicode sip_auth_username: The username for SIP authentication
    :param unicode sip_auth_password: The password for SIP authentication
    :param bool store_media: Whether to store a copy of the sent media
    :param unicode ttl: How long in minutes to try to send the fax
    :returns: Newly created FaxInstance
    :rtype: twilio.rest.fax.v1.fax.FaxInstance
    """
    data = values.of({
        'To': to,
        'MediaUrl': media_url,
        'Quality': quality,
        'StatusCallback': status_callback,
        'From': from_,
        'SipAuthUsername': sip_auth_username,
        'SipAuthPassword': sip_auth_password,
        'StoreMedia': store_media,
        'Ttl': ttl,
    })
    payload = self._version.create('POST', self._uri, data=data)
    return FaxInstance(self._version, payload)
def disable(iface):
    """Disable an interface.

    CLI Example:

    .. code-block:: bash

        salt -G 'os_family:Windows' ip.disable 'Local Area Connection #2'
    """
    # Already disabled: nothing to do.
    if is_disabled(iface):
        return True
    cmd = ['netsh', 'interface', 'set', 'interface',
           'name={0}'.format(iface), 'admin=DISABLED']
    __salt__['cmd.run'](cmd, python_shell=False)
    # Report the post-command state rather than assuming success.
    return is_disabled(iface)
def dim_range_key(eldim):
    """Return the key used to look up a dimension range."""
    if not isinstance(eldim, dim):
        return eldim.name
    # repr of a dim expression is quoted for simple names; strip the
    # quotes so the key matches the plain dimension name.
    name = repr(eldim)
    if name.startswith("'") and name.endswith("'"):
        name = name[1:-1]
    return name
def parse_xml_node(self, node):
    """Parse an xml.dom Node object representing a participant into
    this object."""
    participants = node.getElementsByTagNameNS(RTS_NS, 'Participant')
    # Exactly one Participant element is required.
    if participants.length != 1:
        raise InvalidParticipantNodeError
    self.target_component = TargetComponent().parse_xml_node(participants[0])
    return self
def _compute_sync_map_file_path(self, root, hierarchy_type, custom_id,
                                file_name):
    """Compute the sync map file path inside the output container.

    :param string root: the root of the sync map files inside the
        container
    :param hierarchy_type: type of job output hierarchy
    :param string custom_id: the task custom id (flat) or page directory
        name (paged)
    :param string file_name: the output file name for the sync map
    :rtype: string
    """
    prefix = root
    if hierarchy_type == HierarchyType.PAGED:
        # Paged output nests each task inside its own page directory.
        prefix = gf.norm_join(prefix, custom_id)
    joined = gf.norm_join(prefix, file_name)
    return self._replace_placeholder(joined, custom_id)
def write_xml(self):
    """Write a VocabularyKey XML element as per the HealthVault schema.

    :returns: lxml.etree.Element representing a single VocabularyKey
    """
    if self.language is not None:
        attrib = {'{http://www.w3.org/XML/1998/namespace}lang': self.language}
        key = etree.Element('vocabulary-key', attrib=attrib)
    else:
        key = etree.Element('vocabulary-key')

    def _append_child(tag, text):
        # Build and attach one child element with the given text.
        child = etree.Element(tag)
        child.text = text
        key.append(child)

    _append_child('name', self.name)
    if self.family is not None:
        _append_child('family', self.family)
    if self.version is not None:
        _append_child('version', self.version)
    if self.code_value is not None:
        _append_child('code-value', self.code_value)
    return key
def _import_ucsmsdk(self):
    """Import the ucsmsdk module dynamically.

    ucsmsdk is not installed with normal Neutron distributions; it is
    imported dynamically here so the import can be mocked, allowing
    unit testing without requiring ucsmsdk to be installed.
    """
    if not CONF.ml2_cisco_ucsm.ucsm_https_verify:
        LOG.warning(const.SSL_WARNING)
    from networking_cisco.ml2_drivers.ucsm import ucs_ssl
    ucs_driver = importutils.import_module('ucsmsdk.ucsdriver')
    # Swap in the wrapper ssl module.
    ucs_driver.ssl = ucs_ssl

    class ucsmsdk(object):
        handle = importutils.import_class('ucsmsdk.ucshandle.UcsHandle')
        fabricVlan = importutils.import_class(
            'ucsmsdk.mometa.fabric.FabricVlan.FabricVlan')
        vnicProfile = importutils.import_class(
            'ucsmsdk.mometa.vnic.VnicProfile.VnicProfile')
        vnicEtherIf = importutils.import_class(
            'ucsmsdk.mometa.vnic.VnicEtherIf.VnicEtherIf')
        vmVnicProfCl = importutils.import_class(
            'ucsmsdk.mometa.vm.VmVnicProfCl.VmVnicProfCl')

    return ucsmsdk
def _write_vcf_breakend(brend, out_handle):
    """Write out a single VCF line with breakpoint information."""
    # VCF positions are 1-based while brend.pos is 0-based.
    fields = [brend.chrom, brend.pos + 1, brend.id, brend.ref, brend.alt,
              ".", "PASS", brend.info]
    out_handle.write("{0}\n".format("\t".join(str(x) for x in fields)))
def _query_api(self, method, url, fields=None, extra_headers=None, req_body=None):
    """Abstract HTTP queries to the API.

    Authenticates, attaches the bearer token and realm headers, issues
    the request and decodes the JSON response.

    :param method: HTTP method string (e.g. 'GET').
    :param url: full request URL.
    :param fields: optional request fields forwarded to urllib3-style
        ``http.request``.
    :param extra_headers: optional dict merged into the default headers.
    :param req_body: optional raw request body.
    :raises ApiQueryError: on any non-200 HTTP status.
    :returns: the decoded JSON response.
    """
    with self.auth.authenticate() as token:
        logging.debug('PA Authentication returned token %s', token)
        headers = {
            'Authorization': 'Bearer %s' % (token,),
            'Realm': self.auth_realm
        }
        if extra_headers is not None:
            headers.update(extra_headers)
        logging.info('[%s] %s', method, url)
        # A body and fields cannot both be passed positionally; pick the call
        # shape based on whether a raw body was supplied.
        if req_body is not None:
            response = self.http.request(method, url, fields, headers, body=req_body)
        else:
            response = self.http.request(method, url, fields, headers)
        if response.status != 200:
            print(response.data)
            logging.warning('Got non-200 HTTP status from API: %d', response.status)
            raise ApiQueryError("Failed to get API data", response.status)
        return json.loads(response.data.decode())
Abstracts http queries to the API
def get_current_temperature(self, refresh=False):
    """Return the current temperature as a float, or None when the
    stored value is missing or not numeric.

    :param refresh: when True, refresh device state before reading.
    """
    if refresh:
        self.refresh()
    try:
        reading = self.get_value('temperature')
        return float(reading)
    except (TypeError, ValueError):
        return None
Get current temperature
def result(self):
    """Return the context result, lazily fetching it from the
    persistence engine and caching it on first access.

    Returns None when no result is cached and no persistence engine
    has been set.
    """
    if self._result:
        return self._result
    if not self._persistence_engine:
        return None
    self._result = self._persistence_engine.get_context_result(self)
    return self._result
Return the context result object pulled from the persistence_engine if it has been set.
def heartbeat(self):
    """Send an empty request over the streaming pull RPC.

    This always sends over the stream, regardless of whether
    ``self._UNARY_REQUESTS`` is set. No-op when the RPC is unset or
    inactive.
    """
    if self._rpc is not None and self._rpc.is_active:
        self._rpc.send(types.StreamingPullRequest())
Sends an empty request over the streaming pull RPC. This always sends over the stream, regardless of if ``self._UNARY_REQUESTS`` is set or not.
def __is_block_data_move(self):
    """Tell whether the instruction at the program counter is a block
    data move (movs/stos/lods).

    Looks up self.pc in self.faultDisasm and checks the mnemonic.
    Currently only meaningful for x86 and amd64 architectures.
    """
    mnemonics = ('movs', 'stos', 'lods')
    instruction = None
    if self.pc is not None and self.faultDisasm:
        # Find the disassembly entry whose address matches the PC.
        for entry in self.faultDisasm:
            if entry[0] == self.pc:
                instruction = entry[2].lower().strip()
                break
    if not instruction:
        return False
    return any(mnemonic in instruction for mnemonic in mnemonics)
Private method to tell if the instruction pointed to by the program counter is a block data move instruction. Currently only works for x86 and amd64 architectures.
def _compile_path_pattern(pattern):
    r"""Generates a compiled regex pattern for a path pattern.

    e.g. '/MyApi/v1/notes/{id}' returns
    re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\[\]{}]*)/?$')

    Args:
      pattern: A string, the parameterized path pattern to be checked.

    Returns:
      A compiled regex object to match this path pattern.
    """
    def replace_variable(match):
        # Only rewrite matches that captured a variable name; otherwise
        # return the match unchanged.
        if match.lastindex > 1:
            var_name = ApiConfigManager._to_safe_path_param_name(match.group(2))
            return '%s(?P<%s>%s)' % (match.group(1), var_name, _PATH_VALUE_PATTERN)
        return match.group(0)

    # Replace each {var} path segment with a named capture group, then
    # allow an optional trailing slash.
    pattern = re.sub('(/|^){(%s)}(?=/|$|:)' % _PATH_VARIABLE_PATTERN, replace_variable, pattern)
    return re.compile(pattern + '/?$')
r"""Generates a compiled regex pattern for a path pattern. e.g. '/MyApi/v1/notes/{id}' returns re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\[\]{}]*)') Args: pattern: A string, the parameterized path pattern to be checked. Returns: A compiled regex object to match this path pattern.
def UpsertStoredProcedure(self, collection_link, sproc, options=None):
    """Upserts a stored procedure in a collection.

    :param str collection_link: The link to the document collection.
    :param sproc: The stored procedure definition.
    :param dict options: The request options for the request.
    :return: The upserted Stored Procedure.
    :rtype: dict
    """
    if options is None:
        options = {}
    collection_id, path, sproc_body = self._GetContainerIdWithPathForSproc(
        collection_link, sproc)
    return self.Upsert(sproc_body, path, 'sprocs', collection_id, None, options)
Upserts a stored procedure in a collection. :param str collection_link: The link to the document collection. :param str sproc: :param dict options: The request options for the request. :return: The upserted Stored Procedure. :rtype: dict
def pots(self, refresh=False):
    """Returns a list of pots owned by the currently authorised user.

    Official docs: https://monzo.com/docs/#pots

    :param refresh: when True, bypass the cache and re-fetch the pots.
    :type refresh: bool
    :returns: list of Monzo pots
    :rtype: list of MonzoPot
    """
    if self._cached_pots and not refresh:
        return self._cached_pots
    response = self._get_response(
        method='get',
        endpoint='/pots/listV1',
    )
    self._cached_pots = [MonzoPot(data=entry)
                         for entry in response.json()['pots']]
    return self._cached_pots
Returns a list of pots owned by the currently authorised user. Official docs: https://monzo.com/docs/#pots :param refresh: decides if the pots information should be refreshed. :type refresh: bool :returns: list of Monzo pots :rtype: list of MonzoPot
def nic_add(self, container, nic):
    """Hot plug a nic into a container.

    :param container: container ID
    :param nic: nic description dict (``type``, ``id``, ``name``,
        ``hwaddr``, ``config``) — see the node documentation for the
        per-type meaning of ``id`` and ``config``.
    :return: node response for the ``corex.nic-add`` command
    """
    args = {'container': container, 'nic': nic}
    # Validate the payload shape before sending it to the node.
    self._nic_add.check(args)
    return self._client.json('corex.nic-add', args)
Hot plug a nic into a container :param container: container ID :param nic: { 'type': nic_type # one of default, bridge, zerotier, macvlan, passthrough, vlan, or vxlan (note, vlan and vxlan only supported by ovs) 'id': id # depends on the type bridge: bridge name, zerotier: network id, macvlan: the parent link name, passthrough: the link name, vlan: the vlan tag, vxlan: the vxlan id 'name': name of the nic inside the container (ignored in zerotier type) 'hwaddr': Mac address of nic. 'config': { # config is only honored for bridge, vlan, and vxlan types 'dhcp': bool, 'cidr': static_ip # ip/mask 'gateway': gateway 'dns': [dns] } } :return:
def append(self, other, inplace=False, **kwargs):
    """Append any input which can be converted to MAGICCData to self.

    Parameters
    ----------
    other : MAGICCData, pd.DataFrame, pd.Series, str
        Source of data to append.
    inplace : bool
        If True, append ``other`` inplace, otherwise return a new
        ``MAGICCData`` instance.
    **kwargs
        Passed to the ``MAGICCData`` constructor (only used if ``other``
        is not already a ``MAGICCData`` instance).
    """
    if not isinstance(other, MAGICCData):
        other = MAGICCData(other, **kwargs)
    if inplace:
        super().append(other, inplace=inplace)
        # Merge the appended object's metadata into ours in place.
        self.metadata.update(other.metadata)
    else:
        res = super().append(other, inplace=inplace)
        # Copy our metadata first so the original instance is untouched,
        # then let the appended object's metadata take precedence.
        res.metadata = deepcopy(self.metadata)
        res.metadata.update(other.metadata)
        return res
Append any input which can be converted to MAGICCData to self. Parameters ---------- other : MAGICCData, pd.DataFrame, pd.Series, str Source of data to append. inplace : bool If True, append ``other`` inplace, otherwise return a new ``MAGICCData`` instance. **kwargs Passed to ``MAGICCData`` constructor (only used if ``MAGICCData`` is not a ``MAGICCData`` instance).
def add_string_widget(self, ref, text="Text", x=1, y=1):
    """Add (or fetch) a StringWidget registered under *ref*.

    A new widget is only created when *ref* is not already registered;
    the widget stored under *ref* is always returned.
    """
    if ref not in self.widgets:
        self.widgets[ref] = widgets.StringWidget(
            screen=self, ref=ref, text=text, x=x, y=y)
    return self.widgets[ref]
Add String Widget
def wrap(self, data):
    """Wrap *data* with the root schema definitions.

    When this schema is nested, *data* is returned untouched. Otherwise
    the schema is registered under its object's class name and a root
    document referencing it via a JSON-Schema ``$ref`` pointer is
    returned.
    """
    if self.nested:
        return data
    name = self.obj.__class__.__name__
    self._nested_schema_classes[name] = data
    # NOTE(review): the '$ref' value was truncated in the source text;
    # restored to the standard JSON-Schema definitions pointer
    # ('#/definitions/<name>') — confirm against version control.
    root = {
        'definitions': self._nested_schema_classes,
        '$ref': '#/definitions/{}'.format(name),
    }
    return root
Wrap this with the root schema definitions.
def get_random_name(sep: str = '-'):
    """Generate a random docker-like name with the given separator.

    :param sep: adjective-name separator string
    :return: random docker-like name
    """
    rng = random.SystemRandom()
    return '{}{}{}'.format(rng.choice(_left), sep, rng.choice(_right))
Generate random docker-like name with the given separator. :param sep: adjective-name separator string :return: random docker-like name
def distutils_servers(self):
    """Return a list of known distutils servers for collective.dist.

    If the config has an old pypi config, the default 'pypi' server is
    removed from the list. Returns an empty list when multiple-server
    support is unavailable or the option is missing.
    """
    if not multiple_pypi_support():
        return []
    try:
        raw = self.config.get('distutils', 'index-servers')
    except (NoSectionError, NoOptionError):
        return []
    # Blank entries are always dropped; 'pypi' additionally so for
    # old-style configs where it is implied.
    ignored = ['']
    if self.is_old_pypi_config():
        ignored.append('pypi')
    return [line.strip() for line in raw.split('\n')
            if line.strip() not in ignored]
Return a list of known distutils servers for collective.dist. If the config has an old pypi config, remove the default pypi server from the list.
def store(self, value, l, dir_only):
    """Append *value* to the pattern list *l* as a WcGlob, classifying
    it as literal or magic (and compiling magic patterns).

    Empty values are dropped unless the list is still empty.
    """
    if l and value in (b'', ''):
        return
    globstar = value in (b'**', '**') and self.globstar
    magic = self.is_magic(value)
    if magic:
        # Magic patterns are stored pre-compiled; literals stay as-is.
        value = compile(value, self.flags)
    l.append(WcGlob(value, magic, globstar, dir_only, False))
Group patterns by literals and potential magic patterns.
def stop(self):
    """Stops the GNS3 VM.

    First requests a clean ACPI shutdown; if the VM has not reached the
    "poweroff" state after ~120 seconds, forces a hard poweroff.
    """
    vm_state = yield from self._get_state()
    if vm_state == "poweroff":
        # Already off: just record the state.
        self.running = False
        return
    # Ask the guest OS to shut down cleanly.
    yield from self._execute("controlvm", [self._vmname, "acpipowerbutton"], timeout=3)
    trial = 120
    while True:
        try:
            vm_state = yield from self._get_state()
        except GNS3VMError:
            # Transient state-query failure: assume still running and retry.
            vm_state = "running"
        if vm_state == "poweroff":
            break
        trial -= 1
        if trial == 0:
            # Graceful shutdown timed out: pull the virtual plug.
            yield from self._execute("controlvm", [self._vmname, "poweroff"], timeout=3)
            break
        yield from asyncio.sleep(1)
    log.info("GNS3 VM has been stopped")
    self.running = False
Stops the GNS3 VM.
def _get_hangul_syllable_name(hangul_syllable):
    """Return the syllable name for a Hangul syllable scalar value,
    per Unicode naming rule NR1 (Unicode Standard ch. 04, section 4.8).

    :param hangul_syllable: Unicode scalar value of the Hangul syllable
    :return: the NR1 syllable name string
    :raises ValueError: if the value is not a Hangul syllable
    """
    if not _is_hangul_syllable(hangul_syllable):
        raise ValueError("Value passed in does not represent a Hangul syllable!")
    jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True)
    # Concatenate the short names of the present jamo components.
    return ''.join(_get_jamo_short_name(j) for j in jamo if j is not None)
Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1.
def total_flux(flux, A):
    r"""Compute the total flux between reactant and product.

    Parameters
    ----------
    flux : (M, M) scipy.sparse matrix
        Matrix of flux values between pairs of states.
    A : array_like
        List of integer state labels for set A (reactant).

    Returns
    -------
    F : float
        The total flux between reactant and product.
    """
    states = set(np.arange(flux.shape[0]))
    source = set(A)
    sink = states.difference(source)
    # Slice rows in A and columns outside A, then sum all crossings.
    crossing = flux.tocsr()[list(source), :].tocsc()[:, list(sink)]
    return crossing.sum()
r"""Compute the total flux between reactant and product. Parameters ---------- flux : (M, M) scipy.sparse matrix Matrix of flux values between pairs of states. A : array_like List of integer state labels for set A (reactant) Returns ------- F : float The total flux between reactant and product
def _MaybeWriteIndex(self, i, ts, mutation_pool):
    """Write index marker i.

    Only writes a marker for positions past the last indexed one that
    fall on an INDEX_SPACING boundary, and only when the entry's
    timestamp is older than INDEX_WRITE_DELAY — presumably so that
    still-settling writes are not indexed prematurely (TODO confirm).
    """
    if i > self._max_indexed and i % self.INDEX_SPACING == 0:
        if ts[0] < (rdfvalue.RDFDatetime.Now() - self.INDEX_WRITE_DELAY).AsMicrosecondsSinceEpoch():
            mutation_pool.CollectionAddIndex(self.collection_id, i, ts[0], ts[1])
            # Mirror the on-disk index in memory and advance the high mark.
            self._index[i] = ts
            self._max_indexed = max(i, self._max_indexed)
Write index marker i.
def bits(self, count):
    """Read *count* bits MSB-first and return them as an unsigned int.

    May raise BitReaderError if not enough data could be read,
    ValueError for a negative count, or IOError from the underlying
    file object.
    """
    if count < 0:
        raise ValueError
    # Refill the bit buffer from the file when it holds too few bits.
    missing = count - self._bits
    if missing > 0:
        n_bytes = (missing + 7) // 8
        chunk = self._fileobj.read(n_bytes)
        if len(chunk) != n_bytes:
            raise BitReaderError("not enough data")
        for byte in bytearray(chunk):
            self._buffer = (self._buffer << 8) | byte
        self._bits += n_bytes * 8
    self._bits -= count
    result = self._buffer >> self._bits
    # Keep only the unread low bits in the buffer.
    self._buffer &= (1 << self._bits) - 1
    assert self._bits < 8
    return result
Reads `count` bits and returns an uint, MSB read first. May raise BitReaderError if not enough data could be read or IOError by the underlying file object.
def formfield(self, **kwargs):
    """Apply the widget class defined by the ``RICHTEXT_WIDGET_CLASS``
    setting.

    The setting is only consulted when no custom widget was supplied,
    i.e. when the default admin textarea would otherwise be used.

    :raises ImproperlyConfigured: if the configured widget path cannot
        be imported.
    """
    default = kwargs.get("widget", None) or AdminTextareaWidget
    if default is AdminTextareaWidget:
        # Imported here to avoid touching settings at import time.
        from yacms.conf import settings
        richtext_widget_path = settings.RICHTEXT_WIDGET_CLASS
        try:
            widget_class = import_dotted_path(richtext_widget_path)
        except ImportError:
            raise ImproperlyConfigured(_("Could not import the value of "
                                         "settings.RICHTEXT_WIDGET_CLASS: "
                                         "%s" % richtext_widget_path))
        kwargs["widget"] = widget_class()
    kwargs.setdefault("required", False)
    formfield = super(RichTextField, self).formfield(**kwargs)
    return formfield
Apply the widget class defined by the ``RICHTEXT_WIDGET_CLASS`` setting.
def user_filter(config, message, fasnick=None, *args, **kw):
    """A particular user.

    Use this rule to include messages that are associated with a
    specific user. Returns None when no user was specified.
    """
    # A keyword argument takes precedence over the positional one.
    nick = kw.get('fasnick', fasnick)
    if not nick:
        return None
    return nick in fmn.rules.utils.msg2usernames(message, **config)
A particular user Use this rule to include messages that are associated with a specific user.
def addPSF(self, psf, date=None, info='', light_spectrum='visible'):
    """Add a new point spread function for the given light spectrum,
    keeping each spectrum's entries ordered by date."""
    self._registerLight(light_spectrum)
    date = _toDate(date)
    series = self.coeffs['psf'].setdefault(light_spectrum, [])
    # Insert at the date-sorted position.
    series.insert(_insertDateIndex(date, series), [date, info, psf])
add a new point spread function
async def pulse(self, *args, **kwargs):
    """Publish a Pulse Message on the given ``routingKey``.

    This method takes input: ``v1/pulse-request.json#``

    This method is ``experimental``
    """
    endpoint = self.funcinfo["pulse"]
    return await self._makeApiCall(endpoint, *args, **kwargs)
Publish a Pulse Message Publish a message on pulse with the given `routingKey`. This method takes input: ``v1/pulse-request.json#`` This method is ``experimental``
def conditional_write(strm, fmt, value, *args, **kwargs):
    """Write *value* formatted with *fmt* to *strm*; no-op when *value*
    is None."""
    if value is None:
        return
    strm.write(fmt.format(value, *args, **kwargs))
Write to stream using fmt and value if value is not None
def get_queryset(self):
    """Get the list of items for this view.

    This must be an iterable, and may be a queryset (in which
    qs-specific behavior will be enabled). See the original in
    ``django.views.generic.list.MultipleObjectMixin``.

    :raises ImproperlyConfigured: when neither ``queryset`` nor
        ``model`` is defined.
    """
    if self.queryset is not None:
        qs = self.queryset
        # Clone querysets so view-level filtering never mutates the
        # class-level attribute.
        if hasattr(qs, '_clone'):
            qs = qs._clone()
        return qs
    if self.model is not None:
        return self.model._default_manager.all()
    msg = '{0} must define ``queryset`` or ``model``'
    raise ImproperlyConfigured(msg.format(self.__class__.__name__))
Get the list of items for this view. This must be an interable, and may be a queryset (in which qs-specific behavior will be enabled). See original in ``django.views.generic.list.MultipleObjectMixin``.
def solve(guess_a, guess_b, power, solver='scipy'):
    """Constructs a pyneqsys.symbolic.SymbolicSys instance and returns
    from its ``solve`` method.

    :param guess_a: initial guess for x0.
    :param guess_b: initial guess for x1.
    :param power: value substituted for the symbolic exponent ``p``.
    :param solver: backend passed through to ``SymbolicSys.solve``.
    """
    x = sp.symbols('x:2', real=True)
    p = sp.Symbol('p', real=True, negative=False, integer=True)
    # Two coupled equations parameterized by the exponent p.
    f = [x[0] + (x[0] - x[1])**p/2 - 1, (x[1] - x[0])**p/2 + x[1]]
    neqsys = SymbolicSys(x, f, [p])
    return neqsys.solve([guess_a, guess_b], [power], solver=solver)
Constructs a pyneqsys.symbolic.SymbolicSys instance and returns from its ``solve`` method.
def avail_sizes(call=None):
    """Return a list of sizes from Azure.

    Must be called with -f/--function or the --list-sizes option;
    raises SaltCloudSystemExit when invoked as an action.
    """
    if call == 'action':
        raise SaltCloudSystemExit(
            'The avail_sizes function must be called with '
            '-f or --function, or with the --list-sizes option'
        )
    conn = get_conn()
    data = conn.list_role_sizes()
    # Map each role size name to a plain dict representation.
    ret = {}
    for item in data.role_sizes:
        ret[item.name] = object_to_dict(item)
    return ret
Return a list of sizes from Azure
def add_task(self, pid):
    """Add process *pid* to every cgroup represented by this instance."""
    _register_process_with_cgrulesengd(pid)
    pid_text = str(pid)
    for cgroup in self.paths:
        # Writing the pid to a cgroup's 'tasks' file attaches the process.
        with open(os.path.join(cgroup, 'tasks'), 'w') as tasks_file:
            tasks_file.write(pid_text)
Add a process to the cgroups represented by this instance.
def CopyTextToLabel(cls, text, prefix=''):
    """Copies a string to a label.

    A label only supports a limited set of characters, so unsupported
    characters are replaced with an underscore.

    Args:
      text (str): label text.
      prefix (Optional[str]): label prefix.

    Returns:
      str: label.
    """
    labeled = '{0:s}{1:s}'.format(prefix, text)
    return cls._INVALID_LABEL_CHARACTERS_REGEX.sub('_', labeled)
Copies a string to a label. A label only supports a limited set of characters therefore unsupported characters are replaced with an underscore. Args: text (str): label text. prefix (Optional[str]): label prefix. Returns: str: label.
def ProcessMessage(self, message):
    """Run the foreman on the client."""
    # Only act on authenticated messages.
    if (message.auth_state != rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED):
        return
    now = time.time()
    with self.lock:
        # Refresh the cached foreman object when missing or stale.
        if (self.foreman_cache is None or now > self.foreman_cache.age + self.cache_refresh_time):
            self.foreman_cache = aff4.FACTORY.Open(
                "aff4:/foreman", mode="rw", token=self.token)
            self.foreman_cache.age = now
    if message.source:
        self.foreman_cache.AssignTasksToClient(message.source.Basename())
Run the foreman on the client.
def set_ys(self, word):
    """Identify Ys that are to be treated as consonants and make them
    uppercase.

    A consonant-acting y is either word-initial or immediately follows
    a vowel. Returns the word with those ys replaced by 'Y'; an empty
    string is returned unchanged (the original raised IndexError).
    """
    if not word:
        return word
    if word[0] == 'y':
        word = 'Y' + word[1:]
    # A single substitution pass replaces the original per-match list
    # rebuild; matches are non-overlapping exactly as with finditer.
    return re.sub('([aeiou])y', lambda m: m.group(1) + 'Y', word)
Identify Ys that are to be treated as consonants and make them uppercase.
def enriched(self, thresh=0.05, idx=True):
    """Enriched features; delegates to :meth:`upregulated`. {threshdoc}"""
    upregulated = self.upregulated
    return upregulated(thresh=thresh, idx=idx)
Enriched features. {threshdoc}
def progress(self):
    """Return the percentage of jobs in the queue that have completed
    (0 for an empty queue)."""
    total = len(self.all_jobs)
    if total == 0:
        return 0
    completed = total - len(self.active_jobs)
    return int(100 * (float(completed) / total))
Returns the percentage of jobs in the queue that have completed (0 when the queue is empty).
def tar_dir(tarfile, srcdir):
    """Pack a tar file using all the files in the given srcdir.

    NOTE: the ``tarfile`` parameter name shadows the stdlib module; it
    is kept for interface compatibility with existing callers.
    """
    entries = os.listdir(srcdir)
    packtar(tarfile, entries, srcdir)
Pack a tar file using all the files in the given srcdir
def step(self, action, blocking=True):
    """Step the environment.

    Args:
      action: The action to apply to the environment.
      blocking: Whether to wait for the result.

    Returns:
      Transition tuple when blocking; otherwise a callable that
      returns the transition tuple.
    """
    promise = self.call('step', action)
    return promise() if blocking else promise
Step the environment. Args: action: The action to apply to the environment. blocking: Whether to wait for the result. Returns: Transition tuple when blocking, otherwise callable that returns the transition tuple.
def _generate_keys(self):
    """Generate (or load) the RSA keypair used to sign Discourse API
    interactions on behalf of the user.

    The keypair is stored under a 'discourse' directory next to the
    helpme client secrets file; ``self.key`` and ``self.public_key``
    are only populated if not already set.
    """
    from helpme.defaults import HELPME_CLIENT_SECRETS
    keypair_dir = os.path.join(os.path.dirname(HELPME_CLIENT_SECRETS), 'discourse')
    self.keypair_file = os.path.join(keypair_dir, 'private.pem')
    if not hasattr(self, 'key'):
        self.key = generate_keypair(self.keypair_file)
    if not hasattr(self, 'public_key'):
        self.public_key = load_keypair(self.keypair_file)
the discourse API requires the interactions to be signed, so we generate a keypair on behalf of the user
def create(self, environment, target_name):
    """Send the "create project" command for *target_name* to the
    remote server over ssh."""
    command = ["ssh", environment.deploy_target, "create", target_name]
    remote_server_command(
        command,
        environment,
        self,
        clean_up=True,
    )
Sends "create project" command to the remote server
def accept_best_match(accept_header, mimetypes):
    """Return the mimetype from *mimetypes* best matching the Accept
    header, falling back to the first candidate when nothing matches.

    >>> accept_best_match('application/json, text/html',
    ...                   ['application/json', 'text/plain'])
    'application/json'
    >>> accept_best_match('application/json;q=0.5, text/*',
    ...                   ['application/json', 'text/plain'])
    'text/plain'
    """
    for pattern, _quality in _parse_and_sort_accept_header(accept_header):
        hits = fnmatch.filter(mimetypes, pattern)
        if hits:
            return hits[0]
    return mimetypes[0]
Return a mimetype best matched the accept headers. >>> accept_best_match('application/json, text/html', ['application/json', 'text/plain']) 'application/json' >>> accept_best_match('application/json;q=0.5, text/*', ['application/json', 'text/plain']) 'text/plain'
def _from_dict(cls, _dict):
    """Initialize an AlignedElement object from a json dictionary."""
    args = {}
    if 'element_pair' in _dict:
        args['element_pair'] = [ElementPair._from_dict(pair)
                                for pair in _dict.get('element_pair')]
    # Scalar/list fields are copied through verbatim when present.
    for key in ('identical_text', 'provenance_ids', 'significant_elements'):
        if key in _dict:
            args[key] = _dict.get(key)
    return cls(**args)
Initialize a AlignedElement object from a json dictionary.