code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_device_sdr(self, record_id, reservation_id=None):
    """Collect the SDR identified by ``record_id`` from the sensor device.

    :param record_id: the Record ID to fetch
    :param reservation_id: optional reservation ID; when ``None`` it is
        determined automatically
    :return: the parsed SDR object
    """
    next_id, record_data = get_sdr_data_helper(
        self.reserve_device_sdr_repository,
        self._get_device_sdr_chunk,
        record_id,
        reservation_id,
    )
    return sdr.SdrCommon.from_data(record_data, next_id)
Collects all data from the sensor device to get the SDR specified by record id. `record_id` the Record ID. `reservation_id=None` can be set. if None the reservation ID will be determined.
def serialize_structmap(self, recurse=True, normative=False):
    """Return the ``<div>`` element for this entry for use in a structMap.

    Directories get their children appended recursively; files get a
    ``<fptr>`` child (except in the normative structMap).

    :param bool recurse: serialize and append children when True
    :param bool normative: build a "Normative Directory Structure" div,
        which includes empty directories and omits fptr elements
    :return: structMap div element, or None if not applicable
    """
    if not self.label:
        return None
    if self.is_empty_dir and not normative:
        return None
    div_el = etree.Element(utils.lxmlns("mets") + "div", TYPE=self.mets_div_type)
    div_el.attrib["LABEL"] = self.label
    if not normative and self.file_id():
        etree.SubElement(div_el, utils.lxmlns("mets") + "fptr",
                         FILEID=self.file_id())
    if self.dmdids:
        # Equivalent to: (not normative) or (normative and is_empty_dir)
        if not normative or self.is_empty_dir:
            div_el.set("DMDID", " ".join(self.dmdids))
    if recurse and self._children:
        for child in self._children:
            serialized = child.serialize_structmap(
                recurse=recurse, normative=normative
            )
            if serialized is not None:
                div_el.append(serialized)
    return div_el
Return the div Element for this file, appropriate for use in a structMap. If this FSEntry represents a directory, its children will be recursively appended to itself. If this FSEntry represents a file, it will contain a <fptr> element. :param bool recurse: If true, serialize and append all children. Otherwise, only serialize this element but not any children. :param bool normative: If true, we are creating a "Normative Directory Structure" logical structmap, in which case we add div elements for empty directories and do not add fptr elements for files. :return: structMap element for this FSEntry
def SetTicketAcceso(self, ta_string):
    """Set the token and signature from an access-ticket XML string."""
    if not ta_string:
        raise RuntimeError("Ticket de Acceso vacio!")
    ticket = SimpleXMLElement(ta_string)
    self.Token = str(ticket.credentials.token)
    self.Sign = str(ticket.credentials.sign)
    return True
Establecer el token y sign desde un ticket de acceso XML
def interface_type(self):
    """The interface type of the resource as a number."""
    parsed = self.visalib.parse_resource(
        self._resource_manager.session, self.resource_name
    )
    return parsed[0].interface_type
The interface type of the resource as a number.
def interpret_maskval(paramDict):
    """Interpret the ``maskval`` entry of a parameter dictionary.

    Returns 0 when the key is absent, NaN when the stored value is None,
    and the value coerced to float otherwise.
    """
    try:
        raw = paramDict['maskval']
    except KeyError:
        return 0
    return np.nan if raw is None else float(raw)
Apply logic for interpreting final_maskval value...
def satisfier(self, term):
    """Return the first Assignment such that the assignments up to and
    including it collectively satisfy ``term``.

    :raises RuntimeError: if no prefix of the assignment list satisfies
        ``term`` (indicates a solver bug).
    """
    accumulated = None
    for assignment in self._assignments:
        if assignment.dependency.name != term.dependency.name:
            continue
        # NOTE(review): given the name check above, the second clause of
        # this condition is always False, making the branch appear
        # unreachable; preserved as-is pending confirmation.
        if (not assignment.dependency.is_root
                and not assignment.dependency.name == term.dependency.name):
            if not assignment.is_positive():
                continue
            assert not term.is_positive()
            return assignment
        if accumulated is None:
            accumulated = assignment
        else:
            accumulated = accumulated.intersect(assignment)
        if accumulated.satisfies(term):
            return assignment
    raise RuntimeError("[BUG] {} is not satisfied.".format(term))
Returns the first Assignment in this solution such that the sublist of assignments up to and including that entry collectively satisfies term.
def get_request_feature(self, name):
    """Parse the request for a particular feature.

    :param name: a feature name; '[]' names are list params, '{}' names
        are object params, anything else is a scalar param
    :return: the parsed value if the feature is supported, otherwise
        None (or {} for object params)
    """
    params = self.request.query_params
    supported = name in self.features
    if '[]' in name:
        return params.getlist(name) if supported else None
    if '{}' in name:
        return self._extract_object_params(name) if supported else {}
    return params.get(name) if supported else None
Parses the request for a particular feature. Arguments: name: A feature name. Returns: A feature parsed from the URL if the feature is supported, or None.
def set_credentials(username, api_key=None, password=None, region=None,
                    tenant_id=None, authenticate=True):
    """Set the credentials directly, then optionally authenticate.

    If a region is passed, authentication uses that region's endpoint and
    it becomes the default region for connections.
    """
    global regions, services
    secret = password or api_key
    region = _safe_region(region)
    tenant_id = tenant_id or settings.get("tenant_id")
    identity.set_credentials(username=username, password=secret,
                             tenant_id=tenant_id, region=region,
                             authenticate=authenticate)
    regions = tuple(identity.regions)
    services = tuple(identity.services.keys())
    connect_to_services(region=region)
Set the credentials directly, and then try to authenticate. If the region is passed, it will authenticate against the proper endpoint for that region, and set the default region for connections.
def save(self):
    """Convert the scanner to JSON.

    :return: JSON-serializable dict
    """
    payload = super().save()
    payload.update(expr=self.expr.pattern, default_end=self.default_end)
    return payload
Convert the scanner to JSON. Returns ------- `dict` JSON data.
def get_science_segments(workflow, out_dir, tags=None):
    """Generate the science segments for every ifo in the workflow.

    :param workflow: the Workflow instance
    :param out_dir: directory in which to store output files
    :param tags: list of strings used to select ini-file subsections
    :return: tuple of (SegFile with combined science segments,
        ifo-keyed dict of segmentlists, segment name string)
    """
    if tags is None:
        tags = []
    logging.info('Starting generation of science segments')
    make_analysis_dir(out_dir)
    start_time = workflow.analysis_time[0]
    end_time = workflow.analysis_time[1]
    seg_name = "SCIENCE"
    sci_segs = {}
    seg_dict = segments.segmentlistdict()
    seg_summ_dict = segments.segmentlistdict()
    for ifo in workflow.ifos:
        ifo_segs, ifo_xml, ifo_seg_name = get_sci_segs_for_ifo(
            ifo, workflow.cp, start_time, end_time, out_dir, tags)
        key = ifo + ':' + seg_name
        seg_dict[key] = ifo_segs
        sci_segs[ifo] = ifo_segs
        seg_summ_dict[key] = \
            ifo_xml.seg_summ_dict[ifo + ':' + ifo_seg_name]
    seg_file = SegFile.from_segment_list_dict(
        seg_name, seg_dict, extension='xml',
        valid_segment=workflow.analysis_time,
        seg_summ_dict=seg_summ_dict, directory=out_dir, tags=tags)
    logging.info('Done generating science segments')
    return seg_file, sci_segs, seg_name
Get the analyzable segments after applying ini specified vetoes. Parameters ----------- workflow : Workflow object Instance of the workflow object out_dir : path Location to store output files tags : list of strings Used to retrieve subsections of the ini file for configuration options. Returns -------- sci_seg_file : workflow.core.SegFile instance The segment file combined from all ifos containing the science segments. sci_segs : Ifo keyed dict of ligo.segments.segmentlist instances The science segs for each ifo, keyed by ifo sci_seg_name : str The name with which science segs are stored in the output XML file.
def percentile_between(self, min_percentile, max_percentile, mask=NotSpecified):
    """Build a Filter selecting outputs within a percentile range.

    :param min_percentile: lower percentile bound in [0.0, 100.0]
    :param max_percentile: upper percentile bound in [0.0, 100.0]
    :param mask: optional Filter restricting which assets are considered
        when computing percentile cutoffs
    :return: a PercentileFilter computing the percentile-range mask
    """
    return PercentileFilter(
        self,
        min_percentile=min_percentile,
        max_percentile=max_percentile,
        mask=mask,
    )
Construct a new Filter representing entries from the output of this Factor that fall within the percentile range defined by min_percentile and max_percentile. Parameters ---------- min_percentile : float [0.0, 100.0] Return True for assets falling above this percentile in the data. max_percentile : float [0.0, 100.0] Return True for assets falling below this percentile in the data. mask : zipline.pipeline.Filter, optional A Filter representing assets to consider when percentile calculating thresholds. If mask is supplied, percentile cutoffs are computed each day using only assets for which ``mask`` returns True. Assets for which ``mask`` produces False will produce False in the output of this Factor as well. Returns ------- out : zipline.pipeline.filters.PercentileFilter A new filter that will compute the specified percentile-range mask. See Also -------- zipline.pipeline.filters.filter.PercentileFilter
def addFilter(self, field, value):
    """Add a filter qualifier to the search URL.

    :param field: the field to filter on (see GitHub search)
    :param value: the filter value; a ':' separator is prepended unless
        the value already contains all of '<', '>' and '..'
    """
    has_all_range_tokens = "<" in value and ">" in value and ".." in value
    if not has_all_range_tokens:
        value = ":" + value
    encoded = field + str(quote(value))
    if self.__urlFilters:
        self.__urlFilters += "+" + encoded
    else:
        self.__urlFilters += encoded
Add a filter to the search. :param field: the field to filter on (see GitHub search). :type field: str. :param value: value of the filter (see GitHub search). :type value: str.
def send(self, obj):
    """Queue a push notification for sending.

    :param obj: the message to send; must be a NotificationMessage
    :raises ValueError: if ``obj`` is not a NotificationMessage
    """
    if not isinstance(obj, NotificationMessage):
        # The original used the Python 2 `raise ValueError, "..."` form,
        # which is a syntax error under Python 3.
        raise ValueError(u"You can only send NotificationMessage objects.")
    self._send_queue.put(obj)
Send a push notification
def verify(self, id):
    """Verify a custom domain.

    :param id: the id of the custom domain to verify
    :return: the API response
    """
    endpoint = self._url('%s/verify' % (id))
    return self.client.post(endpoint)
Verify a custom domain Args: id (str): The id of the custom domain to delete See: https://auth0.com/docs/api/management/v2#!/Custom_Domains/post_verify
def get_timex(self, timex_id):
    """Return the Ctime object for ``timex_id``, or None if unknown.

    :param timex_id: timex identifier string
    """
    if timex_id not in self.idx:
        return None
    return Ctime(self.idx[timex_id])
Returns the timex object for the supplied identifier @type timex_id: string @param timex_id: timex identifier
def get_default_net_device():
    """Return the interface carrying the default route, or None."""
    with open('/proc/net/route') as route_table:
        for entry in route_table:
            iface, destination, _rest = entry.split(None, 2)
            # The default route has an all-zero destination.
            if destination == '00000000':
                return iface
    return None
Find the device where the default route is.
def get_access_flags_string(value):
    """Transform an access-flag field into its string form.

    :param value: the access flags bitfield (int)
    :return: space-separated flag names (str)
    """
    names = [
        name for flag, name in ACCESS_FLAGS.items()
        if (flag & value) == flag
    ]
    return " ".join(names)
Transform an access flag field to the corresponding string :param value: the value of the access flags :type value: int :rtype: string
def padding_to_length(padding):
    """Calculate sequence lengths from a padding mask.

    :param padding: Tensor with shape [..., length]
    :return: int32 Tensor with shape [...]
    """
    non_padding = 1.0 - padding
    return tf.to_int32(tf.reduce_sum(non_padding, axis=-1))
Calculate the length of mask based on padding. Args: padding: a Tensor with shape [..., length]. Returns: a Tensor with shape [...].
def get_xml_root(xml_path):
    """Fetch and parse an XML document, returning its root element.

    :param xml_path: URL of the XML file
    :return: the parsed XML root
    """
    response = requests.get(xml_path)
    return ET.fromstring(response.content)
Load and parse an xml by given xml_path and return its root. :param xml_path: URL to a xml file :type xml_path: str :return: xml root
def symbolic_master_equation(self, rho=None):
    """Compute the symbolic Liouvillian acting on a state ``rho``.

    If no ``rho`` is given, an OperatorSymbol is created in its place.

    :param rho: symbolic density matrix operator (optional)
    :return: the RHS of the master equation
    """
    L, H = self.L, self.H
    if rho is None:
        rho = OperatorSymbol('rho', hs=self.space)
    commutator = -I * (H * rho - rho * H)
    dissipator = sum(
        Lk * rho * adjoint(Lk)
        - (adjoint(Lk) * Lk * rho + rho * adjoint(Lk) * Lk) / 2
        for Lk in L.matrix.ravel())
    return commutator + dissipator
Compute the symbolic Liouvillian acting on a state rho If no rho is given, an OperatorSymbol is created in its place. This corresponds to the RHS of the master equation in which an average is taken over the external noise degrees of freedom. Args: rho (Operator): A symbolic density matrix operator Returns: Operator: The RHS of the master equation.
def set_backbuffer(self, preferred_backbuffer_size, flags=0):
    """Create the backbuffer for the game.

    :param preferred_backbuffer_size: desired size as a Vector2
    :param flags: pygame display flags
    :raises ValueError: if the size is not a Vector2
    """
    if not isinstance(preferred_backbuffer_size, Vector2):
        raise ValueError("preferred_backbuffer_size must be of type Vector2")
    surface = pygame.display.set_mode(preferred_backbuffer_size, flags)
    self.__backbuffer = surface
    self.Camera.world_center = preferred_backbuffer_size / 2.0
Create the backbuffer for the game.
def _add_nat(self):
    """Add pd.NaT to self, producing an all-NaT array of the same type.

    :raises TypeError: for period dtypes, where NaT addition is undefined
    """
    if is_period_dtype(self):
        raise TypeError('Cannot add {cls} and {typ}'
                        .format(cls=type(self).__name__,
                                typ=type(NaT).__name__))
    filled = np.full(len(self), iNaT, dtype=np.int64)
    return type(self)(filled, dtype=self.dtype, freq=None)
Add pd.NaT to self
def decode(self, envelope, session, target=None, modification_code=None, **kwargs):
    """Check the envelope for ``modification_code`` and strip it.

    :param envelope: original envelope
    :param session: original session
    :param target: whether the code sits at the head or the tail
    :param modification_code: code to search for and remove
    :return: WMessengerTextEnvelope or WMessengerBytesEnvelope, matching
        the original envelope type
    :raises ValueError: on short messages or a missing code
    """
    self.__args_check(envelope, target, modification_code)
    message = envelope.message()
    code_len = len(modification_code)
    if len(message) < code_len:
        raise ValueError('Invalid message length')
    if isinstance(envelope, WMessengerTextEnvelope):
        result_cls = WMessengerTextEnvelope
    else:
        result_cls = WMessengerBytesEnvelope
    if target == WMessengerFixedModificationLayer.Target.head:
        if message[:code_len] != modification_code:
            raise ValueError('Invalid header in message')
        return result_cls(message[code_len:], meta=envelope)
    if message[-code_len:] != modification_code:
        raise ValueError('Invalid tail in message')
    return result_cls(message[:-code_len], meta=envelope)
Methods checks envelope for 'modification_code' existence and removes it. :param envelope: original envelope :param session: original session :param target: flag, that specifies whether code must be searched and removed at the start or at the end :param modification_code: code to search/remove :param kwargs: additional arguments :return: WMessengerTextEnvelope or WMessengerBytesEnvelope (depends on the original envelope)
def reset(self):
    """Set all overlap/segment state back to empty and zero the grid."""
    self.trnOverlaps = []
    self.activeTRNSegments = []
    self.activeTRNCellIndices = []
    self.relayOverlaps = []
    self.activeRelaySegments = []
    self.burstReadyCellIndices = []
    self.burstReadyCells = np.zeros((self.relayWidth, self.relayHeight))
Set everything back to zero
def _read_byte(self):
    """Read a single byte from the user without waiting for newline.

    Stores the byte's ordinal at the current tape pointer.
    """
    from .getch import _Getch
    try:
        g = _Getch()
        self.tape[self.pointer] = ord(g())
    except TypeError:
        # The original used the Python 2 print statement, which is a
        # syntax error under Python 3.
        print("Here's what _Getch() is giving me {}".format(g()))
Read a single byte from the user without waiting for the \n character
def results(self, campaign_id):
    """Return just the results for the given campaign."""
    return super(API, self).get(
        resource_id=campaign_id,
        resource_action='results',
        resource_cls=CampaignResults)
Returns just the results for a given campaign
def ipostorder(self):
    """Yield tree nodes in depth-first post-order."""
    stack = [self]
    visited = set()
    while stack:
        node = stack[-1]
        if node in visited:
            stack.pop()
            yield node
        else:
            visited.add(node)
            stack.extend(reversed(node.children))
Depth-first post-order iteration of tree nodes
def can_route(self, endpoint, method=None, **kwargs):
    """Check we may route to the given endpoint or URL.

    Checks for `http.get` (or other method) permission on the ACL of
    route functions attached via the `ACL` decorator.

    :param endpoint: a URL or endpoint name to check
    :param method: the HTTP method; defaults to 'GET'
    :param kwargs: context passed to predicates
    """
    view = flask.current_app.view_functions.get(endpoint)
    if not view:
        # NOTE(review): flask._request_ctx is a private attribute and may
        # not exist on modern Flask — verify this URL-matching fallback.
        endpoint, args = flask._request_ctx.top.match(endpoint)
        view = flask.current_app.view_functions.get(endpoint)
        if not view:
            return False
    return self.can('http.' + (method or 'GET').lower(), view, **kwargs)
Make sure we can route to the given endpoint or url. This checks for `http.get` permission (or other methods) on the ACL of route functions, attached via the `ACL` decorator. :param endpoint: A URL or endpoint to check for permission to access. :param method: The HTTP method to check; defaults to `'GET'`. :param **kwargs: The context to pass to predicates.
def _generate_atom_feed(self, feed):
    """Build an atom feed titled "Feed"; override in other applications.

    :param feed: a feed object
    :return: an atom feed `feedgen.feed.FeedGenerator`
    """
    atom = self.init_atom_feed(feed)
    atom.title("Feed")
    return atom
A function returning a feed like `feedgen.feed.FeedGenerator`. The function can be overwritten when used in other applications. :param feed: a feed object :return: an atom feed `feedgen.feed.FeedGenerator`
def namedb_get_account_tokens(cur, address):
    """Return the list of distinct token types held by ``address``."""
    sql = 'SELECT DISTINCT type FROM accounts WHERE address = ?;'
    rows = namedb_query_execute(cur, sql, (address,))
    return [row['type'] for row in rows]
Get an account's tokens Returns the list of tokens on success Returns None if not found
def _get_initial_sync_op(self):
    """Return the op that copy-initializes all local variables from PS."""
    def strip_port(name):
        # Variable names carry a ':0' output suffix; drop it for matching.
        return name[:-2] if name.endswith(':0') else name

    local_var_by_name = {
        strip_port(v.name): v for v in tf.local_variables()}
    ops = []
    n_shadow = len(self._shadow_vars)
    for shadow in self._shadow_vars:
        base_name = strip_port(shadow.name)
        for gpu in range(self.nr_gpu):
            tower_name = 'tower%s/%s' % (gpu, base_name)
            assert tower_name in local_var_by_name, \
                "Shadow variable {} doesn't match a corresponding local variable!".format(shadow.name)
            local_var = local_var_by_name[tower_name]
            ops.append(local_var.assign(shadow.read_value()))
    return tf.group(*ops, name='sync_{}_variables_from_ps'.format(n_shadow))
Get the op to copy-initialized all local variables from PS.
def git_remote(self):
    """Return the first URL of the 'origin' remote if configured, else the
    first URL of the lexicographically-first remote, else None.

    :return: origin or first remote URL, or None
    """
    remotes = self._git_remotes
    if not remotes:
        return None
    if 'origin' in remotes:
        return remotes['origin']
    first_key = min(remotes)
    return remotes[first_key]
If the distribution is installed via git, return the first URL of the 'origin' remote if one is configured for the repo, or else the first URL of the lexicographically-first remote, or else None. :return: origin or first remote URL :rtype: :py:obj:`str` or :py:data:`None`
def add_child(self, child):
    """Append ``child`` to this term's children and set its parent."""
    assert isinstance(child, Term)
    self.children.append(child)
    child.parent = self
    assert not child.term_is("Datafile.Section")
Add a term to this term's children. Also sets the child term's parent
def claim_messages(self, queue, ttl, grace, count=None):
    """Claim up to ``count`` unclaimed messages from ``queue``.

    :param ttl: seconds before the server releases the claim
        (60-43200)
    :param grace: message grace period in seconds (60-43200)
    :param count: messages to claim; server default applies when None
    :return: a QueueClaim whose 'messages' attribute holds the claimed
        QueueMessage objects
    """
    return queue.claim_messages(ttl, grace, count=count)
Claims up to `count` unclaimed messages from the specified queue. If count is not specified, the default is to claim 10 messages. The `ttl` parameter specifies how long the server should wait before releasing the claim. The ttl value MUST be between 60 and 43200 seconds. The `grace` parameter is the message grace period in seconds. The value of grace MUST be between 60 and 43200 seconds. The server extends the lifetime of claimed messages to be at least as long as the lifetime of the claim itself, plus a specified grace period to deal with crashed workers (up to 1209600 or 14 days including claim lifetime). If a claimed message would normally live longer than the grace period, its expiration will not be adjusted. Returns a QueueClaim object, whose 'messages' attribute contains the list of QueueMessage objects representing the claimed messages.
def write(self, valuedict):
    """Generate lines for the converted input file from ``valuedict``.

    Returns an empty list when this parameter's identifier is absent.
    NOTE(review): when the value is a dict, the result is replaced by the
    iterated lines, dropping any comment appended earlier — presumed
    intentional; confirm against callers.
    """
    if self.identifier not in valuedict:
        return []
    values = valuedict[self.identifier]
    result = []
    if self.comment != "":
        result.append(self.comment)
    if self.repeat is not None and type(values) is list:
        if self.repeat.isdigit():
            for index in range(int(self.repeat)):
                result.extend(self._write_iterate(values[index]))
        else:
            for entry in values:
                result.extend(self._write_iterate(entry))
    elif type(values) is dict:
        result = self._write_iterate(values)
    return result
Generates the lines for the converted input file using the specified value dictionary.
def __get_logged_in_id(self):
    """Fetch the logged-in user's ID, with caching.

    The cached ID is reset on calls to log_in.
    """
    # `is None` instead of `== None` per PEP 8; equality can be
    # overridden by arbitrary objects.
    if self.__logged_in_id is None:
        self.__logged_in_id = self.account_verify_credentials().id
    return self.__logged_in_id
Fetch the logged-in user's ID, with caching. The ID is reset on calls to log_in.
def linear(current, target, rate, dt):
    """Move ``current`` toward ``target`` at a constant ``rate`` for ``dt``.

    The returned value never overshoots the target.

    :param current: the current value
    :param target: the value to approach
    :param rate: speed of approach
    :param dt: elapsed time
    :return: the new value
    """
    direction = (target > current) - (target < current)
    if not direction:
        return current
    moved = current + direction * rate * dt
    # Clamp to the target if the step passed it.
    if direction * moved > direction * target:
        return target
    return moved
This function returns the new value after moving towards target at the given speed constantly for the time dt. If for example the current position is 10 and the target is -20, the returned value will be less than 10 if rate and dt are greater than 0: .. sourcecode:: Python new_pos = linear(10, -20, 10, 0.1) # new_pos = 9 The function makes sure that the returned value never overshoots: .. sourcecode:: Python new_pos = linear(10, -20, 10, 100) # new_pos = -20 :param current: The current value of the variable to be changed. :param target: The target value to approach. :param rate: The rate at which the parameter should move towards target. :param dt: The time for which to calculate the change. :return: The new variable value.
def prefix(rowPrefix):
    """Return a Range covering all rows that begin with ``rowPrefix``."""
    end_row = Range.followingPrefix(rowPrefix)
    return Range(srow=rowPrefix, sinclude=True, erow=end_row, einclude=False)
Returns a Range that covers all rows beginning with a prefix
def get_vm_ids_by_ud(access_token, subscription_id, resource_group, vmssname,
                     updatedomain):
    """Return sorted VM instance IDs in the given update domain of a VMSS,
    using the VMSS VM instance view."""
    views = azurerm.list_vmss_vm_instance_view(
        access_token, subscription_id, resource_group, vmssname)
    ids = [
        view['instanceId']
        for view in views['value']
        if view['properties']['instance_view']['platformUpdateDomain']
        == updatedomain
    ]
    ids.sort()
    return ids
look at VMSS VM instance view to get VM IDs by UD
def split(self, X, y):
    """Split time series data and targets, and generate split indices.

    :param X: time series data and optional contextual data
    :param y: target vector
    :return: tuple of (split data, split targets, list of index pairs)
    """
    check_ts_data(X, y)
    Xt, Xc = get_ts_data_parts(X)
    n_series = len(Xt)
    Xt_new, y_new = self._ts_slice(Xt, y)
    if Xc is None:
        X_new = np.array(Xt_new)
    else:
        Xc_new = np.concatenate([Xc] * self.n_splits)
        X_new = TS_Data(Xt_new, Xc_new)
    cv = self._make_indices(n_series)
    return X_new, y_new, cv
Splits time series data and target arrays, and generates splitting indices Parameters ---------- X : array-like, shape [n_series, ...] Time series data and (optionally) contextual data y : array-like shape [n_series, ] target vector Returns ------- X : array-like, shape [n_series * n_splits, ] Split time series data and contextual data y : array-like, shape [n_series * n_splits] Split target data cv : list, shape [2, n_splits] Splitting indices
def update_hierarchy(self, hierarchy_form):
    """Update an existing hierarchy from the supplied form.

    :param hierarchy_form: form containing the elements to update
    :raises IllegalState: form already used in an update transaction
    :raises InvalidArgument: form is invalid or of the wrong kind
    :raises Unsupported: form did not originate from this session
    :return: the updated Hierarchy object
    """
    if self._catalog_session is not None:
        return self._catalog_session.update_catalog(catalog_form=hierarchy_form)
    collection = JSONClientValidated('hierarchy',
                                     collection='Hierarchy',
                                     runtime=self._runtime)
    if not isinstance(hierarchy_form, ABCHierarchyForm):
        raise errors.InvalidArgument('argument type is not an HierarchyForm')
    if not hierarchy_form.is_for_update():
        raise errors.InvalidArgument('the HierarchyForm is for update only, not create')
    form_id = hierarchy_form.get_id().get_identifier()
    try:
        if self._forms[form_id] == UPDATED:
            raise errors.IllegalState('hierarchy_form already used in an update transaction')
    except KeyError:
        raise errors.Unsupported('hierarchy_form did not originate from this session')
    if not hierarchy_form.is_valid():
        raise errors.InvalidArgument('one or more of the form elements is invalid')
    collection.save(hierarchy_form._my_map)
    self._forms[form_id] = UPDATED
    return objects.Hierarchy(osid_object_map=hierarchy_form._my_map,
                             runtime=self._runtime,
                             proxy=self._proxy)
Updates an existing hierarchy. arg: hierarchy_form (osid.hierarchy.HierarchyForm): the form containing the elements to be updated raise: IllegalState - ``hierarchy_form`` already used in an update transaction raise: InvalidArgument - the form contains an invalid value raise: NullArgument - ``hierarchy_id`` or ``hierarchy_form`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``hierarchy_form`` did not originate from ``get_hierarchy_form_for_update()`` *compliance: mandatory -- This method must be implemented.*
def GetFormattedMessages(self, event):
    """Retrieve the formatted messages related to the event.

    :param event: the event object
    :return: (full message, short message) tuple, or (None, None) when
        no event formatter is found
    """
    formatter = self.GetEventFormatter(event)
    if not formatter:
        return None, None
    return formatter.GetMessages(self._formatter_mediator, event)
Retrieves the formatted messages related to the event. Args: event (EventObject): event. Returns: tuple: containing: str: full message string or None if no event formatter was found. str: short message string or None if no event formatter was found.
def delete_attributes(self, item_name, attributes=None, expected_values=None):
    """Delete attributes from a given item.

    :param item_name: name of the item whose attributes are deleted
    :param attributes: attribute names (list) or name->values mapping to
        delete; None deletes all attributes of the item
    :param expected_values: optional [name, value-or-bool] conditional
        check applied before the delete
    :return: True if successful
    """
    return self.connection.delete_attributes(
        self, item_name, attributes, expected_values)
Delete attributes from a given item. :type item_name: string :param item_name: The name of the item whose attributes are being deleted. :type attributes: dict, list or :class:`boto.sdb.item.Item` :param attributes: Either a list containing attribute names which will cause all values associated with that attribute name to be deleted or a dict or Item containing the attribute names and keys and list of values to delete as the value. If no value is supplied, all attribute name/values for the item will be deleted. :type expected_value: list :param expected_value: If supplied, this is a list or tuple consisting of a single attribute name and expected value. The list can be of the form: * ['name', 'value'] In which case the call will first verify that the attribute "name" of this item has a value of "value". If it does, the delete will proceed, otherwise a ConditionalCheckFailed error will be returned. The list can also be of the form: * ['name', True|False] which will simply check for the existence (True) or non-existence (False) of the attribute. :rtype: bool :return: True if successful
def inc_ptr(self, ptr):
    """Return the next circular-buffer data pointer after ``ptr``.

    Wraps around to ``self.data_start`` past the 16-bit address space.
    """
    advanced = ptr + self.reading_len[self.ws_type]
    if advanced >= 0x10000:
        return self.data_start
    return advanced
Get next circular buffer data pointer.
def datatype_from_token(self, token):
    """Look up the type for a SystemRDLParser token.

    Only covers types under the "data_type" grammar rule.
    """
    if token.type != SystemRDLParser.ID:
        return self._DataType_Map[token.type]
    type_name = get_ID_text(token)
    typ = self.compiler.namespace.lookup_type(type_name)
    if typ is None:
        self.msg.fatal(
            "Type '%s' is not defined" % type_name,
            SourceRef.from_antlr(token)
        )
    if rdltypes.is_user_enum(typ) or rdltypes.is_user_struct(typ):
        return typ
    self.msg.fatal(
        "Type '%s' is not a struct or enum" % type_name,
        SourceRef.from_antlr(token)
    )
Given a SystemRDLParser token, lookup the type This only includes types under the "data_type" grammar rule
def _get_offset_day(self, other):
    """Find the day in ``other``'s month with weekday ``self.weekday``
    that is the ``self.week``'th such day of the month.

    :param other: a datetime
    :return: day of month (int)
    """
    month_start = datetime(other.year, other.month, 1)
    shift_days = (self.weekday - month_start.weekday()) % 7
    return 1 + shift_days + self.week * 7
Find the day in the same month as other that has the same weekday as self.weekday and is the self.week'th such day in the month. Parameters ---------- other : datetime Returns ------- day : int
def _export_corpus(self):
    """Run MALLET's ``import-file`` on the corpus.

    :raises IOError: if the MALLET binary path does not exist
    :raises RuntimeError: if import-file exits non-zero
    """
    if not os.path.exists(self.mallet_bin):
        raise IOError("MALLET path invalid or non-existent.")
    self.input_path = os.path.join(self.temp, "input.mallet")
    # Renamed local from `exit`, which shadowed the builtin.
    status = subprocess.call([
        self.mallet_bin, 'import-file',
        '--input', self.corpus_path,
        '--output', self.input_path,
        '--keep-sequence',
        '--remove-stopwords'])
    if status != 0:
        msg = "MALLET import-file failed with exit code {0}.".format(status)
        raise RuntimeError(msg)
Calls MALLET's `import-file` method.
def add_teardown_callback(self, callback: Callable, pass_exception: bool = False) -> None:
    """Register a callback to run when this context closes.

    Callbacks run in reverse registration order; an awaitable return
    value is awaited before further callbacks run.

    :param callback: called with no arguments, or with the exception that
        ended this context when ``pass_exception`` is True
    :param pass_exception: pass the ending exception (or None) to the
        callback
    """
    assert check_argument_types()
    self._check_closed()
    self._teardown_callbacks.append((callback, pass_exception))
Add a callback to be called when this context closes. This is intended for cleanup of resources, and the list of callbacks is processed in the reverse order in which they were added, so the last added callback will be called first. The callback may return an awaitable. If it does, the awaitable is awaited on before calling any further callbacks. :param callback: a callable that is called with either no arguments or with the exception that ended this context, based on the value of ``pass_exception`` :param pass_exception: ``True`` to pass the callback the exception that ended this context (or ``None`` if the context ended cleanly)
def slot_availability_array(events, slots):
    """Return a numpy array mapping events (rows) to slots (columns).

    A cell is 0 when the event cannot be scheduled in that slot
    (slot unavailable or too short), 1 otherwise.
    """
    availability = np.ones((len(events), len(slots)))
    for ev_idx, event in enumerate(events):
        for slot_idx, slot in enumerate(slots):
            blocked = (slot in event.unavailability
                       or event.duration > slot.duration)
            if blocked:
                availability[ev_idx, slot_idx] = 0
    return availability
Return a numpy array mapping events to slots - Rows correspond to events - Columns correspond to slots Array has value 0 if the event cannot be scheduled in a given slot (1 otherwise)
def _split_explanation(explanation):
    """Return the individual display lines of an explanation.

    Splits on newlines followed by '{', '}', '~' or '>'; any other
    newline is escaped into a literal '\\n' on the previous line.
    """
    raw_lines = (explanation or u('')).split('\n')
    lines = [raw_lines[0]]
    for raw in raw_lines[1:]:
        if raw and raw[0] in ['{', '}', '~', '>']:
            lines.append(raw)
        else:
            lines[-1] += '\\n' + raw
    return lines
Return a list of individual lines in the explanation This will return a list of lines split on '\n{', '\n}' and '\n~'. Any other newlines will be escaped and appear in the line as the literal '\n' characters.
def iter_items(cls, repo, common_path=None, remote=None):
    """Iterate remote references, optionally constrained to ``remote``."""
    common_path = common_path or cls._common_path_default
    if remote is not None:
        common_path = join_path(common_path, str(remote))
    return super(RemoteReference, cls).iter_items(repo, common_path)
Iterate remote references, and if given, constrain them to the given remote
def _get_exe(prog):
    """Return the expected executable name for ``prog``, honoring any
    environment-variable override."""
    env_var = prog_to_env_var.get(prog)
    if env_var is not None and env_var in os.environ:
        return os.environ[env_var]
    return prog_to_default[prog]
Given a program name, return what we expect its exectuable to be called
def line(line_def, **kwargs):
    """Highlight '@'-prefixed characters in ``line_def``."""
    def bold_paren(match):
        return "(%s)" % ansi.aformat(match.group()[1:], attrs=["bold", ])
    return ansi.aformat(re.sub('@.?', bold_paren, line_def), **kwargs)
Highlights a character in the line
def battery_voltage(self):
    """Return the battery voltage in mV."""
    msb = self.bus.read_byte_data(AXP209_ADDRESS, BATTERY_VOLTAGE_MSB_REG)
    lsb = self.bus.read_byte_data(AXP209_ADDRESS, BATTERY_VOLTAGE_LSB_REG)
    # 12-bit reading: high 8 bits from MSB, low 4 bits from LSB.
    raw = (msb << 4) | (lsb & 0x0f)
    return raw * 1.1
Returns voltage in mV
def checkForeignKeys(self, engine: Engine) -> None:
    """Log any foreign keys without indexes (a performance issue)."""
    missing = sqlalchemy_utils.functions.non_indexed_foreign_keys(
        self._metadata, engine=engine)
    for table, keys in missing.items():
        for key in keys:
            logger.warning("Missing index on ForeignKey %s" % key.columns)
Check Foreign Keys Log any foreign keys that don't have indexes assigned to them. This is a performance issue.
def _assert_valid_permission(self, perm_str):
    """Raise a DataONE InvalidRequest if ``perm_str`` is not a valid
    permission."""
    if perm_str in ORDERED_PERM_LIST:
        return
    raise d1_common.types.exceptions.InvalidRequest(
        0,
        'Permission must be one of {}. perm_str="{}"'.format(
            ', '.join(ORDERED_PERM_LIST), perm_str
        ),
    )
Raise D1 exception if ``perm_str`` is not a valid permission.
def add_for_targets(self, targets, classpath_elements):
    """Add classpath path elements to the products of every given target."""
    for tgt in targets:
        self.add_for_target(tgt, classpath_elements)
Adds classpath path elements to the products of all the provided targets.
def target_sequence_length(self):
    """Get the length of the target (reference) sequence.

    Resolution order: the alignment's stored ``tlen``, then the SAM
    header's sequence lengths, then the attached reference sequence.

    :return: length of the target sequence
    :rtype: int
    :raises ValueError: if unaligned or no length source is available
    """
    if not self.is_aligned():
        # Fixed double-negative typo in the original message
        # ("is not not aligned").
        raise ValueError("no length for reference when read is not aligned")
    if self.entries.tlen:
        return self.entries.tlen
    if self.header:
        if self.entries.rname in self.header.sequence_lengths:
            return self.header.sequence_lengths[self.entries.rname]
    elif self.reference:
        return len(self.reference[self.entries.rname])
    else:
        raise ValueError("some reference needs to be set to go from psl to bam\n")
    raise ValueError("No reference available")
Get the length of the target sequence. length of the entire chromosome throws an error if there is no information available :return: length :rtype: int
def _get_instance(self, **kwargs):
    """Load the record named by ``kwargs['obj_id']`` and cache it on
    ``g._resource_instance``."""
    current_app.logger.info("Getting instance")
    current_app.logger.debug("kwargs: {}".format(kwargs))
    current_app.logger.info(
        "Loading instance: {}".format(kwargs['obj_id']))
    record = self.db_query.get_instance(self.db_collection, kwargs['obj_id'])
    g._resource_instance = record
    current_app.logger.debug(
        "g._resource_instance: {}".format(g._resource_instance))
    return record
Loads the record specified by the `obj_id` path in the url and stores it in g._resource_instance
def dumps(self, obj, *, max_nested_level=100):
    """Return a string with the JSON encoding of ``obj``.

    :param max_nested_level: maximum allowed recursion/nesting level
    """
    self._max_nested_level = max_nested_level
    return self._encode(obj)
Returns a string representing a JSON-encoding of ``obj``. The second optional ``max_nested_level`` argument controls the maximum allowed recursion/nesting level. See class description for details.
def search(self, query, results=10, suggestion=False):
    """Search for similar titles.

    :param query: page title to search for
    :param results: number of pages to return
    :param suggestion: also return the engine's suggestion
    :return: (results, suggestion) tuple when ``suggestion`` is True,
        otherwise just the list of results
    """
    self._check_query(query, "Query must be specified")
    params = {
        "list": "search",
        "srprop": "",
        "srlimit": results,
        "srsearch": query,
    }
    if suggestion:
        params["srinfo"] = "suggestion"
    raw = self.wiki_request(params)
    self._check_error_response(raw, query)
    titles = [entry["title"] for entry in raw["query"]["search"]]
    if not suggestion:
        return titles
    info = raw["query"].get("searchinfo")
    sug = info["suggestion"] if info else None
    return titles, sug
Search for similar titles Args: query (str): Page title results (int): Number of pages to return suggestion (bool): Use suggestion Returns: tuple or list: tuple (list results, suggestion) if \ suggestion is **True**; list of results \ otherwise
def run_rc_file(editor, rc_file):
    """Execute an rc file, calling its ``configure(editor)`` hook if any."""
    assert isinstance(editor, Editor)
    assert isinstance(rc_file, six.string_types)
    rc_path = os.path.expanduser(rc_file)
    if not os.path.exists(rc_path):
        print('Impossible to read %r' % rc_path)
        _press_enter_to_continue()
        return
    try:
        namespace = {}
        with open(rc_path, 'r') as handle:
            code = compile(handle.read(), rc_path, 'exec')
            six.exec_(code, namespace, namespace)
        if 'configure' in namespace:
            namespace['configure'](editor)
    except Exception:
        traceback.print_exc()
        _press_enter_to_continue()
Run rc file.
def validate_subfolders(filedir, metadata):
    """Check that the folders on disk and the metadata entries match.

    :param filedir: target directory to compare against the metadata
    :param metadata: mapping of expected subfolder names
    :return: True when the directory and metadata agree, else False
    """
    if not os.path.isdir(filedir):
        print("Error: " + filedir + " is not a directory")
        return False
    on_disk = os.listdir(filedir)
    for folder in on_disk:
        if folder not in metadata:
            print("Error: folder " + folder +
                  " present on disk but not in metadata")
            return False
    for folder in metadata:
        if folder not in on_disk:
            print("Error: folder " + folder +
                  " present in metadata but not on disk")
            return False
    return True
Check that all folders in the given directory have a corresponding entry in the metadata file, and vice versa. :param filedir: This field is the target directory from which to match metadata :param metadata: This field contains the metadata to be matched.
def dvdot(s1, s2):
    """Compute the derivative of the dot product of two double
    precision position vectors.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvdot_c.html

    :param s1: First state vector (6 floats) in the dot product.
    :param s2: Second state vector (6 floats) in the dot product.
    :return: The derivative of the dot product.
    :rtype: float
    """
    # `len(s1) is 6` compared identity against an int literal, which only
    # worked due to CPython's small-int caching; value equality is meant.
    assert len(s1) == 6 and len(s2) == 6
    s1 = stypes.toDoubleVector(s1)
    s2 = stypes.toDoubleVector(s2)
    return libspice.dvdot_c(s1, s2)
Compute the derivative of the dot product of two double precision position vectors. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dvdot_c.html :param s1: First state vector in the dot product. :type s1: 6-Element Array of floats :param s2: Second state vector in the dot product. :type s2: 6-Element Array of floats :return: The derivative of the dot product. :rtype: float
def op_list_apps(self):
    """Log and return the list of known applications.

    :rtype: list
    :return: list of applications
    """
    self.logger.info('Listing known applications ...')
    apps = self.get_apps()
    for app in apps:
        self.logger.info('Found `%s`' % app)
    # The original used `for ... else`; since the loop never breaks, the
    # else-branch always ran, so "No applications found" was logged even
    # when applications existed. Only log it for an actually empty list.
    if not apps:
        self.logger.info('\nDONE. No applications found in `%s` directory.\n' % APPS_DIRNAME)
    return apps
Prints out and returns a list of known applications. :rtype: list :return: list of applications
def encrypt_seal(self, data: Union[str, bytes]) -> bytes:
    """Seal *data* to the curve25519 form of this ed25519 public key.

    :param data: str or bytes payload to encrypt
    :return: sealed-box ciphertext
    """
    box_pk = libnacl.crypto_sign_ed25519_pk_to_curve25519(self.pk)
    payload = ensure_bytes(data)
    return libnacl.crypto_box_seal(payload, box_pk)
Encrypt data with a curve25519 version of the ed25519 public key :param data: Bytes data to encrypt
def avatar(self, blogname, size=64):
    """Retrieve the avatar information for a blog.

    :param blogname: the blog whose avatar is requested
    :param size: pixel size of the avatar (default 64)
    :returns: dict created from the JSON API response
    """
    endpoint = "/v2/blog/%s/avatar/%s" % (blogname, size)
    return self.send_api_request("get", endpoint)
Retrieves the url of the blog's avatar :param blogname: a string, the blog you want the avatar for :returns: A dict created from the JSON response
def tee_log(tee_file: TextIO, loglevel: int) -> None:
    """Context manager to mirror logging output into *tee_file*.

    Args:
        tee_file: file-like object to write to
        loglevel: log level (e.g. ``logging.DEBUG``) for the tee handler

    NOTE(review): the body uses ``yield``, so this is a generator —
    presumably wrapped with ``@contextlib.contextmanager`` at the
    definition site outside this view; confirm there.
    """
    # Route all root-logger records into tee_file via its own handler.
    handler = get_monochrome_handler(stream=tee_file)
    handler.setLevel(loglevel)
    rootlogger = logging.getLogger()
    rootlogger.addHandler(handler)
    # Also duplicate raw stdout/stderr writes into the same file.
    with TeeContextManager(tee_file, capture_stdout=True):
        with TeeContextManager(tee_file, capture_stderr=True):
            try:
                yield
            except Exception:
                # Log the full traceback (so it lands in the tee file
                # too) before re-raising to the caller.
                exc_type, exc_value, exc_traceback = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
                log.critical("\n" + "".join(lines))
                raise
Context manager to add a file output stream to our logging system. Args: tee_file: file-like object to write to loglevel: log level (e.g. ``logging.DEBUG``) to use for this stream
def cfg_from_file(self, yaml_filename, config_dict):
    """Load a YAML config file and merge it into the default options.

    :param yaml_filename: path of the YAML file to read
    :param config_dict: default options the file's values are merged into
    :return: the merged configuration
    """
    import yaml
    from easydict import EasyDict as edict
    with open(yaml_filename, 'r') as f:
        # yaml.load() without an explicit Loader is deprecated and can
        # construct arbitrary Python objects; safe_load restricts input
        # to plain data, which is all a config file needs.
        yaml_cfg = edict(yaml.safe_load(f))
    return self._merge_a_into_b(yaml_cfg, config_dict)
Load a config file and merge it into the default options.
def incrementor(start=0, step=1):
    """Build a counting callable.

    The returned function ignores its single argument; the first call
    returns *start*, and each later call returns the previous value
    plus *step*.
    """
    def step_fn(_):
        nonlocal start
        current = start
        start += step
        return current
    return step_fn
Returns a function that first returns the start value, and returns previous value + step on each subsequent call.
def get_available_fcp(self):
    """Return the IDs of all FCPs that are free and not reserved."""
    return [row[0] for row in self.db.get_all_free_unreserved()]
Get all the FCPs that are not reserved.
def wrap_function_name(self, name):
    """Split a long function name over multiple lines.

    Names longer than 32 characters are re-flowed with textwrap; every
    name then has argument lists compacted ("," instead of ", ") and
    nested template brackets closed ("> >" becomes ">>").
    """
    if len(name) > 32:
        ratio = 2.0/3.0
        height = max(int(len(name)/(1.0 - ratio) + 0.5), 1)
        width = max(len(name)/height, 32)
        name = textwrap.fill(name, width, break_long_words=False)
    name = name.replace(", ", ",")
    # Applied twice: the first pass can create fresh "> >" runs.
    name = name.replace("> >", ">>")
    name = name.replace("> >", ">>")
    return name
Split the function name on multiple lines.
def select_random(ports=None, exclude_ports=None):
    """Return a random unused port number.

    :param ports: candidate port set; defaults to available_good_ports()
    :param exclude_ports: ports removed from the candidates
    :raises PortForException: when no free port is found in the sample
    """
    if ports is None:
        ports = available_good_ports()
    if exclude_ports is None:
        exclude_ports = set()
    ports.difference_update(set(exclude_ports))
    # random.sample() no longer accepts a set (deprecated in 3.9,
    # TypeError since 3.11) — sample from a concrete sequence instead.
    for port in random.sample(list(ports), min(len(ports), 100)):
        if not port_is_used(port):
            return port
    raise PortForException("Can't select a port")
Returns random unused port number.
def read_chunks(stream, block_size=2**10):
    """Yield successive blocks of at most *block_size* from *stream*
    until the stream is exhausted.
    """
    chunk = stream.read(block_size)
    while chunk:
        yield chunk
        chunk = stream.read(block_size)
Given a byte stream with reader, yield chunks of block_size until the stream is consumed.
def database_all(self):
    """Return a dict mapping each engine to its databases."""
    return {engine: self._database_all(engine) for engine in self.engines()}
Return a dictionary mapping engines with databases
def selected_band(self):
    """Return the data stored on the currently selected band item.

    :returns: the UserRole payload of the selected list item.
    """
    current = self.lstBands.currentItem()
    return current.data(QtCore.Qt.UserRole)
Obtain the band selected by the user.

:returns: data of the currently selected band item, or None.
def _process_exlist(self, exc, raised):
    """Normalize an exception-list entry, stripping its ``*`` marker.

    Non-raised entries are always kept; raised entries are kept only
    when they carry the trailing ``*``. Filtered entries yield None.
    """
    starred = exc.endswith("*")
    if raised and not starred:
        return None
    return exc[:-1] if starred else exc
Remove raised info from exception message and create separate list for it.
def reference_links(doc):
    """Yield (asset_id_type, link-record) pairs for an active organisation.

    Documents that are not organisations, or that are deactivated,
    produce nothing.
    """
    if doc.get('type') != 'organisation' or doc.get('state') == 'deactivated':
        return
    links = doc.get('reference_links', {}).get('links', {})
    for asset_id_type, link in links.items():
        yield asset_id_type, {
            'organisation_id': doc['_id'],
            'link': link,
        }
Get reference links
def extract_source_params(src):
    """Extract BASE_PARAMS values from a source node.

    Each parameter is looked up first in the node's XML attributes and
    then among its child tags. Values of type "f" are coerced to float,
    "c" kept as text; any other type, or a missing key, records None.
    """
    tags = get_taglist(src)
    params = {}
    for key, param, vtype in BASE_PARAMS:
        if key in src.attrib:
            raw = src.attrib[key]
        elif key in tags:
            raw = src.nodes[tags.index(key)].text
        else:
            params[param] = None
            continue
        if vtype == "c":
            params[param] = raw
        elif vtype == "f":
            params[param] = float(raw)
        else:
            params[param] = None
    return params
Extract params from source object.
def opt_allow_select_scan(self, allow):
    """Set the allow_select_scan option from its string spelling.

    Case-insensitive true values: "true", "t", "yes", "y"; anything
    else disables the option.
    """
    enabled = allow.lower() in ("true", "t", "yes", "y")
    self.conf["allow_select_scan"] = enabled
    self.engine.allow_select_scan = enabled
Set option allow_select_scan
def _rsadp(self, c):
    """Raw RSA decryption primitive RSADP (PKCS#1 v2.1, RFC 3447 5.1.2):
    modular exponentiation of the ciphertext representative.

    :param c: ciphertext representative, a long in [0, n-1] where n is
        the key modulus.
    :return: message representative as a long in [0, n-1], or None when
        the input is out of range.

    Not intended to be used directly; see decrypt().
    """
    n = self.modulus
    if type(c) is int:
        c = long(c)
    if type(c) is not long or c > n-1:
        # Fixed: the warning previously named _rsaep(), the *encryption*
        # primitive, although this is the decryption primitive.
        warning("Key._rsadp() expects a long between 0 and n-1")
        return None
    return self.key.decrypt(c)
Internal method providing raw RSA decryption, i.e. simple modular exponentiation of the given ciphertext representative 'c', a long between 0 and n-1.

This is the decryption primitive RSADP described in PKCS#1 v2.1, i.e. RFC 3447 Sect. 5.1.2.

Input:
    c: ciphertext representative, a long between 0 and n-1, where n is the key modulus.

Output:
    message representative, a long between 0 and n-1

Not intended to be used directly. Please, see decrypt() method.
def _preconditions_snapshots_postconditions(checker: Callable) -> _PrePostSnaps:
    """Collect the contract metadata attached to a checker function.

    Reads ``__preconditions__``, ``__postcondition_snapshots__`` and
    ``__postconditions__``, sanity-checks their element types, and
    drops empty precondition groups.
    """
    precondition_groups = getattr(checker, "__preconditions__", [])
    assert all(isinstance(group, list) for group in precondition_groups)
    assert all(
        isinstance(contract, icontract._Contract)
        for group in precondition_groups
        for contract in group)
    precondition_groups = [group for group in precondition_groups if group]

    snapshots = getattr(checker, "__postcondition_snapshots__", [])
    assert all(isinstance(snapshot, icontract._Snapshot) for snapshot in snapshots)

    postconditions = getattr(checker, "__postconditions__", [])
    assert all(isinstance(contract, icontract._Contract) for contract in postconditions)

    return _PrePostSnaps(
        preconditions=precondition_groups,
        snapshots=snapshots,
        postconditions=postconditions)
Collect the preconditions, snapshots and postconditions from a contract checker of a function.
def monitored_resource_descriptor_path(cls, project, monitored_resource_descriptor):
    """Return a fully-qualified monitored_resource_descriptor resource name."""
    template = (
        "projects/{project}/monitoredResourceDescriptors/"
        "{monitored_resource_descriptor}"
    )
    return google.api_core.path_template.expand(
        template,
        project=project,
        monitored_resource_descriptor=monitored_resource_descriptor,
    )
Return a fully-qualified monitored_resource_descriptor string.
def delete_feature(ctx, dataset, fid):
    """Delete feature *fid* from *dataset* via the datasets service.

    Raises MapboxCLIException unless the API answers 204 No Content.
    """
    service = ctx.obj.get('service')
    response = service.delete_feature(dataset, fid)
    if response.status_code != 204:
        raise MapboxCLIException(response.text.strip())
Delete a feature. $ mapbox datasets delete-feature dataset-id feature-id All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`.
def refresh_state_in_ec(self, ec_index):
    """Get the up-to-date state of the component in an execution context.

    Queries the context directly rather than using the cached value
    (this may be slow for a remote component) and refreshes the cache.

    @param ec_index Index into the combined list of contexts: owned
        contexts first, then participating contexts.
    @raises BadECIndexError if the index is beyond both lists.
    @return The freshly-read state.
    """
    with self._mutex:
        if ec_index >= len(self.owned_ecs):
            # Index is past the owned contexts: re-base it into the
            # participating-contexts list.
            ec_index -= len(self.owned_ecs)
            if ec_index >= len(self.participating_ecs):
                raise exceptions.BadECIndexError(ec_index)
            state = self._get_ec_state(self.participating_ecs[ec_index])
            self.participating_ec_states[ec_index] = state
        else:
            state = self._get_ec_state(self.owned_ecs[ec_index])
            self.owned_ec_states[ec_index] = state
        return state
Get the up-to-date state of the component in an execution context. This function will update the state, rather than using the cached value. This may take time, if the component is executing on a remote node. @param ec_index The index of the execution context to check the state in. This index is into the total array of contexts, that is both owned and participating contexts. If the value of ec_index is greater than the length of @ref owned_ecs, that length is subtracted from ec_index and the result used as an index into @ref participating_ecs.
def _build_split_filenames(self, split_info_list):
    """Construct the on-disk filenames for the given splits.

    :param split_info_list: list of SplitInfo to generate names for
    :return: flat list of file paths, one per shard of each split
    """
    return [
        filepath
        for split_info in split_info_list
        for filepath in naming.filepaths_for_dataset_split(
            dataset_name=self.name,
            split=split_info.name,
            num_shards=split_info.num_shards,
            data_dir=self._data_dir,
            filetype_suffix=self._file_format_adapter.filetype_suffix,
        )
    ]
Construct the split filenames associated with the split info. The filenames correspond to the pre-processed datasets files present in the root directory of the dataset. Args: split_info_list: (list[SplitInfo]) List of split from which generate the filenames Returns: filenames: (list[str]) The list of filenames path corresponding to the split info object
def on(self, message, namespace=None):
    """Decorator registering a handler for a SocketIO event.

    :param message: event name ('message', 'json', 'connect',
        'disconnect', or any custom event string).
    :param namespace: namespace to register under; defaults to the
        global namespace '/'.
    """
    namespace = namespace or '/'

    def decorator(handler):
        def dispatch(sid, *args):
            return self._handle_event(handler, message, namespace, sid, *args)
        if self.server:
            self.server.on(message, dispatch, namespace=namespace)
        else:
            # Server not created yet: queue the registration.
            self.handlers.append((message, dispatch, namespace))
        return handler
    return decorator
Decorator to register a SocketIO event handler. This decorator must be applied to SocketIO event handlers. Example:: @socketio.on('my event', namespace='/chat') def handle_my_custom_event(json): print('received json: ' + str(json)) :param message: The name of the event. This is normally a user defined string, but a few event names are already defined. Use ``'message'`` to define a handler that takes a string payload, ``'json'`` to define a handler that takes a JSON blob payload, ``'connect'`` or ``'disconnect'`` to create handlers for connection and disconnection events. :param namespace: The namespace on which the handler is to be registered. Defaults to the global namespace.
def label(self, nid, id_if_null=False):
    """Fetch the label for a node.

    Arguments
    ---------
    nid : str
        Node identifier for the entity to be queried
    id_if_null : bool
        If True, return nid itself when no label exists

    Return
    ------
    str or None
    """
    g = self.get_graph()
    if nid in g:
        node_data = g.node[nid]
        if 'label' in node_data:
            return node_data['label']
    return nid if id_if_null else None
Fetches label for a node Arguments --------- nid : str Node identifier for entity to be queried id_if_null : bool If True and node has no label return id as label Return ------ str
def saveFile(self):
    """Handle the Save menu action.

    Shows a save-file dialog, forces an ``.ag`` extension on the chosen
    path, and persists the current Androguard session there.
    """
    filepath, _ = QtWidgets.QFileDialog.getSaveFileName(
        self, "Save File", '', "Androguard Session (*.ag)")
    if filepath:
        # Session files always carry the .ag extension.
        if not filepath.endswith(".ag"):
            filepath = "{}.ag".format(filepath)
        self.showStatus("Saving %s..." % str(filepath))
        self.saveSession(filepath)
        self.showStatus("Saved Session to %s!" % str(filepath))
User clicked Save menu. Display a dialog to ask where to save.
def generate_grid_coords(gx, gy):
    r"""Calculate x,y coordinates of each grid cell.

    Parameters
    ----------
    gx: numeric
        x coordinates in meshgrid
    gy: numeric
        y coordinates in meshgrid

    Returns
    -------
    (N, 2) ndarray
        One (x, y) row per grid cell, in ravel order.
    """
    # A stray bare `r` token (presumably the orphaned prefix of the raw
    # docstring) was left in the body; it would raise NameError at call
    # time and has been removed.
    return np.vstack([gx.ravel(), gy.ravel()]).T
Calculate x,y coordinates of each grid cell.

Parameters
----------
gx: numeric
    x coordinates in meshgrid
gy: numeric
    y coordinates in meshgrid

Returns
-------
(X, Y) ndarray
    List of coordinates in meshgrid
def set_exception(self, exception):
    """Set the exception of a ``Future``.

    Forwards to set_exc_info, preserving the exception's attached
    ``__traceback__`` when present.
    """
    tb = getattr(exception, '__traceback__', None)
    self.set_exc_info((exception.__class__, exception, tb))
Sets the exception of a ``Future.``
def assign_license(license_key, license_name, entity, entity_display_name,
                   safety_checks=True, service_instance=None):
    """Assign a vSphere license to an entity.

    license_key
        Key of the license to assign.
    license_name
        Display name of the license.
    entity
        Dictionary representation of the entity (see ``_get_entity``).
    entity_display_name
        Entity name used in logging.
    safety_checks
        When True, verify the license key exists before assigning.
    service_instance
        vim.ServiceInstance of the vCenter/ESXi host.
    """
    log.trace('Assigning license %s to entity %s', license_key, entity)
    _validate_entity(entity)
    if safety_checks:
        # Fail early with a clear error if the key is unknown to the host.
        licenses = salt.utils.vmware.get_licenses(service_instance)
        if not [l for l in licenses if l.licenseKey == license_key]:
            raise VMwareObjectRetrievalError('License \'{0}\' wasn\'t found'
                                             ''.format(license_name))
    salt.utils.vmware.assign_license(
        service_instance,
        license_key,
        license_name,
        entity_ref=_get_entity(service_instance, entity),
        entity_name=entity_display_name)
Assigns a license to an entity license_key Key of the license to assign See ``_get_entity`` docstrings for format. license_name Display name of license entity Dictionary representation of an entity entity_display_name Entity name used in logging safety_checks Specify whether to perform safety check or to skip the checks and try performing the required task. Default is False. service_instance Service instance (vim.ServiceInstance) of the vCenter/ESXi host. Default is None. .. code-block:: bash salt '*' vsphere.assign_license license_key=00000:00000 license name=test entity={type:cluster,datacenter:dc,cluster:cl}
def parse_yaml_config(args):
    """Parse the yaml config file named by ``args.coveralls_yaml``.

    :return: parsed config as a dict; empty dict when the file is
        missing or empty.
    :raises SystemExit: when the file exists but PyYAML is unavailable.
    """
    try:
        import yaml
    except ImportError:
        yaml = None

    yml = {}
    try:
        with open(args.coveralls_yaml, 'r') as fp:
            if not yaml:
                raise SystemExit('PyYAML is required for parsing configuration')
            # safe_load: yaml.load() without a Loader is deprecated and
            # can construct arbitrary objects from untrusted input.
            yml = yaml.safe_load(fp)
    except IOError:
        # Missing/unreadable config file simply means "no config".
        pass
    return yml or {}
Parse yaml config
def filter(self, condition):
    """Filter rows using the given condition (:func:`where` is an alias).

    :param condition: a :class:`Column` of BooleanType or a string of
        SQL expression.

    >>> df.filter(df.age > 3).collect()
    [Row(age=5, name=u'Bob')]
    >>> df.filter("age > 3").collect()
    [Row(age=5, name=u'Bob')]
    """
    if isinstance(condition, basestring):
        filtered = self._jdf.filter(condition)
    elif isinstance(condition, Column):
        filtered = self._jdf.filter(condition._jc)
    else:
        raise TypeError("condition should be string or Column")
    return DataFrame(filtered, self.sql_ctx)
Filters rows using the given condition. :func:`where` is an alias for :func:`filter`. :param condition: a :class:`Column` of :class:`types.BooleanType` or a string of SQL expression. >>> df.filter(df.age > 3).collect() [Row(age=5, name=u'Bob')] >>> df.where(df.age == 2).collect() [Row(age=2, name=u'Alice')] >>> df.filter("age > 3").collect() [Row(age=5, name=u'Bob')] >>> df.where("age = 2").collect() [Row(age=2, name=u'Alice')]
def remove_peer(self, peer):
    """Remove *peer* from the heap and return it.

    Raises IndexError when the peer's index lies outside the heap, and
    asserts that the indexed slot actually holds this peer.
    """
    index = peer.index
    if not 0 <= index < self.size():
        raise IndexError('Peer index is out of range')
    assert peer is self.peers[index], "peer is not in the heap"
    return heap.remove(self, index)
Remove the peer from the heap. Return: removed peer if peer exists. If peer's index is out of range, raise IndexError.
def preprocess_model(model, rewrap=True, **kwargs):
    """Preprocess a MiniZinc model.

    Resolves the model template using the keyword arguments (merged
    with the ``args`` entry of the global config, which takes
    precedence) and optionally rewraps the result, stripping leading
    spaces while preserving indentation.

    Parameters
    ----------
    model : str
        The minizinc model (content of a ``.mzn`` file).
    rewrap : bool
        Whether to rewrap the model. Default ``True``.

    Returns
    -------
    str
        The preprocessed model.
    """
    template_args = dict(kwargs)
    template_args.update(config.get('args', {}))
    processed = _process_template(model, **template_args)
    if rewrap:
        processed = rewrap_model(processed)
    return processed
Preprocess a MiniZinc model. This function takes care of preprocessing the model by resolving the template using the arguments passed as keyword arguments to this function. Optionally, this function can also "rewrap" the model, deleting spaces at the beginning of the lines while preserving indentation. Parameters ---------- model : str The minizinc model (i.e. the content of a ``.mzn`` file). rewrap : bool Whether to "rewrap" the model, i.e. to delete leading spaces, while preserving indentation. Default is ``True``. **kwargs Additional arguments to pass to the template engine. Returns ------- str The preprocessed model.
def run(self, command):
    """Run *command* as if typed on the command line.

    :param command: a whitespace-separated string or a sequence of
        arguments; always forwarded to omero_cli as a fresh list.
    """
    if isinstance(command, basestring):
        argv = command.split()
    else:
        argv = list(command)
    self.external.omero_cli(argv)
Runs a command as if from the command-line without the need for using popen or subprocess
def from_http(
    cls,
    raw_body: MutableMapping,
    verification_token: Optional[str] = None,
    team_id: Optional[str] = None,
) -> "Event":
    """Create an event from a decoded HTTP Event API body.

    Message-type events are returned as :class:`slack.events.Message`,
    everything else as :class:`slack.events.Event`.

    Args:
        raw_body: Decoded body of the Event API request
        verification_token: Slack verification token to check against
        team_id: Workspace id to check against

    Raises:
        slack.exceptions.FailedVerification: on token or team mismatch.
    """
    token_mismatch = verification_token and raw_body["token"] != verification_token
    team_mismatch = team_id and raw_body["team_id"] != team_id
    if token_mismatch or team_mismatch:
        raise exceptions.FailedVerification(raw_body["token"], raw_body["team_id"])

    payload = raw_body["event"]
    if payload["type"].startswith("message"):
        return Message(payload, metadata=raw_body)
    return Event(payload, metadata=raw_body)
Create an event with data coming from the HTTP Event API. If the event type is a message a :class:`slack.events.Message` is returned. Args: raw_body: Decoded body of the Event API request verification_token: Slack verification token used to verify the request came from slack team_id: Verify the event is for the correct team Returns: :class:`slack.events.Event` or :class:`slack.events.Message` Raises: :class:`slack.exceptions.FailedVerification`: when `verification_token` or `team_id` does not match the incoming event's.
def which(cwd=None):
    """Detect which version control system contains the cwd directory.

    Scans this module for ``using_*`` predicate functions; the first
    that matches yields the VersionControl subclass. Raises
    NotImplementedError when none match.
    """
    if cwd is None:
        cwd = os.getcwd()
    for name, probe in globals().items():
        if name.startswith('using_') and probe(cwd=cwd):
            return VersionControl.from_string(name[6:])
    raise NotImplementedError("Unknown version control system, "
                              "or you're not in the project directory.")
Try to find which version control system contains the cwd directory. Returns the VersionControl superclass e.g. Git, if none were found this will raise a NotImplementedError.
def __get_query_range(cls, date_field, start=None, end=None):
    """Build a range-filter dict on *date_field* between two dates.

    :param date_field: field holding the date value
    :param start: datetime lower bound (``gte``), optional
    :param end: datetime upper bound (``lte``), optional
    :return: '' when both bounds are missing; otherwise a dict usable
        as an es_dsl Search ``filter`` range clause.
    """
    if not start and not end:
        return ''
    bounds = {}
    if start:
        bounds["gte"] = "%s" % start.isoformat()
    if end:
        bounds["lte"] = "%s" % end.isoformat()
    return {date_field: bounds}
Create a filter dict with date_field from start to end dates. :param date_field: field with the date value :param start: date with the from value. Should be a datetime.datetime object of the form: datetime.datetime(2018, 5, 25, 15, 17, 39) :param end: date with the to value. Should be a datetime.datetime object of the form: datetime.datetime(2018, 5, 25, 15, 17, 39) :return: a dict containing a range filter which can be used later in an es_dsl Search object using the `filter` method.