def blob_counter(self):
    import aa  # noqa: F401 -- imported for its side effect of loading the ROOT dictionaries
    from ROOT import EventFile
    try:
        event_file = EventFile(self.filename)
    except Exception:
        raise SystemExit("Could not open file")
    num_blobs = 0
    for event in event_file:
        num_blobs += 1
    return num_blobs
Count the blobs in the file.
def getOriginLocalizedName(self, origin, pchNameArray, unNameArraySize, unStringSectionsToInclude):
    fn = self.function_table.getOriginLocalizedName
    result = fn(origin, pchNameArray, unNameArraySize, unStringSectionsToInclude)
    return result
Retrieves the name of the origin in the current language. unStringSectionsToInclude is a bitfield of values in EVRInputStringBits that allows the application to specify which parts of the origin's information it wants a string for.
def styled_plot(*style_sheets):
    def decorator(get_plot):
        def wrapper(*args, fonts=None, style=None, no_base_style=False, **kwargs):
            if no_base_style:
                list_style = []
            else:
                list_style = list(style_sheets)
            if style is not None:
                if isinstance(style, list):
                    list_style += style
                else:
                    list_style += [style]
            if fonts is not None:
                list_style += [{'font.family': 'sans-serif', 'font.sans-serif': fonts}]
            matplotlib.pyplot.style.use(list_style)
            return get_plot(*args, **kwargs)
        return wrapper
    return decorator
Return a decorator that will apply matplotlib style sheets to a plot. ``style_sheets`` is a base set of styles, which will be ignored if ``no_base_style`` is set in the decorated function arguments. The style will further be overwritten by any styles in the ``style`` optional argument of the decorated function. Args: style_sheets (:obj:`list`, :obj:`str`, or :obj:`dict`): Any matplotlib supported definition of a style sheet. Can be a list of style sheets.
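A usage sketch of the decorator (the plot function, data, and style names here are hypothetical; the module is assumed to import matplotlib.pyplot):

import matplotlib.pyplot

@styled_plot('seaborn-whitegrid')  # hypothetical base style sheet
def plot_spectrum(data):
    fig, ax = matplotlib.pyplot.subplots()
    ax.plot(data)
    return fig

# Callers can layer extra styles on top of the base set, or opt out of it:
fig = plot_spectrum([1, 4, 2], style={'lines.linewidth': 2}, fonts=['Helvetica'])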
def getpart(self, ix):
    if self.offsets[ix] == 0:
        return
    comp, ofs, size, checksum = self.getsectioninfo(ix)
    fh = FileSection(self.fh, ofs, ofs + size)
    if comp == 2:
        import zlib
        wbits = -15 if self.magic == 'IDA0' else 15
        fh = makeStringIO(zlib.decompress(fh.read(size), wbits))
    elif comp == 0:
        pass
    else:
        raise Exception("unsupported section encoding: %02x" % comp)
    return fh
Returns a fileobject for the specified section. This method optionally decompresses the data found in the .idb file, and returns a file-like object, with seek, read, tell.
def text2html_table(items:Collection[Collection[str]])->str:
    "Put the texts in `items` in an HTML table, `widths` are the widths of the columns in %."
    html_code = f"<table>"
    html_code += f"  <thead>\n    <tr>\n"
    for i in items[0]:
        html_code += f"      <th>{_treat_html(i)}</th>"
    html_code += f"    </tr>\n  </thead>\n  <tbody>"
    for line in items[1:]:
        html_code += "    <tr>"
        for i in line:
            html_code += f"      <td>{_treat_html(i)}</td>"
        html_code += "    </tr>"
    html_code += "  </tbody>\n</table>"
    return html_code
Put the texts in `items` in an HTML table, `widths` are the widths of the columns in %.
def parse_url(arg, extract, key=None):
    return ops.ParseURL(arg, extract, key).to_expr()
Returns the portion of a URL corresponding to a part specified by 'extract' Can optionally specify a key to retrieve an associated value if extract parameter is 'QUERY' Parameters ---------- extract : one of {'PROTOCOL', 'HOST', 'PATH', 'REF', 'AUTHORITY', 'FILE', 'USERINFO', 'QUERY'} key : string (optional) Examples -------- >>> url = "https://www.youtube.com/watch?v=kEuEcWfewf8&t=10" >>> parse_url(url, 'QUERY', 'v') # doctest: +SKIP 'kEuEcWfewf8' Returns ------- extracted : string
def add_to_cart(item_id):
    cart = Cart(session['cart'])
    if cart.change_item(item_id, 'add'):
        session['cart'] = cart.to_dict()
    return list_products()
Add a product to the cart.
def apply(self, p_todolist, p_archive):
    if self.todolist and p_todolist:
        p_todolist.replace(self.todolist.todos())
    if self.archive and p_archive:
        p_archive.replace(self.archive.todos())
Applies the backup to the supplied p_todolist and p_archive.
def addFilter(self, filterclass):
    if filterclass not in self.filters:
        self.filters.append(filterclass)
Add a filter class to the parser.
def micros_to_timestamp(micros, timestamp):
    seconds = long(micros / _MICROS_PER_SECOND)
    micro_remainder = micros % _MICROS_PER_SECOND
    timestamp.seconds = seconds
    timestamp.nanos = micro_remainder * _NANOS_PER_MICRO
Convert microseconds from utc epoch to google.protobuf.timestamp. Args: micros: a long, number of microseconds since utc epoch. timestamp: a google.protobuf.timestamp.Timestamp to populate.
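A short usage sketch (Python 2, since the code uses long), assuming the module constants hold the usual values:

from google.protobuf.timestamp_pb2 import Timestamp

_MICROS_PER_SECOND = 1000000
_NANOS_PER_MICRO = 1000

ts = Timestamp()
micros_to_timestamp(1500000, ts)  # 1.5 s after the UTC epoch
# ts.seconds == 1, ts.nanos == 500000000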
def parse(cls, filepath, filecontent, parser):
    try:
        objects = parser.parse(filepath, filecontent)
    except Exception as e:
        raise MappingError('Failed to parse {}:\n{}'.format(filepath, e))
    objects_by_name = {}
    for obj in objects:
        if not Serializable.is_serializable(obj):
            raise UnaddressableObjectError('Parsed a non-serializable object: {!r}'.format(obj))
        attributes = obj._asdict()
        name = attributes.get('name')
        if not name:
            raise UnaddressableObjectError('Parsed a non-addressable object: {!r}'.format(obj))
        if name in objects_by_name:
            raise DuplicateNameError('An object already exists at {!r} with name {!r}: {!r}. Cannot '
                                     'map {!r}'.format(filepath, name, objects_by_name[name], obj))
        objects_by_name[name] = obj
    return cls(filepath, OrderedDict(sorted(objects_by_name.items())))
Parses a source for addressable Serializable objects. No matter the parser used, the parsed and mapped addressable objects are all 'thin'; ie: any objects they point to in other namespaces or even in the same namespace but from a separate source are left as unresolved pointers. :param string filepath: The path to the byte source containing serialized objects. :param string filecontent: The content of byte source containing serialized objects to be parsed. :param symbol_table: The symbol table cls to expose a symbol table dict. :type symbol_table: Instance of :class:`pants.engine.parser.SymbolTable`. :param parser: The parser cls to use. :type parser: A :class:`pants.engine.parser.Parser`.
def get_pulls_list(project, auth=False, **params):
    params.setdefault("state", "closed")
    url = "https://api.github.com/repos/{project}/pulls".format(project=project)
    if auth:
        headers = make_auth_header()
    else:
        headers = None
    pages = get_paged_request(url, headers=headers, **params)
    return pages
get pull request list
def valid_words_set(path_to_user_dictionary=None, user_dictionary_words=None):
    def read_file(binary_file):
        return binary_file.read().decode("ascii").splitlines()

    try:
        valid = _valid_words_cache[path_to_user_dictionary]
        return valid
    except KeyError:
        words = set()
        with resource_stream("polysquarelinter", "en_US.txt") as words_file:
            words |= set(["".join(l).lower() for l in read_file(words_file)])
        if path_to_user_dictionary:
            words |= set([w.lower() for w in user_dictionary_words])
            words |= user_dictionary_words
        _valid_words_cache[path_to_user_dictionary] = words
    return words
Get a set of valid words. If :path_to_user_dictionary: is specified, then the newline-separated words in that file will be added to the word set.
def create_or_update_group_alias(self, name, alias_id=None, mount_accessor=None, canonical_id=None,
                                 mount_point=DEFAULT_MOUNT_POINT):
    params = {
        'name': name,
        'mount_accessor': mount_accessor,
        'canonical_id': canonical_id,
    }
    if alias_id is not None:
        params['id'] = alias_id
    api_path = '/v1/{mount_point}/group-alias'.format(mount_point=mount_point)
    response = self._adapter.post(
        url=api_path,
        json=params,
    )
    return response.json()
Create or update a group alias. Supported methods: POST: /{mount_point}/group-alias. Produces: 200 application/json :param alias_id: ID of the group alias. If set, updates the corresponding existing group alias. :type alias_id: str | unicode :param name: Name of the group alias. :type name: str | unicode :param mount_accessor: Mount accessor to which this alias belongs to :type mount_accessor: str | unicode :param canonical_id: ID of the group to which this is an alias. :type canonical_id: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: requests.Response
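A usage sketch, assuming this method is exposed on hvac's identity interface (the URL, token, accessor, and group ID below are placeholders):

import hvac

client = hvac.Client(url='https://vault.example.com:8200', token='...')
response = client.secrets.identity.create_or_update_group_alias(
    name='ldap-admins',
    mount_accessor='auth_ldap_12345678',  # placeholder mount accessor
    canonical_id='group-uuid',            # placeholder group ID
)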
def rewrite_elife_references_json(json_content, doi):
    references_rewrite_json = elife_references_rewrite_json()
    if doi in references_rewrite_json:
        json_content = rewrite_references_json(json_content, references_rewrite_json[doi])
    if doi == "10.7554/eLife.12125":
        for i, ref in enumerate(json_content):
            if ref.get("id") and ref.get("id") == "bib11":
                del json_content[i]
    return json_content
This does the work of rewriting eLife references JSON.
def main():
    with open(FILENAME, "r") as file_obj:
        contents = file_obj.read()
    desired = get_desired()
    if contents == EXPECTED:
        with open(FILENAME, "w") as file_obj:
            file_obj.write(desired)
    elif contents != desired:
        raise ValueError("Unexpected contents", contents, "Expected", EXPECTED)
Main entry point to replace autogenerated contents. Raises: ValueError: If the file doesn't contain the expected or desired contents.
def environment_session_path(cls, project, environment, user, session):
    return google.api_core.path_template.expand(
        'projects/{project}/agent/environments/{environment}/users/{user}/sessions/{session}',
        project=project,
        environment=environment,
        user=user,
        session=session,
    )
Return a fully-qualified environment_session string.
def sas_logical_interconnects(self):
    if not self.__sas_logical_interconnects:
        self.__sas_logical_interconnects = SasLogicalInterconnects(self.__connection)
    return self.__sas_logical_interconnects
Gets the SasLogicalInterconnects API client. Returns: SasLogicalInterconnects: The SasLogicalInterconnects API client.
def setTimeout(self, time):
    self.conversation.SetDDETimeout(round(time))
    return self.conversation.GetDDETimeout()
Set global timeout value, in seconds, for all DDE calls
def querying_context(self, packet_type):
    if self.set_state(tds_base.TDS_QUERYING) != tds_base.TDS_QUERYING:
        raise tds_base.Error("Couldn't switch to state")
    self._writer.begin_packet(packet_type)
    try:
        yield
    except:
        if self.state != tds_base.TDS_DEAD:
            self.set_state(tds_base.TDS_IDLE)
        raise
    else:
        self.set_state(tds_base.TDS_PENDING)
        self._writer.flush()
Context manager for querying. Sets state to TDS_QUERYING, and reverts it to TDS_IDLE if exception happens inside managed block, and to TDS_PENDING if managed block succeeds and flushes buffer.
def predict(self, features):
    distances = [self._distance(x) for x in features]
    class_predict = [np.argmin(d) for d in distances]
    return self.le.inverse_transform(class_predict)
Predict class outputs for an unlabelled feature set
def copy_out(source: Iterable[bytes], dest: io.BytesIO, use_placeholders: bool = False):
    for line in source:
        if use_placeholders:
            if not line.strip():
                continue
            if line.startswith(PLACEHOLDER):
                line = b"\n"
        dest.write(line)
Copy lines from source to destination. :param source: Source line iterable. :param dest: Destination open file. :param use_placeholders: When true, convert lines containing placeholders to empty lines and drop true empty lines (assumed to be spuriously generated).
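A small sketch of how this might be driven; PLACEHOLDER is whatever byte prefix the module defines (b'##' is an assumption here):

import io

PLACEHOLDER = b'##'  # assumed placeholder prefix

src = [b'keep this\n', b'\n', b'## placeholder\n']
dest = io.BytesIO()
copy_out(src, dest, use_placeholders=True)
# dest now holds b'keep this\n\n': the true empty line was dropped,
# and the placeholder line became a bare newline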
def clean_proc(proc, wait_for_kill=10):
    if not proc:
        return
    try:
        waited = 0
        while proc.is_alive():
            proc.terminate()
            waited += 1
            time.sleep(0.1)
            if proc.is_alive() and (waited >= wait_for_kill):
                log.error('Process did not die with terminate(): %s', proc.pid)
                os.kill(proc.pid, signal.SIGKILL)
    except (AssertionError, AttributeError):
        pass
Generic method for cleaning up multiprocessing procs
def get_managers(self):
    if self._single_env:
        return None
    if not hasattr(self, '_managers'):
        self._managers = self.env.get_slave_managers()
    return self._managers
Get managers for the slave environments.
def re_balance(self):
    self.update_heights(recursive=False)
    self.update_balances(False)
    while self.balance < -1 or self.balance > 1:
        if self.balance > 1:
            if self.node.left.balance < 0:
                self.node.left.rotate_left()
                self.update_heights()
                self.update_balances()
            self.rotate_right()
            self.update_heights()
            self.update_balances()
        if self.balance < -1:
            if self.node.right.balance > 0:
                self.node.right.rotate_right()
                self.update_heights()
                self.update_balances()
            self.rotate_left()
            self.update_heights()
            self.update_balances()
Re-balance the tree. Called after inserting or deleting a node.
def _resolve_call(self, table, column='', value='', **kwargs):
    if not column:
        return self.catalog(table)
    elif not value:
        return self.catalog(table, column)
    column = column.upper()
    value = str(value).upper()
    data = self.call_api(table, column, value, **kwargs)
    if isinstance(data, dict):
        data = data.values()[0]
    return data
Internal method to resolve the API wrapper call.
def set_int_attribute(self, target, display_mask, attr, value):
    # `extname` is expected to be a module-level constant naming the NV-CONTROL extension
    reply = NVCtrlSetAttributeAndGetStatusReplyRequest(
        display=self.display,
        opcode=self.display.get_extension_major(extname),
        target_id=target.id(),
        target_type=target.type(),
        display_mask=display_mask,
        attr=attr,
        value=value)
    return reply._data.get('flags') != 0
Set the value of an integer attribute
def executors(opts, functions=None, context=None, proxy=None):
    executors = LazyLoader(
        _module_dirs(opts, 'executors', 'executor'),
        opts,
        tag='executor',
        pack={'__salt__': functions, '__context__': context or {}, '__proxy__': proxy or {}},
    )
    executors.pack['__executors__'] = executors
    return executors
Returns the executor modules
def plot_phi(self, colorbar=True, cb_orientation='vertical',
             cb_label='$g_\phi$, m s$^{-2}$', ax=None, show=True, fname=None,
             **kwargs):
    if ax is None:
        fig, axes = self.phi.plot(colorbar=colorbar,
                                  cb_orientation=cb_orientation,
                                  cb_label=cb_label, show=False, **kwargs)
        if show:
            fig.show()
        if fname is not None:
            fig.savefig(fname)
        return fig, axes
    else:
        self.phi.plot(colorbar=colorbar, cb_orientation=cb_orientation,
                      cb_label=cb_label, ax=ax, **kwargs)
Plot the phi component of the gravity field. Usage ----- x.plot_phi([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation, cb_label, show, fname, **kwargs]) Parameters ---------- tick_interval : list or tuple, optional, default = [30, 30] Intervals to use when plotting the x and y ticks. If set to None, ticks will not be plotted. xlabel : str, optional, default = 'longitude' Label for the longitude axis. ylabel : str, optional, default = 'latitude' Label for the latitude axis. ax : matplotlib axes object, optional, default = None A single matplotlib axes object where the plot will appear. colorbar : bool, optional, default = True If True, plot a colorbar. cb_orientation : str, optional, default = 'vertical' Orientation of the colorbar: either 'vertical' or 'horizontal'. cb_label : str, optional, default = '$g_\phi$, m s$^{-2}$' Text label for the colorbar. show : bool, optional, default = True If True, plot the image to the screen. fname : str, optional, default = None If present, and if axes is not specified, save the image to the specified file. kwargs : optional Keyword arguments that will be sent to the SHGrid.plot() and plt.imshow() methods.
def exists(self, path, mtime=None):
    self._connect()
    if self.sftp:
        exists = self._sftp_exists(path, mtime)
    else:
        exists = self._ftp_exists(path, mtime)
    self._close()
    return exists
Return `True` if the file or directory at `path` exists, False otherwise. An additional check on modified time is made when mtime is passed in: return False if the file's modified time is older than mtime.
def compare(self, other, r_threshold=1e-3):
    return compute_rmsd(self.r, other.r) < r_threshold
Compare two rotations The RMSD of the rotation matrices is computed. The return value is True when the RMSD is below the threshold, i.e. when the two rotations are almost identical.
def block_view(request):
    blocked_ip_list = get_blocked_ips()
    blocked_username_list = get_blocked_usernames()
    context = {'blocked_ip_list': blocked_ip_list,
               'blocked_username_list': blocked_username_list}
    return render(request, 'defender/admin/blocks.html', context)
List the blocked IPs and usernames.
def handle_onchain_secretreveal(
        mediator_state: MediatorTransferState,
        onchain_secret_reveal: ContractReceiveSecretReveal,
        channelidentifiers_to_channels: ChannelMap,
        pseudo_random_generator: random.Random,
        block_number: BlockNumber,
) -> TransitionResult[MediatorTransferState]:
    secrethash = onchain_secret_reveal.secrethash
    is_valid_reveal = is_valid_secret_reveal(
        state_change=onchain_secret_reveal,
        transfer_secrethash=mediator_state.secrethash,
        secret=onchain_secret_reveal.secret,
    )
    if is_valid_reveal:
        secret = onchain_secret_reveal.secret
        block_number = onchain_secret_reveal.block_number
        secret_reveal = set_onchain_secret(
            state=mediator_state,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            secret=secret,
            secrethash=secrethash,
            block_number=block_number,
        )
        balance_proof = events_for_balanceproof(
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            transfers_pair=mediator_state.transfers_pair,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
            secret=secret,
            secrethash=secrethash,
        )
        iteration = TransitionResult(mediator_state, secret_reveal + balance_proof)
    else:
        iteration = TransitionResult(mediator_state, list())
    return iteration
The secret was revealed on-chain, set the state of all transfers to secret known.
def his_from_sql(self, db_name, point):
    his = self._read_from_sql('select * from "%s"' % "history", db_name)
    his.index = his["index"].apply(Timestamp)
    return his.set_index("index")[point]
Retrieve point histories from the SQL database.
def xml(self, operator='set', indent=""):
    xml = indent + "<meta id=\"" + self.key + "\""
    if operator != 'set':
        xml += " operator=\"" + operator + "\""
    if not self.value:
        xml += " />"
    else:
        xml += ">" + self.value + "</meta>"
    return xml
Serialize the metadata field to XML
def _check_len(self, pkt):
    if len(pkt) % 2:
        last_chr = pkt[-1]
        if last_chr <= b'\x80':
            return pkt[:-1] + b'\x00' + last_chr
        else:
            return pkt[:-1] + b'\xff' + chb(orb(last_chr) - 1)
    else:
        return pkt
Check for odd packet length and pad according to Cisco spec. This padding is only used for checksum computation. The original packet should not be altered.
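To illustrate the padding rule (Python 2 semantics, where pkt[-1] is a one-byte string):

# odd length, last byte <= 0x80: a zero byte is inserted before the last byte
self._check_len('\x01\x02\x03')   # -> '\x01\x02\x00\x03'
# even length: the packet is returned unchanged
self._check_len('\x01\x02')       # -> '\x01\x02'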
def _add_warc_action_log(self, path, url):
    _logger.debug('Adding action log record.')

    actions = []
    with open(path, 'r', encoding='utf-8', errors='replace') as file:
        for line in file:
            actions.append(json.loads(line))

    log_data = json.dumps(
        {'actions': actions},
        indent=4,
    ).encode('utf-8')

    self._action_warc_record = record = WARCRecord()
    record.set_common_fields('metadata', 'application/json')
    record.fields['WARC-Target-URI'] = 'urn:X-wpull:snapshot?url={0}' \
        .format(wpull.url.percent_encode_query_value(url))
    record.block_file = io.BytesIO(log_data)

    self._warc_recorder.set_length_and_maybe_checksums(record)
    self._warc_recorder.write_record(record)
Add the action log to the WARC file.
def exists(name, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        conn.get_queue_url(QueueName=name)
    except botocore.exceptions.ClientError as e:
        missing_code = 'AWS.SimpleQueueService.NonExistentQueue'
        if e.response.get('Error', {}).get('Code') == missing_code:
            return {'result': False}
        return {'error': __utils__['boto3.get_error'](e)}
    return {'result': True}
Check to see if a queue exists. CLI Example: .. code-block:: bash salt myminion boto_sqs.exists myqueue region=us-east-1
def get_module(name: str) -> typing.Union[types.ModuleType, None]:
    return sys.modules.get(name)
Retrieves the loaded module for the given module name or returns None if no such module has been loaded. :param name: The name of the module to be retrieved :return: Either the loaded module with the specified name, or None if no such module has been imported.
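For example, after importing sys:

import sys

assert get_module('sys') is sys
assert get_module('module_never_imported') is None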
def VerifyStructure(self, parser_mediator, line):
    self._last_month = 0
    self._year_use = parser_mediator.GetEstimatedYear()

    key = 'header'
    try:
        structure = self._MAC_WIFI_HEADER.parseString(line)
    except pyparsing.ParseException:
        structure = None

    if not structure:
        key = 'turned_over_header'
        try:
            structure = self._MAC_WIFI_TURNED_OVER_HEADER.parseString(line)
        except pyparsing.ParseException:
            structure = None

    if not structure:
        logger.debug('Not a Mac Wifi log file')
        return False

    time_elements_tuple = self._GetTimeElementsTuple(key, structure)

    try:
        dfdatetime_time_elements.TimeElementsInMilliseconds(
            time_elements_tuple=time_elements_tuple)
    except ValueError:
        logger.debug(
            'Not a Mac Wifi log file, invalid date and time: {0!s}'.format(
                structure.date_time))
        return False

    self._last_month = time_elements_tuple[1]
    return True
Verify that this file is a Mac Wifi log file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. line (str): line from a text file. Returns: bool: True if the line is in the expected format, False if not.
def version_from_frame(frame):
    module = getmodule(frame)
    if module is None:
        s = "<unknown from {0}:{1}>"
        return s.format(frame.f_code.co_filename, frame.f_lineno)

    module_name = module.__name__
    variable = "AUTOVERSION_{}".format(module_name.upper())
    override = os.environ.get(variable, None)
    if override is not None:
        return override

    while True:
        try:
            get_distribution(module_name)
        except DistributionNotFound:
            module_name, dot, _ = module_name.partition(".")
            if dot == "":
                break
        else:
            return getversion(module_name)
    return None
Given a ``frame``, obtain the version number of the module running there.
def merge_versioned(releases, schema=None, merge_rules=None):
    if not merge_rules:
        merge_rules = get_merge_rules(schema)

    merged = OrderedDict()
    for release in sorted(releases, key=lambda release: release['date']):
        release = release.copy()

        ocid = release.pop('ocid')
        merged[('ocid',)] = ocid

        releaseID = release['id']
        date = release['date']
        tag = release.pop('tag', None)
        flat = flatten(release, merge_rules)
        processed = process_flattened(flat)

        for key, value in processed.items():
            if key in merged and value == merged[key][-1]['value']:
                continue
            if key not in merged:
                merged[key] = []
            merged[key].append(OrderedDict([
                ('releaseID', releaseID),
                ('releaseDate', date),
                ('releaseTag', tag),
                ('value', value),
            ]))

    return unflatten(merged, merge_rules)
Merges a list of releases into a versionedRelease.
def apply_init(m, init_func:LayerFunc):
    "Initialize all non-batchnorm layers of `m` with `init_func`."
    apply_leaf(m, partial(cond_init, init_func=init_func))
Initialize all non-batchnorm layers of `m` with `init_func`.
def get_min_distance(element):
    try:
        from scipy.spatial.distance import pdist
        return pdist(element.array([0, 1])).min()
    except Exception:
        return _get_min_distance_numpy(element)
Gets the minimum sampling distance of the x- and y-coordinates in a grid.
def write_spo(sub, prd, obj):
    rcvtriples.append(make_spo(sub, prd, obj))
Write triples to a buffer in case we decide to drop them.
def query_struct(self, name):
    sql = 'select id, file_id, name from code_items ' \
          'where name = ?'
    self.cursor.execute(sql, (name,))
    for i in self.cursor.fetchall():
        sql = 'select id, type, name from code_items ' \
              'where parent_id = ?'
        self.cursor.execute(sql, (i[0],))
        members = self.cursor.fetchall()
        if members:
            print(self.file_id_to_name(i[1]), i[2])
            print(members)
Query struct.
def to_pandas_closed_closed(date_range, add_tz=True):
    if not date_range:
        return None

    start = date_range.start
    end = date_range.end
    if start:
        start = to_dt(start, mktz()) if add_tz else start
        if date_range.startopen:
            start += timedelta(milliseconds=1)
    if end:
        end = to_dt(end, mktz()) if add_tz else end
        if date_range.endopen:
            end -= timedelta(milliseconds=1)
    return DateRange(start, end)
Pandas DateRange slicing is CLOSED-CLOSED inclusive at both ends. Parameters ---------- date_range : `DateRange` object converted to CLOSED_CLOSED form for Pandas slicing add_tz : `bool` Adds a TimeZone to the daterange start and end if it doesn't have one. Returns ------- Returns a date_range with start-end suitable for slicing in pandas.
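An illustrative sketch, assuming the surrounding module's DateRange type and interval constants (arctic-style OPEN_CLOSED is assumed here):

from datetime import datetime

# start is open, so it is nudged 1 ms inward to make it inclusive
dr = DateRange(datetime(2024, 1, 1), datetime(2024, 1, 31), OPEN_CLOSED)
closed = to_pandas_closed_closed(dr)
# closed.start == 2024-01-01 00:00:00.001 (tz-localized via mktz()),
# closed.end is unchanged; both ends are now safe for df.loc[start:end]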
def bundle(context, name):
    if context.obj['db'].bundle(name):
        click.echo(click.style('bundle name already exists', fg='yellow'))
        context.abort()
    new_bundle = context.obj['db'].new_bundle(name)
    context.obj['db'].add_commit(new_bundle)
    new_version = context.obj['db'].new_version(created_at=new_bundle.created_at)
    new_version.bundle = new_bundle
    context.obj['db'].add_commit(new_version)
    click.echo(click.style(f"new bundle added: {new_bundle.name} ({new_bundle.id})", fg='green'))
Add a new bundle.
def _calculateEncodingKey(comparator):
    encodingName = None
    for k, v in list(_encodings.items()):
        if v == comparator:
            encodingName = k
            break
    return encodingName
Gets the first key of all available encodings where the corresponding value matches the comparator. Args: comparator (string): A view name for an encoding. Returns: str: A key for a specific encoding used by python.
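A quick sketch of the reverse lookup, with a hypothetical _encodings mapping:

_encodings = {'utf_8': 'UTF-8', 'latin_1': 'Latin-1'}  # hypothetical contents

_calculateEncodingKey('UTF-8')    # -> 'utf_8'
_calculateEncodingKey('missing')  # -> None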
def update_boot_system_use(self, boot_sys_use):
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('Boot Record not yet initialized')
    self.boot_system_use = boot_sys_use.ljust(197, b'\x00')
A method to update the boot system use field of this Boot Record. Parameters: boot_sys_use - The new boot system use field for this Boot Record. Returns: Nothing.
def get_collection(self, folderid, username="", offset=0, limit=10):
    if not username and self.standard_grant_type == "authorization_code":
        response = self._req('/collections/{}'.format(folderid), {
            "offset": offset,
            "limit": limit
        })
    else:
        if not username:
            raise DeviantartError("No username defined.")
        else:
            response = self._req('/collections/{}'.format(folderid), {
                "username": username,
                "offset": offset,
                "limit": limit
            })

    deviations = []
    for item in response['results']:
        d = Deviation()
        d.from_dict(item)
        deviations.append(d)

    if "name" in response:
        name = response['name']
    else:
        name = None

    return {
        "results": deviations,
        "name": name,
        "has_more": response['has_more'],
        "next_offset": response['next_offset']
    }
Fetch collection folder contents :param folderid: UUID of the folder to list :param username: The user to list folders for, if omitted the authenticated user is used :param offset: the pagination offset :param limit: the pagination limit
def get_game(site, description="", create=False):
    game = None
    games = Game.objects.filter(site=site).order_by("-created")
    try:
        game = games[0]
    except IndexError:
        game = None
    if game is None or game.is_expired() or is_after_endtime():
        if create:
            if is_starttime():
                game = Game(site=site, description=description)
                game.save()
            else:
                raise TimeRangeError(
                    _(u"game start outside of the valid timerange"))
        else:
            game = None
    elif not is_after_endtime():
        game = games[0]
    return game
Get the current game, if it's still active; else create a new game, if the current time is inside the GAME_START_TIMES interval and create=True. @param create: create a game, if there is no active game @returns: the (new) active Game, or None if there is no active Game and none should be created.
def validate_username(username):
    try:
        validate_slug(username)
    except ValidationError:
        username = slugify(username)
    user_model_cls = get_user_model()
    _username = username
    while user_model_cls.objects.filter(username=_username).exists():
        _username = '{}{}'.format(username, random.randint(1, 100))
    return _username
This validation step is done when we are sure the user does not exist on the system and we need to create a new user.
def iterfd(fd):
    unpk = msgpack.Unpacker(fd, **unpacker_kwargs)
    for mesg in unpk:
        yield mesg
Generator which unpacks a file object of msgpacked content. Args: fd: File object to consume data from. Notes: String objects are decoded using utf8 encoding. In order to handle potentially malformed input, ``unicode_errors='surrogatepass'`` is set to allow decoding bad input strings. Yields: Objects from a msgpack stream.
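A usage sketch, assuming the module-level unpacker_kwargs are compatible with plain msgpack decoding:

import io
import msgpack

buf = io.BytesIO(msgpack.packb({'name': 'alice'}) + msgpack.packb([1, 2, 3]))
for mesg in iterfd(buf):
    print(mesg)  # {'name': 'alice'}, then [1, 2, 3]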
def pdhg(x, f, g, A, tau, sigma, niter, **kwargs):
    def fun_select(k):
        return [0]

    f = odl.solvers.SeparableSum(f)
    A = odl.BroadcastOperator(A, 1)

    y = kwargs.pop('y', None)
    if y is None:
        y_new = None
    else:
        y_new = A.range.element([y])

    spdhg_generic(x, f, g, A, tau, [sigma], niter, fun_select, y=y_new, **kwargs)

    if y is not None:
        y.assign(y_new[0])
Computes a saddle point with PDHG. This algorithm is the same as "algorithm 1" in [CP2011a] but with extrapolation on the dual variable. Parameters ---------- x : primal variable This variable is both input and output of the method. f : function Functional Y -> IR_infty that has a convex conjugate with a proximal operator, i.e. f.convex_conj.proximal(sigma) : Y -> Y. g : function Functional X -> IR_infty that has a proximal operator, i.e. g.proximal(tau) : X -> X. A : function Operator A : X -> Y that possesses an adjoint: A.adjoint tau : scalar / vector / matrix Step size for primal variable. Note that the proximal operator of g has to be well-defined for this input. sigma : scalar Scalar / vector / matrix used as step size for dual variable. Note that the proximal operator related to f (see above) has to be well-defined for this input. niter : int Number of iterations Other Parameters ---------------- y: dual variable Dual variable is part of a product space z: variable Adjoint of dual variable, z = A^* y. theta : scalar Extrapolation factor. callback : callable Function called with the current iterate after each iteration. References ---------- [CP2011a] Chambolle, A and Pock, T. *A First-Order Primal-Dual Algorithm for Convex Problems with Applications to Imaging*. Journal of Mathematical Imaging and Vision, 40 (2011), pp 120-145.
def get_default(self, node):
    if self.opposite_property in node.inst.properties:
        return not node.inst.properties[self.opposite_property]
    else:
        return self.default
If not explicitly set, check if the opposite was set first before returning default
def get(self, key):
    key = self._service_key(key)
    return self._service_ops['get'](key)
Return the object in `service` named by `key` or None. Args: key: Key naming the object to retrieve. Returns: object or None
def _handle_event(self, connection, event):
    with self.mutex:
        matching_handlers = sorted(
            self.handlers.get("all_events", [])
            + self.handlers.get(event.type, [])
        )
        for handler in matching_handlers:
            result = handler.callback(connection, event)
            if result == "NO MORE":
                return
Handle an Event `event` incoming on ServerConnection `connection`.
def genkeyhex():
    while True:
        key = hash256(
            hexlify(os.urandom(40) + str(datetime.datetime.now())
                    .encode("utf-8")))
        if int(key, 16) > 1 and int(key, 16) < N:
            break
    return key
Generate new random Bitcoin private key, using os.urandom and double-sha256. Hex format.
def _device_expiry_callback(self):
    expired = 0
    for adapters in self._devices.values():
        to_remove = []
        now = monotonic()
        for adapter_id, dev in adapters.items():
            if 'expires' not in dev:
                continue
            if now > dev['expires']:
                to_remove.append(adapter_id)
                local_conn = "adapter/%d/%s" % (adapter_id, dev['connection_string'])
                if local_conn in self._conn_strings:
                    del self._conn_strings[local_conn]
        for entry in to_remove:
            del adapters[entry]
            expired += 1
    if expired > 0:
        self._logger.info('Expired %d devices', expired)
Periodic callback to remove expired devices from visible_devices.
def _K(m):
    M = m*(m - 1)//2
    K = np.zeros((M, m**2), dtype=np.int64)
    row = 0
    for j in range(1, m):
        col = (j - 1)*m + j
        s = m - j
        K[row:(row+s), col:(col+s)] = np.eye(s)
        row += s
    return K
matrix K_m from Wiktorsson2001
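A quick sanity check of what the matrix does for m = 3: applied to a row-major vec(A), it picks out the strictly upper-triangular entries.

import numpy as np

A = np.arange(9).reshape(3, 3)
K = _K(3)               # shape (3, 9)
print(K @ A.flatten())  # [1 2 5] == A[0,1], A[0,2], A[1,2]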
def save(self, fname, mode=None, validate=True, wd=False, inline=False,
         relative=True, pack=False, encoding='utf-8'):
    super(WorkflowGenerator, self).save(fname, mode=mode, validate=validate,
                                        wd=wd, inline=inline,
                                        relative=relative, pack=pack,
                                        encoding=encoding)
Save workflow to file For nlppln, the default is to save workflows with relative paths.
def set_authoring_nodes(self, editor):
    project_node = self.default_project_node
    file_node = self.register_file(editor.file, project_node)
    editor_node = self.register_editor(editor, file_node)
    return True
Sets the Model authoring Nodes using given editor. :param editor: Editor to set. :type editor: Editor :return: Method success. :rtype: bool
def DirContains(self, f):
    def match(fsNode):
        if not fsNode.isdir():
            return False
        for c in fsNode.children():
            if f(c):
                return True
        return False
    return self.make_return(match)
Matches dirs that have a child that matches filter f
def val_to_signed_integer(value, bitwidth):
    if isinstance(value, WireVector) or isinstance(bitwidth, WireVector):
        raise PyrtlError('inputs must not be wirevectors')
    if bitwidth < 1:
        raise PyrtlError('bitwidth must be a positive integer')
    neg_mask = 1 << (bitwidth - 1)
    neg_part = value & neg_mask
    pos_mask = neg_mask - 1
    pos_part = value & pos_mask
    return pos_part - neg_part
Return value as interpreted as a signed integer under twos complement. :param value: a python integer holding the value to convert :param bitwidth: the length of the integer in bits to assume for conversion Given an unsigned integer (not a wirevector!) convert that to a signed integer. This is useful for printing and interpreting values which are negative numbers in twos complement. :: val_to_signed_integer(0xff, 8) == -1
def add_plot(x, y, xl, yl, fig, ax, LATEX=False, linestyle=None, **kwargs):
    if LATEX:
        xl_data = xl[1]
        yl_data = yl[1]
    else:
        xl_data = xl[0]
        yl_data = yl[0]
    for idx in range(len(y)):
        ax.plot(x, y[idx], label=yl_data[idx], linestyle=linestyle)
    ax.legend(loc='upper right')
    ax.set_ylim(auto=True)
Add plots to an existing plot
def space(self):
    schema = SpaceSchema()
    resp = self.service.get(self.base + 'space/')
    return self.service.decode(schema, resp)
Get system disk space usage. :return: :class:`system.Space <system.Space>` object :rtype: system.Space
def of(cls, msg_header: MessageHeader) -> 'MessageDecoder':
    cte_hdr = msg_header.parsed.content_transfer_encoding
    return cls.of_cte(cte_hdr)
Return a decoder from the message header object. See Also: :meth:`.of_cte` Args: msg_header: The message header object.
def state(self):
    state = self._resource.get('state', self.default_state)
    if state not in State:
        state = getattr(State, state)
    if not self.parent:
        raise Exception('Unable to check the parent state')
    parent_state = self.parent.state
    return max([state, parent_state], key=attrgetter('value'))
Get the SubResource state If the parents state has a higher priority, then it overrides the SubResource state ..note:: This assumes that self.parent is populated
def advertise(
    self,
    routers=None,
    name=None,
    timeout=None,
    router_file=None,
    jitter=None,
):
    name = name or self.name

    if not self.is_listening():
        self.listen()

    return hyperbahn.advertise(
        self,
        name,
        routers,
        timeout,
        router_file,
        jitter,
    )
Make a service available on the Hyperbahn routing mesh. This will make contact with a Hyperbahn host from a list of known Hyperbahn routers. Additional Hyperbahn connections will be established once contact has been made with the network. :param routers: A seed list of addresses of Hyperbahn routers, e.g., ``["127.0.0.1:23000"]``. :param name: The identity of this service on the Hyperbahn. This is usually unnecessary, as it defaults to the name given when initializing the :py:class:`TChannel` (which is used as your identity as a caller). :returns: A future that resolves to the remote server's response after the first advertise finishes. Advertisement will continue to happen periodically.
def start(self):
    with self._operational_lock:
        ready = threading.Event()
        thread = threading.Thread(
            name=_BIDIRECTIONAL_CONSUMER_NAME,
            target=self._thread_main,
            args=(ready,)
        )
        thread.daemon = True
        thread.start()
        ready.wait()
        self._thread = thread
        _LOGGER.debug("Started helper thread %s", thread.name)
Start the background thread and begin consuming from the stream.
def form_invalid(self, form):
    LOGGER.debug("Invalid Email Form Submitted")
    messages.add_message(self.request, messages.ERROR, _("Invalid Email Address."))
    return super(EmailTermsView, self).form_invalid(form)
Override of CreateView method, logs invalid email form submissions.
def call_with_context(func, context, *args):
    return make_context_aware(func, len(args))(*args + (context,))
Check if the given function accepts more arguments than given. If so, call it with context as the last argument; otherwise, call it without context.
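A sketch of both paths, assuming make_context_aware drops the trailing context argument for callables that do not accept it:

def handler_with_ctx(value, context):
    return value, context

def handler_plain(value):
    return value

call_with_context(handler_with_ctx, {'user': 'bob'}, 42)  # -> (42, {'user': 'bob'})
call_with_context(handler_plain, {'user': 'bob'}, 42)     # -> 42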
def _gluster_output_cleanup(result):
    ret = ''
    for line in result.splitlines():
        if line.startswith('gluster>'):
            ret += line[9:].strip()
        elif line.startswith('Welcome to gluster prompt'):
            pass
        else:
            ret += line.strip()
    return ret
Gluster versions prior to 6 have a bug that requires tricking isatty. This adds "gluster> " to the output. Strip it off and produce clean xml for ElementTree.
def complete(self, GET):
    token = self.get_access_token(verifier=GET.get('oauth_verifier', None))
    return token
When redirected back to our application, try to complete the flow by requesting an access token, validating against the `GET` parameters received. If the access token request fails, it'll throw an `OAuthError`.
def validate(schema_text, data_text, deserializer=_default_deserializer):
    schema = Schema(deserializer(schema_text))
    data = deserializer(data_text)
    return Validator.validate(schema, data)
Validate specified JSON text with specified schema. Both arguments are converted to JSON objects with :func:`simplejson.loads`, if present, or :func:`json.loads`. :param schema_text: Text of the JSON schema to check against :type schema_text: :class:`str` :param data_text: Text of the JSON object to check :type data_text: :class:`str` :param deserializer: Function to convert the schema and data to JSON objects :type deserializer: :class:`callable` :returns: Same as :meth:`json_schema_validator.validator.Validator.validate` :raises: Whatever may be raised by simplejson (in particular :class:`simplejson.decoder.JSONDecoderError`, a subclass of :class:`ValueError`) or json :raises: Whatever may be raised by :meth:`json_schema_validator.validator.Validator.validate`. In particular :class:`json_schema_validator.errors.ValidationError` and :class:`json_schema_validator.errors.SchemaError`
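A short usage sketch (the schema uses whatever JSON-schema draft the library supports):

schema_text = '{"type": "object", "properties": {"age": {"type": "integer"}}}'

validate(schema_text, '{"age": 42}')       # -> True
validate(schema_text, '{"age": "forty"}')  # raises ValidationError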
def __refresh_statusbar(self, index):
    finfo = self.data[index]
    self.encoding_changed.emit(finfo.encoding)
    line, index = finfo.editor.get_cursor_line_column()
    self.sig_editor_cursor_position_changed.emit(line, index)
Refreshing statusbar widgets
def socket_reader(connection: socket, buffer_size: int = 1024):
    while connection is not None:
        try:
            buffer = connection.recv(buffer_size)
            if not len(buffer):
                raise ConnectionAbortedError
        except ConnectionAbortedError:
            print('connection aborted')
            connection.close()
            yield None
        except OSError:
            print('socket closed')
            connection.close()
            yield None
        else:
            yield buffer
read data from adb socket
def check_flavors(session):
    nclient = nova.Client(NOVA_VERSION, session=session,
                          region_name=os.environ['OS_REGION_NAME'])
    flavors = nclient.flavors.list()
    to_id = dict(list(map(lambda n: [n.name, n.id], flavors)))
    to_flavor = dict(list(map(lambda n: [n.id, n.name], flavors)))
    return to_id, to_flavor
Build the flavors mapping. Returns the mappings id <-> flavor.
def scheduling_blocks():
    sbi_list = SchedulingBlockInstanceList()
    return dict(active=sbi_list.active,
                completed=sbi_list.completed,
                aborted=sbi_list.aborted)
Return list of Scheduling Block instances known to SDP.
def avail_locations(call=None):
    if call == 'action':
        raise SaltCloudSystemExit(
            'The avail_locations function must be called with '
            '-f or --function, or with the --list-locations option.'
        )

    server, user, password = _get_xml_rpc()
    auth = ':'.join([user, password])
    host_pool = server.one.hostpool.info(auth)[1]

    locations = {}
    for host in _get_xml(host_pool):
        locations[host.find('NAME').text] = _xml_to_dict(host)

    return locations
Return available OpenNebula locations. CLI Example: .. code-block:: bash salt-cloud --list-locations opennebula salt-cloud --function avail_locations opennebula salt-cloud -f avail_locations opennebula
def write_class(self, obj, parent=None):
    self._writeStruct(">B", 1, (self.TC_CLASS,))
    self.write_classdesc(obj)
Writes a class to the stream :param obj: A JavaClass object :param parent:
def getMAC(self, bType=MacType.RandomMac):
    print '%s call getMAC' % self.port
    if self.isPowerDown:
        macAddr64 = self.mac
    else:
        if bType == MacType.FactoryMac:
            macAddr64 = self.__stripValue(self.__sendCommand(WPANCTL_CMD + 'getprop -v NCP:HardwareAddress')[0])
        elif bType == MacType.HashMac:
            macAddr64 = self.__stripValue(self.__sendCommand(WPANCTL_CMD + 'getprop -v NCP:MACAddress')[0])
        else:
            macAddr64 = self.__stripValue(self.__sendCommand(WPANCTL_CMD + 'getprop -v NCP:ExtendedAddress')[0])
    return int(macAddr64, 16)
get one specific type of MAC address currently OpenThreadWpan only supports Random MAC address Args: bType: indicate which kind of MAC address is required Returns: specific type of MAC address
def delete_secret(self, path, mount_point=DEFAULT_MOUNT_POINT):
    api_path = '/v1/{mount_point}/{path}'.format(mount_point=mount_point, path=path)
    return self._adapter.delete(
        url=api_path,
    )
Delete the secret at the specified location. Supported methods: DELETE: /{mount_point}/{path}. Produces: 204 (empty body) :param path: Specifies the path of the secret to delete. This is specified as part of the URL. :type path: str | unicode :param mount_point: The "path" the secret engine was mounted on. :type mount_point: str | unicode :return: The response of the delete_secret request. :rtype: requests.Response
def MI_get_item(self, key, index=0):
    'return list of item'
    index = _key_to_index_single(force_list(self.indices.keys()), index)
    if index != 0:
        key = self.indices[index][key]
    value = super(MIMapping, self).__getitem__(key)
    N = len(self.indices)
    if N == 1:
        return [key]
    if N == 2:
        value = [value]
    return [key] + value
return list of item
def save_html_with_metadata(fig, filename, fig_kwds, kwds):
    if isinstance(fig, str):
        text = fig
    else:
        from mpld3 import fig_to_html
        text = fig_to_html(fig, **fig_kwds)

    f = open(filename, 'w')
    for key, value in kwds.items():
        value = escape(value, escape_table)
        line = "<div class=pycbc-meta key=\"%s\" value=\"%s\"></div>" % (str(key), value)
        f.write(line)
    f.write(text)
Save a html output to file with metadata
async def dump_field(obj, elem, elem_type, params=None):
    if isinstance(elem, (int, bool)) or issubclass(elem_type, x.UVarintType) or issubclass(elem_type, x.IntType):
        return set_elem(obj, elem)
    elif issubclass(elem_type, x.BlobType) or isinstance(obj, bytes) or isinstance(obj, bytearray):
        return set_elem(obj, await dump_blob(elem))
    elif issubclass(elem_type, x.UnicodeType) or isinstance(elem, str):
        return set_elem(obj, elem)
    elif issubclass(elem_type, x.VariantType):
        return set_elem(obj, await dump_variant(None, elem, elem_type, params))
    elif issubclass(elem_type, x.ContainerType):
        return set_elem(obj, await dump_container(None, elem, elem_type, params))
    elif issubclass(elem_type, x.MessageType):
        return set_elem(obj, await dump_message(None, elem))
    else:
        raise TypeError
Dumps generic field to the popo object representation, according to the element specification. General multiplexer. :param obj: :param elem: :param elem_type: :param params: :return:
def rooms_clean_history(self, room_id, latest, oldest, **kwargs):
    return self.__call_api_post('rooms.cleanHistory', roomId=room_id,
                                latest=latest, oldest=oldest, kwargs=kwargs)
Cleans up a room, removing messages from the provided time range.
def write_kwargs_to_attrs(cls, attrs, **kwargs):
    for arg, val in kwargs.items():
        if val is None:
            val = str(None)
        if isinstance(val, dict):
            attrs[arg] = list(val.keys())  # store the keys as a list, per the docstring
            cls.write_kwargs_to_attrs(attrs, **val)
        else:
            attrs[arg] = val
Writes the given keywords to the given ``attrs``. If any keyword argument points to a dict, the keyword will point to a list of the dict's keys. Each key is then written to the attrs with its corresponding value. Parameters ---------- attrs : an HDF attrs The ``attrs`` of an hdf file or a group in an hdf file. \**kwargs : The keywords to write.
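A sketch of the flattening behaviour; the owning class is called InferenceFile here purely as a placeholder:

import h5py

with h5py.File('output.hdf', 'w') as fp:
    InferenceFile.write_kwargs_to_attrs(fp.attrs, n_samples=1000,
                                        prior={'mass': 'uniform'})
# fp.attrs['n_samples'] == 1000
# fp.attrs['prior'] == ['mass'], and fp.attrs['mass'] == 'uniform'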
def mem_ds(res, extent, srs=None, dtype=gdal.GDT_Float32):
    dst_ns = int((extent[2] - extent[0])/res + 0.99)
    dst_nl = int((extent[3] - extent[1])/res + 0.99)
    m_ds = gdal.GetDriverByName('MEM').Create('', dst_ns, dst_nl, 1, dtype)
    m_gt = [extent[0], res, 0, extent[3], 0, -res]
    m_ds.SetGeoTransform(m_gt)
    if srs is not None:
        m_ds.SetProjection(srs.ExportToWkt())
    return m_ds
Create a new GDAL Dataset in memory Useful for various applications that require a Dataset
def onMessageReceived(self, method_frame, properties, body):
    if "UUID" not in properties.headers:
        self.process_exception(
            e=ValueError("No UUID provided, message ignored."),
            uuid="",
            routing_key=self.parseKey(method_frame),
            body=body
        )
        return True

    key = self.parseKey(method_frame)
    uuid = properties.headers["UUID"]

    try:
        result = self.react_fn(
            serializers.deserialize(body, self.globals),
            self.get_sendback(uuid, key)
        )
        print "sending response", key
        self.sendResponse(
            serializers.serialize(result),
            uuid,
            key
        )
    except Exception, e:
        self.process_exception(
            e=e,
            uuid=uuid,
            routing_key=key,
            body=str(e),
            tb=traceback.format_exc().strip()
        )
    return True
React to received message - deserialize it, add it to users reaction function stored in ``self.react_fn`` and send back result. If `Exception` is thrown during process, it is sent back instead of message. Note: In case of `Exception`, response message doesn't have useful `body`, but in headers is stored following (string) parameters: - ``exception``, where the Exception's message is stored - ``exception_type``, where ``e.__class__`` is stored - ``exception_name``, where ``e.__class__.__name__`` is stored - ``traceback`` where the full traceback is stored (contains line number) This allows you to react to unexpected cases at the other end of the AMQP communication.
def pg_backup(self, pg_dump_exe='pg_dump', exclude_schema=None):
    command = [
        pg_dump_exe,
        '-Fc',
        '-f', self.file,
        'service={}'.format(self.pg_service)
    ]
    if exclude_schema:
        command.append(' '.join("--exclude-schema={}".format(schema)
                                for schema in exclude_schema))
    subprocess.check_output(command, stderr=subprocess.STDOUT)
Call the pg_dump command to create a db backup Parameters ---------- pg_dump_exe: str the pg_dump command path exclude_schema: str[] list of schemas to be skipped
def kick_user(self, room_id, user_id, reason=""):
    self.set_membership(room_id, user_id, "leave", reason)
Calls set_membership with membership="leave" for the user_id provided
def access_token(self, value, request):
    if self.validate(value, request) is not None:
        return None
    access_token = AccessToken.objects.for_token(value)
    return access_token
Try to get the `AccessToken` associated with the provided token. *The provided value must pass `BearerHandler.validate()`*
def accept(self):
    if not self.uiNameTXT.text():
        QMessageBox.information(self, 'Invalid Name',
                                'You need to supply a name for your layout.')
        return

    prof = self.profile()
    if not prof:
        prof = XViewProfile()

    prof.setName(nativestring(self.uiNameTXT.text()))
    prof.setVersion(self.uiVersionSPN.value())
    prof.setDescription(nativestring(self.uiDescriptionTXT.toPlainText()))
    prof.setIcon(self.uiIconBTN.filepath())

    super(XViewProfileDialog, self).accept()
Saves the data to the profile before closing.
def get_filter_args_for_all_events_from_channel(
        token_network_address: TokenNetworkAddress,
        channel_identifier: ChannelID,
        contract_manager: ContractManager,
        from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
        to_block: BlockSpecification = 'latest',
) -> Dict:
    event_filter_params = get_filter_args_for_specific_event_from_channel(
        token_network_address=token_network_address,
        channel_identifier=channel_identifier,
        event_name=ChannelEvent.OPENED,
        contract_manager=contract_manager,
        from_block=from_block,
        to_block=to_block,
    )
    # drop the event-specific first topic so the filter matches all events
    event_filter_params['topics'] = [None, event_filter_params['topics'][1]]
    return event_filter_params
Return the filter params for all events of a given channel.
def _relation_module(role, interface):
    _append_path(hookenv.charm_dir())
    _append_path(os.path.join(hookenv.charm_dir(), 'hooks'))
    base_module = 'relations.{}.{}'.format(interface, role)
    for module in ('reactive.{}'.format(base_module), base_module):
        if module in sys.modules:
            break
        try:
            importlib.import_module(module)
            break
        except ImportError:
            continue
    else:
        hookenv.log('Unable to find implementation for relation: '
                    '{} of {}'.format(role, interface), hookenv.ERROR)
        return None
    return sys.modules[module]
Return module for relation based on its role and interface, or None. Prefers new location (reactive/relations) over old (hooks/relations).
def main(self):
    args = self.args
    parsed_pytree, pypackages = self.parse_py_tree(pytree=args.pytree)
    parsed_doctree = self.parse_doc_tree(doctree=args.doctree, pypackages=pypackages)
    return self.compare_trees(parsed_pytree=parsed_pytree,
                              parsed_doctree=parsed_doctree)
Parse package trees and report on any discrepancies.
def _construct_output_to_match(output_block):
    output_block.validate()

    selections = (
        u'%s AS `%s`' % (output_block.fields[key].to_match(), key)
        for key in sorted(output_block.fields.keys())
    )
    return u'SELECT %s FROM' % (u', '.join(selections),)
Transform a ConstructResult block into a MATCH query string.
def update(self, other):
    if isinstance(other, cookielib.CookieJar):
        for cookie in other:
            self.set_cookie(copy.copy(cookie))
    else:
        super(RequestsCookieJar, self).update(other)
Updates this jar with cookies from another CookieJar or dict-like