code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _n_onset_midi(patterns):
    """Count the onset_midi objects across all patterns.

    ``patterns`` is a list of patterns, each pattern a list of
    occurrences, each occurrence a sequence of onset_midi objects
    (format of :func:`mir_eval.io.load_patterns()`).

    Returns
    -------
    n_onsets : int
        Number of onsets within the patterns.
    """
    total = 0
    for pattern in patterns:
        for occurrence in pattern:
            for _onset in occurrence:
                total += 1
    return total
Computes the number of onset_midi objects in a pattern Parameters ---------- patterns : A list of patterns using the format returned by :func:`mir_eval.io.load_patterns()` Returns ------- n_onsets : int Number of onsets within the pattern.
def __try_read_byte_prev(self, address):
    """Read the previous value of a memory location.

    Returns a tuple ``(True, byte)`` on a successful read, or
    ``(False, None)`` when no previous value is recorded for
    ``address``.
    """
    try:
        return True, self.__memory_prev[address]
    except KeyError:
        return False, None
Read previous value for memory location. Return a tuple (True, Byte) in case of successful read, (False, None) otherwise.
def is_active(self, name):
    """Return whether the named plugin is active.

    :param name: plugin name
    :return: the plugin's ``active`` flag (bool) if the plugin exists,
        otherwise None
    """
    # BUG FIX: the original indexed the literal string "name"
    # (self._plugins["name"]) instead of the name argument, raising
    # KeyError for every real plugin.
    if name in self._plugins:
        return self._plugins[name].active
    return None
Returns True if plugin exists and is active. If plugin does not exist, it returns None :param name: plugin name :return: boolean or None
def t_insert_dict_if_new(self, tblname, d, PKfields, fields=None):
    """Transaction-friendly version of insertDictIfNew; does not commit.

    Returns ``(True, d)`` when the row was inserted, ``(False, values)``
    when a matching row already existed.
    """
    SQL, values = self._insert_dict_if_new_inner(
        tblname, d, PKfields, fields=fields)
    if SQL == False:  # noqa: E712 -- sentinel comparison, kept verbatim
        return False, values
    self.execute_select(SQL, parameters=values, locked=True)
    return True, d
A version of insertDictIfNew for transactions. This does not call commit.
def _put_bucket_lifecycle(self): status = 'deleted' if self.s3props['lifecycle']['enabled']: lifecycle_config = { 'Rules': self.s3props['lifecycle']['lifecycle_rules'] } LOG.debug('Lifecycle Config: %s', lifecycle_config) _response = self.s...
Adds bucket lifecycle configuration.
def push_state(self):
    """Push a copy of the topmost state onto the state stack.

    :return: the newly pushed (top) state dict.
    """
    new = dict(self.states[-1])
    self.states.append(new)
    # FIX: return the new top directly. The original returned
    # ``self.state``, which only matches the docstring if a ``state``
    # property mirrors ``states[-1]``; returning ``new`` is equivalent
    # in that case and correct even without such a property.
    return new
Push a copy of the topmost state on top of the state stack, returns the new top.
def start(self): setproctitle('oq-zworkerpool %s' % self.ctrl_url[6:]) self.workers = [] for _ in range(self.num_workers): sock = z.Socket(self.task_out_port, z.zmq.PULL, 'connect') proc = multiprocessing.Process(target=self.worker, args=(sock,)) proc.start() ...
Start worker processes and a control loop
def to_coo(self, fp=None, vartype_header=False):
    """Serialize the binary quadratic model in COOrdinate format.

    Args:
        fp (file, optional): ``.write()``-supporting file object. When
            omitted, the encoded string is returned instead.
        vartype_header (bool): include a vartype header line.
    """
    import dimod.serialization.coo as coo

    if fp is not None:
        coo.dump(self, fp, vartype_header)
        return None
    return coo.dumps(self, vartype_header)
Serialize the binary quadratic model to a COOrdinate_ format encoding. .. _COOrdinate: https://en.wikipedia.org/wiki/Sparse_matrix#Coordinate_list_(COO) Args: fp (file, optional): `.write()`-supporting `file object`_ to save the linear and quadratic biases o...
def glyph_has_ink(font: TTFont, name: Text) -> bool:
    """Check whether the named glyph has any ink.

    A glyph has ink when it has at least one defined contour; a
    composite has ink when any of its components does.

    Args:
        font: the font.
        name: name of the glyph to check.

    Returns:
        True if the glyph has ink.
    """
    if 'glyf' in font:
        checker = ttf_glyph_has_ink
    elif 'CFF ' in font or 'CFF2' in font:
        checker = cff_glyph_has_ink
    else:
        raise Exception("Could not find 'glyf', 'CFF ', or 'CFF2' table.")
    return checker(font, name)
Checks if specified glyph has any ink. That is, that it has at least one defined contour associated. Composites are considered to have ink if any of their components have ink. Args: font: the font glyph_name: The name of the glyph to check for ink. Returns: True if the font has at least...
def extract_mime(self, mime, def_mime='unk'):
    """Store the bare mimetype under 'mime' (charset and parameters
    stripped via MIME_RE) and the full content type under
    '_content_type'; falls back to ``def_mime`` when ``mime`` is empty.
    """
    self['mime'] = self.MIME_RE.split(mime, 1)[0] if mime else def_mime
    self['_content_type'] = mime
Utility function to extract mimetype only from a full content type, removing charset settings
def run_details(self, run): run_data = dict(run=run) req = urllib.request.Request("%s/nglims/api_run_details" % self._base_url, urllib.parse.urlencode(run_data)) response = urllib.request.urlopen(req) info = json.loads(response.read()) if "error" in info: ...
Retrieve sequencing run details as a dictionary.
def validate(cls, **kwargs): errors = ValidationErrors() obj = cls() redis = cls.get_redis() for fieldname, field in obj.proxy: if not field.fillable: value = field.default else: try: value = field.validate(kwarg...
Validates the data received as keyword arguments whose name match this class attributes.
def get_ref(profile, ref):
    """Fetch a ref.

    Args:
        profile: auth profile (carries the repo and token to use).
        ref: the ref to fetch, e.g. ``heads/my-feature-branch``.

    Returns:
        The prepared response data.
    """
    payload = api.get_request(profile, "/refs/" + ref)
    return prepare(payload)
Fetch a ref. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. ref The ref to fetch, e.g., ``heads/my-feature-branch...
def qsize(self, qname):
    """Return the approximate size of queue ``qname``.

    :raises ValueError: if the queue is not defined.
    """
    try:
        return self._queues[qname].qsize()
    except KeyError:
        # BUG FIX: the original passed the translated format string and
        # qname as two separate ValueError arguments; the %s was never
        # interpolated. Format the message before raising.
        raise ValueError(_("queue %s is not defined") % qname)
Return the approximate size of the queue.
def _joint_calling(items): jointcaller = tz.get_in(("config", "algorithm", "jointcaller"), items[0]) if jointcaller: assert len(items) == 1, "Can only do joint calling preparation with GATK with single samples" assert tz.get_in(("metadata", "batch"), items[0]) is not None, \ "Joint c...
Determine if this call feeds downstream into joint calls.
def run_step(context):
    """Set default values into context where keys don't already exist.

    ``context`` is dict-like; ``context['defaults']`` must exist and be
    a dictionary. Each key in it is added to context only when missing,
    with {substitution} formatting applied against existing context.
    """
    logger.debug("started")
    # Raises if 'defaults' is missing or has no value.
    context.assert_key_has_value(key='defaults', caller=__name__)
    context.set_defaults(context['defaults'])
    logger.info(f"set {len(context['defaults'])} context item defaults.")
    logger.debug("done")
Set hierarchy into context with substitutions if it doesn't exist yet. context is a dictionary or dictionary-like. context['defaults'] must exist. It's a dictionary. Will iterate context['defaults'] and add these as new values where their keys don't already exist. While it's doing so, it will leave ...
def _get_names(dirs): alphabets = set() label_names = {} for d in dirs: for example in _walk_omniglot_dir(d): alphabet, alphabet_char_id, label, _ = example alphabets.add(alphabet) label_name = "%s_%d" % (alphabet, alphabet_char_id) if label in label_names: assert label_names[l...
Get alphabet and label names, union across all dirs.
def xread_group(self, group_name, consumer_name, streams, timeout=0, count=None, latest_ids=None): args = self._xread(streams, timeout, count, latest_ids) fut = self.execute( b'XREADGROUP', b'GROUP', group_name, consumer_name, *args ) return wait_convert(f...
Perform a blocking read on the given stream as part of a consumer group :raises ValueError: if the length of streams and latest_ids do not match
def adjust_bounding_box(bbox):
    """Apply user-specified bounding-box adjustments.

    For each of the four sides, a value in the (module-level)
    ``bounding_box`` mapping overrides the computed value outright,
    while ``delta_bounding_box`` values are added as offsets.

    - bbox: four-element bounding box computed from canvas drawings.

    Returns the adjusted bounding box.
    """
    for side in range(4):
        if side in bounding_box:
            bbox[side] = bounding_box[side]
        else:
            bbox[side] += delta_bounding_box[side]
    return bbox
Adjust the bounding box as specified by user. Returns the adjusted bounding box. - bbox: Bounding box computed from the canvas drawings. It must be a four-tuple of numbers.
def configure(root_url, **kwargs): default = kwargs.pop('default', True) kwargs['client_agent'] = 'example-client/' + __version__ if 'headers' not in kwargs: kwargs['headers'] = {} kwargs['headers']['Accept-Type'] = 'application/json' if default: default_config.reset(root_url, **kwar...
Notice that `configure` can either apply to the default configuration or `Client.config`, which is the configuration used by the current thread since `Client` inherits from `threading.local`.
def _ParseTriggerStartTime(self, parser_mediator, trigger): time_elements_tuple = ( trigger.start_date.year, trigger.start_date.month, trigger.start_date.day_of_month, trigger.start_time.hours, trigger.start_time.minutes, 0) date_time = None if time_elements_tuple != (0, 0, 0, 0, 0, ...
Parses the start time from a trigger. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. trigger (job_trigger): a trigger. Returns: dfdatetime.DateTimeValues: last run date and time or None if not ...
def _call_zincrby(self, command, value, *args, **kwargs):
    """Run a zincrby-style command that updates a member's score.

    The member may be new to the sorted set, so it is indexed first
    when this container is indexable.
    """
    if self.indexable:
        self.index([value])
    return self._traverse_command(command, value, *args, **kwargs)
This command update a score of a given value. But it can be a new value of the sorted set, so we index it.
def find_module(self, fullname, path=None):
    """Return self when ``fullname`` names a module vendored under
    ``root_name`` through this importer; otherwise return None.
    """
    prefix, _sep, target = fullname.partition(self.root_name + '.')
    if prefix:
        # fullname does not start with "<root_name>.".
        return None
    if any(target.startswith(vendored) for vendored in self.vendored_names):
        return self
    return None
Return self when fullname starts with root_name and the target module is one vendored through this importer.
def add_repository(self, name, repository_type, repository_class, aggregate_class, make_default, configuration): repo_mgr = self.get_registered_utility(IRepositoryManager) if name is None: name = REPOSITORY_DOMAINS.ROOT repo = repo_mgr.new(repository_type, name...
Generic method for adding a repository.
def add_genes(in_file, data, max_distance=10000, work_dir=None): gene_file = regions.get_sv_bed(data, "exons", out_dir=os.path.dirname(in_file)) if gene_file and utils.file_exists(in_file): out_file = "%s-annotated.bed" % utils.splitext_plus(in_file)[0] if work_dir: out_file = os.pat...
Add gene annotations to a BED file from pre-prepared RNA-seq data. max_distance -- only keep annotations within this distance of event
def to_fs_path(uri): scheme, netloc, path, _params, _query, _fragment = urlparse(uri) if netloc and path and scheme == 'file': value = "//{}{}".format(netloc, path) elif RE_DRIVE_LETTER_PATH.match(path): value = path[1].lower() + path[2:] else: value = path if IS_WIN: ...
Returns the filesystem path of the given URI. Will handle UNC paths and normalize windows drive letters to lower-case. Also uses the platform specific path separator. Will *not* validate the path for invalid characters and semantics. Will *not* look at the scheme of this URI.
def from_url(cls, url, db=None, **kwargs):
    """Return a Redis client configured from ``url``.

    Supported schemes: ``redis://``, ``rediss://`` and ``unix://``
    (e.g. ``redis://[:password]@localhost:6379/0``).
    """
    pool = ConnectionPool.from_url(url, db=db, **kwargs)
    return cls(connection_pool=pool)
Return a Redis client object configured from the given URL For example:: redis://[:password]@localhost:6379/0 rediss://[:password]@localhost:6379/0 unix://[:password]@/path/to/socket.sock?db=0 Three URL schemes are supported: - ```redis://`` <htt...
def _post(self, uri, data, headers=None): if not headers: headers = self._get_headers() logging.debug("URI=" + str(uri)) logging.debug("HEADERS=" + str(headers)) logging.debug("BODY=" + str(data)) response = self.session.post(uri, headers=headers, data...
Simple POST request for a given uri path.
def parse_torrent_properties(table_datas): output = {'category': table_datas[0].text, 'subcategory': None, 'quality': None, 'language': None} for i in range(1, len(table_datas)): td = table_datas[i] url = td.get('href') params = Parser.get_params(url) if P...
Static method that parses a given list of table data elements and using helper methods `Parser.is_subcategory`, `Parser.is_quality`, `Parser.is_language`, collects torrent properties. :param list lxml.HtmlElement table_datas: table_datas to parse :return: identified category, subcategory, quali...
def is_executable(path):
    """Is the given path executable?

    Returns a truthy value when any execute bit (user, group, or other)
    is set on ``path``.
    """
    # Stat once instead of three times as the original did; the truthy /
    # falsy contract is unchanged.
    mode = os.stat(path)[stat.ST_MODE]
    return mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
is the given path executable?
def kill(self): self._killed.set() if not self.is_alive(): logging.debug('Cannot kill thread that is no longer running.') return if not self._is_thread_proc_running(): logging.debug("Thread's _thread_proc function is no longer running, " 'will not kill; letting thread e...
Terminates the current thread by raising an error.
def cache_info(self):
    """Report repertoire cache statistics as a dict keyed by cache name."""
    caches = {
        'single_node_repertoire': self._single_node_repertoire_cache,
        'repertoire': self._repertoire_cache,
        'mice': self._mice_cache,
    }
    return {name: cache.info() for name, cache in caches.items()}
Report repertoire cache statistics.
def mset_list(item, index, value):
    """Set multiple items via an index of int, slice, or list.

    For an int or slice index this is ordinary item assignment; for a
    list of indices, each value is assigned position-wise.
    """
    if isinstance(index, (int, slice)):
        item[index] = value
    else:
        # BUG FIX: the original used map(item.__setitem__, index, value),
        # which is a lazy iterator in Python 3 and was never consumed --
        # no assignments happened at all. An explicit loop is eager.
        for i, v in zip(index, value):
            item[i] = v
Set multiple items via an index of int, slice, or list.
def write_packets(self):
    """Drain the write queue, sending each packet to the client, for as
    long as the connection is running (busy-waits while empty).
    """
    while self.running:
        if not self.write_queue:
            continue
        # Send first, then dequeue -- same ordering as before, so a
        # failed send leaves the packet at the head of the queue.
        self.write_queue[0].send(self.client)
        self.write_queue.pop(0)
Write packets from the queue
def getAssociationFilename(self, server_url, handle): if server_url.find('://') == -1: raise ValueError('Bad server URL: %r' % server_url) proto, rest = server_url.split('://', 1) domain = _filenameEscape(rest.split('/', 1)[0]) url_hash = _safe64(server_url) if handle...
Create a unique filename for a given server url and handle. This implementation does not assume anything about the format of the handle. The filename that is returned will contain the domain name from the server URL for ease of human inspection of the data directory. (str, str) ...
def send_faucet_coins(address_to_fund, satoshis, api_key, coin_symbol='bcy'): assert coin_symbol in ('bcy', 'btc-testnet') assert is_valid_address_for_coinsymbol(b58_address=address_to_fund, coin_symbol=coin_symbol) assert satoshis > 0 assert api_key, 'api_key required' url = make_url(coin_symbol, '...
Send yourself test coins on the bitcoin or blockcypher testnet You can see your balance info at: - https://live.blockcypher.com/bcy/ for BCY - https://live.blockcypher.com/btc-testnet/ for BTC Testnet
def is_enabled():
    """See if the jail service is enabled on boot.

    Runs ``service -e`` and returns True when any listed service
    matches 'jail'.

    CLI Example:

    .. code-block:: bash

        salt '*' jail.is_enabled <jail name>
    """
    cmd = 'service -e'
    # __salt__ is Salt's injected cross-module call dictionary.
    services = __salt__['cmd.run'](cmd, python_shell=False)
    # NOTE(review): this splits on a literal backslash-n ('\\n'), not a
    # newline -- looks like a double-escaping artifact; confirm against
    # real `service -e` output before relying on it.
    for service in services.split('\\n'):
        if re.search('jail', service):
            return True
    return False
See if jail service is actually enabled on boot CLI Example: .. code-block:: bash salt '*' jail.is_enabled <jail name>
def delete(self):
    """Delete this instance."""
    query = self.__dmlquery__(
        self.__class__,
        self,
        batch=self._batch,
        timestamp=self._timestamp,
        consistency=self.__consistency__,
        timeout=self._timeout,
    )
    query.delete()
Deletes this instance
def mark_all_as_read(self, recipient=None):
    """Mark any unread messages in the current queryset as read.

    Optionally filter by ``recipient`` first. Returns the number of
    rows updated.
    """
    queryset = self.unread(True)
    if recipient:
        queryset = queryset.filter(recipient=recipient)
    return queryset.update(unread=False)
Mark as read any unread messages in the current queryset. Optionally, filter these by recipient first.
def trim_core(self):
    """Trim the previously extracted unsat core at most ``self.trim``
    times, stopping early when a fixed point (no shrinkage) is reached.
    """
    for _ in range(self.trim):
        self.oracle.solve(assumptions=self.core)
        candidate = self.oracle.get_core()
        if len(candidate) == len(self.core):
            break
        self.core = candidate
This method trims a previously extracted unsatisfiable core at most a given number of times. If a fixed point is reached before that, the method returns.
def url_to_text(self, url):
    """Download a PDF and return its text content.

    Args:
        url: PDF url.

    Returns:
        Extracted text as a string.
    """
    local_path, _headers = urllib.request.urlretrieve(url)
    return self.path_to_text(local_path)
Download PDF file and transform its document to string. Args: url: PDF url. Returns: string.
def batch_contains_deleted(self):
    """Check if the current batch contains already deleted images.

    Only meaningful in duplicate mode; returns False otherwise.
    """
    if not self._duplicates:
        return False
    # NOTE(review): both expressions slice the batch and then take only
    # elements 0 and 1 -- presumably each duplicate batch holds exactly
    # two images; confirm, otherwise the [:self._batch_size] slice is
    # redundant and only the first pair is ever checked.
    imgs = [self._all_images[:self._batch_size][0][1],
            self._all_images[:self._batch_size][1][1]]
    return any(img in self._deleted_fns for img in imgs)
Check if current batch contains already deleted images.
def remove_forms(self, form_names):
    """Remove all the supplied forms from the parent app, skipping any
    that cannot be removed (best effort).
    """
    for name in form_names:
        try:
            self.parentApp.removeForm(name)
        except Exception:
            # Deliberate best-effort: ignore forms that fail to remove.
            pass
Remove all forms supplied
def remote_evb_cfgd_uneq_store(self, remote_evb_cfgd):
    """Save the remote EVB config if it differs from the stored value.

    :return: True when the value changed and was stored, else False.
    """
    if remote_evb_cfgd == self.remote_evb_cfgd:
        return False
    self.remote_evb_cfgd = remote_evb_cfgd
    return True
This saves the EVB cfg, if it is not the same as stored.
def drop(manager: Manager, network_id: Optional[int], yes):
    """Drop the network with ``network_id``, or -- given ``yes`` or an
    interactive confirmation -- drop all networks.
    """
    if network_id:
        manager.drop_network_by_id(network_id)
        return
    if yes or click.confirm('Drop all networks?'):
        manager.drop_networks()
Drop a network by its identifier or drop all networks.
def start_stress(self, stress_cmd): with open(os.devnull, 'w') as dev_null: try: stress_proc = subprocess.Popen(stress_cmd, stdout=dev_null, stderr=dev_null) self.set_stress_process(psutil.Process(stress_proc.pid)) ...
Starts a new stress process with a given cmd
def _serialize(self): if self._defcode is None: raise exceptions.UnboundResponse() resp = self.response_class(request=self.req, status=self.code, headerlist=self._headers.items()) if self.result: resp.content_type = self.content_type ...
Serialize the ResponseObject. Returns a webob `Response` object.
def unlock_kinetis(jlink):
    """Unlock a Freescale Kinetis K40 or K60 device.

    Args:
        jlink (JLink): a J-Link instance connected to a target.

    Returns:
        ``True`` if the device was successfully unlocked, else ``False``.

    Raises:
        ValueError: if the J-Link is not connected to a target.
        NotImplementedError: if the target interface is unsupported.
    """
    if not jlink.connected():
        raise ValueError('No target to unlock.')
    handler = UNLOCK_METHODS.get(jlink.tif)
    if handler is None:
        raise NotImplementedError('Unsupported target interface for unlock.')
    return handler(jlink)
Unlock for Freescale Kinetis K40 or K60 device. Args: jlink (JLink): an instance of a J-Link that is connected to a target. Returns: ``True`` if the device was successfully unlocked, otherwise ``False``. Raises: ValueError: if the J-Link is not connected to a target.
def create(): if request.method == "POST": title = request.form["title"] body = request.form["body"] error = None if not title: error = "Title is required." if error is not None: flash(error) else: db.session.add(Post(title=title, b...
Create a new post for the current user.
def position_for_index(self, index): if not self.elements: return 0 start = 0 end = int(len(self.elements) / 2) slice_length = end - start pivot_point = int(slice_length / 2) pivot_index = self.elements[pivot_point * 2] while slice_length > 1: ...
Calculates the position within the vector to insert a given index. This is used internally by insert and upsert. If there are duplicate indexes then the position is returned as if the value for that index were to be updated, but it is the callers responsibility to check whether there is...
def network_traffic_ports(instance): for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'network-traffic' and ('src_port' not in obj or 'dst_port' not in obj)): yield JSONError("The Network Traffic object '%s' should contain " ...
Ensure network-traffic objects contain both src_port and dst_port.
def resource_create(resource_id, resource_type, resource_options=None, cibfile=None):
    """Create a resource via the pcs command.

    resource_id: name for the resource
    resource_type: resource type (e.g. ocf:heartbeat:IPaddr2 or VirtualIP)
    resource_options: additional options for creating the resource
    cibfile: use this CIB file instead of the live CIB
    """
    return item_create(
        item='resource',
        item_id=resource_id,
        item_type=resource_type,
        extra_args=resource_options,
        cibfile=cibfile,
    )
Create a resource via pcs command resource_id name for the resource resource_type resource type (f.e. ocf:heartbeat:IPaddr2 or VirtualIP) resource_options additional options for creating the resource cibfile use cibfile instead of the live CIB for manipulation CLI E...
def format_output(func): return func @wraps(func) def wrapper(*args, **kwargs): try: response = func(*args, **kwargs) except Exception as error: print(colored(error, 'red'), file=sys.stderr) sys.exit(1) else: print(response) ...
Format output.
def n_chunks(self):
    """Rough estimate of how many chunks will be processed."""
    source = self._data_source
    return source.n_chunks(self.chunksize, stride=self.stride, skip=self.skip)
rough estimate of how many chunks will be processed
def do_batch(args):
    """Run the batch list/show/status/submit command, printing to the
    console.

    Args:
        args: the parsed arguments sent to the command at runtime.
    """
    handlers = {
        'list': do_batch_list,
        'show': do_batch_show,
        'status': do_batch_status,
        'submit': do_batch_submit,
    }
    handler = handlers.get(args.subcommand)
    if handler is not None:
        handler(args)
Runs the batch list, batch show or batch status command, printing output to the console Args: args: The parsed arguments sent to the command at runtime
def connect(self, listener, pass_signal=False): info = listenerinfo(listener, pass_signal) self._listeners.append(info) _logger.debug("connect %r to %r", str(listener), self._name) if inspect.ismethod(listener): listener_object = listener.__self__ if not hasattr(l...
Connect a new listener to this signal :param listener: The listener (callable) to add :param pass_signal: An optional argument that controls if the signal object is explicitly passed to this listener when it is being fired. If enabled, a ``signal=`` keywo...
def split(expr, frac, seed=None):
    """Split the current column into two column objects with a ratio.

    :param float frac: split ratio
    :return: two split DataFrame objects
    """
    if hasattr(expr, '_xflow_split'):
        return expr._xflow_split(frac, seed=seed)
    return _split(expr, frac, seed=seed)
Split the current column into two column objects with certain ratio. :param float frac: Split ratio :return: two split DataFrame objects
def nla_for_each_attr(head, len_, rem):
    """Iterate over a stream of netlink attributes.

    Positional arguments:
    head -- first nlattr, with more packed behind it (nlattr instance).
    len_ -- length of the attribute stream (integer).
    rem -- mutable holder; initialized to len_ and decremented as the
           stream is consumed.

    Yields each attribute in turn while the remainder stays valid.
    """
    rem.value = len_
    attr = head
    while nla_ok(attr, rem):
        yield attr
        attr = nla_next(attr, rem)
Iterate over a stream of attributes. https://github.com/thom311/libnl/blob/libnl3_2_25/include/netlink/attr.h#L262 Positional arguments: head -- first nlattr with more in its bytearray payload (nlattr class instance). len_ -- length of attribute stream (integer). rem -- initialized to len, holds b...
def register(self, email, username, password, first_name, last_name, birthday="1974-11-20", captcha_result=None): self.username = username self.password = password register_message = sign_up.RegisterRequest(email, username, password, first_name, last_name, birthday, captcha_result, ...
Sends a register request to sign up a new user to kik with the given details.
def get(self, alias, target=None):
    """Get a dictionary of aliased options.

    :param alias: the name of the aliased options.
    :param target: look up the alias for this specific target
        (optional). Targets are searched most-specific first.
    :return: the matching options dict, or ``None`` if no alias matches.
    """
    for part in reversed(list(self._get_targets(target))):
        found = self._get(part, alias)
        if found:
            return found
Get a dictionary of aliased options. :param alias: The name of the aliased options. :param target: Get alias for this specific target (optional). If no matching alias is found, returns ``None``.
def footnotemap(self, cache=True): if self.__footnotemap is not None and cache==True: return self.__footnotemap else: x = self.xml(src='word/footnotes.xml') d = Dict() if x is None: return d for footnote in x.root.xpath("w:footnote", namespaces...
return the footnotes from the docx, keyed to string id.
def nth(lst, n): expect_type(n, (String, Number), unit=None) if isinstance(n, String): if n.value.lower() == 'first': i = 0 elif n.value.lower() == 'last': i = -1 else: raise ValueError("Invalid index %r" % (n,)) else: i = n.to_python_index...
Return the nth item in the list.
def write(self, string):
    """Write ``string`` to ``self.path``, ensuring the file ends with a
    newline; the containing directory is created first.
    """
    self.make_dir()
    if not string.endswith("\n"):
        string += "\n"
    with open(self.path, "w") as handle:
        return handle.write(string)
Write string to file.
def save_pkl(self, filename):
    """Pickle this TransitSignal to ``filename``."""
    with open(filename, 'wb') as handle:
        pickle.dump(self, handle)
Pickles TransitSignal.
def validateDayOfWeek(value, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, dayNames=ENGLISH_DAYS_OF_WEEK, excMsg=None): try: return validateMonth(value, blank=blank, strip=strip, allowlistRegexes=allowlistRegexes, blocklistRegexes=blocklistRegexes, monthNames=ENGLISH_DAYS_OF_WEEK) ...
Raises ValidationException if value is not a day of the week, such as 'Mon' or 'Friday'. Returns the titlecased day of the week. * value (str): The value being validated as a day of the week. * blank (bool): If True, a blank string will be accepted. Defaults to False. * strip (bool, str, None): If Non...
def set_y(self, y):
    """Set the y position and reset x to the left margin.

    Negative ``y`` values are measured from the bottom (``self.h``).
    """
    self.x = self.l_margin
    self.y = y if y >= 0 else self.h + y
Set y position and reset x
def _write_cpr(self, f, cType, parameter) -> int: f.seek(0, 2) byte_loc = f.tell() block_size = CDF.CPR_BASE_SIZE64 + 4 section_type = CDF.CPR_ rfuA = 0 pCount = 1 cpr = bytearray(block_size) cpr[0:8] = struct.pack('>q', block_size) cpr[8:12] = str...
Write compression info to the end of the file in a CPR.
def leaders(self, current_page, **options):
    """Retrieve a page of leaders from the leaderboard.

    @param current_page [int] Page to retrieve.
    @param options Options used when retrieving the page.
    @return a page of leaders from the leaderboard.
    """
    return self.leaders_in(self.leaderboard_name, current_page, **options)
Retrieve a page of leaders from the leaderboard. @param current_page [int] Page to retrieve from the leaderboard. @param options [Hash] Options to be used when retrieving the page from the leaderboard. @return a page of leaders from the leaderboard.
def soviet_checksum(code): def sum_digits(code, offset=1): total = 0 for digit, index in zip(code[:7], count(offset)): total += int(digit) * index summed = (total / 11 * 11) return total - summed check = sum_digits(code, 1) if check == 10: check = sum_digi...
Courtesy of Sir Vlad Lavrov.
def _instance_parser(self, plugins):
    """Parse plugin instances, dispatching each to the class-instance
    or object-instance handler as appropriate.
    """
    for plugin in util.return_list(plugins):
        if inspect.isclass(plugin):
            self._handle_class_instance(plugin)
        else:
            self._handle_object_instance(plugin)
Internal method to parse instances of plugins. Determines whether each entry is a class or an object instance and calls the appropriate handler method.
def get_connect_redirect_url(self, request, socialaccount):
    """Return the default URL to redirect to after successfully
    connecting a social account; requires an authenticated user.
    """
    assert request.user.is_authenticated
    return reverse('socialaccount_connections')
Returns the default URL to redirect to after successfully connecting a social account.
def _consolidate_classpath(self, targets, classpath_products): entries_map = defaultdict(list) for (cp, target) in classpath_products.get_product_target_mappings_for_targets(targets, True): entries_map[target].append(cp) with self.invalidated(targets=targets, invalidate_dependents=True) as invalidatio...
Convert loose directories in classpath_products into jars.
def reportProgress(self, state, action, text=None, tick=None):
    """Forward progress information to the registered callback, if any.

    state: 'prep' | 'generate' | 'done' | 'error'
    action: e.g. 'start' when generation begins
    """
    callback = self.progressFunc
    if callback is None:
        return
    callback(state=state, action=action, text=text, tick=tick)
If we want to keep other code updated about our progress. state: 'prep' reading sources 'generate' making instances 'done' wrapping up 'error' reporting a problem action: 'start' begin generatin...
def _always_running_service(name): service_info = show(name) try: keep_alive = service_info['plist']['KeepAlive'] except KeyError: return False if isinstance(keep_alive, dict): for _file, value in six.iteritems(keep_alive.get('PathState', {})): if value is True and os...
Check if the service should always be running based on the KeepAlive Key in the service plist. :param str name: Service label, file name, or full path :return: True if the KeepAlive key is set to True, False if set to False or not set in the plist at all. :rtype: bool .. versionadded:: 2...
def total_area_per_neurite(neurites, neurite_type=NeuriteType.all):
    """Surface area of each neurite in a collection.

    The area is the sum of the areas of the neurite's sections; only
    neurites matching ``neurite_type`` are included.
    """
    selected = iter_neurites(neurites, filt=is_type(neurite_type))
    return [neurite.area for neurite in selected]
Surface area in a collection of neurites. The area is defined as the sum of the area of the sections.
def dragDrop(self, target, target2=None, modifiers=""): if modifiers != "": keyboard.keyDown(modifiers) if target2 is None: dragFrom = self._lastMatch dragTo = target else: dragFrom = target dragTo = target2 self.drag(dragFrom) ...
Performs a dragDrop operation. Holds down the mouse button on ``dragFrom``, moves the mouse to ``dragTo``, and releases the mouse button. ``modifiers`` may be a typeKeys() compatible string. The specified keys will be held during the drag-drop operation.
def parse_json(raw_data): orig_data = raw_data data = filter_leading_non_json_lines(raw_data) try: return json.loads(data) except: results = {} try: tokens = shlex.split(data) except: print "failed to parse json: "+ data raise f...
This version parses JSON from module return data only, skipping any leading non-JSON lines.
def taskotron_changed_outcome(config, message):
    """Match only Taskotron task results whose outcome changed.

    Useful when an object gets retested and the result differs from the
    previous run (e.g. FAILED -> PASSED).
    """
    if not taskotron_result_new(config, message):
        return False
    result = message['msg']['result']
    previous = result.get('prev_outcome')
    return previous is not None and result.get('outcome') != previous
Taskotron task outcome changed With this rule, you can limit messages to only those task results with changed outcomes. This is useful when an object (a build, an update, etc) gets retested and either the object itself or the environment changes and the task outcome is now different (e.g. FAILED ->...
def is_not_blocked(self, item: str) -> bool: assert item is not None item = self._encode_item(item) connection = self.__get_connection() key = self.__redis_conf['blacklist_template'].format(item) value = connection.get(key) if value is None: BlackRed.__release...
Check if an item is _not_ already on the blacklist :param str item: The item to check :return: True, when the item is _not_ on the blacklist :rtype: bool
def _finished_callback(self, batch_fut, todo):
    """Propagate a batch failure to the individual futures.

    Args:
        batch_fut: the batch future returned by running todo_tasklet.
        todo: (fut, option) pairs; each fut was returned by an add()
            call. Successful batches already resolved the futs, so this
            only handles the failure case.
    """
    self._running.remove(batch_fut)
    err = batch_fut.get_exception()
    if err is None:
        return
    tb = batch_fut.get_traceback()
    for fut, _option in todo:
        if not fut.done():
            fut.set_exception(err, tb)
Passes exception along. Args: batch_fut: the batch future returned by running todo_tasklet. todo: (fut, option) pair. fut is the future return by each add() call. If the batch fut was successful, it has already called fut.set_result() on other individual futs. This method only handles when the...
def load_config_vars(target_config, source_config):
    """Copy every public, non-None attribute from ``source_config``
    onto ``target_config``.

    @param target_config: config to load variables into
    @param source_config: config to read variables from
    """
    public_names = (a for a in dir(source_config) if not a.startswith('_'))
    for name in public_names:
        value = getattr(source_config, name)
        if value is not None:
            setattr(target_config, name, value)
Loads all attributes from source config into target config @type target_config: TestRunConfigManager @param target_config: Config to dump variables into @type source_config: TestRunConfigManager @param source_config: The other config @return: True
def list_policies(self):
    """List all configured policies.

    Supported methods:
        GET: /sys/policy. Produces: 200 application/json

    :return: the JSON response of the request.
    :rtype: dict
    """
    return self._adapter.get(url='/v1/sys/policy').json()
List all configured policies. Supported methods: GET: /sys/policy. Produces: 200 application/json :return: The JSON response of the request. :rtype: dict
def remove_note(self, note, octave=-1): res = [] for x in self.notes: if type(note) == str: if x.name != note: res.append(x) else: if x.octave != octave and octave != -1: res.append(x) ...
Remove note from container. The note can either be a Note object or a string representing the note's name. If no specific octave is given, the note gets removed in every octave.
async def get_googlecast_settings(self) -> List[Setting]:
    """Get Googlecast settings from the device."""
    raw = await self.services["system"]["getWuTangInfo"]({})
    return [Setting.make(**entry) for entry in raw]
Get Googlecast settings.
def handle_command(editor, input_string): m = COMMAND_GRAMMAR.match(input_string) if m is None: return variables = m.variables() command = variables.get('command') go_to_line = variables.get('go_to_line') shell_command = variables.get('shell_command') if go_to_line is not None: ...
Handle commands entered on the Vi command line.
def data_directory():
    """Return the absolute path of the package's ``data`` directory."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, "data")
Return the absolute path to the directory containing the package data.
def offer_pdf(self, offer_id):
    """Open a PDF of an offer.

    :param offer_id: the offer id
    :return: dict
    """
    return self._create_get_request(
        resource=OFFERS, billomat_id=offer_id, command=PDF)
Opens a pdf of an offer :param offer_id: the offer id :return: dict
def get_site_amplification(self, C, sites): ampl = np.zeros(sites.vs30.shape) ampl[sites.vs30measured] = (C["d0_obs"] + C["d1_obs"] * np.log(sites.vs30[sites.vs30measured])) idx = np.logical_not(sites.vs30measured) ampl[idx] = (C["d0_inf"] + C["d1_inf"...
Returns the linear site amplification term depending on whether the Vs30 is observed of inferred
def handle_overrides(graph, overrides): for key in overrides: levels = key.split('.') part = graph for lvl in levels[:-1]: try: part = part[lvl] except KeyError: raise KeyError("'%s' override failed at '%s'", (key, lvl)) try: ...
Handle any overrides for this model configuration. Parameters ---------- graph : dict or object A dictionary (or an ObjectProxy) containing the object graph loaded from a YAML file. overrides : dict A dictionary containing overrides to apply. The location of the override...
def summary_permutation(context_counts, context_to_mut, seq_context, gene_seq, score_dir, num_permutations=10000, min_frac=0.0, min_recur=2, ...
Performs null-permutations and summarizes the results as features over the gene. Parameters ---------- context_counts : pd.Series number of mutations for each context context_to_mut : dict dictionary mapping nucleotide context to a list of observed somatic base changes. ...
def add_role(ctx, role): if role is None: log('Specify the role with --role') return if ctx.obj['username'] is None: log('Specify the username with --username') return change_user = ctx.obj['db'].objectmodels['user'].find_one({ 'name': ctx.obj['username'] }) i...
Grant a role to an existing user
def group(self, indent: int = DEFAULT_INDENT, add_line: bool = True) -> _TextGroup:
    """Return a context manager that indents each printed line.

    :param indent: number of spaces to indent.
    :param add_line: if True, a new line is printed after the group.
    :return: a _TextGroup context manager.
    """
    return _TextGroup(self, indent, add_line)
Returns a context manager which adds an indentation before each line. :param indent: Number of spaces to print. :param add_line: If True, a new line will be printed after the group. :return: A TextGroup context manager.
def estimateAbsoluteMagnitude(spectralType): from .astroclasses import SpectralType specType = SpectralType(spectralType) if specType.classLetter == '': return np.nan elif specType.classNumber == '': specType.classNumber = 5 if specType.lumType == '': specType.lumType = 'V' ...
Uses the spectral type to lookup an approximate absolute magnitude for the star.
def revoke(self, target, **prefs): hash_algo = prefs.pop('hash', None) if isinstance(target, PGPUID): sig_type = SignatureType.CertRevocation elif isinstance(target, PGPKey): if target.is_primary: sig_type = SignatureType.KeyRevocation else: ...
Revoke a key, a subkey, or all current certification signatures of a User ID that were generated by this key so far. :param target: The key to revoke :type target: :py:obj:`PGPKey`, :py:obj:`PGPUID` :raises: :py:exc:`~pgpy.errors.PGPError` if the key is passphrase-protected and has not been unl...
def execute_and_commit(*args, **kwargs):
    """Execute a SQL statement and commit it.

    @return: the cursor used for the execution.
    """
    db, cursor = CoyoteDb.execute(*args, **kwargs)
    db.commit()
    return cursor
Executes and commits the SQL statement @return: the cursor used for the execution
def get_archiver(self, kind):
    """Return an instance of the archiver class for the given kind.

    :param kind: archive kind ('tar', 'tbz2', 'tgz' or 'zip')
    """
    archiver_classes = {
        'tar': TarArchiver,
        'tbz2': Tbz2Archiver,
        'tgz': TgzArchiver,
        'zip': ZipArchiver,
    }
    return archiver_classes[kind]()
Returns instance of archiver class specific to given kind :param kind: archive kind
def checkscript(self, content): if "VERSION" not in self.__capabilities: raise NotImplementedError( "server does not support CHECKSCRIPT command") content = tools.to_bytes(content) content = tools.to_bytes("{%d+}" % len(content)) + CRLF + content code, data = ...
Check whether a script is valid See MANAGESIEVE specifications, section 2.12 :param content: script's content :rtype: boolean
def decompose(miz_file: Path, output_folder: Path): mission_folder, assets_folder = NewMiz._get_subfolders(output_folder) NewMiz._wipe_folders(mission_folder, assets_folder) LOGGER.info('unzipping mission file') with Miz(miz_file) as miz: version = miz.mission.d['version'] ...
Decompose this Miz into json Args: output_folder: folder to output the json structure as a Path miz_file: MIZ file path as a Path
def from_val(val_schema): definition = getattr(val_schema, "definition", val_schema) if isinstance( val_schema, BaseSchema) else val_schema if isinstance(definition, dict): return _dict_to_teleport(definition) if isinstance(definition, list): if len(definition) == 1: retu...
Serialize a val schema to teleport.
def deserialize_encryption_context(serialized_encryption_context): if len(serialized_encryption_context) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE: raise SerializationError("Serialized context is too long.") if serialized_encryption_context == b"": _LOGGER.debug("No encryption c...
Deserializes the contents of a byte string into a dictionary. :param bytes serialized_encryption_context: Source byte string containing serialized dictionary :returns: Deserialized encryption context :rtype: dict :raises SerializationError: if serialized encryption context is too large :raises Seri...