code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def pop_callback(obj): callbacks = obj._callbacks if not callbacks: return if isinstance(callbacks, Node): node = callbacks obj._callbacks = None else: node = callbacks.first callbacks.remove(node) if not callbacks: obj._callbacks = None return node.data, node.extra
Pop a single callback.
def overwrite_file_check(args, filename): if not args['overwrite'] and os.path.exists(filename): if args['no_overwrite']: overwrite = False else: try: overwrite = confirm_input(input('Overwrite {0}? (yes/no): ' .format(filename))) except (KeyboardInterrupt, EOFError): sys.exit() if not overwrite: new_filename = modify_filename_id(filename) while os.path.exists(new_filename): new_filename = modify_filename_id(new_filename) return new_filename return filename
If filename exists, overwrite or modify it to be unique.
def add_scan_host_detail(self, scan_id, host='', name='', value=''): self.scan_collection.add_result(scan_id, ResultType.HOST_DETAIL, host, name, value)
Adds a host detail result to scan_id scan.
def iter_previewers(self, previewers=None): if self.entry_point_group is not None: self.load_entry_point_group(self.entry_point_group) self.entry_point_group = None previewers = previewers or \ self.app.config.get('PREVIEWER_PREFERENCE', []) for item in previewers: if item in self.previewers: yield self.previewers[item]
Get previewers in the order given by the PREVIEWER_PREFERENCE config.
def unary_operator(op): valid_ops = {'~'} if op not in valid_ops: raise ValueError("Invalid unary operator %s." % op) def unary_operator(self): if isinstance(self, NumericalExpression): return NumExprFilter.create( "{op}({expr})".format(op=op, expr=self._expr), self.inputs, ) else: return NumExprFilter.create("{op}x_0".format(op=op), (self,)) unary_operator.__doc__ = "Unary Operator: '%s'" % op return unary_operator
Factory function for making unary operator methods for Filters.
def next_header_class(self): if self._next_header_class_key == '': return None key = getattr(self, self._next_header_class_key) rv = self._next_header_map.get(key, None) if rv is None: log_warn("No class exists to handle next header value {}".format(key)) return rv
Return class of next header, if known.
def __get_connection_info(): conn_info = {} try: conn_info['hostname'] = __opts__['mysql_auth']['hostname'] conn_info['username'] = __opts__['mysql_auth']['username'] conn_info['password'] = __opts__['mysql_auth']['password'] conn_info['database'] = __opts__['mysql_auth']['database'] conn_info['auth_sql'] = __opts__['mysql_auth']['auth_sql'] except KeyError as e: log.error('%s does not exist', e) return None return conn_info
Grab MySQL Connection Details
def write_byte(self, cmd, value):
    """Write the 8-bit *value* to command register *cmd*, then log it.

    NOTE(review): assumes self.bus is an smbus-style I2C object and
    self.address is the device address — confirm against the class.
    """
    self.bus.write_byte_data(self.address, cmd, value)
    self.log.debug(
        "write_byte: Wrote 0x%02X to command register 0x%02X" % (
            value, cmd
        )
    )
Writes an 8-bit byte to the specified command register
def reset(self): self._episode_steps = 0 if self._episode_count: self._restart() self._episode_count += 1 logging.info("Starting episode: %s", self._episode_count) self._metrics.increment_episode() self._last_score = [0] * self._num_agents self._state = environment.StepType.FIRST if self._realtime: self._last_step_time = time.time() self._target_step = 0 return self._observe()
Start a new episode.
def _process_response(self, reply): self._advance_cluster_time(reply.get('$clusterTime')) self._advance_operation_time(reply.get('operationTime')) if self._in_transaction and self._transaction.sharded: recovery_token = reply.get('recoveryToken') if recovery_token: self._transaction.recovery_token = recovery_token
Process a response to a command that was run with this session.
def _ValidateReturnValue(return_value, type_check_dict): return_check = type_check_dict.get("returns", None) if not return_check: return [] messages = [] if not _ValidateValue(return_value, return_check): message = ("Invalid return value '%s'. Expected %s" % (return_value, _FormatTypeCheck(return_check))) messages.append(message) return messages
Validate return value and return a list of error messages.
def validate_sort_fields(self): sort_fields = ','.join(self.options.sort_fields) if sort_fields == '*': sort_fields = self.get_output_fields() return formatting.validate_sort_fields(sort_fields or config.sort_fields)
Take care of sorting.
def update_task_redundancy(config, task_id, redundancy): if task_id is None: msg = ("Are you sure you want to update all the tasks redundancy?") if click.confirm(msg): res = _update_tasks_redundancy(config, task_id, redundancy) click.echo(res) else: click.echo("Aborting.") else: res = _update_tasks_redundancy(config, task_id, redundancy) click.echo(res)
Update task redundancy for a project.
def GetWsdlMethod(ns, wsdlName):
    """Get the wsdl method registered under (ns, wsdlName).

    Loads the owning managed type lazily if the entry is still stored as
    loader arguments rather than a ManagedMethod.

    Raises:
        KeyError: if no method is registered for the (ns, wsdlName) pair.
    """
    with _lazyLock:
        method = _wsdlMethodMap[(ns, wsdlName)]
        if isinstance(method, ManagedMethod):
            # Already fully realized.
            return method
        elif method:
            # Entry holds lazy-load args; load the type, then re-fetch.
            LoadManagedType(*method)
            return _wsdlMethodMap[(ns, wsdlName)]
        else:
            # Bug fix: original raised with undefined name `name`,
            # producing a NameError instead of the intended KeyError.
            raise KeyError("{0} {1}".format(ns, wsdlName))
Get wsdl method from ns, wsdlName
def setup_logger(logging_level, logging_format): logger = logging.getLogger("ray") if type(logging_level) is str: logging_level = logging.getLevelName(logging_level.upper()) logger.setLevel(logging_level) global _default_handler if _default_handler is None: _default_handler = logging.StreamHandler() logger.addHandler(_default_handler) _default_handler.setFormatter(logging.Formatter(logging_format)) logger.propagate = False
Setup default logging for ray.
def _setChoiceDict(self):
    """Create a min-match dictionary mapping each choice to itself."""
    self.choiceDict = minmatch.MinMatchDict()
    # Key and value are the same string, so a minimum-unique-prefix
    # lookup resolves to the full choice name.
    for c in self.choice:
        self.choiceDict.add(c, c)
Create min-match dictionary for choice list
def _extract_node_text(node):
    """Extract and normalize all text content from a given lxml node.

    Each text chunk under the node is unescaped, coerced to text, and
    stripped; the non-empty pieces are joined with single spaces.
    """
    texts = map(
        six.text_type.strip, map(six.text_type, map(unescape, node.xpath(".//text()")))
    )
    return " ".join(text for text in texts if text)
Extract text from a given lxml node.
def mode_new_collection(): print globals()['mode_new_collection'].__doc__ collection_name = raw_input("Collection name: ") item_attr_list = [] collection_node_id = None if collection_name: collection_node_id = insert_node(name=collection_name, value=None) insert_query(name='select_link_node_from_node.sql', node_id=collection_node_id) item_attr = True while item_attr: item_attr = raw_input("Add a collection item attribute name: ") if item_attr: item_attr_list.append(item_attr) selection = collection_name while selection: selection = select([ 'Add item', ]) if selection == 'Add item': add_item_with_attributes_to_collection( collection_name=collection_name, collection_node_id=collection_node_id, item_attr_list=item_attr_list) if collection_node_id: print "Added collection name '{0}' with node id: {1}".format(collection_name, collection_node_id)
Create a new collection of items with common attributes.
def _get_username_for_key(self): response = self.client.get_access_key_last_used( AccessKeyId=self.compromised_resource['access_key_id'] ) username = response['UserName'] return username
Find the user for a given access key
def compute_laplacian_matrix(affinity_matrix, method='auto', **kwargs): if method == 'auto': method = 'geometric' return Laplacian.init(method, **kwargs).laplacian_matrix(affinity_matrix)
Compute the laplacian matrix with the given method
def _format_data(self, name=None): is_justify = not (self.inferred_type in ('string', 'unicode') or (self.inferred_type == 'categorical' and is_object_dtype(self.categories))) return format_object_summary(self, self._formatter_func, is_justify=is_justify, name=name)
Return the formatted data as a unicode string.
def function_body(self): if self.shared.function_vars > 0: const = self.symtab.insert_constant("0{}".format(self.shared.function_vars * 4), SharedData.TYPES.UNSIGNED) self.arithmetic("-", "%15", const, "%15") self.newline_label(self.shared.function_name + "_body", True, True)
Inserts a local variable initialization and body label
def treble(self, treble):
    """Set the speaker's treble, clamped to the valid range [-10, 10]."""
    level = max(-10, min(int(treble), 10))
    self.renderingControl.SetTreble([
        ('InstanceID', 0),
        ('DesiredTreble', level)
    ])
Set the speaker's treble.
def push_source(self, newstream, newfile=None): "Push an input source onto the lexer's input source stack." if isinstance(newstream, basestring): newstream = StringIO(newstream) self.filestack.appendleft((self.infile, self.instream, self.lineno)) self.infile = newfile self.instream = newstream self.lineno = 1
Push an input source onto the lexer's input source stack.
def invocation():
    """Reconstruct the shell command line used to invoke this program."""
    parts = [sys.executable] + list(sys.argv)
    return " ".join(shlex.quote(part) for part in parts)
Reconstructs the shell invocation for this Python program.
def checkin(self): self._query_instance.checkin(self.place_id, self._query_instance.sensor)
Checks an anonymous user in.
def savepoint(self):
    """Store the last displayed image as a savepoint, then clear it."""
    image = self._last_image
    if image:
        self._savepoints.append(image)
        # Cleared so the same image is not saved twice.
        self._last_image = None
Copies the last displayed image.
def utc_dt_to_local_dt(dtm): utc_zone = mktz("UTC") if dtm.tzinfo is not None and dtm.tzinfo != utc_zone: raise ValueError( "Expected dtm without tzinfo or with UTC, not %r" % ( dtm.tzinfo ) ) if dtm.tzinfo is None: dtm = dtm.replace(tzinfo=utc_zone) return dtm.astimezone(mktz())
Convert a UTC datetime to datetime in local timezone
def tostr(self): element = _transform.SVGFigure(self.width, self.height) element.append(self) svgstr = element.to_str() return svgstr
Export SVG as a string
def mkdir(dir, enter):
    """Create directory *dir* (including parents) if it does not exist.

    *enter* is unused here but kept for interface compatibility with
    existing callers.
    """
    if not os.path.exists(dir):
        # exist_ok closes the check-then-create race: another process may
        # create the directory between the exists() test and makedirs().
        os.makedirs(dir, exist_ok=True)
Create the directory if it does not already exist.
def patch(self, urls=None, **overrides): if urls is not None: overrides['urls'] = urls return self.where(accept='PATCH', **overrides)
Sets the acceptable HTTP method to PATCH
def _batchify(self, batch_data, batch_label, start=0): i = start batch_size = self.batch_size try: while i < batch_size: label, s = self.next_sample() data = self.imdecode(s) try: self.check_valid_image([data]) label = self._parse_label(label) data, label = self.augmentation_transform(data, label) self._check_valid_label(label) except RuntimeError as e: logging.debug('Invalid image, skipping: %s', str(e)) continue for datum in [data]: assert i < batch_size, 'Batch size must be multiples of augmenter output length' batch_data[i] = self.postprocess_data(datum) num_object = label.shape[0] batch_label[i][0:num_object] = nd.array(label) if num_object < batch_label[i].shape[0]: batch_label[i][num_object:] = -1 i += 1 except StopIteration: if not i: raise StopIteration return i
Override the helper function for batchifying data
def apply_deprecations(cls, path): "Convert any potentially deprecated paths and issue appropriate warnings" split = path.split('.') msg = 'Element {old} deprecated. Use {new} instead.' for old, new in cls.deprecations: if split[0] == old: parsewarning.warning(msg.format(old=old, new=new)) return '.'.join([new] + split[1:]) return path
Convert any potentially deprecated paths and issue appropriate warnings
def cache_invalidate_by_tags(tags, cache=None): if isinstance(tags, basestring): tags = [tags] tag_keys = [CACHE_TAG_KEY % tag for tag in tags if tag] if not tag_keys: raise ValueError('Attr tags invalid') if cache is None: cache = default_cache tag_keys_for_delete = [] if cache.__class__.__name__ == 'RedisCache': from django_redis.exceptions import ConnectionInterrupted try: redis_client = cache.client.get_client() for tag_key in tag_keys: keys = redis_client.smembers(tag_key) if keys: cache.delete_many(keys) tag_keys_for_delete.append(tag_key) except ConnectionInterrupted: pass else: for tag_key in tag_keys: keys = cache.get(tag_key) if keys: cache.delete_many(keys) tag_keys_for_delete.append(tag_key) if tag_keys_for_delete: cache.delete_many(tag_keys_for_delete)
Clear cache by tags.
def part_lister(mpupload, part_number_marker=None):
    """Generator yielding every part of a multipart upload, page by page."""
    keep_paging = True
    while keep_paging:
        page = mpupload.get_all_parts(None, part_number_marker)
        for item in page:
            yield item
        # Advance the marker and continue while the listing is truncated.
        part_number_marker = mpupload.next_part_number_marker
        keep_paging = mpupload.is_truncated
A generator function for listing parts of a multipart upload.
def compress(self): for ast_token in self.ast_tokens: if type(ast_token) in self.dispatcher: self.dispatcher[type(ast_token)](ast_token) else: self.dispatcher['default'](ast_token)
Main function of compression.
def session(self, create=True): if hasattr(self.local, 'session'): return self.local.session else: if create: s = Session(self.name) self.local.session = s return s
Returns the thread-local session, creating a default one if needed.
def sign(ctx, filename): if filename: tx = filename.read() else: tx = sys.stdin.read() tx = TransactionBuilder(eval(tx), bitshares_instance=ctx.bitshares) tx.appendMissingSignatures() tx.sign() print_tx(tx.json())
Sign a json-formatted transaction
def new(cls, arg):
    """Create a new Parameter object from the given ParameterArgument.

    For kind 'file' the file's content is read; for kind 'cli' the value
    is used directly. The first registered source class that supports
    *arg* is instantiated with that content.

    Raises:
        Exception: if the file does not exist or no source supports *arg*.
    """
    content = None
    if arg.kind == 'file':
        if os.path.exists(arg.value):
            with open(arg.value, 'r') as f:
                content = f.read()
        else:
            raise Exception('File does not exist: {}'.format(arg.value))
    elif arg.kind == 'cli':
        content = arg.value
    for source_cls in cls.sources:
        if source_cls.supports_source(arg):
            return source_cls(content)
    msg = 'Unsupported Parameter Source "{}"'
    # Bug fix: original raised `Execption` (typo), which itself raised a
    # NameError instead of the intended error.
    raise Exception(msg.format(arg.value))
Creates a new Parameter object from the given ParameterArgument.
def clear_layout(layout: QLayout) -> None: if layout is not None: while layout.count(): item = layout.takeAt(0) widget = item.widget() if widget is not None: widget.deleteLater() else: clear_layout(item.layout())
Clear the layout off all its components
def visit_Yield(self, node): self.generic_visit(node) self.combine(self.current, node.value)
Compute yield type and merges it with others yield type.
def check_create_folder(filename):
    """Ensure the directory that will contain *filename* exists.

    Creates the directory (including parents) when missing. Bug fix: a
    bare filename with no directory component is now a no-op — the
    original called os.makedirs('') which raises FileNotFoundError.
    """
    directory = os.path.dirname(filename)
    if directory:
        os.makedirs(directory, exist_ok=True)
Check if the folder exists; if not, create it.
def visit_SetComp(self, node: AST, dfltChaining: bool = True) -> str: return f"{{{self.visit(node.elt)} " \ f"{' '.join(self.visit(gen) for gen in node.generators)}}}"
Return `node`s representation as set comprehension.
def view_num_units(token, dstore): taxo = dstore['assetcol/tagcol/taxonomy'].value counts = collections.Counter() for asset in dstore['assetcol']: counts[taxo[asset['taxonomy']]] += asset['number'] data = sorted(counts.items()) data.append(('*ALL*', sum(d[1] for d in data))) return rst_table(data, header=['taxonomy', 'num_units'])
Display the number of units by taxonomy
def _get_connection(self, x, y): if (self._width is None or self._height is None or self._root_chip is None): return self.connections[None] else: eth_chip = spinn5_local_eth_coord(x, y, self._width, self._height, *self._root_chip) conn = self.connections.get(eth_chip) if conn is not None: return conn else: return self.connections[None]
Get the appropriate connection for a chip.
def add(self, *tasks):
    """Add each task's underlying graph node to this node.

    Returns self so calls can be chained (fluent interface).
    """
    nodes = [x.node for x in tasks]
    self.node.add(*nodes)
    return self
Interfaces the GraphNode `add` method
def load(self, url, offset=0, length=-1): file_only = url.startswith(('/', '.')) filename = from_file_url(url) if filename != url: file_only = True url = filename try: afile = open(url, 'rb') except IOError: if file_only: raise return super(LocalFileLoader, self).load(url, offset, length) if offset > 0: afile.seek(offset) if length >= 0: return LimitReader(afile, length) else: return afile
Load a file-like reader from the local file system
def _disconnect(self): if not self.protocol or not self.protocol.transport: self.protocol = None return _LOGGER.info('Disconnecting from gateway') self.protocol.transport.close() self.protocol = None
Disconnect from the transport.
def _get_tables(self, ods): childnodes = ods.spreadsheet.childNodes qname_childnodes = [(s.qname[1], s) for s in childnodes] return [node for name, node in qname_childnodes if name == u"table"]
Returns list of table nodes from ods object
def parse_command_line(self): result = _Distribution.parse_command_line(self) if self.features: self._finalize_features() return result
Process features after parsing command line options
def _from_dict(cls, _dict): args = {} if 'classifier_id' in _dict: args['classifier_id'] = _dict.get('classifier_id') if 'url' in _dict: args['url'] = _dict.get('url') if 'text' in _dict: args['text'] = _dict.get('text') if 'top_class' in _dict: args['top_class'] = _dict.get('top_class') if 'classes' in _dict: args['classes'] = [ ClassifiedClass._from_dict(x) for x in (_dict.get('classes')) ] return cls(**args)
Initialize a Classification object from a json dictionary.
def validate(self, value):
    """Validate *value* via the parent class, then coerce to bool.

    None passes through unchanged; any other validated value becomes a
    Python boolean.
    """
    value = super(Boolean, self).validate(value)
    if value is not None:
        value = bool(value)
    return value
Always returns a Python boolean.
def clear_cache_root(): logger.debug("Clearing root cache") if os.path.isdir(_root_songcache_dir): for filename in os.listdir(_root_songcache_dir): file_path = os.path.join(_root_songcache_dir, filename) try: if os.path.isfile(file_path): os.unlink(file_path) elif os.path.isdir(file_path): shutil.rmtree(file_path) except PermissionError: pass except Exception as e: logger.exception(e) logger.debug("Root cache cleared")
Clears everything in the song cache
def _is_excluded(self, path, dir_only):
    """Check if *path* matches an exclusion pattern.

    Returns a falsy value when no negative patterns are configured (the
    container itself, via short-circuit), otherwise the match result.
    NOTE(review): dir_only is unused in this body — confirm intent.
    """
    return self.npatterns and self._match_excluded(path, self.npatterns)
Check if file is excluded.
def _clear(self):
    """Clear the current canvas using the palette's background style."""
    (colour, attr, bg) = self.palette["background"]
    self._canvas.clear_buffer(colour, attr, bg)
Clear the current canvas.
def add_option(self, K=None, price=None, St=None, kind="call", pos="long"): kinds = { "call": Call, "Call": Call, "c": Call, "C": Call, "put": Put, "Put": Put, "p": Put, "P": Put, } St = self.St if St is None else St option = kinds[kind](St=St, K=K, price=price, pos=pos) self.options.append(option)
Add an option to the object's `options` container.
def rpm_eval(macro): try: value = subprocess.Popen( ['rpm', '--eval', macro], stdout=subprocess.PIPE).communicate()[0].strip() except OSError: logger.error('Failed to get value of {0} rpm macro'.format( macro), exc_info=True) value = b'' return console_to_str(value)
Get value of given macro using rpm tool
def _log_task_info(headers, extra_task_info=None): ran_at = time.time() task_eta = float(headers.get('X-Appengine-Tasketa', 0.0)) task_info = { 'retry_count': headers.get('X-Appengine-Taskretrycount', ''), 'execution_count': headers.get('X-Appengine-Taskexecutioncount', ''), 'task_eta': task_eta, 'ran': ran_at, 'gae_latency_seconds': ran_at - task_eta } if extra_task_info: task_info['extra'] = extra_task_info logging.debug('TASK-INFO: %s', json.dumps(task_info))
Processes the header from task requests to log analytical data.
def serialize(self): res = '<?xml version="1.0" encoding="UTF-8"?>' for ns in self.namespaces: self.top_grammar.attr["xmlns:" + self.namespaces[ns]] = ns res += self.top_grammar.start_tag() for ch in self.top_grammar.children: res += ch.serialize() res += self.tree.serialize() for d in self.global_defs: res += self.global_defs[d].serialize() for i in self.identities: res += self.identities[i].serialize() return res + self.top_grammar.end_tag()
Return the string representation of the receiver.
def getCoeffStr(self): txt = '' for key, val in self.coeffs.items(): txt += '%s = %s\n' % (key, val) return txt
Get the distortion coeffs in a formatted string.
def escape_header(val):
    """Escape *val* so it can be used safely in a MIME header.

    ASCII values are percent-quoted directly; non-ASCII values fall back
    to the RFC 2231 extended form ("utf-8''...").
    """
    if val is None:
        return None
    try:
        escaped = quote(val, encoding="ascii", safe="/ ")
    except ValueError:
        # UnicodeEncodeError (a ValueError) means non-ASCII input.
        escaped = "utf-8''" + quote(val, encoding="utf-8", safe="/ ")
    return escaped
Escapes a value so that it can be used in a mime header
def discover_settings(conf_base=None): settings = { 'zmq_prefix': '', 'libzmq_extension': False, 'no_libzmq_extension': False, 'skip_check_zmq': False, 'build_ext': {}, 'bdist_egg': {}, } if sys.platform.startswith('win'): settings['have_sys_un_h'] = False if conf_base: merge(settings, load_config('config', conf_base)) merge(settings, get_cfg_args()) merge(settings, get_eargs()) return settings
Discover custom settings for ZMQ path
def _perm_dict_from_pyxb(self, access_pyxb): subj_dict = self._subj_dict_from_pyxb(access_pyxb) return self._perm_dict_from_subj_dict(subj_dict)
Return dict representation of AccessPolicy PyXB obj.
def RunOnce(self): global WEBAUTH_MANAGER WEBAUTH_MANAGER = BaseWebAuthManager.GetPlugin( config.CONFIG["AdminUI.webauth_manager"])() logging.info("Using webauth manager %s", WEBAUTH_MANAGER)
Run this once on init.
def predict(self, X):
    """Return predictions for the input test cases in X.

    NOTE(review): delegates to the private cost function with a zero
    regularization term on the unrolled thetas — presumably __cost
    returns the hypothesis output when invoked this way; verify against
    __cost's contract before relying on this.
    """
    return self.__cost(self.__unroll(self.__thetas), 0, np.matrix(X))
Returns predictions of input test cases.
def _generate_time_steps(self, trajectory_list): for time_step in env_problem.EnvProblem._generate_time_steps( self, trajectory_list): frame_np = np.array(time_step.pop(env_problem.OBSERVATION_FIELD)) frame_np = frame_np.reshape( [self.frame_height, self.frame_width, self.num_channels]) frame = png.from_array(frame_np, "RGB", info={"bitdepth": 8}) frame_buffer = six.BytesIO() frame.save(frame_buffer) time_step[_IMAGE_ENCODED_FIELD] = [frame_buffer.getvalue()] time_step[_IMAGE_FORMAT_FIELD] = [_FORMAT] time_step[_IMAGE_HEIGHT_FIELD] = [self.frame_height] time_step[_IMAGE_WIDTH_FIELD] = [self.frame_width] time_step[_FRAME_NUMBER_FIELD] = time_step[env_problem.TIMESTEP_FIELD] yield time_step
Transforms time step observations to frames of a video.
def pdebug(*args, **kwargs): if should_msg(kwargs.get("groups", ["debug"])): global colorama_init if not colorama_init: colorama_init = True colorama.init() args = indent_text(*args, **kwargs) sys.stderr.write(colorama.Fore.CYAN) sys.stderr.write("".join(args)) sys.stderr.write(colorama.Fore.RESET) sys.stderr.write("\n")
Print formatted debug output to stderr with indentation control.
def changes(self, adding=None, deleting=None): if deleting is not None: for deleted in deleting: self.root_node.remove(deleted) if adding is not None: for added in adding: self.root_node.add(added) added = list() removed = list() for csn in self._get_conflict_set_nodes(): c_added, c_removed = csn.get_activations() added.extend(c_added) removed.extend(c_removed) return (added, removed)
Pass the given changes to the root_node.
def _raw(s):
    """Get a short raw representation of *s*, truncating if too long.

    Lists are flattened recursively, one item per line, before display.
    """
    if isinstance(s, list):
        s = "\n".join(_raw(item) for item in s)
    if s == EOF:
        return "EOF"
    # repr() then drop the surrounding quote characters.
    text = repr(s)[1:-1]
    if len(text) > 15:
        text = text[:15] + "..."
    return text
Get raw representation of s, truncating if too long.
def output_to_graphviz(file, namer=_graphviz_default_namer, block=None): print(block_to_graphviz_string(block, namer), file=file)
Walk the block and output it in graphviz format to the open file.
def create_shared_noise(count):
    """Create a large array of noise to be shared by all workers.

    The seed is fixed so every worker regenerates the identical table.
    """
    rng = np.random.RandomState(123)
    return rng.randn(count).astype(np.float32)
Create a large array of noise to be shared by all workers.
def addContinuousSearchOptions(parser): addContinuousSetIdArgument(parser) addContinuousReferenceNameArgument(parser) addStartArgument(parser) addEndArgument(parser)
Adds common options to a continuous search command line parser.
def list_product_versions_for_build_configuration(id=None, name=None, page_size=200, page_index=0, sort="", q=""): data = list_product_versions_for_build_configuration_raw(id, name, page_size, page_index, sort, q) if data: return utils.format_json_list(data)
List all ProductVersions associated with a BuildConfiguration
def _purge_expired(self): time_horizon = time.time() - self._keep_time new_cache = {} for (k, v) in self._cache.items(): if v.timestamp > time_horizon: new_cache[k] = v self._cache = new_cache
Remove all expired entries from the cache.
def finalize(self, sched): super(AddCoro, self).finalize(sched) return self.result
Return a reference to the instance of the newly added coroutine.
def _is_suffix(self, t): return t not in NOT_SUFFIX and (t.replace('.', '') in SUFFIXES or t.replace('.', '') in SUFFIXES_LOWER)
Return true if t is a suffix.
def _filter_data(self, pattern): removed = [] filtered = [] for param in self.data: if not param[0].startswith(pattern): filtered.append(param) else: removed.append(param) self.data = filtered return removed
Removes parameters which match the pattern from the config data
def Betainc(a, b, x):
    """Regularized incomplete beta function op.

    Wraps scipy.special.betainc(a, b, x); the trailing comma is
    deliberate so the op returns a 1-tuple of outputs.
    """
    return sp.special.betainc(a, b, x),
Regularized incomplete beta function op.
def genslices_ndim(ndim, shape):
    """Generate all possible slice tuples for the first *ndim* axes of *shape*."""
    per_axis = (genslices(shape[axis]) for axis in range(ndim))
    yield from product(*per_axis)
Generate all possible slice tuples for 'shape'.
def _add_filter_node(root, filter_, value): filter_el = ElementTree.SubElement(root, 'Filter') filter_el.set('name', filter_.name) if filter_.type == 'boolean': if value is True or value.lower() in {'included', 'only'}: filter_el.set('excluded', '0') elif value is False or value.lower() == 'excluded': filter_el.set('excluded', '1') else: raise ValueError('Invalid value for boolean filter ({})' .format(value)) elif isinstance(value, list) or isinstance(value, tuple): filter_el.set('value', ','.join(map(str, value))) else: filter_el.set('value', str(value))
Adds filter xml node to root.
def refresh(self): if self.exists: self.delete() self.populate() self.open()
Refresh the cache by deleting the old one and creating a new one.
def _send_rpc(self, device_info, control_info, address, rpc_id, payload, poll_interval, timeout): write_address, write_data = control_info.format_rpc(address, rpc_id, payload) self._jlink.memory_write32(write_address, write_data) self._trigger_rpc(device_info) start = monotonic() now = start poll_address, poll_mask = control_info.poll_info() while (now - start) < timeout: time.sleep(poll_interval) value, = self._jlink.memory_read8(poll_address, 1) if value & poll_mask: break now = monotonic() if (now - start) >= timeout: raise HardwareError("Timeout waiting for RPC response", timeout=timeout, poll_interval=poll_interval) read_address, read_length = control_info.response_info() read_data = self._read_memory(read_address, read_length, join=True) return control_info.format_response(read_data)
Write and trigger an RPC.
def tob(data, enc='utf8'):
    """Coerce *data* to bytes, encoding text with *enc*."""
    if isinstance(data, six.text_type):
        return data.encode(enc)
    return bytes(data)
Convert anything to bytes
def remove_nullchars(block):
    """Strip leading NUL bytes while preserving 8-byte alignment.

    The count of removed NULs modulo 8 is re-padded in front, so the
    result keeps the same length-mod-8 as the input.
    """
    stripped = block.lstrip(b'\x00')
    removed = len(block) - len(stripped)
    return b'\x00' * (removed % 8) + stripped
Strips NULL chars taking care of bytes alignment.
def _get_program(self): return ansible_mitogen.target.get_small_file( context=self.service_context, path=self.path, )
Fetch the module binary from the master if necessary.
def _rules_from_env(self, val): val = val.split(':') if 'DEFAULT_RULES' in val: val = const.DEFAULT_RULES + [rule for rule in val if rule != 'DEFAULT_RULES'] return val
Transforms rules list from env-string to python.
def log_once(log_func, msg, *args, **kwargs):
    """Invoke *log_func* with *msg* only the first time *msg* is seen."""
    if msg in _LOG_ONCE_SEEN:
        return
    log_func(msg, *args, **kwargs)
    _LOG_ONCE_SEEN.add(msg)
Logs a message only once.
def reset_password(self, token=None, login_user=None): pwcol = self.options['password_column'] if not token: if "token" in request.view_args: token = request.view_args["token"] elif "token" in request.values: token = request.values["token"] else: raise OptionMissingError(("Missing 'token' option or 'token' view arg " "or 'token' GET paramater in 'reset_password' action")) user = self.find_by_token(token, salt="password-reset", max_age=self.options["reset_password_ttl"]) if user is None: if self.options["reset_password_error_message"]: flash(self.options["reset_password_error_message"], "error") current_context.exit(trigger_action_group="reset_password_failed") self.update_password_from_form(user) self.reset_password_signal.send(self, user=user) if (login_user is None and self.options["login_user_on_reset_password"]) or login_user: flask_login.login_user(user) return user
Resets the password of the user identified by the token
def publish_message_to_centrifugo(sender, instance, created, **kwargs): if created is True: client = Client("{0}api/".format(getattr(settings, "CENTRIFUGE_ADDRESS")), getattr(settings, "CENTRIFUGE_SECRET")) active_participants = [participation.participant.id for participation in Participation.objects.filter(thread=instance.thread, date_left__isnull=True).select_related('participant')] client.publish( build_channel(settings.CENTRIFUGO_MESSAGE_NAMESPACE, instance.thread.id, active_participants), { "id": instance.id, "body": instance.body, "sender": instance.sender.id, "thread": instance.thread.id, "sent_at": str(instance.sent_at), "is_notification": True, } )
Publishes each saved message to Centrifugo.
def master_tops(self): log.debug( 'The _ext_nodes master function has been renamed to _master_tops. ' 'To ensure compatibility when using older Salt masters we will ' 'continue to invoke the function as _ext_nodes until the ' 'Magnesium release.' ) load = {'cmd': '_ext_nodes', 'id': self.opts['id'], 'opts': self.opts} if self.auth: load['tok'] = self.auth.gen_token(b'salt') return salt.utils.data.decode(self.channel.send(load)) if six.PY2 \ else self.channel.send(load)
Return the metadata derived from the master_tops system
def bank_account_query(self, number, date, account_type, bank_id): return self.authenticated_query( self._bareq(number, date, account_type, bank_id) )
Bank account statement request
def remove_partition(self, partition): if partition in self._partitions: self._partitions.remove(partition) partition.replicas.remove(self) else: raise ValueError( 'Partition: {topic_id}:{partition_id} not found in broker ' '{broker_id}'.format( topic_id=partition.topic.id, partition_id=partition.partition_id, broker_id=self._id, ) )
Remove partition from partition list.
def json_data(self): return { "info_in_id": self.info_in_id, "info_out_id": self.info_out_id, "node_id": self.node_id, "network_id": self.network_id, }
The json representation of a transformation.
def write_device_config(self, device_config): if not self.capabilities.have_usb_mode(device_config._mode): raise yubikey_base.YubiKeyVersionError("USB mode: %02x not supported for %s" % (device_config._mode, self)) return self._device._write_config(device_config, SLOT.DEVICE_CONFIG)
Write a DEVICE_CONFIG to the YubiKey NEO.
def _add_plots_to_output(out, data): out["plot"] = {} diagram_plot = _add_diagram_plot(out, data) if diagram_plot: out["plot"]["diagram"] = diagram_plot scatter = _add_scatter_plot(out, data) if scatter: out["plot"]["scatter"] = scatter scatter_global = _add_global_scatter_plot(out, data) if scatter_global: out["plot"]["scatter_global"] = scatter_global return out
Add CNVkit plots summarizing called copy number values.
def _adjust_font(self): fnames = [k for k in Widget_fontdict.keys()] fl = list(map(str.lower, fnames)) if (not self.text_font) or self.text_font.lower() not in fl: self.text_font = "helv" i = fl.index(self.text_font.lower()) self.text_font = fnames[i] return
Ensure the font name is from our list and correctly spelled.
def check(self): for (path, handler) in self.handlers.items(): current_signature = self.signatures[path] new_signature = self.get_path_signature(path) if new_signature != current_signature: self.signatures[path] = new_signature handler.on_change(Event(path))
Check if a file is changed
def link_href(self, rel): link = self.link(rel) if (link is not None): link = link['href'] return(link)
Look for link with specified rel, return href from it or None.
def default(self) -> Optional[ScalarValue]: if self.mandatory: return None if self._default is not None: return self._default return (None if self.type.default is None else ArrayValue([self.type.default]))
Default value of the receiver, if any.
def chi_eff(mass1, mass2, spin1z, spin2z):
    """Return the mass-weighted effective aligned spin."""
    total_mass = mass1 + mass2
    return (mass1 * spin1z + mass2 * spin2z) / total_mass
Returns the effective spin from mass1, mass2, spin1z, and spin2z.