code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def convert(self, imtls, idx=0):
    """Convert a probability curve into a record of dtype ``imtls.dt``.

    :param imtls: DictArray instance
    :param idx: inner index to extract the data for
    """
    record = numpy.zeros(1, imtls.dt)
    for imt in imtls:
        record[imt] = self.array[imtls(imt), idx]
    return record[0]
Convert a probability curve into a record of dtype `imtls.dt`. :param imtls: DictArray instance :param idx: extract the data corresponding to the given inner index
def make_request(self, model, action, url_params=None, post_data=None):
    """Send a request to the API then validate, parse, and return the response.

    :param model: model the request targets (used to build the URL)
    :param action: HTTP method name
    :param url_params: optional dict of URL parameters
    :param post_data: optional request body
    :raises APIError: when the HTTP request itself fails
    :raises ValidationError: when the response is not valid
    """
    # BUG FIX: the original used a mutable default argument ({}), which is
    # shared across calls.
    if url_params is None:
        url_params = {}
    url = self._create_url(model, **url_params)
    headers = self._headers(action)
    try:
        response = requests.request(action, url, headers=headers, data=post_data)
    except Exception as e:
        raise APIError("There was an error communicating with Union: %s" % e)
    if not self._is_valid(response):
        raise ValidationError("The Union response returned an error: %s" % response.content)
    return self._parse_response(response)
Send request to API then validate, parse, and return the response
def get_symbol_dict(self, voigt=True, zero_index=False, **kwargs):
    """Create a summary dict mapping symbol strings (e.g. "C_11") to the
    tensor values that are not close to zero.

    Args:
        voigt (bool): use the voigt-notation tensor (default True)
        zero_index (bool): use zero-based indices in the symbols; defaults
            to False since tensor notation is conventionally one-indexed
        **kwargs: forwarded to np.isclose (e.g. atol, rtol)
    """
    source = self.voigt if voigt else self
    offset = 0 if zero_index else 1
    summary = {}
    for indices in self.get_grouped_indices(voigt=voigt, **kwargs):
        representative = indices[0]
        value = source[representative]
        if np.isclose(value, 0):
            continue
        label = self.symbol + '_' + ''.join(str(i + offset) for i in representative)
        summary[label] = value
    return summary
Creates a summary dict for tensor with associated symbol Args: voigt (bool): whether to get symbol dict for voigt notation tensor, as opposed to full notation, defaults to true zero_index (bool): whether to set initial index to zero, defaults to false, since tensor notations tend to use one-indexing, rather than zero indexing like python **kwargs: keyword args for np.isclose. Can take atol and rtol for absolute and relative tolerance, e. g. >>> tensor.get_symbol_dict(atol=1e-8) or >>> tensor.get_symbol_dict(rtol=1e-5) Returns: dict mapping symbol strings (e.g. "T_11") to the tensor values that are not close to zero within the given tolerances
def sequence_length(fasta):
    """Return a dict mapping each sequence id in a fasta file to its length."""
    return {record.id: len(record) for record in SeqIO.parse(fasta, "fasta")}
return a dict of the lengths of sequences in a fasta file
def _get_custom_contract(param_contract):
    """Return the name of the first custom contract appearing as a whole
    word in *param_contract*, or None (also None for non-string input)."""
    if not isinstance(param_contract, str):
        return None
    matches = (contract for contract in _CUSTOM_CONTRACTS
               if re.search(r"\b{0}\b".format(contract), param_contract))
    return next(matches, None)
Return the name of the matching custom contract if the parameter contract is a custom contract, None otherwise.
def on_delete(self, btn):
    """Toggle whether *btn*'s image is flagged for deletion, switching the
    button style to "danger" when flagged and back when kept."""
    if btn.flagged_for_delete:
        btn.button_style = ""
    else:
        btn.button_style = "danger"
    btn.flagged_for_delete = not btn.flagged_for_delete
Flag this image as delete or keep.
def on_exception(self, exception):
    """Record an exception raised in the streaming thread and log it."""
    self.streaming_exception = exception
    logger.error('Exception from stream!', exc_info=True)
An exception occurred in the streaming thread
def connectionJustEstablished(self):
    """Our SYN was acknowledged: build the application protocol for the new
    connection, tearing the connection down if setup fails."""
    assert not self.disconnecting
    assert not self.disconnected
    try:
        proto = self.factory.buildProtocol(PTCPAddress(
            self.peerAddressTuple, self.pseudoPortPair))
        proto.makeConnection(self)
    except BaseException:  # equivalent to the original bare except
        log.msg("Exception during PTCP connection setup.")
        log.err()
        self.loseConnection()
    else:
        self.protocol = proto
We sent out SYN, they acknowledged it. Congratulations, you have a new baby connection.
def load(self, id, *args, **kwargs):
    """Load a remote resource by id, running the pre/post-load hooks
    around the actual load and returning the post-processed response."""
    self._pre_load(id, *args, **kwargs)
    raw_response = self._load(id, *args, **kwargs)
    return self._post_load(raw_response, *args, **kwargs)
loads a remote resource by id
def dump(self, zone, output_dir, lenient, split, source, *sources):
    """Dump zone data populated from the given source(s) into YAML files
    under *output_dir* (split per-record when *split* is true)."""
    self.log.info('dump: zone=%s, sources=%s', zone, sources)
    source_names = [source] + list(sources)
    try:
        resolved = [self.providers[name] for name in source_names]
    except KeyError as e:
        raise Exception('Unknown source: {}'.format(e.args[0]))
    target_class = SplitYamlProvider if split else YamlProvider
    target = target_class('dump', output_dir)
    zone = Zone(zone, self.configured_sub_zones(zone))
    for provider in resolved:
        provider.populate(zone, lenient=lenient)
    plan = target.plan(zone)
    if plan is None:
        # nothing to change; apply an empty plan so the files still get written
        plan = Plan(zone, zone, [], False)
    target.apply(plan)
Dump zone data from the specified source
def ParseNetworkDataUsage(
        self, parser_mediator, cache=None, database=None, table=None,
        **unused_kwargs):
    """Parse the SRUM network data usage monitor table.

    Args:
        parser_mediator (ParserMediator): mediates interactions between
            parsers and other components, such as storage and dfvfs.
        cache (Optional[ESEDBCache]): identifier cache.
        database (Optional[pyesedb.file]): ESE database.
        table (Optional[pyesedb.table]): table.
    """
    self._ParseGUIDTable(
        parser_mediator, cache, database, table,
        self._NETWORK_DATA_USAGE_VALUES_MAP, SRUMNetworkDataUsageEventData)
Parses the network data usage monitor table. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. cache (Optional[ESEDBCache]): cache, which contains information about the identifiers stored in the SruDbIdMapTable table. database (Optional[pyesedb.file]): ESE database. table (Optional[pyesedb.table]): table.
def fin(self):
    """Mark this message as finished processing and notify the connection."""
    self.connection.fin(self.id)
    self.processed = True
Indicate that this message is finished processing
def _get_index_urls_locations(self, project_name):
    """Return the project page locations derived from self.index_urls,
    one canonical, slash-terminated URL per index."""
    def project_url(index_url):
        page = posixpath.join(
            index_url, urllib_parse.quote(canonicalize_name(project_name)))
        return page if page.endswith('/') else page + '/'
    return [project_url(index_url) for index_url in self.index_urls]
Returns the locations found via self.index_urls Checks the url_name on the main (first in the list) index and use this url_name to produce all locations
def generate_scheduling_block_id(num_blocks, project='test'):
    """Yield scheduling block ids of the form <YYYYMMDD>-<project>-sbiNNN."""
    date_str = strftime("%Y%m%d", gmtime())
    for index in range(num_blocks):
        yield '{}-{}-sbi{:03d}'.format(date_str, project, index)
Generate a scheduling_block id
def _quantize_wp(wp, nbits, qm, axis=0, **kwargs):
    """Quantize the weight blob *wp* to *nbits* using quantization mode *qm*.

    :param wp: numpy.array of weight parameters
    :param nbits: int, number of bits
    :param qm: quantization mode constant
    :param axis: axis for channel-wise linear quantization
    :param kwargs: may contain ``lut_function`` for the custom LUT mode
    :return: tuple (scale, bias, lut, quantized_wp); entries unused by the
        selected mode are None, quantized_wp is numpy.uint8 of wp's shape
    :raises NotImplementedError: for an unsupported quantization mode
    """
    scale = bias = lut = None
    if qm == _QUANTIZATION_MODE_LINEAR_QUANTIZATION:
        qw, scale, bias = _quantize_channelwise_linear(wp, nbits, axis)
    elif qm == _QUANTIZATION_MODE_LOOKUP_TABLE_KMEANS:
        lut, qw = _get_kmeans_lookup_table_and_weight(nbits, wp)
    elif qm == _QUANTIZATION_MODE_CUSTOM_LOOKUP_TABLE:
        if 'lut_function' not in kwargs:
            raise Exception('Custom lookup table quantization mode '
                            'selected but no lookup table function passed')
        lut_function = kwargs['lut_function']
        if not callable(lut_function):
            raise Exception('Argument for Lookup Table passed in but is '
                            'not callable')
        try:
            lut, qw = lut_function(nbits, wp)
        except Exception as e:
            # BUG FIX: Exception.message does not exist on Python 3;
            # use str(e) instead of e.message.
            raise Exception('{}\nCall to Lookup Table function failed'
                            .format(str(e)))
    elif qm == _QUANTIZATION_MODE_LOOKUP_TABLE_LINEAR:
        lut, qw = _get_linear_lookup_table_and_weight(nbits, wp)
    else:
        raise NotImplementedError('Quantization method "{}" not supported'.format(qm))
    quantized_wp = _np.uint8(qw)
    return scale, bias, lut, quantized_wp
Quantize the weight blob :param wp: numpy.array Weight parameters :param nbits: int Number of bits :param qm: Quantization mode :param lut_function: (``callable function``) Python callable representing a look-up table Returns ------- scale: numpy.array Per-channel scale bias: numpy.array Per-channel bias lut: numpy.array Lookup table quantized_wp: numpy.array Quantized weight of same shape as wp, with dtype numpy.uint8
def _get_frdata(stream, num, name, ctype=None):
    """Return the FrData structure for a channel, trying each channel type.

    Brute force-ish; this saves pulling the channel type from the TOC.
    """
    candidate_types = (ctype,) if ctype else ('adc', 'proc', 'sim')
    for candidate in candidate_types:
        reader = getattr(stream, 'ReadFr{0}Data'.format(candidate.title()))
        try:
            return reader(num, name)
        except IndexError as exc:
            if FRERR_NO_CHANNEL_OF_TYPE.match(str(exc)):
                continue
            raise
    raise ValueError("no Fr{{Adc,Proc,Sim}}Data structures with the "
                     "name {0}".format(name))
Brute force-ish method to return the FrData structure for a channel This saves on pulling the channel type from the TOC
def smart_guess_lexer(file_name, local_file):
    """Guess the Pygments lexer for a file.

    Compares a filename-based guess against a vim-modeline-based guess and
    keeps whichever reports the higher accuracy.
    """
    text = get_file_head(file_name)
    by_name, name_accuracy = guess_lexer_using_filename(
        local_file or file_name, text)
    by_modeline, modeline_accuracy = guess_lexer_using_modeline(text)
    chosen = by_name if by_name else None
    if by_modeline and modeline_accuracy and (
            not name_accuracy or modeline_accuracy > name_accuracy):
        chosen = by_modeline
    return chosen
Guess Pygments lexer for a file. Looks for a vim modeline in file contents, then compares the accuracy of that lexer with a second guess. The second guess looks up all lexers matching the file name, then runs a text analysis for the best choice.
def delete(self, key):
    """Queue deletion of the entity with *key* in the mutation buffer;
    commits and restarts the batch once the buffer reaches capacity."""
    self._cur_batch.delete(key)
    self._num_mutations += 1
    if self._num_mutations >= MAX_MUTATIONS_IN_BATCH:
        self.commit()
        self.begin()
Adds deletion of the entity with given key to the mutation buffer. If the mutation buffer reaches its capacity then this method commits all pending mutations from the buffer and empties it. Args: key: key of the entity which should be deleted
def most_seen_creators_by_works(work_kind=None, role_name=None, num=10):
    """Return a QuerySet of the *num* Creators associated with the most Works."""
    creators = Creator.objects.by_works(kind=work_kind, role_name=role_name)
    return creators[:num]
Returns a QuerySet of the Creators that are associated with the most Works.
def hmget(self, hashkey, keys, *args):
    """Emulate hmget: return the values stored at *keys* (plus any extra
    positional field names) in the hash, None for missing fields."""
    stored = self._get_hash(hashkey, 'HMGET')
    fields = self._list_or_args(keys, args)
    return [stored.get(self._encode(field)) for field in fields]
Emulate hmget.
def configure_stream_logger(logger='', level=None, formatter='%(levelname)-8s %(message)s'):
    """Configure the default stream handler for logging messages to the
    console, remove other logging handlers, and enable capturing warnings.

    :param str logger: The logger to add the stream handler for.
    :param level: The level to set the logger to, defaults to WARNING.
    :type level: None, int, str
    :param formatter: The format for console logging messages.
    :type formatter: str, :py:class:`logging.Formatter`
    :return: The new configured stream handler.
    :rtype: :py:class:`logging.StreamHandler`
    """
    level = level or logging.WARNING
    if isinstance(level, str):
        level_name = level
        level = getattr(logging, level_name, None)
        if level is None:
            # BUG FIX: the original concatenated the already-overwritten
            # (None) level into the message, raising TypeError instead of
            # this ValueError.
            raise ValueError('invalid log level: ' + level_name)
    root_logger = logging.getLogger('')
    # BUG FIX: iterate a copy — removing handlers while iterating the live
    # list skips every other handler.
    for handler in list(root_logger.handlers):
        root_logger.removeHandler(handler)
    logging.getLogger(logger).setLevel(logging.DEBUG)
    console_log_handler = logging.StreamHandler()
    console_log_handler.setLevel(level)
    if isinstance(formatter, str):
        formatter = logging.Formatter(formatter)
    elif not isinstance(formatter, logging.Formatter):
        raise TypeError('formatter must be an instance of logging.Formatter')
    console_log_handler.setFormatter(formatter)
    logging.getLogger(logger).addHandler(console_log_handler)
    logging.captureWarnings(True)
    return console_log_handler
Configure the default stream handler for logging messages to the console, remove other logging handlers, and enable capturing warnings. .. versionadded:: 1.3.0 :param str logger: The logger to add the stream handler for. :param level: The level to set the logger to, will default to WARNING if no level is specified. :type level: None, int, str :param formatter: The format to use for logging messages to the console. :type formatter: str, :py:class:`logging.Formatter` :return: The new configured stream handler. :rtype: :py:class:`logging.StreamHandler`
def snake_to_camel(value):
    """Convert a snake_case_string to a camelCaseString.

    >>> snake_to_camel("foo_bar_baz")
    'fooBarBaz'
    """
    titled = "".join(map(str.title, value.split("_")))
    return value[:1].lower() + titled[1:]
Converts a snake_case_string to a camelCaseString. >>> snake_to_camel("foo_bar_baz") 'fooBarBaz'
def mutate(self, row):
    """Add *row* to the batch, flushing synchronously when size limits are
    reached.

    :raises MaxMutationsError: if the row alone exceeds MAX_MUTATIONS.
    """
    row_mutations = len(row._get_mutations())
    if row_mutations > MAX_MUTATIONS:
        raise MaxMutationsError(
            "The row key {} exceeds the number of mutations {}.".format(
                row.row_key, row_mutations
            )
        )
    # flush first if this row would push the batch past the mutation cap
    if (self.total_mutation_count + row_mutations) >= MAX_MUTATIONS:
        self.flush()
    self.rows.append(row)
    self.total_mutation_count += row_mutations
    self.total_size += row.get_mutations_size()
    if self.total_size >= self.max_row_bytes or len(self.rows) >= self.flush_count:
        self.flush()
Add a row to the batch. If the current batch meets one of the size limits, the batch is sent synchronously. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_batcher_mutate] :end-before: [END bigtable_batcher_mutate] :type row: class :param row: class:`~google.cloud.bigtable.row.DirectRow`. :raises: One of the following: * :exc:`~.table._BigtableRetryableError` if any row returned a transient error. * :exc:`RuntimeError` if the number of responses doesn't match the number of rows that were retried * :exc:`.batcher.MaxMutationsError` if any row exceeds max mutations count.
def inserir(self, name):
    """Insert a new network type and return {'net_type': {'id': <id>}}."""
    payload = {'net_type': {'name': name}}
    code, xml = self.submit(payload, 'POST', 'net_type/')
    return self.response(code, xml)
Insert new network type and return its identifier. :param name: Network type name. :return: Following dictionary: {'net_type': {'id': < id >}} :raise InvalidParameterError: Network type is none or invalid. :raise NomeTipoRedeDuplicadoError: A network type with this name already exists. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response.
def update(self):
    """Update the AMP list (only for the 'local' input method)."""
    stats = self.get_init_value()
    if self.input_method == 'local':
        for key, amp in iteritems(self.glances_amps.update()):
            stats.append({'key': key,
                          'name': amp.NAME,
                          'result': amp.result(),
                          'refresh': amp.refresh(),
                          'timer': amp.time_until_refresh(),
                          'count': amp.count(),
                          'countmin': amp.count_min(),
                          'countmax': amp.count_max()})
    self.stats = stats
    return self.stats
Update the AMP list.
def standardize_names_groundings(stmts):
    """Standardize Concept names with respect to their groundings.

    NOTE: optimized for Influence Statements from Eidos, Hume, Sofia and
    CWMS; may yield unexpected results for biology-specific Statements.
    """
    print('Standardize names to groundings')
    for stmt in stmts:
        for concept in stmt.agent_list():
            db_ns, db_id = concept.get_grounding()
            if db_id is None:
                continue
            if isinstance(db_id, list):
                db_id = db_id[0][0].split('/')[-1]
            else:
                db_id = db_id.split('/')[-1]
            cleaned = db_id.replace('|', ' ').replace('_', ' ').replace('ONT::', '')
            concept.name = cleaned.capitalize()
    return stmts
Standardize the names of Concepts with respect to an ontology. NOTE: this function is currently optimized for Influence Statements obtained from Eidos, Hume, Sofia and CWMS. It will possibly yield unexpected results for biology-specific Statements.
def mode(self, set_bytes):
    """Set the operating protocol of the USB-ISS, with additional
    parameters for the protocol.

    :param set_bytes: list of mode/parameter bytes to send
    :raises USBISSError: when the device rejects the mode change
    """
    self._mode = set_bytes
    data = [self.ISS_CMD, self.ISS_SET_MODE] + set_bytes
    self.write_data(data)
    response = self.read_data(2)
    if response[0] == 0:
        error_dict = {
            0x05: 'Unknown Command',
            0x06: 'Internal Error 1',
            0x07: 'Internal Error 2'
        }
        try:
            # BUG FIX: was response(1), which called the response object
            # (TypeError) instead of indexing the error-code byte.
            raise USBISSError(error_dict[response[1]])
        except KeyError:
            raise USBISSError('Undocumented Error')
Set the operating protocol of the USB-ISS with additional parameters for the protocol
def set_close_callback(self, cb):
    """Register *cb* (a no-argument callable) to run when this connection
    closes; it runs immediately if the connection is already closed.
    Only one close callback may be set."""
    assert self._close_cb is None, (
        'A close_callback has already been set for this connection.'
    )
    self._close_cb = stack_context.wrap(cb)
    if self.closed:
        self._close_cb()
Specify a function to be called when this connection is closed. :param cb: A callable that takes no arguments. This callable will be called when this connection is closed.
def _get_access_token(self, verifier=None):
    """Fetch an access token from self.access_token_url.

    :return: (oauth.Token, raw token dict)
    :raises OAuthError: on a non-200 response
    """
    response, content = self.client(verifier).request(
        self.access_token_url, "POST")
    content = smart_unicode(content)
    if response['status'] != '200':
        raise OAuthError(_(
            u"Invalid status code %s while obtaining access token from %s: %s")
            % (response['status'], self.access_token_url, content))
    token = dict(urlparse.parse_qsl(content))
    access = oauth.Token(token['oauth_token'], token['oauth_token_secret'])
    return (access, token)
Fetch an access token from `self.access_token_url`.
def _ttl(self):
    """Return hlim for IPv6 packets, ttl otherwise."""
    if isinstance(self, scapy.layers.inet6.IPv6):
        return self.hlim
    return self.ttl
Returns ttl or hlim, depending on the IP version
def register_element(self, model, idx):
    """Register an element under *model*, auto-generating idx when None.

    :param model: model name
    :param idx: element idx, or None to generate one
    :return: the final element idx
    """
    if idx is None:
        idx = '{}_{}'.format(model, len(self._idx_model))
    self._idx_model[idx] = model
    self._idx.append(idx)
    return idx
Register element with index ``idx`` to ``model`` :param model: model name :param idx: element idx :return: final element idx
def create_cfg(self, cfg_file, defaults=None, mode='json'):
    """Create (or load) the app's config file and set the first_run flag.

    The file is only created when it does not already exist; on first
    creation the defaults are merged in, the file is saved and Windows
    permissions are set.  Custom entries can be added with
    ``app.cfg[key] = value``; remember to call ``cfg.save()``.
    """
    assert mode in ('json', 'yaml')
    self.cfg_mode = mode
    self.cfg_file = cfg_file
    try:
        self.cfg = CfgDict(app=self, cfg=self.load_cfg())
        logging.info('cfg file found : %s' % self.cfg_file)
    except FileNotFoundError:
        self.cfg = CfgDict(app=self, cfg={'first_run': True})
        # defaults may be None; suppress the resulting TypeError
        with suppress(TypeError):
            self.cfg.update(defaults)
        self.cfg.save()
        set_windows_permissions(self.cfg_file)
        logging.info(
            'Created cfg file for first time!: %s' % self.cfg_file)
    self.first_run = bool(self._check_first_run())
Creates the config file for your app with default values. The file will only be created if it doesn't exist. Also sets up the first_run attribute and sets correct Windows permissions. You can add custom entries to the config via app.cfg['key'] = value; remember to call cfg.save(). TODO: auto-save on change; reconsider whether the json/yaml mode option should be removed.
def future(self, request, timeout=None, metadata=None, credentials=None):
    """Asynchronously invoke the underlying RPC.

    :return: an object that is both a Call for the RPC and a Future whose
        result is the RPC response (or whose exception is an RpcError).
    """
    inner_future = self._inner.future(request, timeout, metadata, credentials)
    return _utils.wrap_future_call(inner_future, self._loop, self._executor)
Asynchronously invokes the underlying RPC. Args: request: The request value for the RPC. timeout: An optional duration of time in seconds to allow for the RPC. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. credentials: An optional CallCredentials for the RPC. Returns: An object that is both a Call for the RPC and a Future. In the event of RPC completion, the return Call-Future's result value will be the response message of the RPC. Should the event terminate with non-OK status, the returned Call-Future's exception value will be an RpcError.
def top(**kwargs):
    """Run the configured ext_nodes command for the minion id and translate
    its 'classes' output into top data keyed by environment."""
    if 'id' not in kwargs['opts']:
        return {}
    # SECURITY NOTE(review): the minion id is interpolated into a shell
    # command run with shell=True; a hostile id could inject commands —
    # confirm upstream sanitization of minion ids.
    cmd = '{0} {1}'.format(
        __opts__['master_tops']['ext_nodes'],
        kwargs['opts']['id']
    )
    ndata = salt.utils.yaml.safe_load(
        subprocess.Popen(
            cmd, shell=True, stdout=subprocess.PIPE).communicate()[0]
    )
    if not ndata:
        log.info('master_tops ext_nodes call did not return any data')
    ret = {}
    env = ndata['environment'] if 'environment' in ndata else 'base'
    if 'classes' not in ndata:
        log.info('master_tops ext_nodes call did not have a dictionary with a "classes" key.')
        return ret
    classes = ndata['classes']
    if isinstance(classes, dict):
        ret[env] = list(classes)
    elif isinstance(classes, list):
        ret[env] = classes
    return ret
Run the command configured
def _guess_package(self, path): supported_prefixes = ('com', 'org', 'net',) package = '' slash = path.rfind(os.path.sep) prefix_with_slash = max(path.rfind(os.path.join('', prefix, '')) for prefix in supported_prefixes) if prefix_with_slash < 0: package = path[:slash] elif prefix_with_slash >= 0: package = path[prefix_with_slash:slash] package = package.replace(os.path.sep, ' ') package = package.strip().replace(' ', '.') return package
Guess the package for a source file path by locating the last com/org/net path component and joining the remaining directory segments (up to the file name) with dots.
def fill_form(form, data):
    """Prefill *form* with *data*, recursing into nested dicts.

    :param form: the form to fill.
    :param data: the data to insert in the form.
    :returns: the pre-filled form.
    """
    for field_name, field_value in data.items():
        if not hasattr(form, field_name):
            continue
        target = getattr(form, field_name)
        if isinstance(field_value, dict):
            fill_form(target, field_value)
        else:
            target.data = field_value
    return form
Prefill form with data. :param form: The form to fill. :param data: The data to insert in the form. :returns: A pre-filled form.
def get_role(role_id, **kwargs):
    """Get a role by its ID.

    :raises HydraError: when no role with *role_id* exists
    """
    try:
        return db.DBSession.query(Role).filter(Role.id == role_id).one()
    except NoResultFound:
        raise HydraError("Role not found (role_id={})".format(role_id))
Get a role by its ID.
def read_string(self, len):
    """Read and return a string of *len* bytes from the packet, advancing
    the read offset.  (Parameter name kept for caller compatibility.)"""
    fmt = '!%ds' % len
    size = struct.calcsize(fmt)
    (value,) = struct.unpack(fmt, self.data[self.offset:self.offset + size])
    self.offset += size
    return value
Reads a string of a given length from the packet
def query(self, query):
    """Return an iterable of stored objects matching *query*.

    Queries all the `.obj` files within the directory given by query.key.
    """
    directory = self.path(query.key)
    if not os.path.exists(directory):
        return query(list())
    names = set(os.listdir(directory)) - set(self.ignore_list)
    paths = (os.path.join(directory, name) for name in names)
    return query(self._read_object_gen(paths))
Returns an iterable of objects matching criteria expressed in `query` FSDatastore.query queries all the `.obj` files within the directory specified by the query.key. Args: query: Query object describing the objects to return. Returns: Cursor with all objects matching criteria
def parse_blockwise(value):
    """Parse a CoAP Blockwise option value.

    :param value: option value
    :return: (num, m, size) where size is the decoded block size in bytes
    """
    length = byte_len(value)
    # pick masks matching the encoded option width
    if length == 1:
        num_mask, m_mask, szx_mask = 0xF0, 0x08, 0x07
    elif length == 2:
        num_mask, m_mask, szx_mask = 0xFFF0, 0x0008, 0x0007
    else:
        num_mask, m_mask, szx_mask = 0xFFFFF0, 0x000008, 0x000007
    num = (value & num_mask) >> 4
    m = (value & m_mask) >> 3
    szx = value & szx_mask
    return num, int(m), 2 ** (szx + 4)
Parse Blockwise option. :param value: option value :return: num, m, size
def split(args):
    """%prog split gffile outdir

    Split the gff into one contig per file. Will also take sequences if the file contains FASTA sequences.
    """
    p = OptionParser(split.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    gffile, outdir = args
    mkdir(outdir)
    for seqid in Gff(gffile).seqids:
        outfile = op.join(outdir, seqid + ".gff")
        extract([gffile, "--contigs=" + seqid, "--outfile=" + outfile])
%prog split gffile outdir Split the gff into one contig per file. Will also take sequences if the file contains FASTA sequences.
def returnOneEntry(self, last=False):
    """Return the first entry of the current list, or the last entry when
    *last* is True; None when the list is empty."""
    if not self.table:
        return None
    return self.table[-1] if last else self.table[0]
Return the first entry in the current list. If 'last=True', then the last entry is returned. Returns None if the list is empty. Example of use: >>> test = [ ... {"name": "Jim", "age": 18, "income": 93000, "order": 2}, ... {"name": "Larry", "age": 18, "order": 3}, ... {"name": "Joe", "age": 20, "income": 15000, "order": 1}, ... {"name": "Bill", "age": 19, "income": 29000, "order": 4}, ... ] >>> print PLOD(test).returnOneEntry() {'age': 18, 'order': 2, 'name': 'Jim', 'income': 93000} >>> print PLOD(test).returnOneEntry(last=True) {'age': 19, 'order': 4, 'name': 'Bill', 'income': 29000} :param last: If True, the last entry is returned rather than the first. :return: A list entry, or None if the list is empty.
def emit(self, span_datas):
    """Send SpanData tuples to the Zipkin server (v2 API by default),
    logging rather than raising on any failure."""
    try:
        zipkin_spans = self.translate_to_zipkin(span_datas)
        result = requests.post(
            url=self.url,
            data=json.dumps(zipkin_spans),
            headers=ZIPKIN_HEADERS)
        if result.status_code not in SUCCESS_STATUS_CODE:
            logging.error(
                "Failed to send spans to Zipkin server! Spans are {}"
                .format(zipkin_spans))
    except Exception as e:
        logging.error(getattr(e, 'message', e))
Send SpanData tuples to Zipkin server, default using the v2 API. :type span_datas: list of :class: `~opencensus.trace.span_data.SpanData` :param list of opencensus.trace.span_data.SpanData span_datas: SpanData tuples to emit
def genome_name_from_fasta_path(fasta_path):
    """Extract the genome name from a fasta file path: the basename with
    its file extension stripped (e.g. '/p/genome_1.fasta' -> 'genome_1')."""
    base = os.path.basename(fasta_path)
    return re.sub(r'(\.fa$)|(\.fas$)|(\.fasta$)|(\.fna$)|(\.\w{1,}$)', '', base)
Extract genome name from fasta filename Get the filename without directory and remove the file extension. Example: With fasta file path ``/path/to/genome_1.fasta``:: fasta_path = '/path/to/genome_1.fasta' genome_name = genome_name_from_fasta_path(fasta_path) print(genome_name) # => "genome_1" Args: fasta_path (str): fasta file path Returns: str: genome name
def hdf5(self):
    """Path of the output hdf5 folder if relevant, None otherwise (cached)."""
    if self._rundir['hdf5'] is UNDETERMINED:
        h5_folder = self.path / self.par['ioin']['hdf5_output_folder']
        has_data = (h5_folder / 'Data.xmf').is_file()
        self._rundir['hdf5'] = h5_folder if has_data else None
    return self._rundir['hdf5']
Path of output hdf5 folder if relevant, None otherwise.
def control(controllee: Union['cirq.Gate', op_tree.OP_TREE],
            control_qubits: Sequence['cirq.Qid'] = None,
            default: Any = RaiseTypeErrorIfNotProvided) -> Any:
    """Returns a Controlled version of the given value, if defined.

    Controllees define how to be controlled via a
    ``controlled_by(self, control_qubits)`` method, which may return
    NotImplemented.  For an OP_TREE, the transformation is applied at the
    leaves.

    Args:
        controllee: The gate, operation or iterable of operations to control.
        control_qubits: Qids that would control the controllee.
        default: Fallback when controllee has no controlling defined; if
            unset and the fallback occurs, TypeError is raised.

    Raises:
        TypeError: no controlled_by (or it returned NotImplemented) and no
            default was specified.
    """
    if control_qubits is None:
        control_qubits = []
    controller = getattr(controllee, 'controlled_by', None)
    result = NotImplemented if controller is None else controller(
        *control_qubits)
    if result is not NotImplemented:
        return result
    # BUG FIX: collections.Iterable was removed in Python 3.10; the ABC
    # lives in collections.abc.
    if isinstance(controllee, collections.abc.Iterable):
        return op_tree.transform_op_tree(
            controllee,
            op_transformation=lambda op: control(op, control_qubits))
    if default is not RaiseTypeErrorIfNotProvided:
        return default
    if controller is None:
        raise TypeError("object of type '{}' has no controlled_by "
                        "method.".format(type(controllee)))
    raise TypeError("object of type '{}' does have a controlled_by method, "
                    "but it returned NotImplemented.".format(type(controllee)))
Returns a Controlled version of the given value, if defined. Controllees define how to be controlled by defining a method controlled_by(self, control_qubits). Note that the method may return NotImplemented to indicate a particular controlling can't be done. Args: controllee: The gate, operation or iterable of operations to control. control_qubits: A list of Qids that would control this controllee. default: Determines the fallback behavior when `controllee` doesn't have a controlling defined. If `default` is not set and the fallback occurs, a TypeError is raised instead. Returns: If `controllee` has a controlled_by method that returns something besides NotImplemented, that result is returned. For an OP_TREE, transformation is applied at the leaf. Otherwise, if a default value was specified, the default value is returned. Raises: TypeError: `controllee` doesn't have a controlled_by method (or that method returned NotImplemented) and no `default` was specified.
def gpu_load(wproc=0.5, wmem=0.5):
    """Return a list of GPULoad namedtuples (processor, memory, weighted)
    describing the current load of each GPU device.

    Processor and memory loads are fractions in [0, 1]; the weighted load
    averages them using the weights *wproc* and *wmem*.
    """
    GPULoad = namedtuple('GPULoad', ['processor', 'memory', 'weighted'])
    loads = []
    for gpu in GPUtil.getGPUs():
        weighted = (wproc * gpu.load + wmem * gpu.memoryUtil) / (wproc + wmem)
        loads.append(GPULoad(gpu.load, gpu.memoryUtil, weighted))
    return loads
Return a list of namedtuples representing the current load for each GPU device. The processor and memory loads are fractions between 0 and 1. The weighted load represents a weighted average of processor and memory loads using the parameters `wproc` and `wmem` respectively.
def _inquire(self, **kwargs):
    """Inspect this name for information (RFC 6680 naming extensions).

    With no keyword arguments, all available information is returned;
    otherwise only the aspects passed as True.

    :raises NotImplementedError: if the GSSAPI implementation lacks RFC 6680
    """
    if rname_rfc6680 is None:
        raise NotImplementedError("Your GSSAPI implementation does not "
                                  "support RFC 6680 (the GSSAPI naming "
                                  "extensions)")
    # with no kwargs, every aspect defaults to True
    default_val = not kwargs
    attrs = kwargs.get('attrs', default_val)
    mech_name = kwargs.get('mech_name', default_val)
    return rname_rfc6680.inquire_name(self, mech_name=mech_name, attrs=attrs)
Inspect this name for information. This method inspects the name for information. If no keyword arguments are passed, all available information is returned. Otherwise, only the keyword arguments that are passed and set to `True` are returned. Args: mech_name (bool): get whether this is a mechanism name, and, if so, the associated mechanism attrs (bool): get the attributes names for this name Returns: InquireNameResult: the results of the inquiry, with unused fields set to None Raises: GSSError
def _check_filter_specific_md(self, specific_md: list): if isinstance(specific_md, list): if len(specific_md) > 0: for md in specific_md: if not self.check_is_uuid(md): specific_md.remove(md) logging.error("Metadata UUID is not correct: {}".format(md)) specific_md = ",".join(specific_md) else: specific_md = "" else: raise TypeError("'specific_md' expects a list") return specific_md
Check if specific_md parameter is valid. :param list specific_md: list of specific metadata UUID to check
async def extend(self, additional_time):
    """Add more time to an already acquired lock.

    ``additional_time`` may be an int or float number of seconds to add.

    :raises LockError: if the lock is not held or has no timeout
    """
    if self.local.token is None:
        raise LockError("Cannot extend an unlocked lock")
    if self.timeout is None:
        raise LockError("Cannot extend a lock with no timeout")
    return await self.do_extend(additional_time)
Adds more time to an already acquired lock. ``additional_time`` can be specified as an integer or a float, both representing the number of seconds to add.
def until_state(self, state, timeout=None):
    """Return a future that resolves once the client reaches *state*
    ("disconnected", "syncing" or "synced"), optionally within *timeout*
    seconds."""
    return self._state.until_state(state, timeout=timeout)
Future that resolves when a certain client state is attained Parameters ---------- state : str Desired state, one of ("disconnected", "syncing", "synced") timeout: float Timeout for operation in seconds.
def parse_n3(row, src='csv'):
    """Parse a row (a CSV line pre-split from an n3 file) into a
    (key, value) pair; ('', '') for blank or too-short rows.

    NOTE: only the 'csv' source is currently implemented.
    """
    if not row.strip():
        return '', ''
    l_root = 'opencyc'
    key = ''
    val = ''
    if src == 'csv':
        cols = row.split(',')
        if len(cols) < 3:
            return '', ''
        key = '{}:{}:{}'.format(
            l_root, cols[1].strip('"').strip(), cols[2].strip('"').strip())
        try:
            val = cols[3].strip('"').strip()
        except Exception:
            val = "Error parsing " + row
    elif src == 'n3':
        pass  # n3 source parsing not implemented
    return key, val
takes a row from an n3 file and returns the triple NOTE - currently parses a CSV line already split via cyc_extract.py
def geojson(self):
    """Render the tracked features as a GeoJSON FeatureCollection dict."""
    features = [feature.geojson(key)
                for key, feature in self._features.items()]
    return {"type": "FeatureCollection", "features": features}
Render features as a FeatureCollection.
def _load_config_file(self):
    """Load config.yaml from the filesystem and overlay the values that
    were provided via ENV."""
    with open(self._config_file) as config_fh:
        config = yaml.safe_load(config_fh)
    patch_config(config, self.__environment_configuration)
    return config
Loads config.yaml from filesystem and applies some values which were set via ENV
def get_resource_pool(self, cluster, pool_name):
    """Find a resource pool by name within *cluster*; None if not found."""
    cluster_pools = cluster.resourcePool.resourcePool
    candidates = self.get_obj([vim.ResourcePool], pool_name, return_all=True)
    for candidate in candidates or []:
        if candidate in cluster_pools:
            return candidate
    return None
Find a resource pool given a pool name for desired cluster
def save(self, async=False, callback=None, encrypted=True):
    """Update the user on the server and perform the callback method.

    NOTE(review): ``async`` is a reserved keyword from Python 3.7 onward,
    so this definition is a SyntaxError on modern Python; renaming the
    parameter (e.g. to ``async_``) is required for 3.7+ support — confirm
    keyword-argument callers before doing so.
    """
    # Encrypt a newly-set password before sending, and clear the session
    # controller's api_key so the next request re-authenticates.
    if self._new_password and encrypted:
        self.password = Sha1.encrypt(self._new_password)
    controller = NURESTSession.get_current_session().login_controller
    controller.password = self._new_password
    controller.api_key = None
    data = json.dumps(self.to_dict())
    request = NURESTRequest(method=HTTP_METHOD_PUT, url=self.get_resource_url(), data=data)
    if async:
        # asynchronous: fire the request and return; callbacks handle the result
        return self.send_request(request=request, async=async, local_callback=self._did_save, remote_callback=callback)
    else:
        # synchronous: block on the request and process the result inline
        connection = self.send_request(request=request)
        return self._did_save(connection)
Update the user on the server and perform the callback method.
def get_image_dimensions(request):
    """Verify or calculate the (width, height) of the requested image.

    :param request: OGC-type request (WmsRequest or WcsRequest)
    :raises ValueError: when neither dimension is an integer, or a
        dimension is neither an integer nor None
    """
    both_ints = isinstance(request.size_x, int) and isinstance(request.size_y, int)
    if request.service_type is ServiceType.WCS or both_ints:
        return request.size_x, request.size_y
    if not isinstance(request.size_x, int) and not isinstance(request.size_y, int):
        raise ValueError("At least one of parameters 'width' and 'height' must have an integer value")
    missing_dimension = get_image_dimension(request.bbox, width=request.size_x, height=request.size_y)
    if request.size_x is None:
        return missing_dimension, request.size_y
    if request.size_y is None:
        return request.size_x, missing_dimension
    raise ValueError("Parameters 'width' and 'height' must be integers or None")
Verifies or calculates image dimensions. :param request: OGC-type request :type request: WmsRequest or WcsRequest :return: horizontal and vertical dimensions of requested image :rtype: (int or str, int or str)
def getPackage(self, name, **kwargs):
    """Load information about a package via the "getPackage" XML-RPC call.

    :param name: package name
    :returns: deferred that when fired returns a Package (Munch, dict-like)
        object for this Koji package, or None if none was found.
    """
    packageinfo = yield self.call('getPackage', name, **kwargs)
    package = Package.fromDict(packageinfo)
    if package:
        package.connection = self
    defer.returnValue(package)
Load information about a package and return a custom Package class. Calls "getPackage" XML-RPC. :param name: ``str``, package name :returns: deferred that when fired returns a Package (Munch, dict-like) object representing this Koji package, or None if no package was found.
def alt_click(self, locator, params=None, timeout=None):
    """Alt-click the web element identified by *locator*.

    :param locator: locator tuple or WebElement instance
    :param params: (optional) locator parameters
    :param timeout: (optional) time to wait for the element
    """
    self._click(locator, params, timeout, Keys.ALT)
Alt-click web element. :param locator: locator tuple or WebElement instance :param params: (optional) locator parameters :param timeout: (optional) time to wait for element :return: None
def add_prefix(self, prefix, flags, prf):
    """Add a network prefix and re-register the network data.

    Args:
        prefix (str): network prefix.
        flags (str): prefix flags (see the Thread documentation).
        prf (str): prefix prf (see the Thread documentation).
    """
    self._req('prefix add %s %s %s' % (prefix, flags, prf))
    # give the device a moment before re-registering network data
    time.sleep(1)
    self._req('netdataregister')
Add network prefix. Args: prefix (str): network prefix. flags (str): network prefix flags, please refer thread documentation for details prf (str): network prf, please refer thread documentation for details
async def start_serving(self, address=None, sockets=None, backlog=100,
                        sslcontext=None):
    """Start serving on the given *sockets*, or on a (host, port) *address*.

    :param address: optional (host, port) tuple to bind to
    :param sockets: optional list of sockets to bind to
    :param backlog: maximum number of queued connections
    :param sslcontext: optional SSLContext
    :raises RuntimeError: if already serving, or neither sockets nor a
        tuple address was supplied
    """
    if self._server:
        raise RuntimeError('Already serving')
    create_server = self._loop.create_server
    server = None
    if sockets:
        for sock in sockets:
            srv = await create_server(self.create_protocol, sock=sock,
                                      backlog=backlog, ssl=sslcontext)
            if server is None:
                server = srv
            else:
                # merge subsequent servers into the first one
                server.sockets.extend(srv.sockets)
    elif isinstance(address, tuple):
        server = await create_server(self.create_protocol,
                                     host=address[0], port=address[1],
                                     backlog=backlog, ssl=sslcontext)
    else:
        raise RuntimeError('sockets or address must be supplied')
    self._set_server(server)
Start serving. :param address: optional address to bind to :param sockets: optional list of sockets to bind to :param backlog: Number of maximum connections :param sslcontext: optional SSLContext object
def set_system_time(self, time_source, ntp_server, date_format,
                    time_format, time_zone, is_dst, dst, year, mon,
                    day, hour, minute, sec, callback=None):
    """Set the device's system time.

    Validates that ``ntp_server`` is one of the supported servers, then
    issues the ``setSystemTime`` command with all timing parameters.

    :raises ValueError: if ``ntp_server`` is not a supported NTP server
    :return: result of ``execute_command``
    """
    supported_ntp_servers = ('time.nist.gov', 'time.kriss.re.kr',
                             'time.windows.com', 'time.nuri.net')
    if ntp_server not in supported_ntp_servers:
        raise ValueError('Unsupported ntpServer')

    params = {
        'timeSource': time_source,
        'ntpServer': ntp_server,
        'dateFormat': date_format,
        'timeFormat': time_format,
        'timeZone': time_zone,
        'isDst': is_dst,
        'dst': dst,
        'year': year,
        'mon': mon,
        'day': day,
        'hour': hour,
        'minute': minute,
        'sec': sec,
    }
    return self.execute_command('setSystemTime', params, callback=callback)
Set system time.
def add_ignore(self, depend):
    """Adds dependencies to ignore.

    :param depend: a Node or list of Nodes to ignore
    :raises SCons.Errors.UserError: if any entry is not a Node
    """
    try:
        self._add_child(self.ignore, self.ignore_set, depend)
    except TypeError as e:
        # _add_child signals bad entries by raising TypeError whose
        # first argument is the offending value (or list of values).
        e = e.args[0]
        if SCons.Util.is_List(e):
            s = list(map(str, e))
        else:
            s = str(e)
        raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
Adds dependencies to ignore.
def set_historylog(self, historylog):
    """Bind a historylog instance to this console.

    Not used anymore since v2.0.
    """
    historylog.add_history(self.shell.history_filename)
    self.shell.append_to_history.connect(historylog.append_to_history)
Bind historylog instance to this console Not used anymore since v2.0
def getCallSet(self, id_):
    """Return the CallSet with the specified id.

    :param id_: id of the CallSet to fetch
    :raises exceptions.CallSetNotFoundException: if no CallSet with
        this id exists
    """
    try:
        return self._callSetIdMap[id_]
    except KeyError:
        raise exceptions.CallSetNotFoundException(id_)
Returns a CallSet with the specified id, or raises a CallSetNotFoundException if it does not exist.
def and_(*fs):
    """Create a function that is the logical conjunction of ``fs``.

    The returned function evaluates the given functions in order and
    short-circuits on the first falsy result.

    :param fs: functions to combine (at least one required)
    :return: short-circuiting function performing logical conjunction
        on results of ``fs`` applied to its arguments
    """
    ensure_argcount(fs, min_=1)
    fs = list(imap(ensure_callable, fs))

    if len(fs) == 1:
        return fs[0]
    # Special-case small arities with direct lambdas to avoid the loop
    # overhead of the general implementation below.
    if len(fs) == 2:
        f1, f2 = fs
        return lambda *args, **kwargs: (
            f1(*args, **kwargs) and f2(*args, **kwargs))
    if len(fs) == 3:
        f1, f2, f3 = fs
        return lambda *args, **kwargs: (
            f1(*args, **kwargs) and f2(*args, **kwargs) and f3(*args, **kwargs))

    def g(*args, **kwargs):
        for f in fs:
            if not f(*args, **kwargs):
                return False
        return True
    return g
Creates a function that returns true for given arguments iff every given function evalutes to true for those arguments. :param fs: Functions to combine :return: Short-circuiting function performing logical conjunction on results of ``fs`` applied to its arguments
def cancel_lb(self, loadbal_id):
    """Cancel the specified load balancer.

    :param int loadbal_id: Load Balancer ID to be cancelled
    :return: result of the billing-item cancellation call
    """
    billing_item_id = self.lb_svc.getBillingItem(id=loadbal_id)['id']
    return self.client['Billing_Item'].cancelService(id=billing_item_id)
Cancels the specified load balancer. :param int loadbal_id: Load Balancer ID to be cancelled.
def install (self):
    """Install the package and the generated config file.

    Runs the normal install_lib step, then copies the generated
    configuration file next to the installed modules and records it in
    the list of installed outputs.

    :return: list of installed file paths, including the config file
    """
    outs = super(MyInstallLib, self).install()
    infile = self.create_conf_file()
    outfile = os.path.join(self.install_dir, os.path.basename(infile))
    self.copy_file(infile, outfile)
    outs.append(outfile)
    return outs
Install the generated config file.
def debug_complete():
    """Debugging route for marking a participant's HIT complete.

    Looks up the participant by the ``uniqueId`` request argument, marks
    them COMPLETED with the current timestamp, and renders either the
    close-popup page (sandbox/live mode) or the debug completion page.

    :raises ExperimentError: if ``uniqueId`` is missing or the database
        update fails
    """
    if 'uniqueId' not in request.args:
        raise ExperimentError('improper_inputs')
    unique_id = request.args['uniqueId']
    mode = request.args['mode']
    try:
        user = Participant.query.\
            filter(Participant.uniqueid == unique_id).one()
        user.status = COMPLETED
        user.endhit = datetime.datetime.now()
        db_session.add(user)
        db_session.commit()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit are no longer swallowed; any DB failure still maps
        # to the same experiment error as before.
        raise ExperimentError('error_setting_worker_complete')
    if mode in ('sandbox', 'live'):
        return render_template('closepopup.html')
    return render_template('complete.html')
Debugging route for complete.
def mpool(self, k_height, k_width, d_height=2, d_width=2, mode="VALID", input_layer=None, num_channels_in=None):
    """Construct a max pooling layer.

    Thin wrapper around ``_pool`` using ``max_pooling2d``; see ``_pool``
    for parameter semantics (kernel size, strides, padding mode).
    """
    return self._pool("mpool", pooling_layers.max_pooling2d, k_height, k_width, d_height, d_width, mode, input_layer, num_channels_in)
Construct a max pooling layer.
def cleanup():
    """Delete standard linter installation directories.

    Best-effort: failures are printed (with traceback) rather than
    raised, so one bad directory does not stop the rest.
    """
    for install_dir in linters.INSTALL_DIRS:
        try:
            # ignore_errors=True already suppresses removal errors; the
            # except below catches anything else (e.g. invalid paths).
            shutil.rmtree(install_dir, ignore_errors=True)
        except Exception:
            print(
                "{0}\nFailed to delete {1}".format(
                    traceback.format_exc(), install_dir
                )
            )
    # Flush so the messages appear promptly when stdout is buffered.
    sys.stdout.flush()
Delete standard installation directories.
def set_log_level(verbose, match=None, return_old=False):
    """Set the vispy logging level.

    :param verbose: bool, or a string level name (debug/info/warning/
        error/critical); True means 'info', False means 'warning'
    :param match: only display messages containing this substring
        (None = all messages)
    :param return_old: if True, return (old_level, old_match)
    :raises ValueError: for an unrecognized level string
    :raises TypeError: if ``verbose`` is neither bool nor string
        (NOTE(review): the separate docstring also advertises int
        levels, but this code path rejects them — confirm intent)
    """
    # Normalize bool/str input to a numeric logging level.
    if isinstance(verbose, bool):
        verbose = 'info' if verbose else 'warning'
    if isinstance(verbose, string_types):
        verbose = verbose.lower()
        if verbose not in logging_types:
            raise ValueError('Invalid argument "%s"' % verbose)
        verbose = logging_types[verbose]
    else:
        raise TypeError('verbose must be a bool or string')
    logger = logging.getLogger('vispy')
    old_verbose = logger.level
    old_match = _lh._vispy_set_match(match)
    logger.setLevel(verbose)
    # At DEBUG level, prepend the emitting vispy method to each message.
    if verbose <= logging.DEBUG:
        _lf._vispy_set_prepend(True)
    else:
        _lf._vispy_set_prepend(False)
    out = None
    if return_old:
        out = (old_verbose, old_match)
    return out
Convenience function for setting the logging level Parameters ---------- verbose : bool, str, int, or None The verbosity of messages to print. If a str, it can be either DEBUG, INFO, WARNING, ERROR, or CRITICAL. Note that these are for convenience and are equivalent to passing in logging.DEBUG, etc. For bool, True is the same as 'INFO', False is the same as 'WARNING'. match : str | None String to match. Only those messages that both contain a substring that regexp matches ``'match'`` (and the ``verbose`` level) will be displayed. return_old : bool If True, return the old verbosity level and old match. Notes ----- If ``verbose=='debug'``, then the ``vispy`` method emitting the log message will be prepended to each log message, which is useful for debugging. If ``verbose=='debug'`` or ``match is not None``, then a small performance overhead is added. Thus it is suggested to only use these options when performance is not crucial. See also -------- vispy.util.use_log_level
def ToByteArray(self):
    """Serialize the command per the ISO 7816-4 extended encoding.

    Le is always appended; the Lc field and the data field are only
    present when the command carries data.

    Returns:
      Python bytearray of the encoded command.
    """
    lc = self.InternalEncodeLc()
    encoded = bytearray([self.cla, self.ins, self.p1, self.p2])
    if self.data:
        encoded += lc
        encoded += self.data
        encoded += bytearray([0x00, 0x00])  # extended Le (two bytes)
    else:
        encoded += bytearray([0x00, 0x00, 0x00])  # extended Le, no Lc
    return encoded
Serialize the command. Encodes the command as per the U2F specs, using the standard ISO 7816-4 extended encoding. All Commands expect data, so Le is always present. Returns: Python bytearray of the encoded command.
def demote_admin(self, group_jid, peer_jid):
    """Demote a group admin back to a regular member.

    :param group_jid: the group JID in which the rights apply
    :param peer_jid: the admin user to demote
    :return: result of sending the DemoteAdminRequest stanza
    """
    log.info("[+] Demoting user {} to a regular member in group {}".format(peer_jid, group_jid))
    return self._send_xmpp_element(group_adminship.DemoteAdminRequest(group_jid, peer_jid))
Turns an admin of a group into a regular user with no adminship capabilities. :param group_jid: The group JID in which the rights apply :param peer_jid: The admin user to demote :return:
def ok_hash(token: str) -> bool:
    """Return whether the input token looks like a valid tails hash.

    A valid hash is 42-44 base-58 characters.

    :param token: candidate string
    :return: True if the token matches the expected pattern
    """
    LOGGER.debug('Tails.ok_hash >>> token: %s', token)

    rv = re.match('[{}]{{42,44}}$'.format(B58), token) is not None
    LOGGER.debug('Tails.ok_hash <<< %s', rv)
    return rv
Whether input token looks like a valid tails hash. :param token: candidate string :return: whether input token looks like a valid tails hash
def _construct_filename(self, batchno): return os.path.join(self.dirpath, "{0}.{1}".format(self.prefix, batchno))
Construct a filename for a database. Parameters: batchno -- batch number for the rotated database. Returns the constructed path as a string.
def expires(self):
    """The UTC datetime after which the certificate is invalid.

    Parses the certificate's notAfter field (``YYYYMMDDhhmmssZ``) into
    a timezone-aware datetime.
    """
    not_after = self._cert.get_notAfter().decode('ascii')
    parsed = datetime.datetime.strptime(not_after, '%Y%m%d%H%M%SZ')
    return parsed.replace(tzinfo=datetime.timezone.utc)
The date and time after which the certificate will be considered invalid.
def _handle_conflict(self, name):
    """Respond to a request that triggered a conflict with 409 Conflict.

    :param name: name of the member that already exists
    :return: a 409 response for the current request
    """
    err = HTTPConflict('Member "%s" already exists!' % name).exception
    return self.request.get_response(err)
Handles requests that triggered a conflict. Respond with a 409 "Conflict"
def get_file_md5sum(path):
    """Calculate the MD5 hex digest of the file at ``path``.

    Reads in fixed-size chunks so arbitrarily large files do not have
    to fit in memory (the original read the whole file at once).

    :param path: path of the file to hash
    :return: hex digest string
    """
    md5 = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(65536), b''):
            md5.update(chunk)
    return md5.hexdigest()
Calculate the MD5 hash for a file.
def to_dict(self, remove_nones=False):
    """Create a dictionary representation of the indicator.

    :param remove_nones: whether ``None`` values should be filtered out
        of the dictionary; defaults to ``False``
    :return: a dictionary representation of the indicator
    """
    if remove_nones:
        # The base class already knows how to drop None values.
        return super().to_dict(remove_nones=True)

    tags = None
    if self.tags is not None:
        tags = [tag.to_dict(remove_nones=remove_nones) for tag in self.tags]

    return {
        'value': self.value,
        'indicatorType': self.type,
        'priorityLevel': self.priority_level,
        'correlationCount': self.correlation_count,
        'whitelisted': self.whitelisted,
        'weight': self.weight,
        'reason': self.reason,
        'firstSeen': self.first_seen,
        'lastSeen': self.last_seen,
        'source': self.source,
        'notes': self.notes,
        'tags': tags,
        'enclaveIds': self.enclave_ids
    }
Creates a dictionary representation of the indicator. :param remove_nones: Whether ``None`` values should be filtered out of the dictionary. Defaults to ``False``. :return: A dictionary representation of the indicator.
def stop(self):
    """Close the serial port.

    :raises VelbusException: if closing the device fails
    """
    self.logger.warning("Stop executed")
    try:
        self._reader.close()
    except serial.serialutil.SerialException:
        self.logger.error("Error while closing device")
        raise VelbusException("Error while closing device")
    # Give the reader a moment to shut down before returning.
    time.sleep(1)
Close serial port.
def _unpack_msg(self, *msg): l = [] for m in msg: l.append(str(m)) return " ".join(l)
Convert all message elements to string
def OnCut(self, event):
    """Clipboard cut event handler.

    If the entry line does not have focus, cuts the current grid
    selection to the clipboard as one undoable action; otherwise
    delegates to the entry line's own cut behavior.
    """
    entry_line = \
        self.main_window.entry_line_panel.entry_line_panel.entry_line
    if wx.Window.FindFocus() != entry_line:
        selection = self.main_window.grid.selection
        # Group the cut into a single undo step.
        with undo.group(_("Cut")):
            data = self.main_window.actions.cut(selection)
        self.main_window.clipboard.set_clipboard(data)
        self.main_window.grid.ForceRefresh()
    else:
        entry_line.Cut()
    event.Skip()
Clipboard cut event handler
def list_repos(owner=None, **kwargs):
    """List repositories in a namespace (or all visible repositories).

    :param owner: namespace to list; if None, list across namespaces
    :param kwargs: paging options forwarded to ``get_page_kwargs``
    :return: tuple of (list of repository dicts, PageInfo)
    """
    client = get_repos_api()
    api_kwargs = {}
    api_kwargs.update(utils.get_page_kwargs(**kwargs))
    repos_list = client.repos_list_with_http_info
    if owner is not None:
        api_kwargs["owner"] = owner
        # Prefer the namespaced endpoint variant when the generated
        # client provides it (presumably a renamed endpoint — confirm
        # against the client version in use).
        if hasattr(client, "repos_list0_with_http_info"):
            repos_list = client.repos_list0_with_http_info
    else:
        # Without an owner, prefer the "all repos" endpoint if present.
        if hasattr(client, "repos_all_list_with_http_info"):
            repos_list = client.repos_all_list_with_http_info

    with catch_raise_api_exception():
        res, _, headers = repos_list(**api_kwargs)

    ratelimits.maybe_rate_limit(client, headers)
    page_info = PageInfo.from_headers(headers)
    return [x.to_dict() for x in res], page_info
List repositories in a namespace.
def reject(self, f, *args):
    """Raise a parse error if 'f' matches at the current position.

    Like 'match', but strict: useful when specific constructs are
    prohibited (e.g. SQL keywords used as variable names).

    :raises errors.EfilterParseError: if the match succeeds
    """
    match = self.match(f, *args)
    if match:
        token = self.peek(0)
        raise errors.EfilterParseError(
            query=self.tokenizer.source, token=token,
            message="Was not expecting a %s here." % token.name)
Like 'match', but throw a parse error if 'f' matches. This is useful when a parser wants to be strict about specific things being prohibited. For example, DottySQL bans the use of SQL keywords as variable names.
def threshold_count(da, op, thresh, freq):
    """Count days above/below a threshold per resampling period.

    :param da: input xarray.DataArray
    :param op: logical operator ({>, <, >=, <=, gt, lt, ge, le})
    :param thresh: threshold value
    :param freq: pandas resampling frequency string
    :return: DataArray with the count for each period
    :raises ValueError: for an unrecognized operator
    """
    from xarray.core.ops import get_op

    # Accept both symbolic (">") and named ("gt") operator spellings.
    if op in binary_ops:
        op = binary_ops[op]
    elif op in binary_ops.values():
        pass
    else:
        raise ValueError("Operation `{}` not recognized.".format(op))

    func = getattr(da, '_binary_op')(get_op(op))
    # Multiply by 1 to turn the boolean mask into integers before summing.
    c = func(da, thresh) * 1
    return c.resample(time=freq).sum(dim='time')
Count number of days above or below threshold. Parameters ---------- da : xarray.DataArray Input data. op : {>, <, >=, <=, gt, lt, ge, le } Logical operator, e.g. arr > thresh. thresh : float Threshold value. freq : str Resampling frequency defining the periods defined in http://pandas.pydata.org/pandas-docs/stable/timeseries.html#resampling. Returns ------- xarray.DataArray The number of days meeting the constraints for each period.
def _target_classes(self, target):
    """Set of the target's provided classes.

    Called at the target level so results can be memoized efficiently.

    :return: set of class names found on the target's classpath
    """
    contents = ClasspathUtil.classpath_contents((target,), self.runtime_classpath)
    # Set comprehension replaces the manual add loop; falsy classnames
    # (entries that are not class files) are skipped as before.
    return {classname
            for classname in (ClasspathUtil.classname_for_rel_classfile(f)
                              for f in contents)
            if classname}
Set of target's provided classes. Call at the target level is to memoize efficiently.
def get_smtp_header(self):
    """Return the SMTP formatted header of the mail.

    :rtype: string
    :return: the SMTP header
    """
    header_lines = [
        "From: %s" % self.get_sender(),
        "To: %s" % ',\r\n '.join(self.get_to()),
        "Cc: %s" % ',\r\n '.join(self.get_cc()),
        "Bcc: %s" % ',\r\n '.join(self.get_bcc()),
        "Subject: %s" % self.get_subject(),
    ]
    return "\r\n".join(header_lines) + "\r\n"
Returns the SMTP formatted header of the mail. :rtype: string :return: The SMTP header.
def _get_schema(self):
    """Return a dictionary with the schema for a QuantFigure.

    Builds empty slots for the data/layout/theme/panels sections from
    the module-level __QUANT_FIGURE_* key lists, then fills the layout
    section with annotation/shape parameter templates and the remaining
    generic layout kwargs.
    """
    d = {}
    layout_kwargs = dict((_, '') for _ in get_layout_kwargs())

    for _ in ('data', 'layout', 'theme', 'panels'):
        d[_] = {}
        # NOTE(review): eval() looks up the module-level
        # __QUANT_FIGURE_DATA/LAYOUT/THEME/PANELS lists by name.
        for __ in eval('__QUANT_FIGURE_{0}'.format(_.upper())):
            layout_kwargs.pop(__, None)
            d[_][__] = None

    d['layout'].update(annotations=dict(values=[],
                                        params=utils.make_dict_from_list(get_annotation_kwargs())))
    d['layout'].update(shapes=utils.make_dict_from_list(get_shapes_kwargs()))
    # Annotation/shape kwargs are handled above; drop them from the
    # generic layout kwargs before merging the remainder in.
    [layout_kwargs.pop(_, None) for _ in get_annotation_kwargs() + get_shapes_kwargs()]
    d['layout'].update(**layout_kwargs)
    return d
Returns a dictionary with the schema for a QuantFigure
def validate_otp(hsm, args):
    """Validate a YubiKey OTP against the YubiHSM.

    :param hsm: YHSM instance to validate against
    :param args: parsed command-line arguments (uses ``otp``, ``verbose``)
    :return: 0 on success; for known validation failures, the status
        code relative to YSM_RESPONSE; 0xff for any other failure
    """
    try:
        res = pyhsm.yubikey.validate_otp(hsm, args.otp)
        if args.verbose:
            print "OK counter=%04x low=%04x high=%02x use=%02x" % \
                (res.use_ctr, res.ts_low, res.ts_high, res.session_ctr)
        return 0
    except pyhsm.exception.YHSM_CommandFailed, e:
        if args.verbose:
            print "%s" % (pyhsm.defines.status2str(e.status))
        # Map the well-known validation failures to small return codes;
        # anything else becomes the generic 0xff.
        for r in [pyhsm.defines.YSM_OTP_INVALID, \
                  pyhsm.defines.YSM_OTP_REPLAY, \
                  pyhsm.defines.YSM_ID_NOT_FOUND]:
            if e.status == r:
                return r - pyhsm.defines.YSM_RESPONSE
        return 0xff
Validate an OTP.
def _update_data_dict(self, data_dict, back_or_front): data_dict['back_or_front'] = back_or_front if 'slim' in data_dict and 'scur' in data_dict: try: data_dict['spct'] = (data_dict['scur'] / data_dict['slim']) * 100 except (TypeError, ZeroDivisionError): pass
Adds the 'back_or_front' key, and the 'spct' session percentage when session limit data is available.
def filtered_notebook_metadata(notebook):
    """Notebook metadata, filtered for metadata added by Jupytext itself.

    Applies the user's ``notebook_metadata_filter`` (if any) on top of
    the default metadata filter, then drops the ``jupytext`` section.
    """
    metadata = copy(notebook.metadata)
    metadata = filter_metadata(metadata,
                               notebook.metadata.get('jupytext', {}).get('notebook_metadata_filter'),
                               _DEFAULT_NOTEBOOK_METADATA)
    if 'jupytext' in metadata:
        del metadata['jupytext']
    return metadata
Notebook metadata, filtered for metadata added by Jupytext itself
def UQRatio(s1, s2, full_process=True):
    """Unicode quick ratio.

    Calls QRatio with force_ascii set to False.

    :param s1: first string to compare
    :param s2: second string to compare
    :param full_process: whether to pre-process the strings
    :return: similarity ratio
    """
    return QRatio(s1, s2, force_ascii=False, full_process=full_process)
Unicode quick ratio Calls QRatio with force_ascii set to False :param s1: first string to compare :param s2: second string to compare :return: similarity ratio
def _load_market_scheme(self):
    """Load the market YAML description from ``self.scheme_path``.

    :raises LoadMarketSchemeFailed: if the file cannot be read or parsed
    """
    try:
        # "with" ensures the file handle is closed even on parse errors
        # (the original left it open until garbage collection).
        with open(self.scheme_path, 'r') as scheme_file:
            # NOTE(review): yaml.load without an explicit Loader is
            # unsafe on untrusted input; prefer yaml.safe_load if the
            # scheme file can come from outside.
            self.scheme = yaml.load(scheme_file)
    except Exception as error:
        raise LoadMarketSchemeFailed(reason=error)
Load market yaml description
def colstack(self, new, mode='abort'):
    """Horizontal stacking for tabarrays.

    Stack the tabarray(s) in ``new`` to the right of ``self``.

    **See also** :func:`tabular.tabarray.tab_colstack`,
    :func:`tabular.spreadsheet.colstack`
    """
    others = new if isinstance(new, list) else [new]
    return tab_colstack([self] + others, mode)
Horizontal stacking for tabarrays. Stack tabarray(s) in `new` to the right of `self`. **See also** :func:`tabular.tabarray.tab_colstack`, :func:`tabular.spreadsheet.colstack`
def other_Orange_tables(self):
    """Return the related (non-target) tables as Orange example tables.

    :rtype: list
    """
    target_table = self.db.target_table
    if not self.db.orng_tables:
        # No pre-built Orange tables available: convert on the fly.
        return [self.convert_table(table, None) for table in self.db.tables if table != target_table]
    else:
        return [table for name, table in list(self.db.orng_tables.items()) if name != target_table]
Returns the related tables as Orange example tables. :rtype: list
def multis_2_mono(table):
    """Convert each multiline string in a table to a single line.

    Mutates ``table`` in place and also returns it.

    Parameters
    ----------
    table : list of list of str
        A list of rows containing strings.

    Returns
    -------
    table : list of list of str
        The same table object, with newlines replaced by spaces.
    """
    # enumerate replaces the index-based double loop while keeping the
    # in-place cell updates.
    for row in table:
        for col, cell in enumerate(row):
            row[col] = cell.replace('\n', ' ')
    return table
Converts each multiline string in a table to single line. Parameters ---------- table : list of list of str A list of rows containing strings Returns ------- table : list of lists of str
def get_mysql_args(db_config):
    """Build the argument list for a ``mysql``/``mysqldump`` invocation.

    :param db_config: Django-style database settings dict (NAME, USER,
        PASSWORD, HOST, PORT)
    :return: list of command-line argument strings, ending with the
        database name
    """
    db = db_config['NAME']
    mapping = [('--user={0}', db_config.get('USER')),
               ('--password={0}', db_config.get('PASSWORD')),
               ('--host={0}', db_config.get('HOST')),
               ('--port={0}', db_config.get('PORT'))]
    # apply_arg_values formats each template with its value (presumably
    # skipping empty/None values — defined elsewhere; confirm).
    args = apply_arg_values(mapping)
    args.append(db)
    return args
Returns an array of argument values that will be passed to a `mysql` or `mysqldump` process when it is started based on the given database configuration.
def _mantissa(dval):
    """Extract the 52 mantissa bits from a double-precision float.

    ``_double_as_bytes`` is assumed to return the 8 bytes of the
    IEEE-754 big-endian representation (sign/exponent first) — TODO
    confirm against its definition.
    """
    bb = _double_as_bytes(dval)
    # The top 4 mantissa bits live in the low nibble of byte 1. The
    # original line `bb[1] & 0x0f << 48` had a precedence bug (it masks
    # a byte with a shifted constant, always yielding 0) and the
    # function dropped the shift on byte 4 and bytes 5-7 entirely.
    mantissa = (bb[1] & 0x0f) << 48
    mantissa += bb[2] << 40
    mantissa += bb[3] << 32
    mantissa += bb[4] << 24
    mantissa += bb[5] << 16
    mantissa += bb[6] << 8
    mantissa += bb[7]
    return mantissa
Extract the _mantissa bits from a double-precision floating point value.
def unset_required_for(cls, sharable_fields):
    """Relax the ``required`` flag on borrowed form fields.

    Fields borrowed by ``SharedGlossaryAdmin`` for its temporary change
    form are only required when declared in ``sharable_fields``;
    otherwise they are deactivated here.
    """
    base_fields = cls.base_fields
    if 'link_content' in base_fields and 'link_content' not in sharable_fields:
        base_fields['link_content'].required = False
    if 'link_type' in base_fields and 'link' not in sharable_fields:
        base_fields['link_type'].required = False
Fields borrowed by `SharedGlossaryAdmin` to build its temporary change form, only are required if they are declared in `sharable_fields`. Otherwise just deactivate them.