code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def load_pem(cls, private_key, password=None):
    """Load a PEM-formatted private key and wrap it in ``cls``.

    :param private_key: PEM private key content (str or bytes), or the
        path to a local private key file (read if the path exists).
    :param password: optional password used to decrypt the key.
    :return: an instance of ``cls`` wrapping the deserialized key.
    """
    # Accept a filesystem path as well as raw PEM content.
    maybe_path = normpath(private_key)
    if os.path.isfile(maybe_path):
        with open(maybe_path, 'rb') as pkf:
            private_key = pkf.read()
    # The serialization backend requires bytes.
    if not isinstance(private_key, six.binary_type):
        private_key = private_key.encode('utf-8')
    pkey = serialization.load_pem_private_key(
        private_key,
        password=password,
        backend=crypto_backends.default_backend())
    return cls(pkey)
Return a PrivateKey instance. :param private_key: Private key string (PEM format) or the path to a local private key file.
def _get_metadata_from_ndk_string(self, gcmt, ndk_string):
    """Read the GCMT metadata from line 2 of an ndk batch.

    :param gcmt: event object whose ``identifier`` and ``metadata``
        fields are populated in place.
    :param ndk_string: raw second line of the ndk record.
    :return: the updated ``gcmt`` object.
    """
    gcmt.identifier = ndk_string[:16]
    # Columns 17-61 hold inversion data for the body-wave, surface-wave
    # and mantle-wave data sets, separated by uppercase tags and colons.
    inversion_data = re.split('[A-Z:]+', ndk_string[17:61])
    gcmt.metadata['BODY'] = [float(x) for x in inversion_data[1].split()]
    gcmt.metadata['SURFACE'] = [
        float(x) for x in inversion_data[2].split()]
    gcmt.metadata['MANTLE'] = [float(x) for x in inversion_data[3].split()]
    # Remainder of the line: CMT type and source time function.
    further_meta = re.split('[: ]+', ndk_string[62:])
    gcmt.metadata['CMT'] = int(further_meta[1])
    gcmt.metadata['FUNCTION'] = {'TYPE': further_meta[2],
                                 'DURATION': float(further_meta[3])}
    return gcmt
Reads the GCMT metadata from line 2 of the ndk batch
def configure_replacefor(self, ns, definition):
    """Register a replace-for relation endpoint.

    The definition's func should be a replace function, which must:
    - accept kwargs for the new instance replacement parameters
    - return the instance

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    @self.add_route(ns.relation_path, Operation.ReplaceFor, ns)
    @request(definition.request_schema)
    @response(definition.response_schema)
    @wraps(definition.func)
    def replace(**path_data):
        # Merge URI path parameters with the decoded request body before
        # delegating to the user-supplied replace function.
        headers = dict()
        request_data = load_request_data(definition.request_schema)
        response_data = require_response_data(
            definition.func(**merge_data(path_data, request_data)))
        definition.header_func(headers, response_data)
        response_format = self.negotiate_response_content(
            definition.response_formats)
        return dump_response_data(
            definition.response_schema,
            response_data,
            status_code=Operation.ReplaceFor.value.default_code,
            headers=headers,
            response_format=response_format,
        )
    replace.__doc__ = "Replace a {} relative to a {}".format(
        pluralize(ns.object_name), ns.subject_name)
Register a replace-for relation endpoint. For typical usage, this relation is not strictly required; once an object exists and has its own ID, it is better to operate on it directly via dedicated CRUD routes. However, in some cases, the composite key of (subject_id, object_id) is required to look up the object. This happens, for example, when using DynamoDB where an object which maintains both a hash key and a range key requires specifying them both for access. The definition's func should be a replace function, which must: - accept kwargs for the new instance replacement parameters - return the instance :param ns: the namespace :param definition: the endpoint definition
def add_hyperedge(self, nodes, attr_dict=None, **attr):
    """Add a hyperedge (and any missing nodes) to the hypergraph.

    A hyperedge without a "weight" attribute is assigned the default
    value of 1.

    :param nodes: iterable container of references to nodes in the
        hyperedge to be added.
    :param attr_dict: dictionary of attributes of the hyperedge.
    :param attr: keyword attributes; override attr_dict on conflict.
    :returns: str -- the ID of the hyperedge that was added.
    :raises ValueError: if the node set is empty.
    """
    attr_dict = self._combine_attribute_arguments(attr_dict, attr)
    if not nodes:
        raise ValueError("nodes argument cannot be empty.")

    frozen_nodes = frozenset(nodes)
    # Re-check on the materialized frozenset: a bare truthiness test on
    # `nodes` wrongly passes for an *empty* iterator/generator, since
    # generators are always truthy.
    if not frozen_nodes:
        raise ValueError("nodes argument cannot be empty.")

    is_new_hyperedge = not self.has_hyperedge(frozen_nodes)
    if is_new_hyperedge:
        self.add_nodes(frozen_nodes)
        hyperedge_id = self._assign_next_hyperedge_id()
        for node in frozen_nodes:
            self._star[node].add(hyperedge_id)
        self._node_set_to_hyperedge[frozen_nodes] = hyperedge_id
        # If `nodes` was a one-shot iterator it is exhausted by now;
        # store the frozenset instead so "nodes" is never empty. Real
        # containers are stored as-is for backward compatibility.
        stored_nodes = nodes if isinstance(
            nodes, (list, tuple, set, frozenset)) else frozen_nodes
        self._hyperedge_attributes[hyperedge_id] = \
            {"nodes": stored_nodes,
             "__frozen_nodes": frozen_nodes,
             "weight": 1}
    else:
        hyperedge_id = self._node_set_to_hyperedge[frozen_nodes]

    self._hyperedge_attributes[hyperedge_id].update(attr_dict)
    return hyperedge_id
Adds a hyperedge to the hypergraph, along with any related attributes of the hyperedge. This method will automatically add any node from the node set that was not in the hypergraph. A hyperedge without a "weight" attribute specified will be assigned the default value of 1. :param nodes: iterable container of references to nodes in the hyperedge to be added. :param attr_dict: dictionary of attributes of the hyperedge being added. :param attr: keyword arguments of attributes of the hyperedge; attr's values will override attr_dict's values if both are provided. :returns: str -- the ID of the hyperedge that was added. :raises: ValueError -- nodes argument cannot be empty. Examples: :: >>> H = UndirectedHypergraph() >>> x = H.add_hyperedge(["A", "B", "C"]) >>> y = H.add_hyperedge(("A", "D"), weight=2) >>> z = H.add_hyperedge(set(["B", "D"]), {"color": "red"})
def split_by_connected_component(self, idents):
    """Split idents into equivalence classes based on connected components.

    :param idents: iterable of content identifiers.
    :return: list of sorted lists, one per connected component.
    """
    idents_remaining = set(idents)
    connected_components = []
    for ident in idents:
        if ident not in idents_remaining:
            continue  # already claimed by an earlier component
        idents_remaining.remove(ident)
        connected_component = [ident]
        # Pull in every ident linked to this one via a label edge.
        for label in self.connected_component(ident):
            cids = label.content_id1, label.content_id2
            for cid in cids:
                if cid in idents_remaining:
                    connected_component.append(cid)
                    idents_remaining.remove(cid)
        connected_components.append(sorted(connected_component))
    return connected_components
Split idents into equivalence classes based on connected components.
def rename(self, name=None, sourceNetwork=None, verbose=False):
    """Rename an existing network.

    :param name (string): new title for the network
    :param sourceNetwork (string): network by name, or by SUID with the
        ``SUID:`` prefix; the keyword CURRENT or a blank value selects
        the current network
    :param verbose: print more
    :returns: SUID of the renamed network
    """
    sourceNetwork=check_network(self,sourceNetwork,verbose=verbose)
    PARAMS=set_param(["name","sourceNetwork"],[name,sourceNetwork])
    response=api(url=self.__url+"/rename", PARAMS=PARAMS, method="POST", verbose=verbose)
    return response
Rename an existing network. The SUID of the network is returned :param name (string): Enter a new title for the network :param sourceNetwork (string): Specifies a network by name, or by SUID if the prefix SUID: is used. The keyword CURRENT, or a blank value can also be used to specify the current network. :param verbose: print more :returns: SUID of the network is returned
def load_data_file(path_of_file):
    """Load the content of a JSON file.

    :param path_of_file: the path of the file to load
    :return: the deserialized file content
    :raises ValueError: if the file was not found
    """
    # Guard clause: fail fast on a missing file.
    if not os.path.exists(path_of_file):
        raise ValueError("Data file not found: {0}".format(path_of_file))
    return storage_utils.load_objects_from_json(path_of_file)
Loads the content of a file by using json.load. :param path_of_file: the path of the file to load :return: the file content as a string :raises exceptions.ValueError: if the file was not found
def concat(attrs, inputs, proto_obj):
    """Join input arrays along a given axis.

    Maps the ONNX ``axis`` attribute onto MXNet's ``dim`` argument.
    """
    new_attrs = translation_utils._fix_attribute_names(attrs, {'axis': 'dim'})
    return 'concat', new_attrs, inputs
Joins input arrays along a given axis.
def set_per_page(self, entries=100):
    """Set the number of entries per page (maximum 200).

    :param entries: entries per page; must be an int no greater than 200.
    :return: self, to allow chaining.
    :raises SalesKingException: if ``entries`` is not an int <= 200.
    """
    if isinstance(entries, int) and entries <= 200:
        self.per_page = entries  # already an int; no cast needed
        return self
    # Message now matches the accepted bound: the check is <= 200,
    # but the old message claimed "<200".
    raise SalesKingException(
        "PERPAGE_ONLYINT",
        "Please set an integer <=200 for the per-page limit")
set entries per page max 200
def get_barycenter(self):
    """Return the mass weighted average location.

    Returns:
        :class:`numpy.ndarray`: the (x, y, z) barycenter.
    """
    try:
        mass = self['mass'].values
    except KeyError:
        # Masses not present yet; derive them from the element data.
        mass = self.add_data('mass')['mass'].values
    pos = self.loc[:, ['x', 'y', 'z']].values
    # Broadcast masses over coordinates, sum, then normalize.
    return (pos * mass[:, None]).sum(axis=0) / self.get_total_mass()
Return the mass weighted average location. Args: None Returns: :class:`numpy.ndarray`:
def get_bin_hierarchy_design_session(self):
    """Get the bin hierarchy design session.

    return: (osid.resource.BinHierarchyDesignSession) - a
        ``BinHierarchyDesignSession``
    raise: Unimplemented - ``supports_bin_hierarchy_design()`` is false
    """
    if not self.supports_bin_hierarchy_design():
        raise errors.Unimplemented()
    return sessions.BinHierarchyDesignSession(runtime=self._runtime)
Gets the bin hierarchy design session. return: (osid.resource.BinHierarchyDesignSession) - a ``BinHierarchyDesignSession`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_bin_hierarchy_design()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_bin_hierarchy_design()`` is ``true``.*
def serialize_math(ctx, document, elem, root):
    """Serialize a math element.

    Math objects are not supported; a placeholder <span> carrying an
    error message is emitted instead.
    """
    _div = etree.SubElement(root, 'span')
    if ctx.options['embed_styles']:
        _div.set('style', 'border: 1px solid red')
    _div.text = 'We do not support Math blocks at the moment.'
    # Give registered hooks a chance to post-process the placeholder.
    fire_hooks(ctx, document, elem, _div, ctx.get_hook('math'))
    return root
Serialize math element. Math objects are not supported at the moment. This is why we only show an error message.
def smart_content_encoding(self):
    """Return the content encoding, guessing from the file name if unset.

    Extensions are stripped from the right, one at a time, until
    ``mimetypes`` reports an encoding or only the stem remains.
    """
    encoding = self.content_encoding
    if encoding:
        return encoding
    parts = self.basename.split('.')
    while not encoding and len(parts) > 1:
        encoding = mimetypes.guess_type('.'.join(parts))[1]
        parts.pop()
    return encoding
Return the content encoding, falling back to guessing it from the file name's extensions when it is not explicitly set.
def reversed_dotted_parts(s):
    """For a string "a.b.c", yield "a.b.c", "a.b", then "a"."""
    if s:
        yield s
    end = -1
    while s:
        # Search for the next dot strictly left of the previous one.
        end = s.rfind('.', 0, end)
        if end == -1:
            break
        yield s[:end]
For a string "a.b.c", yields "a.b.c", "a.b", "a".
def read_block_epb(self, block, size):
    """Parse an Enhanced Packet Block (pcapng).

    :param block: raw block body bytes (after the generic block header).
    :param size: maximum number of packet bytes to return (snap length).
    :return: tuple of (packet bytes, PacketMetadata).
    """
    # Fixed 20-byte EPB header: interface id, timestamp high/low,
    # captured length, original wire length.
    intid, tshigh, tslow, caplen, wirelen = struct.unpack(
        self.endian + "5I",
        block[:20],
    )
    return (block[20:20 + caplen][:size],
            RawPcapNgReader.PacketMetadata(linktype=self.interfaces[intid][0],
                                           tsresol=self.interfaces[intid][2],
                                           tshigh=tshigh,
                                           tslow=tslow,
                                           wirelen=wirelen))
Enhanced Packet Block
def _align_sequences_to_hmm(self, hmm_file, sequences_file,
                            output_alignment_file):
    """Align sequences to an HMM and write the corrected alignment.

    Parameters
    ----------
    hmm_file: str
        path to hmm file
    sequences_file: str
        path to file of unaligned protein sequences
    output_alignment_file: str
        write alignment to this file
    """
    ss = SequenceSearcher(hmm_file)
    # Align into a temporary file first; the corrector then rewrites the
    # alignment into the requested output file.
    with tempfile.NamedTemporaryFile(prefix='graftm',
                                     suffix='.aln.fasta') as tempalign:
        ss.hmmalign_sequences(hmm_file, sequences_file, tempalign.name)
        ss.alignment_correcter([tempalign.name], output_alignment_file)
Align sequences to an HMM, and write an alignment of these proteins after cleanup so that they can be used for tree-making Parameters ---------- sequences_file: str path to file of unaligned protein sequences hmm_file: str path to hmm file output_alignment_file: str write alignment to this file Returns ------- nothing
def alpha2(self, code):
    """Return the two letter country code for any ISO 3166-1 code.

    Accepts numeric, alpha-3 or alpha-2 input. Returns "" when no
    match is found.
    """
    code = force_text(code).upper()
    if code.isdigit():
        # Numeric code: match against the numeric alternate code.
        lookup_code = int(code)

        def find(alt_codes):
            return alt_codes[1] == lookup_code
    elif len(code) == 3:
        # Alpha-3 code: match against the three-letter alternate code.
        lookup_code = code

        def find(alt_codes):
            return alt_codes[0] == lookup_code
    else:
        # Assume the input is already an alpha-2 candidate.
        find = None
    if find:
        code = None
        for alpha2, alt_codes in self.alt_codes.items():
            if find(alt_codes):
                code = alpha2
                break
    if code in self.countries:
        return code
    return ""
Return the two letter country code when passed any type of ISO 3166-1 country code. If no match is found, returns an empty string.
def install(self):
    """Called when installed on the user store. Installs my powerups.

    NOTE(review): ``str(...).decode('ascii')`` only works on Python 2 —
    confirm this module is not expected to run under Python 3.
    """
    items = []
    for typeName in self.types:
        it = self.store.findOrCreate(namedAny(typeName))
        installOn(it, self.store)
        items.append(str(it.storeID).decode('ascii'))
    self._items = items
Called when installed on the user store. Installs my powerups.
def bpoints(self):
    """Return the four Bezier control points of the segment as a tuple."""
    return (self.start, self.control1, self.control2, self.end)
returns the Bezier control points of the segment.
def available_dataset_names(self, reader_name=None, composites=False):
    """Get the sorted list of names of the available datasets."""
    ids = self.available_dataset_ids(reader_name=reader_name,
                                     composites=composites)
    unique_names = {ds_id.name for ds_id in ids}
    return sorted(unique_names)
Get the list of the names of the available datasets.
def from_file(cls, file, charset='utf-8', errors='strict',
              unicode_mode=True):
    """Load a template from a file.

    :param file: a filename or file object to load the template from.
    :param charset: the charset of the template to load.
    :param errors: the error behavior of the charset decoding.
    :param unicode_mode: set to `False` to disable unicode mode.
    :return: a template
    """
    close = False
    f = file
    if isinstance(file, basestring):
        # A path was given rather than an open file; we own the handle.
        f = open(file, 'r')
        close = True
    try:
        data = _decode_unicode(f.read(), charset, errors)
    finally:
        # Only close handles we opened ourselves.
        if close:
            f.close()
    return cls(data, getattr(f, 'name', '<template>'), charset,
               errors, unicode_mode)
Load a template from a file. .. versionchanged:: 0.5 The encoding parameter was renamed to charset. :param file: a filename or file object to load the template from. :param charset: the charset of the template to load. :param errors: the error behavior of the charset decoding. :param unicode_mode: set to `False` to disable unicode mode. :return: a template
def addVariantAnnotationSet(self, variantAnnotationSet):
    """Add the specified variantAnnotationSet to this dataset."""
    setId = variantAnnotationSet.getId()
    self._variantAnnotationSetIdMap[setId] = variantAnnotationSet
    self._variantAnnotationSetIds.append(setId)
Adds the specified variantAnnotationSet to this dataset.
def remove_node(self, node):
    """Remove a node, its attributes, and every hyperedge containing it.

    :param node: reference to the node being removed.
    :raises ValueError: if no such node exists.
    """
    if not self.has_node(node):
        raise ValueError("No such node exists.")
    # First drop every hyperedge incident to the node, then the node
    # bookkeeping itself.
    for hyperedge_id in self._star[node]:
        frozen_nodes = self._hyperedge_attributes[hyperedge_id]["__frozen_nodes"]
        del self._node_set_to_hyperedge[frozen_nodes]
        del self._hyperedge_attributes[hyperedge_id]
    del self._star[node]
    del self._node_attributes[node]
Removes a node and its attributes from the hypergraph. Removes every hyperedge that contains this node. :param node: reference to the node being removed. :raises: ValueError -- No such node exists. Examples: :: >>> H = UndirectedHypergraph() >>> H.add_node("A", label="positive") >>> H.remove_node("A")
def _do_analysis(options):
    """Do analysis for a single run, as specified by options.

    Parameters
    ----------
    options : dict
        Option names and values for analysis
    """
    module = _function_location(options)
    core_results = _call_analysis_function(options, module)
    # Model fitting only applies to empirical analyses with models.
    if module == 'emp' and ('models' in options.keys()):
        fit_results = _fit_models(options, core_results)
    else:
        fit_results = None
    _save_results(options, module, core_results, fit_results)
Do analysis for a single run, as specified by options. Parameters ---------- options : dict Option names and values for analysis
def process_request(self, request):
    """Reload glitter URL patterns if page URLs change.

    Avoids having to restart the server to recreate the glitter URLs
    being used by Django.
    """
    global _urlconf_pages
    # Ordered snapshot of (id, url) pairs used for change detection.
    page_list = list(
        Page.objects.exclude(glitter_app_name='').values_list('id', 'url').order_by('id')
    )
    with _urlconf_lock:
        if page_list != _urlconf_pages:
            glitter_urls = 'glitter.urls'
            if glitter_urls in sys.modules:
                importlib.reload(sys.modules[glitter_urls])
            _urlconf_pages = page_list
Reloads glitter URL patterns if page URLs change. Avoids having to restart the server to recreate the glitter URLs being used by Django.
def _bounds_to_array(bounds):
    """Create a CompositeArray to hold the bounds.

    Constant-valued bounds are compacted into Scalar elements; varying
    bounds become Array elements.
    """
    elements = []
    for value in bounds:
        if all_elements_equal(value):
            elements.append(Scalar(get_single_value(value), ctype='mot_float_type'))
        else:
            elements.append(Array(value, ctype='mot_float_type', as_scalar=True))
    return CompositeArray(elements, 'mot_float_type', address_space='local')
Create a CompositeArray to hold the bounds.
def reset_state(self, reset_state):
    """Reset the simulator state across all shards.

    Args:
        reset_state: computational-basis index (int, little endian) or
            a normalized np.complex64 state vector of the full size.
    Raises:
        ValueError: if the ndarray state is mis-sized or mis-typed.
    """
    if isinstance(reset_state, int):
        self._pool.map(_reset_state,
                       self._shard_num_args({'reset_state': reset_state}))
    elif isinstance(reset_state, np.ndarray):
        sim.validate_normalized_state(reset_state, self._num_qubits)
        args = []
        for kwargs in self._shard_num_args():
            # Each shard receives only its slice of the state vector.
            shard_num = kwargs['shard_num']
            shard_size = 1 << kwargs['num_shard_qubits']
            start = shard_num * shard_size
            end = start + shard_size
            kwargs['reset_state'] = reset_state[start:end]
            args.append(kwargs)
        self._pool.map(_reset_state, args)
Reset the state to the given initial state. Args: reset_state: If this is an int, then this is the state to reset the stepper to, expressed as an integer of the computational basis. Integer to bitwise indices is little endian. Otherwise if this is a np.ndarray this must be the correct size, be normalized (L2 norm of 1), and have dtype of np.complex64. Raises: ValueError if the state is incorrectly sized or not of the correct dtype.
def get_parts(self):
    """Partition the download into ``self.total`` fixed-size byte ranges.

    :return: list of ``[start, end]`` inclusive byte offsets, one pair
        per part.
    """
    part_size = PartSize.DOWNLOAD_MINIMUM_PART_SIZE
    return [[index * part_size, (index + 1) * part_size - 1]
            for index in range(self.total)]
Partitions the file and saves the part information in memory.
def process_dut(dut):
    """Signal the worker thread that the given Dut needs processing.

    Duts that already finished are skipped.
    """
    if dut.finished():
        return
    Dut._signalled_duts.appendleft(dut)
    Dut._sem.release()
Signal worker thread that specified Dut needs processing
def _format_coord(self, x, limits):
    """Handle display-range-specific formatting for an axis coordinate.

    Parameters
    ----------
    x : number or None
        The number to be formatted; None passes through unchanged.
    limits : 2-item sequence
        The min and max of the current display limits for the axis.
    """
    if x is None:
        return None
    formatter = self._mplformatter
    # Feed the matplotlib formatter representative tick locations so it
    # chooses precision and order of magnitude for the current range.
    # NOTE(review): _set_format/_set_orderOfMagnitude are private
    # matplotlib APIs — verify against the pinned matplotlib version.
    formatter.locs = np.linspace(limits[0], limits[1], 7)
    formatter._set_format(*limits)
    formatter._set_orderOfMagnitude(abs(np.diff(limits)))
    return formatter.pprint_val(x)
Handles display-range-specific formatting for the x and y coords. Parameters ---------- x : number The number to be formatted limits : 2-item sequence The min and max of the current display limits for the axis.
def __update_offsets(self, fileobj, atoms, delta, offset):
    """Update offset tables in all 'stco', 'co64' and 'tfhd' atoms.

    :param delta: number of bytes by which the sample data moved.
    :param offset: file position past which offsets must be adjusted.
    """
    if delta == 0:
        return
    moov = atoms[b"moov"]
    # 32-bit and 64-bit chunk offset tables use different formats.
    for atom in moov.findall(b'stco', True):
        self.__update_offset_table(fileobj, ">%dI", atom, delta, offset)
    for atom in moov.findall(b'co64', True):
        self.__update_offset_table(fileobj, ">%dQ", atom, delta, offset)
    try:
        for atom in atoms[b"moof"].findall(b'tfhd', True):
            self.__update_tfhd(fileobj, atom, delta, offset)
    except KeyError:
        # The fragmented-MP4 'moof' box is optional.
        pass
Update offset tables in all 'stco' and 'co64' atoms.
def from_json_to_list(cls, data: str, force_snake_case=True,
                      force_cast: bool=False, restrict: bool=False) -> TList[T]:
    """From json string to list of instance.

    :param data: Json string
    :param force_snake_case: Keys are transformed to snake case in order
        to be compliant with PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: List of instance
    """
    return cls.from_dicts(util.load_json(data),
                          force_snake_case=force_snake_case,
                          force_cast=force_cast,
                          restrict=restrict)
From json string to list of instance :param data: Json string :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True :param force_cast: Cast forcibly if True :param restrict: Prohibit extra parameters if True :return: List of instance Usage: >>> from owlmixin.samples import Human >>> humans: TList[Human] = Human.from_json_to_list('''[ ... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]}, ... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]} ... ]''') >>> humans[0].name 'Tom' >>> humans[1].name 'John'
def default_suse_tr(mod):
    """Default translation function for openSUSE, SLES and other SUSE
    based systems.

    Returns a tuple of 3 elements - the unversioned name, the python2
    versioned name and the python3 versioned name.
    """
    prefixes = ('python', 'python2', 'python3')
    return tuple('%s-%s' % (prefix, mod) for prefix in prefixes)
Default translation function for openSUSE, SLES, and other SUSE based systems Returns a tuple of 3 elements - the unversioned name, the python2 versioned name and the python3 versioned name.
def convert_size(size_bytes):
    """Transform a bytesize into a human readable filesize string.

    :param size_bytes: bytesize
    :return: human readable filesize
    """
    if size_bytes == 0:
        return "0B"
    units = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    # Pick the largest power of 1024 that fits.
    exponent = int(math.floor(math.log(size_bytes, 1024)))
    scaled = round(size_bytes / math.pow(1024, exponent), 2)
    return "%s %s" % (scaled, units[exponent])
Transform bytesize to a human readable filesize :param size_bytes: bytesize :return: human readable filesize
def previous_theme(self):
    """Cycle to the previous usable theme from the internal theme list."""
    theme = self.term.theme_list.previous(self.term.theme)
    # Skip over themes the terminal cannot render.
    while not self.term.check_theme(theme):
        theme = self.term.theme_list.previous(theme)
    self.term.set_theme(theme)
    self.draw()
    message = self.term.theme.display_string
    self.term.show_notification(message, timeout=1)
Cycle to preview the previous theme from the internal list of themes.
def system_add_keyspace(self, ks_def):
    """Add a keyspace and any column families that are part of it.

    Returns a Deferred that fires with the new schema id.

    Parameters:
     - ks_def
    """
    self._seqid += 1
    # Register the Deferred under this request's sequence id so the
    # matching response can resolve it later.
    d = self._reqs[self._seqid] = defer.Deferred()
    self.send_system_add_keyspace(ks_def)
    return d
adds a keyspace and any column families that are part of it. returns the new schema id. Parameters: - ks_def
def label_from_bin(buf):
    """Convert a binary MPLS label entry to integers.

    :param buf: Binary representation of label.
    :return: MPLS Label and BoS bit.
    """
    mpls_label = type_desc.Int3.to_user(six.binary_type(buf))
    # Label is the top 20 bits of the 24-bit field; the bottom-of-stack
    # flag is the lowest bit.
    return mpls_label >> 4, mpls_label & 1
Converts binary representation label to integer. :param buf: Binary representation of label. :return: MPLS Label and BoS bit.
def can_fetch_pool(self, request: Request):
    """Return whether the request can be fetched based on the pool.

    :raises NotInPoolError: if no parsed robots.txt is cached for
        the request's URL.
    """
    url_info = request.url_info
    user_agent = request.fields.get('User-agent', '')
    if self._robots_txt_pool.has_parser(url_info):
        return self._robots_txt_pool.can_fetch(url_info, user_agent)
    else:
        raise NotInPoolError()
Return whether the request can be fetched based on the pool.
def verify_login(request):
    """Verify the Persona assertion in the given request.

    :return: the email of the user if the assertion is valid.
    :raises HTTPBadRequest: if verification fails.
    """
    verifier = request.registry['persona.verifier']
    try:
        data = verifier.verify(request.POST['assertion'])
    except (ValueError, browserid.errors.TrustError) as e:
        logger.info('Failed persona login: %s (%s)', e, type(e).__name__)
        raise HTTPBadRequest('Invalid assertion')
    return data['email']
Verifies the assertion and the csrf token in the given request. Returns the email of the user if everything is valid, otherwise raises a HTTPBadRequest
def delta_nu(self, *args):
    """Return asteroseismic delta_nu in uHz.

    reference: https://arxiv.org/pdf/1312.3853v1.pdf, Eq (2)
    """
    mass = self.mass(*args)
    radius = self.radius(*args)
    return 134.88 * np.sqrt(mass / radius ** 3)
Returns asteroseismic delta_nu in uHz reference: https://arxiv.org/pdf/1312.3853v1.pdf, Eq (2)
def AddFareObject(self, fare, problem_reporter=None):
    """Deprecated. Please use AddFareAttributeObject."""
    warnings.warn("No longer supported. The Fare class was renamed to "
                  "FareAttribute, and all related functions were renamed "
                  "accordingly.", DeprecationWarning)
    self.AddFareAttributeObject(fare, problem_reporter)
Deprecated. Please use AddFareAttributeObject.
def get_attached_message(self, key, message_type, tags=None, required=False):
    """Return the message attached to the module under `key`, or None.

    Args:
        key: string key of an attached message.
        message_type: concrete protocol message class used to parse the
            attached message from its serialized representation.
        tags: optional set of strings selecting the graph variant from
            which to read the attached message.
        required: if True, an unknown `key` raises KeyError instead of
            returning None.
    Returns:
        An instance of `message_type`, or None.
    Raises:
        KeyError: if `key` is unknown and `required` is True.
    """
    attached_bytes = self._get_attached_bytes(key, tags)
    if attached_bytes is None:
        if required:
            raise KeyError("No attached message for key '%s' in graph version %s "
                           "of Hub Module" % (key, sorted(tags or [])))
        else:
            return None
    message = message_type()
    message.ParseFromString(attached_bytes)
    return message
Returns the message attached to the module under the given key, or None. Module publishers can attach protocol messages to modules at creation time to provide module consumers with additional information, e.g., on module usage or provenance (see hub.attach_message()). A typical use would be to store a small set of named values with modules of a certain type so that a support library for consumers of such modules can be parametric in those values. This method can also be called on a Module instantiated from a ModuleSpec, then `tags` are set to those used in module instantiation. Args: key: A string with the key of an attached message. message_type: A concrete protocol message class (*not* object) used to parse the attached message from its serialized representation. The message type for a particular key must be advertised with the key. tags: Optional set of strings, specifying the graph variant from which to read the attached message. required: An optional boolean. Setting it true changes the effect of an unknown `key` from returning None to raising a KeyError with text about attached messages. Returns: An instance of `message_type` with the message contents attached to the module, or `None` if `key` is unknown and `required` is False. Raises: KeyError: if `key` is unknown and `required` is True.
def train_epoch(model:nn.Module, dl:DataLoader, opt:optim.Optimizer, loss_func:LossFunction)->None:
    "Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`."
    model.train()  # enable training-mode behavior (dropout, batchnorm updates)
    for xb,yb in dl:
        loss = loss_func(model(xb), yb)
        loss.backward()   # accumulate gradients
        opt.step()        # update parameters
        opt.zero_grad()   # clear gradients for the next batch
Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`.
def set_info_handler(codec, handler, data=None):
    """Wrap the openjp2 library function opj_set_info_handler.

    Parameters
    ----------
    codec : CODEC_TYPE
        Codec initialized by create_compress function.
    handler : python function
        The callback function to be used.
    data : anything
        User/client data passed through to the callback.
    Raises
    ------
    RuntimeError
        If the library routine opj_set_info_handler fails.
    """
    # Declare argtypes/restype before calling so ctypes marshals
    # arguments correctly and check_error raises on failure.
    OPENJP2.opj_set_info_handler.argtypes = [CODEC_TYPE,
                                             ctypes.c_void_p,
                                             ctypes.c_void_p]
    OPENJP2.opj_set_info_handler.restype = check_error
    OPENJP2.opj_set_info_handler(codec, handler, data)
Wraps openjp2 library function opj_set_info_handler. Set the info handler use by openjpeg. Parameters ---------- codec : CODEC_TYPE Codec initialized by create_compress function. handler : python function The callback function to be used. user_data : anything User/client data. Raises ------ RuntimeError If the OpenJPEG library routine opj_set_info_handler fails.
def as_rational(self):
    """Return this value as a Rational.

    :returns: this radix as a rational
    :rtype: Rational
    """
    # Recover numerator/denominator digit sequences from the radix parts
    # (integer digits, non-repeating and repeating fractional digits).
    (denominator, numerator) = \
        NatDivision.undivision(
            self.integer_part,
            self.non_repeating_part,
            self.repeating_part,
            self.base
        )
    result = Fraction(
        Nats.convert_to_int(numerator, self.base),
        Nats.convert_to_int(denominator, self.base)
    )
    # Reapply the sign last; the digit sequences are unsigned.
    return result * self.sign
Return this value as a Rational. :returns: this radix as a rational :rtype: Rational
def get_conn(opts, profile=None, host=None, port=None):
    """Return a conn object for accessing memcached.

    Host and port come from the arguments when given, otherwise from the
    merged configuration (optionally under ``profile``).
    """
    if not (host and port):
        # Merge master <- pillar <- minion opts, later sources winning.
        opts_pillar = opts.get('pillar', {})
        opts_master = opts_pillar.get('master', {})
        opts_merged = {}
        opts_merged.update(opts_master)
        opts_merged.update(opts_pillar)
        opts_merged.update(opts)
        if profile:
            conf = opts_merged.get(profile, {})
        else:
            conf = opts_merged
        host = conf.get('memcached.host', DEFAULT_HOST)
        port = conf.get('memcached.port', DEFAULT_PORT)
    if not six.text_type(port).isdigit():
        raise SaltInvocationError('port must be an integer')
    if HAS_LIBS:
        return memcache.Client(['{0}:{1}'.format(host, port)])
    else:
        raise CommandExecutionError(
            '(unable to import memcache, '
            'module most likely not installed)'
        )
Return a conn object for accessing memcached
def setnx(self, name, value):
    """Set the value in the key only if the key doesn't exist.

    :param name: str the name of the redis key
    :param value: value to encode and store
    :return: Future()
    """
    with self.pipe as pipe:
        return pipe.setnx(self.redis_key(name),
                          self.valueparse.encode(value))
Set the value as a string in the key only if the key doesn't exist. :param name: str the name of the redis key :param value: :return: Future()
def encode(self, s):
    """Transform a human-readable string into a sequence of int ids.

    The ids are in the range [num_reserved_ids, vocab_size); ids in
    [0, num_reserved_ids) stay reserved. EOS is not appended.

    Args:
        s: human-readable string to be converted.
    Returns:
        ids: list of integers
    """
    offset = self._num_reserved_ids
    return [offset + int(token) for token in s.split()]
Transform a human-readable string into a sequence of int ids. The ids should be in the range [num_reserved_ids, vocab_size). Ids [0, num_reserved_ids) are reserved. EOS is not appended. Args: s: human-readable string to be converted. Returns: ids: list of integers
def get_catalog(self, query):
    """Fetch a parsed THREDDS catalog from the radar server.

    Parameters
    ----------
    query : RadarQuery
        The parameters to send to the radar server.
    Returns
    -------
    catalog : TDSCatalog
        The catalog of matching data files.
    Raises
    ------
    BadQueryError
        When the query cannot be handled by the server.
    """
    try:
        # Normalize the base URL to avoid a double slash before '?'.
        url = self._base[:-1] if self._base[-1] == '/' else self._base
        url += '?' + str(query)
        return TDSCatalog(url)
    except ET.ParseError:
        # A non-XML reply means the server rejected the query.
        raise BadQueryError(self.get_catalog_raw(query))
Fetch a parsed THREDDS catalog from the radar server. Requests a catalog of radar data files data from the radar server given the parameters in `query` and returns a :class:`~siphon.catalog.TDSCatalog` instance. Parameters ---------- query : RadarQuery The parameters to send to the radar server Returns ------- catalog : TDSCatalog The catalog of matching data files Raises ------ :class:`~siphon.http_util.BadQueryError` When the query cannot be handled by the server See Also -------- get_catalog_raw
def generate_cloudformation_args(stack_name, parameters, tags, template,
                                 capabilities=DEFAULT_CAPABILITIES,
                                 change_set_type=None,
                                 service_role=None,
                                 stack_policy=None,
                                 change_set_name=None):
    """Generate the args for common cloudformation API interactions.

    Used for create_stack/update_stack/create_change_set calls.

    Args:
        stack_name (str): fully qualified stack name in Cloudformation.
        parameters (list): parameter dicts applied to the stack.
        tags (list): tag dicts applied to the stack.
        template: template object; its url or body is used.
        capabilities (list, optional): capabilities for updates.
        change_set_type (str, optional): type for create_change_set.
        service_role (str, optional): service role ARN.
        stack_policy: optional stack policy template object.
        change_set_name (str, optional): name for create_change_set.
    Returns:
        dict: arguments for the Cloudformation API call.
    """
    args = {
        "StackName": stack_name,
        "Parameters": parameters,
        "Tags": tags,
        "Capabilities": capabilities,
    }
    if service_role:
        args["RoleARN"] = service_role
    if change_set_name:
        args["ChangeSetName"] = change_set_name
    if change_set_type:
        args["ChangeSetType"] = change_set_type
    if template.url:
        args["TemplateURL"] = template.url
    else:
        args["TemplateBody"] = template.body
    # Stack policies are not valid on change sets.
    if not change_set_name:
        args.update(generate_stack_policy_args(stack_policy))
    return args
Used to generate the args for common cloudformation API interactions. This is used for create_stack/update_stack/create_change_set calls in cloudformation. Args: stack_name (str): The fully qualified stack name in Cloudformation. parameters (list): A list of dictionaries that defines the parameter list to be applied to the Cloudformation stack. tags (list): A list of dictionaries that defines the tags that should be applied to the Cloudformation stack. template (:class:`stacker.provider.base.Template`): The template object. capabilities (list, optional): A list of capabilities to use when updating Cloudformation. change_set_type (str, optional): An optional change set type to use with create_change_set. service_role (str, optional): An optional service role to use when interacting with Cloudformation. stack_policy (:class:`stacker.providers.base.Template`): A template object representing a stack policy. change_set_name (str, optional): An optional change set name to use with create_change_set. Returns: dict: A dictionary of arguments to be used in the Cloudformation API call.
def _select_parent(action_set): total_fitness = sum(rule.fitness for rule in action_set) selector = random.uniform(0, total_fitness) for rule in action_set: selector -= rule.fitness if selector <= 0: return rule return random.choice(list(action_set))
Select a rule from this action set, with probability proportionate to its fitness, to act as a parent for a new rule in the classifier set. Return the selected rule.
def assertType(var, *allowedTypes):
    """Assert that ``var`` is an instance of one of ``allowedTypes``.

    :param var: the value to check.
    :param allowedTypes: one or more acceptable types.
    :raises NotImplementedError: if ``var`` is none of the allowed types.
    """
    # isinstance takes the allowed types as a single tuple; the old code
    # unpacked them as extra positional arguments, which itself raised a
    # TypeError whenever more than one allowed type was given.
    if not isinstance(var, allowedTypes):
        raise NotImplementedError("This operation is only supported for {}. "
                                  "Instead found {}".format(allowedTypes,
                                                            type(var)))
Asserts that a variable @var is an instance of one of @allowedTypes. Raises a NotImplementedError if the assertion fails.
def spawn_agent(self, agent_definition, location):
    """Queue a spawn-agent command, applied on the next `tick`/`step`.

    The agent won't be able to be used until the next frame.

    Args:
        agent_definition (:obj:`AgentDefinition`): definition of the
            agent to spawn.
        location (np.ndarray or list): XYZ world position (meters).
    """
    self._should_write_to_command_buffer = True
    self._add_agents(agent_definition)
    command_to_send = SpawnAgentCommand(location, agent_definition.name,
                                        agent_definition.type)
    self._commands.add_command(command_to_send)
Queues a spawn agent command. It will be applied when `tick` or `step` is called next. The agent won't be able to be used until the next frame. Args: agent_definition (:obj:`AgentDefinition`): The definition of the agent to spawn. location (np.ndarray or list): The position to spawn the agent in the world, in XYZ coordinates (in meters).
def parse(self):
    """Parse status and repeater data from the packet payload.

    :return: the parsed values.
    """
    # RPS/1BS/4BS carry the status in the last data byte; VLD carries it
    # in the last optional byte.
    if self.rorg in [RORG.RPS, RORG.BS1, RORG.BS4]:
        self.status = self.data[-1]
    if self.rorg == RORG.VLD:
        self.status = self.optional[-1]
    if self.rorg in [RORG.RPS, RORG.BS1, RORG.BS4]:
        # Repeater count lives in the low nibble of the status byte.
        self.repeater_count = enocean.utils.from_bitarray(self._bit_status[4:])
    return self.parsed
Parse data from Packet
def delete(self):
    """Delete the record.

    :return: the id of the delete job reported by the API.
    """
    return self.dyn.delete(self.url).content['job_id']
Delete the record.
def run(self, service_id, **kwargs):
    """Retrieve a service's metric list and register metric data sources.

    Creates a placeholder WidgetData row for every advertised metric key
    that does not have one yet.
    """
    log = self.get_logger(**kwargs)
    log.info("Loading Service for metric sync")
    try:
        service = Service.objects.get(id=service_id)
        log.info("Getting metrics for <%s>" % (service.name))
        metrics = self.get_metrics(service.url, service.token)
        result = metrics.json()
        if "metrics_available" in result:
            for key in result["metrics_available"]:
                # Only create placeholders for keys we have not seen.
                check = WidgetData.objects.filter(service=service, key=key)
                if not check.exists():
                    WidgetData.objects.create(
                        service=service, key=key,
                        title="TEMP - Pending update"
                    )
                    log.info("Add WidgetData for <%s>" % (key,))
        return "Completed metric sync for <%s>" % (service.name)
    except ObjectDoesNotExist:
        logger.error('Missing Service', exc_info=True)
    except SoftTimeLimitExceeded:
        logger.error('Soft time limit exceed processing pull of service metrics \ via Celery.', exc_info=True)
Retrieve a list of metrics. Ensure they are set as metric data sources.
def _find_workflows(mcs, attrs):
    """Find all occurrences of a workflow in the attribute definitions.

    Returns:
        dict(str => StateField): maps an attribute name to a StateField
        describing the related Workflow.
    """
    workflows = {}
    for attribute, value in attrs.items():
        if isinstance(value, Workflow):
            workflows[attribute] = StateField(value)
    return workflows
Finds all occurrences of a workflow in the attributes definitions. Returns: dict(str => StateField): maps an attribute name to a StateField describing the related Workflow.
def css(self, *props, **kwprops):
    """Add css properties to this element.

    Accepts either a single mapping positional argument or keyword
    properties; raises WrongContentError otherwise.
    """
    self._stable = False
    styles = {}
    if props:
        if len(props) == 1 and isinstance(props[0], Mapping):
            styles = props[0]
        else:
            raise WrongContentError(self, props, "Arguments not valid")
    elif kwprops:
        styles = kwprops
    else:
        raise WrongContentError(self, None, "args OR wkargs are needed")
    return self.attr(style=styles)
Adds css properties to this element.
def _meta_schema_factory(self, columns, model, class_mixin):
    """Create a marshmallow-sqlalchemy ModelSchema class.

    :param columns: a list of columns to mix (all columns when falsy)
    :param model: Model
    :param class_mixin: a marshmallow Schema to mix
    :return: ModelSchema
    """
    _model = model
    if columns:
        class MetaSchema(ModelSchema, class_mixin):
            class Meta:
                model = _model
                fields = columns
                strict = True
                sqla_session = self.datamodel.session
    else:
        class MetaSchema(ModelSchema, class_mixin):
            class Meta:
                model = _model
                strict = True
                sqla_session = self.datamodel.session
    return MetaSchema
Creates ModelSchema marshmallow-sqlalchemy :param columns: a list of columns to mix :param model: Model :param class_mixin: a marshamallow Schema to mix :return: ModelSchema
async def create(self, config: dict = None, access: str = None,
                 replace: bool = False) -> Wallet:
    """Create a wallet on the configured name with the given access credential.

    FAIR WARNING: replace=True removes any matching wallet first; the
    existing wallet must use the same access credentials as the input.

    :param config: wallet configuration ('name' or 'id' is required).
    :param access: indy wallet access credential ('key') value.
    :param replace: whether to replace an existing wallet.
    :return: wallet created
    :raises ExtantWallet: if the wallet exists and removal fails.
    """
    LOGGER.debug('WalletManager.create >>> config %s, access %s, replace %s',
                 config, access, replace)
    assert {'name', 'id'} & {k for k in config}
    wallet_name = config.get('name', config.get('id'))
    if replace:
        # Removal requires opening with matching access credentials.
        von_wallet = self.get(config, access)
        if not await von_wallet.remove():
            LOGGER.debug('WalletManager.create <!< Failed to remove wallet %s for replacement',
                         wallet_name)
            raise ExtantWallet('Failed to remove wallet {} for replacement'.format(wallet_name))
    indy_config = self._config2indy(config)
    von_config = self._config2von(config, access)
    rv = Wallet(indy_config, von_config)
    await rv.create()
    LOGGER.debug('WalletManager.create <<< %s', rv)
    return rv
Create wallet on input name with given configuration and access credential value. Raise ExtantWallet if wallet on input name exists already and replace parameter is False. Raise BadAccess on replacement for bad access credentials value. FAIR WARNING: specifying replace=True attempts to remove any matching wallet before proceeding; to succeed, the existing wallet must use the same access credentials that the input configuration has. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy wallet access credential ('key') value, if different than default :param replace: whether to replace old wallet if it exists :return: wallet created
def cmd_loadfile(args):
    """Callback from menu to load a log file."""
    # the menu splits on spaces; rejoin multi-word paths
    path = args[0] if len(args) == 1 else " ".join(args)
    if not os.path.exists(path):
        print("Error loading file ", path)
        return
    if os.name == 'nt':
        # normalise Windows backslashes before handing off to the loader
        path = path.replace("\\", "/")
    loadfile(path.strip('"'))
callback from menu to load a log file
def config_extensions(app):
    """Init application with extensions."""
    # same initialisation order as before: cache, db, main, collect
    for extension in (cache, db, main, collect):
        extension.init_app(app)
    config_babel(app)
Init application with extensions.
async def register_callback(self, cb):
    """Register *cb* and return a closure that unregisters it again.

    :param cb: callable to add to this object's callback set
    :return: zero-argument function that removes *cb* when called
    """
    self._callbacks.add(cb)

    def _unregister():
        self._callbacks.remove(cb)

    return _unregister
Allows the caller to register a callback, and returns a closure that can be used to unregister the provided callback
def parse_value(self, value):
    """Cast the parsed value to `bool`, passing `None` through unchanged."""
    parsed = super(BoolField, self).parse_value(value)
    if parsed is None:
        return None
    return bool(parsed)
Cast value to `bool`.
def authenticate(json_path=None):
    """Get a Natural Language API parser by authenticating the API.

    **Deprecated** — use :obj:`budou.get_parser` instead.

    :param json_path: optional file path to the service account's credentials
    :return: a ``budou`` NLAPI parser
    """
    warnings.warn(
        'budou.authentication() is deprecated. '
        'Please use budou.get_parser() to obtain a parser instead.',
        DeprecationWarning)
    return get_parser('nlapi', credentials_path=json_path)
Gets a Natural Language API parser by authenticating the API. **This method is deprecated.** Please use :obj:`budou.get_parser` to obtain a parser instead. Args: json_path (:obj:`str`, optional): The file path to the service account's credentials. Returns: Parser. (:obj:`budou.parser.NLAPIParser`)
def set_todo_results(self, filename, todo_results):
    """Synchronize todo results between editorstacks.

    Silently does nothing when *filename* is not held in this stack.
    """
    index = self.has_filename(filename)
    if index is not None:
        self.data[index].set_todo_results(todo_results)
Synchronize todo results between editorstacks
def _raise_last_error(bulk_write_result):
    """Backward compatibility helper for insert error handling."""
    errors = bulk_write_result.get("writeErrors")
    if errors:
        _raise_last_write_error(errors)
    # no write errors: surface the most recent write-concern error instead
    _raise_write_concern_error(bulk_write_result["writeConcernErrors"][-1])
Backward compatibility helper for insert error handling.
def survey_loader(sur_dir=SUR_DIR, sur_file=SUR_FILE):
    """Load the given survey file from the given directory.

    :param sur_dir: directory holding the survey file
    :param sur_file: survey file name
    :return: a Survey built from the file contents
    """
    survey_path = os.path.join(sur_dir, sur_file)
    with open(survey_path) as handle:
        return Survey(handle.read())
Loads up the given survey in the given dir.
def get_swagger_versions(settings):
    """Validate and return the Swagger Spec versions this app supports.

    :type settings: dict
    :return: set of version strings, e.g. {'1.2', '2.0'}
    :raises ValueError: when no versions are configured or an unsupported
        version is encountered
    """
    versions = set(aslist(settings.get(
        'pyramid_swagger.swagger_versions', DEFAULT_SWAGGER_VERSIONS)))

    if not versions:
        raise ValueError('pyramid_swagger.swagger_versions is empty')

    for version in versions:
        if version not in SUPPORTED_SWAGGER_VERSIONS:
            raise ValueError('Swagger version {0} is not supported.'
                             .format(version))
    return versions
Validates and returns the versions of the Swagger Spec that this pyramid application supports. :type settings: dict :return: list of strings. eg ['1.2', '2.0'] :raises: ValueError when an unsupported Swagger version is encountered.
def create(controller_id, name):
    """Class-decorator factory that turns a plain class into a kervi controller.

    :param controller_id: unique id forwarded to the Controller base constructor
    :param name: display name forwarded to the Controller base constructor
    :return: a decorator producing a subclass of both the decorated class
        and Controller
    """
    def _decorator(cls):
        class _ControllerClass(cls, Controller):
            def __init__(self):
                Controller.__init__(self, controller_id, name)
                # Register class-level KerviValue attributes declared on the
                # decorated class as controller inputs/outputs, keyed by
                # their attribute name.
                for key in cls.__dict__.keys():
                    prop = cls.__dict__[key]
                    if isinstance(prop, KerviValue):
                        if prop.is_input:
                            self.inputs._add_internal(key, prop)
                        else:
                            self.outputs._add_internal(key, prop)
                # run the decorated class's own initializer last
                cls.__init__(self)
        return _ControllerClass
    return _decorator
Turn class into a kervi controller
def export_data(self):
    """Return a plain dict of the original data overlaid with modifications.

    Keys listed in the deleted-fields set are skipped.  Values that expose
    their own ``export_data`` are exported recursively; anything else is
    passed through unchanged.
    """
    merged = self.__original_data__.copy()
    merged.update(self.__modified_data__)

    exported = {}
    for key, value in merged.items():
        if key in self.__deleted_fields__:
            continue
        try:
            exported[key] = value.export_data()
        except AttributeError:
            # plain value with no export hook
            exported[key] = value
    return exported
Get the results with the modified_data
def read_csr(csr):
    """Return a dict containing details of a certificate request.

    csr: a path to, or PEM encoded string of, the CSR to read.
    """
    request = _get_request_obj(csr)
    details = {
        # X509 versions are zero-based internally; report the 1-based value
        'Version': request.get_version() + 1,
        'Subject': _parse_subject(request.get_subject()),
        'Subject Hash': _dec2hex(request.get_subject().as_hash()),
        'Public Key Hash': hashlib.sha1(
            request.get_pubkey().get_modulus()).hexdigest(),
        'X509v3 Extensions': _get_csr_extensions(request),
    }
    return details
Returns a dict containing details of a certificate request. :depends: - OpenSSL command line tool csr: A path or PEM encoded string containing the CSR to read. CLI Example: .. code-block:: bash salt '*' x509.read_csr /etc/pki/mycert.csr
def make_check(exc_type, template, pred, actual, funcname):
    """Factory for preprocessors that validate an argument with a predicate.

    The returned check raises ``exc_type`` — with a message built from
    ``template`` using the %-style fields 'funcname', 'argname' and
    'actual' — whenever ``pred(argvalue)`` is true; otherwise the value is
    returned unchanged.  ``funcname`` may be a fixed string, or a callable
    applied to the decorated function to derive the displayed name.
    """
    if isinstance(funcname, str):
        get_funcname = lambda _: funcname
    else:
        get_funcname = funcname

    def _check(func, argname, argvalue):
        if not pred(argvalue):
            return argvalue
        raise exc_type(template % {
            'funcname': get_funcname(func),
            'argname': argname,
            'actual': actual(argvalue),
        })

    return _check
Factory for making preprocessing functions that check a predicate on the input value. Parameters ---------- exc_type : Exception The exception type to raise if the predicate fails. template : str A template string to use to create error messages. Should have %-style named template parameters for 'funcname', 'argname', and 'actual'. pred : function[object -> bool] A function to call on the argument being preprocessed. If the predicate returns `True`, we raise an instance of `exc_type`. actual : function[object -> object] A function to call on bad values to produce the value to display in the error message. funcname : str or callable Name to use in error messages, or function to call on decorated functions to produce a name. Passing an explicit name is useful when creating checks for __init__ or __new__ methods when you want the error to refer to the class name instead of the method name.
def ellipse(center, covariance_matrix, level=1, n=1000):
    """Return an error ellipse (n x 2 points) in slope-azimuth space.

    The covariance matrix is decomposed by SVD; the ellipse semi-axes are
    sqrt(singular values) scaled by ``level``, rotated by the SVD rotation
    and translated to ``center``.
    """
    _, singular_values, rotation = N.linalg.svd(covariance_matrix)
    semi_axes = N.sqrt(singular_values) * level

    angles = N.linspace(0, 2 * N.pi, n)
    unit_ellipse = N.column_stack((
        semi_axes[0] * N.cos(angles),
        semi_axes[1] * N.sin(angles)))
    return N.dot(unit_ellipse, rotation) + center
Returns error ellipse in slope-azimuth space
def parse_authentication_request(self, request_body, http_headers=None):
    """Parse and verify an authentication request.

    :param request_body: urlencoded authentication request
    :param http_headers: http headers (accepted for interface parity)
    :return: the parsed AuthorizationRequest
    """
    auth_req = AuthorizationRequest().deserialize(request_body)

    # every configured validator raises on an invalid request
    for validator in self.authentication_request_validators:
        validator(auth_req)

    logger.debug('parsed authentication_request: %s', auth_req)
    return auth_req
Parses and verifies an authentication request. :param request_body: urlencoded authentication request :param http_headers: http headers
def _base_placeholder(self):
    """Return the master placeholder this layout placeholder inherits from.

    Maps this placeholder's type onto the master placeholder type it
    inherits formatting from (most content types fall back to BODY), then
    looks that type up on the slide master.  Returns None when the master
    has no placeholder of the mapped type.
    """
    # layout placeholder type -> master placeholder type it inherits from
    base_ph_type = {
        PP_PLACEHOLDER.BODY: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.CHART: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.BITMAP: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.CENTER_TITLE: PP_PLACEHOLDER.TITLE,
        PP_PLACEHOLDER.ORG_CHART: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.DATE: PP_PLACEHOLDER.DATE,
        PP_PLACEHOLDER.FOOTER: PP_PLACEHOLDER.FOOTER,
        PP_PLACEHOLDER.MEDIA_CLIP: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.OBJECT: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.PICTURE: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.SLIDE_NUMBER: PP_PLACEHOLDER.SLIDE_NUMBER,
        PP_PLACEHOLDER.SUBTITLE: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.TABLE: PP_PLACEHOLDER.BODY,
        PP_PLACEHOLDER.TITLE: PP_PLACEHOLDER.TITLE,
    }[self._element.ph_type]
    slide_master = self.part.slide_master
    return slide_master.placeholders.get(base_ph_type, None)
Return the master placeholder this layout placeholder inherits from.
def rename_item_list(self, item_list_url, new_name):
    """Rename an Item List on the server.

    :param item_list_url: URL of the list (String or ItemList)
    :param new_name: the new name to give the Item List
    :return: the renamed ItemList, if successful
    :raises APIError: if the request was not successful
    """
    payload = json.dumps({'name': new_name})
    resp = self.api_request(str(item_list_url), payload, method="PUT")
    try:
        return ItemList(resp['items'], self, item_list_url, resp['name'])
    except KeyError:
        # prefer the server-supplied error detail when present
        try:
            detail = resp['error']
        except KeyError:
            detail = resp
        raise APIError('200', 'Rename operation failed', detail)
Rename an Item List on the server :type item_list_url: String or ItemList :param item_list_url: the URL of the list to which to add the items, or an ItemList object :type new_name: String :param new_name: the new name to give the Item List :rtype: ItemList :returns: the item list, if successful :raises: APIError if the request was not successful
def port_is_open():
    """Determine if the default port and user is open for business.

    Returns True when the node answers on env.port (lsb_release succeeds),
    False when any connection failure occurs; exits the process on Ctrl-C.

    NOTE(review): Python 2 only code (print statements, the `<>` operator);
    assumes fabric `env`, `settings`, `hide` and an `lsb_release` helper are
    in scope at module level.
    """
    with settings(hide('aborts'), warn_only=True ):
        try:
            if env.verbosity:
                print "Testing node for previous installation on port %s:"% env.port
            # any ssh-level failure here means nothing is listening yet
            distribution = lsb_release()
        except KeyboardInterrupt:
            if env.verbosity:
                print >> sys.stderr, "\nStopped."
            sys.exit(1)
        except:
            return False
        if distribution.distributor_id <> 'Ubuntu':
            print env.host, 'WARNING: Woven has only been tested on Ubuntu >= 10.04. It may not work as expected on',distribution.description
    return True
Determine whether the default port and user are open for business.
def block_user(self, username, domain):
    """Block a user by overwriting their password with a random one.

    :param username: the username of the user to block
    :param domain: the domain of the user
    """
    random_password = self.get_random_password()
    self.set_password(username, domain, random_password)
Block the specified user. The default implementation calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` with a random password. :param username: The username of the user. :type username: str :param domain: The domain of the user. :type domain: str
def generate_seeds(num, root_seed, secret):
    """Deterministically generate a list of seeds from a root seed.

    :param num: number of seeds to generate (must not be negative)
    :param root_seed: seed material to start from; non-bytes values are
        stringified before hashing
    :param secret: secret (bytes) mixed into each subsequent derivation
    :return: list of ``num`` sha256 digests
    :raises HeartbeatError: on negative ``num`` or a ``None`` secret
    """
    if num < 0:
        raise HeartbeatError('%s is not greater than 0' % num)
    if secret is None:
        raise HeartbeatError('secret can not be of type NoneType')

    try:
        current = hashlib.sha256(root_seed).digest()
    except TypeError:
        # root seed was not bytes-like; hash its string representation
        current = hashlib.sha256(str(root_seed).encode()).digest()

    seeds = []
    for _ in range(num):
        seeds.append(current)
        digest = hashlib.sha256(current)
        digest.update(secret)
        current = digest.digest()
    return seeds
Deterministically generate a list of seeds from a root seed. :param num: Number of seeds to generate as int :param root_seed: Seed to start off with. :param secret: Secret value mixed into each subsequent seed derivation. :return: seed values as a list of length num
def set_input_container(_container, cfg):
    """Save the input container path in the configuration.

    :return: True when the container exists and was stored, False otherwise
    """
    if _container and _container.exists():
        cfg["container"]["input"] = str(_container)
        return True
    return False
Save the input for the container in the configurations.
def is_zipstream(data):
    """Like ``zipfile.is_zipfile``, but works on buffers and streams rather
    than filenames.

    If *data* supports the ``read`` method, it is treated as a stream and
    read from to test whether it is a valid ZipFile.  If *data* also
    supports ``tell`` and ``seek``, it is rewound to its original position
    after being tested.  Raw ``bytes``/``str`` input is wrapped in an
    in-memory stream first.

    :param data: bytes, str, or stream-like object to test
    :return: True when the data ends in a valid zip end-of-central-directory
    :raises TypeError: when data is neither bytes/str nor stream-like
    """
    # The original checked `isinstance(data, (str, buffer))`; `buffer` no
    # longer exists in Python 3, which made every call raise NameError.
    if isinstance(data, (bytes, bytearray)):
        data = BytesIO(data)
    elif isinstance(data, str):
        data = BytesIO(data.encode())

    if hasattr(data, "read"):
        # remember the caller's position so we can restore it afterwards
        tell = data.tell() if hasattr(data, "tell") else 0
        try:
            result = bool(_EndRecData(data))
        except IOError:
            result = False
        if hasattr(data, "seek"):
            data.seek(tell)
        return result

    raise TypeError("requires str, buffer, or stream-like object")
just like zipfile.is_zipfile, but works upon buffers and streams rather than filenames. If data supports the read method, it will be treated as a stream and read from to test whether it is a valid ZipFile. If data also supports the tell and seek methods, it will be rewound after being tested.
def assign(self, experiment):
    """Assign an experiment.

    Appends the experiment and an ``empty_farm`` entry in lockstep, so the
    experiments and farms lists advance together.
    """
    self.experiments.append(experiment)
    self.farms.append(empty_farm)
Assign an experiment.
def update_lbaas_l7policy(self, l7policy, body=None):
    """Updates L7 policy via a PUT on its resource path."""
    url = self.lbaas_l7policy_path % l7policy
    return self.put(url, body=body)
Updates L7 policy.
def all(self):
    """Return a synthetic dictionary of all factories.

    Entry points come first; factories with the same key take precedence.
    """
    combined = dict(self.entry_points)
    combined.update(self.factories)
    return combined
Return a synthetic dictionary of all factories.
def rename(self, newname):
    """Rename a cluster.

    @param newname: New cluster name
    @return: An ApiCluster object
    @since: API v2
    """
    payload = self.to_json_dict()
    # API v6 renamed the mutable field from 'name' to 'displayName'
    if self._get_resource_root().version < 6:
        payload['name'] = newname
    else:
        payload['displayName'] = newname
    return self._put_cluster(payload)
Rename a cluster. @param newname: New cluster name @return: An ApiCluster object @since: API v2
async def fetchmany(self, size: int = None) -> Iterable[sqlite3.Row]:
    """Fetch up to `cursor.arraysize` rows, or `size` rows when given."""
    if size is None:
        return await self._execute(self._cursor.fetchmany)
    return await self._execute(self._cursor.fetchmany, size)
Fetch up to `cursor.arraysize` number of rows.
def visualize_learning_result(self, state_key):
    """Visualize learning result.

    Marks the agent's position on a copy of the map and records it; once
    the goal cell is reached, prints the last ten recorded maps followed
    by the total search count.

    :param state_key: (x, y) position of the agent.
    """
    x, y = state_key
    map_arr = copy.deepcopy(self.__map_arr)
    # np.where returns a tuple of index arrays for the goal-label cells
    goal_point_tuple = np.where(map_arr == self.__end_point_label)
    goal_x, goal_y = goal_point_tuple
    # NOTE(review): the map is written as [y][x] here, yet the comparison
    # below matches goal_x against x — confirm the axis order against how
    # self.__map_arr is constructed.
    map_arr[y][x] = "@"
    self.__map_arr_list.append(map_arr)
    if goal_x == x and goal_y == y:
        # replay the final ten recorded steps of the search
        for i in range(10):
            key = len(self.__map_arr_list) - (10 - i)
            print("Number of searches: " + str(key))
            print(self.__map_arr_list[key])
        print("Total number of searches: " + str(self.t))
        print(self.__map_arr_list[-1])
        print("Goal !!")
Visualize learning result.
def to_bytes(instance, encoding='utf-8', error='strict'):
    """Convert an instance recursively to bytes.

    Anything exposing ``encode`` (i.e. text) is encoded; lists, tuples and
    dicts are converted element-wise (both dict keys and values); bytes and
    any other value are returned unchanged.

    :param instance: the value to convert
    :param encoding: codec used when encoding text
    :param error: error-handling scheme passed to ``encode``
    :return: the converted value, same container shape as the input
    """
    if isinstance(instance, bytes):
        return instance
    if hasattr(instance, 'encode'):
        return instance.encode(encoding, error)
    if isinstance(instance, list):
        # dropped the redundant list()/tuple()/dict() wrappers around the
        # comprehensions (flake8-comprehensions C4xx)
        return [to_bytes(item, encoding, error) for item in instance]
    if isinstance(instance, tuple):
        return tuple(to_bytes(item, encoding, error) for item in instance)
    if isinstance(instance, dict):
        return {to_bytes(key, encoding, error): to_bytes(value, encoding, error)
                for key, value in instance.items()}
    return instance
Convert an instance recursively to bytes.
def dropout_no_scaling(x, keep_prob):
    """Like tf.nn.dropout, but does not scale up. Works on integers also.

    Args:
      x: a Tensor
      keep_prob: a floating point number

    Returns:
      Tensor of the same shape as x.
    """
    if keep_prob == 1.0:
        # nothing would be dropped: skip building the mask graph entirely
        return x
    # keep elements where a uniform sample falls below keep_prob; the mask
    # is cast to x's dtype so integer tensors multiply cleanly
    mask = tf.less(tf.random_uniform(tf.shape(x)), keep_prob)
    return x * cast_like(mask, x)
Like tf.nn.dropout, but does not scale up. Works on integers also. Args: x: a Tensor keep_prob: a floating point number Returns: Tensor of the same shape as x.
def blur(self):
    """Remove focus from this element if it currently holds it."""
    scene = self.get_scene()
    if scene:
        if scene._focus_sprite == self:
            scene._focus_sprite = None
removes focus from the current element if it has it
def raw_sensor_strings(self):
    """Read the raw strings from the kernel module sysfs interface.

    :returns: list of raw lines read from the sensor file
    :raises NoSensorFoundError: if the sensor file could not be opened
    :raises SensorNotReadyError: if the first (CRC) line does not end "YES"
    """
    try:
        with open(self.sensorpath, "r") as sensor_file:
            lines = sensor_file.readlines()
    except IOError:
        raise NoSensorFoundError(self.type_name, self.id)

    # the first line ends with "YES" once the sensor's CRC check has passed
    if lines[0].strip()[-3:] != "YES":
        raise SensorNotReadyError(self)

    return lines
Reads the raw strings from the kernel module sysfs interface :returns: raw strings containing all bytes from the sensor memory :rtype: str :raises NoSensorFoundError: if the sensor could not be found :raises SensorNotReadyError: if the sensor is not ready yet
def decode(self, json_string):
    """Decode *json_string* (as json.loads would) and return the first
    post-processed result from the JSONP iterator."""
    parsed = super(JSONPDecoder, self).decode(json_string)
    results = list(self._iterdecode(parsed))
    return results[0]
json_string is basically the string that you would pass to the json.loads method
def set_limit_override(self, service_name, limit_name, value, override_ta=True):
    """Set a manual override on an AWS service limit.

    Delegates to ``set_limit_override`` on the named service instance;
    such overrides take precedence over default limits, and over Trusted
    Advisor data unless ``override_ta`` is False.

    :param service_name: the name of the service to override a limit for
    :param limit_name: the name of the limit to override
    :param value: the new (overridden) limit value
    :param override_ta: whether to use this value even if Trusted Advisor
        supplies limit information
    """
    service = self.services[service_name]
    service.set_limit_override(limit_name, value, override_ta=override_ta)
Set a manual override on an AWS service limits, i.e. if you had limits increased by AWS support. This method calls :py:meth:`._AwsService.set_limit_override` on the corresponding _AwsService instance. Explicitly set limit overrides using this method will take precedence over default limits. They will also take precedence over limit information obtained via Trusted Advisor, unless ``override_ta`` is set to ``False``. :param service_name: the name of the service to override limit for :type service_name: str :param limit_name: the name of the limit to override: :type limit_name: str :param value: the new (overridden) limit value) :type value: int :param override_ta: whether or not to use this value even if Trusted Advisor supplies limit information :type override_ta: bool :raises: :py:exc:`ValueError` if limit_name is not known to the service instance
def get_pull_request_number(task, source_env_prefix):
    """Get the Github pull request number that created the graph.

    Args:
        task: the task definition to inspect
        source_env_prefix (str): environment variable prefix used to get
            repository information

    Returns:
        int: the pull request number, or None if not defined for this task.
    """
    raw = _extract_from_env_in_payload(
        task, source_env_prefix + '_PULL_REQUEST_NUMBER')
    return None if raw is None else int(raw)
Get what Github pull request created the graph. Args: obj (ChainOfTrust or LinkOfTrust): the trust object to inspect source_env_prefix (str): The environment variable prefix that is used to get repository information. Returns: int: the pull request number. None: if not defined for this task.
def _update(self, resource, update_dict=None, params=None, **kwargs):
    """Update the object with a PUT, then refresh local state on success.

    :raises APIError: when the server does not answer 200 OK
    """
    url = self._client._build_url(resource, **kwargs)
    response = self._client._request('PUT', url, json=update_dict, params=params)
    if response.status_code == requests.codes.ok:
        self.refresh()
    else:
        raise APIError("Could not update {} ({})".format(
            self.__class__.__name__, response.json().get('results')))
Update the object.
def listen(self):
    """Listen forever on the zmq.PULL socket, echoing handled replies
    back on the PUSH socket."""
    while True:
        message = self.pull.recv()
        logger.debug("received message of length %d" % len(message))
        # the first 32 bytes are the request uuid; echo it with the reply
        uuid, payload = message[:32], message[32:]
        self.push.send(uuid + self.handle(payload))
Listen forever on the zmq.PULL socket.
def _write_script(self, script_name, ref, qry, outfile):
    """Write the nucmer/delta-filter/show-coords pipeline into a bash script."""
    handle = pyfastaq.utils.open_file_write(script_name)
    print(self._nucmer_command(ref, qry, 'p'), file=handle)
    print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=handle)
    print(self._show_coords_command('p.delta.filter', outfile), file=handle)
    if self.show_snps:
        # optionally emit SNP calls alongside the coords output
        print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=handle)
    pyfastaq.utils.close(handle)
Write commands into a bash script
def ProgramScanner(**kw):
    """Return a prototype Scanner instance for scanning executable files
    for static-lib dependencies."""
    kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH')
    return SCons.Scanner.Base(scan, "ProgramScanner", **kw)
Return a prototype Scanner instance for scanning executable files for static-lib dependencies
def to_pretty_json(self, ignore_none: bool=True, ignore_empty: bool=False) -> str:
    """From instance to pretty (4-space indented) json string.

    :param ignore_none: Properties which is None are excluded if True
    :param ignore_empty: Properties which is empty are excluded if True
    :return: Json string
    """
    pretty_indent = 4
    return self.to_json(pretty_indent, ignore_none, ignore_empty)
From instance to pretty json string :param ignore_none: Properties which is None are excluded if True :param ignore_empty: Properties which is empty are excluded if True :return: Json string Usage: >>> from owlmixin.samples import Human >>> human = Human.from_dict({ ... "id": 1, ... "name": "Tom", ... "favorites": [ ... {"name": "Apple", "names_by_lang": {"en": "Apple", "de": "Apfel"}}, ... {"name": "Orange"} ... ] ... }) >>> print(human.to_pretty_json()) { "favorites": [ { "name": "Apple", "names_by_lang": { "de": "Apfel", "en": "Apple" } }, { "name": "Orange" } ], "id": 1, "name": "Tom" }