code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_user_pubkeys(users):
    """
    Retrieve public SSH keys from GitHub for the specified list of users.

    :param users: list of usernames; an entry may instead be a dict mapping
        one username to a list of key IDs, in which case only those key IDs
        are returned for that user.
    :return: dict mapping user -> {key_id: key}; on non-list input, a dict
        containing an ``Error`` message.
    """
    if not isinstance(users, list):
        return {'Error': 'A list of users is expected'}
    ret = {}
    for user in users:
        key_ids = []
        if isinstance(user, dict):
            # Dict entry: {username: [key_id, ...]} -- unpack it.
            tmp_user = next(six.iterkeys(user))
            key_ids = user[tmp_user]
            user = tmp_user
        url = 'https://api.github.com/users/{0}/keys'.format(user)
        result = salt.utils.http.query(
            url,
            'GET',
            decode=False,
            text=True,
        )
        keys = salt.utils.json.loads(result['text'])
        ret[user] = {}
        for key in keys:
            if key_ids:
                # Only keep explicitly requested key IDs (IDs are compared
                # as strings against the provided list).
                if six.text_type(key['id']) in key_ids:
                    ret[user][key['id']] = key['key']
            else:
                ret[user][key['id']] = key['key']
    return ret
Retrieve a set of public keys from GitHub for the specified list of users. Expects input in list format. Optionally, a value in the list may be a dict whose value is a list of key IDs to be returned. If this is not done, then all keys will be returned. Some example data structures that could be passed in would look like: .. code-block:: yaml ['user1', 'user2', 'user3'] [ 'user1': [ '12345', '67890', ], 'user2', 'user3', ]
def check_diag(self, jac, name):
    """
    Check matrix ``jac`` for (near-)zero diagonal elements and log them.

    :param jac: square matrix whose diagonal is inspected.
    :param name: variable-name group used to resolve offending indices.
    """
    system = self.system
    pos = []
    names = []
    pairs = ''
    size = jac.size
    # Stride through the flattened matrix to extract the main diagonal.
    diag = jac[0:size[0] ** 2:size[0] + 1]
    for idx in range(size[0]):
        # <= 1e-8 treats tiny values as numerically zero.
        if abs(diag[idx]) <= 1e-8:
            pos.append(idx)
    for idx in pos:
        names.append(system.varname.__dict__[name][idx])
    if len(names) > 0:
        for i, j in zip(pos, names):
            pairs += '{0}: {1}\n'.format(i, j)
        logger.debug('Jacobian diagonal check:')
        logger.debug(pairs)
Check matrix ``jac`` for diagonal elements that equal 0
def datatype2schemacls(
        _datatype, _registry=None, _factory=None, _force=True,
        _besteffort=True, **kwargs
):
    """
    Get the schema class associated to a data type by the registry or the
    factory, in that order.

    :param _datatype: data type to look up.
    :param _registry: registry providing ``getbydatatype``; defaults to the
        global one.
    :param _factory: factory providing ``getschemacls``/``build``; defaults
        to the global ones.
    :param bool _force: if True (default), build a schema class when no
        association exists.
    :param bool _besteffort: if True (default), try to resolve by inheritance.
    :param dict kwargs: factory builder kwargs.
    :return: associated schema class, or None if not found and not forced.
    """
    result = None
    # 1. registry association by data type.
    gdbt = getbydatatype if _registry is None else _registry.getbydatatype
    result = gdbt(_datatype, besteffort=_besteffort)
    if result is None:
        # 2. fall back to the factory's schema-class lookup.
        gscls = getschemacls if _factory is None else _factory.getschemacls
        result = gscls(_datatype, besteffort=_besteffort)
    if result is None and _force:
        # 3. build a brand-new schema class as a last resort.
        _build = build if _factory is None else _factory.build
        result = _build(_resource=_datatype, **kwargs)
    return result
Get a schema class which has been associated to the input data type by the registry or the factory, in that order. :param type datatype: data type from which to get the associated schema. :param SchemaRegistry _registry: registry from which to call ``getbydatatype``. Default is the global registry. :param SchemaFactory _factory: factory from which to call ``getschemacls`` if ``getbydatatype`` returns None. Default is the global factory. :param bool _force: if True (default), force the building of a schema class if no schema is associated to the input data type. :param bool _besteffort: if True (default), try to resolve the schema by inheritance. :param dict kwargs: factory builder kwargs. :rtype: type :return: Schema associated to the input registry or factory. None if no association is found.
def get_batch_for_key(data):
    """Retrieve batch information usable as a unique key for the sample."""
    batches = _get_batches(data, require_bam=False)
    # A single batch collapses to a scalar key; multiple batches become a
    # hashable tuple.
    return batches[0] if len(batches) == 1 else tuple(batches)
Retrieve batch information useful as a unique key for the sample.
def get_operator(name):
    """
    Get an operator class from a provider plugin.

    :param name: '<provider>/<operator>' identifier.
    :return: the operator *class object* (not an instance).
    """
    # Split on the first '/' only; the operator name may itself contain '/'.
    provider_name, operator_name = name.split('/', 1)
    return OPERATOR_PROVIDERS[provider_name][operator_name]
Get an operator class from a provider plugin. Attrs: name: The name of the operator class. Returns: The operator *class object* (i.e. not an instance).
def etree(A):
    """
    Compute the elimination tree from the upper triangle of sparse matrix A.

    :param A: square cvxopt ``spmatrix``.
    :return: n-by-1 ``matrix`` of parent indices (a root is its own parent).
    """
    assert isinstance(A,spmatrix), "A must be a sparse matrix"
    assert A.size[0] == A.size[1], "A must be a square matrix"
    n = A.size[0]
    cp,ri,_ = A.CCS
    parent = matrix(0,(n,1))
    # w is a workspace of "ancestor" links used for path compression.
    w = matrix(0,(n,1))
    for k in range(n):
        parent[k] = k
        w[k] = -1
        for p in range(cp[k],cp[k+1]):
            i = ri[p]
            # Walk up from i toward k, compressing visited links to k.
            while ((not i == -1) and (i < k)):
                inext = w[i]
                w[i] = k
                if inext == -1:
                    parent[i] = k
                i = inext;
    return parent
Compute elimination tree from upper triangle of A.
def get_private_rooms(self, **kwargs):
    """Get a listing of all private rooms with their names and IDs."""
    request = GetPrivateRooms(settings=self.settings, **kwargs)
    return request.call(**kwargs)
Get a listing of all private rooms with their names and IDs
def plot_simseries(self, **kwargs: Any) -> None:
    """Plot the series of the |Sim| sequence object.

    See method |Node.plot_allseries| for further information.
    """
    self.__plot_series([self.sequences.sim], kwargs)
Plot the |IOSequence.series| of the |Sim| sequence object. See method |Node.plot_allseries| for further information.
def add_item(self, item):
    """Add ``item`` to the transaction's items under the next sequential key."""
    next_key = "item_" + str(len(self.items) + 1)
    self.items.update({next_key: item})
Updates the list of items in the current transaction
def container_present(name, profile):
    """
    Ensure a libcloud storage container exists, creating it if needed.

    :param name: Container name
    :param profile: The profile key
    :return: state_result tuple/dict describing the outcome.
    """
    containers = __salt__['libcloud_storage.list_containers'](profile)
    match = [z for z in containers if z['name'] == name]
    if match:
        return state_result(True, "Container already exists", name, {})
    else:
        result = __salt__['libcloud_storage.create_container'](name, profile)
        return state_result(True, "Created new container", name, result)
Ensures a container is present. :param name: Container name :type name: ``str`` :param profile: The profile key :type profile: ``str``
def _render_round_init(self, horizon: int, non_fluents: NonFluents) -> None:
    """Print round-init information about `horizon` and `non_fluents`."""
    print('*********************************************************')
    print('>>> ROUND INIT, horizon = {}'.format(horizon))
    print('*********************************************************')
    fluent_variables = self._compiler.rddl.non_fluent_variables
    self._render_fluent_timestep('non-fluents', non_fluents, fluent_variables)
Prints round init information about `horizon` and `non_fluents`.
def _new_type(cls, args):
    """
    Create a new namedtuple-like subclass of ``cls``.

    :param args: list of field names; ``None`` entries get no field name.
    :return: the generated ``_ResultTuple`` subclass.
    """
    # Build the repr format string: unnamed fields as %r, named as name=%r.
    fformat = ["%r" if f is None else "%s=%%r" % f for f in args]
    fformat = "(%s)" % ", ".join(fformat)

    class _ResultTuple(cls):
        __slots__ = ()
        _fformat = fformat
        if args:
            for i, a in enumerate(args):
                if a is not None:
                    # vars() in a class body injects a property per named
                    # field, reading the tuple element at index i.
                    vars()[a] = property(itemgetter(i))
            # Keep the loop variables out of the class namespace.
            del i, a
    return _ResultTuple
Creates a new class similar to namedtuple. Pass a list of field names or None for no field name. >>> x = ResultTuple._new_type([None, "bar"]) >>> x((1, 3)) ResultTuple(1, bar=3)
def reload_configuration(self, event):
    """Reload the configuration when the event targets this component."""
    if event.target != self.uniquename:
        return
    self.log('Reloading configuration')
    self._read_config()
Event triggered configuration reload
def sign(self, msg, key):
    """
    Create a signature over a message as defined in RFC 7515 using an RSA key.

    :param msg: the message bytes.
    :param key: an ``rsa.RSAPrivateKey`` instance.
    :returns: the signature bytes.
    :raises TypeError: if ``key`` is not an RSA private key.
    """
    if not isinstance(key, rsa.RSAPrivateKey):
        raise TypeError(
            "The key must be an instance of rsa.RSAPrivateKey")
    sig = key.sign(msg, self.padding, self.hash)
    return sig
Create a signature over a message as defined in RFC7515 using an RSA key :param msg: the message. :type msg: bytes :returns: bytes, the signature of data. :rtype: bytes
def model_tree(name, model_cls, visited=None):
    """
    Create a simple tree of a model's column names and related models.

    Traverses relationships recursively but skips models already visited,
    so relationship loops terminate.

    :param name: name of the model.
    :param model_cls: SQLAlchemy model class.
    :param visited: set of already-visited model classes (internal).
    :return: dict mapping ``name`` to a list of column names and nested
        relation dicts.
    """
    if not visited:
        visited = set()
    visited.add(model_cls)
    mapper = class_mapper(model_cls)
    columns = [column.key for column in mapper.column_attrs]
    related = [model_tree(rel.key, rel.mapper.entity, visited)
               for rel in mapper.relationships
               if rel.mapper.entity not in visited]
    return {name: columns + related}
Create a simple tree of a model's properties and its related models. It traverses through relations, but ignores any loops. :param name: name of the model :type name: str :param model_cls: model class :param visited: set of visited models :type visited: set or None :return: a dictionary where values are lists of strings or other \ dictionaries
def shutdown_host(port, username=None, password=None, authdb=None):
    """
    Send the shutdown command to a mongod or mongos on the given port.

    Can be called as a separate thread. Connection loss while the server
    shuts down is expected and deliberately ignored.
    """
    host = 'localhost:%i' % port
    try:
        mc = MongoConnection(host)
        try:
            if username and password and authdb:
                if authdb != "admin":
                    raise RuntimeError("given username/password is not for "
                                       "admin database")
                else:
                    try:
                        mc.admin.authenticate(name=username, password=password)
                    except OperationFailure:
                        # Best effort: proceed and let the shutdown command
                        # itself fail if we are not authorized.
                        pass
            # force=True shuts the server down unconditionally.
            mc.admin.command('shutdown', force=True)
        except AutoReconnect:
            # The server dropped the connection while dying -- expected.
            pass
        except OperationFailure:
            print("Error: cannot authenticate to shut down %s." % host)
            return
    except ConnectionFailure:
        pass
    else:
        mc.close()
Send the shutdown command to a mongod or mongos on given port. This function can be called as a separate thread.
def _ngrams(self, sequence, degree): count = max(0, len(sequence) - degree + 1) return [self._tokenizer.joiner.join( self._tokenizer.joiner.join(sequence[i:i+degree]).split()) for i in range(count)]
Returns the n-grams generated from `sequence`. Based on the ngrams function from the Natural Language Toolkit. Each n-gram in the returned list is a string with whitespace removed. :param sequence: the source data to be converted into n-grams :type sequence: sequence :param degree: the degree of the n-grams :type degree: `int` :rtype: `list` of `str`
def convert_bboxes_from_albumentations(bboxes, target_format, rows, cols, check_validity=False):
    """
    Convert a list of bounding boxes from albumentations format to
    ``target_format``.

    Args:
        bboxes (list): bounding boxes in albumentations format.
        target_format (str): 'coco' or 'pascal_voc'.
        rows (int): image height.
        cols (int): image width.
        check_validity (bool): check that all boxes are valid.
    """
    return [convert_bbox_from_albumentations(bbox, target_format, rows, cols, check_validity) for bbox in bboxes]
Convert a list of bounding boxes from the format used by albumentations to a format, specified in `target_format`. Args: bboxes (list): List of bounding box with coordinates in the format used by albumentations target_format (str): required format of the output bounding box. Should be 'coco' or 'pascal_voc'. rows (int): image height cols (int): image width check_validity (bool): check if all boxes are valid boxes
def set_header_s(self, stream):
    """
    Set a customized header stream.

    The header stream can only be replaced before streaming has started.

    :param stream: InMemStream/PipeStream for the header.
    :raises TChannelError: if streaming has already started.
    """
    if self.argstreams[1].state == StreamState.init:
        self.argstreams[1] = stream
    else:
        raise TChannelError(
            "Unable to change the header since the streaming has started")
Set customized header stream. Note: the header stream can only be changed before the stream is consumed. :param stream: InMemStream/PipeStream for header :except TChannelError: Raise TChannelError if the stream is being sent when you try to change the stream.
def get_complex_output(self, stderr=STDOUT):
    """
    Execute the (possibly piped) command and return stdout as a list of lines.

    :param stderr: where to direct stderr (defaults to merging into stdout).
    :return: list of output lines (bytes).

    NOTE(review): ``shell=True`` runs ``self.cmd`` through the shell --
    ensure the command string never contains untrusted input.
    """
    proc = Popen(self.cmd, shell=True, stdout=PIPE, stderr=stderr)
    return proc.stdout.readlines()
Executes a piped command and get the lines of the output in a list :param stderr: where to put stderr :return: output of command
def is_new_namespace_preorder(self, namespace_id_hash, lastblock=None):
    """
    Determine whether a namespace preorder hash is unseen before.

    :param namespace_id_hash: the namespace preorder hash to look up.
    :param lastblock: block height bound; defaults to ``self.lastblock``.
    :return: True if no preorder exists for the hash, False otherwise.
    """
    if lastblock is None:
        lastblock = self.lastblock
    existing = namedb_get_namespace_preorder(self.db, namespace_id_hash, lastblock)
    return existing is None
Given a namespace preorder hash, determine whether or not it is unseen before.
def get_asset_content_form_for_update(self, asset_content_id=None):
    """
    Get the asset form for updating content of an existing asset.

    :param asset_content_id: the ``Id`` of the ``AssetContent``.
    :return: the ``AssetContentForm``.
    :raise NullArgument: if ``asset_content_id`` is ``None``.
    :raise NotFound: if ``asset_content_id`` is not found.
    """
    if asset_content_id is None:
        raise NullArgument()
    asset = None
    # Linear scan across all assets and their contents; no break, so if
    # ids were ever duplicated the last match would win.
    for a in AssetLookupSession(self._repository_id, proxy=self._proxy,
                                runtime=self._runtime).get_assets():
        for ac in a.get_asset_contents():
            if ac.get_id() == asset_content_id:
                asset = a
                asset_content = ac
    if asset is None:
        raise NotFound()
    asset_content_form = objects.AssetContentForm(asset_content._my_map,
                                                  asset_id=asset.get_id())
    # Register the form as not-yet-applied in the pending-forms table.
    self._forms[asset_content_form.get_id().get_identifier()] = not UPDATED
    return asset_content_form
Gets the asset form for updating content for an existing asset. A new asset content form should be requested for each update transaction. :param asset_content_id: the ``Id`` of the ``AssetContent`` :type asset_content_id: ``osid.id.Id`` :return: the asset content form :rtype: ``osid.repository.AssetContentForm`` :raise: ``NotFound`` -- ``asset_content_id`` is not found :raise: ``NullArgument`` -- ``asset_content_id`` is ``null`` :raise: ``OperationFailed`` -- unable to complete request *compliance: mandatory -- This method must be implemented.*
def gen_oui_str(self, oui_list):
    """Generate the OUI argument list for vdptool ('-c' before each OUI)."""
    args = []
    for oui in oui_list:
        args.extend(('-c', oui))
    return args
Generate the OUI string for vdptool.
def simplify_polynomial(polynomial, monomial_substitutions):
    """
    Simplify a polynomial for uniform handling later.

    Expands the polynomial, splits it into monomial terms, applies the
    given monomial substitutions, and re-sums the result.

    :param polynomial: sympy expression or plain number.
    :param monomial_substitutions: mapping of monomials to replacements.
    :return: the simplified polynomial (numbers pass through unchanged).
    """
    if isinstance(polynomial, (int, float, complex)):
        return polynomial
    # Multiplying by 1.0 and expanding forces a sum-of-monomials form.
    polynomial = (1.0 * polynomial).expand(mul=True, multinomial=True)
    if is_number_type(polynomial):
        return polynomial
    if polynomial.is_Mul:
        # A single product is treated as a one-term sum.
        elements = [polynomial]
    else:
        elements = polynomial.as_coeff_mul()[1][0].as_coeff_add()[1]
    new_polynomial = 0
    for element in elements:
        monomial, coeff = separate_scalar_factor(element)
        monomial = apply_substitutions(monomial, monomial_substitutions)
        new_polynomial += coeff * monomial
    return new_polynomial
Simplify a polynomial for uniform handling later.
def bottom(self):
    """
    The row index marking the bottom extent of this cell's vertical span.

    One greater than the index of the bottom-most row of the span (like the
    end of a slice over the cell's rows).
    """
    if self.vMerge is not None:
        tc_below = self._tc_below
        if tc_below is not None and tc_below.vMerge == ST_Merge.CONTINUE:
            # The span continues below: recurse into the continuation cell.
            return tc_below.bottom
    return self._tr_idx + 1
The row index that marks the bottom extent of the vertical span of this cell. This is one greater than the index of the bottom-most row of the span, similar to how a slice of the cell's rows would be specified.
async def fire_sending(self,msg,num_repeats):
    """
    Coroutine that repeatedly sends ``msg`` when no response is expected.

    :param msg: message to send (its ``packed_message`` is transmitted).
    :param num_repeats: number of transmissions; defaults to
        ``self.retry_count`` when None.
    """
    if num_repeats is None:
        num_repeats = self.retry_count
    sent_msg_count = 0
    sleep_interval = 0.05
    while(sent_msg_count < num_repeats):
        # Transport may be gone if the connection was closed mid-send.
        if self.transport:
            self.transport.sendto(msg.packed_message)
        sent_msg_count += 1
        await aio.sleep(sleep_interval)
Coroutine used to send message to the device when no response is needed. :param msg: Message to send :type msg: aiolifx. :param num_repeats: number of times the message is to be sent. :returns: The coroutine that can be scheduled to run :rtype: coroutine
def set_group_conditions(self, group_id, conditions, trigger_mode=None):
    """
    Set the group conditions, replacing any existing ones.

    :param group_id: group to be updated.
    :param conditions: GroupConditionsInfo replacing the old conditions.
    :param trigger_mode: optional TriggerMode scoping the update.
    :return: the new group Conditions as a list of objects.
    """
    data = self._serialize_object(conditions)
    if trigger_mode is not None:
        url = self._service_url(['triggers', 'groups', group_id, 'conditions', trigger_mode])
    else:
        url = self._service_url(['triggers', 'groups', group_id, 'conditions'])
    response = self._put(url, data)
    return Condition.list_to_object_list(response)
Set the group conditions. This replaces any existing conditions on the group and member conditions for all trigger modes. :param group_id: Group to be updated :param conditions: New conditions to replace old ones :param trigger_mode: Optional TriggerMode used :type conditions: GroupConditionsInfo :type trigger_mode: TriggerMode :return: The new Group conditions
def _recurring_setExpressCheckout_adapter(self, params): params['l_billingtype0'] = "RecurringPayments" params['l_billingagreementdescription0'] = params['desc'] REMOVE = ["billingfrequency", "billingperiod", "profilestartdate", "desc"] for k in params.keys(): if k in REMOVE: del params[k] return params
The recurring payment interface to SEC is different than the recurring payment interface to ECP. This adapts a normal call to look like a SEC call.
def _distribution_info(self):
    """
    Determine the distribution entry for the CSPICE package matching the
    current platform.

    :return: the ``self._dists`` value for the detected (system, machine).
    :raises KeyError: if (system, machine) is not a supported environment.
    """
    print('Gathering information...')
    system = platform.system()
    # Normalize any 'CYGWIN_NT-...' variant to plain 'cygwin'.
    system = 'cygwin' if 'CYGWIN' in system else system
    processor = platform.processor()
    # Pointer width of the running interpreter, not of the OS.
    machine = '64bit' if sys.maxsize > 2 ** 32 else '32bit'
    print('SYSTEM: ', system)
    print('PROCESSOR:', processor)
    print('MACHINE: ', machine)
    return self._dists[(system, machine)]
Creates the distribution name and the expected extension for the CSPICE package and returns it. :return (distribution, extension) tuple where distribution is the best guess from the strings available within the platform_urls list of strings, and extension is either "zip" or "tar.Z" depending on whether we are dealing with a Windows platform or else. :rtype: tuple (str, str) :raises: KeyError if the (system, machine) tuple does not correspond to any of the supported SpiceyPy environments.
def tropocollagen(
        cls, aa=28, major_radius=5.0, major_pitch=85.0, auto_build=True):
    """
    Create a model of a collagen triple helix.

    Parameters
    ----------
    aa : int, optional
        Number of amino acids per minor helix.
    major_radius : float, optional
        Radius of the super helix.
    major_pitch : float, optional
        Pitch of the super helix.
    auto_build : bool, optional
        If True, the model is built as part of instantiation.
    """
    instance = cls.from_parameters(
        n=3,
        aa=aa,
        major_radius=major_radius,
        major_pitch=major_pitch,
        phi_c_alpha=0.0,
        minor_helix_type='collagen',
        auto_build=False)
    instance.major_handedness = ['r'] * 3
    rpr_collagen = _helix_parameters['collagen'][1]
    # Stagger the three strands by one residue rise each.
    instance.z_shifts = [-rpr_collagen * 2, -rpr_collagen, 0.0]
    instance.minor_repeats = [None] * 3
    if auto_build:
        instance.build()
    return instance
Creates a model of a collagen triple helix. Parameters ---------- aa : int, optional Number of amino acids per minor helix. major_radius : float, optional Radius of super helix. major_pitch : float, optional Pitch of super helix. auto_build : bool, optional If `True`, the model will be built as part of instantiation.
def print_spans(spans, max_idx: int) -> None:
    """
    Build character-span overlays for comparing spans against the original
    BEL string (debugging aid).

    NOTE(review): the overlay list is built twice (non-Nested spans, then
    only Nested spans) but is never printed or returned, so this function
    currently has no observable effect -- presumably it once printed
    ``bel_spans``; confirm against upstream.
    """
    bel_spans = [" "] * (max_idx + 3)
    # First pass: mark everything except Nested/NSArg spans.
    for val, span in spans:
        if val in ["Nested", "NSArg"]:
            continue
        for i in range(span[0], span[1] + 1):
            bel_spans[i] = val[0]
    # Second pass: rebuild the overlay with only the Nested spans.
    bel_spans = [" "] * (max_idx + 3)
    for val, span in spans:
        if val not in ["Nested"]:
            continue
        for i in range(span[0], span[1] + 1):
            bel_spans[i] = val[0]
Quick test to show how character spans match original BEL String Mostly for debugging purposes
def get_graph(graph, conn, **kwargs):
    """
    Return all the triples for a specific graph.

    args:
        graph: the URI of the graph to retrieve.
        conn: the rdfframework triplestore connection.
    """
    sparql = render_without_request("sparqlGraphDataTemplate.rq",
                                   prefix=NSM.prefix(),
                                   graph=graph)
    return conn.query(sparql, **kwargs)
Returns all the triples for a specific are graph args: graph: the URI of the graph to retreive conn: the rdfframework triplestore connection
def default_endpoint(ctx, param, value):
    """Click callback returning the default endpoint from configuration."""
    if ctx.resilient_parsing:
        # Shell-completion parsing pass: produce no value and no errors.
        return
    config = ctx.obj['config']
    endpoint = default_endpoint_from_config(config, option=value)
    if endpoint is None:
        raise click.UsageError('No default endpoint found.')
    return endpoint
Return default endpoint if specified.
def serialize(self, o):
    """
    Recursively convert ``o`` into a structure safe to serialize as JSON.

    @param o Python object to serialize
    """
    if isinstance(o, (list, tuple)):
        return [self.serialize(i) for i in o]
    if isinstance(o, dict):
        return {k: self.serialize(v) for k, v in o.items()}
    if isinstance(o, datetime):
        return o.isoformat()
    if isinstance(o, Result):
        # Result serializes itself; recurse in case it yields containers.
        return self.serialize(o.serialize())
    return o
Returns a safe serializable object that can be serialized into JSON. @param o Python object to serialize
def get_host_health_data(self, data=None):
    """
    Request host health data of the server.

    :param data: previously retrieved data; reused when it already contains
        the embedded health section, otherwise fetched from the iLO.
    :returns: dictionary containing the embedded health data.
    :raises: IloConnectionError if connecting to the iLO fails.
    :raises: IloError on an error from the iLO.
    """
    if not data or "GET_EMBEDDED_HEALTH_DATA" not in data:
        data = self._execute_command(
            'GET_EMBEDDED_HEALTH', 'SERVER_INFO', 'read')
    return data
Request host health data of the server. :param: the data to retrieve from the server, defaults to None. :returns: the dictionary containing the embedded health data. :raises: IloConnectionError if failed connecting to the iLO. :raises: IloError, on an error from iLO.
def cache_fake_input(cls, weld_input_id, fake_weld_input):
    """
    Cache a fake Weld input to be seen by LazyResult.evaluate.

    Parameters
    ----------
    weld_input_id : str
        Id generated when registering the input in WeldObject.update.
    fake_weld_input : _FakeWeldInput
        The fake Weld input to cache.
    """
    assert isinstance(weld_input_id, str)
    assert isinstance(fake_weld_input, _FakeWeldInput)
    Cache._cache[weld_input_id] = fake_weld_input
Cache the fake Weld input to be seen by LazyResult.evaluate Parameters ---------- weld_input_id : str Generated when registering the fake_weld_input in WeldObject.update. fake_weld_input : _FakeWeldInput The fake Weld input previously generated by create_fake_array_input.
def sky2px(wcs,ra,dec,dra,ddec,cell, beam): dra = beam if dra<beam else dra ddec = beam if ddec<beam else ddec offsetDec = int((ddec/2.)/cell) offsetRA = int((dra/2.)/cell) if offsetDec%2==1: offsetDec += 1 if offsetRA%2==1: offsetRA += 1 raPix,decPix = map(int, wcs.wcs2pix(ra,dec)) return np.array([raPix-offsetRA,raPix+offsetRA,decPix-offsetDec,decPix+offsetDec])
convert a sky region to pixel positions
def EnqueueBreakpointUpdate(self, breakpoint):
    """
    Asynchronously update the specified breakpoint on the backend.

    Returns immediately; a lazily-started daemon worker thread performs
    (and retries) the actual transmission.

    Args:
        breakpoint: breakpoint in either final or non-final state.
    """
    with self._transmission_thread_startup_lock:
        # Start the worker thread on first use only.
        if self._transmission_thread is None:
            self._transmission_thread = threading.Thread(
                target=self._TransmissionThreadProc)
            self._transmission_thread.name = 'Cloud Debugger transmission thread'
            self._transmission_thread.daemon = True
            self._transmission_thread.start()
    # (breakpoint, retry_count) pair; wake the worker.
    self._transmission_queue.append((breakpoint, 0))
    self._new_updates.set()
Asynchronously updates the specified breakpoint on the backend. This function returns immediately. The worker thread is actually doing all the work. The worker thread is responsible to retry the transmission in case of transient errors. Args: breakpoint: breakpoint in either final or non-final state.
def _initialize_logging():
    """Initialize the Application logging console handler and formatters."""
    # Attach a console handler only for a real terminal, or on platforms
    # where console output is handled sensibly.
    if sys.stdout.isatty() or platform.system() in ("Darwin", "Linux"):
        RuntimeGlobals.logging_console_handler = foundations.verbose.get_logging_console_handler()
    RuntimeGlobals.logging_formatters = {"Default": foundations.verbose.LOGGING_DEFAULT_FORMATTER,
                                         "Extended": foundations.verbose.LOGGING_EXTENDED_FORMATTER,
                                         "Standard": foundations.verbose.LOGGING_STANDARD_FORMATTER}
Initializes the Application logging.
def render(self, data, accepted_media_type=None, renderer_context=None):
    """
    Render `data` into JSON, returning a bytestring.
    """
    if data is None:
        return bytes()
    renderer_context = renderer_context or {}
    indent = self.get_indent(accepted_media_type, renderer_context)
    if indent is None:
        separators = SHORT_SEPARATORS if self.compact else LONG_SEPARATORS
    else:
        separators = INDENT_SEPARATORS
    ret = json.dumps(
        data, cls=self.encoder_class,
        indent=indent, ensure_ascii=self.ensure_ascii,
        separators=separators
    )
    if isinstance(ret, six.text_type):
        # Escape U+2028/U+2029: valid in JSON but illegal in JavaScript
        # string literals, which breaks naive JS consumers.
        ret = ret.replace('\u2028', '\\u2028').replace('\u2029', '\\u2029')
        return bytes(ret.encode('utf-8'))
    return ret
Render `data` into JSON, returning a bytestring.
def make_shift_function(alphabet):
    """
    Construct a shift (Caesar) function from an alphabet.

    Args:
        alphabet (iterable): ordered iterable of strings, each representing
            a separate case of the alphabet; cases shift independently.

    Returns:
        Function (shift, symbol) -> shifted symbol; symbols found in no
        case are returned unchanged.
    """
    def shift_case_sensitive(shift, symbol):
        # The first case containing the symbol wins.
        for case in alphabet:
            if symbol in case:
                position = case.index(symbol)
                return case[(position - shift) % len(case)]
        return symbol
    return shift_case_sensitive
Construct a shift function from an alphabet. Examples: Shift cases independently >>> make_shift_function([string.ascii_uppercase, string.ascii_lowercase]) <function make_shift_function.<locals>.shift_case_sensitive> Additionally shift punctuation characters >>> make_shift_function([string.ascii_uppercase, string.ascii_lowercase, string.punctuation]) <function make_shift_function.<locals>.shift_case_sensitive> Shift entire ASCII range, overflowing cases >>> make_shift_function([''.join(chr(x) for x in range(32, 127))]) <function make_shift_function.<locals>.shift_case_sensitive> Args: alphabet (iterable): Ordered iterable of strings representing separate cases of an alphabet Returns: Function (shift, symbol)
def get_meta_graph_def(saved_model_dir, tag_set):
    """
    Read a meta_graph_def from a SavedModel on disk.

    Args:
      :saved_model_dir: path to the saved_model.
      :tag_set: comma-separated string of tags identifying the graph.

    Returns:
      The meta_graph_def whose tags exactly match ``tag_set``.

    Raises:
      RuntimeError: if no graph matches the tag set.
    """
    saved_model = reader.read_saved_model(saved_model_dir)
    set_of_tags = set(tag_set.split(','))
    for meta_graph_def in saved_model.meta_graphs:
        # Exact set equality: a superset of tags does not match.
        if set(meta_graph_def.meta_info_def.tags) == set_of_tags:
            return meta_graph_def
    raise RuntimeError("MetaGraphDef associated with tag-set {0} could not be found in SavedModel".format(tag_set))
Utility function to read a meta_graph_def from disk. From `saved_model_cli.py <https://github.com/tensorflow/tensorflow/blob/8e0e8d41a3a8f2d4a6100c2ea1dc9d6c6c4ad382/tensorflow/python/tools/saved_model_cli.py#L186>`_ Args: :saved_model_dir: path to saved_model. :tag_set: list of string tags identifying the TensorFlow graph within the saved_model. Returns: A TensorFlow meta_graph_def, or raises an Exception otherwise.
def validate_enum_attribute(fully_qualified_name: str, spec: Dict[str, Any],
                            attribute: str,
                            candidates: Set[Union[str, int, float]]) -> Optional[InvalidValueError]:
    """
    Validate that the value of an attribute lies within an allowed set of
    candidates.

    Returns None when the attribute is absent or its value is allowed;
    otherwise returns an InvalidValueError describing the violation.
    """
    if attribute not in spec:
        return None
    value = spec[attribute]
    if value in candidates:
        return None
    return InvalidValueError(fully_qualified_name, spec, attribute, candidates)
Validates to ensure that the value of an attribute lies within an allowed set of candidates
def reverse_url(self, datatype, url, verb='GET', urltype='single', api_version=None):
    """
    Extract parameters from a populated URL by reversing the URL template.

    :param datatype: string identifying the data the url accesses.
    :param url: fully-qualified URL to extract parameters from.
    :param verb: HTTP verb associated with the url.
    :param urltype: adjective describing the nature of the request.
    :return: dict of the template's named parameters.
    :raises KeyError: if the URL does not match the template.
    :raises ValueError: if the URL's API version differs from the requested one.
    """
    api_version = api_version or 'v1'
    templates = getattr(self, 'URL_TEMPLATES__%s' % api_version)
    # Turn the template's {placeholders} into named regex capture groups.
    template_url = r"https://(?P<api_host>.+)/services/api/(?P<api_version>.+)"
    template_url += re.sub(r'{([^}]+)}', r'(?P<\1>.+)', templates[datatype][verb][urltype])
    m = re.match(template_url, url or '')
    if not m:
        raise KeyError("No reverse match from '%s' to %s.%s.%s"
                       % (url, datatype, verb, urltype))
    r = m.groupdict()
    del r['api_host']
    if r.pop('api_version') != api_version:
        raise ValueError("API version mismatch")
    return r
Extracts parameters from a populated URL :param datatype: a string identifying the data the url accesses. :param url: the fully-qualified URL to extract parameters from. :param verb: the HTTP verb needed for use with the url. :param urltype: an adjective used to the nature of the request. :return: dict
def finish(self, items=None, sort_methods=None, succeeded=True, update_listing=False, cache_to_disc=True, view_mode=None):
    """
    Add the provided items to the XBMC interface and end the directory.

    :param items: iterable of dicts suitable for ListItem.from_dict or
        ListItem instances.
    :param sort_methods: list of XBMC sort methods; each entry may be a
        plain sort method or a (sort_method, label2_mask) tuple.
    :param view_mode: integer (or parseable integer string) view-mode id,
        or the name of a view type to resolve via get_view_mode_id.
    :returns: list of all ListItems added to the XBMC interface.
    """
    if items:
        self.add_items(items)
    if sort_methods:
        for sort_method in sort_methods:
            # Tuples/lists carry an extra label2 mask; strings do not.
            if not isinstance(sort_method, basestring) and hasattr(sort_method, '__len__'):
                self.add_sort_method(*sort_method)
            else:
                self.add_sort_method(sort_method)
    if view_mode is not None:
        try:
            view_mode_id = int(view_mode)
        except ValueError:
            # Not numeric: resolve a named view type to its id.
            view_mode_id = self.get_view_mode_id(view_mode)
        if view_mode_id is not None:
            self.set_view_mode(view_mode_id)
    self.end_of_directory(succeeded, update_listing, cache_to_disc)
    return self.added_items
Adds the provided items to the XBMC interface. :param items: an iterable of items where each item is either a dictionary with keys/values suitable for passing to :meth:`xbmcswift2.ListItem.from_dict` or an instance of :class:`xbmcswift2.ListItem`. :param sort_methods: a list of valid XBMC sort_methods. Each item in the list can either be a sort method or a tuple of ``sort_method, label2_mask``. See :meth:`add_sort_method` for more detail concerning valid sort_methods. Example call with sort_methods:: sort_methods = ['label', 'title', ('date', '%D')] plugin.finish(items, sort_methods=sort_methods) :param view_mode: can either be an integer (or parseable integer string) corresponding to a view_mode or the name of a type of view. Currrently the only view type supported is 'thumbnail'. :returns: a list of all ListItems added to the XBMC interface.
def get_instance(self, payload):
    """
    Build an instance of WorkerInstance.

    :param dict payload: payload response from the API.
    :returns: twilio.rest.taskrouter.v1.workspace.worker.WorkerInstance
    """
    return WorkerInstance(self._version, payload, workspace_sid=self._solution['workspace_sid'], )
Build an instance of WorkerInstance :param dict payload: Payload response from the API :returns: twilio.rest.taskrouter.v1.workspace.worker.WorkerInstance :rtype: twilio.rest.taskrouter.v1.workspace.worker.WorkerInstance
def client_new():
    """Create a new OAuth2 client for the current user."""
    form = ClientForm(request.form)
    if form.validate_on_submit():
        c = Client(user_id=current_user.get_id())
        # Generate the client secret salt before applying form values.
        c.gen_salt()
        form.populate_obj(c)
        db.session.add(c)
        db.session.commit()
        return redirect(url_for('.client_view', client_id=c.client_id))
    # GET request or validation failure: re-render the form.
    return render_template(
        'invenio_oauth2server/settings/client_new.html',
        form=form,
    )
Create new client.
def _invokeWrite(self, fileIO, session, directory, filename, replaceParamFile):
    """
    Invoke the write method on another file object associated with this
    project file.

    Looks the file up by project-file relationship first, then retries
    filtered by file extension; multiple matches are delegated to
    _invokeWriteForMultipleOfType.
    """
    instance = None
    try:
        # First attempt: unique lookup by project-file relationship alone.
        instance = session.query(fileIO). \
            filter(fileIO.projectFile == self). \
            one()
    except:
        # NOTE(review): bare except -- presumably meant to catch
        # NoResultFound/MultipleResultsFound from .one(); confirm and narrow.
        extension = filename.split('.')[1]
        try:
            instance = session.query(fileIO). \
                filter(fileIO.projectFile == self). \
                filter(fileIO.fileExtension == extension). \
                one()
        except NoResultFound:
            log.warning('{0} listed as card in project file, but '
                        'the file is not found in the database.'.format(filename))
        except MultipleResultsFound:
            self._invokeWriteForMultipleOfType(directory, extension, fileIO,
                                               filename, session,
                                               replaceParamFile=replaceParamFile)
            return
    if instance is not None:
        instance.write(session=session, directory=directory, name=filename,
                       replaceParamFile=replaceParamFile)
Invoke File Write Method on Other Files
def to_str(obj):
    """Attempt to convert the given object to a (text) string."""
    # On Python 3, decode raw bytes as UTF-8 first.
    if PY3 and isinstance(obj, bytes) and not isinstance(obj, str):
        obj = obj.decode('utf-8')
    if isinstance(obj, string_types):
        return obj
    return str(obj)
Attempts to convert given object to a string object
def touch(ctx, key, policy, admin_pin, force):
    """
    Manage touch policy for OpenPGP keys.

    \b
    KEY     Key slot to set (sig, enc or aut).
    POLICY  Touch policy to set (on, off or fixed).
    """
    controller = ctx.obj['controller']
    old_policy = controller.get_touch(key)
    if old_policy == TOUCH_MODE.FIXED:
        ctx.fail('A FIXED policy cannot be changed!')
    # `force` short-circuits the interactive confirmation prompt.
    force or click.confirm('Set touch policy of {.name} key to {.name}?'.format(
        key, policy), abort=True, err=True)
    if admin_pin is None:
        admin_pin = click.prompt('Enter admin PIN', hide_input=True, err=True)
    controller.set_touch(key, policy, admin_pin.encode('utf8'))
Manage touch policy for OpenPGP keys. \b KEY Key slot to set (sig, enc or aut). POLICY Touch policy to set (on, off or fixed).
def delete_tag_for_component(user, c_id, tag_id):
    """
    Delete a tag from a specific component.

    :param user: requesting user (unused here; kept for the route signature).
    :param c_id: component id the tag is attached to.
    :param tag_id: id of the tag to delete.
    :return: empty 204 JSON response.
    """
    # Chain .where() calls (combined with SQL AND). The original used the
    # Python `and` operator between two clause expressions, which does not
    # build a SQL conjunction -- SQLAlchemy clauses have no boolean value,
    # so the component_id restriction was silently lost/broken.
    query = (_TABLE_TAGS.delete()
             .where(_TABLE_TAGS.c.tag_id == tag_id)
             .where(_TABLE_TAGS.c.component_id == c_id))
    try:
        flask.g.db_conn.execute(query)
    except sa_exc.IntegrityError:
        raise dci_exc.DCICreationConflict(_TABLE_TAGS.c.tag_id, 'tag_id')
    return flask.Response(None, 204, content_type='application/json')
Delete a tag on a specific component.
def focus_prev_sibling(self):
    """Focus the previous sibling of the currently focussed message in the
    thread tree."""
    mid = self.get_selected_mid()
    localroot = self._sanitize_position((mid,))
    if localroot == self.get_focus()[1]:
        # Focus is on the message root: move to its previous sibling.
        newpos = self._tree.prev_sibling_position(mid)
        if newpos is not None:
            newpos = self._sanitize_position((newpos,))
    else:
        # Focus is inside the message widget: jump back to its root first.
        newpos = localroot
    if newpos is not None:
        self.body.set_focus(newpos)
focus previous sibling of currently focussed message in thread tree
def get_row_data(self, row, name=None):
    """
    Return a dict with all available data for a row in the extension.

    Parameters
    ----------
    row : tuple, list, string
        A valid index for the extension DataFrames.
    name : string, optional
        If given, stored under key 'name' in the returned dict, making the
        dict directly usable to build a new extension.

    Returns
    -------
    dict of pandas DataFrames
    """
    retdict = {}
    for rowname, data in zip(self.get_DataFrame(),
                             self.get_DataFrame(data=True)):
        # NOTE(review): DataFrame.ix was removed in pandas 1.0 -- this
        # needs migrating to .loc (label-based) or .iloc (positional).
        retdict[rowname] = pd.DataFrame(data.ix[row])
    if name:
        retdict['name'] = name
    return retdict
Returns a dict with all available data for a row in the extension Parameters ---------- row : tuple, list, string A valid index for the extension DataFrames name : string, optional If given, adds a key 'name' with the given value to the dict. In that case the dict can be used directly to build a new extension. Returns ------- dict object with the data (pandas DataFrame)for the specific rows
def delegate_names(delegate, accessors, typ, overwrite=False):
    """
    Add delegated names to a class using a class decorator.

    Alternative to calling `_add_delegate_accessors` directly below a class
    definition.

    Parameters
    ----------
    delegate : object
        The class to take methods/properties and doc-strings from.
    accessors : Sequence[str]
        List of accessors to add.
    typ : {'property', 'method'}
    overwrite : boolean, default False
        Overwrite the method/property in the target class if it exists.

    Returns
    -------
    callable
        A class decorator.
    """
    def decorator(cls):
        cls._add_delegate_accessors(delegate, accessors, typ,
                                    overwrite=overwrite)
        return cls
    return decorator
Add delegated names to a class using a class decorator. This provides an alternative usage to directly calling `_add_delegate_accessors` below a class definition. Parameters ---------- delegate : object the class to get methods/properties & doc-strings accessors : Sequence[str] List of accessor to add typ : {'property', 'method'} overwrite : boolean, default False overwrite the method/property in the target class if it exists Returns ------- callable A class decorator. Examples -------- @delegate_names(Categorical, ["categories", "ordered"], "property") class CategoricalAccessor(PandasDelegate): [...]
def bench_report(results):
    """Print a report for the given benchmark results to the console."""
    table = Table(names=['function', 'nest', 'nside', 'size',
                         'time_healpy', 'time_self', 'ratio'],
                  dtype=['S20', bool, int, int, float, float, float],
                  masked=True)
    for row in results:
        table.add_row(row)
    table['time_self'].format = '10.7f'
    if HEALPY_INSTALLED:
        # The comparison columns are only meaningful when healpy timings
        # were actually collected.
        table['ratio'] = table['time_self'] / table['time_healpy']
        table['time_healpy'].format = '10.7f'
        table['ratio'].format = '7.2f'
    table.pprint(max_lines=-1)
Print a report for given benchmark results to the console.
def build_specfile_header(spec):
    """
    Build all sections but the %files section of an RPM spec file.

    :param spec: dict of spec-file tag values; ``X_RPM_BUILDROOT`` is given
        a default if absent (mutates ``spec``).
    :return: the header sections as a single string.
    """
    # Accumulate sections in a list (the original shadowed the builtin
    # `str` as its accumulator variable).
    sections = []

    # Required tags: a missing key is an error in SimpleTagCompiler.
    mandatory_header_fields = {
        'NAME'           : '%%define name %s\nName: %%{name}\n',
        'VERSION'        : '%%define version %s\nVersion: %%{version}\n',
        'PACKAGEVERSION' : '%%define release %s\nRelease: %%{release}\n',
        'X_RPM_GROUP'    : 'Group: %s\n',
        'SUMMARY'        : 'Summary: %s\n',
        'LICENSE'        : 'License: %s\n',
    }
    sections.append(SimpleTagCompiler(mandatory_header_fields).compile( spec ))

    # Optional tags: silently skipped when absent (mandatory=0).
    optional_header_fields = {
        'VENDOR'              : 'Vendor: %s\n',
        'X_RPM_URL'           : 'Url: %s\n',
        'SOURCE_URL'          : 'Source: %s\n',
        'SUMMARY_'            : 'Summary(%s): %s\n',
        'X_RPM_DISTRIBUTION'  : 'Distribution: %s\n',
        'X_RPM_ICON'          : 'Icon: %s\n',
        'X_RPM_PACKAGER'      : 'Packager: %s\n',
        'X_RPM_GROUP_'        : 'Group(%s): %s\n',
        'X_RPM_REQUIRES'      : 'Requires: %s\n',
        'X_RPM_PROVIDES'      : 'Provides: %s\n',
        'X_RPM_CONFLICTS'     : 'Conflicts: %s\n',
        'X_RPM_BUILDREQUIRES' : 'BuildRequires: %s\n',
        'X_RPM_SERIAL'        : 'Serial: %s\n',
        'X_RPM_EPOCH'         : 'Epoch: %s\n',
        'X_RPM_AUTOREQPROV'   : 'AutoReqProv: %s\n',
        'X_RPM_EXCLUDEARCH'   : 'ExcludeArch: %s\n',
        'X_RPM_EXCLUSIVEARCH' : 'ExclusiveArch: %s\n',
        'X_RPM_PREFIX'        : 'Prefix: %s\n',
        'X_RPM_BUILDROOT'     : 'BuildRoot: %s\n',
    }
    # Ensure a sane default build root so rpmbuild never writes into /.
    if 'X_RPM_BUILDROOT' not in spec:
        spec['X_RPM_BUILDROOT'] = '%{_tmppath}/%{name}-%{version}-%{release}'
    sections.append(SimpleTagCompiler(optional_header_fields, mandatory=0).compile( spec ))
    return ''.join(sections)
Builds all sections but the %file of a rpm specfile
def find_elb_dns_zone_id(name='', env='dev', region='us-east-1'):
    """
    Get an application's AWS ELB DNS zone id.

    Args:
        name (str): ELB name.
        env (str): Environment/account of the ELB (used as boto3 profile).
        region (str): AWS Region.

    Returns:
        str: ELB DNS zone ID.
    """
    LOG.info('Find %s ELB DNS Zone ID in %s [%s].', name, env, region)
    client = boto3.Session(profile_name=env).client('elb', region_name=region)
    elbs = client.describe_load_balancers(LoadBalancerNames=[name])
    # Exactly one ELB is described since we filtered by name.
    return elbs['LoadBalancerDescriptions'][0]['CanonicalHostedZoneNameID']
Get an application's AWS elb dns zone id. Args: name (str): ELB name env (str): Environment/account of ELB region (str): AWS Region Returns: str: elb DNS zone ID
def _check_emphasis(numbers, emph): "Find index postions in list of numbers to be emphasized according to emph." pat = '(\w+)\:(eq|gt|ge|lt|le)\:(.+)' emphasized = {} for (i, n) in enumerate(numbers): if n is None: continue for em in emph: color, op, value = re.match(pat, em).groups() value = float(value) if op == 'eq' and n == value: emphasized[i] = color elif op == 'gt' and n > value: emphasized[i] = color elif op == 'ge' and n >= value: emphasized[i] = color elif op == 'lt' and n < value: emphasized[i] = color elif op == 'le' and n <= value: emphasized[i] = color return emphasized
Find index positions in a list of numbers to be emphasized according to emph.
def save(self, *args, **kwargs):
    """Save the forum instance, refreshing its slug and reacting to moves.

    If the forum's parent changed since the last save, trackers are
    updated and the ``forum_moved`` signal is emitted with the previous
    parent.
    """
    old_instance = None
    if self.pk:
        # Fetch the persisted state so we can detect a parent change below.
        old_instance = self.__class__._default_manager.get(pk=self.pk)

    # Keep the slug in sync with the (possibly updated) name.
    self.slug = slugify(force_text(self.name), allow_unicode=True)

    super().save(*args, **kwargs)

    if old_instance and old_instance.parent != self.parent:
        self.update_trackers()
        signals.forum_moved.send(sender=self, previous_parent=old_instance.parent)
Saves the forum instance.
def read_lamination_parameters(thickness, laminaprop, rho,
                               xiA1, xiA2, xiA3, xiA4,
                               xiB1, xiB2, xiB3, xiB4,
                               xiD1, xiD2, xiD3, xiD4,
                               xiE1, xiE2, xiE3, xiE4):
    r"""Calculate a laminate based on the lamination parameters.

    The lamination parameters `\xi_{A1} \cdots \xi_{A4}`,
    `\xi_{B1} \cdots \xi_{B4}`, `\xi_{D1} \cdots \xi_{D4}` and
    `\xi_{E1} \cdots \xi_{E4}` are used to calculate the laminate
    constitutive matrix.

    Parameters
    ----------
    thickness : float
        The total thickness of the laminate.
    laminaprop : tuple
        The laminaprop tuple used to define the laminate material.
    rho : float
        Material density.
    xiA1 to xiE4 : float
        The 16 lamination parameters used to define the laminate.

    Returns
    -------
    lam : Laminate
        Laminate with the ABD and ABDE matrices already calculated.
    """
    lam = Laminate()
    lam.h = thickness
    lam.matobj = read_laminaprop(laminaprop, rho)
    # Leading entries are fixed by definition: xi_{A0}=1, xi_{B0}=0,
    # xi_{D0}=1, xi_{E0}=1.
    lam.xiA = np.array([1, xiA1, xiA2, xiA3, xiA4], dtype=np.float64)
    lam.xiB = np.array([0, xiB1, xiB2, xiB3, xiB4], dtype=np.float64)
    lam.xiD = np.array([1, xiD1, xiD2, xiD3, xiD4], dtype=np.float64)
    lam.xiE = np.array([1, xiE1, xiE2, xiE3, xiE4], dtype=np.float64)

    lam.calc_ABDE_from_lamination_parameters()
    return lam
r"""Calculates a laminate based on the lamination parameters. The lamination parameters: `\xi_{A1} \cdots \xi_{A4}`, `\xi_{B1} \cdots \xi_{B4}`, `\xi_{C1} \cdots \xi_{C4}`, `\xi_{D1} \cdots \xi_{D4}`, `\xi_{E1} \cdots \xi_{E4}` are used to calculate the laminate constitutive matrix. Parameters ---------- thickness : float The total thickness of the laminate laminaprop : tuple The laminaprop tuple used to define the laminate material. rho : float Material density. xiA1 to xiD4 : float The 16 lamination parameters used to define the laminate. Returns ------- lam : Laminate laminate with the ABD and ABDE matrices already calculated
def join(cls, splits):
    """Join an array of ids into a compound id string.

    Each id is double-quoted and the quoted ids are wrapped in a
    JSON-style bracketed list, e.g. ``["a","b"]``.
    """
    quoted = ','.join('"{}"'.format(piece) for piece in splits)
    return '[{}]'.format(quoted)
Join an array of ids into a compound id string
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                allow_missing=False, force_init=False, allow_extra=False):
    """Initialize parameters and auxiliary states.

    By default this does nothing; subclasses that contain parameters
    should override it.

    Parameters
    ----------
    initializer : Initializer
        Called to initialize parameters if needed.
    arg_params : dict
        If not None, existing ``arg_params`` to copy initialization from.
    aux_params : dict
        If not None, existing ``aux_params`` to copy initialization from.
    allow_missing : bool
        If True, params may contain missing values and the initializer
        is called to fill them.
    force_init : bool
        If True, force re-initialization even if already initialized.
    allow_extra : bool
        Whether to allow extra parameters not needed by the symbol.
    """
    pass
Initializes the parameters and auxiliary states. By default this function does nothing. Subclass should override this method if contains parameters. Parameters ---------- initializer : Initializer Called to initialize parameters if needed. arg_params : dict If not ``None``, should be a dictionary of existing `arg_params`. Initialization will be copied from that. aux_params : dict If not ``None``, should be a dictionary of existing `aux_params`. Initialization will be copied from that. allow_missing : bool If ``True``, params could contain missing values, and the initializer will be called to fill those missing params. force_init : bool If ``True``, will force re-initialize even if already initialized. allow_extra : boolean, optional Whether allow extra parameters that are not needed by symbol. If this is True, no error will be thrown when arg_params or aux_params contain extra parameters that is not needed by the executor.
def write_eof(self):
    """Close the write direction of the transport.

    Blocks while the transport's write buffer is at capacity.
    """
    self._check_writable()
    # Wait until there is room in the write buffer before signalling EOF.
    self._transport._can_write.wait()
    self._transport.write_eof()
Close the write direction of the transport. This method will block if the transport's write buffer is at capacity.
def cf_encoder(variables, attributes):
    """Encode variables and attributes to conform to CF conventions.

    Parameters
    ----------
    variables : dict
        Mapping from variable name to xarray.Variable.
    attributes : dict
        Mapping from attribute name to value; returned unchanged.

    Returns
    -------
    tuple
        (encoded variables dict, attributes dict).

    See also
    --------
    encode_cf_variable
    """
    new_vars = OrderedDict((k, encode_cf_variable(v, name=k))
                           for k, v in variables.items())
    return new_vars, attributes
A function which takes a dict of variables and a dict of attributes and
encodes them to conform to CF conventions as much as possible. This
includes masking, scaling, character array handling, and CF-time encoding.

Parameters
----------
variables : dict
    A dictionary mapping from variable name to xarray.Variable
attributes : dict
    A dictionary mapping from attribute name to value

Returns
-------
encoded_variables : dict
    A dictionary mapping from variable name to xarray.Variable
encoded_attributes : dict
    A dictionary mapping from attribute name to value

See also
--------
encode_cf_variable, decode_cf_variable
def set_extra_info(self, username, extra_info):
    """Set extra info for the given user via a PUT request.

    :param username: The username of the user to update.
    :param extra_info: The extra info as a JSON encoded string (sent as
        the request body).
    """
    url = self._get_extra_info_url(username)
    make_request(url, method='PUT', body=extra_info, timeout=self.timeout)
Set extra info for the given user.

Raise a ServerError if an error occurs in the request process.

@param username The username for the user to update.
@param extra_info The extra info as a JSON encoded string, or as a
    Python dictionary-like object.
def flat_data(self):
    """Fold all the modified data into the original data.

    Nested model-like values are flattened recursively, deleted fields
    are dropped, and the modified-data buffer is cleared afterwards.
    """
    def flat_field(value):
        # Flatten nested values in place; plain values raise
        # AttributeError (no flat_data) and are returned unchanged.
        try:
            value.flat_data()
            return value
        except AttributeError:
            return value

    # NOTE: this aliases (and mutates) the original-data dict before
    # rebuilding it below.
    modified_dict = self.__original_data__
    modified_dict.update(self.__modified_data__)
    self.__original_data__ = {k: flat_field(v) for k, v in modified_dict.items()
                              if k not in self.__deleted_fields__}

    self.clear_modified_data()
Pass all the data from modified_data to original_data
def sqlvm_aglistener_create(client, cmd, availability_group_listener_name, sql_virtual_machine_group_name,
                            resource_group_name, availability_group_name, ip_address, subnet_resource_id,
                            load_balancer_resource_id, probe_port, sql_virtual_machine_instances, port=1433,
                            public_ip_address_resource_id=None):
    """Create an availability group listener.

    Validates all supplied resource ids, builds the private IP and load
    balancer configuration, issues the (blocking) create_or_update call,
    then returns the created listener.

    Raises:
        CLIError: if any of the supplied resource ids is malformed.
    """
    # Fail fast on malformed resource ids before calling the service.
    if not is_valid_resource_id(subnet_resource_id):
        raise CLIError("Invalid subnet resource id.")

    if not is_valid_resource_id(load_balancer_resource_id):
        raise CLIError("Invalid load balancer resource id.")

    # Public IP is optional; only validate when provided.
    if public_ip_address_resource_id and not is_valid_resource_id(public_ip_address_resource_id):
        raise CLIError("Invalid public IP address resource id.")

    for sqlvm in sql_virtual_machine_instances:
        if not is_valid_resource_id(sqlvm):
            raise CLIError("Invalid SQL virtual machine resource id.")

    # NOTE(review): the conditional below is redundant -- an invalid
    # subnet id has already raised above, so the else-branch is dead.
    private_ip_object = PrivateIPAddress(ip_address=ip_address,
                                         subnet_resource_id=subnet_resource_id if is_valid_resource_id(subnet_resource_id) else None)

    load_balancer_object = LoadBalancerConfiguration(private_ip_address=private_ip_object,
                                                     public_ip_address_resource_id=public_ip_address_resource_id,
                                                     load_balancer_resource_id=load_balancer_resource_id,
                                                     probe_port=probe_port,
                                                     sql_virtual_machine_instances=sql_virtual_machine_instances)

    ag_listener_object = AvailabilityGroupListener(availability_group_name=availability_group_name,
                                                   load_balancer_configurations=load_balancer_object,
                                                   port=port)

    # Wait for the long-running create/update to finish, then fetch the
    # resulting listener so the caller gets the server-side view.
    LongRunningOperation(cmd.cli_ctx)(sdk_no_wait(False, client.create_or_update, resource_group_name,
                                                  sql_virtual_machine_group_name, availability_group_listener_name,
                                                  ag_listener_object))

    return client.get(resource_group_name, sql_virtual_machine_group_name, availability_group_listener_name)
Creates an availability group listener
def on_message(self, ws, message):
    """Websocket on_message event handler.

    Decodes ``message`` as JSON and enqueues it as an RTMMessage in
    ``self._inbox``; on decode failure the error is recorded instead of
    raising.
    """
    try:
        data = json.loads(message)
    except Exception:
        # Broad by design: any malformed payload is reported, not raised.
        self._set_error(message, "decode message failed")
    else:
        self._inbox.put(RTMMessage(data))
Websocket on_message event handler Saves message as RTMMessage in self._inbox
def _geoid_radius(latitude: float) -> float:
    """Calculate the GEOID radius at a given latitude.

    Parameters
    ----------
    latitude : float
        Latitude in degrees.

    Returns
    -------
    float
        GEOID radius in meters, interpolating between the WGS84
        equatorial (Rmax_WGS84) and polar (Rmin_WGS84) radii.
    """
    lat = deg2rad(latitude)
    return sqrt(1/(cos(lat) ** 2 / Rmax_WGS84 ** 2 + sin(lat) ** 2 / Rmin_WGS84 ** 2))
Calculates the GEOID radius at a given latitude Parameters ---------- latitude : float Latitude (degrees) Returns ------- R : float GEOID Radius (meters)
def _connect(self):
    """Connect to the Asterisk Manager Interface via telnet.

    Raises:
        Exception: if the TCP connection cannot be established.
    """
    try:
        if sys.version_info[:2] >= (2, 6):
            # telnetlib.Telnet gained a timeout parameter in Python 2.6.
            self._conn = telnetlib.Telnet(self._amihost, self._amiport, connTimeout)
        else:
            self._conn = telnetlib.Telnet(self._amihost, self._amiport)
    except Exception:
        # Was a bare ``except:`` -- that also swallowed SystemExit and
        # KeyboardInterrupt; only genuine errors should be translated.
        raise Exception(
            "Connection to Asterisk Manager Interface on "
            "host %s and port %s failed." % (self._amihost, self._amiport)
        )
Connect to Asterisk Manager Interface.
def cdfNormal(z):
    """Robust implementation of the CDF of a standard normal.

    Ported from Matthias Seeger's C implementation in apbsint
    (SpecfunServices.h).
    """
    if (abs(z) < ERF_CODY_LIMIT1):
        # Central region: rational approximation of erf(z / sqrt(2)).
        return 0.5 * (1.0 + (z / M_SQRT2) * _erfRationalHelperR3(0.5 * z * z))
    elif (z < 0.0):
        # Left tail: pdf-based expansion avoids catastrophic cancellation.
        return np.exp(logPdfNormal(z)) * _erfRationalHelper(-z) / (-z)
    else:
        # Right tail: complement of the symmetric left-tail expression.
        return 1.0 - np.exp(logPdfNormal(z)) * _erfRationalHelper(z) / z
Robust implementation of the cdf of a standard normal.

@see the original C implementation by Matthias Seeger:
https://github.com/mseeger/apbsint/blob/master/src/eptools/potentials/SpecfunServices.h
def has_source_contents(self, src_id):
    """Check whether embedded source contents exist for ``src_id``.

    Returns the bool result of the native
    ``lsm_view_has_source_contents`` call.
    """
    return bool(rustcall(_lib.lsm_view_has_source_contents, self._get_ptr(), src_id))
Checks if some sources exist.
def parse_int_list(s):
    """Parse a comma-separated list of integers.

    The list may additionally contain ranges such as "1-5", which are
    expanded into 1,2,3,4,5.

    :param s: string such as ``"1,3-5,9"``
    :returns: list of ints
    :raises ValueError: if an entry has more than one dash
    """
    result = []
    for token in s.split(','):
        bounds = token.strip().split('-')
        if len(bounds) == 1:
            result.append(int(bounds[0]))
        elif len(bounds) == 2:
            low, high = bounds
            result.extend(range(int(low), int(high) + 1))
        else:
            raise ValueError("invalid range: '{0}'".format(s))
    return result
Parse a comma-separated list of integers.

The list may additionally contain ranges such as "1-5", which will be
expanded into "1,2,3,4,5". Returns a list of ints.
def read(handle, bytes):
    """Read a chunk from an open file descriptor.

    ``bytes`` shadows the builtin name; kept for API compatibility with
    the underlying czmq binding.
    """
    return Zchunk(lib.zchunk_read(coerce_py_file(handle), bytes), True)
Read chunk from an open file descriptor
def index():
    """Display productpage with normal user and test user buttons."""
    global productpage
    # Render the service details dict as an HTML table for the template.
    table = json2html.convert(json = json.dumps(productpage),
                              table_attributes="class=\"table table-condensed table-bordered table-hover\"")
    return render_template('index.html', serviceTable=table)
Display productpage with normal user and test user buttons
def _create_oracle(oracle_type, **kwargs):
    """A routine for creating a factor oracle.

    Args:
        oracle_type: 'f' for a factor oracle (FO); any other value
            yields a Markov oracle (MO).
        **kwargs: forwarded to the oracle constructor.
    """
    if oracle_type == 'f':
        return FO(**kwargs)
    # 'a' and any unrecognized type both mapped to MO in the original
    # if/elif/else chain; the duplicated branches are collapsed here.
    return MO(**kwargs)
A routine for creating a factor oracle.
def is_char_in_pairs(pos_char, pairs):
    """Return True if the character position lies strictly inside any
    pair of brackets or quotes.

    :param pos_char: position of the character to test
    :param pairs: mapping of opening position -> closing position
    """
    return any(opening < pos_char < closing
               for opening, closing in pairs.items())
Return True if the charactor is in pairs of brackets or quotes.
def set_title(self, name):
    """Set the song's title (ID3 TIT2 frame).

    :param name: title as UTF-8 encoded bytes (decoded here before
        being stored in the frame)
    """
    # encoding=3 selects UTF-8 for the ID3 text frame.
    self._set_attr(TIT2(encoding=3, text=name.decode('utf-8')))
Sets song's title :param name: title
def pop(self, index=None):
    """Remove and return an element of the OrderedSet.

    With no ``index`` the tail element is removed; otherwise the element
    at ``index`` (negative indices allowed) is removed and every later
    position in the index map is shifted down by one.

    This implementation is meant for the OrderedSet from the
    ordered_set package only.

    Raises:
        KeyError: if the set is empty.
        IndexError: if ``index`` is out of range.
    """
    if not self.items:
        raise KeyError('Set is empty')

    def remove_index(i):
        # Drop the element at position ``i`` from both the ordered item
        # list and the element -> position map, returning the element.
        elem = self.items[i]
        del self.items[i]
        del self.map[elem]
        return elem

    if index is None:
        # Tail removal leaves all remaining positions unchanged.
        elem = remove_index(-1)
    else:
        size = len(self.items)
        if index < 0:
            # Normalize a negative index relative to the current size.
            index = size + index
        if index < 0:
            raise IndexError('assignement index out of range')
        elif index >= size:
            raise IndexError('assignement index out of range')
        elem = remove_index(index)
        # Shift map positions after the removed slot down by one.
        for k, v in self.map.items():
            if v >= index and v > 0:
                self.map[k] = v - 1
    return elem
Removes an element at the tail of the OrderedSet or at a dedicated position. This implementation is meant for the OrderedSet from the ordered_set package only.
def swipe(self, element, x, y, duration=None):
    """Swipe over an element.

    :param element: either a WebElement, PageElement or element locator
                    as a tuple (locator_type, locator_value)
    :param x: horizontal movement
    :param y: vertical movement
    :param duration: time to take the swipe, in ms
    :raises Exception: on non-mobile (Selenium) tests, where swipe is
                       not available
    """
    if not self.driver_wrapper.is_mobile_test():
        raise Exception('Swipe method is not implemented in Selenium')

    center = self.get_center(element)
    initial_context = self.driver_wrapper.driver.current_context
    if self.driver_wrapper.is_web_test() or initial_context != 'NATIVE_APP':
        # Webview coordinates must be translated to native coordinates.
        center = self.get_native_coords(center)

    # On iOS (x, y) are passed through as-is; elsewhere they are offsets
    # relative to the element center.
    end_x = x if self.driver_wrapper.is_ios_test() else center['x'] + x
    end_y = y if self.driver_wrapper.is_ios_test() else center['y'] + y
    self.driver_wrapper.driver.swipe(center['x'], center['y'], end_x, end_y, duration)

    if self.driver_wrapper.is_web_test() or initial_context != 'NATIVE_APP':
        # Restore the original (webview) context after the native swipe.
        self.driver_wrapper.driver.switch_to.context(initial_context)
Swipe over an element :param element: either a WebElement, PageElement or element locator as a tuple (locator_type, locator_value) :param x: horizontal movement :param y: vertical movement :param duration: time to take the swipe, in ms
def get_object_closure(subject, object_category=None, **kwargs):
    """Find all terms used to annotate ``subject``, plus their ancestors.

    Implemented as a faceted association search: rows=0 fetches no
    documents, only the OBJECT_CLOSURE facet counts, whose keys are the
    closure terms.
    """
    results = search_associations(subject=subject,
                                  object_category=object_category,
                                  select_fields=[],
                                  facet_fields=[M.OBJECT_CLOSURE],
                                  facet_limit=-1,
                                  rows=0,
                                  **kwargs)
    return set(results['facet_counts'][M.OBJECT_CLOSURE].keys())
Find all terms used to annotate subject plus ancestors
def _django_to_es_field(self, field):
    """Resolve a Django field/ordering name to its Elasticsearch name.

    Used by value_list and ordering to get the correct field names.
    A leading "-" (descending ordering) is preserved on the result.

    Returns:
        tuple: (es_field_name, django_field_class_or_None).
    """
    from django.db import models

    prefix = ""
    if field.startswith("-"):
        prefix = "-"
        field = field.lstrip("-")

    # Primary keys map to the ES document id.
    if field in ["id", "pk"]:
        return "_id", models.AutoField

    try:
        dj_field, _, _, _ = self.model._meta.get_field_by_name(field)
        if isinstance(dj_field, models.ForeignKey):
            # FK columns are stored with an "_id" suffix.
            return prefix + field + "_id", models.ForeignKey
        else:
            return prefix + field, dj_field
    except models.FieldDoesNotExist:
        # Not a concrete model field: fall through to a nested ES path.
        pass

    return prefix + field.replace(FIELD_SEPARATOR, "."), None
We use this function in value_list and ordering to get the correct fields name
def load_x509_certificates(buf):
    """Load one or multiple X.509 certificates from a buffer.

    :param str buf: PEM text; may contain multiple certificates.
    :return: iterator over :class:`OpenSSL.crypto.X509` objects.
    :raises ValueError: if ``buf`` is not a string.
    """
    # NOTE: ``basestring`` makes this function Python 2 only.
    if not isinstance(buf, basestring):
        raise ValueError('`buf` should be an instance of `basestring` not `%s`' % type(buf))

    for pem in re.findall('(-----BEGIN CERTIFICATE-----\s(\S+\n*)+\s-----END CERTIFICATE-----\s)', buf):
        # findall yields (full_match, last_inner_group); the whole PEM
        # block is element 0.
        yield load_certificate(crypto.FILETYPE_PEM, pem[0])
Load one or multiple X.509 certificates from a buffer. :param str buf: A buffer is an instance of `basestring` and can contain multiple certificates. :return: An iterator that iterates over certificates in a buffer. :rtype: list[:class:`OpenSSL.crypto.X509`]
def lemma(self):
    """Lazy-load the lemma for this word.

    :getter: Returns the plain string value of the word lemma, caching
        it on first access; stays None if the element has no lemma.
    :type: str
    """
    if self._lemma is None:
        matches = self._element.xpath('lemma/text()')
        if matches:
            self._lemma = matches[0]
    return self._lemma
Lazy-loads the lemma for this word :getter: Returns the plain string value of the word lemma :type: str
def sas_logical_jbods(self):
    """Get the SAS Logical JBODs API client (lazily constructed).

    Returns:
        SasLogicalJbods: client bound to this connection.
    """
    if not self.__sas_logical_jbods:
        self.__sas_logical_jbods = SasLogicalJbods(self.__connection)
    return self.__sas_logical_jbods
Gets the SAS Logical JBODs API client. Returns: SasLogicalJbod:
def process_text(text, pmid=None, python2_path=None):
    """Process plain text with TEES and convert output to INDRA statements.

    TEES is only compatible with Python 2, so an external python2.7
    interpreter is located and invoked.

    Parameters
    ----------
    text : str
        Plain text to process with TEES.
    pmid : str
        PMID of the source paper, stored in the Evidence objects of the
        extracted statements; None if unspecified.
    python2_path : str
        Path to a Python 2 interpreter; if None, the directories in the
        PATH environment variable are searched for ``python2.7``.

    Returns
    -------
    tp : TEESProcessor
        Processor containing the INDRA statements extracted by TEES.

    Raises
    ------
    Exception
        If no Python 2 interpreter can be found.
    """
    # Locate python2.7 on PATH when no interpreter was supplied.
    if python2_path is None:
        for path in os.environ["PATH"].split(os.pathsep):
            proposed_python2_path = os.path.join(path, 'python2.7')
            if os.path.isfile(proposed_python2_path):
                python2_path = proposed_python2_path
                print('Found python 2 interpreter at', python2_path)
                break
    if python2_path is None:
        raise Exception('Could not find python2 in the directories ' +
                        'listed in the PATH environment variable. ' +
                        'Need python2 to run TEES.')

    # Run TEES on the text, then wrap its output in a processor.
    a1_text, a2_text, sentence_segmentations = run_on_text(text, python2_path)
    tp = TEESProcessor(a1_text, a2_text, sentence_segmentations, pmid)
    return tp
Processes the specified plain text with TEES and converts output to supported INDRA statements. Check for the TEES installation is the TEES_PATH environment variable, and configuration file; if not found, checks candidate paths in tees_candidate_paths. Raises an exception if TEES cannot be found in any of these places. Parameters ---------- text : str Plain text to process with TEES pmid : str The PMID from which the paper comes from, to be stored in the Evidence object of statements. Set to None if this is unspecified. python2_path : str TEES is only compatible with python 2. This processor invokes this external python 2 interpreter so that the processor can be run in either python 2 or python 3. If None, searches for an executible named python2 in the PATH environment variable. Returns ------- tp : TEESProcessor A TEESProcessor object which contains a list of INDRA statements extracted from TEES extractions
def ns_whois(self, nameservers, limit=DEFAULT_LIMIT, offset=DEFAULT_OFFSET, sort_field=DEFAULT_SORT):
    """Get the domains that have been registered with a nameserver(s).

    ``nameservers`` may be a single nameserver string (placed in the URL
    path) or a list (sent via query parameters).
    """
    if not isinstance(nameservers, list):
        uri = self._uris["whois_ns"].format(nameservers)
        params = {'limit': limit, 'offset': offset, 'sortField': sort_field}
    else:
        uri = self._uris["whois_ns"].format('')
        # NOTE(review): the multi-value query uses the 'emailList'
        # parameter name for nameservers -- looks odd; confirm against
        # the upstream API documentation.
        params = {'emailList' : ','.join(nameservers), 'limit': limit, 'offset': offset, 'sortField': sort_field}

    resp_json = self.get_parse(uri, params=params)
    return resp_json
Gets the domains that have been registered with a nameserver or nameservers
def create_negotiate_message(self, domain_name=None, workstation=None):
    """Create an NTLM NEGOTIATE_MESSAGE.

    :param domain_name: The domain name of the user account we are
        authenticating with, default is None
    :param workstation: The workstation we are using to authenticate
        with, default is None
    :return: A base64 encoded string of the NEGOTIATE_MESSAGE
    """
    # Keep the message around: it is needed later in the NTLM handshake.
    self.negotiate_message = NegotiateMessage(self.negotiate_flags, domain_name, workstation)

    return base64.b64encode(self.negotiate_message.get_data())
Create an NTLM NEGOTIATE_MESSAGE :param domain_name: The domain name of the user account we are authenticating with, default is None :param worksation: The workstation we are using to authenticate with, default is None :return: A base64 encoded string of the NEGOTIATE_MESSAGE
def assertDateTimesFrequencyEqual(self, sequence, frequency, msg=None):
    """Fail if any elements in ``sequence`` aren't separated by the
    expected ``frequency``.

    Parameters
    ----------
    sequence : iterable
    frequency : timedelta
    msg : str
        If not provided, the standard failure message is used.

    Raises
    ------
    TypeError
        If ``sequence`` is not iterable, or ``frequency`` is not a
        timedelta object.
    """
    # ``collections.Iterable`` was removed in Python 3.10; import the
    # ABC from collections.abc instead (local import keeps the block
    # self-contained regardless of the module's top-level imports).
    from collections.abc import Iterable

    if not isinstance(sequence, Iterable):
        raise TypeError('First argument is not iterable')

    if not isinstance(frequency, timedelta):
        raise TypeError('Second argument is not a timedelta object')

    standardMsg = 'unexpected frequencies found in %s' % sequence

    # Pairwise differences via a shifted Series; the last entry is NaT
    # (no successor), hence the [:-1] slice below.
    s1 = pd.Series(sequence)
    s2 = s1.shift(-1)
    freq = s2 - s1

    if not all(f == frequency for f in freq[:-1]):
        self.fail(self._formatMessage(msg, standardMsg))
Fail if any elements in ``sequence`` aren't separated by the expected
``frequency``.

Parameters
----------
sequence : iterable
frequency : timedelta
msg : str
    If not provided, the :mod:`marbles.mixins` or :mod:`unittest`
    standard message will be used.

Raises
------
TypeError
    If ``sequence`` is not iterable.
TypeError
    If ``frequency`` is not a timedelta object.
def snap_install_requested():
    """Determine if installing from snaps.

    If openstack-origin is of the form snap:track/channel[/branch] and
    the channel is valid, return True.
    """
    origin = config('openstack-origin') or ""
    if not origin.startswith('snap:'):
        return False

    _src = origin[5:]
    if '/' in _src:
        channel = _src.split('/')[1]
    else:
        # Handle snap:track with no channel component.
        channel = 'stable'

    return valid_snap_channel(channel)
Determine if installing from snaps If openstack-origin is of the form snap:track/channel[/branch] and channel is in SNAPS_CHANNELS return True.
def powerstring_by_border(u):
    """Power string by Knuth-Morris-Pratt.

    :param u: string
    :returns: largest k such that there is a string y with u = y^k
    :complexity: O(len(u))
    """
    f = maximum_border_length(u)
    n = len(u)
    # n - f[-1] is the smallest candidate period; u is a k-th power
    # exactly when that period divides len(u).
    if n % (n - f[-1]) == 0:
        return n // (n - f[-1])
    return 1
Power string by Knuth-Morris-Pratt

:param u: string
:returns: largest k such that there is a string y with u = y^k
:complexity: O(len(u))
def filter(self, field_name, field_value):
    """Add a permanent filter on the collection.

    :param field_name: name of the field to filter on
    :type field_name: str
    :param field_value: value to filter on
    :rtype: Collection (self, for chaining)
    """
    new_filter = (field_name, field_value)
    self.filters.append(new_filter)
    return self
Add permanent filter on the collection :param field_name: name of the field to filter on :type field_name: str :param field_value: value to filter on :rtype: Collection
def provision_system_config(items, database_name, overwrite=False, clear=False, skip_user_check=False):
    """Provision a basic system configuration.

    Creates the default system configuration unless one already exists,
    or unconditionally when ``clear`` or ``overwrite`` is set.
    """
    from hfos.provisions.base import provisionList
    from hfos.database import objectmodels

    default_system_config_count = objectmodels['systemconfig'].count({
        'name': 'Default System Configuration'})

    if default_system_config_count == 0 or (clear or overwrite):
        provisionList([SystemConfiguration], 'systemconfig', overwrite, clear, skip_user_check)
        hfoslog('Provisioning: System: Done.', emitter='PROVISIONS')
    else:
        # Nothing to do: leave the existing configuration untouched.
        hfoslog('Default system configuration already present.', lvl=warn, emitter='PROVISIONS')
Provision a basic system configuration
def fold_enrichment(self):
    """Return the fold enrichment of the gene set.

    Fold enrichment is the ratio between the observed (``k``) and the
    expected number of gene set genes present, where the expectation is
    ``K * n / N``.
    """
    expected_count = self.K * (self.n / float(self.N))
    return self.k / expected_count
Returns the fold enrichment of the gene set. Fold enrichment is defined as ratio between the observed and the expected number of gene set genes present.
def _initial_guess(self): a, b, c = np.polyfit(self.volumes, self.energies, 2) self.eos_params = [a, b, c] v0 = -b/(2*a) e0 = a*(v0**2) + b*v0 + c b0 = 2 * a * v0 b1 = 4 vmin, vmax = min(self.volumes), max(self.volumes) if not vmin < v0 and v0 < vmax: raise EOSError('The minimum volume of a fitted parabola is ' 'not in the input volumes\n.') return e0, b0, b1, v0
Quadratic fit to get an initial guess for the parameters. Returns: tuple: (e0, b0, b1, v0)
def synchronize (lock, func, log_duration_secs=0):
    """Return a synchronized function acquiring the given lock.

    If acquiring the lock takes longer than ``log_duration_secs``
    (when > 0), a warning is printed to stderr.
    """
    def newfunc (*args, **kwargs):
        t = time.time()
        with lock:
            # ``duration`` measures how long we waited to acquire the lock.
            duration = time.time() - t
            if duration > log_duration_secs > 0:
                print("WARN:", func.__name__, "locking took %0.2f seconds" % duration, file=sys.stderr)
            return func(*args, **kwargs)
    # Copy name/docstring metadata from the wrapped function.
    return update_func_meta(newfunc, func)
Return synchronized function acquiring the given lock.
def add_global_handler(self, event, handler, priority=0):
    """Add a global handler function for a specific event type.

    Arguments:
        event -- Event type (a string).
        handler -- Callback function taking 'connection' and 'event'
            parameters.
        priority -- A number; the lower the number, the higher the
            priority.
    """
    handler = PrioritizedHandler(priority, handler)
    with self.mutex:
        event_handlers = self.handlers.setdefault(event, [])
        # Keep the list sorted so handlers run in priority order
        # (lowest number first).
        bisect.insort(event_handlers, handler)
Adds a global handler function for a specific event type. Arguments: event -- Event type (a string). Check the values of numeric_events for possible event types. handler -- Callback function taking 'connection' and 'event' parameters. priority -- A number (the lower number, the higher priority). The handler function is called whenever the specified event is triggered in any of the connections. See documentation for the Event class. The handler functions are called in priority order (lowest number is highest priority). If a handler function returns "NO MORE", no more handlers will be called.
def multipath_flush(device):
    """Device-Mapper Multipath flush.

    CLI Example:

    .. code-block:: bash

        salt '*' devmap.multipath_flush mpath1
    """
    if not os.path.exists(device):
        # Mirror the CLI behaviour: report, don't raise.
        return '{0} does not exist'.format(device)

    cmd = 'multipath -f {0}'.format(device)
    return __salt__['cmd.run'](cmd).splitlines()
Device-Mapper Multipath flush CLI Example: .. code-block:: bash salt '*' devmap.multipath_flush mpath1
def update_from_json(self, json_attributes, models=None, setter=None):
    """Update the object's properties from a JSON attributes dictionary.

    Args:
        json_attributes: (JSON-dict) attributes and values to update.
        models (dict or None, optional): mapping of model ids to models,
            needed when updated values contain references.
        setter (optional): session performing the update, propagated to
            suppress "boomerang" updates.

    Returns:
        None
    """
    for attr_name, attr_value in json_attributes.items():
        self.set_from_json(attr_name, attr_value, models, setter)
Updates the object's properties from a JSON attributes dictionary. Args: json_attributes: (JSON-dict) : attributes and values to update models (dict or None, optional) : Mapping of model ids to models (default: None) This is needed in cases where the attributes to update also have values that have references. setter(ClientSession or ServerSession or None, optional) : This is used to prevent "boomerang" updates to Bokeh apps. In the context of a Bokeh server application, incoming updates to properties will be annotated with the session that is doing the updating. This value is propagated through any subsequent change notifications that the update triggers. The session can compare the event setter to itself, and suppress any updates that originate from itself. Returns: None
def _prepdata(self): if not self._data.get("bbox"): self.update_bbox() if not self._data.get("crs"): self._data["crs"] = {"type":"name", "properties":{"name":"urn:ogc:def:crs:OGC:2:84"}}
Adds potentially missing items to the geojson dictionary