code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def ellipse_to_cov(sigma_maj, sigma_min, theta): cth = np.cos(theta) sth = np.sin(theta) covxx = cth**2 * sigma_maj**2 + sth**2 * sigma_min**2 covyy = sth**2 * sigma_maj**2 + cth**2 * sigma_min**2 covxy = cth * sth * sigma_maj**2 - cth * sth * sigma_min**2 return np.array([[covxx, covxy], [covxy...
Compute the covariance matrix in two variables x and y given the std. deviation along the semi-major and semi-minor axes and the rotation angle of the error ellipse. Parameters ---------- sigma_maj : float Std. deviation along major axis of error ellipse. sigma_min : float Std....
def DeleteUser(username):
    """Delete a GRR user from the datastore.

    Parameters
    ----------
    username : str
        Name of the GRR user to delete.

    Raises
    ------
    UserNotFoundError
        If no user with the given username exists.
    """
    grr_api = maintenance_utils.InitGRRRootAPI()
    try:
        grr_api.GrrUser(username).Get().Delete()
    except api_errors.ResourceNotFoundError as e:
        # Chain the original API error so the root cause is preserved.
        raise UserNotFoundError(username) from e
Deletes a GRR user from the datastore.
def insert_metric_changes(db, metrics, metric_mapping, commit): values = [ [commit.sha, metric_mapping[metric.name], metric.value] for metric in metrics if metric.value != 0 ] db.executemany( 'INSERT INTO metric_changes (sha, metric_id, value) VALUES (?, ?, ?)', value...
Insert into the metric_changes tables. :param metrics: `list` of `Metric` objects :param dict metric_mapping: Maps metric names to ids :param Commit commit:
def get_datetime(self, tz=None):
    """Return the current simulation datetime.

    Parameters
    ----------
    tz : tzinfo or str, optional
        Timezone to convert the datetime to. Defaults to UTC.

    Returns
    -------
    datetime
        The current simulation datetime, converted to ``tz`` if given.
    """
    current = self.datetime
    # The algorithm clock is expected to be UTC-aware.
    assert current.tzinfo == pytz.utc, "Algorithm should have a utc datetime"
    return current if tz is None else current.astimezone(tz)
Returns the current simulation datetime. Parameters ---------- tz : tzinfo or str, optional The timezone to return the datetime in. This defaults to utc. Returns ------- dt : datetime The current simulation datetime converted to ``tz``.
def getAnalogID(self, num):
    """Return the COMTRADE ID of the analog channel numbered ``num``.

    ``num`` is the channel number as it appears in the COMTRADE header.
    """
    position = self.An.index(num)
    return self.Ach_id[position]
Returns the COMTRADE ID of a given channel number. The number to be given is the same as in the COMTRADE header.
def from_dict(self, d: Dict[str, Any]) -> None:
    """Load configuration values from a dict.

    Only keys that are entirely upper-case are treated as settings;
    everything else is ignored.
    """
    for name, setting in d.items():
        if name.isupper():
            self._setattr(name, setting)
    logger.info("Config is loaded from dict: %r", d)
Load values from a dict.
def Is64bit(self): if "64" not in platform.machine(): return False iswow64 = ctypes.c_bool(False) if IsWow64Process is None: return False if not IsWow64Process(self.h_process, ctypes.byref(iswow64)): raise process_error.ProcessError("Error while calling IsWow64Process.") return not...
Returns true if this is a 64 bit process.
def GuinierPorod(q, G, Rg, alpha):
    """Empirical Guinier-Porod scattering curve.

    Thin wrapper around the multi-branch implementation for the
    single-exponent case.

    Inputs:
    -------
    ``q``: independent variable
    ``G``: factor of the Guinier branch
    ``Rg``: radius of gyration
    ``alpha``: power-law exponent
    """
    result = GuinierPorodMulti(q, G, Rg, alpha)
    return result
Empirical Guinier-Porod scattering Inputs: ------- ``q``: independent variable ``G``: factor of the Guinier-branch ``Rg``: radius of gyration ``alpha``: power-law exponent Formula: -------- ``G * exp(-q^2*Rg^2/3)`` if ``q<q_sep`` and ``a*q^alpha`` otherwise. ...
def to_creator(self, for_modify=False): signature = {} if for_modify: try: signature['id'] = self.id except AttributeError: raise AttributeError('a modify request should specify an ID') if hasattr(self, 'name'): signatur...
Returns a dict object suitable for a 'CreateSignature'. A signature object for creation is like : <signature name="unittest"> <content type="text/plain">My signature content</content> </signature> which is : { 'name' : 'unittest', ...
def init1(self, dae):
    """Set the initial bus voltage for time-domain simulation.

    Snapshots ``dae.y`` at this model's voltage indices into ``self.v0``.
    """
    initial_voltage = dae.y[self.v]
    self.v0 = matrix(initial_voltage)
Set initial voltage for time domain simulation
def delete_speaker(self, speaker_uri):
    """Delete a speaker from a collection.

    :param speaker_uri: the URI that references the speaker
    :type speaker_uri: String
    :rtype: Boolean
    :returns: True if the speaker was deleted
    :raises: APIError if the request was not successful
    """
    result = self.api_request(speaker_uri, method='DELETE')
    return self.__check_success(result)
Delete a speaker from a collection :param speaker_uri: the URI that references the speaker :type speaker_uri: String :rtype: Boolean :returns: True if the speaker was deleted :raises: APIError if the request was not successful
def wells_by_index(self) -> Dict[str, Well]:
    """Build a look-up table of wells keyed by well name.

    The result behaves like a plain dictionary: to access well A1,
    write ``labware.wells_by_index()['A1']``.

    :return: Dictionary of well objects keyed by well name.
    """
    return dict(zip(self._ordering, self._wells))
Accessor function used to create a look-up table of Wells by name. With indexing one can treat it as a typical python dictionary whose keys are well names. To access well A1, for example, simply write: labware.wells_by_index()['A1'] :return: Dictionary of well objects keyed by well nam...
def delete_channel(current): ch_key = current.input['channel_key'] ch = Channel(current).objects.get(owner_id=current.user_id, key=ch_key) ch.delete() Subscriber.objects.filter(channel_id=ch_key).delete() Message.objects.filter(channel_id=ch_key).delete() current.output = {'status': 'Deleted', '...
Delete a channel .. code-block:: python # request: { 'view':'_zops_delete_channel, 'channel_key': key, } # response: { 'status': 'OK', 'code': 200 }
def attach_file(self, filename, resource_id=None): if resource_id is None: resource_id = os.path.basename(filename) upload_url = self.links[REF_MODEL_RUN_ATTACHMENTS] + '/' + resource_id response = requests.post( upload_url, files={'file': open(filename, 'rb')...
Upload an attachment for the model run. Parameters ---------- filename : string Path to uploaded file resource_id : string Identifier of the attachment. If None, the filename will be used as resource identifier. Returns ------- ...
def color(self, color):
    """Update the security label's color.

    Args:
        color: The new color value.

    Returns:
        The response of the update request.
    """
    self._data['color'] = color
    # NOTE: mutates the base request in place, as before.
    update_request = self._base_request
    update_request['color'] = color
    return self._tc_requests.update(update_request, owner=self.owner)
Updates the security labels color. Args: color:
def get_disabled(jail=None):
    """Return the services that are available but not enabled to start at boot.

    .. versionchanged:: 2016.3.4
        Support for jail (representing jid or jail name) keyword argument
        in kwargs

    CLI Example:

    .. code-block:: bash

        salt '*' service.get_disabled
    """
    enabled = set(get_enabled(jail))
    available = set(get_all(jail))
    return sorted(available - enabled)
Return what services are available but not enabled to start at boot .. versionchanged:: 2016.3.4 Support for jail (representing jid or jail name) keyword argument in kwargs CLI Example: .. code-block:: bash salt '*' service.get_disabled
def getBirthdate(self, string=True):
    """Return the birthdate, as a string or as a ``datetime``.

    Parameters
    ----------
    string : bool
        If True (default), return the raw birthdate string; otherwise
        parse it with format ``"%d %b %Y"`` and return a ``datetime``.

    Examples
    --------
    >>> import pyedflib
    >>> f = pyedflib.data.test_generator()
    >>> f.getBirthdate()=='30 jun 1969'
    True
    >>> f._close()
    >>> del f
    """
    birthdate = self._convert_string(self.birthdate.rstrip())
    if string:
        return birthdate
    return datetime.strptime(birthdate, "%d %b %Y")
Returns the birthdate as string object Parameters ---------- None Examples -------- >>> import pyedflib >>> f = pyedflib.data.test_generator() >>> f.getBirthdate()=='30 jun 1969' True >>> f._close() >>> del f
def remove_output(clean=False, **kwargs): if not clean: return False found = False cwd = os.getcwd() for file in os.listdir(cwd): if file.endswith('_eig.txt') or \ file.endswith('_out.txt') or \ file.endswith('_out.lst') or \ file.endswith(...
Remove the outputs generated by Andes, including power flow reports ``_out.txt``, time-domain list ``_out.lst`` and data ``_out.dat``, eigenvalue analysis report ``_eig.txt``. Parameters ---------- clean : bool If ``True``, execute the function body. Returns otherwise. kwargs : dict ...
def is_locked(self, request: AxesHttpRequest, credentials: dict = None) -> bool:
    """Check whether the request or given credentials are locked out.

    Lockout only applies when ``AXES_LOCK_OUT_AT_FAILURE`` is enabled;
    otherwise this always returns False.
    """
    if not settings.AXES_LOCK_OUT_AT_FAILURE:
        return False
    failures = self.get_failures(request, credentials)
    return failures >= settings.AXES_FAILURE_LIMIT
Checks if the request or given credentials are locked.
def get_action(self, action): func_name = action.replace('-', '_') if not hasattr(self, func_name): raise DaemonError( 'Invalid action "{action}"'.format(action=action)) func = getattr(self, func_name) if (not hasattr(func, '__call__') or getat...
Get a callable action.
def send_file_from_directory(filename, directory, app=None):
    """Serve a static file from ``directory``; helper to add static rules.

    Example use::

        app.add_url_rule(
            app.static_url_path + '/abilian/<path:filename>',
            endpoint='abilian_static',
            view_func=partial(send_file_from_directory,
                              directory='/path/to/static/files/dir'))
    """
    application = app if app is not None else current_app
    max_age = application.get_send_file_max_age(filename)
    return send_from_directory(directory, filename, cache_timeout=max_age)
Helper to add static rules, like in `abilian.app`.app. Example use:: app.add_url_rule( app.static_url_path + '/abilian/<path:filename>', endpoint='abilian_static', view_func=partial(send_file_from_directory, directory='/path/to/static/files/dir'))
def add_attribute_label(self, attribute_id, label):
    """Add a security label to an attribute.

    Args:
        attribute_id: The id of the attribute.
        label: The security label to add.

    Returns:
        A response json, or None if the object cannot be updated.
    """
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
        return

    return self.tc_requests.add_attribute_label(
        self.api_type,
        self.api_sub_type,
        self.unique_id,
        attribute_id,
        label,
        owner=self.owner,
    )
Adds a security label to an attribute Args: attribute_id: label: Returns: A response json
def load_graph_xml(xml, filename, load_all=False): ret = [] try: root = objectify.fromstring(xml) except Exception: return [] if root.tag != 'graphs': return [] if not hasattr(root, 'graph'): return [] for g in root.graph: name = g.attrib['name'] e...
load a graph from one xml string
def mapstr_to_list(mapstr):
    """Convert an ASCII map string into a list of strings, one per row.

    Each row is stripped of surrounding whitespace.
    """
    return [row.strip() for row in StringIO(mapstr)]
Convert an ASCII map string with rows to a list of strings, 1 string per row.
def patch_ligotimegps(module="ligo.lw.lsctables"):
    """Context manager to on-the-fly patch ``LIGOTimeGPS``.

    Swaps in ``_ligotimegps`` (which accepts all int types) on the given
    module, restoring the original class on exit.
    """
    target = import_module(module)
    original = target.LIGOTimeGPS
    target.LIGOTimeGPS = _ligotimegps
    try:
        yield
    finally:
        # Always restore the original class, even on error.
        target.LIGOTimeGPS = original
Context manager to on-the-fly patch LIGOTimeGPS to accept all int types
def circleconvert(amount, currentformat, newformat): if currentformat.lower() == newformat.lower(): return amount if currentformat.lower() == 'radius': if newformat.lower() == 'diameter': return amount * 2 elif newformat.lower() == 'circumference': return amount *...
Convert a circle measurement. :type amount: number :param amount: The number to convert. :type currentformat: string :param currentformat: The format of the provided value. :type newformat: string :param newformat: The intended format of the value. >>> circleconvert(45, "radius", "diamet...
def license_id(filename): import editdistance with io.open(filename, encoding='UTF-8') as f: contents = f.read() norm = _norm_license(contents) min_edit_dist = sys.maxsize min_edit_dist_spdx = '' for spdx, text in licenses.LICENSES: norm_license = _norm_license(text) if n...
Return the spdx id for the license contained in `filename`. If no license is detected, returns `None`. spdx: https://spdx.org/licenses/ licenses from choosealicense.com: https://github.com/choosealicense.com Approximate algorithm: 1. strip copyright line 2. normalize whitespace (replace all ...
def get_recent_tracks(self, limit=10, cacheable=True, time_from=None, time_to=None): params = self._get_params() if limit: params["limit"] = limit if time_from: params["from"] = time_from if time_to: params["to"] = time_to seq = [] for ...
Returns this user's played tracks as a sequence of PlayedTrack objects in reverse order of playtime, all the way back to the first track. Parameters: limit : If None, it will try to pull all the available data. from (Optional) : Beginning timestamp of a range - only display scrob...
def runtime(self): warnings.warn("admm.ADMM.runtime attribute has been replaced by " "an upgraded timer class: please see the documentation " "for admm.ADMM.solve method and util.Timer class", PendingDeprecationWarning) return self.timer....
Transitional property providing access to the new timer mechanism. This will be removed in the future.
def close_transport(self):
    """Forcibly close a previously acquired media transport.

    .. note:: Make sure any transport event handlers are unregistered
        before calling this.
    """
    if not self.path:
        return
    self._release_media_transport(self.path, self.access_type)
    self.path = None
Forcibly close previously acquired media transport. .. note:: The user should first make sure any transport event handlers are unregistered first.
async def runItemCmdr(item, outp=None, **opts):
    """Create a cmdr for the given item and run the cmd loop.

    Example:

        runItemCmdr(foo)
    """
    # Build an interactive cmdr bound to the item, then block in its loop.
    cmdr = await getItemCmdr(item, outp=outp, **opts)
    await cmdr.runCmdLoop()
Create a cmdr for the given item and run the cmd loop. Example: runItemCmdr(foo)
def _start_scan(self, active): success, retval = self._set_scan_parameters(active=active) if not success: return success, retval try: response = self._send_command(6, 2, [2]) if response.payload[0] != 0: self._logger.error('Error starting scan ...
Begin scanning forever
def create_signature(self, base_url, payload=None): url = urlparse(base_url) url_to_sign = "{path}?{query}".format(path=url.path, query=url.query) converted_payload = self._convert(payload) decoded_key = base64.urlsafe_b64decode(self.private_key.encode('utf-8')) signature = hmac....
Creates unique signature for request. Make sure ALL 'GET' and 'POST' data is already included before creating the signature or receiver won't be able to re-create it. :param base_url: The url you'll using for your request. :param payload: The POST data that you'l...
def format_string(format, *cols):
    """Formats the arguments in printf-style and returns the result
    as a string column.

    :param format: string that can contain embedded format tags
    :param cols: columns to be used in formatting

    >>> df = spark.createDataFrame([(5, "hello")], ['a', 'b'])
    >>> df.select(format_string('%d %s', df.a, df.b).alias('v')).collect()
    [Row(v=u'5 hello')]
    """
    # Note: `format` shadows the builtin; kept for API compatibility.
    ctx = SparkContext._active_spark_context
    jcols = _to_seq(ctx, cols, _to_java_column)
    return Column(ctx._jvm.functions.format_string(format, jcols))
Formats the arguments in printf-style and returns the result as a string column. :param format: string that can contain embedded format tags :param cols: columns to be used in formatting >>> df = spark.createDataFrame([(5, "hello")], ['a', 'b']) >>> df.select(format_string('%d %s', df.a, df.b).alias('v')).colle...
def enable_asynchronous(self): def is_monkey_patched(): try: from gevent import monkey, socket except ImportError: return False if hasattr(monkey, "saved"): return "socket" in monkey.saved return gevent.socket.socket...
Check if socket have been monkey patched by gevent
def set_debug_listener(stream):
    """Install a SIGUSR1 handler that breaks into a debugger.

    Parameters
    ----------
    stream
        Stream handed to ``launch_debugger`` when the signal fires.

    On platforms without SIGUSR1 (e.g. Windows), logs a warning instead.
    """
    def debugger(sig, frame):
        launch_debugger(frame, stream)

    if hasattr(signal, 'SIGUSR1'):
        signal.signal(signal.SIGUSR1, debugger)
    else:
        # logger.warn() is a deprecated alias of logger.warning().
        logger.warning("Cannot set SIGUSR1 signal for debug mode.")
Break into a debugger if receives the SIGUSR1 signal
def eat_config(self, conf_file): cfg = ConfigParser.RawConfigParser() cfg.readfp(conf_file) sec = 'channels' mess = 'missmatch of channel keys' assert(set(self.pack.D.keys()) == set([int(i) for i in cfg.options(sec)])), mess if not self.pack.chnames: self.pack...
conf_file a file opened for reading. Update the packs channel names and the conditions, accordingly.
def has_dtypes(df, items):
    """Assert that a DataFrame has the expected ``dtypes``.

    Parameters
    ==========
    df: DataFrame
    items: dict
        mapping of columns to dtype.

    Returns
    =======
    df : DataFrame
        The input, unchanged, so calls can be chained.

    Raises
    ======
    AssertionError
        If any column's dtype does not match.
    """
    dtypes = df.dtypes
    for column, expected in items.items():
        actual = dtypes[column]
        # Direct != comparison instead of the `not ... == ...` anti-idiom.
        if actual != expected:
            raise AssertionError(
                "{} has the wrong dtype. Should be ({}), is ({})".format(
                    column, expected, actual))
    return df
Assert that a DataFrame has ``dtypes`` Parameters ========== df: DataFrame items: dict mapping of columns to dtype. Returns ======= df : DataFrame
def request_search(self, txt=None): if self.checkBoxRegex.isChecked(): try: re.compile(self.lineEditSearch.text(), re.DOTALL) except sre_constants.error as e: self._show_error(e) return else: self._show_error(Non...
Requests a search operation. :param txt: The text to replace. If None, the content of lineEditSearch is used instead.
def _pre_tune(self): if self._running is None: self._running = True elif self._running is False: log.error( 'This %s was scheduled to stop. Not running %s.tune_in()', self.__class__.__name__, self.__class__.__name__ ) return...
Set the minion running flag and issue the appropriate warnings if the minion cannot be started or is already running
def MICECache(subsystem, parent_cache=None):
    """Construct a |MICE| cache.

    Uses a Redis-backed cache when ``config.REDIS_CACHE`` is set, and a
    local dict cache on the object otherwise.

    Args:
        subsystem (Subsystem): The subsystem that this is a cache for.

    Kwargs:
        parent_cache (MICECache): The cache generated by the uncut
            version of ``subsystem``.
    """
    cache_cls = RedisMICECache if config.REDIS_CACHE else DictMICECache
    return cache_cls(subsystem, parent_cache=parent_cache)
Construct a |MICE| cache. Uses either a Redis-backed cache or a local dict cache on the object. Args: subsystem (Subsystem): The subsystem that this is a cache for. Kwargs: parent_cache (MICECache): The cache generated by the uncut version of ``subsystem``. Any cached |MICE| w...
def is_member(self, ldap_user, group_dn): try: user_uid = ldap_user.attrs["uid"][0] try: is_member = ldap_user.connection.compare_s( group_dn, "memberUid", user_uid.encode() ) except (ldap.UNDEFINED_TYPE, ldap.NO_SUCH_ATTRIB...
Returns True if the group is the user's primary group or if the user is listed in the group's memberUid attribute.
def get_compute_credentials(key):
    """Authenticate a service account for the Compute Engine API.

    Uses ``oauth2client.service_account`` to turn a JSON keyfile dict
    into credentials scoped to the compute API.
    """
    compute_scopes = ['https://www.googleapis.com/auth/compute']
    return ServiceAccountCredentials.from_json_keyfile_dict(
        key, scopes=compute_scopes)
Authenticates a service account for the compute engine. This uses the `oauth2client.service_account` module. Since the `google` Python package does not support the compute engine (yet?), we need to make direct HTTP requests. For that we need authentication tokens. Obtaining these based on the credentia...
async def _scp(self, source, destination, scp_opts): cmd = [ 'scp', '-i', os.path.expanduser('~/.local/share/juju/ssh/juju_id_rsa'), '-o', 'StrictHostKeyChecking=no', '-q', '-B' ] cmd.extend(scp_opts.split() if isinstance(scp_opts, str)...
Execute an scp command. Requires a fully qualified source and destination.
def setAutoRangeOff(self): if self.getRefreshBlocked(): logger.debug("setAutoRangeOff blocked for {}".format(self.nodeName)) return if self.autoRangeCti: self.autoRangeCti.data = False self._forceRefreshAutoRange()
Turns off the auto range checkbox. Calls _refreshNodeFromTarget, not _updateTargetFromNode, because setting auto range off does not require a redraw of the target.
def data_complete(self):
    """Return True if all the expected datadir files are present."""
    # Delegate the check to the task helper.
    return task.data_complete(self.datadir, self.sitedir, self._get_container_name)
Return True if all the expected datadir files are present
def main(self, x): for i in range(len(self.taps_fix_reversed)): self.next.mul[i] = x * self.taps_fix_reversed[i] if i == 0: self.next.acc[0] = self.mul[i] else: self.next.acc[i] = self.acc[i - 1] + self.mul[i] self.next.out = self.acc[-...
Transposed form FIR implementation, uses full precision
def validate_process_steps(prop, value): if value is not None: validate_type(prop, value, (dict, list)) procstep_keys = set(_complex_definitions[prop]) for idx, procstep in enumerate(wrap_value(value)): ps_idx = prop + '[' + str(idx) + ']' validate_type(ps_idx, procst...
Default validation for Process Steps data structure
def load_data(path, dense=False):
    """Load data from a CSV, LibSVM or HDF5 file based on the file extension.

    Args:
        path (str): Path to the CSV, LibSVM or HDF5 format file.
        dense (boolean): If True, convert a sparse result matrix to dense.

    Returns:
        The feature matrix X and the target vector y.
    """
    loaders = {'.csv': load_csv,
               '.sps': load_svmlight_file,
               '.h5': load_hdf5}
    extension = os.path.splitext(path)[1]
    X, y = loaders[extension](path)
    if dense and sparse.issparse(X):
        X = X.todense()
    return X, y
Load data from a CSV, LibSVM or HDF5 file based on the file extension. Args: path (str): A path to the CSV, LibSVM or HDF5 format file containing data. dense (boolean): An optional variable indicating if the return matrix should be dense. By default, it is false. Retu...
async def get_advanced_settings(request: web.Request) -> web.Response:
    """Handle a GET request for the advanced settings.

    Returns a json body with the key "settings" whose value is a list of
    objects, each with keys "id", "title", "description", and "value".
    """
    # Delegate to the module-level helper and wrap the result as JSON.
    res = _get_adv_settings()
    return web.json_response(res)
Handles a GET request and returns a json body with the key "settings" and a value that is a list of objects where each object has keys "id", "title", "description", and "value"
def check_config(conf): if 'fmode' in conf and not isinstance(conf['fmode'], string_types): raise TypeError(TAG + ": `fmode` must be a string") if 'dmode' in conf and not isinstance(conf['dmode'], string_types): raise TypeError(TAG + ": `dmode` must be a string") if 'depth' in conf: ...
Type and boundary check
def OneResult(parser):
    """Parse like parser, but return exactly one result, not a tuple."""
    def parse(text):
        outcome = parser(text)
        assert len(outcome) == 1, "Expected one result but got %r" % (outcome,)
        return outcome[0]
    return parse
Parse like parser, but return exactly one result, not a tuple.
def updateData(self, signal, fs):
    """Display a spectrogram of the provided signal.

    :param signal: 1-D signal of audio
    :type signal: numpy.ndarray
    :param fs: samplerate of signal
    :type fs: int
    """
    # Compute the spectrogram off the UI thread.
    worker = threading.Thread(
        target=_doSpectrogram,
        args=(self.spec_done, (fs, signal),),
        kwargs=self.specgramArgs,
    )
    worker.start()
Displays a spectrogram of the provided signal :param signal: 1-D signal of audio :type signal: numpy.ndarray :param fs: samplerate of signal :type fs: int
def update(self, items):
    """Update a catalog object.

    Args:
        items (list): A list of dicts describing update data and action
            codes (see api docs).

    Returns:
        A ticket id.
    """
    payload = {'data': json.dumps(items, default=dthandler)}
    response = self.post_attribute("update", data=payload)
    return response['ticket']
Update a catalog object Args: items (list): A list of dicts describing update data and action codes (see api docs) Kwargs: Returns: A ticket id Example: >>> c = catalog.Catalog('my_songs', type='song') >>> items [{'action': 'update', ...
def find_boost(): short_version = "{}{}".format(sys.version_info[0], sys.version_info[1]) boostlibnames = ['boost_python-py' + short_version, 'boost_python' + short_version, 'boost_python', ] if sys.version_info[0] == 2: boostlibnames +=...
Find the name of the boost-python library. Returns None if none is found.
def _pfp__show(self, level=0, include_offset=False): res = [] res.append("{}{} {{".format( "{:04x} ".format(self._pfp__offset) if include_offset else "", self._pfp__show_name )) for child in self._pfp__children: res.append("{}{}{:10s} = {}".format( ...
Show the contents of the struct
def _exposure_to_weights(self, y, exposure=None, weights=None): y = y.ravel() if exposure is not None: exposure = np.array(exposure).astype('f').ravel() exposure = check_array(exposure, name='sample exposure', ndim=1, verbose=self.verbose) ...
simple tool to create a common API Parameters ---------- y : array-like, shape (n_samples,) Target values (integers in classification, real numbers in regression) For classification, labels must correspond to classes. exposure : array-like shape (n_sa...
def area(self): r edges = tuple(edge._nodes for edge in self._edges) return _surface_helpers.compute_area(edges)
r"""The area of the current curved polygon. This assumes, but does not check, that the current curved polygon is valid (i.e. it is bounded by the edges). This computes the area via Green's theorem. Using the vector field :math:`\mathbf{F} = \left[-y, x\right]^T`, since :math:`\...
def copy_table( self, sources, destination, job_id=None, job_id_prefix=None, location=None, project=None, job_config=None, retry=DEFAULT_RETRY, ): job_id = _make_job_id(job_id, job_id_prefix) if project is None: proj...
Copy one or more tables to another table. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy Arguments: sources (Union[ \ :class:`~google.cloud.bigquery.table.Table`, \ :class:`~google.cloud.bigquery.table.TableRefer...
def reboot(self):
    """Request an autopilot reboot by sending a
    ``MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN`` command.
    """
    # param1 = 1 requests an autopilot reboot per the MAVLink
    # MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN spec; remaining params unused.
    reboot_msg = self.message_factory.command_long_encode(
        0, 0, mavutil.mavlink.MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN, 0, 1, 0, 0, 0, 0, 0, 0)
    self.send_mavlink(reboot_msg)
Requests an autopilot reboot by sending a ``MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN`` command.
def validate_range_value(value): if value == (None, None): return if not hasattr(value, '__iter__'): raise TypeError('Range value must be an iterable, got "%s".' % value) if not 2 == len(value): raise ValueError('Range value must consist of two elements, got %d.' % ...
Validates given value against `Schema.TYPE_RANGE` data type. Raises TypeError or ValueError if something is wrong. Returns None if everything is OK.
def is_valid(number):
    """Determine whether the card number is valid.

    The last digit must equal the check digit computed over the rest;
    non-numeric input is invalid.
    """
    digits = str(number)
    if not digits.isdigit():
        return False
    expected = get_check_digit(digits[:-1])
    return int(digits[-1]) == expected
determines whether the card number is valid.
def unaccent(string, encoding="utf-8"): string = to_unicode(string) if has_unidecode: return unidecode.unidecode(string) if PYTHON_VERSION < 3: if type(string) == str: string = unicode(string, encoding) nfkd_form = unicodedata.normalize('NFKD', string) return u""....
not just unaccent, but full to-ascii transliteration
def get_global_vars(func): closure = getclosurevars(func) if closure['nonlocal']: raise TypeError("Can't launch a job with closure variables: %s" % closure['nonlocals'].keys()) globalvars = dict(modules={}, functions={}, vars={}) ...
Store any methods or variables bound from the function's closure Args: func (function): function to inspect Returns: dict: mapping of variable names to globally bound VARIABLES
def by_type(self, type_name):
    """Return an iterator of doc_ids of the documents of the specified type.

    ``type_name`` may also be a restorator, in which case its
    ``type_name`` attribute is used.
    """
    if IRestorator.providedBy(type_name):
        type_name = type_name.type_name
    return (doc_id for tname, doc_id in self._links if tname == type_name)
Return an iterator of doc_ids of the documents of the specified type.
def remove_all_static_host_mappings(): LOG.debug("remove_host_mapping() called") session = bc.get_writer_session() try: mapping = _lookup_all_host_mappings( session=session, is_static=True) for host in mapping: session.delete(host) ...
Remove all entries defined in config file from mapping data base.
def _exit_handling(self):
    """Register an atexit hook that closes the asyncio event loop."""
    def close_asyncio_loop():
        # Older Python versions may lack get_event_loop; ignore that.
        try:
            loop = asyncio.get_event_loop()
        except AttributeError:
            loop = None
        if loop is not None:
            loop.close()

    atexit.register(close_asyncio_loop)
Makes sure the asyncio loop is closed.
def __add_paths(self, config): bin_path = os.path.join(self.directory.install_directory(self.feature_name), 'bin') whitelist_executables = self._get_whitelisted_executables(config) for f in os.listdir(bin_path): for pattern in BLACKLISTED_EXECUTABLES: if re.match(patt...
add the proper resources into the environment
def stop_processing(self, warning=True): if not self.__is_processing: warning and LOGGER.warning( "!> {0} | Engine is not processing, 'stop_processing' request has been ignored!".format( self.__class__.__name__)) return False LOGGER.debug("> St...
Registers the end of a processing operation. :param warning: Emit warning message. :type warning: int :return: Method success. :rtype: bool
def _plot_thermo(self, func, temperatures, factor=1, ax=None, ylabel=None, label=None, ylim=None, **kwargs): ax, fig, plt = get_ax_fig_plt(ax) values = [] for t in temperatures: values.append(func(t, structure=self.structure) * factor) ax.plot(temperatures, values, label=labe...
Plots a thermodynamic property for a generic function from a PhononDos instance. Args: func: the thermodynamic function to be used to calculate the property temperatures: a list of temperatures factor: a multiplicative factor applied to the thermodynamic property calculated....
def parse_info(response): info = {} response = response.decode('utf-8') def get_value(value): if ',' and '=' not in value: return value sub_dict = {} for item in value.split(','): k, v = item.split('=') try: sub_dict[k] = ...
Parse the response of Redis's INFO command into a Python dict. In doing so, convert byte data into unicode.
def _updateWordSet(self): self._wordSet = set(self._keywords) | set(self._customCompletions) start = time.time() for line in self._qpart.lines: for match in _wordRegExp.findall(line): self._wordSet.add(match) if time.time() - start > self._WORD_SET_UPDATE_...
Make a set of words, which shall be completed, from text
def get_inputs(node, kwargs): name = node["name"] proc_nodes = kwargs["proc_nodes"] index_lookup = kwargs["index_lookup"] inputs = node["inputs"] attrs = node.get("attrs", {}) input_nodes = [] for ip in inputs: input_node_id = index_lookup[ip[0]] input_nodes.append(proc_nodes...
Helper function to get inputs
def _update_records(self, records, data): data = {k: v for k, v in data.items() if v} records = [dict(record, **data) for record in records] return self._apicall( 'updateDnsRecords', domainname=self.domain, dnsrecordset={'dnsrecords': records}, ).get('...
Insert or update a list of DNS records, specified in the netcup API convention. The fields ``hostname``, ``type``, and ``destination`` are mandatory and must be provided either in the record dict or through ``data``!
def modpath_pkg_resources(module, entry_point): result = [] try: path = resource_filename_mod_entry_point(module.__name__, entry_point) except ImportError: logger.warning("module '%s' could not be imported", module.__name__) except Exception: logger.warning("%r does not appear to...
Goes through pkg_resources for compliance with various PEPs. This one accepts a module as argument.
def prep_for_intensity_plot(data, meth_code, dropna=(), reqd_cols=()): dropna = list(dropna) reqd_cols = list(reqd_cols) try: magn_col = get_intensity_col(data) except AttributeError: return False, "Could not get intensity method from data" if magn_col not in dropna: dropna.a...
Strip down measurement data to what is needed for an intensity plot. Find the column with intensity data. Drop empty columns, and make sure required columns are present. Keep only records with the specified method code. Parameters ---------- data : pandas DataFrame measurement dataframe...
def overtime(self): if self._overtime.lower() == 'ot': return 1 if self._overtime.lower() == 'so': return SHOOTOUT if self._overtime == '': return 0 num = re.findall(r'\d+', self._overtime) if len(num) > 0: return num[0] ret...
Returns an ``int`` of the number of overtimes that were played during the game, or an int constant if the game went to a shootout.
def _maybe_purge_cache(self): if self._last_reload_check + MIN_CHECK_INTERVAL > time.time(): return for name, tmpl in list(self.cache.items()): if not os.stat(tmpl.path): self.cache.pop(name) continue if os.stat(tmpl.path).st_mtime > tm...
If enough time since last check has elapsed, check if any of the cached templates has changed. If any of the template files were deleted, remove that file only. If any were changed, then purge the entire cache.
def get_subwords(self, word, on_unicode_error='strict'):
    """Given a word, get the subwords and their indices.

    Returns a tuple of (subword strings, numpy array of indices).
    """
    subwords, indices = self.f.getSubwords(word, on_unicode_error)
    return subwords, np.array(indices)
Given a word, get the subwords and their indicies.
def toggle_rich_text(self, checked): if checked: self.docstring = not checked self.switch_to_rich_text() self.set_option('rich_mode', checked)
Toggle between sphinxified docstrings or plain ones
def get_entities_tsv(namespace, workspace, etype):
    """List entities of given type in a workspace as a TSV.

    Identical to get_entities(), but the response is a TSV.

    Args:
        namespace (str): project to which workspace belongs
        workspace (str): Workspace name
        etype (str): Entity type
    """
    endpoint = "workspaces/{0}/{1}/entities/{2}/tsv".format(
        namespace, workspace, etype)
    return __get(endpoint)
List entities of given type in a workspace as a TSV. Identical to get_entities(), but the response is a TSV. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name etype (str): Entity type Swagger: https://api.firecloud.org/#!/Entities/br...
def _find_cont_gaussian_smooth(wl, fluxes, ivars, w): print("Finding the continuum") bot = np.dot(ivars, w.T) top = np.dot(fluxes*ivars, w.T) bad = bot == 0 cont = np.zeros(top.shape) cont[~bad] = top[~bad] / bot[~bad] return cont
Returns the weighted mean block of spectra Parameters ---------- wl: numpy ndarray wavelength vector flux: numpy ndarray block of flux values ivar: numpy ndarray block of ivar values L: float width of Gaussian used to assign weights Returns ------- ...
def togglePopup(self):
    """Toggle whether or not the popup is visible.

    Shows the popup when hidden; closes it when visible, unless it is
    in dialog mode.
    """
    popup = self._popupWidget
    if not popup.isVisible():
        self.showPopup()
    elif popup.currentMode() != popup.Mode.Dialog:
        popup.close()
Toggles whether or not the popup is visible.
def createLrrBafPlot(raw_dir, problematic_samples, format, dpi, out_prefix): dir_name = out_prefix + ".LRR_BAF" if not os.path.isdir(dir_name): os.mkdir(dir_name) baf_lrr_plot_options = ["--problematic-samples", problematic_samples, "--raw-dir", raw_dir, "--format", forma...
Creates the LRR and BAF plot. :param raw_dir: the directory containing the intensities. :param problematic_samples: the file containing the problematic samples. :param format: the format of the plot. :param dpi: the DPI of the resulting images. :param out_prefix: the prefix of the output file. ...
def format_jid_instance_ext(jid, job):
    """Format the job instance, including the JID and its start time."""
    formatted = format_job_instance(job)
    formatted.update({
        'JID': jid,
        'StartTime': jid_to_time(jid)})
    return formatted
Format the job instance, with the JID and its start time included
def setStartSegment(self, segment): segments = self.segments if not isinstance(segment, int): segmentIndex = segments.index(segment) else: segmentIndex = segment if len(self.segments) < 2: return if segmentIndex == 0: return ...
Set the first segment on the contour. segment can be a segment object or an index.
def iterate(self, max_iter=None):
    """Instantiate an iterator over the stream.

    Parameters
    ----------
    max_iter : None or int > 0
        Maximum number of iterations to yield. If ``None``, exhaust
        the stream.

    Yields
    ------
    obj : Objects yielded by the streamer provided on init.
    """
    with self as active:
        for count, item in enumerate(active.stream_):
            if max_iter is not None and count >= max_iter:
                break
            yield item
Instantiate an iterator. Parameters ---------- max_iter : None or int > 0 Maximum number of iterations to yield. If ``None``, exhaust the stream. Yields ------ obj : Objects yielded by the streamer provided on init. See Also ----...
def resume(env, identifier):
    """Resume a paused virtual server."""
    vsi = SoftLayer.VSManager(env.client)
    # Accept either a VS id or a name; resolve to a concrete id.
    vs_id = helpers.resolve_id(vsi.resolve_ids, identifier, 'VS')
    env.client['Virtual_Guest'].resume(id=vs_id)
Resumes a paused virtual server.
def check_length_of_shape_or_intercept_names(name_list, num_alts, constrained_param, list_title): if len(name_list) != (num_alts - constrained_param): msg_1 = "{} is of the ...
Ensures that the length of the parameter names matches the number of parameters that will be estimated. Will raise a ValueError otherwise. Parameters ---------- name_list : list of strings. Each element should be the name of a parameter that is to be estimated. num_alts : int. Shoul...
def choice(self, board: Union[chess.Board, int], *, minimum_weight: int = 1, exclude_moves: Container[chess.Move] = (), random=random) -> Entry: chosen_entry = None for i, entry in enumerate(self.find_all(board, minimum_weight=minimum_weight, exclude_moves=exclude_moves)): if chosen_entry is...
Uniformly selects a random entry for the given position. :raises: :exc:`IndexError` if no entries are found.
async def start_child(): logger.info('Started to watch for code changes') loop = asyncio.get_event_loop() watcher = aionotify.Watcher() flags = ( aionotify.Flags.MODIFY | aionotify.Flags.DELETE | aionotify.Flags.ATTRIB | aionotify.Flags.MOVED_TO | aionotify.Flags....
Start the child process that will look for changes in modules.
def to_array(self): array = super(SuccessfulPayment, self).to_array() array['currency'] = u(self.currency) array['total_amount'] = int(self.total_amount) array['invoice_payload'] = u(self.invoice_payload) array['telegram_payment_charge_id'] = u(self.telegram_payment_charge_id) ...
Serializes this SuccessfulPayment to a dictionary. :return: dictionary representation of this object. :rtype: dict
def find_step_impl(self, step): result = None for si in self.steps[step.step_type]: matches = si.match(step.match) if matches: if result: raise AmbiguousStepImpl(step, result[0], si) args = [self._apply_transforms(arg, si) for a...
Find the implementation of the step for the given match string. Returns the StepImpl object corresponding to the implementation, and the arguments to the step implementation. If no implementation is found, raises UndefinedStepImpl. If more than one implementation is found, raises AmbiguousStepIm...
def do_up(self,args): parser = CommandArgumentParser("up") args = vars(parser.parse_args(args)) if None == self.parent: print "You're at the root. Try 'quit' to quit" else: return True
Navigate up by one level. For example, if you are in `(aws)/stack:.../asg:.../`, executing `up` will place you in `(aws)/stack:.../`. up -h for more details
def is_iterable_of_int(l): r if not is_iterable(l): return False return all(is_int(value) for value in l)
r""" Checks if l is iterable and contains only integral types
def run_iqtree(phy, model, threads, cluster, node): if threads > 24: ppn = 24 else: ppn = threads tree = '%s.treefile' % (phy) if check(tree) is False: if model is False: model = 'TEST' dir = os.getcwd() command = 'iqtree-omp -s %s -m %s -nt %s -quiet'...
run IQ-Tree
def clone(cls, repo_location, repo_dir=None, branch_or_tag=None, temp=False): if temp: reponame = repo_location.rsplit('/', 1)[-1] suffix = '%s.temp_simpl_GitRepo' % '_'.join( [str(x) for x in (reponame, branch_or_tag) if x]) repo_dir = create_te...
Clone repo at repo_location into repo_dir and checkout branch_or_tag. Defaults into current working directory if repo_dir is not supplied. If 'temp' is True, a temporary directory will be created for you and the repository will be cloned into it. The tempdir is scheduled for deletion (...
def init_celery(project_name): os.environ.setdefault('DJANGO_SETTINGS_MODULE', '%s.settings' % project_name) app = Celery(project_name) app.config_from_object('django.conf:settings') app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks') return app
init celery app without the need of redundant code
def encode_packet(packet: dict) -> str: if packet['protocol'] == 'rfdebug': return '10;RFDEBUG=' + packet['command'] + ';' elif packet['protocol'] == 'rfudebug': return '10;RFDEBUG=' + packet['command'] + ';' else: return SWITCH_COMMAND_TEMPLATE.format( node=PacketHeader....
Construct packet string from packet dictionary. >>> encode_packet({ ... 'protocol': 'newkaku', ... 'id': '000001', ... 'switch': '01', ... 'command': 'on', ... }) '10;newkaku;000001;01;on;'
def _get_adjustment(mag, year, mmin, completeness_year, t_f, mag_inc=0.1): if len(completeness_year) == 1: if (mag >= mmin) and (year >= completeness_year[0]): return 1.0 else: return False kval = int(((mag - mmin) / mag_inc)) + 1 if (kval >= 1) and (year >= completen...
If the magnitude is greater than the minimum in the completeness table and the year is greater than the corresponding completeness year then return the Weichert factor :param float mag: Magnitude of an earthquake :param float year: Year of earthquake :param np.ndarray completeness...