code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _get_more(self):
    """Initial query or getMore. Returns a Future.

    Raises:
        pymongo.errors.InvalidOperation: if the cursor has already been
            exhausted or killed.
    """
    if self.alive:
        self.started = True
        return self._refresh()
    raise pymongo.errors.InvalidOperation(
        "Can't call get_more() on a MotorCursor that has been"
        " exhausted or killed.")
Initial query or getMore. Returns a Future.
def _parse_ntthal(ntthal_output):
    """Parse ntthal output with the module-level regex.

    Returns a populated THERMORESULT when the output matches,
    otherwise NULLTHERMORESULT.
    """
    match = re.search(_ntthal_re, ntthal_output)
    if not match:
        return NULLTHERMORESULT
    values = [float(match.group(i)) for i in range(1, 5)]
    return THERMORESULT(True, *values)
Helper method that uses regex to parse ntthal output.
def index(self, value, start=None, stop=None):
    """Return first index of value, delegating to the aliased sequence."""
    target = self.__alias__
    return target.index(value, start, stop)
Return first index of value.
def _process_maybe_work(self, yes_work, maybe_work, work_dir, yn_results_path, stats): if maybe_work == yes_work: return stats self._logger.info( 'Processing "maybe" work {} against "yes" work {}.'.format( maybe_work, yes_work)) ...
Returns statistics of how `yes_work` compares with `maybe_work`. :param yes_work: name of work for which stats are collected :type yes_work: `str` :param maybe_work: name of work being compared with `yes_work` :type maybe_work: `str` :param work_dir: directory where generated fi...
def iterfields(klass):
    """Iterate over the input class members and yield its TypedFields.

    Args:
        klass: A class (usually an Entity subclass).

    Yields:
        (class attribute name, TypedField instance) tuples.
    """
    def _is_typed_field(member):
        return isinstance(member, TypedField)

    for name, field in inspect.getmembers(klass, predicate=_is_typed_field):
        yield name, field
Iterate over the input class members and yield its TypedFields. Args: klass: A class (usually an Entity subclass). Yields: (class attribute name, TypedField instance) tuples.
async def status(cls):
    """Return the current status of the configured API server."""
    request = Request(cls.session, 'GET', '/manager/status')
    request.set_json({
        'status': 'running',
    })
    async with request.fetch() as response:
        return await response.json()
Returns the current status of the configured API server.
def insertAdjacentHTML(self, position: str, html: str) -> None: df = self._parse_html(html) pos = position.lower() if pos == 'beforebegin': self.before(df) elif pos == 'afterbegin': self.prepend(df) elif pos == 'beforeend': self.append(df) ...
Parse ``html`` to DOM and insert to ``position``. ``position`` is a case-insensitive string, and must be one of "beforeBegin", "afterBegin", "beforeEnd", or "afterEnd".
def powerset(iterable, *, reverse=False):
    """Return the powerset as a generator of tuples.

    Arguments
    ---------
    iterable : iterable
    reverse : boolean
        Indicates whether the subsets should be produced descending by size.

    Returns
    -------
    A generator producing each element of the powerset.
    """
    items = list(iterable)
    sizes = range(len(items), -1, -1) if reverse else range(len(items) + 1)
    return chain.from_iterable(combinations(items, size) for size in sizes)
Return the powerset. Arguments --------- iterable : iterable reverse : boolean Indicates whether the powerset should be returned descending by size Returns ------- A generator producing each element of the powerset.
def post(self, text=None, attachments=None, source_guid=None):
    """Post a direct message to the user.

    :param str text: the message content
    :param attachments: message attachments
    :param str source_guid: a client-side unique ID for the message
    :return: the message sent
    """
    return self.messages.create(
        text=text,
        attachments=attachments,
        source_guid=source_guid,
    )
Post a direct message to the user. :param str text: the message content :param attachments: message attachments :param str source_guid: a client-side unique ID for the message :return: the message sent :rtype: :class:`~groupy.api.messages.DirectMessage`
def filename_for(self, subpath):
    """Return the relative filename for *subpath*.

    Returns None when no readme exists for the subpath
    (ReadmeNotFoundError from the lookup).
    """
    try:
        absolute = self.readme_for(subpath)
    except ReadmeNotFoundError:
        return None
    return os.path.relpath(absolute, self.root_directory)
Returns the relative filename for the specified subpath, or the root filename if subpath is None. Raises werkzeug.exceptions.NotFound if the resulting path would fall out of the root directory.
def photos(self, query, page=1, per_page=10):
    """Get a single page of photo results for a query.

    :param query [string]: Search terms.
    :param page [integer]: Page number to retrieve. (Optional; default: 1)
    :param per_page [integer]: Number of items per page. (Optional; default: 10)
    :return: [dict]: search results with parsed PhotoModel entries.
    """
    data = self._search("/search/photos", query, page=page, per_page=per_page)
    data["results"] = PhotoModel.parse_list(data.get("results"))
    return data
Get a single page of photo results for a query. :param query [string]: Search terms. :param page [integer]: Page number to retrieve. (Optional; default: 1) :param per_page [integer]: Number of items per page. (Optional; default: 10) :return: [dict]: {u'total': 0, u'total_pages': 0, u're...
def needs_gcloud(self): gcloud_default_path = ['google-cloud-sdk', 'bin'] if platform.system() != "Windows": gcloud_default_path = os.path.join(os.path.expanduser('~'), *gcloud_default_path) else: gcloud_default_path = os.pat...
Returns true if gcloud is unavailable and needed for authentication.
def _one_iteration(self, F, Ybus, V, Vm, Va, pv, pq, pvpq): J = self._build_jacobian(Ybus, V, pv, pq, pvpq) dx = -1 * spsolve(J, F) npv = len(pv) npq = len(pq) if npv > 0: Va[pv] = Va[pv] + dx[range(npv)] if npq > 0: Va[pq] = Va[pq] + dx[range(npv,...
Performs one Newton iteration.
def load_rc(self, path=None, system=False): if os.path.isfile(self.user_rc_path) and not system: path = self.user_rc_path elif os.path.isfile(self.sys_rc_path): path = self.sys_rc_path if not path or not os.path.isfile(path): return {} with open(path) ...
Load the conda configuration file. If both user and system configuration exists, user will be used.
def get_current_value(self, use_cached=False):
    """Return the most recent DataPoint written to this stream, or None.

    :param bool use_cached: If False, always request the latest metadata
        from Device Cloud; if True, reuse cached metadata when available.
    """
    metadata = self._get_stream_metadata(use_cached)
    current_value = metadata.get("currentValue")
    if not current_value:
        return None
    return DataPoint.from_json(self, current_value)
Return the most recent DataPoint value written to a stream The current value is the last recorded data point for this stream. :param bool use_cached: If False, the function will always request the latest from Device Cloud. If True, the device will not make a request if it already has cache...
def eigenvalues_samples(self):
    r"""Samples of the eigenvalues.

    Returns an (nsamples, nstates) array with one row per sampled HMM.
    """
    samples = np.empty((self.nsamples, self.nstates), dtype=config.dtype)
    for idx in range(self.nsamples):
        samples[idx, :] = self._sampled_hmms[idx].eigenvalues
    return samples
r""" Samples of the eigenvalues
def handle(self, dict): ex_res = self.extract(dict['url']) key = "{sid}:{dom}.{suf}:queue".format( sid=dict['spiderid'], dom=ex_res.domain, suf=ex_res.suffix) val = ujson.dumps(dict) self.redis_conn.zadd(key, val, -dict['priority']) if 'expires...
Processes a valid crawl request @param dict: a valid dictionary object
def parse(self, body): self._parse_top_level(body) self._parse_resource(body['data']) resource = body['data'] if 'attributes' in resource: self._parse_attributes(resource['attributes']) if 'relationships' in resource: self._parse_relationships(resource['re...
Invoke the JSON API spec compliant parser Order is important. Start from the request body root key & work your way down so exception handling is easier to follow. :return: the parsed & vetted request body
def remove_all_labels(stdout=None):
    """Drop all constraints and indexes.

    Calls the helpers for dropping constraints and indexes, reporting
    progress on *stdout*.

    :param stdout: output stream (defaults to ``sys.stdout``)
    :return: None
    """
    if not stdout:
        stdout = sys.stdout
    # Fixed "Droping" -> "Dropping" typo in both progress messages.
    stdout.write("Dropping constraints...\n")
    drop_constraints(quiet=False, stdout=stdout)
    stdout.write('Dropping indexes...\n')
    drop_indexes(quiet=False, stdout=stdout)
Calls functions for dropping constraints and indexes. :param stdout: output stream :return: None
def _tab(content):
    """Convert a text-based GET response to tab-separated values.

    Helper for additional downstream manipulation.
    """
    frame = _data_frame(content)
    return frame.to_csv(index=False, sep='\t')
Helper function that converts a text-based GET response to tab-separated values for additional manipulation.
def detect_format(filename):
    """Detect file format of the channels based on extension.

    Parameters
    ----------
    filename : Path
        name of the filename

    Returns
    -------
    str
        one of 'csv', 'sfp' or 'unknown'
    """
    suffix_map = {'.csv': 'csv', '.sfp': 'sfp'}
    return suffix_map.get(Path(filename).suffix, 'unknown')
Detect file format of the channels based on extension. Parameters ---------- filename : Path name of the filename Returns ------- str file format
def predict(self, y_prob):
    """Predict class labels by thresholding positive-class probabilities.

    Parameters
    ----------
    y_prob : array-like of shape = [n_samples, 2]
        Predicted probabilities.

    Returns
    -------
    y_pred : array-like of shape = [n_samples]
        Predicted class (0.0 or 1.0).
    """
    positive = y_prob[:, 1]
    # Adding (1 - threshold) then flooring yields 1 exactly when
    # positive >= threshold.
    return np.floor(positive + (1 - self.threshold_))
Calculate the prediction using the ThresholdingOptimization. Parameters ---------- y_prob : array-like of shape = [n_samples, 2] Predicted probabilities. Returns ------- y_pred : array-like of shape = [n_samples] Predicted class
def _getitem_with_mask(self, key, fill_value=dtypes.NA): if fill_value is dtypes.NA: fill_value = dtypes.get_fill_value(self.dtype) dims, indexer, new_order = self._broadcast_indexes(key) if self.size: if isinstance(self._data, dask_array_type): actual_ind...
Index this Variable with -1 remapped to fill_value.
def exposed_method(name=None, private=False, is_coroutine=True, requires_handler_reference=False): def wrapper(func): if name: method_name = name else: method_name = func.__name__ if not METHOD_NAME_REGEX.match(method_name): raise ValueError("Invalid metho...
Marks a method as exposed via JSON RPC. :param name: the name of the exposed method. Must contains only letters, digits, dots and underscores. If not present or is set explicitly to ``None``, this parameter will default to the name of the exposed method. If two me...
def _run_cmplx(fn, image): original_format = image.format if image.format != 'complex' and image.format != 'dpcomplex': if image.bands % 2 != 0: raise Error('not an even number of bands') if image.format != 'float' and image.format != 'double': image = image.cast('float')...
Run a complex function on a non-complex image. The image needs to be complex, or have an even number of bands. The input can be int, the output is always float or double.
def find_by_id(self, organization_export, params=None, **options):
    """Return details of a previously-requested Organization export.

    Parameters
    ----------
    organization_export : str
        Globally unique identifier for the Organization export.
    params : dict, optional
        Parameters for the request.
    """
    # Use None instead of a mutable {} default so a single dict is not
    # shared across calls; downstream still receives a dict.
    path = "/organization_exports/%s" % (organization_export)
    return self.client.get(path, params if params is not None else {}, **options)
Returns details of a previously-requested Organization export. Parameters ---------- organization_export : {Id} Globally unique identifier for the Organization export. [params] : {Object} Parameters for the request
def read(cls, source, format=None, coalesce=False, **kwargs): def combiner(listofseglists): out = cls(seg for seglist in listofseglists for seg in seglist) if coalesce: return out.coalesce() return out return io_read_multi(combiner, cls, source, format...
Read segments from file into a `SegmentList` Parameters ---------- filename : `str` path of file to read format : `str`, optional source format identifier. If not given, the format will be detected if possible. See below for list of acceptable ...
def complete(self, stream):
    """Complete the pending stream.

    Any connections made to :py:attr:`stream` are connected to `stream`
    once this method returns.

    Args:
        stream(Stream): Stream that completes the connection.
    """
    # A pending stream may only be completed once.
    assert not self.is_complete()
    # Wire the supplied stream's output port into the pending marker.
    self._marker.addInputPort(outputPort=stream.oport)
    # Propagate the concrete schema onto the placeholder stream.
    self.stream.oport.schema = stream.oport.schema
    self._pending_schema._set(self.stream.oport.schema)
    # NOTE(review): marks the supplying operator as a start operator —
    # presumably required by the topology builder; confirm.
    stream.oport.operator._start_op = True
Complete the pending stream. Any connections made to :py:attr:`stream` are connected to `stream` once this method returns. Args: stream(Stream): Stream that completes the connection.
def describe_volumes(self, *volume_ids): volumeset = {} for pos, volume_id in enumerate(volume_ids): volumeset["VolumeId.%d" % (pos + 1)] = volume_id query = self.query_factory( action="DescribeVolumes", creds=self.creds, endpoint=self.endpoint, other_params=v...
Describe available volumes.
def _ip_string_from_prefix(self, prefixlen=None): if not prefixlen: prefixlen = self._prefixlen return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen))
Turn a prefix length into a dotted decimal string. Args: prefixlen: An integer, the netmask prefix length. Returns: A string, the dotted decimal netmask string.
def del_object_from_parent(self):
    """Remove this object from its parent's object registry, if any."""
    parent = self.parent
    if parent:
        parent.objects.pop(self.ref)
Delete object from parent object.
def clean_fail(func):
    """A decorator to cleanly exit on a failed call to AWS.

    Catches a `botocore.exceptions.ClientError` raised from an action —
    this sort of error is raised if you are targeting a region that isn't
    set up (see `credstash setup`) — prints it to stderr and exits with 1.
    """
    from functools import wraps

    # wraps() preserves the decorated function's name/docstring, which the
    # bare wrapper previously discarded.
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except botocore.exceptions.ClientError as e:
            print(str(e), file=sys.stderr)
            sys.exit(1)

    return func_wrapper
A decorator to cleanly exit on a failed call to AWS. catch a `botocore.exceptions.ClientError` raised from an action. This sort of error is raised if you are targeting a region that isn't set up (see, `credstash setup`.
def _format_summary_node(self, task_class): modulename = task_class.__module__ classname = task_class.__name__ nodes = [] nodes.append( self._format_class_nodes(task_class)) nodes.append( self._format_config_nodes(modulename, classname) ) m...
Format a section node containg a summary of a Task class's key APIs.
def data_structure_builder(func):
    """Decorator to handle automatic data structure creation for pipe-utils."""
    @wraps(func)
    def ds_builder_wrapper(function, *args, **kwargs):
        # Try to wrap the callable in a DSBuilder; fall back to the raw
        # callable when no builder is registered for it.
        try:
            function = DSBuilder(function)
        except NoBuilder:
            pass
        return func(function, *args, **kwargs)
    return ds_builder_wrapper
Decorator to handle automatic data structure creation for pipe-utils.
def _create_sot_file(self):
    """Create a Source of Truth checkpoint file to compare against."""
    # Best-effort removal of any stale SOT file; ignore all errors
    # (e.g. file absent) on purpose.
    try:
        self._delete_file(filename="sot_file")
    except Exception:
        pass
    # Disable confirmation prompts around the checkpoint, then restore.
    commands = [
        "terminal dont-ask",
        "checkpoint file sot_file",
        "no terminal dont-ask",
    ]
    self._send_command_list(commands)
Create Source of Truth file to compare.
def get_doctype(self, index, name):
    """Return the doctype for *index*/*name*, refreshing the cache on miss."""
    if index not in self.indices:
        self.get_all_indices()
    mappings = self.indices.get(index, {})
    return mappings.get(name, None)
Returns a doctype given an index and a name
def WriteUInt160(self, value):
    """Write a UInt160 type to the stream.

    Args:
        value (UInt160): value to serialize.

    Raises:
        Exception: when `value` is not of neocore.UInt160 type.
    """
    # isinstance (rather than an exact type() check) also accepts UInt160
    # subclasses, which serialize identically.
    if isinstance(value, UInt160):
        value.Serialize(self)
    else:
        raise Exception("value must be UInt160 instance ")
Write a UInt160 type to the stream. Args: value (UInt160): Raises: Exception: when `value` is not of neocore.UInt160 type.
def _get_model_metadata(model_class, metadata, version=None):
    """Return user-defined metadata as a dict, augmented with the
    information all models should have (library version and model type).
    """
    from turicreate import __version__

    base = {
        'turicreate_version': __version__,
        'type': model_class,
    }
    if version is not None:
        base['version'] = str(version)
    # User metadata takes precedence, matching the original update() order.
    return {**base, **metadata}
Returns user-defined metadata, making sure information all models should have is also available, as a dictionary
def get_bbox_list(self, crs=None, buffer=None, reduce_bbox_sizes=None): bbox_list = self.bbox_list if buffer: bbox_list = [bbox.buffer(buffer) for bbox in bbox_list] if reduce_bbox_sizes is None: reduce_bbox_sizes = self.reduce_bbox_sizes if reduce_bbox_sizes: ...
Returns a list of bounding boxes that are the result of the split :param crs: Coordinate reference system in which the bounding boxes should be returned. If None the CRS will be the default CRS of the splitter. :type crs: CRS or None :param buffer: A percentage of each BBox ...
async def popHiveKey(self, path):
    """Remove and return the value of a key in the cell default hive.

    Requires the ('hive:pop', *path) permission for the current user.
    """
    perm = ('hive:pop',) + path
    # Raises if the user lacks permission — nothing is popped in that case.
    self.user.allowed(perm)
    return await self.cell.hive.pop(path)
Remove and return the value of a key in the cell default hive
async def start_server_in_loop(runner, hostname, port, agent):
    """Listen for http requests and dispatch them to the webapp.

    Args:
        runner: AppRunner to process the http requests.
        hostname: host name to listen from.
        port: port to listen from.
        agent: agent that owns the web app.
    """
    await runner.setup()
    # Keep a reference on the agent so the site can be stopped later.
    agent.web.server = aioweb.TCPSite(runner, hostname, port)
    await agent.web.server.start()
    logger.info(f"Serving on http://{hostname}:{port}/")
Listens to http requests and sends them to the webapp. Args: runner: AppRunner to process the http requests hostname: host name to listen from. port: port to listen from. agent: agent that owns the web app.
def scatter(self, x, y, xerr=None, yerr=None, mark='o', markstyle=None):
    """Plot a series of points.

    Plot a series of points (marks) that are not connected by a line.
    Shortcut for plot with linestyle=None.

    :param x: array containing x-values.
    :param y: array containing y-values.
    :param xerr: array containing errors on the x-values.
    :param yerr: array containing errors on the y-values.
    :param mark: mark symbol for the data points.
    :param markstyle: style of the marks.
    """
    # None replaces the old mutable [] defaults; normalize back to fresh
    # lists so the downstream plot() call still receives list defaults.
    self.plot(x, y,
              xerr=[] if xerr is None else xerr,
              yerr=[] if yerr is None else yerr,
              mark=mark, linestyle=None, markstyle=markstyle)
Plot a series of points. Plot a series of points (marks) that are not connected by a line. Shortcut for plot with linestyle=None. :param x: array containing x-values. :param y: array containing y-values. :param xerr: array containing errors on the x-values. :param yerr:...
def get_language(self): if 'lang' in self.request.GET: lang = self.request.GET['lang'].lower() if lang in settings.LANGUAGE_URL_MAP: return settings.LANGUAGE_URL_MAP[lang] if self.request.META.get('HTTP_ACCEPT_LANGUAGE'): best = self.get_best_language(...
Return a locale code we support on the site using the user's Accept-Language header to determine which is best. This mostly follows the RFCs but read bug 439568 for details.
def by_zipcode(self, zipcode, zipcode_type=None, zero_padding=True): if zero_padding: zipcode = str(zipcode).zfill(5) else: zipcode = str(zipcode) res = self.query( zipcode=zipcode, sort_by=N...
Search zipcode by exact 5 digits zipcode. No zero padding is needed. :param zipcode: int or str, the zipcode will be automatically zero padding to 5 digits. :param zipcode_type: str or :class`~uszipcode.model.ZipcodeType` attribute. by default, it returns any zipcode type. ...
def keys_values(self): keys_values = [] for value in self.keys.values(): if isinstance(value, list): keys_values += value elif isinstance(value, basestring) and not value.startswith('-'): keys_values.append(value) elif isinstance(value,...
Key values might be a list or not, always return a list.
def update_pricing(kwargs=None, call=None): url = 'https://cloudpricingcalculator.appspot.com/static/data/pricelist.json' price_json = salt.utils.http.query(url, decode=True, decode_type='json') outfile = os.path.join( __opts__['cachedir'], 'gce-pricing.p' ) with salt.utils.files.fopen(outfi...
Download most recent pricing information from GCE and save locally CLI Examples: .. code-block:: bash salt-cloud -f update_pricing my-gce-config .. versionadded:: 2015.8.0
def _check_structure(self): unused_variables = set() unused_operators = set() for variable in self.unordered_variable_iterator(): unused_variables.add(variable.full_name) for operator in self.unordered_operator_iterator(): unused_operators.add(operator.full_name) ...
This function applies some rules to check if the parsed model is proper. Currently, it only checks if isolated variable and isolated operator exists.
def form_invalid(self, form): messages.error(self.request, form.errors[NON_FIELD_ERRORS]) return redirect( reverse( 'forum_conversation:topic', kwargs={ 'forum_slug': self.object.topic.forum.slug, 'forum_pk': self.object...
Handles an invalid form.
def process_md5(md5_output, pattern=r"=\s+(\S+)"):
    """Extract the MD5 hash from command output.

    Output from Cisco IOS (ASA is similar)::

        .MD5 of flash:file_name Done!
        verify /md5 (flash:file_name) = 410db2a7015eaa42b1fe71f1bf3d59a2

    Raises ValueError when no hash can be found.
    """
    match = re.search(pattern, md5_output)
    if not match:
        raise ValueError("Invalid output from MD5 command: {}".format(md5_output))
    return match.group(1)
Process the string to retrieve the MD5 hash Output from Cisco IOS (ASA is similar) .MD5 of flash:file_name Done! verify /md5 (flash:file_name) = 410db2a7015eaa42b1fe71f1bf3d59a2
def filter_pyfqn(cls, value, relative_to=0): def collect_packages(element, packages): parent = element.eContainer() if parent: collect_packages(parent, packages) packages.append(element.name) packages = [] collect_packages(value, packages) ...
Returns Python form of fully qualified name. Args: relative_to: If greater 0, the returned path is relative to the first n directories.
def get_value(self, spec, row): column = spec.get('column') default = spec.get('default') if column is None: if default is not None: return self.convert_type(default, spec) return value = row.get(column) if is_empty(value): if d...
Returns the value or a dict with a 'value' entry plus extra fields.
def install_new_pipeline(): def new_create_pipeline(context, *args, **kwargs): result = old_create_pipeline(context, *args, **kwargs) result.insert(1, DAAPObjectTransformer(context)) return result old_create_pipeline = Pipeline.create_pipeline Pipeline.create_pipeline = new_create_pi...
Install above transformer into the existing pipeline creator.
def update_momentum_by_name(self, name, **kwargs): momentum = self.pop_momentum_by_name(name) velocity, since, until = momentum[:3] velocity = kwargs.get('velocity', velocity) since = kwargs.get('since', since) until = kwargs.get('until', until) return self.add_momentum(v...
Updates a momentum by the given name. :param name: the momentum name. :param velocity: (keyword-only) a new value for `velocity`. :param since: (keyword-only) a new value for `since`. :param until: (keyword-only) a new value for `until`. :returns: a momentum updated. :...
def var_expand(self, cmd, depth=0, formatter=DollarFormatter()): ns = self.user_ns.copy() try: frame = sys._getframe(depth + 1) except ValueError: pass else: ns.update(frame.f_locals) try: cmd = formatter.vformat(cmd, args=[], kwarg...
Expand python variables in a string. The depth argument indicates how many frames above the caller should be walked to look for the local namespace where to expand variables. The global namespace for expansion is always the user's interactive namespace.
def revoke_membership(self, username):
    """Revoke this user's team membership.

    :param str username: (required), name of the team member
    :returns: bool
    """
    url = self._build_url('memberships', username, base_url=self._api)
    response = self._delete(url)
    return self._boolean(response, 204, 404)
Revoke this user's team membership. :param str username: (required), name of the team member :returns: bool
def change_group(self, name, group): m1 = OmapiMessage.open(b"host") m1.update_object(dict(name=name)) r1 = self.query_server(m1) if r1.opcode != OMAPI_OP_UPDATE: raise OmapiError("opening host %s failed" % name) m2 = OmapiMessage.update(r1.handle) m2.update_object(dict(group=group)) r2 = self.query_se...
Change the group of a host given the name of the host. @type name: str @type group: str
def get_xml(self, fp, format=FORMAT_NATIVE):
    """Write this source's XML metadata, converted to *format*, to *fp*.

    Converted metadata may not contain all the same information as the
    native format.

    :param file fp: A path, or an open file-like object which the content
        should be written to.
    :param str format: desired format for the output.
    :returns: the filename written.
    """
    response = self._client.request('GET', getattr(self, format), stream=True)
    return stream.stream_response_to_file(response, path=fp)
Returns the XML metadata for this source, converted to the requested format. Converted metadata may not contain all the same information as the native format. :param file fp: A path, or an open file-like object which the content should be written to. :param str format: desired format for the ou...
def append_use_flags(atom, uses=None, overwrite=False):
    """Append a list of USE flags for a given package or DEPEND atom.

    CLI Example:

    .. code-block:: bash

        salt '*' portage_config.append_use_flags "app-admin/salt[ldap, -libvirt]"
        salt '*' portage_config.append_use_flags ">=app-admin/salt-0.14.1" "['ldap', '-libvirt']"
    """
    if not uses:
        # Fall back to the USE deps embedded in the atom, e.g. "pkg[a,-b]".
        uses = portage.dep.dep_getusedeps(atom)
    if not uses:
        return
    # Strip a trailing "[...]" USE-dep section only if one is present.
    # The previous unconditional atom[:atom.rfind('[')] chopped the last
    # character off atoms without brackets (rfind returning -1).
    bracket = atom.rfind('[')
    if bracket != -1:
        atom = atom[:bracket]
    append_to_package_conf('use', atom=atom, flags=uses, overwrite=overwrite)
Append a list of use flags for a given package or DEPEND atom CLI Example: .. code-block:: bash salt '*' portage_config.append_use_flags "app-admin/salt[ldap, -libvirt]" salt '*' portage_config.append_use_flags ">=app-admin/salt-0.14.1" "['ldap', '-libvirt']"
def _run_hooks(config, hooks, args, environ): skips = _get_skips(environ) cols = _compute_cols(hooks, args.verbose) filenames = _all_filenames(args) filenames = filter_by_include_exclude(filenames, '', config['exclude']) classifier = Classifier(filenames) retval = 0 for hook in hooks: ...
Actually run the hooks.
def start(self):
    """Start the connection.

    Clears the stop flag, then starts the work queue and the ZooKeeper
    client.
    """
    self.__stop = False
    self._queue.start()
    self._zk.start()
Starts the connection
def _itemsLoadedDone(self, data): if data is None: return self.continuation = data.get('continuation', None) self.lastUpdated = data.get('updated', None) self.lastLoadLength = len(data.get('items', [])) self.googleReader.itemsToObjects(self, data.get('items', [])...
Called when all items are loaded
def distance(cls, q0, q1):
    """Quaternion intrinsic distance.

    Find the intrinsic geodesic distance between q0 and q1.

    Params:
        q0: the first quaternion
        q1: the second quaternion

    Returns:
        A positive amount corresponding to the length of the geodesic arc
        connecting q0 to q1.
    """
    log_q = Quaternion.log_map(q0, q1)
    return log_q.norm
Quaternion intrinsic distance. Find the intrinsic geodesic distance between q0 and q1. Params: q0: the first quaternion q1: the second quaternion Returns: A positive amount corresponding to the length of the geodesic arc connecting q0 to q1. ...
def get_rect(self): if self.handle: left, top, right, bottom = win32gui.GetWindowRect(self.handle) return RECT(left, top, right, bottom) else: desktop = win32gui.GetDesktopWindow() left, top, right, bottom = win32gui.GetWindowRect(desktop) retu...
Get rectangle of app or desktop resolution Returns: RECT(left, top, right, bottom)
def compose_post(apikey, resize, rotation, noexif): check_rotation(rotation) check_resize(resize) post_data = { 'formatliste': ('', 'og'), 'userdrehung': ('', rotation), 'apikey': ('', apikey) } if resize and 'x' in resize: width, height = [ x.stri...
composes basic post requests
def purgeRDR(rh):
    """Purge the reader belonging to the virtual machine.

    Input:
        Request Handle with the following properties:
            function    - 'CHANGEVM'
            subfunction - 'PURGERDR'
            userid      - userid of the virtual machine

    Output:
        Request Handle updated with the results.
        Returns the overall return code (0 on success).
    """
    rh.printSysLog("Enter changeVM.purgeRDR")
    results = purgeReader(rh)
    # Merge the purge results into the request handle before reporting.
    rh.updateResults(results)
    rh.printSysLog("Exit changeVM.purgeRDR, rc: " + str(rh.results['overallRC']))
    return rh.results['overallRC']
Purge the reader belonging to the virtual machine. Input: Request Handle with the following properties: function - 'CHANGEVM' subfunction - 'PURGERDR' userid - userid of the virtual machine Output: Request Handle updated with the results. Return code ...
def gen_template_files(path):
    """Generate relative template paths under *path*.

    Skips the template-name and config-name files themselves.
    """
    path = path.rstrip(op.sep)
    for root, _, files in walk(path):
        for fname in files:
            if fname not in (TPLNAME, CFGNAME):
                yield op.relpath(op.join(root, fname), path)
Generate relative template paths.
def reset_term_stats(set_id, term_id, client_id, user_id, access_token): found_sets = [user_set for user_set in get_user_sets(client_id, user_id) if user_set.set_id == set_id] if len(found_sets) != 1: raise ValueError('{} set(s) found with id {}'.format(len(found_sets), set_id)) fo...
Reset the stats of a term by deleting and re-creating it.
def _root(self): _n = self while _n.parent: _n = _n.parent return _n
Attribute referencing the root node of the tree. :returns: the root node of the tree containing this instance. :rtype: Node
def replace(old, new):
    """Replace element node *old* with *new* within old's parent."""
    old.getparent().replace(old, new)
A simple way to replace one element node with another.
def economic_qs(K, epsilon=sqrt(finfo(float).eps)): r (S, Q) = eigh(K) nok = abs(max(Q[0].min(), Q[0].max(), key=abs)) < epsilon nok = nok and abs(max(K.min(), K.max(), key=abs)) >= epsilon if nok: from scipy.linalg import eigh as sp_eigh (S, Q) = sp_eigh(K) ok = S >= epsilon ...
r"""Economic eigen decomposition for symmetric matrices. A symmetric matrix ``K`` can be decomposed in :math:`\mathrm Q_0 \mathrm S_0 \mathrm Q_0^\intercal + \mathrm Q_1\ \mathrm S_1 \mathrm Q_1^ \intercal`, where :math:`\mathrm S_1` is a zero matrix with size determined by ``K``'s rank deficiency. ...
def put_attributes(self, item_name, attributes, replace=True,
                   expected_value=None):
    """Store attributes for a given item via the connection.

    :type item_name: string
    :param item_name: The name of the item whose attributes are being stored.
    :param attributes: The name/value pairs to store as attributes.
    :param replace: Whether existing values are replaced.
    :param expected_value: Conditional-put expectation, if any.
    """
    return self.connection.put_attributes(self, item_name, attributes,
                                          replace, expected_value)
Store attributes for a given item. :type item_name: string :param item_name: The name of the item whose attributes are being stored. :type attribute_names: dict or dict-like object :param attribute_names: The name/value pairs to store as attributes :type expected_value: list ...
def map_indices(fn, iterable, indices):
    r"""Map a function across selected indices of an iterable.

    Yields ``fn(arg)`` for positions listed in *indices* and the original
    element everywhere else. Roughly equivalent to, though more efficient
    than::

        (fn(arg) if i in indices else arg
         for i, arg in enumerate(iterable))
    """
    wanted = set(indices)
    for position, element in enumerate(iterable):
        yield fn(element) if position in wanted else element
r""" Map a function across indices of an iterable. Notes ----- Roughly equivalent to, though more efficient than:: lambda fn, iterable, *indices: (fn(arg) if i in indices else arg for i, arg in enumerate(iterable)) Examples -------- >>> a =...
def request_update_of_all_params(self):
    """Request an update of every parameter listed in the TOC."""
    table = self.toc.toc
    for group, names in table.items():
        for name in names:
            self.request_param_update('%s.%s' % (group, name))
Request an update of all the parameters in the TOC
def get_rollup_ttl(self, use_cached=True):
    """Retrieve the rollupTtl for this stream.

    The rollupTtl is the time to live (TTL) in seconds for the aggregate
    roll-ups of data points stored in the stream.

    :param bool use_cached: If False, always request the latest metadata
        from Device Cloud; if True, reuse cached metadata when available.
    """
    metadata = self._get_stream_metadata(use_cached)
    return int(metadata.get("rollupTtl"))
Retrieve the rollupTtl for this stream The rollupTtl is the time to live (TTL) in seconds for the aggregate roll-ups of data points stored in the stream. A roll-up expires after the configured amount of time and is automatically deleted. :param bool use_cached: If False, the function w...
def surface_based_cape_cin(pressure, temperature, dewpoint):
    r"""Calculate surface-based CAPE and CIN.

    Calculate the convective available potential energy (CAPE) and
    convective inhibition (CIN) of an upper-air profile for a
    surface-based parcel.
    """
    # Build the parcel profile (including the LCL point), then integrate.
    p, t, td, profile = parcel_profile_with_lcl(pressure, temperature, dewpoint)
    return cape_cin(p, t, td, profile)
r"""Calculate surface-based CAPE and CIN. Calculate the convective available potential energy (CAPE) and convective inhibition (CIN) of a given upper air profile for a surface-based parcel. CIN is integrated between the surface and LFC, CAPE is integrated between the LFC and EL (or top of sounding). In...
def write_file(path, data):
    """Write *data* to the file at *path*.

    NOTE(review): returns the file object, which has already been closed
    by the ``with`` block — callers presumably use the return value only
    for truthiness/inspection; confirm.
    """
    with open(path, 'w') as f:
        log.debug('setting %s contents:\n%s', path, data)
        f.write(data)
    return f
Writes data to specified path.
def _persist(source, path, encoder=None): import posixpath from dask.bytes import open_files import dask import pickle import json from intake.source.textfiles import TextFilesSource encoder = {None: str, 'str': str, 'json': json.dumps, 'pickle'...
Save list to files using encoding encoder : None or one of str|json|pickle None is equivalent to str
def plugins(self):
    """Get the set of plugins that this widget should display."""
    from fluent_contents import extensions
    pool = extensions.plugin_pool
    if self._plugins is None:
        return pool.get_plugins()
    return pool.get_plugins_by_name(*self._plugins)
Get the set of plugins that this widget should display.
def leaveEvent(self, event):
    """Override Qt method to hide the tooltip on leave."""
    super(ToolTipWidget, self).leaveEvent(event)
    self.hide()
Override Qt method to hide the tooltip on leave.
def flatten_pages(self, pages, level=1): flattened = [] for page in pages: if type(page) is list: flattened.append( { 'file': page[0], 'title': page[1], ...
Recursively flattens pages data structure into a one-dimensional data structure
def from_dict(document): type_name = document['name'] if type_name == ATTR_TYPE_INT: return IntType() elif type_name == ATTR_TYPE_FLOAT: return FloatType() elif type_name == ATTR_TYPE_ENUM: return EnumType(document['values']) elif type_name == ...
Create data type definition from a JSON-like object representation. Parameters ---------- document : dict Json-like object representation Returns ------- AttributeType
def handle_error(self, type_, value, tb): if not issubclass(type_, pywsgi.GreenletExit): self.server.loop.handle_error(self.environ, type_, value, tb) if self.response_length: self.close_connection = True else: tb_stream = traceback.format_exception(type_, val...
This method copies the code from pywsgi.WSGIHandler.handle_error, change the write part to be a reflection of traceback and environ
def get_soap_client(db_alias, client_class=None): if not beatbox: raise InterfaceError("To use SOAP API, you'll need to install the Beatbox package.") if client_class is None: client_class = beatbox.PythonClient soap_client = client_class() connection = connections[db_alias] cursor =...
Create the SOAP client for the current user logged in the db_alias The default created client is "beatbox.PythonClient", but an alternative client is possible. (i.e. other subtype of beatbox.XMLClient)
def handleOACK(self, pkt): if len(pkt.options.keys()) > 0: if pkt.match_options(self.context.options): log.info("Successful negotiation of options") self.context.options = pkt.options for key in self.context.options: log.info(" %...
This method handles an OACK from the server, syncing any accepted options.
def decode_step(self, step_input, states):
    """One-step decoding of the translation model.

    Parameters
    ----------
    step_input : NDArray
        Shape (batch_size,)
    states : list of NDArrays

    Returns
    -------
    step_output : NDArray
        Shape (batch_size, C_out)
    states : list
    step_additional_outputs : list
    """
    # Embed the target token, advance the decoder one step, then project
    # the decoder output onto the target vocabulary.
    step_output, states, step_additional_outputs =\
        self.decoder(self.tgt_embed(step_input), states)
    step_output = self.tgt_proj(step_output)
    return step_output, states, step_additional_outputs
One step decoding of the translation model. Parameters ---------- step_input : NDArray Shape (batch_size,) states : list of NDArrays Returns ------- step_output : NDArray Shape (batch_size, C_out) states : list step_additi...
def validate(self, uri):
    """Check that the query part of an URI is compatible with this descriptor.

    Return True if the URI is compatible.

    :param uri: an URI to check
    :return: bool
    """
    # Base component check first; bail out early on failure.
    if WURIComponentVerifier.validate(self, uri) is False:
        return False
    # Strict query validation: constructing WStrictURIQuery raises
    # ValueError when the parsed query violates the specs.
    try:
        WStrictURIQuery(
            WURIQuery.parse(uri.component(self.component())),
            *self.__specs,
            extra_parameters=self.__extra_parameters
        )
    except ValueError:
        return False
    return True
Check that an query part of an URI is compatible with this descriptor. Return True if the URI is compatible. :param uri: an URI to check :return: bool
def set_qos(self, port_name, type='linux-htb', max_rate=None, queues=None): queues = queues if queues else [] command_qos = ovs_vsctl.VSCtlCommand( 'set-qos', [port_name, type, max_rate]) command_queue = ovs_vsctl.VSCtlCommand( 'set-queue', [port_n...
Sets a Qos rule and creates Queues on the given port.
def file_size(filename): fd = os.open(filename, os.O_RDONLY) try: return os.lseek(fd, 0, os.SEEK_END) except KeyboardInterrupt as e: raise e except Exception as e: raise Exception( "file_size failed to obtain the size of '%s': %s" % (filename, str(e))) finally: ...
Obtains the size of a given file. @filename - Path to the file. Returns the size of the file.
def popleft(self, block=True, timeout=None):
    """Remove and return an item from the left side of the GeventDeque.

    If no elements are present, raises an IndexError. If *block* is True
    and *timeout* is None (the default), block if necessary until an item
    is available; a positive *timeout* bounds the wait.
    """
    return self._pop(block, timeout, left=True)
Remove and return an item from the left side of the GeventDeque. If no elements are present, raises an IndexError. If optional args *block* is True and *timeout* is ``None`` (the default), block if necessary until an item is available. If *timeout* is a positive number, it blocks at
def filter(self, record): request = get_request() if request: user = getattr(request, 'user', None) if user and not user.is_anonymous(): record.username = user.username else: record.username = '-' meta = getattr(request, 'ME...
Adds user and remote_addr to the record.
def random_alphanum(length=10, lower_only=False): character_set = ALPHANUM_LOWER if lower_only else ALPHANUM sample_size = 5 chars = random.sample(character_set, sample_size) while len(chars) < length: chars += random.sample(character_set, sample_size) random.shuffle(chars) return ''.joi...
Gets a random alphanumeric value using both letters and numbers. :param length: size of the random alphanumeric string. :param lower_only: boolean indicating if only lower case letters should be used. :return: alphanumeric string size of length This function uses all number except for: * ...
async def set_reply_markup(msg: Dict, request: 'Request', stack: 'Stack') \ -> None: from bernard.platforms.telegram.layers import InlineKeyboard, \ ReplyKeyboard, \ ReplyKeyboardRemove try: keyboard = stack.get_layer(InlineKeyboard) except KeyError: pass else: ...
Add the "reply markup" to a message from the layers :param msg: Message dictionary :param request: Current request being replied :param stack: Stack to analyze
def _update_pods_metrics(self, instance, pods): tags_map = defaultdict(int) for pod in pods['items']: pod_meta = pod.get('metadata', {}) pod_tags = self.kubeutil.get_pod_creator_tags(pod_meta, legacy_rep_controller_tag=True) services = self.kubeutil.match_services_for...
Reports the number of running pods on this node, tagged by service and creator We go through all the pods, extract tags then count them by tag list, sorted and serialized in a pipe-separated string (it is an illegal character for tags)
def delete(self, password, message=""):
    """Delete the currently authenticated redditor.

    WARNING! This action is IRREVERSIBLE. Use only if you're okay with
    NEVER accessing this reddit account again.

    :param password: password for currently authenticated account
    :param message: optional 'reason for deletion' message.
    """
    data = {'user': self.user.name,
            'passwd': password,
            'delete_message': message,
            'confirm': True}
    return self.request_json(self.config['delete_redditor'], data=data)
Delete the currently authenticated redditor. WARNING! This action is IRREVERSIBLE. Use only if you're okay with NEVER accessing this reddit account again. :param password: password for currently authenticated account :param message: optional 'reason for deletion' message. ...
def save_evaluations(self, evaluations_file = None): iterations = np.array(range(1, self.Y.shape[0] + 1))[:, None] results = np.hstack((iterations, self.Y, self.X)) header = ['Iteration', 'Y'] + ['var_' + str(k) for k in range(1, self.X.shape[1] + 1)] data = [header] + results.tolist() ...
Saves evaluations at each iteration of the optimization :param evaluations_file: name of the file in which the results are saved.
async def iter_all( self, direction: msg.StreamDirection = msg.StreamDirection.Forward, from_position: Optional[Union[msg.Position, msg._PositionSentinel]] = None, batch_size: int = 100, resolve_links: bool = True, require_master: bool = False, correlation_id: Opt...
Read through all the events in the database. Args: direction (optional): Controls whether to read forward or backward through the events. Defaults to StreamDirection.Forward from_position (optional): The position to start reading from. Defaults to photonpump...
def main(global_config, **settings): engine = engine_from_config(settings, "sqlalchemy.") DBSession.configure(bind=engine) Base.metadata.bind = engine config = Configurator(settings=settings) config.include("pyramid_jinja2") config.include("pyramid_debugtoolbar") config.add_route("home", "/"...
Return a Pyramid WSGI application.
async def join_voice_channel(self, guild_id, channel_id):
    """Alternative way to join a voice channel if the node is known."""
    voice_ws = self.get_voice_ws(guild_id)
    await voice_ws.voice_state(guild_id, channel_id)
Alternative way to join a voice channel if node is known.
def bin_spikes(spike_times, binsz):
    """Sort spike times into bins.

    :param spike_times: times of spike instances
    :type spike_times: list
    :param binsz: length of time bin to use
    :type binsz: float
    :returns: array of bin indices, one for each element in spike_times
    """
    times = np.asarray(spike_times, dtype=float)
    # Round to 5 decimals before flooring to absorb float error at
    # bin edges (e.g. 0.3/0.1 == 2.999...).
    return np.floor(np.around(times / binsz, 5)).astype(int)
Sort spike times into bins :param spike_times: times of spike instances :type spike_times: list :param binsz: length of time bin to use :type binsz: float :returns: list of bin indices, one for each element in spike_times
def eager_load_relations(self, models):
    """Eager load the relationships of the models.

    :param models: list
    :return: The models
    :rtype: list
    """
    for name, constraints in self._eager_load.items():
        # Only top-level relations here; nested names ("a.b") are skipped.
        if '.' not in name:
            models = self._load_relation(models, name, constraints)
    return models
Eager load the relationship of the models. :param models: :type models: list :return: The models :rtype: list