code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def open_subreddit_page(self, name): from .subreddit_page import SubredditPage with self.term.loader('Loading subreddit'): page = SubredditPage(self.reddit, self.term, self.config, self.oauth, name) if not self.term.loader.exception: retur...
Open an instance of the subreddit page for the given subreddit name.
def _data_update(subjects, queue, run_flag): while run_flag.running: while not queue.empty(): data = queue.get() for subject in [s for s in subjects if not s.is_disposed]: subject.on_next(data) time.sleep(0.1)
Get data from background process and notify all subscribed observers with the new data
def list_launch_configurations(region=None, key=None, keyid=None, profile=None):
    """List all Launch Configurations.

    CLI example::

        salt myminion boto_asg.list_launch_configurations
    """
    configs = get_all_launch_configurations(region, key, keyid, profile)
    return [config.name for config in configs]
List all Launch Configurations. CLI example:: salt myminion boto_asg.list_launch_configurations
def volume_delete(pool, volume, **kwargs): conn = __get_conn(**kwargs) try: vol = _get_storage_vol(conn, pool, volume) return not bool(vol.delete()) finally: conn.close()
Delete a libvirt managed volume. :param pool: libvirt storage pool name :param volume: name of the volume to delete :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding def...
def save_csv(p, sheet): 'Save as single CSV file, handling column names as first line.' with p.open_text(mode='w') as fp: cw = csv.writer(fp, **csvoptions()) colnames = [col.name for col in sheet.visibleCols] if ''.join(colnames): cw.writerow(colnames) for r in Progre...
Save as single CSV file, handling column names as first line.
def random(self, *args, **kwargs):
    """Add a random index. Shortcut of :class:`recordlinkage.index.Random`::

        from recordlinkage.index import Random

        indexer = recordlinkage.Index()
        indexer.add(Random())

    :return: self, so calls can be chained.
    """
    # Forward the caller's arguments to the algorithm; the previous
    # implementation accepted *args/**kwargs but silently discarded them.
    indexer = Random(*args, **kwargs)
    self.add(indexer)
    return self
Add a random index. Shortcut of :class:`recordlinkage.index.Random`:: from recordlinkage.index import Random indexer = recordlinkage.Index() indexer.add(Random())
def create(self, name, network):
    """Create a new Account object and add it to this Accounts collection.

    Args:
        name (str): Account name.
        network (str): Type of cryptocurrency. Must be a member of
            SUPPORTED_NETWORKS (e.g. 'bitcoin', 'bitcoin_testnet',
            'litecoin', 'dogecoin').

    Returns:
        The new round.Account, already added to this collection.

    Raises:
        ValueError: If ``network`` is not a supported network.
    """
    # `x not in y` is the idiomatic membership test (PEP 8) vs `not x in y`.
    if network not in SUPPORTED_NETWORKS:
        raise ValueError('Network not valid!')
    account = self.wrap(self.resource.create(dict(name=name, network=network)))
    self.add(account)
    return account
Create a new Account object and add it to this Accounts collection. Args: name (str): Account name network (str): Type of cryptocurrency. Can be one of, 'bitcoin', ' bitcoin_testnet', 'litecoin', 'dogecoin'. Returns: The new round.Account
def get_peak_mem():
    """Return peak memory use of this process since it started.

    ``ru_maxrss`` is reported in kilobytes on Linux but in bytes on
    macOS, hence the extra factor of 1024 on darwin.
    """
    import resource
    divisor = 1024.0
    if sys.platform == 'darwin':
        divisor *= divisor
    return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / divisor
Returns peak memory use from process start until the moment it is called.
def img2code(self, key, img): code_template = \ "wx.ImageFromData({width}, {height}, " + \ "bz2.decompress(base64.b64decode('{data}'))).ConvertToBitmap()" code_alpha_template = \ "wx.ImageFromDataWithAlpha({width}, {height}, " + \ "bz2.decompress(base64.b6...
Pastes wx.Image into single cell
def register(self, *args): super(ConfigurableMeta, self).register(*args) from hfos.database import configschemastore configschemastore[self.name] = self.configschema
Register a configurable component in the configuration schema store
def update_snapshot(self, snapshot, display_name=None, display_description=None):
    """Update the specified values on the specified snapshot.

    You may specify one or more values to update; the work is delegated
    to the snapshot's own ``update`` method.
    """
    updates = dict(display_name=display_name,
                   display_description=display_description)
    return snapshot.update(**updates)
Update the specified values on the specified snapshot. You may specify one or more values to update.
def fit(self, col):
    """Prepare the transformer to convert data.

    Stores in ``self.default_val`` the first group key's timestamp
    (in nanoseconds since epoch) after counting occurrences of each
    casted date.

    Args:
        col(pandas.DataFrame): Data to transform.

    Returns:
        None
    """
    casted = self.safe_datetime_cast(col)
    counts = casted.groupby(casted).count()
    self.default_val = counts.index[0].timestamp() * 1e9
Prepare the transformer to convert data. Args: col(pandas.DataFrame): Data to transform. Returns: None
def certify_parameter(certifier, name, value, kwargs=None): try: certifier(value, **kwargs or {}) except CertifierError as err: six.raise_from( CertifierParamError( name, value, ), err)
Internal certifier for kwargs passed to Certifiable public methods. :param callable certifier: The certifier to use :param str name: The name of the kwargs :param object value: The value of the kwarg. :param bool required: Is the param required. Default=False. :raise...
def viterbi_alignment(es, fs, t, a): max_a = collections.defaultdict(float) l_e = len(es) l_f = len(fs) for (j, e) in enumerate(es, 1): current_max = (0, -1) for (i, f) in enumerate(fs, 1): val = t[(e, f)] * a[(i, j, l_e, l_f)] if current_max[1] < val: ...
return dictionary e in es -> f in fs
def _to_list(obj): ret = {} for attr in __attrs: if hasattr(obj, attr): ret[attr] = getattr(obj, attr) return ret
Convert snetinfo object to list
def uri(self): return url_for( '.{0}_files'.format(self.pid.pid_type), pid_value=self.pid.pid_value, filename=self.file.key)
Get file download link. .. note:: The URI generation assumes that you can download the file using the view ``invenio_records_ui.<pid_type>_files``.
def patch_os_module():
    """jaraco.windows provides the os.symlink and os.readlink functions.

    Monkey-patch the os module to include them when the platform's os
    module does not already provide them.
    """
    if not hasattr(os, 'symlink'):
        os.symlink = symlink
        os.path.islink = islink
    if not hasattr(os, 'readlink'):
        os.readlink = readlink
jaraco.windows provides the os.symlink and os.readlink functions. Monkey-patch the os module to include them if not present.
def _check_states_enum(cls): states_enum_name = cls.context.get_config('states_enum_name') try: cls.context['states_enum'] = getattr( cls.context.new_class, states_enum_name) except AttributeError: raise ValueError('No states enum given!') proper =...
Check if states enum exists and is proper one.
def copy(self):
    """Return a shallow copy of a pqdict."""
    cls = self.__class__
    # Rebuild via the constructor, carrying over the key and precedes
    # functions so ordering semantics are preserved.
    return cls(self, key=self._keyfn, precedes=self._precedes)
Return a shallow copy of a pqdict.
def serialize_attrs(self, *args): cls = type(self) result = {} for a in args: if hasattr(cls, a) and a not in cls.attrs_forbidden_for_serialization(): val = getattr(self, a) if is_list_like(val): result[a] = list(val) ...
Converts an instance to a dictionary with only the specified attributes as keys Args: *args (list): The attributes to serialize Examples: >>> customer = Customer.create(name="James Bond", email="007@mi.com", phone="007", city...
def are_equal(value1, value2):
    """Checks if two values are equal.

    The operation can be performed over values of any type.

    :param value1: the first value to compare
    :param value2: the second value to compare
    :return: True if the values are equal (two Nones count as equal),
        False otherwise.
    """
    # NOTE: the original used `or` here, which returned True whenever
    # EITHER value was None and made the next check unreachable.
    if value1 is None and value2 is None:
        return True
    # Exactly one missing value means the values differ.
    if value1 is None or value2 is None:
        return False
    return value1 == value2
Checks if two values are equal. The operation can be performed over values of any type. :param value1: the first value to compare :param value2: the second value to compare :return: true if values are equal and false otherwise
def replace(zpool, old_device, new_device=None, force=False): flags = [] target = [] if force: flags.append('-f') target.append(zpool) target.append(old_device) if new_device: target.append(new_device) res = __salt__['cmd.run_all']( __utils__['zfs.zpool_command']( ...
Replaces ``old_device`` with ``new_device`` .. note:: This is equivalent to attaching ``new_device``, waiting for it to resilver, and then detaching ``old_device``. The size of ``new_device`` must be greater than or equal to the minimum size of all the devices in a mirror or raidz...
def is_ancestor_of_book(self, id_, book_id):
    """Tests if an ``Id`` is an ancestor of a book.

    arg:    id (osid.id.Id): an ``Id``
    arg:    book_id (osid.id.Id): the ``Id`` of a book
    return: (boolean) - ``true`` if this ``id`` is an ancestor of
            ``book_id``, ``false`` otherwise
    """
    # Prefer the catalog session when one is attached; otherwise fall
    # back to the hierarchy session.
    if self._catalog_session is None:
        return self._hierarchy_session.is_ancestor(id_=id_, ancestor_id=book_id)
    return self._catalog_session.is_ancestor_of_catalog(id_=id_, catalog_id=book_id)
Tests if an ``Id`` is an ancestor of a book. arg: id (osid.id.Id): an ``Id`` arg: book_id (osid.id.Id): the ``Id`` of a book return: (boolean) - ``true`` if this ``id`` is an ancestor of ``book_id``, ``false`` otherwise raise: NotFound - ``book_id`` is not found...
def train(self, *args, **kwargs): objs = self._do_transform(*args, **kwargs) obj_list = [objs, ] if not isinstance(objs, Iterable) else objs for obj in obj_list: if not isinstance(obj, ODPSModelExpr): continue for meta in ['predictor', 'recommender']: ...
Perform training on a DataFrame. The label field is specified by the ``label_field`` method. :param train_data: DataFrame to be trained. Label field must be specified. :type train_data: DataFrame :return: Trained model :rtype: MLModel
def _generate_to_tempfile(self, generator): with temporary_file(cleanup=False, binary_mode=False) as output: generator.write(output) return output.name
Applies the specified generator to a temp file and returns the path to that file. We generate into a temp file so that we don't lose any manual customizations on error.
def _dat_read_params(fmt, sig_len, byte_offset, skew, tsamps_per_frame, sampfrom, sampto): start_flat_sample = sampfrom * tsamps_per_frame if (sampto + max(skew)) > sig_len: end_flat_sample = sig_len * tsamps_per_frame extra_flat_samples = (sampto + max(skew) - sig_len) * ts...
Calculate the parameters used to read and process a dat file, given its layout, and the desired sample range. Parameters ---------- fmt : str The format of the dat file sig_len : int The signal length (per channel) of the dat file byte_offset : int The byte offset of the...
def unwrap_raw(content):
    """Unwraps the callback and returns the raw content."""
    opener = get_start_symbol(content)
    closer = ']' if opener == '[' else '}'
    begin = content.find(opener)
    end = content.rfind(closer)
    # Slice is inclusive of the closing symbol.
    return content[begin:end + 1]
unwraps the callback and returns the raw content
def is_bootstrapped(metadata):
    """Return True if cihai is correctly bootstrapped."""
    if TABLE_NAME not in metadata.tables.keys():
        return False
    expected = set(UNIHAN_FIELDS + DEFAULT_COLUMNS)
    actual = {column.name for column in metadata.tables[TABLE_NAME].columns}
    return expected == actual
Return True if cihai is correctly bootstrapped.
def add_signature(name=None, inputs=None, outputs=None): if not name: name = "default" if inputs is None: inputs = {} if outputs is None: outputs = {} if not isinstance(inputs, dict): inputs = {"default": inputs} if not isinstance(outputs, dict): outputs = {"default": outputs} message = ...
Adds a signature to the module definition. NOTE: This must be called within a `module_fn` that is defining a Module. Args: name: Signature name as a string. If omitted, it is interpreted as 'default' and is the signature used when `Module.__call__` `signature` is not specified. inputs: A dict ...
def find_occurrences(self, resource=None, pymodule=None): tools = _OccurrenceToolsCreator(self.project, resource=resource, pymodule=pymodule, docs=self.docs) for offset in self._textual_finder.find_offsets(tools.source_code): occurrence = Occurrence(to...
Generate `Occurrence` instances
def printer(self): if not self._has_loaded: self.load() if not self._printer_name: raise exceptions.ConfigSectionMissingError('printer') if not self._printer: self._printer = getattr(printer, self._printer_name)(**self._printer_config) return self._pri...
Returns a printer that was defined in the config, or throws an exception. This method loads the default config if one hasn't already been loaded.
def update_model(self, words): extended_words = DefaultCompleter._DefaultCompleter__tokens[self.__language][:] extended_words.extend((word for word in set(words) if word not in DefaultCompleter._DefaultCompleter__tokens[self.__language])) self.setModel(QStringListM...
Updates the completer model. :param words: Words to update the completer with. :type words: tuple or list :return: Method success. :rtype: bool
def load(self, specfiles=None): if specfiles is None: specfiles = [_ for _ in viewkeys(self.info)] else: specfiles = aux.toList(specfiles) for specfile in specfiles: if specfile not in self.info: warntext = 'Error while calling "FiContainer.loa...
Imports the specified ``fic`` files from the hard disk. :param specfiles: the name of an ms-run file or a list of names. If None all specfiles are selected. :type specfiles: None, str, [str, str]
def magic_session(db_session=None, url=None): if db_session is not None: yield db_session else: session = get_session(url, expire_on_commit=False) try: try: yield session finally: session.commit() finally: sessio...
Either does nothing with the session you already have or makes one that commits and closes no matter what happens
def setup_list_pars(self): tdf = self.setup_temporal_list_pars() sdf = self.setup_spatial_list_pars() if tdf is None and sdf is None: return os.chdir(self.m.model_ws) try: apply_list_pars() except Exception as e: os.chdir("..") ...
main entry point for setting up list multiplier parameters
def additional_assets(context: Context): rsync_flags = '-avz' if context.verbosity == 2 else '-az' for path in context.app.additional_asset_paths: context.shell('rsync %s %s %s/' % (rsync_flags, path, context.app.asset_build_path))
Collects assets from GOV.UK frontend toolkit
def remove_root_book(self, book_id):
    """Removes a root book.

    arg:    book_id (osid.id.Id): the ``Id`` of a book
    raise:  NotFound - ``book_id`` is not a root
    raise:  NullArgument - ``book_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    """
    # Delegate to the catalog session when present, otherwise use the
    # hierarchy session directly.
    if self._catalog_session is None:
        return self._hierarchy_session.remove_root(id_=book_id)
    return self._catalog_session.remove_root_catalog(catalog_id=book_id)
Removes a root book. arg: book_id (osid.id.Id): the ``Id`` of a book raise: NotFound - ``book_id`` is not a root raise: NullArgument - ``book_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *comp...
def populate(self, blueprint, documents): documents = self.finish(blueprint, documents) frames = [] for document in documents: meta_document = {} for field_name in blueprint._meta_fields: meta_document[field_name] = document[field_name] doc...
Populate the database with documents
def _polygon_from_coords(coords, fix_geom=False, swap=True, dims=2): assert len(coords) % dims == 0 number_of_points = len(coords)/dims coords_as_array = np.array(coords) reshaped = coords_as_array.reshape(number_of_points, dims) points = [ (float(i[1]), float(i[0])) if swap else ((float(i[0...
Return Shapely Polygon from coordinates. - coords: list of alternating latitude / longitude coordinates - fix_geom: automatically fix geometry
def _check_euk_contamination(self, hmm_hit_tables): euk_hit_table = HMMreader(hmm_hit_tables.pop(-1)) other_hit_tables = [HMMreader(x) for x in hmm_hit_tables] reads_unique_to_eukaryotes = [] reads_with_better_euk_hit = [] for hit in euk_hit_table.names(): bits = [] ...
check_euk_contamination - Check output HMM tables hits reads that hit the 18S HMM with a higher bit score. Parameters ---------- hmm_hit_tables : array Array of paths to the output files produced by hmmsearch or nhmmer. run_stats...
def get_querystring(uri):
    """Get querystring information from a URI.

    :param uri: uri
    :return: dict mapping each query key to a list of values ({} if none)
    """
    query = urlparse.urlsplit(uri).query
    return urlparse.parse_qs(query)
Get Querystring information from uri. :param uri: uri :return: querystring info or {}
def end_time(self): try: return self.start_time + SCAN_DURATION[self.sector] except KeyError: return self.start_time
End timestamp of the dataset
def open_as_pillow(filename): with __sys_open(filename, 'rb') as f: data = BytesIO(f.read()) return Image.open(data)
This way can delete file immediately
def on_finish(self):
    """Called regardless of success or failure.

    Records the elapsed request time on the response object and, when a
    completion callback is registered, invokes it with the response.
    """
    response = self.response
    response.request_time = time.time() - self.start_time
    if self.callback:
        self.callback(response)
Called regardless of success or failure
def valid_ovsdb_addr(addr): m = re.match(r'unix:(\S+)', addr) if m: file = m.group(1) return os.path.isfile(file) m = re.match(r'(tcp|ssl):(\S+):(\d+)', addr) if m: address = m.group(2) port = m.group(3) if '[' in address: address = address.strip('[')....
Returns True if the given addr is valid OVSDB server address, otherwise False. The valid formats are: - ``unix:file`` - ``tcp:ip:port`` - ``ssl:ip:port`` If ip is IPv6 address, wrap ip with brackets (e.g., ssl:[::1]:6640). :param addr: str value of OVSDB server address. :return: True...
def _api_call(function): @wraps(function) def wrapper(*args, **kwargs): try: if not _webview_ready.wait(15): raise Exception('Main window failed to start') return function(*args, **kwargs) except NameError: raise Exception('Create a web view wi...
Decorator to call a pywebview API, checking for _webview_ready and raising appropriate exceptions on failure.
def write_record(self, event_str): header = struct.pack('Q', len(event_str)) header += struct.pack('I', masked_crc32c(header)) footer = struct.pack('I', masked_crc32c(event_str)) self._writer.write(header + event_str + footer)
Writes a serialized event to file.
def delete(self): logger.debug('Deleting Dagobah instance with ID {0}'.format(self.dagobah_id)) self.jobs = [] self.created_jobs = 0 self.backend.delete_dagobah(self.dagobah_id)
Delete this Dagobah instance from the Backend.
def popitem(self): heap = self._heap position = self._position try: end = heap.pop(-1) except IndexError: raise KeyError('pqdict is empty') if heap: node = heap[0] heap[0] = end position[end.key] = 0 self._si...
Remove and return the item with highest priority. Raises ``KeyError`` if pqdict is empty.
def getAccountNames(store, protocol=None): return ((meth.localpart, meth.domain) for meth in getLoginMethods(store, protocol))
Retrieve account name information about the given database. @param store: An Axiom Store representing a user account. It must have been opened through the store which contains its account information. @return: A generator of two-tuples of (username, domain) which refer to the given store.
def read_input(self, input_cls, filename, **kwargs):
    """Read in input and do some minimal preformatting.

    input_cls - the class to use to read the input
    filename  - input filename
    """
    reader = input_cls()
    reader.read_input(filename)
    return reader.get_data()
Read in input and do some minimal preformatting input_cls - the class to use to read the input filename - input filename
def uuidify(val): if uuidutils.is_uuid_like(val): return val else: try: int_val = int(val, 16) except ValueError: with excutils.save_and_reraise_exception(): LOG.error("Invalid UUID format %s. Please provide an " "integer ...
Takes an integer and transforms it to a UUID format. returns: UUID formatted version of input.
def update_assessment_offered(self, assessment_offered_form): collection = JSONClientValidated('assessment', collection='AssessmentOffered', runtime=self._runtime) if not isinstance(assessment_offered_form, ABCAssessmentOf...
Updates an existing assessment offered. arg: assessment_offered_form (osid.assessment.AssessmentOfferedForm): the form containing the elements to be updated raise: IllegalState - ``assessment_offered_form`` already used in an update transaction ...
async def seek(self, pos, whence=sync_io.SEEK_SET): return self._stream.seek(pos, whence)
Move to new file position. Argument offset is a byte count. Optional argument whence defaults to SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values are SEEK_CUR or 1 (move relative to current position, positive or negative), and SEEK_END or 2 (move relative ...
def update_rotation(self, dt, buttons): assert isinstance(buttons, dict) ma = buttons['right'] - buttons['left'] if ma != 0: self.stats['battery'] -= self.battery_use['angular'] self.rotation += ma * dt * self.angular_velocity a = math.radians(self.rotation) ...
Updates rotation and impulse direction
def add_api_key(key, value): if key is None or key == "": logger.error("Key cannot be empty") if value is None or value == "": logger.error("Value cannot be empty") from .. import datatools data = datatools.get_data() if "keys" not in data["discord"]: data["discord"]["keys"] ...
Adds a key to the bot's data Args: key: The name of the key to add value: The value for the key
def tmpconfig(request): SUBFOLDER = tempfile.mkdtemp() CONF = UserConfig('spyder-test', defaults=DEFAULTS, version=CONF_VERSION, subfolder=SUBFOLDER, raw_mode=True, ) def fin(): shutil.rmtre...
Fixtures that returns a temporary CONF element.
def cause_repertoire(self, mechanism, purview): if not purview: return np.array([1.0]) if not mechanism: return max_entropy_distribution(purview, self.tpm_size) purview = frozenset(purview) joint = np.ones(repertoire_shape(purview, self.tpm_size)) joint *=...
Return the cause repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the cause repertoire. purview (tuple[int]): The purview over which to calculate the cause repertoire. Returns: ...
def parse_tenant_config_path(config_path):
    """Convenience function for parsing django-tenants' path configuration
    strings.

    If the string contains '%s', the current tenant's schema name is
    inserted at that location. Otherwise the schema name is appended to
    the end of the string.

    :param config_path: A configuration path string that optionally
        contains a '%s' placeholder.
    """
    try:
        return config_path % connection.schema_name
    except (TypeError, ValueError):
        # No (usable) placeholder: append the schema name as a path segment.
        return os.path.join(config_path, connection.schema_name)
Convenience function for parsing django-tenants' path configuration strings. If the string contains '%s', then the current tenant's schema name will be inserted at that location. Otherwise the schema name will be appended to the end of the string. :param config_path: A configuration path string that optio...
def is_alive(self, container: Container) -> bool: uid = container.uid return uid in self.__dockerc and \ self.__dockerc[uid].status == 'running'
Determines whether a given container is still alive. Returns: `True` if the underlying Docker container for the given BugZoo container is still alive, otherwise `False`.
def kron(a, b):
    """Kronecker product of two TT-matrices or two TT-vectors."""
    # Delegate to the left operand when it knows how to kron itself.
    if hasattr(a, '__kron__'):
        return a.__kron__(b)
    if a is not None:
        raise ValueError(
            'Kron is waiting for two TT-vectors or two TT-matrices')
    return b
Kronecker product of two TT-matrices or two TT-vectors
def send_rpc_response(self, rpc_tag, result, response): if rpc_tag not in self.in_flight_rpcs: raise ArgumentError("In flight RPC could not be found, it may have timed out", rpc_tag=rpc_tag) del self.in_flight_rpcs[rpc_tag] response_message = { 'response': response, ...
Send a response to an RPC. Args: rpc_tag (str): The exact string given in a previous call to send_rpc_command result (str): The result of the operation. The possible values of response are: service_not_found, rpc_not_found, timeout, success, invalid_response, ...
def translocation(from_loc, to_loc): rv = _activity_helper(TRANSLOCATION) rv[EFFECT] = { FROM_LOC: Entity(namespace=BEL_DEFAULT_NAMESPACE, name=from_loc) if isinstance(from_loc, str) else from_loc, TO_LOC: Entity(namespace=BEL_DEFAULT_NAMESPACE, name=to_loc) if isinstance(to_loc, str) else to_lo...
Make a translocation dictionary. :param dict from_loc: An entity dictionary from :func:`pybel.dsl.entity` :param dict to_loc: An entity dictionary from :func:`pybel.dsl.entity` :rtype: dict
def generate_hash(self, length=30):
    """Generate a random alphanumeric string of the given length.

    Uses the OS cryptographic randomness source via
    ``random.SystemRandom``.

    :param length: number of characters to generate (default 30)
    :return: random string of ASCII letters and digits
    """
    import random, string
    chars = string.ascii_letters + string.digits
    choose = random.SystemRandom().choice
    # Renamed the accumulator: the original bound it to `hash`,
    # shadowing the builtin.
    token = ''.join(choose(chars) for _ in range(length))
    return token
Generate random string of given length
def parse(*models, **kwargs): if isinstance(models, tuple) and isinstance(models[0], list): models = models[0] config = kwargs.pop('config', False) state = kwargs.pop('state', False) profiles = kwargs.pop('profiles', []) if not profiles and hasattr(napalm_device, 'profile'): profiles...
Parse configuration from the device. models A list of models to be used when parsing. config: ``False`` Parse config. state: ``False`` Parse state. profiles: ``None`` Use certain profiles to parse. If not specified, will use the device default profile(s). ...
def __get_node(self, word): node = self.root for c in word: try: node = node.children[c] except KeyError: return None return node
Private function retrieving a final node of trie for given word Returns node or None, if the trie doesn't contain the word.
def data_to_string(self, data_element): stream = NativeIO() self.data_to_stream(data_element, stream) return stream.getvalue()
Converts the given data element into a string representation. :param data_element: object implementing :class:`everest.representers.interfaces.IExplicitDataElement` :returns: string representation (using the MIME content type configured for this representer)
def get_es_ids(self):
    """Read all the elasticsearch ids for an index."""
    # Restrict the source to 'uri', sort by it, then collect every hit id.
    hits = self.search.source(['uri']).sort(['uri']).scan()
    return [hit.meta.id for hit in hits]
reads all the elasticssearch ids for an index
def add_to_capabilities(self, capabilities): proxy_caps = {} proxy_caps['proxyType'] = self.proxyType['string'] if self.autodetect: proxy_caps['autodetect'] = self.autodetect if self.ftpProxy: proxy_caps['ftpProxy'] = self.ftpProxy if self.httpProxy: ...
Adds proxy information as capability in specified capabilities. :Args: - capabilities: The capabilities to which proxy will be added.
def to_json(self): if self._embedding: warnings.warn('Serialization of attached embedding ' 'to json is not supported. ' 'You may serialize the embedding to a binary format ' 'separately using vocab.embedding.serialize') ...
Serialize Vocab object to json string. This method does not serialize the underlying embedding.
def set_signal_type(self, sig_type):
    """Set the signal type of interest.

    Sets the signal type for which the SNR is calculated
    (inspiral, merger, and/or ringdown).

    Args:
        sig_type (str or list of str): Signal type(s); a bare string is
            wrapped in a one-element list.
    """
    types = [sig_type] if isinstance(sig_type, str) else sig_type
    self.snr_input.signal_type = types
Set the signal type of interest. Sets the signal type for which the SNR is calculated. This means inspiral, merger, and/or ringdown. Args: sig_type (str or list of str): Signal type desired by user. Choices are `ins`, `mrg`, `rd`, `all` for circular waveforms create...
def _parse(self, globals_dict): globals = {} if not isinstance(globals_dict, dict): raise InvalidGlobalsSectionException(self._KEYWORD, "It must be a non-empty dictionary".format(self._KEYWORD)) for section_name, properties in globals_...
Takes a SAM template as input and parses the Globals section :param globals_dict: Dictionary representation of the Globals section :return: Processed globals dictionary which can be used to quickly identify properties to merge :raises: InvalidResourceException if the input contains properties t...
def open_submission(self, url=None):
    """Select the current submission to view posts."""
    if url is None:
        item = self.get_selected_item()
        url = item['permalink']
        # Self-posts are remembered in the browsing history.
        if item.get('url_type') == 'selfpost':
            self.config.history.add(item['url_full'])
    self.selected_page = self.open_submission_page(url)
Select the current submission to view posts.
def del_calculation(job_id, confirmed=False): if logs.dbcmd('get_job', job_id) is None: print('There is no job %d' % job_id) return if confirmed or confirm( 'Are you sure you want to (abort and) delete this calculation and ' 'all associated outputs?\nThis action cannot be...
Delete a calculation and all associated outputs.
def export_kappa_im(model, fname=None): from .kappa_util import im_json_to_graph kappa = _prepare_kappa(model) imap = kappa.analyses_influence_map() im = im_json_to_graph(imap) for param in model.parameters: try: im.remove_node(param.name) except: pass if ...
Return a networkx graph representing the model's Kappa influence map. Parameters ---------- model : pysb.core.Model A PySB model to be exported into a Kappa IM. fname : Optional[str] A file name, typically with .png or .pdf extension in which the IM is rendered using pygraphviz....
def authenticate(self, api_key):
    """Logs user into Heroku with the given api_key."""
    self._api_key = api_key
    # Empty username, API key as password — the scheme the session expects.
    self._session.auth = ('', api_key)
    return self._verify_api_key()
Logs user into Heroku with given api_key.
def _find_devices_mac(self): self.keyboards.append(Keyboard(self)) self.mice.append(MightyMouse(self)) self.mice.append(Mouse(self))
Find devices on Mac.
def _next_record(self, next_line): record = self.loader.parse_record_stream(self.reader, next_line, self.known_format, self.no_record_parse, ...
Use loader to parse the record from the reader stream Supporting warc and arc records
def set_monitor(module):
    """Defines the monitor method on the module."""
    def monitor(name, tensor, track_data=True, track_grad=True):
        # Record the tensor together with its tracking flags under `name`.
        entry = {
            'tensor': tensor,
            'track_data': track_data,
            'track_grad': track_grad,
        }
        module.monitored_vars[name] = entry
    module.monitor = monitor
Defines the monitor method on the module.
def formula_sections(self):
    """Return all sections related to a formula, re-ordered according
    to the "depends" section (via the dependency tree when available)."""
    if self.dtree is None:
        # No dependency tree: keep manifest order, minus "config".
        return [name for name in self.manifest.sections() if name != "config"]
    return self.dtree.order
Return all sections related to a formula, re-ordered according to the "depends" section.
def combine_first(self, other): import pandas.core.computation.expressions as expressions def extract_values(arr): if isinstance(arr, (ABCIndexClass, ABCSeries)): arr = arr._values if needs_i8_conversion(arr): if is_extension_array_dtype(arr.dtype)...
Update null elements with value in the same location in `other`. Combine two DataFrame objects by filling null values in one DataFrame with non-null values from other DataFrame. The row and column indexes of the resulting DataFrame will be the union of the two. Parameters -----...
def clear_bucket_props(self, bucket): bucket_type = self._get_bucket_type(bucket.bucket_type) url = self.bucket_properties_path(bucket.name, bucket_type=bucket_type) url = self.bucket_properties_path(bucket.name) headers = {'Content-Type': 'appli...
reset the properties on the bucket object given
def getDigitalMinimum(self, chn=None): if chn is not None: if 0 <= chn < self.signals_in_file: return self.digital_min(chn) else: return 0 else: digMin = np.zeros(self.signals_in_file) for i in np.arange(self.signals_in_file...
Returns the minimum digital value of signal edfsignal. Parameters ---------- chn : int channel number Examples -------- >>> import pyedflib >>> f = pyedflib.data.test_generator() >>> f.getDigitalMinimum(0) -32768 >>> f._close(...
def reduce(self, dimensions=None, function=None, spreadfn=None, **kwargs):
    """Applies a reduce function to all ViewableElement objects.

    See :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__`
    for more information.

    :param dimensions: dimensions to reduce over (defaults to none)
    :param function: reduction function to apply
    :param spreadfn: optional spread function
    :return: result of dispatching 'reduce' through ``__call__``
    """
    # A mutable default ([]) is shared across every call of the function;
    # use None as the sentinel and build a fresh list per invocation.
    if dimensions is None:
        dimensions = []
    kwargs['_method_args'] = (dimensions, function, spreadfn)
    return self.__call__('reduce', **kwargs)
Applies a reduce function to all ViewableElement objects. See :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` for more information.
def get_tree_depth(self):
    """Finds depth of this tree (0 for a node without children)."""
    if not self.children:
        return 0
    deepest_child = max(child.get_tree_depth() for child in self.children)
    return 1 + deepest_child
Finds depth of this tree.
def _enter_single_subdir(root_dir): current_cwd = os.getcwd() try: dest_dir = root_dir dir_list = os.listdir(root_dir) if len(dir_list) == 1: first = os.path.join(root_dir, dir_list[0]) if os.path.isdir(first): dest_dir = first else: ...
if the given directory has just a single subdir, enter that
def get_instance(self, payload): return FieldInstance( self._version, payload, assistant_sid=self._solution['assistant_sid'], task_sid=self._solution['task_sid'], )
Build an instance of FieldInstance :param dict payload: Payload response from the API :returns: twilio.rest.autopilot.v1.assistant.task.field.FieldInstance :rtype: twilio.rest.autopilot.v1.assistant.task.field.FieldInstance
def fetch(self): params = values.of({}) payload = self._version.fetch( 'GET', self._uri, params=params, ) return PublishedTrackInstance( self._version, payload, room_sid=self._solution['room_sid'], partic...
Fetch a PublishedTrackInstance :returns: Fetched PublishedTrackInstance :rtype: twilio.rest.video.v1.room.room_participant.room_participant_published_track.PublishedTrackInstance
def wait_for_vacancy(self, processor_type): with self._condition: self._condition.wait_for(lambda: ( self._processor_available(processor_type) or self._cancelled_event.is_set())) if self._cancelled_event.is_set(): raise WaitCancelledExcepti...
Waits for a particular processor type to have the capacity to handle additional transactions or until is_cancelled is True. Args: processor_type (ProcessorType): The family, and version of the transaction processor. Returns: Processor
def read_metadata(self, symbol): sym = self._get_symbol_info(symbol) if not sym: raise NoDataFoundException("Symbol does not exist.") x = self._symbols.find_one({SYMBOL: symbol}) return x[USERMETA] if USERMETA in x else None
Reads user defined metadata out for the given symbol Parameters ---------- symbol: str symbol for the given item in the DB Returns ------- ?
def get_kde_home_dir (): if os.environ.get("KDEHOME"): kde_home = os.path.abspath(os.environ["KDEHOME"]) else: home = os.environ.get("HOME") if not home: return kde3_home = os.path.join(home, ".kde") kde4_home = os.path.join(home, ".kde4") if fileutil....
Return KDE home directory or None if not found.
def table_to_source_list(table, src_type=OutputSource): source_list = [] if table is None: return source_list for row in table: src = src_type() for param in src_type.names: if param in table.colnames: val = row[param] if isinstance(val, np...
Convert a table of data into a list of sources. A single table must have consistent source types given by src_type. src_type should be one of :class:`AegeanTools.models.OutputSource`, :class:`AegeanTools.models.SimpleSource`, or :class:`AegeanTools.models.IslandSource`. Parameters ---------- ...
def guest_capture(self, userid, image_name, capture_type='rootonly', compress_level=6): action = ("capture guest '%(vm)s' to generate image '%(img)s'" % {'vm': userid, 'img': image_name}) with zvmutils.log_and_reraise_sdkbase_error(action): self._vmops...
Capture the guest to generate a image :param userid: (str) the user id of the vm :param image_name: (str) the unique image name after capture :param capture_type: (str) the type of capture, the value can be: rootonly: indicate just root device will be captured alld...
def add_update_callback(self, callback, device): self._update_callbacks.append([callback, device]) _LOGGER.debug('Added update callback to %s on %s', callback, device)
Register as callback for when a matching device changes.
def save(callLog, logFilename): with open(logFilename, "wb") as outp: cPickle.dump(callLog, outp)
Save the call log history into this file. @param logFilename (path) Filename in which to save a pickled version of the call logs.
def mapzen_elevation_rgb(arr):
    """Encode elevation values to RGB compatible with Mapzen tangram.

    Attributes
    ----------
    arr : numpy ndarray
        Image array to encode.

    Returns
    -------
    out : numpy ndarray
        RGB array (3, h, w), dtype uint8.
    """
    # Shift by 32768 so negative elevations encode, clamp to 16-bit range.
    shifted = np.clip(arr + 32768.0, 0.0, 65535.0)
    channels = [shifted / 256, shifted % 256, (shifted * 256) % 256]
    return np.stack(channels).astype(np.uint8)
Encode elevation value to RGB values compatible with Mapzen tangram. Attributes ---------- arr : numpy ndarray Image array to encode. Returns ------- out : numpy ndarray RGB array (3, h, w)
def track_end(self):
    """Ends tracking of attributes changes.

    Returns the changes that occurred to the attributes. Only the final
    state of each attribute is obtained.
    """
    self.__tracking = False
    # Hand back the accumulated changes and start a fresh record.
    recorded, self.__changes = self.__changes, {}
    return recorded
Ends tracking of attributes changes. Returns the changes that occurred to the attributes. Only the final state of each attribute is obtained
def list_properties(self, list_all=False): if list_all: props = [] for k,v in self.env.property_rules.rdl_properties.items(): if type(self.inst) in v.bindable_to: props.append(k) for k,v in self.env.property_rules.user_properties.items(): ...
Lists properties associated with this node. By default, only lists properties that were explicitly set. If ``list_all`` is set to ``True`` then lists all valid properties of this component type Parameters ---------- list_all: bool If true, lists all valid properties ...
def create_socket(): sock = socket.socket(PF_CAN, socket.SOCK_RAW, CAN_RAW) log.info('Created a socket') return sock
Creates a raw CAN socket. The socket will be returned unbound to any interface.
def make(parser): mds_parser = parser.add_subparsers(dest='subcommand') mds_parser.required = True mds_create = mds_parser.add_parser( 'create', help='Deploy Ceph MDS on remote host(s)' ) mds_create.add_argument( 'mds', metavar='HOST[:NAME]', nargs='+', ...
Ceph MDS daemon management