code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_modules(folder): if is_frozen(): zipname = os.path.dirname(os.path.dirname(__file__)) parentmodule = os.path.basename(os.path.dirname(__file__)) with zipfile.ZipFile(zipname, 'r') as f: prefix = "%s/%s/" % (parentmodule, folder) modnames = [os.path.splitext(n[...
Find all valid modules in the given folder which must be in in the same directory as this loader.py module. A valid module has a .py extension, and is importable. @return: all loaded valid modules @rtype: iterator of module
def handle(data_type, data, data_id=None, caller=None): if not data_id: data_id = data_type if data_id not in _handlers: _handlers[data_id] = dict( [(h.handle, h) for h in handlers.instantiate_for_data_type(data_type, data_id=data_id)]) for handler in list(_handlers[data_id].valu...
execute all data handlers on the specified data according to data type Args: data_type (str): data type handle data (dict or list): data Kwargs: data_id (str): can be used to differentiate between different data sets of the same data type. If not specified will default to ...
def pages(self): pages = [] for har_dict in self.har_data: har_parser = HarParser(har_data=har_dict) if self.page_id: for page in har_parser.pages: if page.page_id == self.page_id: pages.append(page) else: ...
The aggregate pages of all the parser objects.
def option_group_exists(name, tags=None, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: rds = conn.describe_option_groups(OptionGroupName=name) return {'exists': bool(rds)} except ClientErr...
Check to see if an RDS option group exists. CLI example:: salt myminion boto_rds.option_group_exists myoptiongr region=us-east-1
def to_b58check(self, testnet=False):
    """Generate a Base58Check encoding of this key.

    Args:
        testnet (bool): use the testnet byte representation when True.

    Returns:
        str: a Base58Check encoded string representing the key.
    """
    raw = self.testnet_bytes if testnet else bytes(self)
    return base58.b58encode_check(raw)
Generates a Base58Check encoding of this key. Args: testnet (bool): True if the key is to be used with testnet, False otherwise. Returns: str: A Base58Check encoded string representing the key.
def is_modified(self):
    """Return True when the list itself, or any contained item, is modified."""
    if self.__modified_data__ is not None:
        return True
    for item in self.__original_data__:
        try:
            item_changed = item.is_modified()
        except AttributeError:
            # Items without an is_modified() hook are treated as unchanged.
            continue
        if item_changed:
            return True
    return False
Returns whether list is modified or not
def to_api(in_dict, int_keys=None, date_keys=None, bool_keys=None): if int_keys: for in_key in int_keys: if (in_key in in_dict) and (in_dict.get(in_key, None) is not None): in_dict[in_key] = int(in_dict[in_key]) if date_keys: for in_key in date_keys: if (i...
Extends a given object for API Production.
def feature_list(): lib_features_c_array = ctypes.POINTER(Feature)() lib_features_size = ctypes.c_size_t() check_call(_LIB.MXLibInfoFeatures(ctypes.byref(lib_features_c_array), ctypes.byref(lib_features_size))) features = [lib_features_c_array[i] for i in range(lib_features_size.value)] return featu...
Check the library for compile-time features. The list of features are maintained in libinfo.h and libinfo.cc Returns ------- list List of :class:`.Feature` objects
def wait_for_port(self, port, timeout=10, **probe_kwargs):
    """Block until the given port starts accepting connections.

    :param port: int, port number to probe
    :param timeout: int or float (seconds), time to wait for the connection
    :param probe_kwargs: extra arguments passed to the Probe constructor
    :raises ProbeTimeout: if the timeout is reached
    """
    checker = functools.partial(self.is_port_open, port)
    probe = Probe(timeout=timeout, fnc=checker, **probe_kwargs)
    probe.run()
block until specified port starts accepting connections, raises an exc ProbeTimeout if timeout is reached :param port: int, port number :param timeout: int or float (seconds), time to wait for establishing the connection :param probe_kwargs: arguments passed to Probe constructor ...
def ls():
    """List all synchronized directories."""
    heading, body = cli_syncthing_adapter.ls()
    output_lines = []
    if heading:
        output_lines.append(heading)
    if body:
        output_lines.append(body.strip())
    for line in output_lines:
        click.echo(line)
List all synchronized directories.
def load_fasta_file(filename):
    """Load a FASTA file and return its sequences.

    Args:
        filename (str): Path to the FASTA file to load.

    Returns:
        list: all sequences in the file as Biopython SeqRecord objects.
    """
    with open(filename, "r") as fasta_handle:
        return [record for record in SeqIO.parse(fasta_handle, "fasta")]
Load a FASTA file and return the sequences as a list of SeqRecords Args: filename (str): Path to the FASTA file to load Returns: list: list of all sequences in the FASTA file as Biopython SeqRecord objects
def xor(s, pad):
    """XOR the string ``s`` with the one-time pad ``pad`` (cycled as needed)."""
    from itertools import cycle
    data = bytearray(force_bytes(s, encoding='latin-1'))
    key = bytearray(force_bytes(pad, encoding='latin-1'))
    mixed = bytearray(a ^ b for a, b in zip(data, cycle(key)))
    return binary_type(mixed)
XOR a given string ``s`` with the one-time-pad ``pad``
def config_profile_list(self):
    """Return the config profile name list from DCNM."""
    profiles = self._config_profile_list() or []
    # One entry per profile; missing 'profileName' keys yield None,
    # matching the original behavior.
    return [profile.get('profileName') for profile in profiles]
Return config profile list from DCNM.
def init_selection(self): si = self.shotverbrws.selected_indexes(0) if si: self.shot_ver_sel_changed(si[0]) else: self.shot_ver_sel_changed(QtCore.QModelIndex()) ai = self.assetverbrws.selected_indexes(0) if ai: self.asset_ver_sel_changed(ai[0]...
Call selection changed in the beginning, so signals get emitted once Emit shot_taskfile_sel_changed signal and asset_taskfile_sel_changed. :returns: None :raises: None
def get_agent(self, agent_id): collection = JSONClientValidated('authentication', collection='Agent', runtime=self._runtime) result = collection.find_one( dict({'_id': ObjectId(self._get_id(agent_id, 'authentic...
Gets the ``Agent`` specified by its ``Id``. In plenary mode, the exact ``Id`` is found or a ``NotFound`` results. Otherwise, the returned ``Agent`` may have a different ``Id`` than requested, such as the case where a duplicate ``Id`` was assigned to an ``Agent`` and retained for compati...
def glob1(self, dir_relpath, glob):
    """Return paths in ``dir_relpath`` matching ``glob`` that are not ignored."""
    if self.isignored(dir_relpath, directory=True):
        return []
    candidates = self._glob1_raw(dir_relpath, glob)
    base = self._relpath_no_dot(dir_relpath)
    return self._filter_ignored(
        candidates, selector=lambda name: os.path.join(base, name))
Returns a list of paths in path that match glob and are not ignored.
def calculate_shannon_entropy(self, data):
    """Return the Shannon entropy of ``data`` over ``self.charset``.

    :param data: string. The word to analyze.
    :returns: float, between 0.0 and 8.0
    """
    if not data:
        return 0
    total = float(len(data))
    entropy = 0
    for symbol in self.charset:
        probability = data.count(symbol) / total
        if probability > 0:
            entropy -= probability * math.log(probability, 2)
    return entropy
Returns the entropy of a given string. Borrowed from: http://blog.dkbza.org/2007/05/scanning-data-for-entropy-anomalies.html. :param data: string. The word to analyze. :returns: float, between 0.0 and 8.0
def local_path(self, url, filename=None, decompress=False, download=False): if download: return self.fetch(url=url, filename=filename, decompress=decompress) else: filename = self.local_filename(url, filename, decompress) return join(self.cache_directory_path, filenam...
What will the full local path be if we download the given file?
def register_run_plugins(self, plugin_name, plugin_class): if plugin_name in self.registered_plugins: raise PluginException("Plugin {} already registered! " "Duplicate plugins?".format(plugin_name)) self.logger.debug("Registering plugin %s", plugin_name) ...
Loads a plugin as a dictionary and attaches needed parts to correct Icetea run global parts. :param plugin_name: Name of the plugins :param plugin_class: PluginBase :return: Nothing
def _setup_http_session(self): headers = {"Content-type": "application/json"} if (self._id_token): headers.update({"authorization": "Bearer {}".format( self._id_token)}) self._session.headers.update(headers) self._session.verify = False
Sets up the common HTTP session parameters used by requests.
def _find_usage_networking_sgs(self): logger.debug("Getting usage for EC2 VPC resources") sgs_per_vpc = defaultdict(int) rules_per_sg = defaultdict(int) for sg in self.resource_conn.security_groups.all(): if sg.vpc_id is not None: sgs_per_vpc[sg.vpc_id] += 1 ...
calculate usage for VPC-related things
def ulocalized_gmt0_time(self, time, context, request):
    """Return the localized time as a string, converted to GMT+0."""
    date_value = get_date(context, time)
    if not date_value:
        return ""
    gmt_value = date_value.toZone("GMT+0")
    return self.ulocalized_time(gmt_value, context, request)
Returns the localized time in string format, but in GMT+0
def match_tagname(self, el, tag):
    """Match tag name (lower-cased for HTML documents, exact for XML)."""
    name = tag.name
    if not self.is_xml and name is not None:
        name = util.lower(name)
    # A missing name matches anything; otherwise it must equal the
    # element's own tag or the universal selector '*'.
    return name is None or name in (self.get_tag(el), '*')
Match tag name.
async def filter_new_posts(self, source_id, post_ids): new_ids = [] try: db_client = self._db posts_in_db = await db_client.get_known_posts(source_id, post_ids) new_ids = [p for p in post_ids if p not in posts_in_db] except Exception as exc: logger...
Filters ist of post_id for new ones. :param source_id: id of the source :type string: :param post_ids: list of post ids :type list: :returns: list of unknown post ids.
def begin(self, request, data): request = self.get_request( http_url = self.REQUEST_TOKEN_URL, parameters = dict(oauth_callback = self.get_callback(request))) content = self.load_request(request) if not content: return redirect('netauth-login') ...
Try to get Request Token from OAuth Provider and redirect user to provider's site for approval.
def count_open_fds():
    """Return the number of open file descriptors for the current process.

    .. warning:: will only work on UNIX-like OSes (requires ``lsof``).

    http://stackoverflow.com/a/7142094
    """
    pid = os.getpid()
    # -Ff restricts lsof output to one "f<fd>" field line per descriptor.
    output = subprocess.check_output(
        ['lsof', '-w', '-Ff', '-p', str(pid)])
    # check_output returns bytes on Python 3; the original split('\n') on
    # the raw result raised TypeError there. Decode before splitting.
    lines = output.decode('ascii', 'replace').split('\n')
    return len(
        [s for s in lines if s and s[0] == 'f' and s[1:].isdigit()]
    )
return the number of open file descriptors for current process. .. warning: will only work on UNIX-like os-es. http://stackoverflow.com/a/7142094
def chown(dirs, user=None, group=None): if isinstance(dirs, basestring): dirs = [dirs] args = ' '.join(dirs) if user and group: return sudo('chown {}:{} {}'.format(user, group, args)) elif user: return sudo('chown {} {}'.format(user, args)) elif group: return sudo('ch...
User sudo to set user and group ownership
def add_unique_runid(testcase, run_id=None):
    """Append a unique run id to the testcase description.

    Making descriptions unique between imports forces Polarion to update
    every testcase on each import.
    """
    base_description = testcase.get("description") or ""
    marker = run_id or id(add_unique_runid)
    testcase["description"] = '{}<br id="{}"/>'.format(base_description, marker)
Adds run id to the test description. The `run_id` runs makes the descriptions unique between imports and force Polarion to update every testcase every time.
def __get_values(self): values = [] if self.__remote: description = self.__client.describe(self.__point) if description is not None: if description['type'] != 'Point': raise IOTUnknown('%s is not a Point' % self.__point) values ...
Retrieve value information either via describe or point value listing. MUST be called within lock.
def emit(self, record): level = record.levelno if not FLAGS.is_parsed(): global _warn_preinit_stderr if _warn_preinit_stderr: sys.stderr.write( 'WARNING: Logging before flag parsing goes to stderr.\n') _warn_preinit_stderr = False self._log_to_stderr(record) eli...
Prints a record out to some streams. If FLAGS.logtostderr is set, it will print to sys.stderr ONLY. If FLAGS.alsologtostderr is set, it will print to sys.stderr. If FLAGS.logtostderr is not set, it will log to the stream associated with the current thread. Args: record: logging.LogRecord, ...
def apply_to(self, x, columns=False): if isinstance(x, np.ndarray) and len(x.shape) == 2 and x.shape[0] == 3 and columns: return x + self.t.reshape((3,1)) if isinstance(x, np.ndarray) and (x.shape == (3, ) or (len(x.shape) == 2 and x.shape[1] == 3)) and not columns: return x + se...
Apply this translation to the given object The argument can be several sorts of objects: * ``np.array`` with shape (3, ) * ``np.array`` with shape (N, 3) * ``np.array`` with shape (3, N), use ``columns=True`` * ``Translation`` * ``Rotation`` ...
def configure_lease(self, lease, lease_max, mount_point=DEFAULT_MOUNT_POINT): params = { 'lease': lease, 'lease_max': lease_max, } api_path = '/v1/{mount_point}/config/lease'.format(mount_point=mount_point) return self._adapter.post( url=api_path, ...
Configure lease settings for the AWS secrets engine. It is optional, as there are default values for lease and lease_max. Supported methods: POST: /{mount_point}/config/lease. Produces: 204 (empty body) :param lease: Specifies the lease value provided as a string duration with tim...
def format(logger, show_successful=True, show_errors=True, show_traceback=True): output = [] errors = logger.get_aborted_actions() if show_errors and errors: output += _underline('Failed actions:') for log in logger.get_aborted_logs(): if show_tra...
Prints a report of the actions that were logged by the given Logger. The report contains a list of successful actions, as well as the full error message on failed actions. :type logger: Logger :param logger: The logger that recorded what happened in the queue. :rtype: string :return: A string...
def _iterate_prefix(self, callsign, timestamp=timestamp_now): prefix = callsign if re.search('(VK|AX|VI)9[A-Z]{3}', callsign): if timestamp > datetime(2006,1,1, tzinfo=UTC): prefix = callsign[0:3]+callsign[4:5] while len(prefix) > 0: try: r...
truncate call until it corresponds to a Prefix in the database
def update_from_json(self, path=join('config', 'hdx_dataset_static.json')):
    """Update dataset metadata with static metadata from a JSON file.

    Args:
        path (str): Path to JSON dataset metadata.
            Defaults to config/hdx_dataset_static.json.

    Returns:
        None
    """
    # Delegate the metadata merge to the parent class, then rebuild the
    # separated resource objects from the refreshed metadata.
    super(Dataset, self).update_from_json(path)
    self.separate_resources()
Update dataset metadata with static metadata from JSON file Args: path (str): Path to JSON dataset metadata. Defaults to config/hdx_dataset_static.json. Returns: None
def do_history(self, line):
    """history: display a list of commands that have been entered."""
    self._split_args(line, 0, 0)
    for index, entry in enumerate(self._history):
        d1_cli.impl.util.print_info("{0: 3d} {1}".format(index, entry))
history Display a list of commands that have been entered.
def compose_all(stream, Loader=Loader):
    """Parse all YAML documents in *stream* and yield their representation trees."""
    composer = Loader(stream)
    try:
        while composer.check_node():
            yield composer.get_node()
    finally:
        # Always release loader resources, even if the consumer stops early.
        composer.dispose()
Parse all YAML documents in a stream and produce corresponding representation trees.
def toPIL(self, **attribs):
    """Convert the canvas to a PIL image."""
    import PIL.Image
    png_bytes = self.convert("png")
    buffer = io.BytesIO(png_bytes)
    return PIL.Image.open(buffer)
Convert canvas to a PIL image
def panzoom(marks):
    """Create a PanZoom interaction over the x/y dimension scales of *marks*.

    Domain-preserving scales are collected from every mark for both axes.
    """
    def collect_scales(dimension):
        collected = []
        for mark in marks:
            collected += mark._get_dimension_scales(dimension, preserve_domain=True)
        return collected
    return PanZoom(scales={'x': collect_scales('x'), 'y': collect_scales('y')})
Helper function for panning and zooming over a set of marks. Creates and returns a panzoom interaction with the 'x' and 'y' dimension scales of the specified marks.
def _execute_command(self, command, workunit_name=None, workunit_labels=None): workunit_name = workunit_name or command.executable workunit_labels = {WorkUnitLabel.TOOL} | set(workunit_labels or ()) with self.context.new_workunit(name=workunit_name, labels=workunit_labels,...
Executes a node or npm command via self._run_node_distribution_command. :param NodeDistribution.Command command: The command to run. :param string workunit_name: A name for the execution's work unit; default command.executable. :param list workunit_labels: Any extra :class:`pants.base.workunit.WorkUnitLabe...
def build_docs(location="doc-source", target=None, library="icetea_lib"): cmd_ar = ["sphinx-apidoc", "-o", location, library] try: print("Generating api docs.") retcode = check_call(cmd_ar) except CalledProcessError as error: print("Documentation build failed. Return code: {}".format...
Build documentation for Icetea. Start by autogenerating module documentation and finish by building html. :param location: Documentation source :param target: Documentation target path :param library: Library location for autodoc. :return: -1 if something fails. 0 if successfull.
def pickle_dumps(self, protocol=None):
    """Return a string with the pickle representation of self.

    ``self.pickle_protocol`` is used when *protocol* is None.
    """
    buffer = StringIO()
    chosen_protocol = self.pickle_protocol if protocol is None else protocol
    pmg_pickle_dump(self, buffer, protocol=chosen_protocol)
    return buffer.getvalue()
Return a string with the pickle representation. `protocol` selects the pickle protocol. self.pickle_protocol is used if `protocol` is None
def get(self, key, get_cas=False):
    """Get a key from the first server that holds it.

    :param key: Key's name
    :param get_cas: when True, return ``(value, cas)`` instead of the value
    :return: the stored value, or None (``(None, None)`` with cas) if absent
    """
    for server in self.servers:
        value, cas = server.get(key)
        if value is None:
            continue
        return (value, cas) if get_cas else value
    return (None, None) if get_cas else None
Get a key from server. :param key: Key's name :type key: six.string_types :param get_cas: If true, return (value, cas), where cas is the new CAS value. :type get_cas: boolean :return: Returns a key data from server. :rtype: object
def assert_page_source_contains(self, expected_value, failure_message='Expected page source to contain: "{}"'):
    """Assert that the page source contains *expected_value*."""
    def page_contains():
        return expected_value in self.driver_wrapper.page_source()
    self.webdriver_assert(page_contains, unicode(failure_message).format(expected_value))
Asserts that the page source contains the string passed in expected_value
def update(self, job_id, name=NotUpdated, description=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated): data = {} self._copy_if_updated(data, name=name, description=description, is_public=is_public, is_protected=is_protected) return self._pa...
Update a Job.
def square_root(n, epsilon=0.001):
    """Return the square root of *n* via Newton's method.

    :param n: non-negative number to take the root of.
    :param epsilon: maximum absolute error of ``guess**2 - n``.
    :raises ValueError: if *n* is negative (the original loop would
        never converge and spin forever).
    """
    if n < 0:
        raise ValueError("square_root() requires a non-negative number")
    guess = n / 2
    while abs(guess * guess - n) > epsilon:
        guess = (guess + (n / guess)) / 2
    return guess
Return square root of n, with maximum absolute error epsilon
def safe_call(request: Request, methods: Methods, *, debug: bool) -> Response:
    """Call a Request, catching exceptions to ensure we always return a Response.

    Args:
        request: The Request object.
        methods: The list of methods that can be called.
        debug: Include more information in error responses.

    Returns:
        A Response object.
    """
    with handle_exceptions(request, debug) as handler:
        method = methods.items[request.method]
        result = call(method, *request.args, **request.kwargs)
        handler.response = SuccessResponse(result=result, id=request.id)
    return handler.response
Call a Request, catching exceptions to ensure we always return a Response. Args: request: The Request object. methods: The list of methods that can be called. debug: Include more information in error responses. Returns: A Response object.
def valarray(shape, value=np.nan, typecode=None):
    """Return an ndarray of the given shape filled with *value*.

    Args:
        shape: shape of the output array.
        value: fill value; defaults to NaN. Uses ``np.nan`` because the
            ``np.NaN`` alias was removed in NumPy 2.0.
        typecode: dtype of the intermediate ones-array; defaults to bool.
    """
    if typecode is None:
        typecode = bool
    out = np.ones(shape, dtype=typecode) * value
    if not isinstance(out, np.ndarray):
        out = np.asarray(out)
    return out
Return an array of all value.
def capitalize_unicode_name(s):
    """Turn a string such as 'capital delta' into 'Delta'.

    Used as a transform in sanitize_identifier. Strings without 'capital'
    are returned unchanged.
    """
    index = s.find('capital')
    if index == -1:
        return s
    tail = s[index:].replace('capital', '').strip()
    if not tail:
        # Nothing follows 'capital' -- the original tail[0] raised
        # IndexError here; return the input unchanged instead.
        return s
    tail = tail[0].upper() + tail[1:]
    return s[:index] + tail
Turns a string such as 'capital delta' into the shortened, capitalized version, in this case simply 'Delta'. Used as a transform in sanitize_identifier.
def get_first_lang():
    """Get the first language of the Accept-Language header.

    Returns:
        The normalized locale (e.g. ``en_US``) of the first header entry,
        or False when the header is missing or empty.
    """
    # The original crashed with AttributeError when the header was absent
    # (.get() returned None); default to '' and filter empty entries.
    header = request.headers.get('Accept-Language') or ''
    request_lang = [part for part in header.split(',') if part]
    if request_lang:
        return locale.normalize(request_lang[0]).split('.')[0]
    return False
Get the first lang of Accept-Language Header.
def getTypeStr(_type):
    r"""Get the string representation of the given type."""
    if isinstance(_type, CustomType):
        return str(_type)
    try:
        return _type.__name__
    except AttributeError:
        # Objects without __name__ (e.g. instances) map to an empty string.
        return ''
r"""Gets the string representation of the given type.
def npm(usr_pwd=None, clean=False):
    """Handle npm for Node.js: update every outdated global package.

    Does nothing when npm is not installed.

    :param usr_pwd: credentials forwarded to ``cmd`` for the update calls.
    :param clean: unused; kept for interface compatibility.
    """
    try:
        cmd('which npm')
    except Exception:
        # npm not available -- nothing to do. Narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit still propagate;
        # cmd's actual failure type is project-defined.
        return
    print('-[npm]----------')
    outdated = cmd("npm outdated -g | awk 'NR>1 {print $1}'")
    if not outdated:
        return
    for pkg in getPackages(outdated):
        cmd('{} {}'.format('npm update -g ', pkg), usr_pwd=usr_pwd, run=global_run)
Handle npm for Node.js
def _remote_file_size(url=None, file_name=None, pb_dir=None): if file_name and pb_dir: url = posixpath.join(config.db_index_url, pb_dir, file_name) response = requests.head(url, headers={'Accept-Encoding': 'identity'}) response.raise_for_status() remote_file_size = int(response.headers['content-...
Get the remote file size in bytes Parameters ---------- url : str, optional The full url of the file. Use this option to explicitly state the full url. file_name : str, optional The base file name. Use this argument along with pb_dir if you want the full url to be constr...
def _add_membership_multicast_socket(self): self._membership_request = socket.inet_aton(self._multicast_group) \ + socket.inet_aton(self._multicast_ip) self._multicast_socket.setsockopt( socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, self._membership_request...
Make membership request to multicast :rtype: None
def to_commit(obj):
    """Convert *obj* to a commit if possible (dereferencing tags) and return it."""
    target = deref_tag(obj) if obj.type == 'tag' else obj
    if target.type != "commit":
        raise ValueError("Cannot convert object %r to type commit" % target)
    return target
Convert the given object to a commit if possible and return it
def tokenize(self): tokens = [] token_spec = [ ('mlc', r'/\*.*?\*/'), ('slc', r'//[^\r\n]*?\r?\n'), ('perl', r'<%.*?%>'), ('incl', r'`include'), ] tok_regex = '|'.join('(?P<%s>%s)' % pair for pair in token_spec) for m in re.finditer...
Tokenize the input text Scans for instances of perl tags and include directives. Tokenization skips line and block comments. Returns ------- list List of tuples: (typ, start, end) Where: - typ is "perl" or "incl" - start/end mar...
def export_wif(self) -> str:
    """Export the ECDSA private key in WIF form.

    WIF encodes the private key so it is easier to copy.

    :return: a WIF-encoded private key string.
    """
    # 0x80 version prefix + key bytes + 0x01 compressed-key suffix.
    payload = b''.join([b'\x80', self.__private_key, b'\01'])
    checksum = Digest.hash256(payload[0:34])
    encoded = base58.b58encode(b''.join([payload, checksum[0:4]]))
    return encoded.decode('ascii')
This interface is used to get export ECDSA private key in the form of WIF which is a way to encoding an ECDSA private key and make it easier to copy. :return: a WIF encode private key.
def effective_nsamples(self): try: act = numpy.array(list(self.acts.values())).max() except (AttributeError, TypeError): act = numpy.inf if self.burn_in is None: nperwalker = max(int(self.niterations // act), 1) elif self.burn_in.is_burned_in: ...
The effective number of samples post burn-in that the sampler has acquired so far.
def get(self, url, data=None):
    """Execute an HTTP GET request for *url*.

    ``data`` should be a dictionary of URL parameters.
    """
    response = self.http.get(
        url, headers=self.headers, params=data, **self.requests_params)
    return self.process(response)
Executes an HTTP GET request for the given URL. ``data`` should be a dictionary of url parameters
def plot_sed(sed, showlnl=False, **kwargs): ax = kwargs.pop('ax', plt.gca()) cmap = kwargs.get('cmap', 'BuGn') annotate_name(sed, ax=ax) SEDPlotter.plot_flux_points(sed, **kwargs) if np.any(sed['ts'] > 9.): if 'model_flux' in sed: SEDPlotter.plot_model...
Render a plot of a spectral energy distribution. Parameters ---------- showlnl : bool Overlay a map of the delta-loglikelihood values vs. flux in each energy bin. cmap : str Colormap that will be used for the delta-loglikelihood ...
def filter(self, chromosome, **kwargs) : def appendAllele(alleles, sources, snp) : pos = snp.start if snp.alt[0] == '-' : pass elif snp.ref[0] == '-' : pass else : sources[snpSet] = snp alleles.append(snp.alt) refAllele = chromosome.refSequence[pos] alleles.append(refAllele) sourc...
The default filter mixes applied all SNPs and ignores Insertions and Deletions.
def threshold(image, block_size=DEFAULT_BLOCKSIZE, mask=None): if mask is None: mask = np.zeros(image.shape[:2], dtype=np.uint8) mask[:] = 255 if len(image.shape) > 2 and image.shape[2] == 4: image = cv2.cvtColor(image, cv2.COLOR_BGRA2GRAY) res = _calc_block_mean_variance(image, mask...
Applies adaptive thresholding to the given image. Args: image: BGRA image. block_size: optional int block_size to use for adaptive thresholding. mask: optional mask. Returns: Thresholded image.
def revoker(self, revoker, **prefs): hash_algo = prefs.pop('hash', None) sig = PGPSignature.new(SignatureType.DirectlyOnKey, self.key_algorithm, hash_algo, self.fingerprint.keyid) sensitive = prefs.pop('sensitive', False) keyclass = RevocationKeyClass.Normal | (RevocationKeyClass.Sensiti...
Generate a signature that specifies another key as being valid for revoking this key. :param revoker: The :py:obj:`PGPKey` to specify as a valid revocation key. :type revoker: :py:obj:`PGPKey` :raises: :py:exc:`~pgpy.errors.PGPError` if the key is passphrase-protected and has not been unlocked ...
def _set_relationship_type(self, type_identifier, display_name=None, display_label=None, description=None, domain='Relationship'): if display_name is None: display_name = type_identifier if display_label is None: display_label = display_name if description is None: ...
Sets the relationship type
def state(self, *args, **kwargs):
    """Get the stored AWS state for a worker type via the ``state`` API call."""
    func_info = self.funcinfo["state"]
    return self._makeApiCall(func_info, *args, **kwargs)
Get AWS State for a worker type Return the state of a given workertype as stored by the provisioner. This state is stored as three lists: 1 for running instances, 1 for pending requests. The `summary` property contains an updated summary similar to that returned from `listWorkerTypeSum...
def ClaimRecords(self, limit=10000, timeout="30m", start_time=None, record_filter=lambda x: False, max_filtered=1000): if not self.locked: raise aff4.LockError("Queue must be locked to claim records.") with data...
Returns and claims up to limit unclaimed records for timeout seconds. Returns a list of records which are now "claimed", a claimed record will generally be unavailable to be claimed until the claim times out. Note however that in case of an unexpected timeout or other error a record might be claimed tw...
def custom_resolve(self):
    """Run the custom resolver, when one is defined, over all contained addresses.

    :return:
    """
    resolver = self.custom_resolver
    if not callable(resolver):
        return
    # Build the full replacement list before assigning, so addresses are
    # left untouched if the resolver raises partway through.
    self.addresses = [
        resolved
        for address in self.addresses
        for resolved in resolver(address)
    ]
If a custom resolver is defined, perform custom resolution on the contained addresses. :return:
def get_resource(self, path, params=None): url = '%s%s' % (path, self._param_list(params)) headers = { 'Accept': 'application/json;odata=minimalmetadata' } response = O365_DAO().getURL(self._url(url), headers) if response.status != 200: raise DataFailureEx...
O365 GET method. Return representation of the requested resource.
def tailor(pattern_or_root, dimensions=None, distributed_dim='time', read_only=False):
    """Return a TileManager wrapping the root descriptor with tailored dimensions.

    Keyword arguments:
    pattern_or_root -- an NCObject descriptor, or a filename string to open one.
    dimensions -- a dictionary configuring the dimension limits.
    distributed_dim -- name of the distributed dimension (default 'time').
    read_only -- open the manager read-only when True.
    """
    return TileManager(
        pattern_or_root,
        dimensions=dimensions,
        distributed_dim=distributed_dim,
        read_only=read_only,
    )
Return a TileManager to wrap the root descriptor and tailor all the dimensions to a specified window. Keyword arguments: root -- a NCObject descriptor. pattern -- a filename string to open a NCObject descriptor. dimensions -- a dictionary to configurate the dimensions limits.
def unpatch(self): if not self._patched: return for func in self._read_compilers + self._write_compilers: func.execute_sql = self._original[func] self.cache_backend.unpatch() self._patched = False
un-applies this patch.
def close(self):
    """Close the internal signalfd file descriptor if it isn't closed.

    :raises OSError:
        If the underlying ``close(2)`` fails. The error message matches
        those found in the manual page.
    """
    with self._close_lock:
        fd = self._sfd
        if fd < 0:
            return
        # Reset internal state before invoking close(2), matching the
        # original statement order.
        self._sfd = -1
        self._signals = frozenset()
        close(fd)
Close the internal signalfd file descriptor if it isn't closed :raises OSError: If the underlying ``close(2)`` fails. The error message matches those found in the manual page.
def value_text(self):
    """Return the text of the option currently selected in the ButtonGroup."""
    selected_value = self._selected.get()
    return next(
        (item.text for item in self._rbuttons if item.value == selected_value),
        "")
Sets or returns the option selected in a ButtonGroup by its text value.
def save_current_figure_as(self):
    """Save the currently selected figure, if any."""
    thumbnail = self.current_thumbnail
    if thumbnail is None:
        return
    self.save_figure_as(thumbnail.canvas.fig, thumbnail.canvas.fmt)
Save the currently selected figure.
def add_summary(self, summary, global_step=None): if isinstance(summary, bytes): summ = summary_pb2.Summary() summ.ParseFromString(summary) summary = summ for value in summary.value: if not value.metadata: continue if value.tag ...
Adds a `Summary` protocol buffer to the event file. This method wraps the provided summary in an `Event` protocol buffer and adds it to the event file. Parameters ---------- summary : A `Summary` protocol buffer Optionally serialized as a string. global...
def _use_memcache(self, key, options=None):
    """Return whether memcache should be used for this key.

    Resolution order: the explicit options, the per-key policy, the
    connection-level config, then True.

    Args:
        key: Key instance.
        options: ContextOptions instance, or None.

    Returns:
        True if the key should be cached in memcache, False otherwise.
    """
    flag = ContextOptions.use_memcache(options)
    if flag is not None:
        return flag
    flag = self._memcache_policy(key)
    if flag is not None:
        return flag
    flag = ContextOptions.use_memcache(self._conn.config)
    return flag if flag is not None else True
Return whether to use memcache for this key. Args: key: Key instance. options: ContextOptions instance, or None. Returns: True if the key should be cached in memcache, False otherwise.
def translate(self, vector, inc_alt_states=True):
    """Translate every atom in the AMPAL object.

    Parameters
    ----------
    vector : 3D Vector (tuple, list, numpy.array)
        Vector used for translation.
    inc_alt_states : bool, optional
        If true, atoms in all states (alternate conformations) are moved.
    """
    offset = numpy.array(vector)
    for atom in self.get_atoms(inc_alt_states=inc_alt_states):
        atom._vector += offset
Translates every atom in the AMPAL object. Parameters ---------- vector : 3D Vector (tuple, list, numpy.array) Vector used for translation. inc_alt_states : bool, optional If true, will rotate atoms in all states i.e. includes alternate conformations ...
def _create_interval_filter(interval): def filter_fn(value): if (not isinstance(value, six.integer_types) and not isinstance(value, float)): raise error.HParamsError( 'Cannot use an interval filter for a value of type: %s, Value: %s' % (type(value), value)) return interval.mi...
Returns a function that checkes whether a number belongs to an interval. Args: interval: A tensorboard.hparams.Interval protobuf describing the interval. Returns: A function taking a number (a float or an object of a type in six.integer_types) that returns True if the number belongs to (the closed) ...
def _polling_iteration(self):
    """Poll for scheduled task stop events.

    :return: None
    """
    # No task scheduled -- signal ready immediately.
    if self.__task is None:
        self.ready_event().set()
    # Task reported a stop event -- signal ready and notify the registry.
    elif self.__task.check_events() is True:
        self.ready_event().set()
        # NOTE(review): placement of this registry notification inside the
        # elif branch is inferred from statement order in the collapsed
        # source; confirm against the original formatting.
        self.registry().task_finished(self)
Poll for scheduled task stop events :return: None
def merge_entities(doc):
    """Merge each entity in *doc* into a single token.

    doc (Doc): The Doc object.
    RETURNS (Doc): The Doc object with merged entities.

    DOCS: https://spacy.io/api/pipeline-functions#merge_entities
    """
    with doc.retokenize() as retokenizer:
        for ent in doc.ents:
            root = ent.root
            merged_attrs = {"tag": root.tag, "dep": root.dep, "ent_type": ent.label}
            retokenizer.merge(ent, attrs=merged_attrs)
    return doc
Merge entities into a single token. doc (Doc): The Doc object. RETURNS (Doc): The Doc object with merged entities. DOCS: https://spacy.io/api/pipeline-functions#merge_entities
def job_runner(self): outputs = luigi.task.flatten(self.output()) for output in outputs: if not isinstance(output, luigi.contrib.hdfs.HdfsTarget): warnings.warn("Job is using one or more non-HdfsTarget outputs" + " so it will be run in local mode...
Get the MapReduce runner for this job. If all outputs are HdfsTargets, the DefaultHadoopJobRunner will be used. Otherwise, the LocalJobRunner which streams all data through the local machine will be used (great for testing).
def _print_drift_report(self): try: response = self._cloud_formation.describe_stack_resources(StackName=self._stack_name) rows = [] for resource in response.get('StackResources', []): row = [] row.append(resource.get('LogicalResourceId', 'unkno...
Report the drift of the stack. Args: None Returns: Good or Bad; True or False Note: not yet implemented
def _cli_check_format(fmt): if fmt is None: return None fmt = fmt.lower() if not fmt in api.get_formats(): errstr = "Format '" + fmt + "' does not exist.\n" errstr += "For a complete list of formats, use the 'bse list-formats' command" raise RuntimeError(errstr) return fm...
Checks that a basis set format exists and if not, raises a helpful exception
async def generate_waifu_insult(self, avatar): if not isinstance(avatar, str): raise TypeError("type of 'avatar' must be str.") async with aiohttp.ClientSession() as session: async with session.post("https://api.weeb.sh/auto-image/waifu-insult", headers=self.__headers, data={"ava...
Generate a waifu insult image. This function is a coroutine. Parameters: avatar: str - http/s url pointing to an image, has to have proper headers and be a direct link to an image Return Type: image data
def users_get(self, domain):
    """Retrieve the list of users for *domain* from the server.

    :param AuthDomain domain: The authentication domain to retrieve users from.
    :return: the result of the management GET request.
    """
    management_path = self._get_management_path(domain)
    return self.http_request(path=management_path, method='GET')
Retrieve a list of users from the server. :param AuthDomain domain: The authentication domain to retrieve users from. :return: :class:`~.HttpResult`. The list of users can be obtained from the returned object's `value` property.
def to_struct(cls, name=None): if name is None: name = cls.__name__ basic_attrs = dict([(attr_name, value) for attr_name, value in cls.get_attrs() if isinstance(value, Column)]) if not basic_attrs: return None ...
Convert the TreeModel into a compiled C struct
def GetBatchJobDownloadUrlWhenReady(client, batch_job_id, max_poll_attempts=MAX_POLL_ATTEMPTS): batch_job = GetBatchJob(client, batch_job_id) if batch_job['status'] == 'CANCELED': raise Exception('Batch Job with ID "%s" was canceled before completing.' % b...
Retrieves the downloadUrl when the BatchJob is complete. Args: client: an instantiated AdWordsClient used to poll the BatchJob. batch_job_id: a long identifying the BatchJob to be polled. max_poll_attempts: an int defining the number of times the BatchJob will be checked to determine whether it has...
def _endmsg(self, rd): msg = "" s = "" if rd.hours > 0: if rd.hours > 1: s = "s" msg += colors.bold(str(rd.hours)) + " hour" + s + " " s = "" if rd.minutes > 0: if rd.minutes > 1: s = "s" msg += color...
Returns an end message with elapsed time
def iterator(plugins, context): test = pyblish.logic.registered_test() state = { "nextOrder": None, "ordersWithError": set() } for plugin in plugins: state["nextOrder"] = plugin.order message = test(**state) if message: raise StopIteration("Stopped due...
An iterator for plug-in and instance pairs
def expand_variable_dicts( list_of_variable_dicts: 'List[Union[Dataset, OrderedDict]]', ) -> 'List[Mapping[Any, Variable]]': from .dataarray import DataArray from .dataset import Dataset var_dicts = [] for variables in list_of_variable_dicts: if isinstance(variables, Dataset): va...
Given a list of dicts with xarray object values, expand the values. Parameters ---------- list_of_variable_dicts : list of dict or Dataset objects Each value for the mappings must be of the following types: - an xarray.Variable - a tuple `(dims, data[, attrs[, encoding]])` that can ...
def getFeedContent(self, feed, excludeRead=False, continuation=None, loadLimit=20, since=None, until=None):
    """Return items for a particular feed.

    Delegates to ``_getFeedContent`` using the feed's fetch URL,
    forwarding all filtering/paging arguments unchanged.
    """
    fetch_url = feed.fetchUrl
    return self._getFeedContent(
        fetch_url, excludeRead, continuation, loadLimit, since, until)
Return items for a particular feed
def _write_iodir(self, iodir=None): if iodir is not None: self.iodir = iodir self.i2c.write_list(self.IODIR, self.iodir)
Write the specified byte value to the IODIR register. If no value is specified, the current buffered value will be written.
def logger_init(level): levellist = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG] handler = logging.StreamHandler() fmt = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) ' '-35s %(lineno) -5d: %(message)s') handler.setFormatter(logging.Formatter(fmt)) logger = log...
Initialize the logger for this thread. Sets the log level to ERROR (0), WARNING (1), INFO (2), or DEBUG (3), depending on the argument `level`.
def agent_service_deregister(consul_url=None, token=None, serviceid=None): ret = {} data = {} if not consul_url: consul_url = _get_config() if not consul_url: log.error('No Consul URL found.') ret['message'] = 'No Consul URL found.' ret['res'] = False ...
Used to remove a service. :param consul_url: The Consul server URL. :param serviceid: A serviceid describing the service. :return: Boolean and message indicating success or failure. CLI Example: .. code-block:: bash salt '*' consul.agent_service_deregister serviceid='redis'
def read_url(url): logging.debug('reading {url} ...'.format(url=url)) token = os.environ.get("BOKEH_GITHUB_API_TOKEN") headers = {} if token: headers['Authorization'] = 'token %s' % token request = Request(url, headers=headers) response = urlopen(request).read() return json.loads(res...
Reads given URL as JSON and returns data as loaded python object.
def apply_t0(self, hits): if HAVE_NUMBA: apply_t0_nb( hits.time, hits.dom_id, hits.channel_id, self._lookup_tables ) else: n = len(hits) cal = np.empty(n) lookup = self._calib_by_dom_and_channel for i in range(n): ...
Apply only t0s
def ping_directories_handler(sender, **kwargs):
    """Ping the configured directories when an entry is saved.

    Only runs for visible entries and when the
    ``SAVE_PING_DIRECTORIES`` setting is enabled.
    """
    entry = kwargs['instance']
    if not (entry.is_visible and settings.SAVE_PING_DIRECTORIES):
        return
    for directory_url in settings.PING_DIRECTORIES:
        # DirectoryPinger performs the ping as a side effect of construction.
        DirectoryPinger(directory_url, [entry])
Ping directories when an entry is saved.
def viable_source_types_for_generator (generator): assert isinstance(generator, Generator) if generator not in __viable_source_types_cache: __vstg_cached_generators.append(generator) __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator) return __viable_s...
Caches the result of 'viable_source_types_for_generator'.
def __PrintMessageCommentLines(self, message_type): description = message_type.description or '%s message type.' % ( message_type.name) width = self.__printer.CalculateWidth() - 3 for line in textwrap.wrap(description, width): self.__printer('// %s', line) PrintIn...
Print the description of this message.
def script_post_save(model, os_path, contents_manager, **kwargs): from nbconvert.exporters.script import ScriptExporter if model['type'] != 'notebook': return global _script_exporter if _script_exporter is None: _script_exporter = ScriptExporter(parent=contents_manager) log = content...
Convert notebooks to a Python script after save, using nbconvert. This replaces the removed `ipython notebook --script` option.
def get_option(self, option):
    """Resolve a configuration option.

    The instance attribute of the same name wins when it is set to a
    non-None value; otherwise the Django setting
    ``COUNTRIES_<OPTION>`` is returned.
    """
    own_value = getattr(self, option, None)
    if own_value is None:
        return getattr(settings, "COUNTRIES_{0}".format(option.upper()))
    return own_value
Get a configuration option, trying the options attribute first and falling back to a Django project setting.