code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def getDebt(self):
    """Return the total outstanding debt on the account.

    Sums the principal, interest, fees and penalty balances.

    :return: total debt as a float
    """
    balance_fields = (
        'principalBalance',
        'interestBalance',
        'feesBalance',
        'penaltyBalance',
    )
    return sum(float(self[field]) for field in balance_fields)
Sums up all the balances of the account and returns them.
def overlap(ival0, ival1):
    """Determine whether two interval tuples overlap.

    :param ival0: an ``(lo, hi)`` interval tuple
    :param ival1: an ``(lo, hi)`` interval tuple
    :return: True if the intervals share a region of positive length;
        intervals that merely touch at an endpoint do not overlap.
    """
    lo0, hi0 = ival0
    lo1, hi1 = ival1
    # Overlap length is the smaller upper bound minus the larger lower
    # bound; positive length means a genuine overlap.
    return min(hi0, hi1) - max(lo0, lo1) > 0
Determine if two interval tuples have overlap. Args: ival0 ((int,int)): An interval tuple ival1 ((int,int)): An interval tuple Returns: (bool): True if the intervals overlap, otherwise False
async def publish(self, message):
    """Push one server-sent-events message to this listener.

    Formats ``message`` as an SSE ``data:`` frame, writes it to the
    response stream and flushes immediately so the client receives it.

    :param message: payload to send (interpolated into the frame)
    """
    try:
        self.write('data: {}\n\n'.format(message))
        await self.flush()
    except StreamClosedError:
        # Client disconnected; mark this handler finished so the
        # publisher stops sending to it.
        self.finished = True
Pushes data to a listener.
def removeClassBreak(self, label):
    """Remove the classification break whose label matches ``label``.

    :param label: label of the class break to remove
    :return: True if a break was found and removed, False otherwise
    """
    # Bug fixes vs. original: an unreachable `del v` followed the
    # return, and list.remove() re-scanned the list while it was being
    # iterated; deleting by index is direct and we return immediately.
    for i, info in enumerate(self._classBreakInfos):
        if info['label'] == label:
            del self._classBreakInfos[i]
            return True
    return False
Removes a classification break value from the renderer.
def load_z_meso(self,z_meso_path): self.z_meso = [] z_meso_file_path = os.path.join(z_meso_path, self.Z_MESO_FILE_NAME) if not os.path.exists(z_meso_file_path): raise Exception("z_meso.txt file: '{}' does not exist.".format(uwg_param_file)) f = open(z_meso_file_path,'r') ...
Open the z_meso.txt file and return heights as list
def log_url(self, url_data):
    """Forward a newly checked URL to every configured logger.

    :param url_data: the URL check result to log
    """
    self.check_active_loggers()
    printable = self.do_print(url_data)
    for sink in self.loggers:
        sink.log_filter_url(url_data, printable)
Send new url to all configured loggers.
def phase_by(val: Any, phase_turns: float, qubit_index: int, default: TDefault = RaiseTypeErrorIfNotProvided): getter = getattr(val, '_phase_by_', None) result = NotImplemented if getter is None else getter( phase_turns, qubit_index) if result is not NotImplemented: return resul...
Returns a phased version of the effect. For example, an X gate phased by 90 degrees would be a Y gate. This works by calling `val`'s _phase_by_ method and returning the result. Args: val: The value to describe with a unitary matrix. phase_turns: The amount to phase the gate, in fractio...
def contains_is_html(cls, data):
    """Report whether ``data`` contains at least one ``*IsHTML`` key.

    Recurses into nested dict values so deeply nested flags are found.

    :param data: mapping to inspect
    :return: True if any key, at any depth, ends with ``"IsHTML"``
    """
    for key, value in data.items():
        if isinstance(key, str) and key.endswith("IsHTML"):
            return True
        is_mapping = isinstance(value, (OrderedDict, dict))
        if is_mapping and cls.contains_is_html(value):
            return True
    return False
Detect if the problem has at least one "xyzIsHTML" key
def pause(self, queue_name, kw_in=None, kw_out=None, kw_all=None, kw_none=None, kw_state=None, kw_bcast=None): command = ["PAUSE", queue_name] if kw_in: command += ["in"] if kw_out: command += ["out"] if kw_all: command += ["all"] ...
Pause a queue. Unfortunately, the PAUSE keywords are mostly reserved words in Python, so I've been a little creative in the function variable names. Open to suggestions to change it (canardleteer) :param queue_name: The job queue we are modifying. :param kw_in: pause the queue ...
def bogoliubov_trans(p, q, theta):
    """Yield the two-qubit circuit for a 2-mode Bogoliubov transformation.

    The XY+YX Hamiltonian is conjugated into XX+YY form so its time
    evolution can be expressed as a power of the iSWAP gate.

    Args:
        p: the first qubit.
        q: the second qubit.
        theta: Bogoliubov angle; the iSWAP exponent is ``-4*theta/pi``.

    Yields:
        cirq operations implementing the transformation.
    """
    # Fix: the flattened source contained a stray `r` token here (the
    # remnant of an r"""...""" docstring marker), a syntax error.
    expo = -4 * theta / np.pi
    yield cirq.X(p)
    yield cirq.S(p)
    yield cirq.ISWAP(p, q)**expo
    yield cirq.S(p) ** 1.5
    yield cirq.X(p)
r"""The 2-mode Bogoliubov transformation is mapped to two-qubit operations. We use the identity X S^\dag X S X = Y X S^\dag Y S X = X to transform the Hamiltonian XY+YX to XX+YY type. The time evolution of the XX + YY Hamiltonian can be expressed as a power of the iSWAP gate. Args: p: the fi...
def decodeRPCErrorMsg(e): found = re.search( ( "(10 assert_exception: Assert Exception\n|" "3030000 tx_missing_posting_auth)" ".*: (.*)\n" ), str(e), flags=re.M, ) if found: return found.group(2).strip() else: return str...
Helper function to decode the raised Exception and give it a python Exception class
def start(self):
    """Start the indicator in a separate process.

    Spawns ``self.indicator`` via multiprocessing when ``self.run`` is
    True; the handle is also stored on ``self.job``.

    :returns: multiprocessing job object
    """
    # NOTE(review): the flattened source is ambiguous about whether the
    # return sits inside the conditional; if it does not, calling with
    # self.run False would raise AttributeError on self.job — confirm.
    if self.run is True:
        self.job = multiprocessing.Process(target=self.indicator)
        self.job.start()
        return self.job
Indicate that we are performing work in a thread. :returns: multiprocessing job object
def simulate(radius=5e-6, sphere_index=1.339, medium_index=1.333, wavelength=550e-9, grid_size=(80, 80), model="projection", pixel_size=None, center=None): if isinstance(grid_size, numbers.Integral): grid_size = (grid_size, grid_size) if pixel_size is None: rl = radius ...
Simulate scattering at a sphere Parameters ---------- radius: float Radius of the sphere [m] sphere_index: float Refractive index of the object medium_index: float Refractive index of the surrounding medium wavelength: float Vacuum wavelength of the imaging light...
def assert_estimator_equal(left, right, exclude=None, **kwargs): left_attrs = [x for x in dir(left) if x.endswith("_") and not x.startswith("_")] right_attrs = [x for x in dir(right) if x.endswith("_") and not x.startswith("_")] if exclude is None: exclude = set() elif isinstance(exclude, str): ...
Check that two Estimators are equal Parameters ---------- left, right : Estimators exclude : str or sequence of str attributes to skip in the check kwargs : dict Passed through to the dask `assert_eq` method.
def state_cpfs(self) -> List[CPF]: _, cpfs = self.cpfs state_cpfs = [] for cpf in cpfs: name = utils.rename_next_state_fluent(cpf.name) if name in self.state_fluents: state_cpfs.append(cpf) state_cpfs = sorted(state_cpfs, key=lambda cpf: cpf.name) ...
Returns list of state-fluent CPFs.
def get_rng(obj=None):
    """Get an RNG seeded from the current time, the pid and an object id.

    Args:
        obj: any object; its ``id()`` is folded into the seed.

    Returns:
        np.random.RandomState: the seeded RNG.  When the module-level
        ``_RNG_SEED`` override is set, it takes precedence.
    """
    if _RNG_SEED is not None:
        return np.random.RandomState(_RNG_SEED)
    stamp = int(datetime.now().strftime("%Y%m%d%H%M%S%f"))
    seed = (id(obj) + os.getpid() + stamp) % 4294967295
    return np.random.RandomState(seed)
Get a good RNG seeded with time, pid and the object. Args: obj: some object to use to generate random seed. Returns: np.random.RandomState: the RNG.
def get_cas_client(self, request, provider, renew=False):
    """Return a CAS client object matching ``provider``.

    The service URL is derived from the current request with the
    ``ticket`` and ``provider`` GET parameters stripped, and cached on
    the instance for later validation.

    :param django.http.HttpRequest request: the current request object
    :param provider: the user's identity provider
    :param bool renew: if True, ask the CAS server to renew authentication
    :return: a CASFederateValidateUser for this provider and service URL
    """
    service_url = utils.get_current_url(request, {"ticket", "provider"})
    self.service_url = service_url
    return CASFederateValidateUser(provider, service_url, renew=renew)
return a CAS client object matching provider :param django.http.HttpRequest request: The current request object :param cas_server.models.FederatedIendityProvider provider: the user identity provider :return: The user CAS client object :rtype: :class:`federate.CASFederate...
def offset(self, offset): self.log(u"Applying offset to all fragments...") self.log([u" Offset %.3f", offset]) for fragment in self.fragments: fragment.interval.offset( offset=offset, allow_negative=False, min_begin_value=self.begin, ...
Move all the intervals in the list by the given ``offset``. :param offset: the shift to be applied :type offset: :class:`~aeneas.exacttiming.TimeValue` :raises TypeError: if ``offset`` is not an instance of ``TimeValue``
def query_source(self, source):
    """Return repo-filtered Layer objects whose url matches ``source``.

    :param source: the source url to filter on
    """
    return self._get_repo_filter(Layer.objects).filter(url=source)
Query by source
def on_before_transform_template(self, template_dict): template = SamTemplate(template_dict) self.existing_implicit_api_resource = copy.deepcopy(template.get(self.implicit_api_logical_id)) template.set(self.implicit_api_logical_id, ImplicitApiResource().to_dict()) errors = [] for...
Hook method that gets called before the SAM template is processed. The template has pass the validation and is guaranteed to contain a non-empty "Resources" section. :param dict template_dict: Dictionary of the SAM template :return: Nothing
def app_start(name, profile, **kwargs):
    """Start an application with the specified profile.

    Does nothing if the application is already running.

    :param name: name of the application to start
    :param profile: runtime profile to start it with
    :param kwargs: options forwarded to the tool Context
    """
    ctx = Context(**kwargs)
    ctx.execute_action('app:start', **{
        'node': ctx.repo.create_secure_service('node'),
        'name': name,
        'profile': profile
    })
Start an application with specified profile. Does nothing if application is already running.
def list_supported_categories():
    """Print the names of all supported external account categories.

    For example, "AWS" is a supported external account category name.
    """
    names = [category.name for category in get_supported_categories(api)]
    joined = COMMA_WITH_SPACE.join(map(str, names))
    print("Supported account categories by name: {0}".format(joined))
Prints a list of supported external account category names. For example, "AWS" is a supported external account category name.
def remove_suffix(text, suffix):
    """Remove ``suffix`` from the end of ``text`` if it is present.

    The original implementation used ``str.partition``, which splits at
    the FIRST occurrence of ``suffix`` anywhere in the string, so e.g.
    ``remove_suffix('a.git.tar', '.git')`` wrongly returned ``'a'``.
    Only a true trailing suffix is stripped now.

    >>> remove_suffix('name.git', '.git')
    'name'
    >>> remove_suffix('something special', 'sample')
    'something special'
    """
    # Guard against an empty suffix: text[:-0] would return ''.
    if suffix and text.endswith(suffix):
        return text[:-len(suffix)]
    return text
Remove the suffix from the text if it exists. >>> remove_suffix('name.git', '.git') 'name' >>> remove_suffix('something special', 'sample') 'something special'
def update(self, resource, timeout=-1):
    """Update a User.

    Args:
        resource (dict): Object to update.
        timeout: Timeout in seconds. Waits for task completion by
            default; the timeout does not abort the operation on the
            server, it only stops waiting for completion.

    Returns:
        dict: The updated resource, as returned by the client.
    """
    return self._client.update(resource, timeout=timeout,
                               default_values=self.DEFAULT_VALUES,
                               uri=self.URI)
Updates a User. Args: resource (dict): Object to update. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stop waiting for its completion. Returns: dict: Updat...
def search(self, **kwargs):
    """Search ASNs using the extended-search query parameters.

    :param kwargs: query options such as ``search`` (dict of QuerySets),
        ``include``/``exclude`` (field name lists) and ``fields``.
    :return: the parsed GET response for ``api/v4/as/``.
    """
    return super(ApiV4As, self).get(self.prepare_url(
        'api/v4/as/', kwargs))
Method to search asns based on extends search. :param search: Dict containing QuerySets to find asns. :param include: Array containing fields to include on response. :param exclude: Array containing fields to exclude on response. :param fields: Array containing fields to override defau...
def download_log(currentfile=None): if currentfile == None: return if not currentfile.endswith(".err.log"): currentfile=currentfile + ".err.log" list = get_base_ev3dev_cmd() + ['download','--force'] list.append(currentfile) env = os.environ.copy() env["PYTHONUSERBASE"] = THONNY_U...
downloads log of given .py file from EV3.
def save_graph(cn_topo, filename, showintfs=False, showaddrs=False):
    """Draw the topology and save it to an image file.

    :param cn_topo: the topology to render
    :param filename: output image path (format inferred from extension)
    :param showintfs: if True, label interfaces in the drawing
    :param showaddrs: if True, label addresses in the drawing
    """
    __do_draw(cn_topo, showintfs=showintfs, showaddrs=showaddrs)
    pyp.savefig(filename)
Save the topology to an image file
def format_content_type_object(repo, content_type, uuid):
    """Return a content object from a repository as a plain dict.

    :param Repo repo: the git repository.
    :param str content_type: model class to load.
    :param str uuid: id of the object to fetch.
    :returns: dict
    :raises NotFound: if the object does not exist in the repository.
    """
    try:
        storage_manager = StorageManager(repo)
        model_class = load_model_class(repo, content_type)
        return dict(storage_manager.get(model_class, uuid))
    except GitCommandError:
        raise NotFound('Object does not exist.')
Return a content object from a repository for a given content_type and uuid :param Repo repo: The git repository. :param str content_type: The content type to list :returns: dict
def enable(self, key_id, **kwargs):
    """Enable a deploy key for a project.

    Args:
        key_id (int): The ID of the key to enable
        **kwargs: Extra options to send to the server (e.g. sudo)
    """
    endpoint = '{0}/{1}/enable'.format(self.path, key_id)
    self.gitlab.http_post(endpoint, **kwargs)
Enable a deploy key for a project. Args: key_id (int): The ID of the key to enable **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabProjectDeployKeyError: If the key could...
def change_name(self, username): self.release_name() try: self.server.register_name(username) except UsernameInUseException: logging.log(', '.join(self.server.registered_names)) self.server.register_name(self.name) raise self.name =...
changes the username to given username, throws exception if username used
def task2ics(): from argparse import ArgumentParser, FileType from sys import stdout parser = ArgumentParser(description='Converter from Taskwarrior to iCalendar syntax.') parser.add_argument('indir', nargs='?', help='Input Taskwarrior directory (default to ~/.task)', default=expanduser('~/.task')) ...
Command line tool to convert from Taskwarrior to iCalendar
def precompile_python_code(context: Context):
    """Pre-compile the app's python modules to bytecode.

    Output is quiet unless verbosity is at least 2.
    """
    from compileall import compile_dir
    options = {'quiet': True} if context.verbosity < 2 else {}
    compile_dir(context.app.django_app_name, **options)
Pre-compiles python modules
def layerize(begin_update=None, predict=None, *args, **kwargs):
    """Wrap a function into a FunctionLayer.

    Usable directly (``layerize(fn)``) or as a decorator factory
    (``layerize()(fn)``) when ``begin_update`` is omitted.
    """
    if begin_update is None:
        def wrapper(begin_update):
            return FunctionLayer(begin_update, *args, **kwargs)
        return wrapper
    return FunctionLayer(begin_update, predict=predict, *args, **kwargs)
Wrap a function into a layer
def set_as_error(self, color=Qt.red):
    """Highlight the text span as a syntax error (wavy underline).

    :param color: Underline color
    :type color: QtGui.QColor
    """
    self.format.setUnderlineStyle(
        QTextCharFormat.WaveUnderline)
    self.format.setUnderlineColor(color)
Highlights text as a syntax error. :param color: Underline color :type color: QtGui.QColor
def _parse(cls, data, key=None): parse = cls.parse if cls.parse is not None else cls.get_endpoint() if callable(parse): data = parse(data) elif isinstance(parse, str): data = data[key] else: raise Exception('"parse" should be a callable or string got, ...
Parse a set of data to extract entity-only data. Use classmethod `parse` if available, otherwise use the `endpoint` class variable to extract data from a data blob.
def set_category(self, category):
    """Set the package's category element.

    Args:
        category: an existing category's name (str), or a Category
            object whose name is used.
    """
    label = category.name if isinstance(category, Category) else category
    self.find("category").text = label
Set package category Args: category: String of an existing category's name, or a Category object.
def capture(returns, factor_returns, period=DAILY):
    """Compute the capture ratio.

    The ratio of the strategy's annualized return to the factor's
    annualized return, using the same period convention for both.

    :param returns: strategy returns, noncumulative
    :param factor_returns: factor returns, noncumulative
    :param period: periodicity constant used for annualization
    :return: the capture ratio
    """
    strategy_annual = annual_return(returns, period=period)
    factor_annual = annual_return(factor_returns, period=period)
    return strategy_annual / factor_annual
Compute capture ratio. Parameters ---------- returns : pd.Series or np.ndarray Returns of the strategy, noncumulative. - See full explanation in :func:`~empyrical.stats.cum_returns`. factor_returns : pd.Series or np.ndarray Noncumulative returns of the factor to which beta is ...
def load_extra_data(cls, data):
    """Load extra JSON configuration parameters from a data buffer.

    The buffer must represent a JSON object; its keys are merged into
    ``cls._extra_config``.  On invalid JSON an error is written to
    stderr and the process exits.

    Args:
        data: str, the buffer to load the JSON data from.
    """
    try:
        cls._extra_config.update(json.loads(data))
    except ValueError as exception:
        # Bug fixes: the message read "Could convert", and the '{0:s}'
        # format spec raises TypeError for exception objects; also use
        # sys.exit rather than the site builtin exit().
        sys.stderr.write('Could not convert to JSON. {0!s}'.format(exception))
        sys.exit(-1)
Loads extra JSON configuration parameters from a data buffer. The data buffer must represent a JSON object. Args: data: str, the buffer to load the JSON data from.
def _ensure_slack(self, connector: Any, retries: int, backoff: Callable[[int], float]) -> None: connector = self._env_var if connector is None else connector slack: SlackClient = _create_slack(connector) self._slack = _SlackClientWrapper( slack=slack, ...
Ensure we have a SlackClient.
def twofilter_smoothing(self, t, info, phi, loggamma, linear_cost=False, return_ess=False, modif_forward=None, modif_info=None): ti = self.T - 2 - t if t < 0 or t >= self.T - 1: raise ValueError( 'two-filter smoothing: t...
Two-filter smoothing. Parameters ---------- t: time, in range 0 <= t < T-1 info: SMC object the information filter phi: function test function, a function of (X_t,X_{t+1}) loggamma: function a function of (X_{t+1}) linear_cost...
def _send(self, email_message): pre_send.send(self.__class__, message=email_message) if not email_message.recipients(): return False from_email = sanitize_address(email_message.from_email, email_message.encoding) recipients = [sanitize_ad...
Sends an individual message via the Amazon SES HTTP API. Args: email_message: A single Django EmailMessage object. Returns: True if the EmailMessage was sent successfully, otherwise False. Raises: ClientError: An interaction with the Amazon SES HTTP API ...
def post_build_time_coverage(self): from ambry.util.datestimes import expand_to_years years = set() if self.metadata.about.time: for year in expand_to_years(self.metadata.about.time): years.add(year) if self.identity.btime: for year in expand_to_ye...
Collect all of the time coverage for the bundle.
def _get_gradient_log_pdf(self): sub_vec = self.variable_assignments - self.model.mean.flatten() grad = - np.dot(self.model.precision_matrix, sub_vec) log_pdf = 0.5 * np.dot(sub_vec, grad) return grad, log_pdf
Method that finds gradient and its log at position
def value(dtype, arg): if not isinstance(arg, ir.Expr): arg = ir.literal(arg) if not isinstance(arg, ir.AnyValue): raise com.IbisTypeError( 'Given argument with type {} is not a value ' 'expression'.format(type(arg)) ) value = getattr(arg.op(), 'value', None) ...
Validates that the given argument is a Value with a particular datatype Parameters ---------- dtype : DataType subclass or DataType instance arg : python literal or an ibis expression If a python literal is given the validator tries to coerce it to an ibis literal. Returns ------- ...
def login(session, user, password, database=None, server=None): if not user: user = click.prompt("Username", type=str) if not password: password = click.prompt("Password", hide_input=True, type=str) try: with click.progressbar(length=1, label="Logging in...") as progressbar: ...
Logs into a MyGeotab server and stores the returned credentials. :param session: The current Session object. :param user: The username used for MyGeotab servers. Usually an email address. :param password: The password associated with the username. Optional if `session_id` is provided. :param database: ...
def internal_writer(self, outputs, stdout):
    """Write each output row to ``stdout``, one row per line, with the
    serialized values joined by tabs.
    """
    serialize = self.internal_serialize
    for row in outputs:
        fields = [serialize(item) for item in row]
        print("\t".join(fields), file=stdout)
Writer which outputs the python repr for each item.
def start_event(self): if self.with_outframe_pool: self.update_config() for name in self.outputs: self.outframe_pool[name] = ObjectPool( Frame, self.new_frame, self.config['outframe_pool_len']) try: self.on_start() except Ex...
Called by the event loop when it is started. Creates the output frame pools (if used) then calls :py:meth:`on_start`. Creating the output frame pools now allows their size to be configured before starting the component.
def filter(configs, settings): if isinstance(configs, pd.DataFrame): configs = configs[['a', 'b', 'm', 'n']].values filter_funcs = { 'dd': _filter_dipole_dipole, 'schlumberger': _filter_schlumberger, } keys = ['dd', 'schlumberger', ] allowed_keys = settings.get('only_types', ...
Main entry function to filtering configuration types Parameters ---------- configs: Nx4 array array containing A-B-M-N configurations settings: dict 'only_types': ['dd', 'other'], # filter only for those types Returns ------- dict results dict containing filter res...
def _get_json(self, path, params=None, base=JIRA_BASE_URL, ): url = self._get_url(path, base) r = self._session.get(url, params=params) try: r_json = json_loads(r) except ValueError as e: logg...
Get the json for a given path and params. :param path: The subpath required :type path: str :param params: Parameters to filter the json query. :type params: Optional[Dict[str, Any]] :param base: The Base JIRA URL, defaults to the instance base. :type base: Optional[str]...
def add_pagination_meta(self, params, meta): meta['page_size'] = params['page_size'] meta['page'] = params['page'] meta['prev'] = "page={0}&page_size={1}".format( params['page'] - 1, params['page_size'] ) if meta['page'] > 0 else None meta['next'] = "page={0}&page_siz...
Extend default meta dictionary value with pagination hints. Note: This method handler attaches values to ``meta`` dictionary without changing it's reference. This means that you should never replace ``meta`` dictionary with any other dict instance but simply modify ...
def __undo_filter_average(self, scanline): ai = -self.fu previous = self.prev for i in range(len(scanline)): x = scanline[i] if ai < 0: a = 0 else: a = scanline[ai] b = previous[i] scanline[i] = (x + ((a ...
Undo average filter.
def _compute_mean(self, C, g, mag, hypo_depth, rrup, vs30, pga_rock, imt): if hypo_depth > 100: hypo_depth = 100 delta = 0.00724 * 10 ** (0.507 * mag) R = np.sqrt(rrup ** 2 + delta ** 2) s_amp = self._compute_soil_amplification(C, vs30, pga_rock, imt) mean = ( ...
Compute mean according to equation 1, page 1706.
def connect(self): future = concurrent.Future() if self.connected: raise exceptions.ConnectError('already connected') LOGGER.debug('%s connecting', self.name) self.io_loop.add_future( self._client.connect(self.host, self.port), lambda f: self._on_conne...
Connect to the Redis server if necessary. :rtype: :class:`~tornado.concurrent.Future` :raises: :class:`~tredis.exceptions.ConnectError` :class:`~tredis.exceptinos.RedisError`
def post(self, request, format=None): serializer_class = self.get_serializer_class() serializer = serializer_class(data=request.data, instance=request.user) if serializer.is_valid(): serializer.save() return Response({'detail': _(u'Password successfully changed')}) ...
validate password change operation and return result
def hildatree2dgparentedtree(self): def transform(hilda_tree): if isinstance(hilda_tree, basestring) or not hasattr(hilda_tree, 'label'): return hilda_tree assert len(hilda_tree) == 2, "We can only handle binary trees." match = HILDA_REL_RE.match(hilda_tree.la...
Convert the tree from HILDA's format into a conventional binary tree, which can be easily converted into output formats like RS3.
def config_as_dict(cfg): ret = cfg.__dict__.copy() del ret['rand_crop_samplers'] assert isinstance(cfg.rand_crop_samplers, list) ret = merge_dict(ret, zip_namedtuple(cfg.rand_crop_samplers)) num_crop_sampler = len(cfg.rand_crop_samplers) ret['num_crop_sampler'] = num_crop_sampler ret['rand_c...
convert raw configuration to unified dictionary
def speed_difference(points):
    """Compute the velocity difference between adjacent points.

    The first element is 0, so the result has the same length as
    ``points``.

    Args:
        points (:obj:`list` of :obj:`Point`): points with a ``vel``
            attribute
    Returns:
        :obj:`list` of float: ``before.vel - after.vel`` for each
        consecutive pair (not changepoint indexes, despite the
        original docstring).
    """
    data = [0]
    for before, after in pairwise(points):
        data.append(before.vel - after.vel)
    return data
Computes the speed difference between each adjacent point Args: points (:obj:`Point`) Returns: :obj:`list` of float: Speed difference for each pair of consecutive points (the first element is 0)
def factorize_groupby_cols(self, groupby_cols): factor_list = [] values_list = [] for col in groupby_cols: if self.auto_cache or self.cache_valid(col): if not self.cache_valid(col): self.cache_factor([col]) col_rootdir = self[col].r...
factorizes all columns that are used in the groupby it will use cache carrays if available if not yet auto_cache is valid, it will create cache carrays
def ls(path, pattern='*'):
    """Like unix ls — list all files and dirs in ``path``, sorted.

    Non-recursive.

    :param path: directory to list
    :param pattern: glob pattern to match entries against
    :return: sorted list of matching paths
    """
    path_iter = glob(path, pattern, recursive=False)
    # sorted() accepts any iterable; the intermediate list() the
    # original wrapped around the iterator was redundant.
    return sorted(path_iter)
like unix ls - lists all files and dirs in path
def _assert_has_data_for_time(da, start_date, end_date): if isinstance(start_date, str) and isinstance(end_date, str): logging.warning( 'When using strings to specify start and end dates, the check ' 'to determine if data exists for the full extent of the desired ' 'inter...
Check to make sure data is in Dataset for the given time range. Parameters ---------- da : DataArray DataArray with a time variable start_date : datetime-like object or str start date end_date : datetime-like object or str end date Raises ------ AssertionErro...
def write(self, output):
    """Write ``output`` to the device buffer; playback loops when the
    data runs out.

    :param output: data to output
    :type output: numpy.ndarray
    """
    # Filled in by the driver with the number of samples written.
    w = c_int32()
    # Args: samples per channel, don't auto-start, 10.0 s timeout,
    # samples grouped by channel.
    self.WriteAnalogF64(self.bufsize, 0, 10.0, DAQmx_Val_GroupByChannel,
                        output, w, None);
Writes the data to be output to the device buffer, output will be looped when the data runs out :param output: data to output :type output: numpy.ndarray
def long2ip(l):
    """Convert a network byte order 32-bit integer to a dotted-quad IP.

    >>> long2ip(2130706433)
    '127.0.0.1'

    :raises TypeError: if ``l`` is outside [MIN_IP, MAX_IP]
    """
    if l < MIN_IP or l > MAX_IP:
        raise TypeError(
            "expected int between %d and %d inclusive" % (MIN_IP, MAX_IP))
    octets = (l >> 24 & 255, l >> 16 & 255, l >> 8 & 255, l & 255)
    return '%d.%d.%d.%d' % octets
Convert a network byte order 32-bit integer to a dotted quad ip address. >>> long2ip(2130706433) '127.0.0.1' >>> long2ip(MIN_IP) '0.0.0.0' >>> long2ip(MAX_IP) '255.255.255.255' >>> long2ip(None) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... T...
def query(self, *args, **kwargs):
    """Reimplemented from the base class to enforce an async view class.

    :param itercls: keyword argument, required; must be a subclass of
        AsyncViewBase.
    :raises ArgumentError: if ``itercls`` is missing or is not derived
        from AsyncViewBase.
    """
    itercls = kwargs.get('itercls')
    # Bug fix: issubclass(None, ...) raises TypeError, so a missing
    # itercls previously escaped as TypeError instead of the intended
    # ArgumentError.
    if itercls is None or not issubclass(itercls, AsyncViewBase):
        raise ArgumentError.pyexc("itercls must be defined "
                                  "and must be derived from AsyncViewBase")
    return super(AsyncBucket, self).query(*args, **kwargs)
Reimplemented from base class. This method does not add additional functionality of the base class' :meth:`~couchbase.bucket.Bucket.query` method (all the functionality is encapsulated in the view class anyway). However it does require one additional keyword argument :param cla...
def get_code(node, coder=Coder()):
    """Return a node's code as an HTML-escaped string.

    :param node: node to render
    :param coder: code generator; note the default instance is created
        once at import time and shared across calls.
    :return: HTML-escaped source text (quotes escaped too)
    """
    # cgi.escape was deprecated in Python 3.2 and removed in 3.8;
    # html.escape is the documented replacement.  (It additionally
    # escapes single quotes when quote=True.)
    from html import escape
    return escape(str(coder.code(node)), quote=True)
Return a node's code
def spin_z(particles, index):
    """Generate the spin-z projection operator for ``N = particles``
    spins, acting on the spin at position ``index`` (0..N-1).

    :return: a (2**N, 2**N) diagonal matrix with entries ±1/2
    """
    dim = 2 ** particles
    mat = np.zeros((dim, dim))
    for state in range(dim):
        # +1 on the diagonal when the selected bit is set, else -1.
        mat[state, state] = 1 if btest(state, index) == 1 else -1
    return 1 / 2. * mat
Generates the spin_z projection operator for a system of N=particles and for the selected spin index name. where index=0..N-1
def auth_list(**kwargs):
    """Show available authorization groups.

    :param kwargs: options forwarded to the tool Context
    """
    ctx = Context(**kwargs)
    ctx.execute_action('auth:group:list', **{
        'storage': ctx.repo.create_secure_service('storage'),
    })
Shows available authorization groups.
def get(self):
    """Reject GET requests on the login endpoint.

    All logins are done over POST; respond 401 with a hint to log in.
    """
    payload = {'status': '401 Unauthorized', 'return': 'Please log in'}
    self.set_status(401)
    self.set_header('WWW-Authenticate', 'Session')
    self.write(self.serialize(payload))
All logins are done over post, this is a parked endpoint .. http:get:: /login :status 401: |401| :status 406: |406| **Example request:** .. code-block:: bash curl -i localhost:8000/login .. code-block:: text GET /login HTTP/1.1 ...
def launch_shell(username, hostname, password, port=22):
    """Launch an interactive ssh shell via an external command.

    :param username: remote user
    :param hostname: remote host
    :param password: remote password (required)
    :param port: ssh port, default 22
    :return: False if any credential is missing, True after the command
        returns
    """
    if not username or not hostname or not password:
        return False
    # NOTE(review): the password is interpolated into a shell command
    # executed with os.system — it is visible in the process list and
    # subject to shell injection; consider subprocess with an argument
    # list instead.
    with tempfile.NamedTemporaryFile() as tmpFile:
        os.system(sshCmdLine.format(password, tmpFile.name, username,
                                    hostname, port))
    return True
Launches an ssh shell
def get_data(self, datatype, data): result = {} params = StopforumspamClient._set_payload(datatype, data) response = self.client.get( 'https://api.stopforumspam.org/api', params=params, proxies=self.proxies) response.raise_for_status() report = response.js...
Look for an IP address or an email address in the spammer database. :param datatype: Which type of data is to be looked up. Allowed values are 'ip' or 'mail'. :param data: The value to be looked up through the API. :type datatype: str :type data: str :re...
def delete(self):
    """Delete this property.

    :return: None
    :raises APIError: if delete was not successful
    """
    r = self._client._request('DELETE', self._client._build_url(
        'property', property_id=self.id))
    # Successful delete returns 204 No Content; anything else is an error.
    if r.status_code != requests.codes.no_content:
        raise APIError("Could not delete property: {} with id {}".format(
            self.name, self.id))
Delete this property. :return: None :raises APIError: if delete was not successful
def get_version(self, dependency): logger.debug("getting installed version for %s", dependency) stdout = helpers.logged_exec([self.pip_exe, "show", str(dependency)]) version = [line for line in stdout if line.startswith('Version:')] if len(version) == 1: version = version[0]....
Return the installed version parsing the output of 'pip show'.
def format_to_json(data):
    """Convert ``data`` into a JSON string.

    Pretty-prints when stdout is a tty; compact output otherwise.
    """
    pretty = sys.stdout.isatty()
    if pretty:
        return json.dumps(data, indent=4, separators=(',', ': '))
    return json.dumps(data)
Converts `data` into json If stdout is a tty it performs a pretty print.
def _format_options_usage(options): options_usage = "" for op in options: short, long = op.get_flags() if op.arg: flag = "{short} {arg} {long}={arg}".format( short=short, long=long, arg=op.arg) else: flag = "{short} {long}".format(short=short, long...
Format the Options-part of the usage text. Parameters ---------- options : list[sacred.commandline_options.CommandLineOption] A list of all supported commandline options. Returns ------- str Text formatted as a description for the commandline options
def unique_prefixes(context): res = {} for m in context.modules.values(): if m.keyword == "submodule": continue prf = new = m.i_prefix suff = 0 while new in res.values(): suff += 1 new = "%s%x" % (prf, suff) res[m] = new return res
Return a dictionary with unique prefixes for modules in `context`. Keys are 'module' statements and values are prefixes, disambiguated where necessary.
def prepare_image_data(extracted_image_data, output_directory, image_mapping): img_list = {} for image, caption, label in extracted_image_data: if not image or image == 'ERROR': continue image_location = get_image_location( image, output...
Prepare and clean image-data from duplicates and other garbage. :param: extracted_image_data ([(string, string, list, list) ...], ...])): the images and their captions + contexts, ordered :param: tex_file (string): the location of the TeX (used for finding the associated images; the TeX is assu...
def _total_counts(seqs, seqL, aligned=False): total = Counter() if isinstance(seqs, list): if not aligned: l = len([total.update(seqL[s].freq) for s in seqs]) else: l = len([total.update(seqL[s].freq) for s in seqs if seqL[s].align > 0]) elif isinstance(seqs, dict): ...
Counts total seqs after each step
def _next_sample_index(self): idx = self.active_index_ self.active_index_ += 1 if self.active_index_ >= len(self.streams_): self.active_index_ = 0 while self.streams_[idx] is None: idx = self.active_index_ self.active_index_ += 1 if self.ac...
Rotates through each active sampler by incrementing the index
def array(self): if self._ind < self.shape: return self._values[:self._ind] if not self._cached: ind = int(self._ind % self.shape) self._cache[:self.shape - ind] = self._values[ind:] self._cache[self.shape - ind:] = self._values[:ind] self._cac...
Returns a numpy array containing the last stored values.
def socket(self, blocking=True):
    """Yield the shared socket while holding its lock.

    Intended for ``with``-style use via a generator context manager.

    :param blocking: passed to the lock's acquire; when False and the
        lock is contended, nothing is yielded (the generator is empty).
    """
    if self._socket_lock.acquire(blocking):
        try:
            yield self._socket
        finally:
            # Always release, even if the consumer raises into us.
            self._socket_lock.release()
Blockingly yield the socket
def run(self, format=None, reduce=False, *args, **kwargs):
    """Generate the underlying graph and print it to stdout.

    :param format: key into the module-level FORMATTERS table selecting
        the output formatter.
    :param reduce: if True, apply a transitive reduction to the graph
        before formatting.
    """
    plan = self._generate_plan()
    if reduce:
        plan.graph.transitive_reduction()
    fn = FORMATTERS[format]
    fn(sys.stdout, plan.graph)
    sys.stdout.flush()
Generates the underlying graph and prints it.
def __telnet_event_listener(self, ip, callback): tn = telnetlib.Telnet(ip, 2708) self._last_event = "" self._telnet_running = True while self._telnet_running: try: raw_string = tn.read_until(b'.\n', 5) if len(raw_string) >= 2 and raw_string[-2:...
creates a telnet connection to the lightpad
def mkdir(dir, enter):
    """Create directory ``dir`` (including parents) if it does not exist.

    :param dir: path of the directory to create
    :param enter: unused; kept for backward compatibility with callers
    """
    # exist_ok avoids the check-then-create race of the original
    # os.path.exists() guard.
    os.makedirs(dir, exist_ok=True)
Create directory with template for topic of the current environment
def alg2keytype(alg): if not alg or alg.lower() == "none": return "none" elif alg.startswith("RS") or alg.startswith("PS"): return "RSA" elif alg.startswith("HS") or alg.startswith("A"): return "oct" elif alg.startswith("ES") or alg.startswith("ECDH-ES"): return "EC" ...
Go from algorithm name to key type. :param alg: The algorithm name :return: The key type
def get(self):
    """Return the sbo arch, normalizing x86 and arm variants.

    i?86 machines map to ``self.x86`` and arm machines to ``self.arm``;
    anything else is returned unchanged.
    """
    # NOTE(review): the first test reads self.arch but the second reads
    # self.meta.arch — this looks inconsistent; confirm which attribute
    # actually holds the machine string.
    if self.arch.startswith("i") and self.arch.endswith("86"):
        self.arch = self.x86
    elif self.meta.arch.startswith("arm"):
        self.arch = self.arm
    return self.arch
Return sbo arch
def source_sum(self):
    """The sum of the unmasked ``data`` values within the source segment.

    Returns NaN (carrying the data unit) when every pixel is masked.
    """
    if self._is_completely_masked:
        # Preserve the unit on the NaN sentinel.
        return np.nan * self._data_unit
    return np.sum(self.values)
The sum of the unmasked ``data`` values within the source segment. .. math:: F = \\sum_{i \\in S} (I_i - B_i) where :math:`F` is ``source_sum``, :math:`(I_i - B_i)` is the ``data``, and :math:`S` are the unmasked pixels in the source segment. Non-finite pixel values (e.g. NaN,...
def compose(*funcs):
    """Compose any number of unary functions into a single function.

    ``compose(f, g, h)(x)`` computes ``f(g(h(x)))``; the innermost
    (last) function may take arbitrary arguments.
    """
    def chain(outer, inner):
        def composed(*args, **kwargs):
            return outer(inner(*args, **kwargs))
        return composed
    return functools.reduce(chain, funcs)
Compose any number of unary functions into a single unary function. >>> import textwrap >>> from six import text_type >>> stripped = text_type.strip(textwrap.dedent(compose.__doc__)) >>> compose(text_type.strip, textwrap.dedent)(compose.__doc__) == stripped True Compose also allows the innermost function to tak...
def get_crimes_area(self, points, date=None, category=None): if isinstance(category, CrimeCategory): category = category.id method = 'crimes-street/%s' % (category or 'all-crime') kwargs = { 'poly': encode_polygon(points), } crimes = [] if date is ...
Get crimes within a custom area. Uses the crime-street_ API call. .. _crime-street: https//data.police.uk/docs/method/crime-street/ :rtype: list :param list points: A ``list`` of ``(lat, lng)`` tuples. :param date: The month in which the crimes were reported in the format ...
def _table_set_column(table, name, expr): expr = table._ensure_expr(expr) if expr._name != name: expr = expr.name(name) if name not in table: raise KeyError('{0} is not in the table'.format(name)) proj_exprs = [] for key in table.columns: if key == name: proj_expr...
Replace an existing column with a new expression Parameters ---------- name : string Column name to replace expr : value expression New data for column Returns ------- set_table : TableExpr New table expression
def set_location(self, uri, size, checksum, storage_class=None):
    """Set the URI location of the object on external storage.

    Creates a new FileInstance with the given uri/size/checksum,
    attaches it to this object and adds it to the DB session.

    :param uri: full URI to the object.
    :param size: size of the file.
    :param checksum: checksum of the file.
    :param storage_class: optional storage class for the instance.
    :returns: self, to allow chaining.
    """
    # NOTE(review): documentation elsewhere says this raises
    # FileInstanceAlreadySetError when a file is already set; no check
    # is visible here, so presumably set_uri or a constraint enforces
    # it — confirm.
    self.file = FileInstance()
    self.file.set_uri(
        uri, size, checksum, storage_class=storage_class
    )
    db.session.add(self.file)
    return self
Set only URI location of for object. Useful to link files on externally controlled storage. If a file instance has already been set, this methods raises an ``FileInstanceAlreadySetError`` exception. :param uri: Full URI to object (which can be interpreted by the storage int...
def get_smart_storage_config(self, smart_storage_config_url):
    """Return a SmartStorageConfig instance for a controller.

    :param smart_storage_config_url: URL of the controller's smart
        storage configuration resource.
    """
    return (smart_storage_config.
            HPESmartStorageConfig(self._conn, smart_storage_config_url,
                                  redfish_version=self.redfish_version))
Returns a SmartStorageConfig Instance for each controller.
def get_file(fn):
    """Return the contents of data file ``fn`` as a list of unicode
    lines, each stripped of surrounding whitespace.

    :param fn: filename relative to this module's ``data`` directory
    """
    path = os.path.join(os.path.dirname(__file__), 'data', fn)
    # Context manager closes the handle; the original leaked it.
    with open(path, 'rb') as f:
        return [line.decode('utf-8').strip() for line in f]
Returns file contents in unicode as list.
def keytype_path_to(args, keytype): if keytype == "admin": return '{cluster}.client.admin.keyring'.format( cluster=args.cluster) if keytype == "mon": return '{cluster}.mon.keyring'.format( cluster=args.cluster) return '{cluster}.bootstrap-{what}.keyring'.format( ...
Get the local filename for a keyring type
def findlast(*args, **kwargs):
    """Find the last matching element in a list and return it.

    Usage::

        findlast(element, list_)
        findlast(of=element, in_=list_)
        findlast(where=predicate, in_=list_)

    :raises IndexError: if no element matches
    """
    list_, position = _index(*args, start=sys.maxsize, step=-1, **kwargs)
    if position < 0:
        raise IndexError("element not found")
    return list_[position]
Find the last matching element in a list and return it. Usage:: findlast(element, list_) findlast(of=element, in_=list_) findlast(where=predicate, in_=list_) :param element, of: Element to search for (by equality comparison) :param where: Predicate defining an element to search fo...
def _validate_namespaces(self, input_namespaces): output_namespaces = [] if input_namespaces == []: return output_namespaces elif '*' in input_namespaces: if len(input_namespaces) > 1: warning = 'Warning: Multiple namespaces are ' warning +...
Converts a list of db namespaces to a list of namespace tuples, supporting basic commandline wildcards
def string(self, units: typing.Optional[str] = None) -> str: if not units: _units: str = self._units else: if not units.upper() in CustomPressure.legal_units: raise UnitsError("unrecognized pressure unit: '" + units + "'") _units = units.upper() ...
Return a string representation of the pressure, using the given units.
def set(self, field, value):
    """Set the value of an app field.

    :param str field: the field name; the immutable fields ``uuid`` and
        ``key`` cannot be set.
    :param value: the new value of the app field.
    :raises ValueError: when attempting to set ``uuid`` or ``key``.
    """
    if field == 'uuid':
        raise ValueError('uuid cannot be set')
    if field == 'key':
        raise ValueError(
            'key cannot be set. Use \'reset_key\' method')
    self.data[field] = value
Sets the value of an app field. :param str field: The name of the app field. Trying to set immutable fields ``uuid`` or ``key`` will raise a ValueError. :param value: The new value of the app field. :raises: ValueError
def set(self, section, option, value=''):
    """Set an option, defaulting ``value`` to the empty string.

    Overridden from RawConfigParser only to change the default for the
    ``value`` argument and to validate that it is a string first.
    """
    self._string_check(value)
    super(GitConfigParser, self).set(section, option, value)
This is overridden from the RawConfigParser merely to change the default value for the 'value' argument.
def render_source(self, source, variables=None):
    """Render a template source string with the passed variables.

    :param source: template source text
    :param variables: mapping of template variables (defaults to empty)
    :return: the rendered string
    """
    context = {} if variables is None else variables
    template = self._engine.from_string(source)
    return template.render(**context)
Render a source with the passed variables.
def translate(self, exc):
    """Translate a boto storage 404 into this backend's error class.

    :param exc: the exception raised by boto
    :return: an instance of ``self.error_cls`` for 404 storage
        responses, otherwise None (meaning: do not translate).
    """
    from boto.exception import StorageResponseError
    is_storage_error = isinstance(exc, StorageResponseError)
    if is_storage_error and exc.status == 404:
        return self.error_cls(str(exc))
    return None
Return whether or not to do translation.
def ekf1_pos(EKF1): global ekf_home from . import mavutil self = mavutil.mavfile_global if ekf_home is None: if not 'GPS' in self.messages or self.messages['GPS'].Status != 3: return None ekf_home = self.messages['GPS'] (ekf_home.Lat, ekf_home.Lng) = gps_offset(ekf_home.Lat, ekf_home...
calculate EKF position when EKF disabled