code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def remove_message(self, message, afterwards=None):
    """Queue removal of a message from the notmuch index.

    :param message: message to remove
    :param afterwards: callback to trigger after removing, or None
    :raises DatabaseROError: if the database is opened read-only
    """
    if self.ro:
        raise DatabaseROError()
    path = message.get_filename()
    self.writequeue.append(('remove', afterwards, path))
Remove a message from the notmuch index :param message: message to remove :type message: :class:`Message` :param afterwards: callback to trigger after removing :type afterwards: callable or None
def data_from_url(self, url, apple_fix=False):
    """Download iCal data from *url* and return it decoded.

    :param url: URL to download
    :param apple_fix: fix Apple bugs (protocol type and tzdata in iCal)
    :raises ConnectionError: if the response body is empty
    """
    if apple_fix:
        url = apple_url_fix(url)
    _, content = self.http.request(url)
    if not content:
        raise ConnectionError('Could not get data from %s!' % url)
    return self.decode(content, apple_fix=apple_fix)
Download iCal data from URL. :param url: URL to download :param apple_fix: fix Apple bugs (protocol type and tzdata in iCal) :return: decoded (and fixed) iCal data
def tag_fig_ordinal(tag):
    """Return the 1-based position of a main-figure ``fig`` tag.

    Counts preceding same-named tags that also lack the 'specific-use'
    attribute. Returns None for child figures (tags that carry
    'specific-use').
    """
    # fix: dropped unused `tag_count` local; renamed the lambda parameter
    # so it no longer shadows the enclosing `tag` argument.
    if 'specific-use' not in tag.attrs:
        previous_main = [t for t in tag.find_all_previous(tag.name)
                         if 'specific-use' not in t.attrs]
        return len(previous_main) + 1
Meant for finding the position of fig tags with respect to whether they are for a main figure or a child figure
def del_option_by_name(self, name):
    """Delete every option whose ``name`` matches from the message.

    :param name: option name to remove
    """
    # iterate a copy so removal while looping is safe
    for option in list(self._options):
        assert isinstance(option, Option)
        if option.name == name:
            self._options.remove(option)
Delete an option from the message by name :type name: String :param name: option name
def as_view(cls, endpoint, protocol, *init_args, **init_kwargs):
    """Hook up an endpoint: return a wrapper that instantiates the
    resource class per-request and dispatches by *protocol*."""
    def _wrapper(request, *args, **kwargs):
        instance = cls(*init_args, endpoint=endpoint, request=request,
                       **init_kwargs)
        if protocol == Resource.Protocol.http:
            return instance._wrap_http(cls.dispatch, endpoint=endpoint,
                                       *args, **kwargs)
        elif protocol == Resource.Protocol.websocket:
            return instance._wrap_ws(cls.dispatch, endpoint=endpoint,
                                     *args, **kwargs)
        elif protocol == Resource.Protocol.amqp:
            return instance._wrap_amqp(endpoint, *args, **kwargs)
        else:
            raise Exception('Communication protocol not specified')
    return _wrapper
Used for hooking up the endpoints. Returns a wrapper function that creates a new instance of the resource class and calls the correct view method for it.
def open_external(self, fnames=None):
    """Open files with the default external application.

    Falls back to the currently selected filenames when *fnames* is None.
    """
    if fnames is None:
        fnames = self.get_selected_filenames()
    for fname in fnames:
        self.open_outside_spyder([fname])
Open files with default application
def json_error(code, message):
    """Return a JSON-ified error object as a ``(response, code)`` tuple."""
    # repr() keeps non-string messages serializable
    return jsonify(dict(request=request.path, message=repr(message))), code
Returns a JSON-ified error object
def get_absolute_url(self):
    """Return override_url if given; otherwise a blog URL when the
    destination is a blog, else a regular article URL."""
    if self.override_url:
        return self.override_url
    if self.destination.is_blog:
        return reverse('blog_entry_detail',
                       args=[self.destination.slug, self.slug])
    return reverse('article_detail', args=[self.slug])
If override_url was given, use that. Otherwise, if the content belongs to a blog, use a blog url. If not, use a regular article url.
def get_items(self, maxlevel):
    """Return all tree items with a level <= `maxlevel`.

    Top-level items are level 1; their children level 2, and so on.
    """
    itemlist = []

    def add_to_itemlist(item, maxlevel, level=1):
        level += 1
        for index in range(item.childCount()):
            citem = item.child(index)
            itemlist.append(citem)
            if level <= maxlevel:
                add_to_itemlist(citem, maxlevel, level)

    for tlitem in self.get_top_level_items():
        itemlist.append(tlitem)
        if maxlevel > 0:
            add_to_itemlist(tlitem, maxlevel=maxlevel)
    return itemlist
Return all items with a level <= `maxlevel`
def removeSpacePadding(str, blocksize=AES_blocksize):
    'Remove padding with spaces'
    pad_len = 0
    for char in str[::-1]:
        if char == ' ':
            pad_len += 1
        else:
            break
    # BUG FIX: when there is no padding, str[:-0] == str[:0] == '' and the
    # whole input was discarded; only slice when padding actually exists.
    if pad_len:
        str = str[:-pad_len]
    return str
Remove padding with spaces
def set_quiet(mres, parent, global_options):
    """Set the 'quiet' property on the MultiResult.

    Uses the global option when set, otherwise inherits from *parent*.
    """
    quiet = global_options.get('quiet')
    mres._quiet = parent.quiet if quiet is None else quiet
Sets the 'quiet' property on the MultiResult
def get_service(self, name):
    """Retrieve a stored ServiceConfig from the keychain or raise.

    :param name: the service name to retrieve
    :return: the configured service
    """
    self._convert_connected_app()
    services = self.project_config.services
    if not services or name not in services:
        self._raise_service_not_valid(name)
    if name not in self.services:
        self._raise_service_not_configured(name)
    return self._get_service(name)
Retrieve a stored ServiceConfig from the keychain or exception :param name: the service name to retrieve :type name: str :rtype ServiceConfig :return the configured Service
def parse_option(self, option, block_name, *values):
    """Parse domain values for option, adding them to ``self.domains``.

    :raises ValueError: if no values are given or no domain was parsed
    """
    _extra_subs = ('www', 'm', 'mobile')
    if len(values) == 0:
        raise ValueError
    for value in values:
        value = value.lower()
        if not _RE_PROTOCOL.match(value):
            value = 'http://' + value
        parsed = urlparse.urlparse(value)
        if parsed:
            domain = parsed.hostname
            if domain and _RE_TLD.search(domain):
                domain = _RE_WWW_SUB.sub('', domain)
                # bare second-level domain: also register common subdomains
                if len(domain.split('.')) == 2:
                    for sub in _extra_subs:
                        self.domains.add('{0}.{1}'.format(sub, domain))
                self.domains.add(domain)
    if not self.domains:
        raise ValueError
Parse domain values for option.
def array_to_npy(array_like):
    """Serialize an array-like object to bytes in the NPY format.

    Args:
        array_like: np.array / Iterable / int / float to convert.

    Returns:
        bytes: the NPY-encoded payload.
    """
    out = BytesIO()
    np.save(out, array_like)
    return out.getvalue()
Convert an array like object to the NPY format. To understand better what an array like object is see: https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays Args: array_like (np.array or Iterable or int or float): array like object to be converted to NPY. Returns: (obj): NPY array.
def build(self, text, matrix, skim_depth=10, d_weights=False):
    """Skim the top `skim_depth` KDE-similarity pairs for each term in
    *matrix* and add them as weighted edges on the graph.

    When *d_weights* is true the weight is inverted (1 - weight) so that
    "close" words get low edge weights.
    """
    # NOTE(review): `matrix.keys` is passed uncalled — confirm whether
    # `matrix.keys()` was intended.
    for anchor in bar(matrix.keys):
        n1 = text.unstem(anchor)
        pairs = matrix.anchored_pairs(anchor).items()
        for term, weight in list(pairs)[:skim_depth]:
            if d_weights:
                weight = 1 - weight
            n2 = text.unstem(term)
            self.graph.add_edge(n1, n2, weight=float(weight))
1. For each term in the passed matrix, score its KDE similarity with all other indexed terms. 2. With the ordered stack of similarities in hand, skim off the top X pairs and add them as edges. Args: text (Text): The source text instance. matrix (Matrix): An indexed term matrix. skim_depth (int): The number of siblings for each term. d_weights (bool): If true, give "close" words low edge weights.
def print_projects(self, projects):
    """Print ``name: id`` for each project."""
    for project in projects:
        print('{}: {}'.format(project.name, project.id))
Print method for projects.
def get(self, uri, params=None):
    """Make a generic GET request against ``BASE_URL`` joined with *uri*.

    :param uri: path relative to ``self.BASE_URL``
    :param params: optional query parameters (dict)
    :return: the ``requests.Response``

    Fix: replaced the mutable default ``params={}`` with ``None``
    (``requests`` treats them identically).
    NOTE(review): ``verify=False`` disables TLS verification — confirm
    this is intentional.
    """
    url = urlparse.urljoin(self.BASE_URL, uri)
    logging.debug("Requesting URL: " + str(url))
    return requests.get(url, params=params, verify=False, auth=self.auth)
A generic method to make GET requests
def param(self, key, default=None):
    """Look up a global parameter, returning *default* when absent."""
    return self.parameters[key] if key in self.parameters else default
for accessing global parameters
def show_instance(name, conn=None, call=None):
    """Get a VM on this OpenStack account.

    CLI Example:

    .. code-block:: bash

        salt-cloud -a show_instance myserver
    """
    if call != 'action':
        raise SaltCloudSystemExit(
            'The show_instance action must be called with -a or --action.'
        )
    if conn is None:
        conn = get_conn()
    node = conn.get_server(name, bare=True)
    ret = dict(node)
    ret['id'] = node.id
    ret['name'] = node.name
    ret['size'] = conn.get_flavor(node.flavor.id).name
    ret['state'] = node.status
    ret['private_ips'] = _get_ips(node, 'private')
    ret['public_ips'] = _get_ips(node, 'public')
    ret['floating_ips'] = _get_ips(node, 'floating')
    ret['fixed_ips'] = _get_ips(node, 'fixed')
    # image may be either a plain ID string or an object with an id
    if isinstance(node.image, six.string_types):
        ret['image'] = node.image
    else:
        ret['image'] = conn.get_image(node.image.id).name
    return ret
Get VM on this OpenStack account name name of the instance CLI Example .. code-block:: bash salt-cloud -a show_instance myserver
def save(self, basename):
    """Save a set of V1 images for flashing; *basename* prefixes filenames."""
    irom_segment = self.get_irom_segment()
    if irom_segment is not None:
        # irom is written to its own file named by its flash offset
        with open("%s0x%05x.bin" % (basename,
                                    irom_segment.addr - ESP8266ROM.IROM_MAP_START),
                  "wb") as f:
            f.write(irom_segment.data)
    normal_segments = self.get_non_irom_segments()
    with open("%s0x00000.bin" % basename, 'wb') as f:
        self.write_common_header(f, normal_segments)
        checksum = ESPLoader.ESP_CHECKSUM_MAGIC
        for segment in normal_segments:
            checksum = self.save_segment(f, segment, checksum)
        self.append_checksum(f, checksum)
Save a set of V1 images for flashing. Parameter is a base filename.
def dispatch(self, message):
    """Dispatch *message* to the first callback whose validator matches.

    Args:
        message (dict): The message to dispatch.

    Raises:
        ArgumentError: if no registered validator matches.
    """
    for validator, callback in self.validators:
        if validator.matches(message):
            callback(message)
            return
    raise ArgumentError("No handler was registered for message",
                        message=message)
Dispatch a message to a callback based on its schema. Args: message (dict): The message to dispatch
def _trim_value(self, value):
    """Strip surrounding double quotes off *value*, un-escaping inner
    quotes, literal backslashes and unicode escapes.

    Unquoted values are returned unmodified.
    """
    if value[0] == '"':
        assert value[-1] == '"'
        value = value[1:-1].replace('\\"', '"').replace("\\\\", "\\")
        return Parser._unescape_re.sub(Parser._unescape_fn, value)
    return value
Trim double quotes off the ends of a value, un-escaping inner double quotes and literal backslashes. Also convert escapes to unicode. If the string is not quoted, return it unmodified.
def is_expanded(request, key):
    """Return True if *key* (or ``~all``) appears in the request's
    ``expand`` query parameter.

    ``expand`` is a comma-separated list of dot-separated field paths.
    """
    expand = request.query_params.get("expand", "")
    expand_fields = []
    for path in expand.split(","):
        # fix: the original built `[e for e in e.split(".")]`, a pointless
        # identity comprehension that also shadowed its loop variable
        expand_fields.extend(path.split("."))
    return "~all" in expand_fields or key in expand_fields
Examines request object to return boolean of whether passed field is expanded.
def allsame(list_, strict=True):
    """Check whether every item in *list_* equals the first item.

    Returns True for an empty list.
    """
    if len(list_) == 0:
        return True
    return list_all_eq_to(list_, list_[0], strict)
checks to see if list is equal everywhere Args: list_ (list): Returns: True if all items in the list are equal
def analyze(self, id):
    """Request analysis of package *id* and return the decoded result.

    :param id: Package ID as an int.
    :return: decoded Analysis object
    """
    schema = AnalysisSchema()
    resp = self.service.post(self.base + str(id) + '/',
                             params={'process': 'analyze'})
    return self.service.decode(schema, resp)
Get a list of tests that will be skipped for a package. :param id: Package ID as an int. :return: :class:`packages.Analysis <packages.Analysis>` object :rtype: packages.Analysis
def parse_list(self):
    """Parse a list from the token stream."""
    try:
        items = [self.parse()
                 for _ in self.collect_tokens_until('CLOSE_BRACKET')]
        return List(items)
    except IncompatibleItemType as exc:
        raise self.error(f'Item {str(exc.item)!r} is not a '
                         f'{exc.subtype.__name__} tag') from None
Parse a list from the token stream.
def verify(ctx, file, account):
    """Verify a signed message, reading from stdin when no file is given."""
    if not file:
        print_message("Prompting for message. Terminate with CTRL-D", "info")
        file = click.get_text_stream("stdin")
    m = Message(file.read(), bitshares_instance=ctx.bitshares)
    try:
        if m.verify():
            print_message("Verified", "success")
        else:
            print_message("not verified", "error")
    except InvalidMessageSignature:
        print_message("Signature INVALID!", "error")
Verify a signed message
def write_stilde(self, stilde_dict, group=None):
    """Write stilde for each IFO to file.

    :param stilde_dict: dict of FrequencySeries keyed by IFO
    :param group: group to write the strain to; top level when None
    """
    subgroup = self.data_group + "/{ifo}/stilde"
    group = subgroup if group is None else '/'.join([group, subgroup])
    for ifo, stilde in stilde_dict.items():
        key = group.format(ifo=ifo)
        self[key] = stilde
        self[key].attrs['delta_f'] = stilde.delta_f
        self[key].attrs['epoch'] = float(stilde.epoch)
Writes stilde for each IFO to file. Parameters ----------- stilde : {dict, FrequencySeries} A dict of FrequencySeries where the key is the IFO. group : {None, str} The group to write the strain to. If None, will write to the top level.
def unhook_wnd_proc(self):
    """Restore the previous Window message handler, if one was hooked."""
    if not self.__local_wnd_proc_wrapped:
        return
    SetWindowLong(self.__local_win_handle, GWL_WNDPROC,
                  self.__old_wnd_proc)
    self.__local_wnd_proc_wrapped = None
Restore previous Window message handler
def verify_account(self, email_address):
    """Return True if a HelloSign account exists for *email_address*."""
    request = self._get_request()
    resp = request.post(self.ACCOUNT_VERIFY_URL, {
        'email_address': email_address
    })
    return ('account' in resp)
Verify whether a HelloSign Account exists Args: email_address (str): Email address for the account to verify Returns: True or False
def list_vcls(self, service_id, version_number):
    """List the uploaded VCLs for a particular service and version."""
    content = self._fetch("/service/%s/version/%d/vcl"
                          % (service_id, version_number))
    return map(lambda x: FastlyVCL(self, x), content)
List the uploaded VCLs for a particular service and version.
def full_path(self):
    """Return the full path to the file: the path itself if absolute,
    otherwise joined under ``app_root``."""
    if Path(self.path).is_absolute():
        return self.path
    return str(self.app_root / self.path)
Return the full path to the file.
def project_version(full_version):
    """project_version context manager: derive and export the project's
    name, parsed version and fullname from *full_version*."""
    v = _parse_project_version(full_version)
    name = project_name()
    project_fullname = '-'.join([name, v])
    return _setenv(project_full_version=full_version,
                   project_version=v,
                   project_name=name,
                   project_fullname=project_fullname)
project_version context manager
def question_default_add_related_pks(self, obj):
    """Cache the related choice primary keys on *obj* as ``_choice_pks``."""
    if not hasattr(obj, '_choice_pks'):
        obj._choice_pks = list(obj.choices.values_list('pk', flat=True))
Add related primary keys to a Question instance.
def to_dict(self):
    """Return entity data as a dictionary keyed by declared field name;
    missing attributes map to None."""
    result = {}
    for field_name in self.meta_.declared_fields:
        result[field_name] = getattr(self, field_name, None)
    return result
Return entity data as a dictionary
def log2(x, context=None):
    """Return the base-two logarithm of x."""
    args = (BigFloat._implicit_convert(x),)
    return _apply_function_in_current_context(
        BigFloat, mpfr.mpfr_log2, args, context,
    )
Return the base-two logarithm of x.
def add_file_arg(self, filename):
    """Append *filename* as an argument (order preserved, after options)
    and record it once in the required input-file list.

    @param filename: file to add as argument.
    """
    self.__arguments.append(filename)
    if filename not in self.__input_files:
        self.__input_files.append(filename)
Add a file argument to the executable. Arguments are appended after any options and their order is guaranteed. Also adds the file name to the list of required input data for this job. @param filename: file to add as argument.
def on_heartbeat(self, message):
    """Handle a heartbeat event from the websocket connection.

    Args:
        message (dict): Full message from the Discord websocket.
    """
    logger.info("Got a heartbeat")
    logger.info("Heartbeat message: {}".format(message))
    self.heartbeat_thread.update_sequence(message['d'])
Runs on a heartbeat event from websocket connection Args: message (dict): Full message from Discord websocket connection
def tzinfo_eq(tzinfo1, tzinfo2, startYear=2000, endYear=2020):
    """Compare UTC offsets and DST transitions of two tzinfos over the
    given year span.

    NOTE(review): ``range(startYear, endYear)`` excludes *endYear* itself —
    confirm whether the final year should also be checked.
    """
    if tzinfo1 == tzinfo2:
        return True
    if tzinfo1 is None or tzinfo2 is None:
        return False

    def dt_test(dt):
        if dt is None:
            return True
        return tzinfo1.utcoffset(dt) == tzinfo2.utcoffset(dt)

    if not dt_test(datetime.datetime(startYear, 1, 1)):
        return False
    for year in range(startYear, endYear):
        for transitionTo in 'daylight', 'standard':
            t1 = getTransition(transitionTo, year, tzinfo1)
            t2 = getTransition(transitionTo, year, tzinfo2)
            if t1 != t2 or not dt_test(t1):
                return False
    return True
Compare offsets and DST transitions from startYear to endYear.
def download_file(fname, target_dir=None, force=False):
    """Download *fname* from ``datasets_url`` into *target_dir*, skipping
    the download when the file exists unless *force* is set.

    Returns the full path of the (possibly pre-existing) file.
    """
    target_dir = target_dir or temporary_dir()
    target_fname = os.path.join(target_dir, fname)
    if force or not os.path.isfile(target_fname):
        urlretrieve(urljoin(datasets_url, fname), target_fname)
    return target_fname
Download fname from the datasets_url, and save it to target_dir, unless the file already exists, and force is False. Parameters ---------- fname : str Name of the file to download target_dir : str Directory where to store the file force : bool Force downloading the file, if it already exists Returns ------- fname : str Full path of the downloaded file
def output_vm(gandi, vm, datacenters, output_keys, justify=10):
    """Helper to output a vm's information."""
    output_generic(gandi, vm, output_keys, justify)
    if 'datacenter' in output_keys:
        for dc in datacenters:
            if dc['id'] == vm['datacenter_id']:
                dc_name = dc.get('dc_code', dc.get('iso', ''))
                break
        output_line(gandi, 'datacenter', dc_name, justify)
    if 'ip' in output_keys:
        for iface in vm['ifaces']:
            gandi.separator_line()
            output_line(gandi, 'bandwidth', iface['bandwidth'], justify)
            for ip in iface['ips']:
                output_line(gandi, 'ip%s' % ip['version'], ip['ip'], justify)
Helper to output a vm information.
def repair(self, verbose=False, joincomp=False, remove_smallest_components=True):
    """Run MeshFix's default repair process, updating ``self.v`` and
    ``self.f`` in place.

    :param verbose: enable debug printing
    :param joincomp: attempt to join nearby open components
    :param remove_smallest_components: keep only the largest component
    """
    assert self.f.shape[1] == 3, 'Face array must contain three columns'
    assert self.f.ndim == 2, 'Face array must be 2D'
    self.v, self.f = _meshfix.clean_from_arrays(
        self.v, self.f, verbose, joincomp, remove_smallest_components)
Performs mesh repair using MeshFix's default repair process. Parameters ---------- verbose : bool, optional Enables or disables debug printing. Disabled by default. joincomp : bool, optional Attempts to join nearby open components. remove_smallest_components : bool, optional Remove all but the largest isolated component from the mesh before beginning the repair process. Default True Notes ----- Vertex and face arrays are updated inplace. Access them with: meshfix.v meshfix.f
def list_instance_configs(self, page_size=None, page_token=None):
    """List available instance configurations for the client's project.

    :param page_size: optional max configs per results page
    :param page_token: deprecated; use the iterator's ``pages`` property
    :return: iterator of InstanceConfig resources
    """
    metadata = _metadata_with_prefix(self.project_name)
    path = "projects/%s" % (self.project,)
    page_iter = self.instance_admin_api.list_instance_configs(
        path, page_size=page_size, metadata=metadata
    )
    page_iter.next_page_token = page_token
    page_iter.item_to_value = _item_to_instance_config
    return page_iter
List available instance configurations for the client's project. .. _RPC docs: https://cloud.google.com/spanner/docs/reference/rpc/\ google.spanner.admin.instance.v1#google.spanner.admin.\ instance.v1.InstanceAdmin.ListInstanceConfigs See `RPC docs`_. :type page_size: int :param page_size: Optional. The maximum number of configs in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. :type page_token: str :param page_token: Optional. If present, return the next batch of configs, using the value, which must correspond to the ``nextPageToken`` value returned in the previous response. Deprecated: use the ``pages`` property of the returned iterator instead of manually passing the token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.spanner_v1.instance.InstanceConfig` resources within the client's project.
def create_repository(self):
    """Create the migration repository data store (table with
    ``migration`` and ``batch`` columns)."""
    schema = self.get_connection().get_schema_builder()
    with schema.create(self._table) as table:
        table.string("migration")
        table.integer("batch")
Create the migration repository data store.
def move_backend(self, from_path, to_path):
    """Move an already-mounted backend to a new mount point
    (POST /sys/remount).

    :param from_path: previous mount point
    :param to_path: new destination mount point
    :return: the response of the request
    """
    return self._adapter.post(
        url='/v1/sys/remount',
        json={'from': from_path, 'to': to_path},
    )
Move an already-mounted backend to a new mount point. Supported methods: POST: /sys/remount. Produces: 204 (empty body) :param from_path: Specifies the previous mount point. :type from_path: str | unicode :param to_path: Specifies the new destination mount point. :type to_path: str | unicode :return: The response of the request. :rtype: requests.Response
def _broadcast_shapes(s1, s2):
    """Compute the shape resulting from broadcasting `s1` and `s2`.

    Raises ValueError when the shapes are not broadcast-compatible.
    """
    n1, n2 = len(s1), len(s2)
    n = max(n1, n2)
    res = [1] * n
    # walk the shapes from the trailing dimension backwards
    for i in range(n):
        c1 = s1[n1 - 1 - i] if i < n1 else 1
        c2 = s2[n2 - 1 - i] if i < n2 else 1
        if c1 == 1:
            rc = c2
        elif c2 == 1 or c1 == c2:
            rc = c1
        else:
            raise ValueError('array shapes %r and %r are not compatible'
                             % (s1, s2))
        res[n - 1 - i] = rc
    return tuple(res)
Given array shapes `s1` and `s2`, compute the shape of the array that would result from broadcasting them together.
def render_text(text, language=None):
    """Render *text* through the filter registered for *language*.

    Raises ImproperlyConfigured for unknown languages.
    """
    text_filter = SUPPORTED_LANGUAGES.get(language, None)
    if not text_filter:
        valid = ', '.join(list(SUPPORTED_LANGUAGES.keys()))
        raise ImproperlyConfigured(
            "markup filter does not exist: {0}. Valid options are: {1}".format(
                language, valid
            ))
    return text_filter(text)
Render the text, reuses the template filters provided by Django.
def UWRatio(s1, s2, full_process=True):
    """Unicode-preserving WRatio: similarity measure between 0 and 100."""
    return WRatio(s1, s2, force_ascii=False, full_process=full_process)
Return a measure of the sequences' similarity between 0 and 100, using different algorithms. Same as WRatio but preserving unicode.
def loop_stopped(self):
    """Terminate the socket connection because the loop is stopping.

    :return: None
    """
    transport = self.transport()
    if self.server_mode() is True:
        transport.close_server_socket(self.config())
    else:
        transport.close_client_socket(self.config())
Terminate socket connection because of stopping loop :return: None
def get_person_by_netid(self, netid):
    """Return a Person object for *netid*.

    Raises InvalidNetID for malformed netids and DataFailureException
    when the PWS request fails.
    """
    if not self.valid_uwnetid(netid):
        raise InvalidNetID(netid)
    url = "{}/{}/full.json".format(PERSON_PREFIX, netid.lower())
    response = DAO.getURL(url, {"Accept": "application/json"})
    if response.status != 200:
        raise DataFailureException(url, response.status, response.data)
    return self._person_from_json(response.data)
Returns a restclients.Person object for the given netid. If the netid isn't found, or if there is an error communicating with the PWS, a DataFailureException will be thrown.
def _get_line(self) -> str:
    """Return the current line from the file and advance the index."""
    current = self.in_lines[self.index]
    self.index += 1
    return current
Returns the current line from the file while incrementing the index.
def release(self):
    """Release the pidfile: close and delete it, ignoring a missing file.

    :return: None
    """
    try:
        self.pidfile.close()
        os.remove(self._pidfile)
    except OSError as err:
        if err.errno != 2:  # 2 == ENOENT: file already gone
            raise
Release the pidfile. Close and delete the Pidfile. :return: None
def trans(self, id, parameters=None, domain=None, locale=None):
    """Translate message *id* with *parameters*, using the given (or
    default) domain and locale.

    :return: the formatted, translated message
    """
    parameters = {} if parameters is None else parameters
    assert isinstance(parameters, dict)
    if locale is None:
        locale = self.locale
    else:
        self._assert_valid_locale(locale)
    if domain is None:
        domain = 'messages'
    msg = self.get_catalogue(locale).get(id, domain)
    return self.format(msg, parameters)
Translates the given message. @type id: str @param id: The message id @type parameters: dict @param parameters: A dict of parameters for the message @type domain: str @param domain: The domain for the message or null to use the default @type locale: str @param locale: The locale or null to use the default @rtype: str @return: Translated message
def getIndexStripUrl(self, index):
    """Get a comic strip URL from a 'chapter-num' index."""
    chapter, num = index.split('-')
    return self.stripUrl % (chapter, chapter, num)
Get comic strip URL from index.
def set_all_name_components(self, name, weight, width, custom_name):
    """Set weight, width and customName so that the master's joined name
    matches *name*; store an explicit "Master Name" parameter only when
    the components alone cannot reproduce it."""
    self.weight = weight or "Regular"
    self.width = width or "Regular"
    self.customName = custom_name or ""
    if self._joinName() == name:
        self._name = None
        del self.customParameters["Master Name"]
    else:
        self._name = name
        self.customParameters["Master Name"] = name
This function ensures that after being called, the master.name, master.weight, master.width, and master.customName match the given values.
def client_ident(self):
    """Return the client identifier as included in many command replies."""
    return irc.client.NickMask.from_params(
        self.nick, self.user, self.server.servername)
Return the client identifier as included in many command replies.
def remove(self, **kwargs):
    """Remove an instance of this resource definition and clear the
    cached inner object."""
    self.helper.remove(self.inner(), **kwargs)
    self._inner = None
Remove an instance of this resource definition.
def transition(value, maximum, start, end):
    """Return the value `value/maximum` of the way from *start* to *end*,
    rounded to 2 decimals.

    :param value: current iteration
    :param maximum: maximum number of iterations
    """
    span = end - start
    return round(start + span * value / maximum, 2)
Transition between two values. :param value: Current iteration. :param maximum: Maximum number of iterations. :param start: Start value. :param end: End value. :returns: Transitional value.
def seed_instance(self, seed=None):
    """Seed this instance's RNG (detaching it from the shared module RNG
    first, if necessary). Returns self for chaining."""
    if self.__random == random:
        self.__random = random_module.Random()
    self.__random.seed(seed)
    return self
Calls random.seed
def get_status_key(self, instance):
    """Generate the key used to set a status on a field.

    Unsaved instances fall back to ``id()`` for uniqueness.
    """
    key_id = "inst_%s" % id(instance) if instance.pk is None else instance.pk
    return "%s.%s-%s-%s" % (instance._meta.app_label,
                            get_model_name(instance),
                            key_id,
                            self.field.name)
Generates a key used to set a status on a field
def set_transition_down(self, p_self):
    """Set the downbeat-tracking transition matrix from self-loop
    probabilities; None clears the matrix.

    :param p_self: None, float in (0, 1), or np.ndarray of shape (2,)
    """
    self.down_transition = (None if p_self is None
                            else transition_loop(2, p_self))
Set the downbeat-tracking transition matrix according to self-loop probabilities. Parameters ---------- p_self : None, float in (0, 1), or np.ndarray [shape=(2,)] Optional self-loop probability(ies), used for Viterbi decoding
def get_reversed_unification_program(angles, control_indices, target,
                                     controls, mode):
    """Build the reversed Program for one unification step of the
    uniformly-controlled-rotation decomposition.

    :param angles: rotation angles, left to right
    :param control_indices: 1-based CNOT control positions per rotation
    :param target: index of the rotation target qubit
    :param controls: control qubit indices, bottom to top
    :param mode: 'phase' (RZ rotations) or 'magnitude' (RY rotations)
    """
    if mode == 'phase':
        gate = RZ
    elif mode == 'magnitude':
        gate = RY
    else:
        raise ValueError("mode must be \'phase\' or \'magnitude\'")
    reversed_gates = []
    for j in range(len(angles)):
        if angles[j] != 0:
            reversed_gates.append(gate(-angles[j], target))
        # NOTE(review): the collapsed source is ambiguous on whether this
        # CNOT belongs inside the angle check — confirm against upstream.
        if len(controls) > 0:
            reversed_gates.append(CNOT(controls[control_indices[j] - 1],
                                       target))
    return Program().inst(reversed_gates[::-1])
Gets the Program representing the reversed circuit for the decomposition of the uniformly controlled rotations in a unification step. If :math:`n` is the number of controls, the indices within control indices must range from 1 to :math:`n`, inclusive. The length of control_indices and the length of angles must both be :math:`2^n`. :param list angles: The angles of rotation in the the decomposition, in order from left to right :param list control_indices: a list of positions for the controls of the CNOTs used when decomposing uniformly controlled rotations; see get_cnot_control_positions for labelling conventions. :param int target: Index of the target of all rotations :param list controls: Index of the controls, in order from bottom to top. :param str mode: The unification mode. Is either 'phase', corresponding to controlled RZ rotations, or 'magnitude', corresponding to controlled RY rotations. :return: The reversed circuit of this unification step. :rtype: Program
def info_to_datatype_v4(signed, little_endian):
    """Map CAN signal flags to the MDF v4 integer channel data type.

    :param signed: signal is flagged as signed in the CAN database
    :param little_endian: signal is little endian (Intel) in the database
    :return: integer code for the MDF channel data type
    """
    if signed:
        return (v4c.DATA_TYPE_SIGNED_INTEL if little_endian
                else v4c.DATA_TYPE_SIGNED_MOTOROLA)
    return (v4c.DATA_TYPE_UNSIGNED_INTEL if little_endian
            else v4c.DATA_TYPE_UNSIGNED_MOTOROLA)
map CAN signal to MDF integer types Parameters ---------- signed : bool signal is flagged as signed in the CAN database little_endian : bool signal is flagged as little endian (Intel) in the CAN database Returns ------- datatype : int integer code for MDF channel data type
def post_upgrade_checks(self, upgrades):
    """Run post-upgrade checks after applying all pending upgrades, then
    report any collected errors.

    :param upgrades: upgrades sorted in topological order
    """
    errors = []
    for u in upgrades:
        self._setup_log_prefix(plugin_id=u.name)
        try:
            u.post_upgrade()
        except RuntimeError as e:
            errors.append((u.name, e.args))
    for check in self.global_post_upgrade:
        self._setup_log_prefix(plugin_id=check.__name__)
        try:
            check()
        except RuntimeError as e:
            errors.append((check.__name__, e.args))
    self._teardown_log_prefix()
    self._check_errors(errors, "Post-upgrade check for %s failed with the "
                               "following errors:")
Run post-upgrade checks after applying all pending upgrades. Post checks may be used to emit warnings encountered when applying an upgrade, but post-checks can also be used to advice the user to run re-indexing or similar long running processes. Post-checks may query for user-input, but should respect the --yes-i-know option to run in an unattended mode. All applied upgrades post-checks are executed. :param upgrades: List of upgrades sorted in topological order.
async def start(self, file_path, locale=None, kwargs=None):
    """Load translation data a first time; with live reload enabled,
    also watch the file's directory and reload on changes."""
    self._file_path = os.path.realpath(file_path)
    self._locale = locale
    if kwargs:
        self._kwargs = kwargs
    if settings.I18N_LIVE_RELOAD:
        loop = asyncio.get_event_loop()
        self._running = True
        self._watcher = aionotify.Watcher()
        self._watcher.watch(
            path=os.path.dirname(self._file_path),
            flags=aionotify.Flags.MOVED_TO | aionotify.Flags.MODIFY,
        )
        await self._watcher.setup(loop)
        await self._load()
        loop.create_task(self._watch())
    else:
        await self._load()
Setup the watching utilities, start the loop and load data a first time.
def activate(lancet, method, project):
    """Switch to this project: cd into it and (de)activate its virtualenv."""
    with taskstatus("Looking up project") as ts:
        # NOTE(review): both branches assign the same lookup function —
        # confirm whether "dir" should use a directory-based lookup.
        if method == "key":
            func = get_project_keys
        elif method == "dir":
            func = get_project_keys
        for key, project_path in func(lancet):
            if key.lower() == project.lower():
                break
        else:
            ts.abort(
                'Project "{}" not found (using {}-based lookup)',
                project,
                method,
            )
    config = load_config(os.path.join(project_path, LOCAL_CONFIG))
    lancet.defer_to_shell("cd", project_path)
    venv = config.get("lancet", "virtualenv", fallback=None)
    if venv:
        venv_path = os.path.join(project_path, os.path.expanduser(venv))
        activate_script = os.path.join(venv_path, "bin", "activate")
        lancet.defer_to_shell("source", activate_script)
    else:
        if "VIRTUAL_ENV" in os.environ:
            lancet.defer_to_shell("deactivate")
Switch to this project.
def calc_columns_rows(n):
    """Return ``(num_columns, num_rows)`` required to divide an image
    into ``n`` parts."""
    columns = int(ceil(sqrt(n)))
    rows = int(ceil(n / float(columns)))
    return (columns, rows)
Calculate the number of columns and rows required to divide an image into ``n`` parts. Return a tuple of integers in the format (num_columns, num_rows)
def _wait(self, args, now, cap, consumed_history, consumed_capacity):
    """Check consumed read/write capacity against the cap and sleep when
    the rate limit is exceeded; an optional callback may skip the sleep."""
    for key in ['read', 'write']:
        if key in cap and cap[key] > 0:
            consumed_history[key].add(now, consumed_capacity[key])
            consumed = consumed_history[key].value
            if consumed > 0 and consumed >= cap[key]:
                seconds = math.ceil(float(consumed) / cap[key])
                LOG.debug("Rate limited throughput exceeded. Sleeping "
                          "for %d seconds.", seconds)
                if callable(self.callback):
                    callback_args = args + (seconds,)
                    # a truthy callback result skips the sleep for this key
                    if self.callback(*callback_args):
                        continue
                time.sleep(seconds)
Check the consumed capacity against the limit and sleep
def get_local_file_list(self):
    """Walk the local build directory and return the relative paths of
    all files found under it."""
    file_list = []
    for dirpath, dirnames, filenames in os.walk(self.build_dir):
        rel_dir = os.path.relpath(dirpath, self.build_dir)
        for fname in filenames:
            local_key = os.path.join(rel_dir, fname)
            # normalize keys rooted at the build dir itself
            if local_key.startswith('./'):
                local_key = local_key[2:]
            file_list.append(local_key)
    return file_list
Walk the local build directory and create a list of relative and absolute paths to files.
def _listeq_to_dict(jobconfs):
    """Convert an iterable of ``key=val`` strings (or a dict) into a
    str -> str dictionary; later values take priority."""
    if not isinstance(jobconfs, dict):
        jobconfs = dict(x.split('=', 1) for x in jobconfs)
    return {str(k): str(v) for k, v in jobconfs.items()}
Convert iterators of 'key=val' into a dictionary with later values taking priority.
def is_sparse_file(filename):
    """Return True if *filename* names a sparse matrix (``xxx.coo.yyy``),
    False for a dense one.

    Fix: removed unused ``dirname``/``name`` locals and the redundant
    if/else around a boolean comparison.
    """
    base = os.path.basename(filename)
    stem, _ext = os.path.splitext(base)
    _matrix_name, matrix_ext = os.path.splitext(stem)
    return matrix_ext == '.coo'
Determine if the given filename indicates a dense or a sparse matrix If pathname is xxx.coo.yyy return True otherwise False.
def kill_window(pymux, variables):
    """Kill all panes in the current window."""
    for pane in pymux.arrangement.get_active_window().panes:
        pymux.kill_pane(pane)
Kill all panes in the current window.
def expect_column_values_to_match_json_schema(self,
                                              column,
                                              json_schema,
                                              mostly=None,
                                              result_format=None,
                                              include_config=False,
                                              catch_exceptions=None,
                                              meta=None
                                              ):
    """Expect column entries to be JSON objects matching *json_schema*.

    Abstract column-map expectation: concrete backends must override.
    """
    raise NotImplementedError
Expect column entries to be JSON objects matching a given JSON schema. expect_column_values_to_match_json_schema is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`. Args: column (str): \ The column name. Keyword Args: mostly (None or a float between 0 and 1): \ Return `"success": True` if at least mostly percent of values match the expectation. \ For more detail, see :ref:`mostly`. Other Parameters: result_format (str or None): \ Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`. For more detail, see :ref:`result_format <result_format>`. include_config (boolean): \ If True, then include the expectation config as part of the result object. \ For more detail, see :ref:`include_config`. catch_exceptions (boolean or None): \ If True, then catch exceptions and include them as part of the result object. \ For more detail, see :ref:`catch_exceptions`. meta (dict or None): \ A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. \ For more detail, see :ref:`meta`. Returns: A JSON-serializable expectation result object. Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`. See Also: expect_column_values_to_be_json_parseable The JSON-schema docs at: http://json-schema.org/
def cressman_point(sq_dist, values, radius):
    r"""Generate a Cressman interpolation value for a point.

    Parameters
    ----------
    sq_dist : (N,) ndarray
        Squared distance between observations and the grid point.
    values : (N,) ndarray
        Observation values in the same order as `sq_dist`.
    radius : float
        Maximum search distance for observations.

    Returns
    -------
    float
        Interpolated value for the grid point.
    """
    weights = tools.cressman_weights(sq_dist, radius)
    total_weights = np.sum(weights)
    return sum(v * (w / total_weights) for (w, v) in zip(weights, values))
Generate a Cressman interpolation value for a point. The calculated value is based on the given distances and search radius. Parameters ---------- sq_dist: (N, ) ndarray Squared distance between observations and grid point values: (N, ) ndarray Observation values in same order as sq_dist radius: float Maximum distance to search for observations to use for interpolation. Returns ------- value: float Interpolation value for grid point.
def matrix_transpose(m):
    """Transpose the 2-D input matrix.

    :param m: input matrix of dimensions (n x m), as a list/tuple of rows
    :return: transposed matrix of dimensions (m x n), as a list of lists
    """
    return [list(row) for row in zip(*m)]
Transposes the input matrix. The input matrix :math:`m` is a 2-dimensional array. :param m: input matrix with dimensions :math:`(n \\times m)` :type m: list, tuple :return: transpose matrix with dimensions :math:`(m \\times n)` :rtype: list
def parse(self, rrstr):
    """Parse a Rock Ridge Alternate Name (NM) record out of a string.

    Parameters:
     rrstr - The string to parse the record out of.
    Returns:
     Nothing.
    """
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('NM record already initialized!')
    # The NM header is 5 bytes; unpack length, SU entry version, and the
    # name flags as unsigned bytes starting at offset 2 (past the 'NM'
    # signature itself).
    (su_len, su_entry_version_unused, self.posix_name_flags) = struct.unpack_from('=BBB', rrstr[:5], 2)
    # Everything after the 5-byte header is name payload.
    name_len = su_len - 5
    # At most one of the low three flag bits may be set
    # (1=CONTINUE, 2=CURRENT, 4=PARENT -- per the Rock Ridge spec).
    if (self.posix_name_flags & 0x7) not in (0, 1, 2, 4):
        raise pycdlibexception.PyCdlibInvalidISO('Invalid Rock Ridge NM flags')
    if name_len != 0:
        # A record that actually carries name bytes must not also claim to
        # be a CURRENT (bit 1) or PARENT (bit 2) entry, nor have bit 5 set.
        if (self.posix_name_flags & (1 << 1)) or (self.posix_name_flags & (1 << 2)) or (self.posix_name_flags & (1 << 5)):
            raise pycdlibexception.PyCdlibInvalidISO('Invalid name in Rock Ridge NM entry (0x%x %d)' % (self.posix_name_flags, name_len))
        # '+=' rather than '=': name pieces accumulate, presumably across
        # CONTINUE-flagged records parsed earlier -- confirm against callers.
        self.posix_name += rrstr[5:5 + name_len]
    self._initialized = True
Parse a Rock Ridge Alternate Name record out of a string. Parameters: rrstr - The string to parse the record out of. Returns: Nothing.
def poly_to_power_basis(bezier_coeffs):
    r"""Convert a B |eacute| zier curve to polynomial in power basis.

    .. note::

       This assumes, but does not verify, that the "B |eacute| zier
       degree" matches the true degree of the curve.

    Args:
        bezier_coeffs (numpy.ndarray): A 1D array of coefficients in
            the Bernstein basis.

    Returns:
        numpy.ndarray: 1D array of coefficients in monomial basis.

    Raises:
        .UnsupportedDegree: If the degree of the curve is not among
            0, 1, 2 or 3.
    """
    (num_coeffs,) = bezier_coeffs.shape
    # Degree 0: the constant is already in power basis.
    if num_coeffs == 1:
        return bezier_coeffs
    if num_coeffs == 2:
        b0, b1 = bezier_coeffs
        return np.asfortranarray([b0, b1 - b0])
    if num_coeffs == 3:
        b0, b1, b2 = bezier_coeffs
        return np.asfortranarray([b0, 2.0 * (b1 - b0), b2 - 2.0 * b1 + b0])
    if num_coeffs == 4:
        b0, b1, b2, b3 = bezier_coeffs
        return np.asfortranarray(
            [
                b0,
                3.0 * (b1 - b0),
                3.0 * (b2 - 2.0 * b1 + b0),
                b3 - 3.0 * b2 + 3.0 * b1 - b0,
            ]
        )
    raise _helpers.UnsupportedDegree(num_coeffs - 1, supported=(0, 1, 2, 3))
Convert a B |eacute| zier curve to polynomial in power basis. .. note:: This assumes, but does not verify, that the "B |eacute| zier degree" matches the true degree of the curve. Callers can guarantee this by calling :func:`.full_reduce`. Args: bezier_coeffs (numpy.ndarray): A 1D array of coefficients in the Bernstein basis. Returns: numpy.ndarray: 1D array of coefficients in monomial basis. Raises: .UnsupportedDegree: If the degree of the curve is not among 0, 1, 2 or 3.
def geo_point_n(arg, n):
    """Return the Nth point in a single linestring in the geometry.

    Negative values are counted backwards from the end of the LineString,
    so that -1 is the last point. Returns NULL if there is no linestring
    in the geometry.

    Parameters
    ----------
    arg : geometry
    n : integer

    Returns
    -------
    PointN : geometry scalar
    """
    # Build the operation node and immediately lower it to an expression.
    return ops.GeoPointN(arg, n).to_expr()
Return the Nth point in a single linestring in the geometry. Negative values are counted backwards from the end of the LineString, so that -1 is the last point. Returns NULL if there is no linestring in the geometry Parameters ---------- arg : geometry n : integer Returns ------- PointN : geometry scalar
def deprecated(func, msg='', *args, **kw):
    """Mark a function as deprecated and delegate to it.

    :param func: the deprecated function to invoke
    :param msg: the message to print the first time the deprecated
        function is used
    :return: whatever ``func(*args, **kw)`` returns

    A ``DeprecationWarning`` is emitted only on the very first call; a
    ``called`` counter is kept on the function object itself.
    """
    full_msg = '%s.%s has been deprecated. %s' % (
        func.__module__, func.__name__, msg)
    # First ever call: warn once and initialise the per-function counter.
    first_call = not hasattr(func, 'called')
    if first_call:
        warnings.warn(full_msg, DeprecationWarning, stacklevel=2)
        func.called = 0
    func.called += 1
    return func(*args, **kw)
A family of decorators to mark deprecated functions. :param msg: the message to print the first time the deprecated function is used. Here is an example of usage: >>> @deprecated(msg='Use new_function instead') ... def old_function(): ... 'Do something' Notice that if the function is called several times, the deprecation warning will be displayed only the first time.
def remove_by_tag(self, tag):
    """Remove the first encountered object with the specified tag.

    :param tag: tag to search for
    :return: True if an object was found and removed, False otherwise
    """
    obj = self.find_obj_by_tag(tag)
    # Identity check against None (PEP 8) instead of '!=', which would
    # invoke __ne__ on arbitrary objects.
    if obj is not None:
        self.remove_obj(obj)
        return True
    return False
Remove the first encountered object with the specified tag from the world. Returns true if an object was found and removed. Returns false if no object could be removed.
def _encoder(self, obj): return {'__class__': obj.__class__.__name__, 'ident': obj.ident, 'group': obj.group, 'name': obj.name, 'ctype': obj.ctype, 'pytype': obj.pytype, 'access': obj.access} raise TypeError(repr(obj) + ' is not JSON serializable')
Encode a toc element leaf-node
def get_offset(self):
    """Return the offset (y-axis intercept) of this line segment."""
    # Solve y = slope * x + offset for the offset using endpoint p1.
    slope = self.get_slope()
    return self.p1.y - slope * self.p1.x
Get the offset (y-axis intercept) of this line segment.
def make_roi(cls, sources=None):
    """Build and return a `fermipy.roi_model.ROIModel` object from a dict
    with information about the sources.

    :param sources: mapping of source information; an empty mapping is
        used when None
    :return: the populated ROI model
    """
    factory = cls()
    factory.add_sources(sources if sources is not None else {})
    # Start from an empty model anchored at (0, 0) degrees, then load
    # each constructed source into it.
    model = roi_model.ROIModel(
        {}, skydir=SkyCoord(0.0, 0.0, unit='deg'))
    for src in factory.sources.values():
        model.load_source(src, build_index=False, merge_sources=False)
    return model
Build and return a `fermipy.roi_model.ROIModel` object from a dict with information about the sources
def show_instance(name=None, instance_id=None, call=None, kwargs=None):
    """Show the details from EC2 concerning an AMI.

    Can be called as an action (which requires a name):

    .. code-block:: bash

        salt-cloud -a show_instance myinstance

    ...or as a function (which requires either a name or instance_id):

    .. code-block:: bash

        salt-cloud -f show_instance my-ec2 name=myinstance
        salt-cloud -f show_instance my-ec2 instance_id=i-d34db33f
    """
    # Action invocations identify the target by name only.
    if call == 'action' and not name:
        raise SaltCloudSystemExit(
            'The show_instance action requires a name.'
        )

    # Function invocations take their arguments from kwargs instead.
    if call == 'function':
        name = kwargs.get('name', None)
        instance_id = kwargs.get('instance_id', None)
        if not (name or instance_id):
            raise SaltCloudSystemExit(
                'The show_instance function requires '
                'either a name or an instance_id'
            )

    node = _get_node(name=name, instance_id=instance_id)
    # Refresh the cloud cache entry for this node before returning it.
    __utils__['cloud.cache_node'](node, __active_provider_name__, __opts__)
    return node
Show the details from EC2 concerning an AMI. Can be called as an action (which requires a name): .. code-block:: bash salt-cloud -a show_instance myinstance ...or as a function (which requires either a name or instance_id): .. code-block:: bash salt-cloud -f show_instance my-ec2 name=myinstance salt-cloud -f show_instance my-ec2 instance_id=i-d34db33f
def _extract(self, raw: str, station: str) -> str: report = raw[raw.find(station.upper() + ' '):] report = report[:report.find(' =')] return report
Extracts the report message for the given station using substring searching
def find_matlab_version(process_path):
    """Try to guess MATLAB's release version from its process path.

    :param process_path: path of the running MATLAB executable
    :return: the release version string, or None if it could not be
        determined
    """
    # The executable lives in <matlab_root>/bin/<exe>; the root directory
    # name encodes the release.
    matlab_root = os.path.dirname(os.path.dirname(process_path))
    version = os.path.basename(matlab_root)
    # On non-Linux installs the directory looks like 'MATLAB_R20xx.app';
    # strip the decoration down to the bare release token.
    if not is_linux():
        version = version.replace('MATLAB_', '').replace('.app', '')
    return version if is_valid_release_version(version) else None
Tries to guess MATLAB's version according to its process path. If we couldn't guess the version, None is returned.
def _is_intrinsic_dict(self, input): return isinstance(input, dict) \ and len(input) == 1 \ and list(input.keys())[0] in self.supported_intrinsics
Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise
def filter(self, media_type, **params):
    """Iterate all accepted media types that match *media_type*.

    media_type -- string -- the media type to filter by
    **params -- dict -- further filter by key: val

    return -- generator -- yields each matching media type entry
    """
    req_type, req_sub = self._split_media_type(media_type)
    for entry in self.__iter__():
        # Every extra parameter must match the entry's parameter dict.
        if any(entry[2].get(k, None) != v for k, v in params.items()):
            continue
        cand_type = entry[0][0]
        cand_sub = entry[0][1]
        # Wildcard handling mirrors HTTP Accept semantics: either side of
        # the type/subtype pair may be '*'.
        if cand_type == '*':
            if cand_sub in ('*', req_sub):
                yield entry
        elif req_type == '*':
            if req_sub == '*' or cand_sub == req_sub:
                yield entry
        elif cand_type == req_type:
            if req_sub == '*' or cand_sub == '*' or cand_sub == req_sub:
                yield entry
iterate all the accept media types that match media_type media_type -- string -- the media type to filter by **params -- dict -- further filter by key: val return -- generator -- yields all matching media type info things
def unescape(cls, text: str) -> str:
    """Replace escape sequences with their corresponding characters.

    Args:
        text: Text to unescape.

    Raises:
        InvalidArgument: if a backslash introduces a character that has
            no entry in ``cls.unescape_map``.
    """
    # Split on the FIRST backslash only; anything after it is handled by
    # the recursive call below.
    chop = text.split("\\", 1)
    try:
        # len(chop) == 1 means no backslash at all -> nothing to replace.
        # Otherwise translate the single escaped character through the
        # class-level map and recurse on the remainder of the string.
        # NOTE(review): a lone trailing backslash makes chop[1] empty, so
        # chop[1][0] raises IndexError (not caught here) -- confirm intent.
        return (chop[0] if len(chop) == 1
                else chop[0] + cls.unescape_map[chop[1][0]] + cls.unescape(chop[1][1:]))
    except KeyError:
        # Unknown escape character; 'from None' suppresses the KeyError
        # chain so callers only see the domain error.
        raise InvalidArgument(text) from None
Replace escape sequence with corresponding characters. Args: text: Text to unescape.
def get_message(self):
    """Get the latest object from the backend, decrypt and return it.

    Returns None when there is no message, when the message's type is in
    ``SKIP_TYPES``, or when any unexpected error occurs while polling
    (polling is deliberately best-effort).

    Raises:
        Exception: if called before a subscription has been set up.
    """
    try:
        m = self.get_from_backend()
        # Only deliver real data messages; control messages whose type is
        # listed in SKIP_TYPES (defined elsewhere in this file) are dropped.
        if m and m["type"] not in SKIP_TYPES:
            return self.decrypt(m["data"])
    except AttributeError:
        # NOTE(review): assumes the AttributeError comes from the backend
        # handle not existing before subscribe(); an AttributeError raised
        # inside decrypt() would be misreported as "not subscribed" -- confirm.
        raise Exception("Tried to call get message without having subscribed first!")
    except (KeyboardInterrupt, SystemExit):
        # Let interpreter shutdown / Ctrl-C pass through quietly.
        pass
    except:
        # Deliberate catch-all: log the failure and fall through to None
        # so a transient backend error does not kill the polling loop.
        logging.critical("Error in watching pubsub get message: \n%s" % traceback.format_exc())
    return None
Gets the latest object from the backend, and handles unpickling and validation.
def update_parser(self, parser):
    """Update the config dictionary with arguments declared on *parser*.

    New variables will be created, and existing ones overridden.

    Args:
        parser (argparse.ArgumentParser): parser to read variables from
    """
    self._parser = parser
    ini_str = argparse_to_ini(parser)
    # Layer the parser-declared defaults on top of the current config:
    # read the existing values first, then let the INI rendering of the
    # parser override them.
    merged = configparser.ConfigParser(allow_no_value=True)
    merged.read_dict(self._config)
    merged.read_string(ini_str)
    self._config.update(
        {section: dict(merged.items(section)) for section in merged.sections()}
    )
Update config dictionary with declared arguments in an argparse.parser New variables will be created, and existing ones overridden. Args: parser (argparse.ArgumentParser): parser to read variables from
def cd_previous(self):
    """cd to the gDirectory that was current before this file was opened.

    Returns:
        bool: True if the previous directory was successfully made
        current again, False otherwise.
    """
    # Nothing recorded, or the previous directory was the ROOT global
    # directory: there is nothing sensible to cd back to.
    if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT):
        return False
    if isinstance(self._prev_dir, ROOT.TFile):
        # Only return to a file that is still open and writable.
        if self._prev_dir.IsOpen() and self._prev_dir.IsWritable():
            self._prev_dir.cd()
            return True
        return False
    # A plain directory must itself be writable...
    if not self._prev_dir.IsWritable():
        return False
    # ...and its owning file (if any) must still be open before we cd.
    prev_file = self._prev_dir.GetFile()
    if prev_file and prev_file.IsOpen():
        self._prev_dir.cd()
        return True
    return False
cd to the gDirectory before this file was open.
def _detach_received(self, error):
    """Callback called when a link DETACH frame is received.

    This callback will process the received DETACH error to determine if
    the link is recoverable or whether it should be shutdown.

    :param error: The error information from the detach frame.
    :type error: ~uamqp.errors.ErrorResponse
    """
    # A detach without error details is mapped to a generic AMQP error.
    condition = error.condition if error else b"amqp:unknown-error"
    description = error.description if error else None
    info = error.info if error else None
    self._error = errors._process_link_error(self.error_policy, condition, description, info)
    _logger.info("Received Link detach event: %r\nLink: %r\nDescription: %r"
                 "\nDetails: %r\nRetryable: %r\nConnection: %r",
                 condition, self.name, description, info,
                 self._error.action.retry,
                 self._session._connection.container_id)
Callback called when a link DETACH frame is received. This callback will process the received DETACH error to determine if the link is recoverable or whether it should be shutdown. :param error: The error information from the detach frame. :type error: ~uamqp.errors.ErrorResponse
def any2mb(s):
    """Convert string or number to memory in megabytes.

    :param s: either a human-readable memory string or a plain number
    :return: integer number of megabytes
    """
    # Plain numbers are assumed to already be in megabytes.
    if not is_string(s):
        return int(s)
    return int(Memory.from_string(s).to("Mb"))
Convert string or number to memory in megabytes.
def impersonate_sid(sid, session_id=None, privs=None):
    """Find an existing process token for the given sid and impersonate it.

    :param sid: the SID whose token should be impersonated
    :param session_id: optional session to restrict the search to
    :param privs: optional privileges the token must carry
    :return: the duplicated, elevated token now being impersonated
    :raises WindowsError: if no token was found or impersonation failed
    """
    # Use the first matching token only.
    for token in enumerate_tokens(sid, session_id, privs):
        token = dup_token(token)
        elevate_token(token)
        # ImpersonateLoggedOnUser returns 0 on failure.
        result = win32security.ImpersonateLoggedOnUser(token)
        if result == 0:
            raise WindowsError("Impersonation failure")
        return token
    # No token at all matched the given sid/session/privileges.
    raise WindowsError("Impersonation failure")
Find an existing process token for the given sid and impersonate the token.
def send_packet(self, packet, protocol='json', time_precision=None):
    """Send a UDP packet.

    :param packet: the packet to be sent
    :type packet: (if protocol is 'json') dict
                  (if protocol is 'line') list of line protocol strings
    :param protocol: protocol of input data, either 'json' or 'line'
    :type protocol: str
    :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
    :type time_precision: str
    :raises ValueError: if *protocol* is neither 'json' nor 'line'
        (previously an unknown protocol crashed with an unrelated
        NameError because ``data`` was never assigned)
    """
    if protocol == 'json':
        data = make_lines(packet, time_precision).encode('utf-8')
    elif protocol == 'line':
        data = ('\n'.join(packet) + '\n').encode('utf-8')
    else:
        raise ValueError(
            "Invalid protocol %r, expected 'json' or 'line'" % protocol)
    self.udp_socket.sendto(data, (self._host, self._udp_port))
Send a UDP packet. :param packet: the packet to be sent :type packet: (if protocol is 'json') dict (if protocol is 'line') list of line protocol strings :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str
def update_active(self):
    """Update the active marker on the marker Canvas."""
    # Clear the highlight on whichever marker is currently active.
    # NOTE(review): ``self.active`` is presumably a property backed by
    # ``self._active`` (defined elsewhere in the class) -- confirm.
    if self.active is not None:
        self.update_state(self.active, "normal")
    # Re-selecting the already-active marker toggles it off.
    if self.current_iid == self.active:
        self._active = None
        return
    # Otherwise the marker under the cursor becomes the active one.
    self._active = self.current_iid
    if self.active is not None:
        self.update_state(self.active, "active")
Update the active marker on the marker Canvas
def map(self):
    """Apply ``self._func`` to every item of ``self._iterable`` using a
    pool of ``self.cpu_count`` worker processes.

    :return: True once all items have been processed
    """
    pool = Pool(self.cpu_count)
    with pool:
        # map() blocks until every item has been processed; results are
        # intentionally discarded (the call is run for its side effects).
        pool.map(self._func, self._iterable)
        pool.close()
    return True
Perform a function on every item in an iterable.
def sanitize_metadata(self, metadata, replace_hyphen_with="-"):
    """Convert metadata keys/values to strings and drop null values.

    :param metadata: mapping to sanitize; None is treated as empty
    :param replace_hyphen_with: replacement for '-' characters in keys
    :return: dict with stringified keys and values, None values removed
    """
    cleaned = {}
    for key, value in (metadata or {}).items():
        # Null values are dropped entirely rather than stringified.
        if value is None:
            continue
        cleaned[str(key).replace("-", replace_hyphen_with)] = str(value)
    return cleaned
Convert non-string metadata values to strings and drop null values
def set_credential_password(self, access_id, password):
    """Set the password for an access host.

    :param integer access_id: id of the access host
    :param string password: password to set
    """
    service = 'Network_Storage_Allowed_Host'
    return self.client.call(service, 'setCredentialPassword',
                            password, id=access_id)
Sets the password for an access host :param integer access_id: id of the access host :param string password: password to set