code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def nl_pad(self, value):
    """Pad setter: write *value* (or 0 when falsy) into the pad slot as an unsigned short."""
    pad_value = value or 0
    self.bytearray[self._get_slicers(1)] = bytearray(c_ushort(pad_value))
Pad setter.
def log(self, level, msg=None, *args, **kwargs):
    """Write a log record out at an arbitrary *level*, delegating to ``self._log``."""
    return self._log(level, msg, args, kwargs)
Writes log out at any arbitrary level.
def complete(self):
    """Return whether this manager has reached a "completed" state.

    False when no techniques are registered or none override
    ``complete``; otherwise combines the overriding techniques' verdicts
    via ``self.completion_mode``.
    """
    if not self._techniques:
        return False
    overriders = [t for t in self._techniques if t._is_overriden('complete')]
    if not overriders:
        return False
    return self.completion_mode(t.complete(self) for t in overriders)
Returns whether or not this manager has reached a "completed" state.
def enable_memcache(source=None, release=None, package=None):
    """Determine whether memcache should be enabled on the local unit.

    :param source: install source, used as a last-resort release lookup
    :param release: release of OpenStack currently deployed
    :param package: package to derive the deployed OpenStack version from
    :returns: bool -- True when the derived release is mitaka or later
    """
    _release = release or os_release(package, base='icehouse')
    if not _release:
        _release = get_os_codename_install_source(source)
    return CompareOpenStackReleases(_release) >= 'mitaka'
Determine if memcache should be enabled on the local unit @param source: install source used to derive the OpenStack release if it cannot be determined from the package @param release: release of OpenStack currently deployed @param package: package to derive OpenStack version deployed @returns boolean Whether memcache should be enabled
def _on_capacity_data(self, conn, command, kwargs, response, capacity): if self._analyzing: self.consumed_capacities.append((command, capacity)) if self._query_rate_limit is not None: self._query_rate_limit.on_capacity( conn, command, kwargs, response, capacity ) elif self.rate_limit is not None: self.rate_limit.callback = self._on_throttle self.rate_limit.on_capacity(conn, command, kwargs, response, capacity)
Log the received consumed capacity data
def _one_q_sic_prep(index, qubit):
    """Prepare the *index*-th SIC basis state on *qubit* (index in 0..3)."""
    if index == 0:
        return Program()
    theta = 2 * np.arccos(1 / np.sqrt(3))
    zx_plane_rotation = Program([
        RX(-pi / 2, qubit),
        RZ(theta - pi, qubit),
        RX(-pi / 2, qubit),
    ])
    if index == 1:
        return zx_plane_rotation
    if index == 2:
        return zx_plane_rotation + RZ(-2 * pi / 3, qubit)
    if index == 3:
        return zx_plane_rotation + RZ(2 * pi / 3, qubit)
    raise ValueError(f'Bad SIC index: {index}')
Prepare the index-th SIC basis state.
def auto_unit(self, number, low_precision=False, min_symbol='K'):
    """Make a nice human-readable string out of *number*.

    Decimal precision increases as the scaled value approaches 1
    (e.g. 1181116006 -> '1.10G', 613421788 -> '585M').

    :param low_precision: return fewer decimal places, trading precision
        for readability (default False)
    :param min_symbol: do not scale below this unit symbol (default 'K')
    """
    symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    if min_symbol in symbols:
        symbols = symbols[symbols.index(min_symbol):]
    prefix = {
        'Y': 1208925819614629174706176,
        'Z': 1180591620717411303424,
        'E': 1152921504606846976,
        'P': 1125899906842624,
        'T': 1099511627776,
        'G': 1073741824,
        'M': 1048576,
        'K': 1024,
    }
    for symbol in reversed(symbols):
        value = float(number) / prefix[symbol]
        if value <= 1:
            continue
        if value < 10:
            decimal_precision = 2
        elif value < 100:
            decimal_precision = 1
        else:
            decimal_precision = 0
        if low_precision:
            decimal_precision = 0 if symbol in 'MK' else min(1, decimal_precision)
        elif symbol in 'K':
            decimal_precision = 0
        return '{:.{decimal}f}{symbol}'.format(
            value, decimal=decimal_precision, symbol=symbol)
    return '{!s}'.format(number)
Make a nice human-readable string out of number. Number of decimal places increases as quantity approaches 1. CASE: 613421788 RESULT: 585M low_precision: 585M CASE: 5307033647 RESULT: 4.94G low_precision: 4.9G CASE: 44968414685 RESULT: 41.9G low_precision: 41.9G CASE: 838471403472 RESULT: 781G low_precision: 781G CASE: 9683209690677 RESULT: 8.81T low_precision: 8.8T CASE: 1073741824 RESULT: 1024M low_precision: 1024M CASE: 1181116006 RESULT: 1.10G low_precision: 1.1G :low_precision: potentially returns fewer decimal places (default is False), sacrificing precision for more readability. :min_symbol: Do not abbreviate if number < min_symbol (default is K)
def write_directory(zfile, directory):
    """Recursively add *directory* and every contained file to the zipfile *zfile*."""
    for dirpath, _dirnames, filenames in os.walk(directory):
        zfile.write(dirpath)
        for name in filenames:
            zfile.write(os.path.join(dirpath, name))
Write recursively all directories and filenames to zipfile instance.
def attributes(self):
    """Return the sync attributes as a dict."""
    return {
        'name': self.name,
        'id': self.sync_id,
        'network_id': self.network_id,
        'serial': self.serial,
        'status': self.status,
        'region': self.region,
        'region_id': self.region_id,
    }
Return sync attributes.
def center(self):
    """Center point of the ellipse, equidistant from foci (``Point``).

    Defaults to the origin; lazily created and cached on first access.
    """
    if not hasattr(self, '_center'):
        self._center = Point()
    return self._center
Center point of the ellipse, equidistant from foci, Point class.\n Defaults to the origin.
def from_file(filename, mime=False):
    """Return the detected filetype of *filename*.

    The result is the mimetype when ``mime=True``, otherwise a
    human-readable description.
    """
    detector = _get_magic_type(mime)
    return detector.from_file(filename)
Accepts a filename and returns the detected filetype. Return value is the mimetype if mime=True, otherwise a human readable name. >>> magic.from_file("testdata/test.pdf", mime=True) 'application/pdf'
def get_all_coeffs():
    """Get all available calibration coefficients for the satellites.

    Returns a nested dict ``{platform: {channel: coeffs}}`` built from
    the URLS table.
    """
    coeffs = {}
    for platform, channels in URLS.items():
        coeffs.setdefault(platform, {})
        for chan, url in channels.items():
            # Bug fix: original used the Python 2 print statement
            # (`print url`), a SyntaxError on Python 3.
            print(url)
            page = get_page(url)
            coeffs[platform][chan] = get_coeffs(page)
    return coeffs
Get all available calibration coefficients for the satellites.
def update_hit_count_ajax(request, *args, **kwargs):
    """Deprecated in 1.2; delegate to ``hitcount.views.HitCountJSONView``."""
    warnings.warn(
        "hitcount.views.update_hit_count_ajax is deprecated. "
        "Use hitcount.views.HitCountJSONView instead.",
        RemovedInHitCount13Warning
    )
    return HitCountJSONView.as_view()(request, *args, **kwargs)
Deprecated in 1.2. Use hitcount.views.HitCountJSONView instead.
def calc_paired_insert_stats(in_bam, nsample=1000000):
    """Retrieve insert-distance statistics for up to *nsample* proper read pairs."""
    dists = []
    with pysam.Samfile(in_bam, "rb") as in_pysam:
        for read in in_pysam:
            if read.is_proper_pair and read.is_read1:
                dists.append(abs(read.isize))
                if len(dists) >= nsample:
                    break
    return insert_size_stats(dists)
Retrieve statistics for paired end read insert distances.
def get_admin_ids(self):
    """Return the administrator id list.

    Returns an empty list when the response carries no ``admin_list``
    entry; the original subscripted the ``.get(..., None)`` result
    unconditionally and raised ``TypeError`` in that case.
    """
    admins = self.json_response.get("admin_list")
    if not admins:
        return []
    return list(admins["userid"])
Method to get the administrator id list.
def install(self, param, author=None, constraints=None, origin=''):
    """Install a skill by url, name, or ``SkillEntry``.

    The installation outcome is appended to ``self.skills_data`` whether
    it succeeds or fails, except when the skill was already installed.
    Installation exceptions are re-raised to the caller.
    """
    skill = param if isinstance(param, SkillEntry) else self.find_skill(param, author)
    entry = build_skill_entry(skill.name, origin, skill.is_beta)
    try:
        skill.install(constraints)
        entry.update(
            installed=time.time(),
            installation='installed',
            status='active',
            beta=skill.is_beta,
        )
    except AlreadyInstalled:
        entry = None  # nothing to record; the skill was already present
        raise
    except MsmException as e:
        entry.update(
            installation='failed',
            status='error',
            failure_message=repr(e),
        )
        raise
    finally:
        if entry:
            self.skills_data['skills'].append(entry)
Install by url or name
def silent_popen(args, **kwargs):
    """Run *args* via ``subprocess.Popen`` with output suppressed.

    STDERR is redirected into STDOUT, which is piped back and returned
    as the result (bytes).
    """
    proc = subprocess.Popen(args,
                            stderr=subprocess.STDOUT,
                            stdout=subprocess.PIPE,
                            **kwargs)
    return proc.communicate()[0]
Wrapper for subprocess.Popen with suppressed output. STDERR is redirected to STDOUT, which is piped back to the calling process and returned as the result.
def reftrack_alien_data(rt, role):
    """Return display data ("Yes"/"No") for the alien status of *rt*.

    :param rt: the Reftrack holding the data
    :param role: item data role (only Display/Edit roles yield data)
    """
    alien = rt.alien()
    if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
        return "Yes" if alien else "No"
Return the data for the alien status :param rt: the :class:`jukeboxcore.reftrack.Reftrack` holds the data :type rt: :class:`jukeboxcore.reftrack.Reftrack` :param role: item data role :type role: QtCore.Qt.ItemDataRole :returns: data for the alien status :rtype: depending on role :raises: None
def _GetValueAsObject(self, property_value):
    """Retrieve the OLECF property value as a Python object.

    Args:
      property_value (pyolecf.property_value): OLECF property value.

    Returns:
      object: property value, or None when the raw data cannot be read.
    """
    value_type = property_value.type
    if value_type == pyolecf.value_types.BOOLEAN:
        return property_value.data_as_boolean
    if value_type in self._INTEGER_TYPES:
        return property_value.data_as_integer
    if value_type in self._STRING_TYPES:
        return property_value.data_as_string
    try:
        return property_value.data
    except IOError:
        return None
Retrieves the property value as a Python object. Args: property_value (pyolecf.property_value): OLECF property value. Returns: object: property value as a Python object.
def set_window_size_callback(window, cbfun):
    """Set the size callback for *window*; return the previous callback, if any.

    Wrapper for ``glfwSetWindowSizeCallback``. The ctypes callback object
    is stored in a repository keyed on the window address so it is not
    garbage-collected while registered.
    """
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowsizefun(cbfun)
    _window_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
Sets the size callback for the specified window. Wrapper for: GLFWwindowsizefun glfwSetWindowSizeCallback(GLFWwindow* window, GLFWwindowsizefun cbfun);
def events_for_secretreveal(
        transfers_pair: List[MediationPairState],
        secret: Secret,
        pseudo_random_generator: random.Random,
) -> List[Event]:
    """Reveal the secret off-chain, propagating backwards along the chain.

    Pairs are walked in reverse so the secret flows from payee to payer:
    a ``SendSecretReveal`` is emitted for each payer that does not yet
    know the secret while its transfer is still pending.
    """
    events: List[Event] = []
    for pair in reversed(transfers_pair):
        payee_knows = pair.payee_state in STATE_SECRET_KNOWN
        payer_knows = pair.payer_state in STATE_SECRET_KNOWN
        is_pending = pair.payer_state == 'payer_pending'
        if payee_knows and not payer_knows and is_pending:
            message_identifier = message_identifier_from_prng(pseudo_random_generator)
            pair.payer_state = 'payer_secret_revealed'
            payer_transfer = pair.payer_transfer
            events.append(SendSecretReveal(
                recipient=payer_transfer.balance_proof.sender,
                channel_identifier=CHANNEL_IDENTIFIER_GLOBAL_QUEUE,
                message_identifier=message_identifier,
                secret=secret,
            ))
    return events
Reveal the secret off-chain. The secret is revealed off-chain even if there is a pending transaction to reveal it on-chain, this allows the unlock to happen off-chain, which is faster. This node is named N, suppose there is a mediated transfer with two refund transfers, one from B and one from C: A-N-B...B-N-C..C-N-D Under normal operation N will first learn the secret from D, then reveal to C, wait for C to inform the secret is known before revealing it to B, and again wait for B before revealing the secret to A. If B somehow sent a reveal secret before C and D, then the secret will be revealed to A, but not C and D, meaning the secret won't be propagated forward. Even if D sent a reveal secret at about the same time, the secret will only be revealed to B upon confirmation from C. If the proof doesn't arrive in time and the lock's expiration is at risk, N won't lose tokens since it knows the secret can go on-chain at any time.
def quote_identifier(identifier: str,
                     mixed: Union[SQLCompiler, Engine, Dialect]) -> str:
    """Quote an SQL identifier via the SQL dialect in use.

    Args:
        identifier: the identifier to be quoted
        mixed: an SQLAlchemy ``SQLCompiler``, ``Engine``, or ``Dialect``

    Returns:
        the quoted identifier
    """
    preparer = get_preparer(mixed)
    return preparer.quote(identifier)
Converts an SQL identifier to a quoted version, via the SQL dialect in use. Args: identifier: the identifier to be quoted mixed: an SQLAlchemy :class:`SQLCompiler`, :class:`Engine`, or :class:`Dialect` object Returns: the quoted identifier
def date_range_builder(self, start='2013-02-11', end=None):
    """Build an acquisitionDate range query string.

    :param start: start date string, format YYYY-MM-DD
    :param end: end date string, format YYYY-MM-DD; defaults to today
    :returns: query string
    """
    end = end or time.strftime('%Y-%m-%d')
    return 'acquisitionDate:[%s+TO+%s]' % (start, end)
Builds date range query. :param start: Date string. format: YYYY-MM-DD :type start: String :param end: date string. format: YYYY-MM-DD :type end: String :returns: String
def _handle_browse(self, relpath, params): abspath = os.path.normpath(os.path.join(self._root, relpath)) if not abspath.startswith(self._root): raise ValueError if os.path.isdir(abspath): self._serve_dir(abspath, params) elif os.path.isfile(abspath): self._serve_file(abspath, params)
Handle requests to browse the filesystem under the build root.
def set_cookie_if_ok(self, cookie, request):
    """Set *cookie* if policy says it's OK to do so."""
    with self._cookies_lock:
        self._policy._now = self._now = int(time.time())
        if self._policy.set_ok(cookie, request):
            self.set_cookie(cookie)
Set a cookie if policy says it's OK to do so.
def get_advanced_foreign_key_options_sql(self, foreign_key):
    """Return the FOREIGN KEY query section for non-standard options
    (ON UPDATE / ON DELETE referential actions).

    :param foreign_key: The foreign key
    :type foreign_key: ForeignKeyConstraint
    :rtype: str
    """
    clauses = []
    if self.supports_foreign_key_on_update() and foreign_key.has_option("on_update"):
        clauses.append(" ON UPDATE %s" % self.get_foreign_key_referential_action_sql(
            foreign_key.get_option("on_update")))
    if foreign_key.has_option("on_delete"):
        clauses.append(" ON DELETE %s" % self.get_foreign_key_referential_action_sql(
            foreign_key.get_option("on_delete")))
    return "".join(clauses)
Returns the FOREIGN KEY query section dealing with non-standard options as MATCH, INITIALLY DEFERRED, ON UPDATE, ... :param foreign_key: The foreign key :type foreign_key: ForeignKeyConstraint :rtype: str
def lock(self, resource_id, region, account_id=None):
    """Lock the given resource in *region* on behalf of *account_id*."""
    account_id = self.get_account_id(account_id)
    url = "%s/%s/locks/%s/lock" % (self.endpoint, account_id, resource_id)
    return self.http.post(url, json={'region': region}, auth=self.get_api_auth())
Lock a given resource
def _workdir(self, line): workdir = self._setup('WORKDIR', line) line = "cd %s" %(''.join(workdir)) self.install.append(line)
A Docker WORKDIR command simply implies to cd to that location Parameters ========== line: the line from the recipe file to parse for WORKDIR
def report_status_to_github(self,
                            state: str,
                            description: str,
                            context: str,
                            target_url: Optional[str] = None):
    """Set a commit status indicator on github.

    If not running from a pull request (repository is None), this is a
    no-op beyond validation.

    Raises:
        ValueError: *state* is not one of the allowed states.
        IOError: the HTTP POST failed or did not return 201.
    """
    if state not in ('error', 'failure', 'pending', 'success'):
        raise ValueError('Unrecognized state: {!r}'.format(state))
    if self.repository is None or self.repository.access_token is None:
        return
    print(repr(('report_status', context, state, description, target_url)),
          file=sys.stderr)
    payload = {'state': state, 'description': description, 'context': context}
    if target_url is not None:
        payload['target_url'] = target_url
    # NOTE(review): passing access_token as a URL query parameter is
    # deprecated by GitHub; consider the Authorization header instead.
    url = ("https://api.github.com/repos/{}/{}/statuses/{}?access_token={}"
           .format(self.repository.organization, self.repository.name,
                   self.actual_commit_id, self.repository.access_token))
    response = requests.post(url, json=payload)
    if response.status_code != 201:
        raise IOError('Request failed. Code: {}. Content: {}.'.format(
            response.status_code, response.content))
Sets a commit status indicator on github. If not running from a pull request (i.e. repository is None), then this just prints to stderr. Args: state: The state of the status indicator. Must be 'error', 'failure', 'pending', or 'success'. description: A summary of why the state is what it is, e.g. '5 lint errors' or 'tests passed!'. context: The name of the status indicator, e.g. 'pytest' or 'lint'. target_url: Optional location where additional details about the status can be found, e.g. an online test results page. Raises: ValueError: Not one of the allowed states. IOError: The HTTP post request failed, or the response didn't have a 201 code indicating success in the expected way.
def descendants(self, start, generations=None):
    """Return the subgraph of all nodes reachable from *start*, inclusive.

    The optional *generations* argument limits how many levels deep the
    breadth-first traversal goes.
    """
    visited = self.vertex_set()
    visited.add(start)
    queue = deque([(start, 0)])
    while queue:
        vertex, depth = queue.popleft()
        if depth == generations:
            continue
        for child in self.children(vertex):
            if child in visited:
                continue
            visited.add(child)
            queue.append((child, depth + 1))
    return self.full_subgraph(visited)
Return the subgraph of all nodes reachable from the given start vertex, including that vertex. If specified, the optional `generations` argument specifies how many generations to limit to.
def remove(self, uids: Iterable[int]) -> None:
    """Remove any session flags for the given message UIDs.

    Args:
        uids: The message UID values.
    """
    for message_uid in uids:
        self._recent.discard(message_uid)
        self._flags.pop(message_uid, None)
Remove any session flags for the given message. Args: uids: The message UID values.
def transform_external(self, external_url):
    """Turn an external URL into a Filestack Transform object.

    *returns* [Filestack.Transform]
    """
    return filestack.models.Transform(
        apikey=self.apikey,
        security=self.security,
        external_url=external_url,
    )
Turns an external URL into a Filestack Transform object *returns* [Filestack.Transform] ```python from filestack import Client, Filelink client = Client("API_KEY") transform = client.transform_external('http://www.example.com') ```
def object_to_dict(cls, obj):
    """Recursively convert *obj* into a dictionary representation.

    Lists become ``{"list": [...]}`` (nested list attributes become plain
    lists), non-primitive objects become dicts of their ``__dict__``, and
    primitives are returned unchanged. ``None`` yields an empty dict.
    """
    dict_obj = dict()
    if obj is not None:
        if type(obj) == list:
            dict_obj["list"] = [cls.object_to_dict(inst) for inst in obj]
        elif not cls.is_primitive(obj):
            for key, attr in obj.__dict__.items():
                if type(attr) == list:
                    dict_obj[key] = [cls.object_to_dict(inst) for inst in attr]
                elif not cls.is_primitive(attr):
                    dict_obj[key] = cls.object_to_dict(attr)
                else:
                    dict_obj[key] = attr
        elif cls.is_primitive(obj):
            return obj
    return dict_obj
This function recursively converts an object into a dictionary representation.
def stop_consuming(self):
    """Tell RabbitMQ that we would like to stop consuming."""
    if not self._channel:
        return
    logger.debug('Sending a Basic.Cancel RPC command to RabbitMQ')
    self._channel.basic_cancel(self.on_cancelok, self._consumer_tag)
Tell RabbitMQ that we would like to stop consuming.
def _from_dict(cls, _dict):
    """Initialize a MetricTokenResponse object from a json dictionary."""
    args = {}
    if 'aggregations' in _dict:
        args['aggregations'] = [MetricTokenAggregation._from_dict(item)
                                for item in _dict.get('aggregations')]
    return cls(**args)
Initialize a MetricTokenResponse object from a json dictionary.
def get_decimal_time(self):
    """Return the time of the catalogue as a decimal."""
    data = self.data
    return decimal_time(data['year'], data['month'], data['day'],
                        data['hour'], data['minute'], data['second'])
Returns the time of the catalogue as a decimal
def dotted_parts(s):
    """For a string "a.b.c", yield "a", "a.b", "a.b.c" in turn.

    Yields nothing for an empty string.
    """
    if not s:
        return
    idx = -1
    while True:
        idx = s.find('.', idx + 1)
        if idx == -1:
            yield s
            return
        yield s[:idx]
For a string "a.b.c", yields "a", "a.b", "a.b.c".
def object_path(collection, id):
    """Return the path to the backing file for *id* in *collection*.

    The id is made filesystem-safe by normalizing its string
    representation; dicts carrying an ``id`` key are unwrapped first.
    """
    _logger.debug(type(id))
    _logger.debug(id)
    if isinstance(id, dict) and 'id' in id:
        id = id['id']
    normalized_id = normalize_text(str(id), lcase=False)
    filename = '%s.%s' % (normalized_id, _ext)
    return os.path.join(_basepath, collection, filename)
Returns path to the backing file of the object with the given ``id`` in the given ``collection``. Note that the ``id`` is made filesystem-safe by "normalizing" its string representation.
def initialize_params(self, preload_data=True) -> None:
    """Load device parameters and initialize parameter management.

    Set ``preload_data=False`` to skip the initial fetch and load params
    selectively afterwards.
    """
    raw = self.request('get', param_url) if preload_data else ''
    self.params = Params(raw, self.request)
Load device parameters and initialize parameter management. Preload data can be disabled to selectively load params afterwards.
def _get_subcats(self, recurse=False):
    """Get the subcategories of this category.

    recurse -- whether to include their subcategories as well
    """
    if recurse:
        return sorted((Category(e) for e in self._subcats_recursive),
                      key=lambda c: c.sort_breadcrumb)
    depth = len(self.path.split('/')) + 1 if self.path else 1
    truncated = {'/'.join(c.split('/')[:depth]) for c in self._subcats_recursive}
    return sorted((Category(c) for c in truncated),
                  key=lambda c: c.sort_name or c.name)
Get the subcategories of this category recurse -- whether to include their subcategories as well
def find_root(self):
    """Find and return the index of the root node of the tree.

    Walks upwards from vertex 0 following first-parent links until a
    parentless vertex is reached.
    """
    self.find_parents()
    index = 0
    while self.vertices[index].parents:
        index = self.vertices[index].parents[0]
    return index
Finds the index of the root node of the tree.
def _probe_lock_file(self, reported_mtime): delta = reported_mtime - self.lock_data["lock_time"] self.server_time_ofs = delta if self.get_option("verbose", 3) >= 4: write("Server time offset: {:.2f} seconds.".format(delta))
Called by get_dir
def reset_defaults(self):
    """Reset saved login, password and URL in QgsSettings and the dialog."""
    for checkbox in (self.save_login, self.save_password, self.save_url):
        checkbox.setChecked(False)
    for key in (GEONODE_USER, GEONODE_PASSWORD, GEONODE_URL):
        set_setting(key, '')
    for field in (self.login, self.password, self.url):
        field.setText('')
Reset login and password in QgsSettings.
def get_item_by_key(passed_list, key, value):
    """Fetch item(s) from *passed_list* whose *key* equals *value*.

    Works on QuerySets (via ``filter``) and on lists of dicts. Returns a
    single item when exactly one matches, otherwise the matching subset.
    Does not work with None or empty-string values (returns None).
    """
    if value in (None, ''):
        return
    # isinstance (rather than an exact type() comparison) also accepts
    # QuerySet subclasses.
    if isinstance(passed_list, (QuerySet, PolymorphicQuerySet)):
        sub_list = passed_list.filter(**{key: value})
    else:
        sub_list = [x for x in passed_list if x.get(key) == value]
    if len(sub_list) == 1:
        return sub_list[0]
    return sub_list
This one allows us to get one or more items from a list of dictionaries based on the value of a specified key, where both the key and the value can be variable names. Does not work with None or null string passed values.
def _to_reddit_list(arg):
    """Return *arg* converted to a reddit-formatted (comma-delimited) list.

    Strings and non-iterables are stringified directly; any other
    iterable is joined by commas, each element stringified.
    """
    is_stringlike = isinstance(arg, six.string_types)
    is_iterable = hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")
    if is_stringlike or not is_iterable:
        return six.text_type(arg)
    return ','.join(six.text_type(a) for a in arg)
Return an argument converted to a reddit-formatted list. The returned format is a comma deliminated list. Each element is a string representation of an object. Either given as a string or as an object that is then converted to its string representation.
def flux(self, photon_energy, distance=1 * u.kpc):
    """Differential flux at a given *distance* from the source.

    If *distance* is 0, the intrinsic differential luminosity is
    returned instead.
    """
    spec = self._spectrum(photon_energy)
    if distance != 0:
        distance = validate_scalar("distance", distance, physical_type="length")
        spec /= 4 * np.pi * distance.to("cm") ** 2
        out_unit = "1/(s cm2 eV)"
    else:
        out_unit = "1/(s eV)"
    return spec.to(out_unit)
Differential flux at a given distance from the source. Parameters ---------- photon_energy : :class:`~astropy.units.Quantity` float or array Photon energy array. distance : :class:`~astropy.units.Quantity` float, optional Distance to the source. If set to 0, the intrinsic differential luminosity will be returned. Default is 1 kpc.
def _get_xref(self, line): mtch = self.attr2cmp['xref'].match(line) return mtch.group(1).replace(' ', '')
Given line, return optional attribute xref value in a dict of sets.
def compile(manager, path, allow_naked_names, allow_nested,
            disallow_unqualified_translocations, no_identifier_validation,
            no_citation_clearing, required_annotations, skip_tqdm, verbose):
    """Compile a BEL script to a graph, pickle it, and print a summary.

    Exits with 0 when the graph has no warnings, 1 otherwise.
    """
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
        log.setLevel(logging.DEBUG)
    log.debug('using connection: %s', manager.engine.url)
    click.secho('Compilation', fg='red', bold=True)
    if skip_tqdm:
        click.echo('```')
    graph = from_path(
        path,
        manager=manager,
        use_tqdm=(not (skip_tqdm or verbose)),
        allow_nested=allow_nested,
        allow_naked_names=allow_naked_names,
        disallow_unqualified_translocations=disallow_unqualified_translocations,
        citation_clearing=(not no_citation_clearing),
        required_annotations=required_annotations,
        no_identifier_validation=no_identifier_validation,
        allow_definition_failures=True,
    )
    if skip_tqdm:
        click.echo('```')
    to_pickle(graph, get_corresponding_pickle_path(path))
    click.echo('')
    _print_summary(graph, ticks=skip_tqdm)
    sys.exit(0 if graph.number_of_warnings() == 0 else 1)
Compile a BEL script to a graph.
def gam_in_biomass(model, reaction):
    """Return True if the biomass reaction includes growth-associated maintenance.

    GAM is present when ATP and H2O appear among the reactants and ADP,
    Pi and H among the products.

    Parameters
    ----------
    model : cobra.Model
        The metabolic model under investigation.
    reaction : cobra.core.reaction.Reaction
        The biomass reaction of the model under investigation.

    Returns
    -------
    boolean
    """
    compartment = helpers.find_compartment_id_in_model(model, 'c')

    def met(mnx_id):
        # Resolve a MetaNetX id to the model's metabolite in the main compartment.
        return helpers.find_met_in_model(model, mnx_id, compartment)[0]

    try:
        reactant_ids = {met("MNXM3"), met("MNXM2")}
        product_ids = {met("MNXM7"), met("MNXM1"), met("MNXM9")}
    except RuntimeError:
        return False
    return (reactant_ids.issubset(set(reaction.reactants)) and
            product_ids.issubset(set(reaction.products)))
Return boolean if biomass reaction includes growth-associated maintenance. Parameters ---------- model : cobra.Model The metabolic model under investigation. reaction : cobra.core.reaction.Reaction The biomass reaction of the model under investigation. Returns ------- boolean True if the biomass reaction includes ATP and H2O as reactants and ADP, Pi and H as products, False otherwise.
def serialize(self) -> dict:
    """Dump the current DIDDoc to a JSON-compatible dictionary.

    Bug fix: the return annotation said ``str`` although a dict is
    returned (as the original docstring itself stated).

    :return: dict representation of current DIDDoc
    """
    return {
        '@context': DIDDoc.CONTEXT,
        'id': canon_ref(self.did, self.did),
        'publicKey': [pubkey.to_dict() for pubkey in self.pubkey.values()],
        'authentication': [{
            'type': pubkey.type.authn_type,
            'publicKey': canon_ref(self.did, pubkey.id),
        } for pubkey in self.pubkey.values() if pubkey.authn],
        'service': [service.to_dict() for service in self.service.values()],
    }
Dump current object to a JSON-compatible dictionary. :return: dict representation of current DIDDoc
def post(self, uri, body=None, **kwargs):
    """Make a POST request to *uri*; ``query`` may be supplied via kwargs."""
    query = kwargs.pop("query", {})
    return self.fetch('post', uri, query, body, **kwargs)
make a POST request
def run_all(logdir, steps, thresholds, verbose=False):
    """Generate PR curve summaries for both demo runs.

    Arguments:
      logdir: The directory into which to store all the runs' data.
      steps: The number of steps to run for.
      thresholds: The number of thresholds to use for PR curves.
      verbose: Whether to print run names to stdout during execution.
    """
    for run_name, mask in (('colors', False),
                           ('mask_every_other_prediction', True)):
        if verbose:
            print('--- Running: %s' % run_name)
        run_kwargs = dict(logdir=logdir, steps=steps, run_name=run_name,
                          thresholds=thresholds)
        if mask:
            run_kwargs['mask_every_other_prediction'] = True
        start_runs(**run_kwargs)
Generate PR curve summaries. Arguments: logdir: The directory into which to store all the runs' data. steps: The number of steps to run for. verbose: Whether to print the names of runs into stdout during execution. thresholds: The number of thresholds to use for PR curves.
def _condense(self, data): if data: return reduce(operator.ior, data.values()) return set()
Condense by or-ing all of the sets.
def ListingBox(listing, *args, **kwargs):
    " Delegate the boxing to the target publishable's Box class. "
    publishable = listing.publishable
    return publishable.box_class(publishable, *args, **kwargs)
Delegate the boxing to the target's Box class.
def add(self, entry):
    """Insert *entry* into the oath table; return True when one row was added."""
    cursor = self.conn.cursor()
    columns = ("key", "aead", "nonce", "key_handle", "oath_C", "oath_T")
    cursor.execute(
        "INSERT INTO oath (key, aead, nonce, key_handle, oath_C, oath_T)"
        " VALUES (?, ?, ?, ?, ?, ?)",
        tuple(entry.data[column] for column in columns))
    self.conn.commit()
    return cursor.rowcount == 1
Add entry to database.
async def dump_variant(obj, elem, elem_type=None, params=None, field_archiver=None):
    """Transform a variant to its popo (dict) representation.

    :param obj: carried through unused
    :param elem: variant instance or wrapped raw value
    :param elem_type: variant type descriptor, consulted when *elem* is raw
    :param params: unused
    :param field_archiver: override for the field dump function
    :return: single-key dict mapping the variant field name to its dump
    """
    archiver = field_archiver or dump_field
    if isinstance(elem, x.VariantType) or elem_type.WRAPS_VALUE:
        value = await archiver(None, getattr(elem, elem.variant_elem),
                               elem.variant_elem_type)
        return {elem.variant_elem: value}
    fdef = elem_type.find_fdef(elem_type.f_specs(), elem)
    return {fdef[0]: await archiver(None, elem, fdef[1])}
Transform variant to the popo object representation. :param obj: :param elem: :param elem_type: :param params: :param field_archiver: :return:
def LookupCirrusAmi(ec2, instance_type, ubuntu_release_name, mapr_version,
                    role, ami_release_name, ami_owner_id):
    """Return the AMI satisfying the provided constraints, or None."""
    if role not in valid_instance_roles:
        raise RuntimeError('Specified role (%s) not a valid role: %s'
                           % (role, valid_instance_roles))
    virtualization_type = 'hvm' if IsHPCInstanceType(instance_type) else 'paravirtual'
    assert ami_owner_id
    target_name = AmiName(ami_release_name, ubuntu_release_name,
                          virtualization_type, mapr_version, role)
    for image in ec2.get_all_images(owners=[ami_owner_id]):
        if image.name == target_name:
            return image
    return None
Returns AMI satisfying provided constraints.
def cache_file(self, template):
    """Cache a file from the salt master."""
    saltpath = salt.utils.url.create(template)
    self.file_client().get_file(saltpath, '', True, self.saltenv)
Cache a file from the salt master
def make_middleware(app=None, *args, **kw):
    """Return *app* wrapped in ``iWSGIMiddleware``."""
    return iWSGIMiddleware(app, *args, **kw)
Given an app, return that app wrapped in iWSGIMiddleware
def get_earth_radii(self):
    """Get earth radii from the prologue.

    Returns:
        Equatorial radius, polar radius [m]
    """
    earth_model = self.prologue['GeometricProcessing']['EarthModel']
    equatorial = earth_model['EquatorialRadius'] * 1000
    polar = (earth_model['NorthPolarRadius'] +
             earth_model['SouthPolarRadius']) / 2.0 * 1000
    return equatorial, polar
Get earth radii from prologue Returns: Equatorial radius, polar radius [m]
def _check_sample(in_bam, rgnames):
    """Ensure the input BAM's read-group sample names match the configured sample.

    Raises ValueError for hard mismatches; prints a warning for multiple
    read groups.
    """
    with pysam.Samfile(in_bam, "rb") as bamfile:
        rg = bamfile.header.get("RG", [{}])
        msgs = []
        warnings = []
        if len(rg) > 1:
            warnings.append("Multiple read groups found in input BAM. Expect single RG per BAM.")
        if len(rg) == 0:
            msgs.append("No read groups found in input BAM. Expect single RG per BAM.")
        if len(rg) > 0 and any(x.get("SM") != rgnames["sample"] for x in rg):
            msgs.append("Read group sample name (SM) does not match configuration `description`: %s vs %s"
                        % (rg[0].get("SM"), rgnames["sample"]))
        if msgs:
            raise ValueError("Problems with pre-aligned input BAM file: %s\n" % in_bam
                             + "\n".join(msgs)
                             + "\nSetting `bam_clean: fixrg`\n"
                               "in the configuration can often fix this issue.")
        if warnings:
            print("*** Potential problems in input BAM compared to reference:\n%s\n"
                  % "\n".join(warnings))
Ensure input sample name matches expected run group names.
def replace_policy(self, scaling_group, policy, name, policy_type, cooldown,
                   change=None, is_percent=False, desired_capacity=None,
                   args=None):
    """Replace an existing policy in full.

    All attributes must be specified; pass None for any optional
    attribute you wish to delete.
    """
    policy_id = utils.get_id(policy)
    group_id = utils.get_id(scaling_group)
    uri = "/%s/%s/policies/%s" % (self.uri_base, group_id, policy_id)
    body = self._create_policy_body(
        name=name, policy_type=policy_type, cooldown=cooldown, change=change,
        is_percent=is_percent, desired_capacity=desired_capacity, args=args)
    # NOTE(review): the PUT response is received but intentionally not returned.
    resp, resp_body = self.api.method_put(uri, body=body)
Replace an existing policy. All of the attributes must be specified. If you wish to delete any of the optional attributes, pass them in as None.
def reset_spent_time(self, **kwargs):
    """Reset the time spent working on the object.

    Args:
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabTimeTrackingError: If the time tracking update cannot be done
    """
    path = '%s/%s/reset_spent_time' % (self.manager.path, self.get_id())
    return self.manager.gitlab.http_post(path, **kwargs)
Resets the time spent working on the object. Args: **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabTimeTrackingError: If the time tracking update cannot be done
def resample(samples, oldsr, newsr):
    """Resample *samples* from *oldsr* to *newsr*.

    Tries each backend in order and returns the first non-None result;
    returns None when every backend fails.

    samples: mono or multichannel frames
    oldsr  : original samplerate
    newsr  : new sample rate
    """
    backends = (_resample_samplerate, _resample_scikits, _resample_nnresample,
                _resample_obspy, _resample_scipy)
    for backend in backends:
        result = backend(samples, oldsr, newsr)
        if result is not None:
            return result
    return None
Resample `samples` with given samplerate `sr` to new samplerate `newsr` samples: mono or multichannel frames oldsr : original samplerate newsr : new sample rate Returns: the new samples
def categorize(self, categories, default=None):
    """Replace discrete input values with the supplied *categories*.

    Args:
        categories: List or dict of categories to map inputs to
        default: Default value to assign if value not in categories
    """
    return dim(self, categorize, categories=categories, default=default)
Replaces discrete values with supplied categories Replaces discrete values in input array into a fixed set of categories defined either as a list or dictionary. Args: categories: List or dict of categories to map inputs to default: Default value to assign if value not in categories
def fetchref(self, ref):
    """Fetch a particular git ref from origin; return the fetched reference."""
    log.debug('[%s] Fetching ref: %s', self.name, ref)
    return self.repo.remotes.origin.fetch(ref).pop().ref
Fetch a particular git ref.
def set_global(cls, user_agent=None, user_agent_config_yaml=None,
               user_agent_lookup=None, **kwargs):
    """Set the global user agent string.

    Args:
        user_agent (Optional[str]): User agent string; prefixed internally.
        user_agent_config_yaml (Optional[str]): Path to YAML user agent
            configuration (ignored if user_agent supplied).
        user_agent_lookup (Optional[str]): Lookup key for the YAML
            configuration (ignored if user_agent supplied).

    Returns:
        None
    """
    cls.user_agent = cls._create(user_agent, user_agent_config_yaml,
                                 user_agent_lookup, **kwargs)
Set global user agent string Args: user_agent (Optional[str]): User agent string. HDXPythonLibrary/X.X.X- is prefixed. user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml. user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied. Returns: None
def appndd(item, cell):
    """Append *item* (a float or iterable of floats) to a double precision cell.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/appndd_c.html

    :param item: The item to append.
    :param cell: The cell to append to.
    """
    assert isinstance(cell, stypes.SpiceCell)
    values = item if hasattr(item, "__iter__") else [item]
    for value in values:
        libspice.appndd_c(ctypes.c_double(value), cell)
Append an item to a double precision cell. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/appndd_c.html :param item: The item to append. :type item: Union[float,Iterable[float]] :param cell: The cell to append to. :type cell: spiceypy.utils.support_types.SpiceCell
def Save(session, filename=None):
    """Pickle *session* to *filename* and return the written filename.

    When *filename* is omitted, a timestamped
    ``androguard_session_<DATE>.ag`` file is created in the current
    working directory. Existing files are overwritten without asking.
    Returns None when the session could not be written.
    """
    if not filename:
        filename = "androguard_session_{:%Y-%m-%d_%H%M%S}.ag".format(
            datetime.datetime.now())
    if os.path.isfile(filename):
        # Bug fix: the original never passed `filename` to format(), so
        # it logged a literal "{} already exists, overwriting!".
        log.warning("%s already exists, overwriting!", filename)
    reclimit = sys.getrecursionlimit()
    sys.setrecursionlimit(50000)
    saved = False
    try:
        with open(filename, "wb") as fd:
            pickle.dump(session, fd)
        saved = True
    except RecursionError:
        log.exception("Recursion Limit hit while saving. "
                      "Current Recursion limit: {}. "
                      "Please report this error!".format(sys.getrecursionlimit()))
        os.unlink(filename)
    sys.setrecursionlimit(reclimit)
    return filename if saved else None
save your session to use it later. Returns the filename of the written file. If not filename is given, a file named `androguard_session_<DATE>.ag` will be created in the current working directory. `<DATE>` is a timestamp with the following format: `%Y-%m-%d_%H%M%S`. This function will overwrite existing files without asking. If the file could not written, None is returned. example:: s = session.Session() session.Save(s, "msession.ag") :param session: A Session object to save :param filename: output filename to save the session :type filename: string
def _monomers(self):
    """Dispatch the monomers assembly step for each whitelisted statement."""
    for statement in self.statements:
        if _is_whitelisted(statement):
            self._dispatch(statement, 'monomers', self.agent_set)
Calls the appropriate monomers method based on policies.
def pipe_truncate(context=None, _INPUT=None, conf=None, **kwargs):
    """An operator that returns a specified number of items from the top of
    a feed. Not loopable.

    :param context: pipe2py.Context object
    :param _INPUT: pipe2py module-like iterable of items
    :param conf: dict with 'start' (starting location) and 'count'
        (desired feed length) entries
    :return: generator of items (or _INPUT unchanged when pass-through
        is signalled)
    """
    funcs = get_splits(None, conf, **cdicts(opts, kwargs))
    pieces = funcs[0]()
    _pass = funcs[2]()

    if _pass:
        return _INPUT

    # 'start' is optional in the configuration; default to the beginning.
    try:
        start = int(pieces.start)
    except AttributeError:
        start = 0

    return islice(_INPUT, start, start + int(pieces.count))
An operator that returns a specified number of items from the top of a feed. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) kwargs -- terminal, if the truncation value is wired in conf : { 'start': {'type': 'number', value': <starting location>} 'count': {'type': 'number', value': <desired feed length>} } Returns ------- _OUTPUT : generator of items
def _family_notes_path(family, data_dir):
    """Form a path to the notes file for a family.

    :param family: family name (matched case-insensitively)
    :param data_dir: base data directory, normalized via fix_data_dir
    :raises RuntimeError: if the family is not known in data_dir
    :return: path to the ``NOTES.<family>`` file
    """
    data_dir = fix_data_dir(data_dir)
    family = family.lower()
    # Idiom fix: `family not in` instead of `not family in`.
    if family not in get_families(data_dir):
        raise RuntimeError("Family '{}' does not exist".format(family))
    # family is already lower-cased above; no need to lower it again.
    file_name = 'NOTES.' + family
    return os.path.join(data_dir, file_name)
Form a path to the notes for a family
def cgv2el(center, vec1, vec2):
    """
    Form a SPICE ellipse from a center vector and two generating vectors.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cgv2el_c.html

    :param center: Center Vector (3-element array of floats)
    :param vec1: Vector 1 (3-element array of floats)
    :param vec2: Vector 2 (3-element array of floats)
    :return: Ellipse
    :rtype: spiceypy.utils.support_types.Ellipse
    """
    c_center, c_vec1, c_vec2 = (stypes.toDoubleVector(v)
                                for v in (center, vec1, vec2))
    ellipse = stypes.Ellipse()
    libspice.cgv2el_c(c_center, c_vec1, c_vec2, ctypes.byref(ellipse))
    return ellipse
Form a SPICE ellipse from a center vector and two generating vectors. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cgv2el_c.html :param center: Center Vector :type center: 3-Element Array of floats :param vec1: Vector 1 :type vec1: 3-Element Array of floats :param vec2: Vector 2 :type vec2: 3-Element Array of floats :return: Ellipse :rtype: spiceypy.utils.support_types.Ellipse
def metaclass(*metaclasses):
    """Create the class using all metaclasses.

    Args:
        metaclasses: A tuple of metaclasses that will be used to generate
            and replace a specified class.

    Returns:
        A decorator that will recreate the class using the specified
        metaclasses (combined with the class's own metaclass).
    """
    def decorator(cls):
        # De-duplicate while preserving order; the class's existing
        # metaclass goes last.
        bases = list(collections.OrderedDict.fromkeys(metaclasses + (type(cls),)))
        combined = bases[0]
        for base in bases[1:]:
            # Successively derive a metaclass combining all the bases.
            class combined(base, combined):
                pass
        return six.add_metaclass(combined)(cls)
    return decorator
Create the class using all metaclasses. Args: metaclasses: A tuple of metaclasses that will be used to generate and replace a specified class. Returns: A decorator that will recreate the class using the specified metaclasses.
def add_resource(mt_file, ref, cache):
    """Add a resources entry to a Metatab document, listing the URL and
    adding one entry per discovered resource.

    :param mt_file: a MetapackDoc, or anything MetapackDoc() accepts
        (path/file) from which to build one
    :param ref: URL of the resource(s) to add
    :param cache: download cache, passed through to add_single_resource
    """
    if isinstance(mt_file, MetapackDoc):
        doc = mt_file
    else:
        doc = MetapackDoc(mt_file)

    if not 'Resources' in doc:
        doc.new_section('Resources')

    # Ensure the standard argument columns exist, dropping empty entries.
    doc['Resources'].args = [e for e in set(doc['Resources'].args + ['Name', 'StartLine', 'HeaderLines', 'Encoding']) if e]

    # Shared across add_single_resource calls so generated names stay unique.
    seen_names = set()

    u = parse_app_url(ref)

    # NOTE(review): non-file URLs are expanded two listing levels deep --
    # presumably members inside listed containers; confirm against the
    # parse_app_url implementation.
    if u.proto == 'file':
        entries = u.list()
    else:
        entries = [ssu for su in u.list() for ssu in su.list()]

    errors = []

    for e in entries:
        if not add_single_resource(doc, e, cache=cache, seen_names=seen_names):
            errors.append(e)

    if errors:
        prt()
        warn("Found, but failed to add these urls:")
        for e in errors:
            print(' ', e)

    write_doc(doc, mt_file)
Add a resources entry, downloading and intuiting the file, replacing entries with the same reference
def symlink_to_bin(self, name, path):
    """Symlink the object at *path* to *name* in the bin folder, then make
    the new bin entry user-readable and user-executable."""
    self.__symlink_dir("bin", name, path)
    link = os.path.join(self.root_dir, "bin", name)
    mode = os.stat(path).st_mode | stat.S_IXUSR | stat.S_IRUSR
    os.chmod(link, mode)
Symlink an object at path to name in the bin folder.
def do_grep(self, params):
    """Print znodes whose value matches the given text (grep command)."""
    path, content, show = params.path, params.content, params.show_matches
    self.grep(path, content, 0, show)
\x1b[1mNAME\x1b[0m grep - Prints znodes with a value matching the given text \x1b[1mSYNOPSIS\x1b[0m grep [path] <content> [show_matches] \x1b[1mOPTIONS\x1b[0m * path: the path (default: cwd) * show_matches: show the content that matched (default: false) \x1b[1mEXAMPLES\x1b[0m > grep / unbound true /passwd: unbound:x:992:991:Unbound DNS resolver:/etc/unbound:/sbin/nologin /copy/passwd: unbound:x:992:991:Unbound DNS resolver:/etc/unbound:/sbin/nologin
def sqlalchemy_escape(val, escape_char, special_chars):
    r"""Escape a string for use in a SQL LIKE pattern.

    Every character found in ``special_chars`` -- and the escape character
    itself -- is prefixed with ``escape_char``.

    >>> sqlalchemy_escape("text_table", "\\", "%_")
    'text\_table'
    """
    if sys.version_info[:2] >= (3, 0):
        assert isinstance(val, str)
    else:
        assert isinstance(val, basestring)
    # Hoist the loop-invariant set of characters needing escaping; the
    # original rebuilt this concatenation for every input character.
    needs_escape = special_chars + escape_char
    return ''.join(escape_char + c if c in needs_escape else c for c in val)
Escape a string according for use in LIKE operator >>> sqlalchemy_escape("text_table", "\\", "%_") 'text\_table'
def init_logging(
    # NOTE(review): this default is evaluated once at import time, so the
    # temp dir is fixed when the module loads -- confirm that is intended.
    log_dir=tempfile.gettempdir(),
    format="[%(asctime)s][%(levelname)s] %(name)s:%(lineno)s - %(message)s",
    level=logging.INFO,
):
    """Configure logging to output to the provided log_dir.

    Uses a nested directory whose name is the current timestamp.  If
    logging was already initialized (Meta.log_path set), this is a no-op
    apart from an informational message.

    :param log_dir: The directory to store logs in.
    :type log_dir: str
    :param format: The logging format string to use.
    :type format: str
    :param level: The logging level to use, e.g., logging.INFO.
    """
    if not Meta.log_path:
        # One fresh timestamped subdirectory per initialization.
        dt = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        log_path = os.path.join(log_dir, dt)
        if not os.path.exists(log_path):
            os.makedirs(log_path)
        # Log both to a file in the new directory and to the console.
        logging.basicConfig(
            format=format,
            level=level,
            handlers=[
                logging.FileHandler(os.path.join(log_path, "fonduer.log")),
                logging.StreamHandler(),
            ],
        )
        logger.info(f"Setting logging directory to: {log_path}")
        Meta.log_path = log_path
    else:
        logger.info(
            f"Logging was already initialized to use {Meta.log_path}. "
            "To configure logging manually, call fonduer.init_logging before "
            "initialiting Meta."
        )
Configures logging to output to the provided log_dir. Will use a nested directory whose name is the current timestamp. :param log_dir: The directory to store logs in. :type log_dir: str :param format: The logging format string to use. :type format: str :param level: The logging level to use, e.g., logging.INFO.
def getitem(self, index):
    """Direct access by index, bypassing self.selection; objects are cached
    when object caching is enabled."""
    if index >= getattr(self.tree, self.size):
        raise IndexError(index)
    caching = self.__cache_objects
    if caching and index in self.__cache:
        return self.__cache[index]
    obj = self.tree_object_cls(self.tree, self.name, self.prefix, index)
    if caching:
        self.__cache[index] = obj
    return obj
direct access without going through self.selection
def set_kill_on_exit_mode(bKillOnExit = False):
    """Define what happens to debugged processes when the debugging thread
    dies: kill them (True) or detach from them (False).

    Only affects the calling thread.  Requires Windows XP+/Wine; see
    http://msdn.microsoft.com/en-us/library/ms679307.aspx

    :param bKillOnExit: True to kill processes on debugger-thread death,
        False to detach instead.
    :return: True on success, False on error (e.g. the API is missing on
        this platform, or no debug port exists).
    """
    try:
        win32.DebugSetProcessKillOnExit(bKillOnExit)
    # AttributeError: the API is absent (pre-XP / ReactOS);
    # WindowsError: the call itself failed (e.g. not attached to anything).
    except (AttributeError, WindowsError):
        return False
    return True
Defines the behavior of the debugged processes when the debugging thread dies. This method only affects the calling thread. Works on the following platforms: - Microsoft Windows XP and above. - Wine (Windows Emulator). Fails on the following platforms: - Microsoft Windows 2000 and below. - ReactOS. @type bKillOnExit: bool @param bKillOnExit: C{True} to automatically kill processes when the debugger thread dies. C{False} to automatically detach from processes when the debugger thread dies. @rtype: bool @return: C{True} on success, C{False} on error. @note: This call will fail if a debug port was not created. That is, if the debugger isn't attached to at least one process. For more info see: U{http://msdn.microsoft.com/en-us/library/ms679307.aspx}
def reply_to(self) -> Optional[Sequence[AddressHeader]]:
    """The ``Reply-To`` header, or ``None`` when absent."""
    try:
        raw = self[b'reply-to']
    except KeyError:
        return None
    return cast(Sequence[AddressHeader], raw)
The ``Reply-To`` header.
def cut_across_axis(self, dim, minval=None, maxval=None):
    """Cut the mesh by one or two planes perpendicular to an axis,
    discarding vertices outside the kept region.

    `dim` selects the axis (0=x, 1=y, 2=z); `minval` / `maxval` bound the
    portion of that axis to keep (either may be None).

    Return the original indices of the kept vertices.
    """
    coords = self.v[:, dim]
    keep = np.ones(len(self.v), dtype=bool)
    if minval is not None:
        keep &= coords >= minval
    if maxval is not None:
        keep &= coords <= maxval
    vertex_indices = np.flatnonzero(keep)
    self.keep_vertices(vertex_indices)
    return vertex_indices
Cut the mesh by a plane, discarding vertices that lie behind that plane. Or cut the mesh by two parallel planes, discarding vertices that lie outside them. The region to keep is defined by an axis of perpendicularity, specified by `dim`: 0 means x, 1 means y, 2 means z. `minval` and `maxval` indicate the portion of that axis to keep. Return the original indices of the kept vertices.
def _maybe_decompress_body(self): if self.content_encoding: if self.content_encoding in self._CODEC_MAP.keys(): module_name = self._CODEC_MAP[self.content_encoding] self.logger.debug('Decompressing with %s', module_name) module = self._maybe_import(module_name) return module.decompress(self._message.body) self.logger.debug('Unsupported content-encoding: %s', self.content_encoding) return self._message.body
Attempt to decompress the message body passed in using the named compression module, if specified. :rtype: bytes
def methods(method_list):
    """A decorator marking which HTTP methods a resource handler accepts.

    Returns a handler-dispatch mapping keyed by upper-cased method name;
    requests for unmapped methods are not handled by this handler.
    """
    assert isinstance(method_list, list) and len(method_list) > 0

    def deco(handler):
        table = _HandleRequestDict()
        for verb in method_list:
            table[verb.upper()] = handler
        return table

    return deco
A decorator to mark HTTP methods a resource can handle. For example:: class SomeRes(UrlResource): ... @methods(['GET', 'HEAD']) def handle_request(self, req): ... @handle_request.methods(['POST']) def handle_post(self, req): ... In this case, GET and HEAD requests will be dispatched to ``handle_request``, and POST requests will be dispatched to ``handle_post``. All other request methods will cause a *501 Not Implemented* error.
def get_field_schema_validations(field: Field) -> Dict[str, Any]:
    """Get the JSON Schema validation keywords for a ``field`` with an
    annotation of a Pydantic ``Schema`` with validation arguments.

    :param field: the field whose Schema attributes are translated
    :return: mapping of JSON Schema keyword -> value
    """
    f_schema: Dict[str, Any] = {}
    # String-like fields: _str_types_attrs holds (attr name, expected type,
    # JSON Schema keyword) triples; only attributes of the expected type
    # are emitted.
    if lenient_issubclass(field.type_, (str, bytes)):
        for attr_name, t, keyword in _str_types_attrs:
            attr = getattr(field.schema, attr_name, None)
            if isinstance(attr, t):
                f_schema[keyword] = attr
    # Numeric fields; bool is excluded explicitly since it subclasses int.
    if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool):
        for attr_name, t, keyword in _numeric_types_attrs:
            attr = getattr(field.schema, attr_name, None)
            if isinstance(attr, t):
                f_schema[keyword] = attr
    schema = cast('Schema', field.schema)
    # User-supplied extra keywords extend/override the generated ones.
    if schema.extra:
        f_schema.update(schema.extra)
    return f_schema
Get the JSON Schema validation keywords for a ``field`` with an annotation of a Pydantic ``Schema`` with validation arguments.
def map_keys_to_values(l: List[Any],
                       d: Dict[Any, Any],
                       default: Any = None,
                       raise_if_missing: bool = False,
                       omit_if_missing: bool = False) -> List[Any]:
    """
    The ``d`` dictionary contains a ``key -> value`` mapping.  We start
    with a list of potential keys in ``l``, and return a list of the
    corresponding values -- substituting ``default`` if any are missing,
    or raising :exc:`ValueError` if ``raise_if_missing`` is true (checked
    first), or omitting the entry if ``omit_if_missing`` is true.

    (Doc fix: the code has always raised ``ValueError``, not ``KeyError``
    as previously documented.)
    """
    # Sentinel distinguishes "key absent" from "key present with value
    # None"; the original performed up to three lookups per key.
    missing = object()
    result = []
    for k in l:
        v = d.get(k, missing)
        if v is missing:
            if raise_if_missing:
                raise ValueError("Missing key: " + repr(k))
            if omit_if_missing:
                continue
            v = default
        result.append(v)
    return result
The ``d`` dictionary contains a ``key -> value`` mapping. We start with a list of potential keys in ``l``, and return a list of corresponding values -- substituting ``default`` if any are missing, or raising :exc:`KeyError` if ``raise_if_missing`` is true, or omitting the entry if ``omit_if_missing`` is true.
def do_watch(self, *args):
    """Watch consumed capacity on the Dynamo tables matching the given
    glob patterns."""
    if not self.engine.cached_descriptions:
        self.engine.describe_all()
    all_tables = list(self.engine.cached_descriptions)

    # Collect matches per pattern, sorted, without duplicates, preserving
    # first-seen order across patterns.
    tables = []
    for pattern in args:
        matched = {t for t in all_tables if fnmatch(t, pattern)}
        for name in sorted(matched):
            if name not in tables:
                tables.append(name)

    Monitor(self.engine, tables).start()
Watch Dynamo tables consumed capacity
def from_iothub_connection_string(cls, conn_str, **kwargs):
    """Create an EventHubClient from an IoTHub connection string.

    :param conn_str: The connection string.
    :type conn_str: str

    Additional keyword arguments are forwarded to the client constructor.
    """
    address, policy, key, _ = _parse_conn_str(conn_str)
    # The hub name is the host's first dotted component.
    hub_name, _sep, _rest = address.partition('.')
    username = "{0}@sas.root.{1}".format(policy, hub_name)
    password = _generate_sas_token(address, policy, key)
    client = cls("amqps://" + address, username=username, password=password, **kwargs)
    # Keep both the raw IoT credentials and the derived SAS credentials.
    client._auth_config = {
        'iot_username': policy,
        'iot_password': key,
        'username': username,
        'password': password,
    }
    return client
Create an EventHubClient from an IoTHub connection string. :param conn_str: The connection string. :type conn_str: str :param debug: Whether to output network trace logs to the logger. Default is `False`. :type debug: bool :param http_proxy: HTTP proxy settings. This must be a dictionary with the following keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). Additionally the following keys may also be present: 'username', 'password'. :type http_proxy: dict[str, Any] :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. :type auth_timeout: int
def send_key(self, key):
    """Send a key command to the TV; accepts a Keys enum member or a raw
    key-code string."""
    code = key.value if isinstance(key, Keys) else key
    payload = '<X_KeyEvent>{}</X_KeyEvent>'.format(code)
    self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL, 'X_SendKey',
                      payload)
Send a key command to the TV.
def update_repository_method_acl(namespace, method, snapshot_id, acl_updates):
    """Set method permissions for a method in the methods repository.

    Args:
        namespace (str): Methods namespace
        method (str): method name
        snapshot_id (int): snapshot_id of the method
        acl_updates (list(dict)): List of access control updates

    Swagger:
        https://api.firecloud.org/#!/Method_Repository/setMethodACL
    """
    uri = "methods/{ns}/{name}/{sid}/permissions".format(
        ns=namespace, name=method, sid=snapshot_id)
    return __post(uri, json=acl_updates)
Set method permissions. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name snapshot_id (int): snapshot_id of the method acl_updates (list(dict)): List of access control updates Swagger: https://api.firecloud.org/#!/Method_Repository/setMethodACL
def load_feedback():
    """Open the existing feedback file and return its parsed JSON content.

    Returns an empty dict when the file is absent or empty, and a
    crash-result dict when the file contains invalid JSON.
    """
    if os.path.exists(_feedback_file):
        # `with` guarantees the handle is closed even if the read fails;
        # the original leaked the descriptor on error.
        with open(_feedback_file, 'r') as f:
            cont = f.read()
    else:
        cont = '{}'
    try:
        return json.loads(cont) if cont else {}
    except ValueError:
        return {"result": "crash",
                "text": "Feedback file has been modified by user !"}
Open existing feedback file
def without(self, other):
    """Subtract another Region by a difference operation on their
    pixlists.  Requires both regions to have the same maxdepth.

    :param other: the Region to subtract
    :raises AssertionError: if the maxdepths differ
    """
    if self.maxdepth != other.maxdepth:
        raise AssertionError("Regions must have the same maxdepth")
    self._demote_all()
    demoted = set(other.get_demoted())
    self.pixeldict[self.maxdepth].difference_update(demoted)
    self._renorm()
Subtract another Region by performing a difference operation on their pixlists. Requires both regions to have the same maxdepth. Parameters ---------- other : :class:`AegeanTools.regions.Region` The region to be combined.
def fingerprint_target(self, target):
    """Return the target's fingerprint labeled with this instance's class
    name, or None when no fingerprint can be computed."""
    fingerprint = self.compute_fingerprint(target)
    if not fingerprint:
        return None
    return '{fingerprint}-{name}'.format(fingerprint=fingerprint,
                                         name=type(self).__name__)
Consumers of subclass instances call this to get a fingerprint labeled with the name
def apply_ir_heuristics(irs, node):
    """Apply a set of heuristics to improve SlithIR.

    Runs a fixed sequence of transformations over the IR operations:
    value/gas integration, type propagation + call conversion, dead-code
    removal, then reference-origin discovery.

    :param irs: list of IR operations for ``node``
    :param node: the CFG node owning ``irs``
    :return: the transformed list of IR operations
    """
    irs = integrate_value_gas(irs)
    irs = propagate_type_and_convert_call(irs, node)
    irs = remove_unused(irs)
    # Mutates/annotates in place; does not return a new list.
    find_references_origin(irs)
    return irs
Apply a set of heuristic to improve slithIR
def insert_entry(self, entry, taxids):
    """Insert a UniProt entry into the session.

    :param entry: XML ``entry`` node
    :param taxids: Optional[iter[int]] NCBI taxonomy IDs; when given, only
        entries whose taxid is contained in it are inserted
    """
    entry_dict = entry.attrib
    # Convert the ISO date strings to datetime objects before storing.
    entry_dict['created'] = datetime.strptime(entry_dict['created'], '%Y-%m-%d')
    entry_dict['modified'] = datetime.strptime(entry_dict['modified'], '%Y-%m-%d')
    taxid = self.get_taxid(entry)
    if taxids is None or taxid in taxids:
        entry_dict = self.update_entry_dict(entry, entry_dict, taxid)
        entry_obj = models.Entry(**entry_dict)
        # Dropping the dict before session.add -- presumably to reduce
        # peak memory during bulk loads; TODO confirm intent.
        del entry_dict
        self.session.add(entry_obj)
Insert UniProt entry. :param entry: XML node entry :param taxids: Optional[iter[int]] NCBI taxonomy IDs
def process_ticket(self):
    """Validate the ticket from the SAML XML body.

    :raises SamlValidateError: if the ticket is not found or not valid,
        or if we fail to parse the posted XML.
    :return: a ticket object
    :rtype: :class:`models.Ticket<cas_server.models.Ticket>`
    """
    try:
        # XML layout: root -> [.., AuthnRequest]; the ticket string is the
        # text of the request's first child element.
        auth_req = self.root.getchildren()[1].getchildren()[0]
        ticket = auth_req.getchildren()[0].text
        ticket = models.Ticket.get(ticket)
        # The ticket must have been issued for the requested service.
        if ticket.service != self.target:
            raise SamlValidateError(
                u'AuthnFailed',
                u'TARGET %s does not match ticket service' % self.target
            )
        return ticket
    except (IndexError, KeyError):
        # The posted XML did not have the expected structure.
        raise SamlValidateError(u'VersionMismatch')
    except Ticket.DoesNotExist:
        # Ticket string did not resolve to a known ticket class.
        raise SamlValidateError(
            u'AuthnFailed',
            u'ticket %s should begin with PT- or ST-' % ticket
        )
    except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist):
        raise SamlValidateError(u'AuthnFailed', u'ticket %s not found' % ticket)
validate ticket from SAML XML body :raises: SamlValidateError: if the ticket is not found or not valid, or if we fail to parse the posted XML. :return: a ticket object :rtype: :class:`models.Ticket<cas_server.models.Ticket>`
def group(requestContext, *seriesLists):
    """Concatenate an arbitrary number of seriesLists into a single
    seriesList, e.g. to pass several lists to a one-list function."""
    return [series for seriesList in seriesLists for series in seriesList]
Takes an arbitrary number of seriesLists and adds them to a single seriesList. This is used to pass multiple seriesLists to a function which only takes one.
def save(self, notes=None):
    """Save all changes back to Redmine.

    :param notes: optional journal note recorded with the update; falsy
        values (None, empty string) are ignored
    """
    if notes:
        self._changes['notes'] = notes
    super(Issue, self).save()
Save all changes back to Redmine with optional notes.
def get_tile_properties_by_layer(self, layer):
    """Get the tile properties of each GID in layer.

    Validation is performed eagerly: the original body was a generator, so
    a bad layer number only raised once iteration began, and the ``assert``
    it relied on disappears under ``python -O``.  The ValueError now also
    carries its message instead of being raised empty.

    :param layer: layer number (non-negative integer)
    :raises ValueError: if ``layer`` is not a non-negative integer
    :rtype: iterator of (gid, properties) tuples
    """
    valid = True
    try:
        layer = int(layer)
    except (TypeError, ValueError):
        valid = False
    if not valid or layer < 0:
        msg = "Layer must be a positive integer. Got {0} instead."
        logger.debug(msg.format(type(layer)))
        raise ValueError(msg.format(type(layer)))

    def _iterate():
        # Collect the distinct GIDs appearing anywhere in the layer, then
        # yield only those that have properties.
        coords = product(range(self.width), range(self.height))
        layergids = set(self.layers[layer].data[y][x] for x, y in coords)
        for gid in layergids:
            try:
                yield gid, self.tile_properties[gid]
            except KeyError:
                continue

    return _iterate()
Get the tile properties of each GID in layer :param layer: layer number :rtype: iterator of (gid, properties) tuples