code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def set_secondary_state(self, value):
    """Set the value for 'secondary_state' on the underlying manager.

    Raises:
        ValueError: if ``value`` is not one of the recognised states.
        RuntimeError: if the manager has not been started yet.
    """
    allowed = (_STATE_RUNNING, _STATE_SHUTDOWN, _STATE_FORCED_SHUTDOWN)
    if value not in allowed:
        raise ValueError(
            "State {!r} is invalid - needs to be one of _STATE_RUNNING, _STATE_SHUTDOWN, or _STATE_FORCED_SHUTDOWN".format(
                value)
        )
    if self.manager is None:
        raise RuntimeError("Manager not started")
    self.manager.set_state(value)
Sets the value for 'secondary_state'.
def wait(value, must_be_child=False):
    """Wait for a possibly asynchronous value to complete.

    If the current greenlet has a parent, switch to it with ``value``;
    otherwise return ``value`` directly. When ``must_be_child`` is set,
    waiting on the main greenlet raises ``MustBeInChildGreenlet``.
    """
    parent = getcurrent().parent
    if must_be_child and not parent:
        raise MustBeInChildGreenlet('Cannot wait on main greenlet')
    if parent:
        return parent.switch(value)
    return value
Wait for a possible asynchronous value to complete.
def new_file(self, path, track_idx, copy_file=False):
    """Add a new audio file to the corpus with the given data.

    Parameters:
        path (str): Path of the file to add.
        track_idx (str): The id to associate the file-track with.
        copy_file (bool): If True the file is copied into the dataset
            folder, otherwise the given path is used directly.

    Returns:
        FileTrack: The newly added file.
    """
    new_file_idx = track_idx
    new_file_path = os.path.abspath(path)
    # If the id is already taken, derive a unique one.
    if new_file_idx in self._tracks.keys():
        new_file_idx = naming.index_name_if_in_list(new_file_idx, self._tracks.keys())
    if copy_file:
        if not os.path.isdir(self.path):
            raise ValueError('To copy file the dataset needs to have a path.')
        __, ext = os.path.splitext(path)
        new_file_folder = os.path.join(self.path, DEFAULT_FILE_SUBDIR)
        # Copied file is renamed after the (possibly uniquified) track id.
        new_file_path = os.path.join(new_file_folder, '{}{}'.format(new_file_idx, ext))
        os.makedirs(new_file_folder, exist_ok=True)
        shutil.copy(path, new_file_path)
    new_file = tracks.FileTrack(new_file_idx, new_file_path)
    self._tracks[new_file_idx] = new_file
    return new_file
Adds a new audio file to the corpus with the given data. Parameters: path (str): Path of the file to add. track_idx (str): The id to associate the file-track with. copy_file (bool): If True the file is copied to the data set folder, otherwise the given path is used directly. Returns: FileTrack: The newly added file.
def merge(self, others, merge_conditions, common_ancestor=None):
    """Merge other SimAbstractMemory instances into this one.

    Regions present on both sides are merged region-by-region; regions
    only present in the other memory are adopted wholesale.

    Returns:
        bool: True if any merging occurred.
    """
    changed = False
    for other in others:
        for region_id, region in other._regions.items():
            if region_id in self._regions:
                changed |= self._regions[region_id].merge(
                    [region], merge_conditions,
                    common_ancestor=common_ancestor
                )
            else:
                self._regions[region_id] = region
                changed = True
    return changed
Merge this memory with other SimAbstractMemory instances, returning True if any merging occurred.
def detect_complexity(bam_in, genome, out):
    """Compute genome coverage of small RNA from a BAM file.

    Writes a per-genome coverage table (``<bam>_cov.tsv``) into ``out``
    and logs the total genome fraction covered by sequences.

    :param bam_in: path to input BAM file
    :param genome: path to genome FASTA (its ``.fai`` index is used);
        skipped when falsy
    :param out: output directory
    """
    if not genome:
        logger.info("No genome given. skipping.")
        return None
    out_file = op.join(out, op.basename(bam_in) + "_cov.tsv")
    # Skip the work if the coverage table was already produced.
    if file_exists(out_file):
        return None
    fai = genome + ".fai"
    cov = pybedtools.BedTool(bam_in).genome_coverage(g=fai, max=1)
    cov.saveas(out_file)
    total = 0
    # Row layout follows `bedtools genomecov`: region[1] is depth,
    # region[4] the fraction of bases at that depth — presumably; verify
    # against the pybedtools output format.
    for region in cov:
        if region[0] == "genome" and int(region[1]) != 0:
            total += float(region[4])
    logger.info("Total genome with sequences: %s " % total)
genome coverage of small RNA
def write_length_and_key(fp, value):
    """Write a descriptor key preceded by its length.

    Well-known terms from ``_TERMS`` get a zero length marker; any other
    key is written with its real byte length. Returns bytes written.
    """
    length = 0 if value in _TERMS else len(value)
    total = write_fmt(fp, 'I', length)
    total += write_bytes(fp, value)
    return total
Helper to write descriptor key.
def sorted_product_set(array_a, array_b):
    """Compute the product set of ``array_a`` and ``array_b``, sorted in
    descending order.

    Fix: the original used the Python 2-only ``xrange``; ``np.outer``
    computes the same pairwise products portably and without a Python
    loop. ``np.sort(..., axis=None)`` flattens before sorting, matching
    the original concatenate-then-sort behavior.
    """
    return np.sort(np.outer(array_a, array_b), axis=None)[::-1]
Compute the product set of array_a and array_b and sort it.
def upload(target):
    """Upload the release to a pypi server.

    TODO: Make sure the git directory is clean before allowing a release.

    Args:
        target (str): pypi target as defined in ~/.pypirc
    """
    log.info("Uploading to pypi server <33>{}".format(target))
    with conf.within_proj_dir():
        # Register the package first, then upload the sdist.
        shell.run('python setup.py sdist register -r "{}"'.format(target))
        shell.run('python setup.py sdist upload -r "{}"'.format(target))
Upload the release to a pypi server. TODO: Make sure the git directory is clean before allowing a release. Args: target (str): pypi target as defined in ~/.pypirc
def generate_checks(fact):
    """Given a fact, yield Check objects for matching it.

    Emits a type check first, then a feature check per key/value pair.
    A ``__bind__`` dunder key instead emits a capture under the given
    name; without one, an automatic capture name derived from the
    fact's id() is appended.
    """
    yield TypeCheck(type(fact))
    captured = False
    for key, value in fact.items():
        is_dunder = (isinstance(key, str)
                     and key.startswith('__')
                     and key.endswith('__'))
        if is_dunder and key == '__bind__':
            captured = True
            yield FactCapture(value)
        else:
            yield FeatureCheck(key, value)
    if not captured:
        yield FactCapture("__pattern_%s__" % id(fact))
Given a fact, generate a list of Check objects for checking it.
def persistent_load(self, pid):
    """Reconstruct a GLC object from its persistent ID.

    Required by the unpickler base class; not meant for external use.

    Parameters
    ----------
    pid : persistent ID stored in the pickle file.

    Returns
    ----------
    The reconstructed GLC object.
    """
    if len(pid) == 2:
        # Old-style ID without a memo key: always reconstruct.
        type_tag, filename = pid
        abs_path = _os.path.join(self.gl_temp_storage_path, filename)
        return _get_gl_object_from_persistent_id(type_tag, abs_path)
    type_tag, filename, object_id = pid
    # Memoise by object id so shared objects are rebuilt only once.
    if object_id not in self.gl_object_memo:
        abs_path = _os.path.join(self.gl_temp_storage_path, filename)
        self.gl_object_memo[object_id] = _get_gl_object_from_persistent_id(
            type_tag, abs_path)
    return self.gl_object_memo[object_id]
Reconstruct a GLC object using the persistent ID. This method should not be used externally. It is required by the unpickler super class. Parameters ---------- pid : The persistent ID used in pickle file to save the GLC object. Returns ---------- The GLC object.
def _make_context(context=None):
    """Build the namespace pre-imported into the interactive shell.

    Starts with ``db``/``session``, adds everything from
    ``_iter_context()``, then overlays the caller-supplied ``context``
    mapping (which therefore wins on key collisions).
    """
    base = {'db': db, 'session': db.session}
    base.update(_iter_context())
    if context is not None:
        base.update(context)
    return base
Create the namespace of items already pre-imported when using shell. Accepts a dict with the desired namespace as the key, and the object as the value.
def get(self):
    """Construct an EngagementContextContext for this engagement.

    :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextContext
    :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextContext
    """
    solution = self._solution
    return EngagementContextContext(
        self._version,
        flow_sid=solution['flow_sid'],
        engagement_sid=solution['engagement_sid'],
    )
Constructs a EngagementContextContext :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextContext :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextContext
def format_BLB():
    """Apply the BLB Matplotlib formatting defaults: white figure,
    10pt serif fonts, 1pt axes, and consistent tick geometry."""
    rc("figure", facecolor="white")
    rc('font', family='serif', size=10)
    rc('axes', linewidth=1)
    for axis in ('xtick', 'ytick'):
        rc(axis, labelsize=10)
        rc(axis + '.major', size=4, width=1)
        rc(axis + '.minor', size=2, width=1)
Sets some formatting options in Matplotlib.
def described_as(self, description, *args):
    """Attach a custom failure message to this matcher.

    Positional ``args``, when given, are interpolated into
    ``description`` via ``str.format``. Returns ``self`` for chaining.
    """
    if args:
        description = description.format(*args)
    self.description = description
    return self
Specify a custom message for the matcher
def filter_by_device_owner(query, device_owners=None):
    """Filter ports by device_owner.

    Restricts ``query`` to ports whose device_owner matches one of the
    given prefixes (defaulting to all owners supported by the arista
    ML2 plugin) while excluding every explicitly unsupported owner.
    """
    port = models_v2.Port
    owners = device_owners or utils.SUPPORTED_DEVICE_OWNERS
    supported = [port.device_owner.ilike('%s%%' % owner)
                 for owner in owners]
    unsupported = [port.device_owner.notilike('%s%%' % owner)
                   for owner in utils.UNSUPPORTED_DEVICE_OWNERS]
    return query.filter(and_(*unsupported), or_(*supported))
Filter ports by device_owner Either filter using specified device_owner or using the list of all device_owners supported and unsupported by the arista ML2 plugin
def _order_params(self, data): has_signature = False params = [] for key, value in data.items(): if key == 'signature': has_signature = True else: params.append((key, value)) params.sort(key=itemgetter(0)) if has_signature: params.append(('signature', data['signature'])) return params
Convert params to list with signature as last element :param data: :return:
def dupstack(newtask):
    """Copy the current task's provenance stack onto ``newtask``."""
    current = s_task.varget('provstack')
    s_task.varset('provstack', current.copy(), newtask)
Duplicate the current provenance stack onto another task
def get_model(app_dot_model):
    """Return the Django model class for an ``<app>.<model>`` string.

    Helpful for preventing circular-import errors. For example, the
    default Django user model is ``auth.User`` (app ``auth``, model
    ``User``).

    Raises:
        ValueError: if ``app_dot_model`` is not of the form
            ``<app_name>.<model_name>``.
    """
    parts = app_dot_model.split('.')
    if len(parts) != 2:
        msg = (f'Passed in value \'{app_dot_model}\' was not in the format '
               '`<app_name>.<model_name>`.')
        raise ValueError(msg)
    app, model = parts
    return apps.get_app_config(app).get_model(model)
Returns Django model class corresponding to passed-in `app_dot_model` string. This is helpful for preventing circular-import errors in a Django project. Positional Arguments: ===================== - `app_dot_model`: Django's `<app_name>.<model_name>` syntax. For example, the default Django User model would be `auth.User`, where `auth` is the app and `User` is the model.
def aliasstr(self):
    """Concatenate the namespaced aliases into a comma-separated string
    of their ``repr``s."""
    parts = [repr(self.ns + alias) for alias in self.aliases]
    return ', '.join(parts)
Concatenate the aliases tuple into a string.
def make_request_fn():
    """Build the request function for querying a served model.

    Prefers a Cloud ML Engine backend when
    ``FLAGS.cloud_mlengine_model_name`` is set; otherwise falls back to
    a gRPC request function against ``FLAGS.server``.
    """
    if FLAGS.cloud_mlengine_model_name:
        return serving_utils.make_cloud_mlengine_request_fn(
            credentials=GoogleCredentials.get_application_default(),
            model_name=FLAGS.cloud_mlengine_model_name,
            version=FLAGS.cloud_mlengine_model_version)
    return serving_utils.make_grpc_request_fn(
        servable_name=FLAGS.servable_name,
        server=FLAGS.server,
        timeout_secs=FLAGS.timeout_secs)
Returns a request function.
def _randomString(): return ''.join( random.choice(string.ascii_uppercase + string.digits) for x in range(10))
Random string for message signing
def save(self):
    """Attempt to move the node using the selected target and position.

    On an ``InvalidMove`` the error message is recorded in the form's
    non-field errors and the exception is re-raised, so callers can
    catch it and redisplay the form.

    Fix: replaced the Python 2-only ``except InvalidMove, e`` syntax
    with ``except InvalidMove as e``.
    """
    try:
        self.node.move_to(self.cleaned_data['target'],
                          self.cleaned_data['position'])
        return self.node
    except InvalidMove as e:
        self.errors[NON_FIELD_ERRORS] = ErrorList(e)
        raise
Attempts to move the node using the selected target and position. If an invalid move is attempted, the related error message will be added to the form's non-field errors and the error will be re-raised. Callers should attempt to catch ``InvalidMove`` to redisplay the form with the error, should it occur.
def _onOffset(self, dt, businesshours): if self.n >= 0: op = self._prev_opening_time(dt) else: op = self._next_opening_time(dt) span = (dt - op).total_seconds() if span <= businesshours: return True else: return False
Slight speedups using calculated values.
def OnTogglePlay(self, event):
    """Toggle the video between playing and paused."""
    playing = self.player.get_state() == vlc.State.Playing
    if playing:
        self.player.pause()
    else:
        self.player.play()
    event.Skip()
Toggles the video status between play and hold
def add_item(self, item):
    """Add a new item to the model.

    The ``item`` dict is passed as keyword arguments to the :func:Item
    factory function; the item's parent is set to this model first.

    Returns:
        Item: the newly created item.
    """
    # Qt requires wrapping row insertion in begin/endInsertRows so that
    # attached views stay consistent.
    self.beginInsertRows(QtCore.QModelIndex(), self.rowCount(), self.rowCount())
    item["parent"] = self
    item = Item(**item)
    self.items.append(item)
    self.endInsertRows()
    # Propagate per-item data changes through the model's signal.
    item.__datachanged__.connect(self._dataChanged)
    return item
Add new item to model Each keyword argument is passed to the :func:Item factory function.
def read_packets(self):
    """Read length-prefixed packets from the socket and dispatch each
    to its per-packet parser class until the connection stops."""
    while self.running:
        # First two bytes: little-endian packet length (includes itself).
        packet_length = self.client.recv(2)
        if len(packet_length) < 2:
            # Short read — the connection has closed.
            self.stop()
            continue
        packet_length = struct.unpack("<h", packet_length)[0] - 2
        data = self.client.recv(packet_length)
        packno = data[0]
        try:
            # Parser classes are named e.g. "Packet2Parser" (uppercase hex id).
            parser = "Packet" + format(packno, 'x').upper() + "Parser"
            packet_class = getattr(packets, parser)
            packet_class().parse(self.world, self.player, data, self._evman)
        except AttributeError as e:
            # No parser implemented for this packet type; skip it.
            pass
        if packno == 2:
            # Packet 2 signals a disconnect; stop reading.
            self.stop()
            continue
Read packets from the socket and parse them
def _lazy_listen(self): if all([ self._loop, not self.running, self._subscriptions or (self._pending and not self._pending.empty()), ]): self._task = self._loop.create_task(self._listen())
Ensures that the listener task only runs when actually needed. This function is a no-op if any of the preconditions is not met. Preconditions are: * The application is running (self._loop is set) * The task is not already running * There are subscriptions: either pending, or active
def _write_gen_model_stats(self, iteration:int)->None: "Writes gradient statistics for generator to Tensorboard." generator = self.learn.gan_trainer.generator self.stats_writer.write(model=generator, iteration=iteration, tbwriter=self.tbwriter, name='gen_model_stats') self.gen_stats_updated = True
Writes gradient statistics for generator to Tensorboard.
def formatall(self, *args, **kargs):
    """Format every fit in ``self.chained_fits``; add-on method for
    fits returned by chained_nonlinear_fit. Extra arguments are passed
    through to each fit's ``format``."""
    sections = []
    for key in self.chained_fits:
        sections.append(10 * '=' + ' ' + str(key) + '\n')
        sections.append(self.chained_fits[key].format(*args, **kargs))
        sections.append('\n')
    # Drop the trailing newline, mirroring the original ans[:-1].
    return ''.join(sections)[:-1]
Add-on method for fits returned by chained_nonlinear_fit.
def _timeout_to_float(self, timeout): if timeout is not None: try: timeout_float = float(timeout) except ValueError: raise ValueError( 'timeout_sec must be a valid number or None. timeout="{}"'.format( timeout ) ) if timeout_float: return timeout_float
Convert timeout to float. Return None if timeout is None, 0 or 0.0. timeout=None disables timeouts in Requests.
def get_features(cls, entry):
    """Extract a list of :class:`pyuniprot.manager.models.Feature` from
    an XML entry node.

    :param entry: XML node entry
    :return: list of features
    """
    return [
        models.Feature(
            description=node.attrib.get('description'),
            type_=node.attrib['type'],
            identifier=node.attrib.get('id'),
        )
        for node in entry.iterfind("./feature")
    ]
get list of `models.Feature` from XML node entry :param entry: XML node entry :return: list of :class:`pyuniprot.manager.models.Feature`
def parts(self, *args, **kwargs):
    """Retrieve parts belonging to this activity.

    All arguments are forwarded to :class:`pykechain.Client.parts`
    with ``activity`` bound to this activity's id. Only configured
    properties appear in the returned parts.
    """
    client = self._client
    return client.parts(*args, activity=self.id, **kwargs)
Retrieve parts belonging to this activity. Without any arguments it retrieves the Instances related to this task only. This call only returns the configured properties in an activity. So properties that are not configured are not in the returned parts. See :class:`pykechain.Client.parts` for additional available parameters. Example ------- >>> task = project.activity('Specify Wheel Diameter') >>> parts = task.parts() To retrieve the models only. >>> parts = task.parts(category=Category.MODEL)
def setEditorData(self, spinBox, index):
    """Load the model value at ``index`` into the spin box editor.

    Args:
        spinBox (BigIntSpinbox): editor widget.
        index (QModelIndex): model data index; ignored when invalid.
    """
    if not index.isValid():
        return
    value = index.model().data(index, QtCore.Qt.EditRole)
    spinBox.setValue(value)
Sets the data to be displayed and edited by the editor from the data model item specified by the model index. Args: spinBox (BigIntSpinbox): editor widget. index (QModelIndex): model data index.
def LockRetryWrapper(self, subject, retrywrap_timeout=1, retrywrap_max_timeout=10, blocking=True, lease_time=None):
    """Retry a DBSubjectLock until it succeeds or the budget is spent.

    Args:
        subject: The subject which the lock applies to.
        retrywrap_timeout: Seconds to sleep between retries.
        retrywrap_max_timeout: Total time budget before giving up.
        blocking: If False, raise on the first lock failure.
        lease_time: Lock lease time in seconds.

    Returns:
        The DBSubjectLock object.

    Raises:
        DBSubjectLockError: when the retry budget is exhausted, or
            immediately on failure when ``blocking`` is False.
    """
    timeout = 0
    while timeout < retrywrap_max_timeout:
        try:
            return self.DBSubjectLock(subject, lease_time=lease_time)
        except DBSubjectLockError:
            if not blocking:
                raise
            stats_collector_instance.Get().IncrementCounter("datastore_retries")
            time.sleep(retrywrap_timeout)
            # Budget is consumed in sleep-sized increments.
            timeout += retrywrap_timeout
    raise DBSubjectLockError("Retry number exceeded.")
Retry a DBSubjectLock until it succeeds. Args: subject: The subject which the lock applies to. retrywrap_timeout: How long to wait before retrying the lock. retrywrap_max_timeout: The maximum time to wait for a retry until we raise. blocking: If False, raise on first lock failure. lease_time: lock lease time in seconds. Returns: The DBSubjectLock object Raises: DBSubjectLockError: If the maximum retry count has been reached.
def load_plugins(plugin_dir: str, module_prefix: str) -> int:
    """Find all non-hidden modules or packages in ``plugin_dir`` and
    import them with the given module prefix.

    :param plugin_dir: plugin directory to search
    :param module_prefix: module prefix used while importing
    :return: number of plugins successfully loaded
    """
    count = 0
    for name in os.listdir(plugin_dir):
        path = os.path.join(plugin_dir, name)
        hidden = name.startswith('_')
        # Skip hidden/non-Python files and hidden/non-package dirs.
        if os.path.isfile(path) and (hidden or not name.endswith('.py')):
            continue
        if os.path.isdir(path) and (
                hidden or not os.path.exists(
                    os.path.join(path, '__init__.py'))):
            continue
        match = re.match(r'([_A-Z0-9a-z]+)(.py)?', name)
        if not match:
            continue
        if load_plugin(f'{module_prefix}.{match.group(1)}'):
            count += 1
    return count
Find all non-hidden modules or packages in a given directory, and import them with the given module prefix. :param plugin_dir: plugin directory to search :param module_prefix: module prefix used while importing :return: number of plugins successfully loaded
def get_text(nodelist):
    """Concatenate and return the data of all text nodes in
    ``nodelist``."""
    return ''.join(node.data for node in nodelist
                   if node.nodeType == node.TEXT_NODE)
Get the value from a text node.
def set_idle_priority(pid=None):
    """Put a process into the idle I/O priority class.

    :param pid: target process id; defaults to the current process.
    """
    target = os.getpid() if pid is None else pid
    lib.ioprio_set(
        lib.IOPRIO_WHO_PROCESS, target,
        lib.IOPRIO_PRIO_VALUE(lib.IOPRIO_CLASS_IDLE, 0))
Puts a process in the idle io priority class. If pid is omitted, applies to the current process.
def merge_context(self, src=None):
    """Merge context from ``src`` into this instance's context.

    A zero ``start_ts`` on our side is adopted from ``src``; a
    conflicting non-zero value raises. Keys and predicates are
    appended in order.
    """
    if src is None:
        return
    ctx = self._ctx
    if ctx.start_ts == 0:
        ctx.start_ts = src.start_ts
    elif ctx.start_ts != src.start_ts:
        raise Exception('StartTs mismatch')
    ctx.keys.extend(src.keys)
    ctx.preds.extend(src.preds)
Merges context from this instance with src.
def draw_screen(self, size):
    """Render the widget tree and draw it onto the curses screen."""
    self.tui.clear()
    rendered = self.top.render(size, focus=True)
    self.tui.draw_screen(size, rendered)
Render curses screen
def get_python():
    """Return the path to the virtualenv's python executable,
    accounting for the Windows vs. POSIX layout."""
    if sys.platform == 'win32':
        return path.join(VE_ROOT, 'Scripts', 'python.exe')
    return path.join(VE_ROOT, 'bin', 'python')
Determine the path to the virtualenv python
def add(self, resource):
    """Add a resource change, or an iterable collection of them.

    Allows multiple resource_change objects for the same resource
    (i.e. URI) and preserves the order of addition.

    Fix: ``collections.Iterable`` was removed in Python 3.10; use
    ``collections.abc.Iterable`` instead.
    """
    from collections.abc import Iterable
    if isinstance(resource, Iterable):
        for r in resource:
            self.add_if_changed(r)
    else:
        self.add_if_changed(resource)
Add a resource change or an iterable collection of them. Allows multiple resource_change objects for the same resource (ie. URI) and preserves the order of addition.
def build_publish_pkt(self, mid, topic, payload, qos, retain, dup):
    """Build an MQTT PUBLISH packet.

    Returns a ``(return_code, packet)`` tuple; ``packet`` is None when
    buffer allocation fails.
    """
    pkt = MqttPkt()
    payloadlen = len(payload)
    # Fixed overhead: 2-byte topic length prefix plus the topic itself.
    packetlen = 2 + len(topic) + payloadlen
    if qos > 0:
        # QoS 1/2 packets also carry a 2-byte message id.
        packetlen += 2
    pkt.mid = mid
    # Flag nibble: DUP at bit 3, QoS at bits 1-2, RETAIN at bit 0.
    pkt.command = NC.CMD_PUBLISH | ((dup & 0x1) << 3) | (qos << 1) | retain
    pkt.remaining_length = packetlen
    ret = pkt.alloc()
    if ret != NC.ERR_SUCCESS:
        return ret, None
    pkt.write_string(topic)
    if qos > 0:
        pkt.write_uint16(mid)
    if payloadlen > 0:
        pkt.write_bytes(payload, payloadlen)
    return NC.ERR_SUCCESS, pkt
Build PUBLISH packet.
def from_lal(cls, lalts, copy=True):
    """Generate a new TimeSeries from a LAL TimeSeries of any type.

    Units that cannot be converted fall back to dimensionless (with a
    warning); the channel is reconstructed from the LAL metadata.
    """
    from ..utils.lal import from_lal_unit
    try:
        unit = from_lal_unit(lalts.sampleUnits)
    except (TypeError, ValueError) as exc:
        warnings.warn("%s, defaulting to 'dimensionless'" % str(exc))
        unit = None
    channel = Channel(lalts.name, sample_rate=1/lalts.deltaT, unit=unit,
                      dtype=lalts.data.data.dtype)
    # Build without copying first; copy only at the end if requested.
    out = cls(lalts.data.data, channel=channel, t0=lalts.epoch,
              dt=lalts.deltaT, unit=unit, name=lalts.name, copy=False)
    if copy:
        return out.copy()
    return out
Generate a new TimeSeries from a LAL TimeSeries of any type.
def _sortValue_isMonospace(font): if font.info.postscriptIsFixedPitch: return 0 if not len(font): return 1 testWidth = None for glyph in font: if testWidth is None: testWidth = glyph.width else: if testWidth != glyph.width: return 1 return 0
Returns 0 if the font is monospace. Returns 1 if the font is not monospace.
def contains_group(store, path=None):
    """Return True if ``store`` contains a group at the given logical
    path."""
    prefix = _path_to_prefix(normalize_storage_path(path))
    return (prefix + group_meta_key) in store
Return True if the store contains a group at the given logical path.
def _set_other(self): if self.dst.style['in'] == 'numpydoc': if self.docs['in']['raw'] is not None: self.docs['out']['post'] = self.dst.numpydoc.get_raw_not_managed(self.docs['in']['raw']) elif 'post' not in self.docs['out'] or self.docs['out']['post'] is None: self.docs['out']['post'] = ''
Sets other specific sections
def select_eep(self, rorg_func, rorg_type, direction=None, command=None):
    """Select the EEP profile matching FUNC and TYPE.

    Stores the chosen func/type on the instance and returns True when a
    matching profile was found.
    """
    self.rorg_func = rorg_func
    self.rorg_type = rorg_type
    self._profile = self.eep.find_profile(
        self._bit_data, self.rorg, rorg_func, rorg_type, direction, command)
    return self._profile is not None
Set EEP based on FUNC and TYPE
def rewind(self, count):
    """Move the index back by ``count`` positions.

    Raises:
        ValueError: if rewinding would go past the beginning.
    """
    new_index = self._index - count
    if new_index < 0:
        raise ValueError("Can't rewind past beginning!")
    self._index = new_index
Rewind index.
def minusExtras(self):
    """Return a new Filter equal to this one, minus redundant terms.

    A ``!=`` test is redundant when an ``=`` test on the same property
    already pins it to a different value.

    Fix: replaced the Python 2-only ``dict.has_key`` with the ``in``
    operator.
    """
    assert self.isOpen()
    trimmed = self.clone()
    equals = {}
    for test in trimmed.tests:
        if test.op == '=':
            equals[test.property] = test.value
    extras = []
    for i, test in enumerate(trimmed.tests):
        if (test.op == '!=' and test.property in equals
                and equals[test.property] != test.value):
            extras.append(i)
    # Pop from the end so earlier indexes remain valid.
    while extras:
        trimmed.tests.pop(extras.pop())
    return trimmed
Return a new Filter that's equal to this one, without extra terms that don't add meaning.
def func_dump(func):
    """Serialize a user-defined function into (code, defaults, closure).

    The code object is marshalled and decoded with raw_unicode_escape
    so it can be stored as text; closure cells are unwrapped to their
    contents (None when the function has no closure).
    """
    code = marshal.dumps(func.__code__).decode('raw_unicode_escape')
    closure = func.__closure__
    if closure is not None:
        closure = tuple(cell.cell_contents for cell in closure)
    return code, func.__defaults__, closure
Serialize user defined function.
def save_to_mat_file(self, parameter_space, result_parsing_function, filename, runs):
    """Save the results for the desired parameter space to a .mat file.

    Args:
        parameter_space (dict): parameter/list-of-values pairs. Scalar
            values are wrapped into one-element lists in place — note
            this mutates the caller's dict.
        result_parsing_function (function): user-defined function,
            taking a result dictionary, used to parse result files into
            a list of values.
        filename (path): name of the output .mat file.
        runs (int): number of runs to gather per parameter combination.
    """
    # Normalise scalars to single-element lists so every axis is a list.
    for key in parameter_space:
        if not isinstance(parameter_space[key], list):
            parameter_space[key] = [parameter_space[key]]
    # Only parameters with more than one value become labelled axes;
    # a final 'runs' axis is always appended.
    dimension_labels = [{key: str(parameter_space[key])} for key in parameter_space.keys() if len(parameter_space[key]) > 1] + [{'runs': range(runs)}]
    return savemat(
        filename,
        {'results': self.get_results_as_numpy_array(parameter_space, result_parsing_function, runs=runs),
         'dimension_labels': dimension_labels})
Return the results relative to the desired parameter space in the form of a .mat file. Args: parameter_space (dict): dictionary containing parameter/list-of-values pairs. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. filename (path): name of output .mat file. runs (int): number of runs to gather for each parameter combination.
def compute_err(self, solution_y, coefficients):
    """Sum of absolute differences between modelled and expected
    y-values, inflated by 50% when any coefficient is negative.

    :param solution_y: y-values produced by a solution
    :param coefficients: polynomial coefficients of the solution
    :return: the error value
    """
    error = sum(abs(modeled - expected)
                for modeled, expected in zip(solution_y, self.expected_values))
    if any(c < 0 for c in coefficients):
        error *= 1.5
    return error
Return an error value by finding the absolute difference for each element in a list of solution-generated y-values versus expected values. Compounds error by 50% for each negative coefficient in the solution. solution_y: list of y-values produced by a solution coefficients: list of polynomial coefficients represented by the solution return: error value
def set_probe_position(self, new_probe_position):
    """Set the probe position, in normalized coordinates with origin at
    top left. Passing ``None`` clears the position."""
    if new_probe_position is not None:
        new_probe_position = Geometry.FloatPoint.make(new_probe_position)
        # Clamp both coordinates into [0.0, 1.0].
        new_probe_position = Geometry.FloatPoint(y=max(min(new_probe_position.y, 1.0), 0.0), x=max(min(new_probe_position.x, 1.0), 0.0))
    old_probe_position = self.__probe_position_value.value
    # Fire the change event only on a real transition: None <-> value,
    # or a genuinely different position.
    if ((old_probe_position is None) != (new_probe_position is None)) or (old_probe_position != new_probe_position):
        self.__probe_position_value.value = new_probe_position
        self.probe_state_changed_event.fire(self.probe_state, self.probe_position)
Set the probe position, in normalized coordinates with origin at top left.
def get_info_consistent(self, ndim):
    """Return (spacing, offset, direction) adapted to ``ndim``.

    Shorter metadata is padded (spacing with 1.0, offset with 0.0,
    direction with identity); longer metadata is truncated.

    Parameters
    ----------
    ndim : int
        image's dimensionality

    Returns
    -------
    spacing : tuple of floats
    offset : tuple of floats
    direction : ndarray
    """
    def _fit(values, fill):
        # Pad with `fill` up to ndim, or truncate down to ndim.
        if ndim > len(values):
            return values + (fill, ) * (ndim - len(values))
        return values[:ndim]

    spacing = _fit(self.spacing, 1.0)
    offset = _fit(self.offset, 0.0)
    src = self.direction
    if ndim > src.shape[0]:
        direction = np.identity(ndim)
        direction[:src.shape[0], :src.shape[0]] = src
    else:
        direction = src[:ndim, :ndim]
    return spacing, offset, direction
Returns the main meta-data information adapted to the supplied image dimensionality. It will try to resolve inconsistencies and other conflicts, altering the information available in the most plausible way. Parameters ---------- ndim : int image's dimensionality Returns ------- spacing : tuple of floats offset : tuple of floats direction : ndarray
def clean_new_password2(self):
    """Validate the confirmation password.

    Checks the password against the directory's strength policy and
    ensures both entries match; returns the confirmation value.
    """
    password1 = self.cleaned_data.get('new_password1')
    password2 = self.cleaned_data.get('new_password2')
    try:
        directory = APPLICATION.default_account_store_mapping.account_store
        directory.password_policy.strength.validate_password(password2)
    except ValueError as e:
        raise forms.ValidationError(str(e))
    if password1 and password2 and password1 != password2:
        raise forms.ValidationError("The two passwords didn't match.")
    return password2
Check if passwords match and are valid.
def stemming_processor(words):
    """Yield each word reduced to its stem via the Porter stemmer."""
    stemmer = PorterStemmer()
    for word in words:
        yield stemmer.stem(word, 0, len(word) - 1)
Porter Stemmer word processor
def ray_triangle_candidates(ray_origins, ray_directions, tree):
    """Do broad-phase search for triangles that the rays may intersect,
    by intersecting a bounding box per ray with the triangle r-tree.

    Parameters
    ------------
    ray_origins: (m,3) float, ray origin points
    ray_directions: (m,3) float, ray direction vectors
    tree: rtree object, contains AABB of each triangle

    Returns
    ----------
    ray_candidates: (n,) int, triangle indexes
    ray_id: (n,) int, corresponding ray index for each candidate

    Fix: ``np.int`` was removed in NumPy 1.24; the builtin ``int`` is
    the equivalent dtype specifier.
    """
    ray_bounding = ray_bounds(ray_origins=ray_origins,
                              ray_directions=ray_directions,
                              bounds=tree.bounds)
    ray_candidates = [None] * len(ray_origins)
    ray_id = [None] * len(ray_origins)
    for i, bounds in enumerate(ray_bounding):
        ray_candidates[i] = np.array(list(tree.intersection(bounds)),
                                     dtype=int)
        ray_id[i] = np.ones(len(ray_candidates[i]), dtype=int) * i
    ray_id = np.hstack(ray_id)
    ray_candidates = np.hstack(ray_candidates)
    return ray_candidates, ray_id
Do broad- phase search for triangles that the rays may intersect. Does this by creating a bounding box for the ray as it passes through the volume occupied by the tree Parameters ------------ ray_origins: (m,3) float, ray origin points ray_directions: (m,3) float, ray direction vectors tree: rtree object, contains AABB of each triangle Returns ---------- ray_candidates: (n,) int, triangle indexes ray_id: (n,) int, corresponding ray index for a triangle candidate
def generate_uuid4(self) -> list:
    """Generate the five dash-separated parts of a UUID version 4
    string.

    The third part is forced to start with '4' (the version nibble)
    and the fourth with 8-b (the variant bits).
    """
    hexstr = randhex(30)
    parts = [
        hexstr[:8],
        hexstr[8:12],
        '4' + hexstr[12:15],
        '{:x}{}'.format(randbetween(8, 11), hexstr[15:18]),
        hexstr[18:],
    ]
    self.last_result = parts
    return parts
Generate a list of parts of a UUID version 4 string. Usually, these parts are concatenated together using dashes.
def create_network(userid, os_version, network_info):
    """Create a network device and configure the network interface.

    Input parameters:
    :userid: USERID of the guest, last 8 if length > 8
    :os_version: os version of the image file
    :network_info: dict of network info
    """
    print("\nConfiguring network interface for %s ..." % userid)
    result = client.send_request('guest_create_network_interface',
                                 userid, os_version, network_info)
    if result['overallRC']:
        raise RuntimeError("Failed to create network for guest %s!\n%s" %
                           (userid, result))
    else:
        print("Succeeded to create network for guest %s!" % userid)
Create network device and configure network interface. Input parameters: :userid: USERID of the guest, last 8 if length > 8 :os_version: os version of the image file :network_info: dict of network info
def is_dir(dirname):
    """argparse type-checker: return ``dirname`` unchanged if it is an
    existing directory.

    Raises:
        argparse.ArgumentTypeError: if the path is not a directory.
    """
    if os.path.isdir(dirname):
        return dirname
    msg = "{0} is not a directory".format(dirname)
    raise argparse.ArgumentTypeError(msg)
Checks if a path is an actual directory that exists
def create_bwa_index_from_fasta_file(fasta_in, params=None):
    """Create a BWA index from an input fasta file.

    :param fasta_in: input fasta file to index
    :param params: dict of bwa-index specific parameters (optional)
    :return: dict mapping index suffixes (.amb, .ann, .bwt, .pac, .sa)
        to open file objects
    """
    if params is None:
        params = {}
    index = BWA_index(params)
    return index({'fasta_in': fasta_in})
Create a BWA index from an input fasta file. fasta_in: the input fasta file from which to create the index params: dict of bwa index specific paramters This method returns a dictionary where the keys are the various output suffixes (.amb, .ann, .bwt, .pac, .sa) and the values are open file objects. The index prefix will be the same as fasta_in, unless the -p parameter is passed in params.
def close_alert(name=None, api_key=None, reason="Conditions are met.",
                action_type="Close"):
    """Close an alert in OpsGenie; thin wrapper around create_alert.

    Required: ``name`` (used as the alert's alias — must match the one
    used when creating the alert).
    Optional: ``api_key``, ``reason``, ``action_type``.
    """
    if name is None:
        raise salt.exceptions.SaltInvocationError(
            'Name cannot be None.')
    return create_alert(name, api_key, reason, action_type)
Close an alert in OpsGenie. It's a wrapper function for create_alert. Example usage with Salt's requisites and other global state arguments could be found above. Required Parameters: name It will be used as alert's alias. If you want to use the close functionality you must provide name field for both states like in above case. Optional Parameters: api_key It's the API Key you've copied while adding integration in OpsGenie. reason It will be used as alert's default message in OpsGenie. action_type OpsGenie supports the default values Create/Close for action_type. You can customize this field with OpsGenie's custom actions for other purposes like adding notes or acknowledging alerts.
def is_decimal(self):
    """Determine whether the data record is a decimal/float type.

    Sets ``type``, ``len`` (digits before the point) and
    ``len_decimal`` (digits after the point), then returns True.
    Implicitly returns None when the type does not match, preserving
    the original falsy behavior.

    Fix: values whose string form has no '.' (e.g. '1e+20') used to
    raise IndexError on ``num_split[1]``; ``str.partition`` yields an
    empty fraction instead.
    """
    dt = DATA_TYPES['decimal']
    if type(self.data) in dt['type']:
        self.type = 'DECIMAL'
        whole, _, frac = str(self.data).partition('.')
        self.len = len(whole)
        self.len_decimal = len(frac)
        return True
Determine if a data record is of the decimal (floating-point) type.
def get_hostname_text(self):
    """Return hostname information from the Unix host, caching the
    first line on the device. Returns None for non-Unix jumphosts."""
    try:
        output = self.device.send('hostname', timeout=10)
        if output:
            self.device.hostname = output.splitlines()[0]
            return output
    except CommandError:
        self.log("Non Unix jumphost type detected")
    return None
Return hostname information from the Unix host.
def start(name, timeout=90, with_deps=False, with_parents=False):
    """Start the specified Windows service.

    A disabled service cannot be started; it is switched to 'Manual'
    start first. Returns True only if every started service reached
    the 'Running' state (also True when already started).

    Args:
        name (str): the service to start
        timeout (int): seconds to wait for each service to start
        with_deps (bool): also start services this one depends on
        with_parents (bool): also start services that depend on it

    Fix: the loop variable no longer shadows the ``name`` parameter.
    """
    if disabled(name):
        modify(name, start_type='Manual')
    ret = set()
    services = ServiceDependencies(name, get_all, info)
    start_list = services.start_order(with_deps=with_deps,
                                      with_parents=with_parents)
    log.debug("Starting services %s", start_list)
    for service_name in start_list:
        try:
            win32serviceutil.StartService(service_name)
        except pywintypes.error as exc:
            # winerror 1056: service already running — not a failure.
            if exc.winerror != 1056:
                raise CommandExecutionError(
                    'Failed To Start {0}: {1}'.format(service_name,
                                                      exc.strerror))
            log.debug('Service "%s" is running', service_name)
        srv_status = _status_wait(service_name=service_name,
                                  end_time=time.time() + int(timeout),
                                  service_states=['Start Pending', 'Stopped'])
        ret.add(srv_status['Status'] == 'Running')
    return False not in ret
Start the specified service. .. warning:: You cannot start a disabled service in Windows. If the service is disabled, it will be changed to ``Manual`` start. Args: name (str): The name of the service to start timeout (int): The time in seconds to wait for the service to start before returning. Default is 90 seconds .. versionadded:: 2017.7.9,2018.3.4 with_deps (bool): If enabled start the given service and the services the current service depends on. with_parents (bool): If enabled and in case other running services depend on the to be start service, this flag indicates that those other services will be started as well. Returns: bool: ``True`` if successful, otherwise ``False``. Also returns ``True`` if the service is already started CLI Example: .. code-block:: bash salt '*' service.start <service name>
def evalsha(self, digest, keys=None, args=None):
    """Execute a Lua script server side by its SHA1 digest.

    Fix: replaced mutable default arguments (``[]``) with None; also
    accepts any iterable for ``keys``/``args``.
    """
    keys = list(keys) if keys else []
    args = list(args) if args else []
    return self.execute(b'EVALSHA', digest, len(keys), *(keys + args))
Execute a Lua script server side by its SHA1 digest.
def first_true(iterable, default=False, pred=None):
    """Return the first true value in *iterable*; *default* when none
    is found. When *pred* is given, return the first item for which
    ``pred(item)`` is true instead."""
    if pred is None:
        pred = bool
    for item in iterable:
        if pred(item):
            return item
    return default
Returns the first true value in the iterable. If no true value is found, returns *default* If *pred* is not None, returns the first item for which pred(item) is true.
def redirect(self, where: Optional[str] = None,
             default: Optional[str] = None,
             override: Optional[str] = None,
             **url_kwargs):
    """Convenience method returning a redirect response.

    :param where: A URL, endpoint, or config key name to redirect to.
    :param default: fallback if ``where`` is invalid.
    :param override: explicit target taking precedence over ``next``
        values in query strings or forms.
    :param url_kwargs: the variable arguments of the URL rule.
    """
    return redirect(where, default, override, _cls=self, **url_kwargs)
Convenience method for returning redirect responses. :param where: A URL, endpoint, or config key name to redirect to. :param default: A URL, endpoint, or config key name to redirect to if ``where`` is invalid. :param override: explicitly redirect to a URL, endpoint, or config key name (takes precedence over the ``next`` value in query strings or forms) :param url_kwargs: the variable arguments of the URL rule :param _anchor: if provided this is added as anchor to the URL. :param _external: if set to ``True``, an absolute URL is generated. Server address can be changed via ``SERVER_NAME`` configuration variable which defaults to `localhost`. :param _external_host: if specified, the host of an external server to generate urls for (eg https://example.com or localhost:8888) :param _method: if provided this explicitly specifies an HTTP method. :param _scheme: a string specifying the desired URL scheme. The `_external` parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default behavior uses the same scheme as the current request, or ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration <config>` if no request context is available. As of Werkzeug 0.10, this also can be set to an empty string to build protocol-relative URLs.
def link(self, pid, to):
    """Link local process ``pid`` to the (possibly remote) process
    ``to``; returns immediately.

    When ``to`` terminates — or its connection to ``pid`` is severed —
    the local process' ``exited`` method is called with ``to``.

    :param pid: The pid of the linking process.
    :type pid: :class:`PID`
    :param to: The pid of the linked process.
    :type to: :class:`PID`
    :returns: Nothing
    """
    self._assert_started()

    def really_link():
        self._links[pid].add(to)
        log.info('Added link from %s to %s' % (pid, to))

    def on_connect(stream):
        really_link()

    if self._is_local(pid):
        # Local targets can be linked synchronously.
        really_link()
    else:
        # Remote targets: establish the connection first, then link
        # from the event-loop callback.
        self.__loop.add_callback(self._maybe_connect, to, on_connect)
Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing
def write_seq_as_temp_fasta(seq):
    """Write a sequence as a temporary FASTA file named 'temp' in the
    system temp directory.

    Args:
        seq (str, Seq, SeqRecord): sequence to write

    Returns:
        str: path to the temporary FASTA file
    """
    record = ssbio.protein.sequence.utils.cast_to_seq_record(seq, id='tempfasta')
    return write_fasta_file(seq_records=record,
                            outname='temp',
                            outdir=tempfile.gettempdir(),
                            force_rerun=True)
Write a sequence as a temporary FASTA file Args: seq (str, Seq, SeqRecord): Sequence string, Biopython Seq or SeqRecord object Returns: str: Path to temporary FASTA file (located in system temporary files directory)
def set_value(self, name, value):
    """Assign ``value`` to variable ``name`` inside the kernel namespace."""
    code = u"get_ipython().kernel.set_value('%s', %s, %s)" % (
        name, to_text_string(value), PY2)
    if self._reading:
        # The kernel is blocked waiting for input, so sneak the code in
        # through the input channel ('!' executes it as a statement).
        self.kernel_client.input(u'!' + code)
    else:
        self.silent_execute(code)
Set value for a variable
def setup(sphinx):
    """Set up the Sphinx object, pushing an Invenio request context if needed."""
    from flask import has_app_context
    from invenio_base.factory import create_app

    packages = ['invenio_base', 'invenio.modules.accounts',
                'invenio.modules.records', 'invenio_knowledge']
    if has_app_context():
        return
    app = create_app(PACKAGES=packages)
    # Keep the request context pushed for the lifetime of the docs build.
    app.test_request_context('/').push()
Setup Sphinx object.
def cor(y_true, y_pred):
    """Compute the Pearson correlation coefficient after masking NaNs."""
    masked_true, masked_pred = _mask_nan(y_true, y_pred)
    correlation_matrix = np.corrcoef(masked_true, masked_pred)
    # Off-diagonal entry is the correlation between the two inputs.
    return correlation_matrix[0, 1]
Compute Pearson correlation coefficient.
def pdf_saver(filehandle, *args, **kwargs):
    "Uses werkzeug.FileStorage instance to save the converted image."
    destination = get_save_path(filehandle.filename)
    chunk_size = kwargs.get('buffer_size', 16384)
    filehandle.save(destination, buffer_size=chunk_size)
Uses werkzeug.FileStorage instance to save the converted image.
def get_default_storage_policy_of_datastore(profile_manager, datastore):
    """Returns the default storage policy reference assigned to a datastore.

    profile_manager
        Reference to the profile manager.

    datastore
        Reference to the datastore.
    """
    # Wrap the datastore in a placement hub so the profile manager can
    # resolve its default requirement profile.
    hub = pbm.placement.PlacementHub(hubId=datastore._moId,
                                     hubType='Datastore')
    log.trace('placement_hub = %s', hub)
    try:
        policy_id = profile_manager.QueryDefaultRequirementProfile(hub)
    except vim.fault.NoPermission as exc:
        log.exception(exc)
        raise VMwareApiError('Not enough permissions. Required privilege: '
                             '{0}'.format(exc.privilegeId))
    except vim.fault.VimFault as exc:
        log.exception(exc)
        raise VMwareApiError(exc.msg)
    except vmodl.RuntimeFault as exc:
        log.exception(exc)
        raise VMwareRuntimeError(exc.msg)
    matching_policies = get_policies_by_id(profile_manager, [policy_id])
    if not matching_policies:
        raise VMwareObjectRetrievalError('Storage policy with id \'{0}\' was '
                                         'not found'.format(policy_id))
    return matching_policies[0]
Returns the default storage policy reference assigned to a datastore. profile_manager Reference to the profile manager. datastore Reference to the datastore.
def on_delete(self, node):
    """Delete statement: remove each dotted-name target from the symbol table."""
    for target in node.targets:
        if target.ctx.__class__ != ast.Del:
            break
        # Walk attribute chains (a.b.c) down to the base Name, collecting
        # the attribute parts along the way.
        parts = []
        while target.__class__ == ast.Attribute:
            parts.append(target.attr)
            target = target.value
        if target.__class__ == ast.Name and target.id not in self.readonly_symbols:
            parts.append(target.id)
            parts.reverse()
            # Symbols are keyed by their full dotted path.
            self.symtable.pop('.'.join(parts))
        else:
            self.raise_exception(node, msg="could not delete symbol")
Delete statement.
def to_op(self):
    """Extracts the modification operation from the Hll.

    :rtype: dict, None
    """
    if not self._adds:
        # Nothing pending -> no operation to emit.
        return None
    # The original re-tested self._adds here, but that branch was always
    # taken once the guard above passed; the dead check is folded away.
    return {'adds': list(self._adds)}
Extracts the modification operation from the Hll. :rtype: dict, None
def get_reverse_index(self, base_index):
    """Get index into this segment's data given the index into the base data.

    Raises IndexError if the base index doesn't map to anything in this
    segment's data.
    """
    mapped = self.reverse_index_mapping[base_index]
    if mapped >= 0:
        return mapped
    # Negative entries mark base indices with no counterpart here.
    raise IndexError("index %d not mapped in this segment" % base_index)
Get index into this segment's data given the index into the base data Raises IndexError if the base index doesn't map to anything in this segment's data
def Group(params, name=None, type=None):
    """Groups together Params for adding under the 'What' section.

    Args:
        params(list of :func:`Param`): Parameter elements to go in this group.
        name(str): Group name. NB ``None`` is valid, since the group may be
            best identified by its type.
        type(str): Type of group, e.g. 'complex' (for real and imaginary).
    """
    # Only emit attributes that were actually supplied.
    attributes = {}
    if name:
        attributes['name'] = name
    if type:
        attributes['type'] = type
    group_element = objectify.Element('Group', attrib=attributes)
    for param in params:
        group_element.append(param)
    return group_element
Groups together Params for adding under the 'What' section. Args: params(list of :func:`Param`): Parameter elements to go in this group. name(str): Group name. NB ``None`` is valid, since the group may be best identified by its type. type(str): Type of group, e.g. 'complex' (for real and imaginary).
def collapse(cls, holomap, ranges=None, mode='data'):
    """Given a map of Overlays, apply all applicable compositors."""
    if cls.definitions == []:
        # No compositors registered; nothing to collapse.
        return holomap
    collapsed = holomap.clone(shared_data=False)
    if ranges:
        items = zip(ranges[1], holomap.data.values())
    else:
        items = holomap.data.items()
    for key, overlay in items:
        collapsed[key] = cls.collapse_element(overlay, ranges, mode)
    return collapsed
Given a map of Overlays, apply all applicable compositors.
def guard(params, guardian, error_class=GuardError, message=''):
    """A guard function - check parameters with guardian function on decorated function

    :param tuple or string params: guarded function parameter/s
    :param function guardian: verifying the conditions for the selected parameter
    :param Exception error_class: raised class when guardian return false
    :param string message: error message
    """
    if isinstance(params, string_types):
        # Normalize a single parameter name to a list.
        params = [params]

    def guard_decorate(f):
        @wraps(f)
        def _guard_decorate(*args, **kwargs):
            # Evaluate the guardian against the selected call arguments.
            if not guardian(**_params(f, args, kwargs, params)):
                raise error_class(message)
            return f(*args, **kwargs)
        return _guard_decorate
    return guard_decorate
A guard function - check parameters with guardian function on decorated function :param tuple or string params: guarded function parameter/s :param function guardian: verifying the conditions for the selected parameter :param Exception error_class: raised class when guardian return false :param string message: error message
def all_devices(cl_device_type=None, platform=None):
    """Get multiple device environments, optionally only of the indicated type.

    This will only fetch devices that support double point precision.

    Args:
        cl_device_type (cl.device_type.* or string): The type of the device we
            want, can be a opencl device type or a string matching 'GPU' or 'CPU'.
        platform (opencl platform): The opencl platform to select the devices from

    Returns:
        list of CLEnvironment: List with the CL device environments.
    """
    if isinstance(cl_device_type, str):
        cl_device_type = device_type_from_string(cl_device_type)

    platforms = cl.get_platforms() if platform is None else [platform]

    environments = []
    for current_platform in platforms:
        if cl_device_type:
            found = current_platform.get_devices(device_type=cl_device_type)
        else:
            found = current_platform.get_devices()
        # Keep only double-precision capable devices.
        environments.extend(
            CLEnvironment(current_platform, device)
            for device in found if device_supports_double(device))
    return environments
Get multiple device environments, optionally only of the indicated type. This will only fetch devices that support double point precision. Args: cl_device_type (cl.device_type.* or string): The type of the device we want, can be a opencl device type or a string matching 'GPU' or 'CPU'. platform (opencl platform): The opencl platform to select the devices from Returns: list of CLEnvironment: List with the CL device environments.
def generate_user_agent(os=None, navigator=None, platform=None, device_type=None):
    """Generate an HTTP User-Agent header value.

    All options restrict the pool of oses/browser engines to generate from;
    see :func:`generate_navigator` for details.
    """
    navigator_profile = generate_navigator(os=os, navigator=navigator,
                                           platform=platform,
                                           device_type=device_type)
    return navigator_profile['user_agent']
Generates HTTP User-Agent header :param os: limit list of os for generation :type os: string or list/tuple or None :param navigator: limit list of browser engines for generation :type navigator: string or list/tuple or None :param platform: limit list of platforms for generation :type platform: string or list/tuple or None :param device_type: limit possible oses by device type :type device_type: list/tuple or None, possible values: "desktop", "smartphone", "tablet", "all" :return: User-Agent string :rtype: string :raises InvalidOption: if a user-agent could not be generated for any combination of allowed oses and navigators :raises InvalidOption: if any of the passed options is invalid
def _ProcessCommandSource(self, source):
    """Prepare a request for calling the execute command action."""
    attributes = source.base_source.attributes
    request = rdf_client_action.ExecuteRequest(
        cmd=attributes["cmd"],
        args=attributes["args"],
    )
    # Yield the action class together with its request.
    yield standard.ExecuteCommandFromClient, request
Prepare a request for calling the execute command action.
def set_queue_callback(self, name, func):
    """Sets a function to execute when the named acquisition queue has data
    placed in it.

    :param name: name of the queue to pull data from
    :type name: str
    :param func: function reference to execute, expects queue contents as
        argument(s)
    :type func: callable
    """
    # setdefault replaces the check-then-insert dance and avoids a
    # double dict lookup; behavior is identical.
    self.acquisition_hooks.setdefault(name, []).append(func)
Sets a function to execute when the named acquisition queue has data placed in it. :param name: name of the queue to pull data from :type name: str :param func: function reference to execute, expects queue contents as argument(s) :type func: callable
def generate_format(self):
    """Emit validation code for the JSON Schema ``format`` keyword.

    The generated check only applies when the value is a string. Known
    formats dispatch to a precompiled regex check; ``regex`` instead
    verifies that the value itself compiles as a regular expression.
    """
    # self.l(...) appends a line of generated code; as a context manager it
    # opens an indented block in the generated output.
    with self.l('if isinstance({variable}, str):'):
        format_ = self._definition['format']
        if format_ in self.FORMAT_REGEXS:
            format_regex = self.FORMAT_REGEXS[format_]
            self._generate_format(format_, format_ + '_re_pattern', format_regex)
        elif format_ == 'regex':
            # The value must itself be a compilable regular expression.
            with self.l('try:'):
                self.l('re.compile({variable})')
            with self.l('except Exception:'):
                self.l('raise JsonSchemaException("{name} must be a valid regex")')
        else:
            # Unknown formats are left unvalidated.
            self.l('pass')
Means that value have to be in specified format. For example date, email or other. .. code-block:: python {'format': 'email'} Valid value for this definition is user@example.com but not @username
def add(self, domain_accession, domain_type, match_quality):
    """Record a match for ``domain_accession`` under ``domain_type``.

    match_quality should be a value between 0 and 1.
    """
    # setdefault creates the per-type dict on first use in a single
    # lookup, replacing the get-then-reassign pattern.
    self.matches.setdefault(domain_type, {})[domain_accession] = match_quality
match_quality should be a value between 0 and 1.
def remove(self, path, recursive=True, skip_trash=True):
    """Removes the given mockfile. ``skip_trash`` doesn't have any meaning."""
    data = self.get_all_data()
    if recursive:
        # Drop every entry whose key starts with the given path prefix.
        for key in [k for k in data.keys() if k.startswith(path)]:
            data.pop(key)
    else:
        data.pop(path)
Removes the given mockfile. skip_trash doesn't have any meaning.
def get_container_mapping(self):
    """Returns a mapping of container -> position"""
    # Only slots of type "a" carry containers.
    return {
        slot["container_uid"]: slot["position"]
        for slot in self.context.getLayout()
        if slot["type"] == "a"
    }
Returns a mapping of container -> position
def enable_autocenter(self, option):
    """Set ``autocenter`` behavior.

    Parameters
    ----------
    option : {'on', 'override', 'once', 'off'}
        Option for auto-center behavior (case-insensitive). A list of
        acceptable options can also be obtained by
        :meth:`get_autocenter_options`.

    Raises
    ------
    ginga.ImageView.ImageViewError
        Invalid option.
    """
    option = option.lower()
    # BUG FIX: the original format string had two %s placeholders but was
    # given only one value, so composing the error message itself raised
    # TypeError; the offending option is now included.
    assert option in self.autocenter_options, \
        ImageViewError("Bad autocenter option '%s': must be one of %s" % (
            option, str(self.autocenter_options)))
    self.t_.set(autocenter=option)
Set ``autocenter`` behavior. Parameters ---------- option : {'on', 'override', 'once', 'off'} Option for auto-center behavior. A list of acceptable options can also be obtained by :meth:`get_autocenter_options`. Raises ------ ginga.ImageView.ImageViewError Invalid option.
def eval_to_ast(self, e, n, extra_constraints=(), exact=None):
    """Evaluates expression e, returning the results in the form of concrete ASTs."""
    results = []
    # Wrap each concrete solution back up as a bitvector of e's width.
    for value in self.eval(e, n, extra_constraints=extra_constraints,
                           exact=exact):
        results.append(ast.bv.BVV(value, e.size()))
    return results
Evaluates expression e, returning the results in the form of concrete ASTs.
def get_ca_certs(environ=os.environ):
    """Retrieve CA certificates from TXAWS_CERTS_PATH or DEFAULT_CERTS_PATH.

    Each directory in the colon-separated path is scanned for ``*.pem``
    files; certificates are de-duplicated by their SHA1 digest.

    :param environ: environment mapping to read TXAWS_CERTS_PATH from.
    :raises exception.CertsNotFoundError: when no .pem files are found.
    :return: the certificate values of the digest->certificate map.
    """
    cert_paths = environ.get("TXAWS_CERTS_PATH", DEFAULT_CERTS_PATH).split(":")
    certificate_authority_map = {}
    for path in cert_paths:
        if not path:
            continue
        for cert_file_name in glob(os.path.join(path, "*.pem")):
            # The file may vanish between glob() and open(); skip if so.
            if not os.path.exists(cert_file_name):
                continue
            # FIX: use a context manager so the file handle is closed even
            # if reading raises (the original leaked on error).
            with open(cert_file_name) as cert_file:
                data = cert_file.read()
            x509 = load_certificate(FILETYPE_PEM, data)
            # Keyed by digest so duplicate certs across paths collapse.
            certificate_authority_map[x509.digest("sha1")] = x509
    values = certificate_authority_map.values()
    if not values:
        raise exception.CertsNotFoundError("Could not find any .pem files.")
    return values
Retrieve a list of CAs at either the DEFAULT_CERTS_PATH or the env override, TXAWS_CERTS_PATH. In order to find .pem files, this function checks first for presence of the TXAWS_CERTS_PATH environment variable that should point to a directory containing cert files. In the absense of this variable, the module-level DEFAULT_CERTS_PATH will be used instead. Note that both of these variables have have multiple paths in them, just like the familiar PATH environment variable (separated by colons).
def get_mors_with_properties(service_instance, object_type, property_list=None,
                             container_ref=None, traversal_spec=None,
                             local_properties=False):
    """Returns a list containing properties and managed object references for
    the managed object.

    service_instance
        The Service Instance from which to obtain managed object references.

    object_type
        The type of content for which to obtain managed object references.

    property_list
        An optional list of object properties used to return even more
        filtered managed object reference results.

    container_ref
        An optional reference to the managed object to search under.

    traversal_spec
        An optional TraversalSpec to be used instead of the standard
        ``Traverse All`` spec.

    local_properties
        Flag specifying whether the properties to be retrieved are local to
        the container.
    """
    call_args = [service_instance, object_type]
    call_kwargs = {'property_list': property_list,
                   'container_ref': container_ref,
                   'traversal_spec': traversal_spec,
                   'local_properties': local_properties}
    try:
        content = get_content(*call_args, **call_kwargs)
    except BadStatusLine:
        # Stale HTTP connection; retry once.
        content = get_content(*call_args, **call_kwargs)
    except IOError as exc:
        if exc.errno != errno.EPIPE:
            raise exc
        # Broken pipe; retry once.
        content = get_content(*call_args, **call_kwargs)

    object_list = []
    for obj in content:
        # Flatten the property set into a plain dict, keeping the managed
        # object reference under the 'object' key.
        properties = {prop.name: prop.val for prop in obj.propSet}
        properties['object'] = obj.obj
        object_list.append(properties)
    log.trace('Retrieved %s objects', len(object_list))
    return object_list
Returns a list containing properties and managed object references for the managed object. service_instance The Service Instance from which to obtain managed object references. object_type The type of content for which to obtain managed object references. property_list An optional list of object properties used to return even more filtered managed object reference results. container_ref An optional reference to the managed object to search under. Can either be an object of type Folder, Datacenter, ComputeResource, Resource Pool or HostSystem. If not specified, default behaviour is to search under the inventory rootFolder. traversal_spec An optional TraversalSpec to be used instead of the standard ``Traverse All`` spec local_properties Flag specifying whether the properties to be retrieved are local to the container. If that is the case, the traversal spec needs to be None.
def ylabelsize(self, size, index=1):
    """Set the font size of the y-axis label.

    Parameters
    ----------
    size : int
    index : int
        Which y-axis to modify (1-based).

    Returns
    -------
    Chart
    """
    axis_key = 'yaxis{}'.format(index)
    self.layout[axis_key]['titlefont']['size'] = size
    return self
Set the size of the label. Parameters ---------- size : int Returns ------- Chart
def add_to_manifest(self, manifest):
    """Add this service to the manifest so it is bound to the application."""
    service_name = self.service.name
    manifest.add_service(service_name)
    # Persist the change immediately.
    manifest.write_manifest()
Add to the manifest to make sure it is bound to the application.
def create_table(table, dbo, tablename, schema=None, commit=True,
                 constraints=True, metadata=None, dialect=None, sample=1000):
    """Create a database table based on a sample of data in the given `table`.

    Keyword arguments:

    table : table container
        Table data to load
    dbo : database object
        DB-API 2.0 connection, callable returning a DB-API 2.0 cursor, or
        SQLAlchemy connection, engine or session
    tablename : text
        Name of the table
    schema : text
        Name of the database schema to create the table in
    commit : bool
        If True commit the changes
    constraints : bool
        If True use length and nullable constraints
    metadata : sqlalchemy.MetaData
        Custom table metadata
    dialect : text
        One of {'access', 'sybase', 'sqlite', 'informix', 'firebird',
        'mysql', 'oracle', 'maxdb', 'postgresql', 'mssql'}
    sample : int
        Number of rows to sample when inferring types etc., set to 0 to use
        the whole table
    """
    # Infer the schema from a sample only; sample=0 means the whole table.
    sampled = head(table, sample) if sample > 0 else table
    ddl = make_create_table_statement(sampled, tablename, schema=schema,
                                      constraints=constraints,
                                      metadata=metadata, dialect=dialect)
    _execute(ddl, dbo, commit=commit)
Create a database table based on a sample of data in the given `table`. Keyword arguments: table : table container Table data to load dbo : database object DB-API 2.0 connection, callable returning a DB-API 2.0 cursor, or SQLAlchemy connection, engine or session tablename : text Name of the table schema : text Name of the database schema to create the table in commit : bool If True commit the changes constraints : bool If True use length and nullable constraints metadata : sqlalchemy.MetaData Custom table metadata dialect : text One of {'access', 'sybase', 'sqlite', 'informix', 'firebird', 'mysql', 'oracle', 'maxdb', 'postgresql', 'mssql'} sample : int Number of rows to sample when inferring types etc., set to 0 to use the whole table
def sinc_window(num_zeros=64, precision=9, window=None, rolloff=0.945):
    """Construct a windowed sinc interpolation filter.

    Parameters
    ----------
    num_zeros : int > 0
        The number of zero-crossings to retain in the sinc filter.
    precision : int >= 0
        log2 of the number of filter coefficients per zero-crossing.
    window : callable or None
        The window function; Blackman-Harris by default.
    rolloff : float in (0, 1]
        The roll-off frequency as a fraction of Nyquist.

    Returns
    -------
    interp_win : np.ndarray, shape=(num_zeros * 2**precision + 1,)
        The interpolation window (right-hand side).
    num_bits : int
        The number of samples per zero-crossing (2**precision).
    rolloff : float
        The roll-off frequency, unchanged.

    Raises
    ------
    TypeError
        If `window` is not callable or None.
    ValueError
        If `num_zeros < 1`, `precision < 0`, or `rolloff` is outside (0, 1].
    """
    if window is None:
        window = scipy.signal.blackmanharris
    elif not callable(window):
        # `callable` is a builtin on all supported Pythons; the six.callable
        # compatibility shim is unnecessary.
        raise TypeError('window must be callable, not type(window)={}'.format(type(window)))

    if not 0 < rolloff <= 1:
        raise ValueError('Invalid roll-off: rolloff={}'.format(rolloff))
    if num_zeros < 1:
        raise ValueError('Invalid num_zeros: num_zeros={}'.format(num_zeros))
    if precision < 0:
        # NOTE(review): the docstring advertised precision > 0 while the code
        # accepts 0; behavior is kept as-is (0 is allowed).
        raise ValueError('Invalid precision: precision={}'.format(precision))

    num_bits = 2 ** precision
    n = num_bits * num_zeros
    # Right half of the sinc, scaled to the roll-off frequency.
    sinc_win = rolloff * np.sinc(rolloff * np.linspace(0, num_zeros,
                                                       num=n + 1,
                                                       endpoint=True))
    # Right half of the taper window.
    taper = window(2 * n + 1)[n:]
    interp_win = taper * sinc_win
    return interp_win, num_bits, rolloff
Construct a windowed sinc interpolation filter Parameters ---------- num_zeros : int > 0 The number of zero-crossings to retain in the sinc filter precision : int > 0 The number of filter coefficients to retain for each zero-crossing window : callable The window function. By default, uses Blackman-Harris. rolloff : float > 0 The roll-off frequency (as a fraction of nyquist) Returns ------- interp_window: np.ndarray [shape=(num_zeros * num_table + 1)] The interpolation window (right-hand side) num_bits: int The number of bits of precision to use in the filter table rolloff : float > 0 The roll-off frequency of the filter, as a fraction of Nyquist Raises ------ TypeError if `window` is not callable or `None` ValueError if `num_zeros < 1`, `precision < 1`, or `rolloff` is outside the range `(0, 1]`. Examples -------- >>> # A filter with 10 zero-crossings, 32 samples per crossing, and a ... # Hann window for tapering. >>> halfwin, prec, rolloff = resampy.filters.sinc_window(num_zeros=10, precision=5, ... window=scipy.signal.hann) >>> halfwin array([ 9.450e-01, 9.436e-01, ..., -7.455e-07, -0.000e+00]) >>> prec 32 >>> rolloff 0.945 >>> # Or using sinc-window filter construction directly in resample >>> y = resampy.resample(x, sr_orig, sr_new, filter='sinc_window', ... num_zeros=10, precision=5, ... window=scipy.signal.hann)
def form_valid(self, form):
    """Save the form, then flash a success message naming the new object."""
    result = super().form_valid(form)
    # self.object is populated by the superclass save above.
    success_text = "Successfully created ({})".format(self.object)
    messages.success(self.request, success_text)
    return result
Add success message
def create_wiki(post_data):
    """Create the wiki titled ``post_data['title']``, or update it if it
    already exists.

    Returns False for too-short titles, None after an update, and the
    result of the record creation otherwise.
    """
    logger.info('Call create wiki')
    title = post_data['title'].strip()
    if len(title) < 2:
        # Reject one-character (or empty) titles.
        logger.info(' ' * 4 + 'The title is too short.')
        return False
    the_wiki = MWiki.get_by_wiki(title)
    if the_wiki:
        # Existing wiki: update in place.
        # NOTE(review): this path returns None while creation returns a
        # value below — callers cannot distinguish update from failure;
        # confirm whether this asymmetry is intended.
        logger.info(' ' * 4 + 'The title already exists.')
        MWiki.update(the_wiki.uid, post_data)
        return
    uid = '_' + tools.get_uu8d()
    # NOTE(review): if this function is module-level and __create_rec is
    # name-mangled inside MWiki, this attribute access would fail with
    # AttributeError — verify against the MWiki definition.
    return MWiki.__create_rec(uid, '1', post_data=post_data)
Create the wiki.
def post_registration_redirect(self, request, user):
    """After registration, redirect to the registration-complete page or the
    supplied "next" query string or hidden field value.
    """
    if "next" in request.GET or "next" in request.POST:
        # An empty "next" value falls back to the site root.
        destination = (request.GET.get("next", None)
                       or request.POST.get("next", None)
                       or "/")
    else:
        destination = "/registration/register/complete/"
    return (destination, (), {})
After registration, redirect to the home page or supplied "next" query string or hidden field value.