code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def list(ctx): if ctx.namespace != 'accounts': click.echo( click.style('Only account data is available for listing.', fg='red') ) return swag = create_swag_from_ctx(ctx) accounts = swag.get_all() _table = [[result['name'], result.get('id')] for result in accounts] click.echo( tabulate(_table, headers=["Account Name", "Account Number"]) )
List SWAG account info.
def create(self): logger.info("creating server") self.library.Srv_Create.restype = snap7.snap7types.S7Object self.pointer = snap7.snap7types.S7Object(self.library.Srv_Create())
create the server.
def cropbox(self, image, geometry, options): cropbox = options['cropbox'] if not cropbox: return image x, y, x2, y2 = parse_cropbox(cropbox) return self._cropbox(image, x, y, x2, y2)
Wrapper for ``_cropbox``
def enable_repeat(self, value=None): if value is None: value = not self.repeated spotifyconnect.Error.maybe_raise(lib.SpPlaybackEnableRepeat(value))
Enable repeat mode
def get_messages(module): answer = collections.OrderedDict() for name in dir(module): candidate = getattr(module, name) if inspect.isclass(candidate) and issubclass(candidate, message.Message): answer[name] = candidate return answer
Discovers all protobuf Message classes in a given import module. Args: module (module): A Python module; :func:`dir` will be run against this module to find Message subclasses. Returns: dict[str, google.protobuf.message.Message]: A dictionary with the Message class names as keys, and the Message subclasses themselves as values.
def dumps(self, value):
    """Return `value` after passing it through every serializer in order.

    ``self`` is iterated as a pipeline: each serializer's ``dumps`` output
    feeds the next one.
    """
    result = value
    for stage in self:
        result = stage.dumps(result)
    return result
returns serialized `value`.
def type_converter(text):
    """Convert a string to the narrowest matching type.

    Returns a ``(value, type)`` pair: ``(int(text), int)`` for plain digit
    strings, ``(float(text), float)`` for anything ``float()`` accepts, and
    ``(text, STRING_TYPE)`` otherwise.
    """
    if text.isdigit():
        # isdigit() is True for characters such as '²' that int() rejects,
        # so guard the conversion instead of assuming it succeeds; on
        # failure we fall through to the float/string handling below.
        try:
            return int(text), int
        except ValueError:
            pass
    try:
        return float(text), float
    except ValueError:
        return text, STRING_TYPE
I convert strings into integers, floats, and strings!
def do_view(self):
    """Authenticate the user with the supplied credentials.

    Connects the user's queue and exchange via ``_do_upgrade`` on success.
    Sets ``task_data['login_successful']`` and, when the login did not lead
    to an upgrade command, responds with HTTP 403.
    """
    self.current.output['login_process'] = True
    self.current.task_data['login_successful'] = False
    if self.current.is_auth:
        # Already authenticated: skip credential checks, go straight to upgrade.
        self._do_upgrade()
    else:
        try:
            auth_result = self.current.auth.authenticate(
                self.current.input['username'],
                self.current.input['password'])
            self.current.task_data['login_successful'] = auth_result
            if auth_result:
                self._do_upgrade()
        except ObjectDoesNotExist:
            # Unknown username (or similar lookup failure): log and fall
            # through so the 403 below is returned.
            self.current.log.exception("Wrong username or another error occurred")
            pass  # NOTE(review): redundant after the log call
        except:
            # NOTE(review): bare re-raise is a no-op clause; kept to preserve
            # the original control flow exactly.
            raise
    if self.current.output.get('cmd') != 'upgrade':
        # No upgrade happened -> authentication failed.
        self.current.output['status_code'] = 403
    else:
        # Successful login: reset the keep-alive timer for this user.
        KeepAlive(self.current.user_id).reset()
Authenticate user with given credentials. Connects user's queue and exchange
def assert_sigfigs_equal(x, y, sigfigs=3):
    """Assert all elements of ``x`` and ``y`` agree to ``sigfigs``
    significant figures.

    :param np.ndarray x: Array of numbers.
    :param np.ndarray y: Array of numbers expected to equal ``x``.
    :param int sigfigs: Number of significant figures demanded. Default 3.
    """
    # Rescale both arrays by x's order of magnitude so that a decimal-place
    # comparison becomes a significant-figure comparison.
    scale = 10.0 ** (-np.floor(np.log10(x)))
    assert_almost_equal(x * scale, y * scale, sigfigs)
Tests if all elements in x and y agree up to a certain number of significant figures. :param np.ndarray x: Array of numbers. :param np.ndarray y: Array of numbers you want to be equal to ``x``. :param int sigfigs: How many significant figures you demand that they share. Default is 3.
def rdfs_properties(rdf): superprops = {} for s, o in rdf.subject_objects(RDFS.subPropertyOf): superprops.setdefault(s, set()) for sp in rdf.transitive_objects(s, RDFS.subPropertyOf): if sp != s: superprops[s].add(sp) for p, sps in superprops.items(): logging.debug("setting superproperties: %s -> %s", p, str(sps)) for s, o in rdf.subject_objects(p): for sp in sps: rdf.add((s, sp, o))
Perform RDFS subproperty inference. Add superproperties where subproperties have been used.
def adjustHSP(self, hsp): reduction = self._reductionForOffset( min(hsp.readStartInSubject, hsp.subjectStart)) hsp.readEndInSubject = hsp.readEndInSubject - reduction hsp.readStartInSubject = hsp.readStartInSubject - reduction hsp.subjectEnd = hsp.subjectEnd - reduction hsp.subjectStart = hsp.subjectStart - reduction
Adjust the read and subject start and end offsets in an HSP. @param hsp: a L{dark.hsp.HSP} or L{dark.hsp.LSP} instance.
def check_angle_sampling(nvecs, angles):
    """Return the integer vectors whose projected angle coverage is inadequate.

    For each vector ``n`` the angles are projected along ``n``; we require the
    projected span to cover at least one full period (2*pi) and to be sampled
    finer than pi per point.

    Parameters
    ----------
    nvecs : array_like
        Array of integer vectors.
    angles : array_like
        Array of angles.

    Returns
    -------
    failed_nvecs : :class:`numpy.ndarray`
        Integer vectors that failed a check. Shape (N, 3).
    failures : :class:`numpy.ndarray`
        Flag per failure: 0 = needs a longer integration window,
        1 = needs finer sampling.
    """
    bad_modes = []
    bad_flags = []
    for vec in nvecs:
        projection = (angles * vec[:, None]).sum(axis=0)
        spread = float(np.abs(projection.max() - projection.min()))
        if spread < (2. * np.pi):
            warnings.warn("Need a longer integration window for mode {0}"
                          .format(vec))
            bad_modes.append(vec.tolist())
            bad_flags.append(0)
        elif (spread / len(projection)) > np.pi:
            warnings.warn("Need a finer sampling for mode {0}"
                          .format(str(vec)))
            bad_modes.append(vec.tolist())
            bad_flags.append(1)
    return np.array(bad_modes), np.array(bad_flags)
Returns a list of the index of elements of n which do not have adequate toy angle coverage. The criterion is that we must have at least one sample in each Nyquist box when we project the toy angles along the vector n. Parameters ---------- nvecs : array_like Array of integer vectors. angles : array_like Array of angles. Returns ------- failed_nvecs : :class:`numpy.ndarray` Array of all integer vectors that failed checks. Has shape (N,3). failures : :class:`numpy.ndarray` Array of flags that designate whether this failed needing a longer integration window (0) or finer sampling (1).
def prophist(self,prop,fig=None,log=False, mask=None, selected=False,**kwargs): setfig(fig) inds = None if mask is not None: inds = np.where(mask)[0] elif inds is None: if selected: inds = self.selected.index else: inds = self.stars.index if selected: vals = self.selected[prop].values else: vals = self.stars[prop].iloc[inds].values if prop=='depth' and hasattr(self,'depth'): vals *= self.dilution_factor[inds] if log: h = plt.hist(np.log10(vals),**kwargs) else: h = plt.hist(vals,**kwargs) plt.xlabel(prop)
Plots a 1-d histogram of desired property. :param prop: Name of property to plot. Must be column of ``self.stars``. :param fig: (optional) Argument for :func:`plotutils.setfig` :param log: (optional) Whether to plot the histogram of log10 of the property. :param mask: (optional) Boolean array (length of ``self.stars``) to say which indices to plot (``True`` is good). :param selected: (optional) If ``True``, then only the "selected" stars (that is, stars obeying all distribution constraints attached to this object) will be plotted. In this case, ``mask`` will be ignored. :param **kwargs: Additional keyword arguments passed to :func:`plt.hist`.
def load_psat(cls, fd): from pylon.io.psat import PSATReader return PSATReader().read(fd)
Returns a case object from the given PSAT data file.
def requeue(self): if self.acknowledged: raise self.MessageStateError( "Message already acknowledged with state: %s" % self._state) self.backend.requeue(self.delivery_tag) self._state = "REQUEUED"
Reject this message and put it back on the queue. You must not use this method as a means of selecting messages to process. :raises MessageStateError: If the message has already been acknowledged/requeued/rejected.
def pathstrip(path, n):
    """Strip ``n`` leading components from the given bytes path."""
    components = []
    remainder = path
    # Peel path components off the right end until nothing splittable is left.
    while os.path.dirname(remainder) != b'':
        remainder, last = os.path.split(remainder)
        components.insert(0, last)
    components.insert(0, remainder)
    return b'/'.join(components[n:])
Strip n leading components from the given path
def body_template(self, value): if self.method == VERB.GET: raise AssertionError("body_template cannot be set for GET requests") if value is None: self.logger.warning("body_template is None, parsing will be ignored") return if not isinstance(value, DataCollection): msg = "body_template must be an instance of %s.%s" % ( DataCollection.__module__, DataCollection.__name__ ) raise AssertionError(msg) self._body_template = value self.set_deserializer_by_mime_type(self.content_type)
Must be an instance of a prestans.types.DataCollection subclass; this is generally set during the RequestHandler lifecycle. Setting this spawns the parsing process of the body. If the HTTP verb is GET an AssertionError is thrown. Use with extreme caution.
def update_host_template(resource_root, name, cluster_name, api_host_template): return call(resource_root.put, HOST_TEMPLATE_PATH % (cluster_name, name), ApiHostTemplate, data=api_host_template, api_version=3)
Update a host template identified by name in the specified cluster. @param resource_root: The root Resource object. @param name: Host template name. @param cluster_name: Cluster name. @param api_host_template: The updated host template. @return: The updated ApiHostTemplate. @since: API v3
def is_single_tree(data_wrapper): db = data_wrapper.data_block bad_ids = db[db[:, COLS.P] == -1][1:, COLS.ID] return CheckResult(len(bad_ids) == 0, bad_ids.tolist())
Check that data forms a single tree Only the first point has ID of -1. Returns: CheckResult with result and list of IDs Note: This assumes no_missing_parents passed.
def generate(self, num_to_generate, starting_place): res = [] activ = starting_place[None, :] index = activ.__getattribute__(self.argfunc)(1) item = self.weights[index] for x in range(num_to_generate): activ = self.forward(item, prev_activation=activ)[0] index = activ.__getattribute__(self.argfunc)(1) res.append(index) item = self.weights[index] return res
Generate data based on some initial position.
def bgrewriteaof(host=None, port=None, db=None, password=None): server = _connect(host, port, db, password) return server.bgrewriteaof()
Asynchronously rewrite the append-only file CLI Example: .. code-block:: bash salt '*' redis.bgrewriteaof
def attach_network_interface(self, network_interface_id, instance_id, device_index):
    """Attaches a network interface to an instance.

    :type network_interface_id: str
    :param network_interface_id: The ID of the network interface to attach.

    :type instance_id: str
    :param instance_id: The ID of the instance that will be attached
        to the network interface.

    :type device_index: int
    :param device_index: The index of the device for the network
        interface attachment on the instance.
    """
    # Fix: the EC2 API parameter is 'DeviceIndex' (capital I); the previous
    # 'Deviceindex' spelling is not recognized by AttachNetworkInterface.
    params = {'NetworkInterfaceId': network_interface_id,
              'InstanceId': instance_id,
              'DeviceIndex': device_index}
    return self.get_status('AttachNetworkInterface', params, verb='POST')
Attaches a network interface to an instance. :type network_interface_id: str :param network_interface_id: The ID of the network interface to attach. :type instance_id: str :param instance_id: The ID of the instance that will be attached to the network interface. :type device_index: int :param device_index: The index of the device for the network interface attachment on the instance.
def create_cms_plugin_page(apphook, apphook_namespace, placeholder_slot=None): creator = CmsPluginPageCreator( apphook=apphook, apphook_namespace=apphook_namespace, ) creator.placeholder_slot = placeholder_slot plugin_page = creator.create() return plugin_page
Create cms plugin page in all existing languages. Add a link to the index page. :param apphook: e.g...........: 'FooBarApp' :param apphook_namespace: e.g.: 'foobar' :return:
def run_later(self, callable_, timeout, *args, **kwargs): self.lock.acquire() try: if self.die: raise RuntimeError('This timer has been shut down and ' 'does not accept new jobs.') job = TimerTask(callable_, *args, **kwargs) self._jobs.append((job, time.time() + timeout)) self._jobs.sort(key=lambda j: j[1]) self.lock.notify() return job finally: self.lock.release()
Schedules the specified callable for delayed execution. Returns a TimerTask instance that can be used to cancel pending execution.
def _view_changed(self, event=None): tr = self.node_transform(self._linked_view.scene) p1, p2 = tr.map(self._axis_ends()) if self.orientation in ('left', 'right'): self.axis.domain = (p1[1], p2[1]) else: self.axis.domain = (p1[0], p2[0])
Linked view transform has changed; update ticks.
def import_functions(names, src, dst): for name in names: module = importlib.import_module('pygsp.' + src) setattr(sys.modules['pygsp.' + dst], name, getattr(module, name))
Import functions in package from their implementation modules.
def get_responses(self): response_list = [] for question_map in self._my_map['questions']: response_list.append(self._get_response_from_question_map(question_map)) return ResponseList(response_list)
Gets list of the latest responses
def _get_updated_rows(self, auth, function): qps = [] for row in self._curs_pg: qps.append( { 'operator': 'equals', 'val1': 'id', 'val2': row['id'] } ) if len(qps) == 0: return [] q = qps[0] for qp in qps[1:]: q = { 'operator': 'or', 'val1': q, 'val2': qp } updated = function(auth, q, { 'max_result': 10000 })['result'] return updated
Get rows updated by last update query * `function` [function] Function to use for searching (one of the search_* functions). Helper function used to fetch all rows which were updated by the latest UPDATE ... RETURNING id query.
def get_parent_repository_ids(self, repository_id): if self._catalog_session is not None: return self._catalog_session.get_parent_catalog_ids(catalog_id=repository_id) return self._hierarchy_session.get_parents(id_=repository_id)
Gets the parent ``Ids`` of the given repository. arg: repository_id (osid.id.Id): a repository ``Id`` return: (osid.id.IdList) - the parent ``Ids`` of the repository raise: NotFound - ``repository_id`` is not found raise: NullArgument - ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
def write(self, text): text = str(text) text = text.replace(IAC, IAC+IAC) text = text.replace(chr(10), chr(13)+chr(10)) self.writecooked(text)
Send a packet to the socket. This function cooks output.
def getElementsCustomFilter(self, filterFunc, root='root'): (root, isFromRoot) = self._handleRootArg(root) elements = [] if isFromRoot is True and filterFunc(root) is True: elements.append(root) getElementsCustomFilter = self.getElementsCustomFilter for child in root.children: if filterFunc(child) is True: elements.append(child) elements += getElementsCustomFilter(filterFunc, child) return TagCollection(elements)
getElementsCustomFilter - Scan elements using a provided function @param filterFunc <function>(node) - A function that takes an AdvancedTag as an argument, and returns True if some arbitrary criteria is met @return - TagCollection of all matching elements
def kill_definitions(self, atom, code_loc, data=None, dummy=True): if data is None: data = DataSet(Undefined(atom.size), atom.size) self.kill_and_add_definition(atom, code_loc, data, dummy=dummy)
Overwrite existing definitions w.r.t 'atom' with a dummy definition instance. A dummy definition will not be removed during simplification. :param Atom atom: :param CodeLocation code_loc: :param object data: :return: None
def getCMakeFlags(self, engineRoot, fmt): return Utility.join( fmt.delim, [ '-DCMAKE_PREFIX_PATH=' + self.getPrefixDirectories(engineRoot, ';'), '-DCMAKE_INCLUDE_PATH=' + self.getIncludeDirectories(engineRoot, ';'), '-DCMAKE_LIBRARY_PATH=' + self.getLinkerDirectories(engineRoot, ';'), ] + self.resolveRoot(self.cmakeFlags, engineRoot), fmt.quotes )
Constructs the CMake invocation flags string for building against this library
def download_loci(self): pool = multiprocessing.Pool(processes=self.threads) pool.map(self.download_threads, self.loci_url) pool.close() pool.join()
Uses a multi-threaded approach to download allele files
def get_related_fields(model_class, field_name, path=""): if field_name: field, model, direct, m2m = _get_field_by_name(model_class, field_name) if direct: try: new_model = _get_remote_field(field).parent_model() except AttributeError: new_model = _get_remote_field(field).model else: if hasattr(field, 'related_model'): new_model = field.related_model else: new_model = field.model() path += field_name path += '__' else: new_model = model_class new_fields = get_relation_fields_from_model(new_model) model_ct = ContentType.objects.get_for_model(new_model) return (new_fields, model_ct, path)
Get fields for a given model
def get_backend(backend, path, backends): m_norm = normalize_vault_path(path) for mount_name, values in backends.items(): b_norm = normalize_vault_path(mount_name) if (m_norm == b_norm) and values['type'] == backend: return values return None
Returns mountpoint details for a backend
def memoize(fn=None):
    """Cache the results of ``fn`` keyed by its call arguments.

    Arguments that cannot be hashed simply bypass the cache, so the
    decorated function still works for them (just without memoization).
    The cache is unbounded — do not use on functions with an unbounded
    argument space.
    """
    cache = { }
    # Project helper that builds a hashable key from (*args, **kwargs);
    # presumably raises TypeError for unhashable arguments — TODO confirm.
    arg_hash_fn = fn_arg_hash_function(fn)
    def decorated(*args, **kwargs):
        try:
            hash_ = arg_hash_fn(*args, **kwargs)
        except TypeError:
            # Unhashable arguments: call through without caching.
            return fn(*args, **kwargs)
        try:
            return cache[hash_]
        except KeyError:
            return_val = fn(*args, **kwargs)
            cache[hash_] = return_val
            return return_val
    _functools.update_wrapper(decorated, fn)
    return decorated
Caches the result of the provided function.
def _proc_builtin(self, tarfile): self.offset_data = tarfile.fileobj.tell() offset = self.offset_data if self.isreg() or self.type not in SUPPORTED_TYPES: offset += self._block(self.size) tarfile.offset = offset self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) return self
Process a builtin type or an unknown type which will be treated as a regular file.
def Column(self, column_name): column_idx = None for idx, column in enumerate(self.header.columns): if column.name == column_name: column_idx = idx break if column_idx is None: raise KeyError("Column '{}' not found".format(column_name)) for row in self.rows: yield row.values[column_idx]
Iterates over values of a given column. Args: column_name: The name of the column to retrieve the values for. Yields: Values of the specified column. Raises: KeyError: If the given column is not present in the table.
def parseFile(self, srcFile, closeFile=False): try: result = self.parse(srcFile.read()) finally: if closeFile: srcFile.close() return result
Parses CSS file-like objects using the current cssBuilder. Use for external stylesheets.
def make_config(self, data: dict): self.validate_config(data) config_data = self.prepare_config(data) return config_data
Make a MIP config.
def transfer(self, from_acct: Account, b58_to_address: str, value: int, payer_acct: Account, gas_limit: int, gas_price: int) -> str: func = InvokeFunction('transfer') if not isinstance(value, int): raise SDKException(ErrorCode.param_err('the data type of value should be int.')) if value < 0: raise SDKException(ErrorCode.param_err('the value should be equal or great than 0.')) if not isinstance(from_acct, Account): raise SDKException(ErrorCode.param_err('the data type of from_acct should be Account.')) Oep4.__b58_address_check(b58_to_address) from_address = from_acct.get_address().to_bytes() to_address = Address.b58decode(b58_to_address).to_bytes() func.set_params_value(from_address, to_address, value) tx_hash = self.__sdk.get_network().send_neo_vm_transaction(self.__hex_contract_address, from_acct, payer_acct, gas_limit, gas_price, func, False) return tx_hash
This interface is used to call the Transfer method in oep4 that transfers an amount of tokens from one account to another account. :param from_acct: an Account class that sends the oep4 token. :param b58_to_address: a base58 encode address that receives the oep4 token. :param value: an int value that indicates the amount of oep4 token that will be transferred in this transaction; must be greater than or equal to 0. :param payer_acct: an Account class that is used to pay for the transaction. :param gas_limit: an int value that indicates the gas limit. :param gas_price: an int value that indicates the gas price. :return: the hexadecimal transaction hash value.
def Update(self, data):
    """Updates a Dirichlet distribution.

    data: sequence of observations, in order corresponding to params
    """
    count = len(data)
    # Add the observed counts to the matching leading parameters.
    self.params[:count] = self.params[:count] + data
Updates a Dirichlet distribution. data: sequence of observations, in order corresponding to params
def iter_cols(self, start=None, end=None): start = start or 0 end = end or self.ncols for i in range(start, end): yield self.iloc[:, i]
Iterate each of the Region cols in this region
def _init(init, X, N, rank, dtype): Uinit = [None for _ in range(N)] if isinstance(init, list): Uinit = init elif init == 'random': for n in range(1, N): Uinit[n] = array(rand(X.shape[n], rank), dtype=dtype) elif init == 'nvecs': for n in range(1, N): Uinit[n] = array(nvecs(X, n, rank), dtype=dtype) else: raise 'Unknown option (init=%s)' % str(init) return Uinit
Initialization for CP models
def configure(self, options, conf): self.conf = conf self.enabled = options.epdb_debugErrors or options.epdb_debugFailures self.enabled_for_errors = options.epdb_debugErrors self.enabled_for_failures = options.epdb_debugFailures
Configure which kinds of exceptions trigger plugin.
def _update_secret(namespace, name, data, apiserver_url): url = "{0}/api/v1/namespaces/{1}/secrets/{2}".format(apiserver_url, namespace, name) data = [{"op": "replace", "path": "/data", "value": data}] ret = _kpatch(url, data) if ret.get("status") == 404: return "Node {0} doesn't exist".format(url) return ret
Replace secrets data by a new one
def is_openmp_supported(): log_threshold = log.set_threshold(log.FATAL) ret = check_openmp_support() log.set_threshold(log_threshold) return ret
Determine whether the build compiler has OpenMP support.
def _get_queryset_methods(cls, queryset_class): def create_method(name, method): def manager_method(self, *args, **kwargs): return getattr(self.get_queryset(), name)(*args, **kwargs) manager_method.__name__ = method.__name__ manager_method.__doc__ = method.__doc__ return manager_method orig_method = models.Manager._get_queryset_methods new_methods = orig_method(queryset_class) inspect_func = inspect.isfunction for name, method in inspect.getmembers(queryset_class, predicate=inspect_func): if hasattr(cls, name) or name in new_methods: continue queryset_only = getattr(method, 'queryset_only', None) if queryset_only or (queryset_only is None and name.startswith('_')): continue new_methods[name] = create_method(name, method) return new_methods
Django overrloaded method for add cyfunction.
def set_expected_update_frequency(self, update_frequency): try: int(update_frequency) except ValueError: update_frequency = Dataset.transform_update_frequency(update_frequency) if not update_frequency: raise HDXError('Invalid update frequency supplied!') self.data['data_update_frequency'] = update_frequency
Set expected update frequency Args: update_frequency (str): Update frequency Returns: None
def F_beta(self, beta): try: F_dict = {} for i in self.TP.keys(): F_dict[i] = F_calc( TP=self.TP[i], FP=self.FP[i], FN=self.FN[i], beta=beta) return F_dict except Exception: return {}
Calculate FBeta score. :param beta: beta parameter :type beta : float :return: FBeta score for classes as dict
def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix): old_prefix = self.subst('$'+old_prefix) old_suffix = self.subst('$'+old_suffix) new_prefix = self.subst('$'+new_prefix) new_suffix = self.subst('$'+new_suffix) dir,name = os.path.split(str(path)) if name[:len(old_prefix)] == old_prefix: name = name[len(old_prefix):] if name[-len(old_suffix):] == old_suffix: name = name[:-len(old_suffix)] return os.path.join(dir, new_prefix+name+new_suffix)
Replace old_prefix with new_prefix and old_suffix with new_suffix. env - Environment used to interpolate variables. path - the path that will be modified. old_prefix - construction variable for the old prefix. old_suffix - construction variable for the old suffix. new_prefix - construction variable for the new prefix. new_suffix - construction variable for the new suffix.
def _merge(*args): return re.compile(r'^' + r'[/-]'.join(args) + r'(?:\s+' + _dow + ')?$')
Create a composite pattern and compile it.
def getSequenceCombinaisons(polymorphipolymorphicDnaSeqSeq, pos = 0):
    """Take a DNA sequence containing polymorphic nucleotides and return
    every concrete sequence it can represent.

    :param polymorphipolymorphicDnaSeqSeq: a string or list of nucleotide
        characters, possibly containing polymorphic codes.
    :param pos: recursion cursor; callers should leave it at 0.
    :returns: list of fully-resolved sequence strings.
    """
    # Fix: types.ListType is Python 2 only (AttributeError on Python 3);
    # isinstance(..., list) is equivalent and works everywhere.
    if not isinstance(polymorphipolymorphicDnaSeqSeq, list):
        seq = list(polymorphipolymorphicDnaSeqSeq)
    else:
        seq = polymorphipolymorphicDnaSeqSeq
    # Past the end: the sequence is fully resolved.
    if pos >= len(seq):
        return [''.join(seq)]
    variants = []
    if seq[pos] in polymorphicNucleotides:
        chars = decodePolymorphicNucleotide(seq[pos])
    else:
        chars = seq[pos]
    # Branch on every concrete nucleotide this position can take.
    for c in chars:
        rseq = copy.copy(seq)
        rseq[pos] = c
        variants.extend(getSequenceCombinaisons(rseq, pos + 1))
    return variants
Takes a dna sequence with polymorphismes and returns all the possible sequences that it can yield
def _weight_generator(self, reviewers): scores = [r.anomalous_score for r in reviewers] mu = np.average(scores) sigma = np.std(scores) if sigma: def w(v): try: exp = math.exp(self.alpha * (v - mu) / sigma) return 1. / (1. + exp) except OverflowError: return 0. return w else: return lambda v: 1.
Compute a weight function for the given reviewers. Args: reviewers: a set of reviewers to compute weight function. Returns: a function computing a weight for a reviewer.
def mtf_bitransformer_base(): hparams = mtf_transformer2_base() hparams.max_length = 256 hparams.shared_embedding = True hparams.add_hparam("encoder_layers", ["self_att", "drd"] * 6) hparams.add_hparam("decoder_layers", ["self_att", "enc_att", "drd"] * 6) hparams.add_hparam("encoder_num_layers", 6) hparams.add_hparam("decoder_num_layers", 6) hparams.add_hparam("encoder_num_heads", 8) hparams.add_hparam("decoder_num_heads", 8) hparams.add_hparam("local_attention_radius", 128) hparams.add_hparam("encoder_num_memory_heads", 0) hparams.add_hparam("decoder_num_memory_heads", 0) hparams.add_hparam("encoder_shared_kv", False) hparams.add_hparam("decoder_shared_kv", False) hparams.add_hparam("decode_length_multiplier", 1.5) hparams.add_hparam("decode_length_constant", 10.0) hparams.add_hparam("alpha", 0.6) hparams.sampling_temp = 0.0 return hparams
Machine translation base configuration.
def deserialize(cls, data, content_type=None): deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type)
Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model :raises: DeserializationError if something went wrong
def CreateSitelinkFeedItem(feed_items, feed_item_id): site_link_from_feed = feed_items[feed_item_id] site_link_feed_item = { 'sitelinkText': site_link_from_feed['text'], 'sitelinkLine2': site_link_from_feed['line2'], 'sitelinkLine3': site_link_from_feed['line3'], } if 'finalUrls' in site_link_from_feed and site_link_from_feed['finalUrls']: site_link_feed_item['sitelinkFinalUrls'] = { 'urls': site_link_from_feed['finalUrls'] } if 'finalMobileUrls' in site_link_from_feed: site_link_feed_item['sitelinkFinalMobileUrls'] = { 'urls': site_link_from_feed['finalMobileUrls'] } site_link_feed_item['sitelinkTrackingUrlTemplate'] = ( site_link_from_feed['trackingUrlTemplate']) else: site_link_feed_item['sitelinkUrl'] = site_link_from_feed['url'] return site_link_feed_item
Creates a Sitelink Feed Item. Args: feed_items: a list of all Feed Items. feed_item_id: the Id of a specific Feed Item for which a Sitelink Feed Item should be created. Returns: The new Sitelink Feed Item.
def reset(self): self.reset_bars() self.url_progressbar.reset() for prop in dir(self): if prop.startswith("__"): continue prop_obj = getattr(self, prop) if prop_obj is not None and hasattr(prop_obj, "reset"): prop_obj.reset() properties = ( getattr(self.__class__, prop) for prop in self._property_list if hasattr(self.__class__, prop) ) for prop in properties: if hasattr(prop, "reset"): prop.reset() elif hasattr(prop, "__set__"): prop.__set__(None, "") self.additional_info = None
Reset all inputs back to default.
def register_arrays(self, arrays):
    """Register arrays from a list (or mapping) of array definitions.

    Each definition is a dict of keyword arguments forwarded to
    :meth:`register_array`, e.g.::

        D = [{'name': 'uvw', 'shape': (3, 'ntime', 'nbl'), 'dtype': np.float32},
             {'name': 'lm', 'shape': (2, 'nsrc'), 'dtype': np.float32}]

    Parameters
    ----------
    arrays : list or dict
        A list of definition dicts, or a mapping whose values are
        definition dicts.
    """
    # Fix: collections.Mapping was removed in Python 3.10 and
    # dict.itervalues() does not exist on Python 3; use the abc module
    # and values() instead.
    if isinstance(arrays, collections.abc.Mapping):
        arrays = arrays.values()
    for ary in arrays:
        self.register_array(**ary)
Register arrays using a list of dictionaries defining the arrays. The list should itself contain dictionaries. i.e. .. code-block:: python D = [{ 'name':'uvw', 'shape':(3,'ntime','nbl'),'dtype':np.float32 }, { 'name':'lm', 'shape':(2,'nsrc'),'dtype':np.float32 }] Parameters ---------- arrays : A list or dict. A list or dictionary of dictionaries describing arrays.
def countBy(self, val): def by(result, key, value): if key not in result: result[key] = 0 result[key] += 1 res = self._group(self.obj, val, by) return self._wrap(res)
Counts instances of an object that group by a certain criterion. Pass either a string attribute to count by, or a function that returns the criterion.
def monitor_experiment(args): if args.time <= 0: print_error('please input a positive integer as time interval, the unit is second.') exit(1) while True: try: os.system('clear') update_experiment() show_experiment_info() time.sleep(args.time) except KeyboardInterrupt: exit(0) except Exception as exception: print_error(exception) exit(1)
monitor the experiment
def push(self, value: Union[int, bytes]) -> None: if len(self.values) > 1023: raise FullStack('Stack limit reached') validate_stack_item(value) self.values.append(value)
Push an item onto the stack.
def _parameter_objects(parameter_objects_from_pillars, parameter_object_overrides): from_pillars = copy.deepcopy(__salt__['pillar.get'](parameter_objects_from_pillars)) from_pillars.update(parameter_object_overrides) parameter_objects = _standardize(_dict_to_list_ids(from_pillars)) for parameter_object in parameter_objects: parameter_object['attributes'] = _properties_from_dict(parameter_object['attributes']) return parameter_objects
Return a list of parameter objects that configure the pipeline parameter_objects_from_pillars The pillar key to use for lookup parameter_object_overrides Parameter objects to use. Will override objects read from pillars.
def protect(self, passphrase, enc_alg, hash_alg): if self.is_public: warnings.warn("Public keys cannot be passphrase-protected", stacklevel=2) return if self.is_protected and not self.is_unlocked: warnings.warn("This key is already protected with a passphrase - " "please unlock it before attempting to specify a new passphrase", stacklevel=2) return for sk in itertools.chain([self], self.subkeys.values()): sk._key.protect(passphrase, enc_alg, hash_alg) del passphrase
Add a passphrase to a private key. If the key is already passphrase protected, it should be unlocked before a new passphrase can be specified. Has no effect on public keys. :param passphrase: A passphrase to protect the key with :type passphrase: ``str``, ``unicode`` :param enc_alg: Symmetric encryption algorithm to use to protect the key :type enc_alg: :py:obj:`~constants.SymmetricKeyAlgorithm` :param hash_alg: Hash algorithm to use in the String-to-Key specifier :type hash_alg: :py:obj:`~constants.HashAlgorithm`
def launched(): if not PREFIX: return False return os.path.realpath(sys.prefix) == os.path.realpath(PREFIX)
Test whether the current python environment is the correct lore env. :return: :any:`True` if the environment is launched :rtype: bool
def active_network_addresses(hypervisor): active = [] for network in hypervisor.listNetworks(): try: xml = hypervisor.networkLookupByName(network).XMLDesc(0) except libvirt.libvirtError: continue else: ip_element = etree.fromstring(xml).find('.//ip') address = ip_element.get('address') netmask = ip_element.get('netmask') active.append(ipaddress.IPv4Network(u'/'.join((address, netmask)), strict=False)) return active
Query libvirt for the already reserved addresses.
def get_health_check(name, region=None, key=None, keyid=None, profile=None):
    """Get the health check configured for this ELB.

    Retries up to 30 times (5s apart) when throttled by the AWS API.
    Returns an ordered dict of health-check settings, or ``{}`` when the
    ELB cannot be found.

    CLI example:

    .. code-block:: bash

        salt myminion boto_elb.get_health_check myelb
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    retries = 30
    while True:
        try:
            lb = conn.get_all_load_balancers(load_balancer_names=[name])
            lb = lb[0]
            ret = odict.OrderedDict()
            hc = lb.health_check
            ret['interval'] = hc.interval
            ret['target'] = hc.target
            ret['healthy_threshold'] = hc.healthy_threshold
            ret['timeout'] = hc.timeout
            ret['unhealthy_threshold'] = hc.unhealthy_threshold
            return ret
        except boto.exception.BotoServerError as e:
            if retries and e.code == 'Throttling':
                log.debug('Throttled by AWS API, will retry in 5 seconds.')
                time.sleep(5)
                retries -= 1
                continue
            # Fix: keyword was misspelled 'exc_info_on_logleve', which would
            # raise TypeError inside this error handler instead of logging.
            log.error('ELB %s not found.', name,
                      exc_info_on_loglevel=logging.DEBUG)
            return {}
Get the health check configured for this ELB. CLI example: .. code-block:: bash salt myminion boto_elb.get_health_check myelb
def all_files_in_directory(path):
    """Recursively list every file under ``path`` (full paths)."""
    collected = []
    for current_dir, _subdirs, names in os.walk(path):
        collected.extend(os.path.join(current_dir, name) for name in names)
    return collected
Recursively list all files under a directory
def progress(status_code):
    """Translate PROGRESS status codes from GnuPG to messages.

    :param status_code: a GnuPG progress keyword (e.g. ``'primegen'``).
    :returns: the human-readable message, or ``None`` for unknown codes.
    """
    lookup = {
        'pk_dsa': 'DSA key generation',
        'pk_elg': 'Elgamal key generation',
        'primegen': 'Prime generation',
        'need_entropy': 'Waiting for new entropy in the RNG',
        'tick': 'Generic tick without any special meaning - still working.',
        'starting_agent': 'A gpg-agent was started.',
        'learncard': 'gpg-agent or gpgsm is learning the smartcard data.',
        'card_busy': 'A smartcard is still working.'
    }
    # Direct O(1) dict lookup instead of scanning every key; still
    # returns None (implicitly, as before) when the code is unknown.
    return lookup.get(str(status_code))
Translate PROGRESS status codes from GnuPG to messages.
def list_blocked_work_units(self, work_spec_name, start=0, limit=None):
    """Get a dictionary of blocked work units for some work spec.

    A unit is blocked because work it depends on (see
    :func:`add_dependent_work_units`) has not completed yet.
    """
    registry_key = WORK_UNITS_ + work_spec_name + _BLOCKED
    return self.registry.filter(registry_key, start=start, limit=limit)
Get a dictionary of blocked work units for some work spec. The dictionary is from work unit name to work unit definition. Work units included in this list are blocked because they were listed as the first work unit in :func:`add_dependent_work_units`, and the work unit(s) they depend on have not completed yet. This function does not tell why work units are blocked, it merely returns the fact that they are.
def _get_sync(self, url):
    """Internal method used for GET requests

    Args:
        url (str): URL to fetch

    Returns:
        Individual URL request's response

    Raises:
        HTTPError: If HTTP request failed.
    """
    response = self.session.get(url)
    if response.status_code == requests.codes.ok:
        return response.json()
    # Raise with the URL and status code instead of a bare class, so
    # callers can see what actually failed; exception type is unchanged.
    raise HTTPError('GET {0} returned status {1}'.format(
        url, response.status_code))
Internal method used for GET requests Args: url (str): URL to fetch Returns: Individual URL request's response Raises: HTTPError: If HTTP request failed.
def compute_eager_pipelines(self):
    """Compute any pipelines attached with eager=True."""
    eager_names = (name for name, pipeline in self._pipelines.items()
                   if pipeline.eager)
    for name in eager_names:
        self.pipeline_output(name)
Compute any pipelines attached with eager=True.
def create_new_reference(self, obj, global_ref=False):
    """Create a new reference that maps to the given object.

    :param obj: Object which gets referenced.
    :param bool global_ref: Whether a local or global reference is created.
    """
    opaque_ref = self.state.project.loader.extern_object.allocate()
    l.debug("Map %s to opaque reference 0x%x", obj, opaque_ref)
    # Select which table the reference lives in, then record it.
    table = self.global_refs if global_ref else self.local_refs
    table[opaque_ref] = obj
    return opaque_ref
Create a new reference thats maps to the given object. :param obj: Object which gets referenced. :param bool global_ref: Whether a local or global reference is created.
def decode_streaming(self, data):
    """Decode given data in streaming fashion

    :param data: sequence of bytes (string, list or generator of bytes)
    :return: generator of symbols
    """
    # Invert the code table: (bit-length, value) -> symbol.
    codes = {code: symbol for symbol, code in self._table.items()}
    value = 0
    length = 0
    for byte in data:
        # Consume bits MSB-first.
        for shift in range(7, -1, -1):
            value = (value << 1) | ((from_byte(byte) >> shift) & 1)
            length += 1
            if (length, value) in codes:
                symbol = codes[length, value]
                if symbol == _EOF:
                    return
                yield symbol
                value = 0
                length = 0
Decode given data in streaming fashion :param data: sequence of bytes (string, list or generator of bytes) :return: generator of symbols
def get_magnitude_squared(self):
    """Returns the square of the magnitude of this vector."""
    dx = self.x
    dy = self.y
    return dx * dx + dy * dy
Returns the square of the magnitude of this vector.
def eventFilter(self, widget, event):
    """Emit ``sig_canvas_clicked`` on left-button presses, then defer to
    the default filter."""
    # Short-circuit keeps event.button() from being called for
    # non-press events, exactly as the original nested ifs did.
    if (event.type() == QEvent.MouseButtonPress
            and event.button() == Qt.LeftButton):
        self.sig_canvas_clicked.emit(self)
    return super(FigureThumbnail, self).eventFilter(widget, event)
A filter that is used to send a signal when the figure canvas is clicked.
def mark(request):
    """Handles marking of individual notifications as read or unread.

    Takes ``notification id`` and mark ``action`` as POST data.

    :param request: HTTP request context.

    :returns: Response to mark action of supplied notification ID.
    """
    notification_id = request.POST.get('id', None)
    action = request.POST.get('action', None)

    if notification_id:
        try:
            notification = Notification.objects.get(pk=notification_id,
                                                    recipient=request.user)
            if action == 'read':
                notification.mark_as_read()
                success, msg = True, _("Marked as read")
            elif action == 'unread':
                notification.mark_as_unread()
                success, msg = True, _("Marked as unread")
            else:
                success, msg = False, _("Invalid mark action.")
        except Notification.DoesNotExist:
            success, msg = False, _("Notification does not exists.")
    else:
        success, msg = False, _("Invalid Notification ID")

    ctx = {'msg': msg, 'success': success, 'action': action}
    return notification_redirect(request, ctx)
Handles marking of individual notifications as read or unread. Takes ``notification id`` and mark ``action`` as POST data. :param request: HTTP request context. :returns: Response to mark action of supplied notification ID.
def op_funcdef_handle(tokens):
    """Process infix defs.

    Joins the split argument strings back into a normal call-style
    definition, normalizing comma/space separators between them.
    """
    func, base_args = get_infix_items(tokens)
    args = []
    for arg in base_args[:-1]:
        rstrip_arg = arg.rstrip()
        if not rstrip_arg.endswith(unwrapper):
            if not rstrip_arg.endswith(","):
                arg += ", "
            elif arg.endswith(","):
                arg += " "
        args.append(arg)
    last_arg = base_args[-1]
    if last_arg.rstrip().endswith(","):
        # BUGFIX: use maxsplit=1 so only the trailing comma is removed;
        # rsplit(",")[0] would truncate an argument containing internal
        # commas (e.g. a tuple default) at its first comma.
        last_arg = last_arg.rsplit(",", 1)[0]
    args.append(last_arg)
    return func + "(" + "".join(args) + ")"
Process infix defs.
def to_tree(instance, *children):
    """Yield the lines of an ASCII tree for *instance* and its children.

    Results are generated lazily rather than returned as a list.
    NOTE(review): Python 2 only (``unicode``, ``dict.itervalues``).
    """
    yield unicode(instance)
    for i, child in enumerate(children):
        lines = 0  # counts lines emitted for this child's subtree
        yield "|"
        yield "+---" + unicode(child)
        # 'a' is the vertical connector under this child: "|" while more
        # siblings follow, blank under the last one.
        if i != len(children) - 1:
            a = "|"
        else:
            a = " "
        for j, item in enumerate(child.itervalues()):
            # 'b' plays the same connector role one level deeper.
            if j != len(child) - 1:
                b = "|"
            else:
                b = " "
            if j == 0:
                yield a + " |"
            for k, line in enumerate(item.to_tree()):
                lines += 1
                if k == 0:
                    yield a + " +---" + line
                else:
                    yield a + " " + b + " " + line
        # Close off a multi-line final child so the drawing stays aligned.
        if len(children) > 1 and i == len(children) - 1 and lines > 1:
            yield a
Generate tree structure of an instance, and its children. This method yields its results, instead of returning them.
def _new(self, name, **kwargs):
    """Create a new JSSObject with name and "keys".

    Generate a default XML template for this object, based on the class
    attribute "keys".

    Args:
        name: String name of the object to use as the object's name
            property.
        kwargs: Keyword args accepted by this class' "data_keys";
            values are cast to string.
    """
    if self._name_path:
        # Create each element along the name path, then set the name on
        # the innermost element.
        node = self
        for element_name in self._name_path.split("/"):
            self._set_xml_from_keys(node, (element_name, None))
            node = node.find(element_name)
        node.text = name
    else:
        ElementTree.SubElement(self, "name").text = name

    for key_item in self.data_keys.items():
        self._set_xml_from_keys(self, key_item, **kwargs)
Create a new JSSObject with name and "keys". Generate a default XML template for this object, based on the class attribute "keys". Args: name: String name of the object to use as the object's name property. kwargs: Accepted keyword args can be viewed by checking the "data_keys" class attribute. Typically, they include all top-level keys, and non-duplicated keys used elsewhere. Values will be cast to string. (Int 10, bool False become string values "10" and "false"). Ignores kwargs that aren't in object's keys attribute.
def _on_stackexchange_user(self, future, access_token, response): response['access_token'] = access_token future.set_result(response)
Invoked as a callback when self.stackexchange_request returns the response to the request for user data. :param method future: The callback method to pass along :param str access_token: The access token for the user's use :param dict response: The HTTP response already decoded
def getPyClass(self):
    """Name of generated inner class that will be specified as pyclass."""
    if not self.hasExtPyClass():
        return 'Holder'
    return ".".join(self.extPyClasses[self.name])
Name of generated inner class that will be specified as pyclass.
def dict_to_querystring(dictionary):
    """Converts a dict to a querystring suitable to be appended to a URL.

    The previous ``unicode.format`` call was Python-2-only, and building
    the string with a trailing "&" that is then sliced off is fragile;
    joining the pairs produces the same result on both 2 and 3.
    """
    return u"&".join(
        u"{0}={1}".format(key, dictionary[key]) for key in dictionary
    )
Converts a dict to a querystring suitable to be appended to a URL.
def get_size(self):
    """Get the size of the tree.

    Returns:
        tupel: (width, height)
    """
    left, top, right, bottom = self.get_rectangle()
    return (int(right - left), int(bottom - top))
Get the size of the tree. Returns: tupel: (width, height)
def load_nb(cls, inline=True):
    """Loads the plotly notebook resources."""
    from IPython.display import publish_display_data
    cls._loaded = True
    init_notebook_mode(connected=not inline)
    payload = {MIME_TYPES['jlab-hv-load']: get_plotlyjs()}
    publish_display_data(data=payload)
Loads the plotly notebook resources.
def get_dates_in_period(start=None, top=None, step=1, step_dict=None):
    """Return a list of dates from `start` to `top`, inclusive.

    :param start: first datetime; defaults to today.
    :param top: last datetime; defaults to ``start + delta``.
    :param step: step size in days, used when `step_dict` is not given.
    :param step_dict: optional kwargs for ``relativedelta`` (e.g.
        ``{'months': 1}``); when given, it overrides `step`.
    """
    # BUGFIX: `step_dict={}` was a mutable default argument; None with
    # the same falsy check preserves behavior for all existing callers.
    delta = relativedelta(**step_dict) if step_dict else timedelta(days=step)
    start = start or datetime.today()
    top = top or start + delta
    dates = []
    current = start
    while current <= top:
        dates.append(current)
        current += delta
    return dates
Return a list of dates from the `start` to `top`.
def _make_data(data) -> Tuple[List[Dict], List[Dict]]:
    """Transform table data into JSON.

    Returns a (rows, columns) pair: one dict per DataFrame row, with the
    row's index stored under "key", plus the column spec built by
    ``Table._make_columns``.
    """
    rows = []
    for idx, row in data.iterrows():
        row.index = row.index.astype(str)
        record = row.to_dict()
        record["key"] = str(idx)
        rows.append(record)
    return rows, Table._make_columns(data.columns)
Transform table data into JSON.
def getPermutedTensors(W, kw, n, m2, noisePct):
    """Generate m2 noisy rows from W by zeroing a random subset of its
    non-zero components.

    :param W: weight tensor; only row 0's non-zero positions are used
        (assumes every row shares the same sparsity pattern -- TODO confirm)
    :param kw: number of non-zero components per row -- presumably; the
        code indexes ``nz`` with positions drawn from ``range(kw)``
    :param n: unused in this function (kept for caller compatibility)
    :param m2: number of noisy copies to generate
    :param noisePct: fraction of the kw non-zero components to zero out
    :return: tensor whose m2 rows are perturbed copies of W
    """
    # NOTE(review): `.repeat(m2, 1)` looks like torch tiling semantics --
    # confirm W is a torch tensor rather than a numpy array, where
    # `repeat` behaves differently.
    W2 = W.repeat(m2, 1)
    nz = W[0].nonzero()
    numberToZero = int(round(noisePct * kw))
    for i in range(m2):
        # Fresh random subset of non-zero positions for each row.
        indices = np.random.permutation(kw)[0:numberToZero]
        for j in indices:
            W2[i,nz[j]] = 0
    return W2
Generate m2 noisy versions of W. A noisy version of W is generated by randomly zeroing noisePct of the non-zero components of each row. :param W: weight tensor to perturb :param kw: number of non-zero components per row :param n: unused (kept for caller compatibility) :param m2: number of noisy copies to generate :param noisePct: fraction of the non-zero components to zero out :return: tensor whose m2 rows are perturbed copies of W
def save(self, fname):
    """Write the encoded report text to *fname*."""
    payload = encode(self.text)
    with open(fname, 'wb') as outfile:
        outfile.write(payload)
Save the report
def main():
    """Takes crash data via stdin and generates a Socorro signature"""
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    parser.add_argument(
        '-v', '--verbose', help='increase output verbosity',
        action='store_true'
    )
    args = parser.parse_args()

    generator = SignatureGenerator(debug=args.verbose)
    crash_data = json.loads(sys.stdin.read())
    print(json.dumps(generator.generate(crash_data), indent=2))
Takes crash data via stdin and generates a Socorro signature
def list(self, table, **kparams):
    """Get a collection of records by table name.

    Returns a collection of SnowRecord objects.
    """
    return self.api.list(table, **kparams)
Get a collection of records by table name. Returns a collection of SnowRecord objects.
def save(filepath, obj, on_overwrite = 'ignore'): filepath = preprocess(filepath) if os.path.exists(filepath): if on_overwrite == 'backup': backup = filepath + '.bak' shutil.move(filepath, backup) save(filepath, obj) try: os.remove(backup) except Exception, e: warnings.warn("Got an error while traing to remove "+backup+":"+str(e)) return else: assert on_overwrite == 'ignore' try: _save(filepath, obj) except RuntimeError, e: if str(e).find('recursion') != -1: warnings.warn('pylearn2.utils.save encountered the following ' 'error: ' + str(e) + '\nAttempting to resolve this error by calling ' + 'sys.setrecusionlimit and retrying') old_limit = sys.getrecursionlimit() try: sys.setrecursionlimit(50000) _save(filepath, obj) finally: sys.setrecursionlimit(old_limit)
Serialize `object` to a file denoted by `filepath`. Parameters ---------- filepath : str A filename. If the suffix is `.joblib` and joblib can be imported, `joblib.dump` is used in place of the regular pickling mechanisms; this results in much faster saves by saving arrays as separate .npy files on disk. If the file suffix is `.npy` than `numpy.save` is attempted on `obj`. Otherwise, (c)pickle is used. obj : object A Python object to be serialized. on_overwrite: A string specifying what to do if the file already exists. ignore: just overwrite it backup: make a copy of the file (<filepath>.bak) and delete it when done saving the new copy. this allows recovery of the old version of the file if saving the new one fails
def cleaned_selector(html):
    """Clean `html` and wrap it in a parsel.Selector.

    Falls back to an uncleaned selector when lxml cannot parse the input.
    """
    import parsel
    try:
        return parsel.Selector(root=_cleaned_html_tree(html), type='html')
    except (lxml.etree.XMLSyntaxError,
            lxml.etree.ParseError,
            lxml.etree.ParserError,
            UnicodeEncodeError):
        return parsel.Selector(html)
Clean parsel.selector.
def start_log_monitor(self):
    """Start the log monitor process and register it on this node.

    Creates dedicated stdout/stderr log files, launches the monitor via
    ``ray.services``, and records the resulting process info under
    ``PROCESS_TYPE_LOG_MONITOR``.
    """
    stdout_file, stderr_file = self.new_log_files("log_monitor")
    process_info = ray.services.start_log_monitor(
        self.redis_address,
        self._logs_dir,
        stdout_file=stdout_file,
        stderr_file=stderr_file,
        redis_password=self._ray_params.redis_password)
    # Only one log monitor may be registered per node.
    assert ray_constants.PROCESS_TYPE_LOG_MONITOR not in self.all_processes
    self.all_processes[ray_constants.PROCESS_TYPE_LOG_MONITOR] = [
        process_info
    ]
Start the log monitor.
def parse_optimize(self):
    """Parse the output of a geometry (or saddle-point) optimization.

    Collects an energy for every geometry search point and sets
    ``self.errcode``/``self.errmsg`` depending on whether a stationary
    point was located.
    """
    match = re.search("EQUILIBRIUM GEOMETRY LOCATED", self.text)
    spmatch = "SADDLE POINT LOCATED" in self.text
    # True when either a minimum or a saddle point converged.
    located = True if match or spmatch else False
    points = grep_split(" BEGINNING GEOMETRY SEARCH POINT NSERCH=", self.text)

    if self.tddft == "excite":
        # Excited-state runs: parse full energies per search point
        # (points[0] is the text before the first NSERCH marker).
        points = [self.parse_energy(point) for point in points[1:]]
    else:
        # Otherwise, pull the NSERCH energies directly from the text.
        regex = re.compile(r'NSERCH:\s+\d+\s+E=\s+([+-]?\d+\.\d+)')
        points = [Energy(states=[State(0,None,float(m.group(1)), 0.0, 0.0)]) for m in regex.finditer(self.text)]

    if "FAILURE TO LOCATE STATIONARY POINT, TOO MANY STEPS TAKEN" in self.text:
        self.errcode = GEOM_NOT_LOCATED
        self.errmsg = "too many steps taken: %i"%len(points)
    if located:
        # A located stationary point overrides any failure message above.
        self.errcode = OK

    return Optimize(points=points)
Parse the output resulting from a geometry optimization or a saddle-point search.
def DateStringToDateObject(date_string):
    """Return a date object for a string "YYYYMMDD", or None when the
    string is not exactly 8 digits or is not a valid calendar date.
    """
    # Raw string avoids the invalid-escape-sequence warning for \d;
    # `is None` replaces the `== None` comparison.
    if re.match(r'^\d{8}$', date_string) is None:
        return None
    try:
        return datetime.date(int(date_string[0:4]),
                             int(date_string[4:6]),
                             int(date_string[6:8]))
    except ValueError:
        # e.g. month 13 or day 32
        return None
Return a date object for a string "YYYYMMDD".
def set_sleep_timer(self, sleep_time_seconds):
    """Set the speaker's sleep timer, or cancel it with None.

    Args:
        sleep_time_seconds (int or NoneType): seconds until the speaker
            turns off (max 86399), or None to cancel.

    Raises:
        ValueError: for out-of-range or non-integer input.
        SoCoUPnPException: for other controller errors.
    """
    try:
        if sleep_time_seconds is None:
            # An empty duration cancels any active sleep timer.
            sleep_time = ''
        else:
            # Format seconds as "H:MM:SS"; int() raises ValueError for
            # non-numeric input, handled below.
            sleep_time = format(
                datetime.timedelta(seconds=int(sleep_time_seconds))
            )
        self.avTransport.ConfigureSleepTimer([
            ('InstanceID', 0),
            ('NewSleepTimerDuration', sleep_time),
        ])
    except SoCoUPnPException as err:
        # The device reports out-of-range durations as UPnP error 402.
        if 'Error 402 received' in str(err):
            raise ValueError('invalid sleep_time_seconds, must be integer \
value between 0 and 86399 inclusive or None')
        raise
    except ValueError:
        raise ValueError('invalid sleep_time_seconds, must be integer \
value between 0 and 86399 inclusive or None')
Sets the sleep timer. Args: sleep_time_seconds (int or NoneType): How long to wait before turning off speaker in seconds, None to cancel a sleep timer. Maximum value of 86399 Raises: SoCoException: Upon errors interacting with Sonos controller ValueError: Argument/Syntax errors
def get_iex_listed_symbol_dir(start=None, **kwargs):
    """MOVED to iexfinance.refdata.get_listed_symbol_dir"""
    import warnings
    message = WNG_MSG % ("get_iex_listed_symbol_dir",
                         "refdata.get_iex_listed_symbol_dir")
    warnings.warn(message)
    return ListedSymbolDir(start=start, **kwargs)
MOVED to iexfinance.refdata.get_listed_symbol_dir
def hooked_by(self, addr):
    """Returns the current hook for `addr`.

    :param addr: An address.

    :returns: None if the address is not hooked.
    """
    if self.is_hooked(addr):
        return self._sim_procedures[addr]
    l.warning("Address %s is not hooked", self._addr_to_str(addr))
    return None
Returns the current hook for `addr`. :param addr: An address. :returns: None if the address is not hooked.