code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def transformToNative(obj): if obj.isNative: return obj obj.isNative = True obj.value = splitFields(obj.value) return obj
Turn obj.value into a list.
def calc(path): total = 0 err = None if os.path.isdir(path): try: for entry in os.scandir(path): try: is_dir = entry.is_dir(follow_symlinks=False) except (PermissionError, FileNotFoundError): err = "!" return total, err if is_dir: result = calc(entry.path) total += result[0] err = result[1] if err: return total, err else: try: total += entry.stat(follow_symlinks=False).st_size except (PermissionError, FileNotFoundError): err = "!" return total, err except (PermissionError, FileNotFoundError): err = "!" return total, err else: total += os.path.getsize(path) return total, err
Takes a path as an argument and returns the total size in bytes of the file or directory. If the path is a directory the size will be calculated recursively.
def _message_received(self, msg): msg = Message.from_node(msg) return self.dispatch(msg)
Callback run when an XMPP Message is reveived. This callback delivers the message to every behaviour that is waiting for it. First, the aioxmpp.Message is converted to spade.message.Message Args: msg (aioxmpp.Messagge): the message just received. Returns: list(asyncio.Future): a list of futures of the append of the message at each matched behaviour.
def get_host_lock(url): hostname = get_hostname(url) return host_locks.setdefault(hostname, threading.Lock())
Get lock object for given URL host.
def run(hsm, aead_backend, args): write_pid_file(args.pid_file) server_address = (args.listen_addr, args.listen_port) httpd = YHSM_KSMServer(server_address, partial(YHSM_KSMRequestHandler, hsm, aead_backend, args)) my_log_message(args.debug or args.verbose, syslog.LOG_INFO, "Serving requests to 'http://%s:%s%s' with key handle(s) %s (YubiHSM: '%s', AEADs in '%s', DB in '%s')" % (args.listen_addr, args.listen_port, args.serve_url, args.key_handles, args.device, args.aead_dir, args.db_url)) httpd.serve_forever()
Start a BaseHTTPServer.HTTPServer and serve requests forever.
def rwishart_cov(n, C): p = np.shape(C)[0] sig = np.linalg.cholesky(C) if n <= (p-1): raise ValueError('Wishart parameter n must be greater ' 'than size of matrix.') norms = np.random.normal(size=(p * (p - 1)) // 2) chi_sqs = np.sqrt(np.random.chisquare(df=np.arange(n, n - p, -1))) A = flib.expand_triangular(chi_sqs, norms) flib.dtrmm_wrap(sig, A, side='L', uplo='L', transa='N', alpha=1.) w = np.asmatrix(np.dot(A, A.T)) flib.symmetrize(w) return w
Return a Wishart random matrix. :Parameters: n : int Degrees of freedom, > 0. C : matrix Symmetric and positive definite
def _parse_error(self, error): error = str(error) m = re.match(r'(\d+)\((\d+)\)\s*:\s(.*)', error) if m: return int(m.group(2)), m.group(3) m = re.match(r'ERROR:\s(\d+):(\d+):\s(.*)', error) if m: return int(m.group(2)), m.group(3) m = re.match(r'(\d+):(\d+)\((\d+)\):\s(.*)', error) if m: return int(m.group(2)), m.group(4) return None, error
Parses a single GLSL error and extracts the linenr and description Other GLIR implementations may omit this.
def srandmember(self, name, number=None): with self.pipe as pipe: f = Future() res = pipe.srandmember(self.redis_key(name), number=number) def cb(): if number is None: f.set(self.valueparse.decode(res.result)) else: f.set([self.valueparse.decode(v) for v in res.result]) pipe.on_execute(cb) return f
Return a random member of the set. :param name: str the name of the redis key :return: Future()
def modify(self, service_name, json, **kwargs): return self._send(requests.put, service_name, json, **kwargs)
Modify an AppNexus object
def get_agents(self): if self.retrieved: raise errors.IllegalState('List has already been retrieved.') self.retrieved = True return objects.AgentList(self._results, runtime=self._runtime)
Gets the agent list resulting from the search. return: (osid.authentication.AgentList) - the agent list raise: IllegalState - list already retrieved *compliance: mandatory -- This method must be implemented.*
def ipmi_method(self, command): ipmi = ipmitool(self.console, self.password, self.username) if command == "reboot": self.ipmi_method(command="status") if self.output == "Chassis Power is off": command = "on" ipmi.execute(self.ipmi_map[command]) if ipmi.status: self.error = ipmi.error.strip() else: self.output = ipmi.output.strip() self.status = ipmi.status
Use ipmitool to run commands with ipmi protocol
def overwrite_line(self, n, text): with self._moveback(n): self.term.stream.write(text)
Move back N lines and overwrite line with `text`.
def plot_evec(fignum, Vs, symsize, title): plt.figure(num=fignum) plt.text(-1.1, 1.15, title) symb, symkey = ['s', 'v', 'o'], 0 col = ['r', 'b', 'k'] for VEC in range(3): X, Y = [], [] for Vdirs in Vs: XY = pmag.dimap(Vdirs[VEC][0], Vdirs[VEC][1]) X.append(XY[0]) Y.append(XY[1]) plt.scatter(X, Y, s=symsize, marker=symb[VEC], c=col[VEC], edgecolors='none') plt.axis("equal")
plots eigenvector directions of S vectors Paramters ________ fignum : matplotlib figure number Vs : nested list of eigenvectors symsize : size in pts for symbol title : title for plot
def font_width(self): return self.get_font_width(font_name=self.font_name, font_size=self.font_size)
Return the badge font width.
def alphabetical_formula(self): alph_formula = super().alphabetical_formula chg_str = "" if self.charge > 0: chg_str = " +" + formula_double_format(self.charge, False) elif self.charge < 0: chg_str = " " + formula_double_format(self.charge, False) return alph_formula + chg_str
Returns a reduced formula string with appended charge
def get_vault_ids_by_authorization(self, authorization_id): mgr = self._get_provider_manager('AUTHORIZATION', local=True) lookup_session = mgr.get_authorization_lookup_session(proxy=self._proxy) lookup_session.use_federated_vault_view() authorization = lookup_session.get_authorization(authorization_id) id_list = [] for idstr in authorization._my_map['assignedVaultIds']: id_list.append(Id(idstr)) return IdList(id_list)
Gets the list of ``Vault`` ``Ids`` mapped to an ``Authorization``. arg: authorization_id (osid.id.Id): ``Id`` of an ``Authorization`` return: (osid.id.IdList) - list of vault ``Ids`` raise: NotFound - ``authorization_id`` is not found raise: NullArgument - ``authorization_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
def get_functions_by_search(self, function_query, function_search): if not self._can('search'): raise PermissionDenied() return self._provider_session.get_functions_by_search(function_query, function_search)
Pass through to provider FunctionSearchSession.get_functions_by_search
async def remove(self, name: str) -> None: LOGGER.debug('NodePoolManager.remove >>> name: %s', name) try: await pool.delete_pool_ledger_config(name) except IndyError as x_indy: LOGGER.info('Abstaining from node pool removal; indy-sdk error code %s', x_indy.error_code) LOGGER.debug('NodePool.remove <<<')
Remove serialized pool info if it exists. Abstain from removing open node pool.
def _prepare_wsdl_objects(self): self.DeletionControlType = self.client.factory.create('DeletionControlType') self.TrackingId = self.client.factory.create('TrackingId') self.TrackingId.TrackingIdType = self.client.factory.create('TrackingIdType')
Preps the WSDL data structures for the user.
def conjugate(self): return self.__class__(scalar=self.scalar, vector= -self.vector)
Quaternion conjugate, encapsulated in a new instance. For a unit quaternion, this is the same as the inverse. Returns: A new Quaternion object clone with its vector part negated
def getPysamVariants(self, referenceName, startPosition, endPosition): if referenceName in self._chromFileMap: varFileName = self._chromFileMap[referenceName] referenceName, startPosition, endPosition = \ self.sanitizeVariantFileFetch( referenceName, startPosition, endPosition) cursor = self.getFileHandle(varFileName).fetch( referenceName, startPosition, endPosition) for record in cursor: yield record
Returns an iterator over the pysam VCF records corresponding to the specified query.
def _set_annotation_to_str(annotation_data: Mapping[str, Mapping[str, bool]], key: str) -> str: value = annotation_data[key] if len(value) == 1: return 'SET {} = "{}"'.format(key, list(value)[0]) x = ('"{}"'.format(v) for v in sorted(value)) return 'SET {} = {{{}}}'.format(key, ', '.join(x))
Return a set annotation string.
def is_quoted(value): ret = '' if isinstance(value, six.string_types) \ and value[0] == value[-1] \ and value.startswith(('\'', '"')): ret = value[0] return ret
Return a single or double quote, if a string is wrapped in extra quotes. Otherwise return an empty string.
def libvlc_media_new_location(p_instance, psz_mrl): f = _Cfunctions.get('libvlc_media_new_location', None) or \ _Cfunction('libvlc_media_new_location', ((1,), (1,),), class_result(Media), ctypes.c_void_p, Instance, ctypes.c_char_p) return f(p_instance, psz_mrl)
Create a media with a certain given media resource location, for instance a valid URL. @note: To refer to a local file with this function, the file://... URI syntax B{must} be used (see IETF RFC3986). We recommend using L{libvlc_media_new_path}() instead when dealing with local files. See L{libvlc_media_release}. @param p_instance: the instance. @param psz_mrl: the media location. @return: the newly created media or NULL on error.
def _get_cookie(self, name, domain): for c in self.session.cookies: if c.name==name and c.domain==domain: return c return None
Return the cookie "name" for "domain" if found If there are mote than one, only the first is returned
def Run(self, unused_arg): reply = rdf_flows.GrrStatus(status=rdf_flows.GrrStatus.ReturnedStatus.OK) self.SendReply(reply, message_type=rdf_flows.GrrMessage.Type.STATUS) self.grr_worker.Sleep(10) logging.info("Dying on request.") os._exit(242)
Run the kill.
def from_parent(cls, parent: 'BlockHeader', gas_limit: int, difficulty: int, timestamp: int, coinbase: Address=ZERO_ADDRESS, nonce: bytes=None, extra_data: bytes=None, transaction_root: bytes=None, receipt_root: bytes=None) -> 'BlockHeader': header_kwargs = { 'parent_hash': parent.hash, 'coinbase': coinbase, 'state_root': parent.state_root, 'gas_limit': gas_limit, 'difficulty': difficulty, 'block_number': parent.block_number + 1, 'timestamp': timestamp, } if nonce is not None: header_kwargs['nonce'] = nonce if extra_data is not None: header_kwargs['extra_data'] = extra_data if transaction_root is not None: header_kwargs['transaction_root'] = transaction_root if receipt_root is not None: header_kwargs['receipt_root'] = receipt_root header = cls(**header_kwargs) return header
Initialize a new block header with the `parent` header as the block's parent hash.
def iter_milestones(self, state=None, sort=None, direction=None, number=-1, etag=None): url = self._build_url('milestones', base_url=self._api) accepted = {'state': ('open', 'closed'), 'sort': ('due_date', 'completeness'), 'direction': ('asc', 'desc')} params = {'state': state, 'sort': sort, 'direction': direction} for (k, v) in list(params.items()): if not (v and (v in accepted[k])): del params[k] if not params: params = None return self._iter(int(number), url, Milestone, params, etag)
Iterates over the milestones on this repository. :param str state: (optional), state of the milestones, accepted values: ('open', 'closed') :param str sort: (optional), how to sort the milestones, accepted values: ('due_date', 'completeness') :param str direction: (optional), direction to sort the milestones, accepted values: ('asc', 'desc') :param int number: (optional), number of milestones to return. Default: -1 returns all milestones :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of :class:`Milestone <github3.issues.milestone.Milestone>`\ s
def has_blocking_background_send(self): for background_object in self.background_objects: if background_object.block_other_commands and background_object.run_state in ('S','N'): self.shutit_obj.log('All objects are: ' + str(self),level=logging.DEBUG) self.shutit_obj.log('The current blocking send object is: ' + str(background_object),level=logging.DEBUG) return True elif background_object.block_other_commands and background_object.run_state in ('F','C','T'): assert False, shutit_util.print_debug(msg='Blocking command should have been removed, in run_state: ' + background_object.run_state) else: assert background_object.block_other_commands is False, shutit_util.print_debug() return False
Check whether any blocking background commands are waiting to run. If any are, return True. If none are, return False.
def estimate_map(interface, state, label, inp): out = interface.output(0) centers = {} for row in inp: row = row.strip().split(state["delimiter"]) if len(row) > 1: x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]] cluster = min((state['dist'](c, x), i) for i, c in state['centers'])[1] vertex = state['create'](x, 1.0) centers[cluster] = vertex if cluster not in centers else state["update"](centers[cluster], vertex) for cluster, values in centers.iteritems(): out.add(cluster, values)
Find the cluster `i` that is closest to the datapoint `e`.
def get_user_best(self, username, *, mode=OsuMode.osu, limit=50): return self._make_req(endpoints.USER_BEST, dict( k=self.key, u=username, type=_username_type(username), m=mode.value, limit=limit ), JsonList(SoloScore))
Get a user's best scores. Parameters ---------- username : str or int A `str` representing the user's username, or an `int` representing the user's id. mode : :class:`osuapi.enums.OsuMode` The osu! game mode for which to look up. Defaults to osu!standard. limit The maximum number of results to return. Defaults to 50, maximum 100.
def currentRepoTreeItemChanged(self): currentItem, currentIndex = self.getCurrentItem() hasCurrent = currentIndex.isValid() assert hasCurrent == (currentItem is not None), \ "If current idex is valid, currentIndex may not be None" if hasCurrent: logger.info("Adding rti to collector: {}".format(currentItem.nodePath)) self.collector.setRti(currentItem) self.currentItemActionGroup.setEnabled(hasCurrent) isTopLevel = hasCurrent and self.model().isTopLevelIndex(currentIndex) self.topLevelItemActionGroup.setEnabled(isTopLevel) self.openItemAction.setEnabled(currentItem is not None and currentItem.hasChildren() and not currentItem.isOpen) self.closeItemAction.setEnabled(currentItem is not None and currentItem.hasChildren() and currentItem.isOpen) logger.debug("Emitting sigRepoItemChanged: {}".format(currentItem)) self.sigRepoItemChanged.emit(currentItem)
Called to update the GUI when a repo tree item has changed or a new one was selected.
def fold(self, node): node = self.generic_visit(node) try: return nodes.Const.from_untrusted(node.as_const(), lineno=node.lineno, environment=self.environment) except nodes.Impossible: return node
Do constant folding.
def variables(self): p = lambda o: isinstance(o, Variable) and self._docfilter(o) return sorted(filter(p, self.doc.values()))
Returns all documented module level variables in the module sorted alphabetically as a list of `pydoc.Variable`.
def getOverlayWidthInMeters(self, ulOverlayHandle): fn = self.function_table.getOverlayWidthInMeters pfWidthInMeters = c_float() result = fn(ulOverlayHandle, byref(pfWidthInMeters)) return result, pfWidthInMeters.value
Returns the width of the overlay quad in meters. By default overlays are rendered on a quad that is 1 meter across
def save(self, fname, compression='blosc'): egg = { 'data' : self.data, 'analysis' : self.analysis, 'list_length' : self.list_length, 'n_lists' : self.n_lists, 'n_subjects' : self.n_subjects, 'position' : self.position, 'date_created' : self.date_created, 'meta' : self.meta } if fname[-4:]!='.fegg': fname+='.fegg' with warnings.catch_warnings(): warnings.simplefilter("ignore") dd.io.save(fname, egg, compression=compression)
Save method for the FriedEgg object The data will be saved as a 'fegg' file, which is a dictionary containing the elements of a FriedEgg saved in the hd5 format using `deepdish`. Parameters ---------- fname : str A name for the file. If the file extension (.fegg) is not specified, it will be appended. compression : str The kind of compression to use. See the deepdish documentation for options: http://deepdish.readthedocs.io/en/latest/api_io.html#deepdish.io.save
def keygrip_ed25519(vk): return _compute_keygrip([ ['p', util.num2bytes(0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED, size=32)], ['a', b'\x01'], ['b', util.num2bytes(0x2DFC9311D490018C7338BF8688861767FF8FF5B2BEBE27548A14B235ECA6874A, size=32)], ['g', util.num2bytes(0x04216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A6666666666666666666666666666666666666666666666666666666666666658, size=65)], ['n', util.num2bytes(0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED, size=32)], ['q', vk.to_bytes()], ])
Compute keygrip for Ed25519 public keys.
def add_migrations(self, migrations): if self.__closed: raise MigrationSessionError("Can't change applied session") self._to_apply.extend(migrations)
Add migrations to be applied. Args: migrations: a list of migrations to add of the form [(app, migration_name), ...] Raises: MigrationSessionError if called on a closed MigrationSession
def get_html_clear_filename(filename): newFilename = filename.replace(".html", "") newFilename = newFilename.replace(".md", "") newFilename = newFilename.replace(".txt", "") newFilename = newFilename.replace(".tile", "") newFilename = newFilename.replace(".jade", "") newFilename = newFilename.replace(".rst", "") newFilename = newFilename.replace(".docx", "") newFilename = newFilename.replace("index", "home") newFilename = newFilename.replace("-", " ") newFilename = newFilename.replace("_", " ") newFilename = newFilename.title() return newFilename
Clears the file extension from the filename and makes it nice looking
def verify(expr, params=None): try: compile(expr, params=params) return True except com.TranslationError: return False
Determine if expression can be successfully translated to execute on MapD
def clear_operations_touching(self, qubits: Iterable[ops.Qid], moment_indices: Iterable[int]): qubits = frozenset(qubits) for k in moment_indices: if 0 <= k < len(self._moments): self._moments[k] = self._moments[k].without_operations_touching( qubits)
Clears operations that are touching given qubits at given moments. Args: qubits: The qubits to check for operations on. moment_indices: The indices of moments to check for operations within.
def get_tags(self, only_autocomplete = False): return self._to_dict(('id', 'name', 'autocomplete'), self.conn.GetTags(only_autocomplete))
returns list of all tags. by default only those that have been set for autocomplete
def got_broker_module_type_defined(self, module_type): for broker_link in self.brokers: for module in broker_link.modules: if module.is_a_module(module_type): return True return False
Check if a module type is defined in one of the brokers :param module_type: module type to search for :type module_type: str :return: True if mod_type is found else False :rtype: bool
def check_version(version: str): code_version = parse_version(__version__) given_version = parse_version(version) check_condition(code_version[0] == given_version[0], "Given release version (%s) does not match release code version (%s)" % (version, __version__)) check_condition(code_version[1] == given_version[1], "Given major version (%s) does not match major code version (%s)" % (version, __version__))
Checks given version against code version and determines compatibility. Throws if versions are incompatible. :param version: Given version.
def get_window_size(self, windowHandle='current'): command = Command.GET_WINDOW_SIZE if self.w3c: if windowHandle != 'current': warnings.warn("Only 'current' window is supported for W3C compatibile browsers.") size = self.get_window_rect() else: size = self.execute(command, {'windowHandle': windowHandle}) if size.get('value', None) is not None: size = size['value'] return {k: size[k] for k in ('width', 'height')}
Gets the width and height of the current window. :Usage: :: driver.get_window_size()
def clear_cache(): del Cache._keys for k in list(Cache._cache.keys()): it = Cache._cache.pop(k) del it del Cache._cache Cache._keys = [] Cache._cache = {} gc.collect()
Remove all cached objects
async def _get_smallest_env(self): async def slave_task(mgr_addr): r_manager = await self.env.connect(mgr_addr, timeout=TIMEOUT) ret = await r_manager.get_agents(addr=True) return mgr_addr, len(ret) sizes = await create_tasks(slave_task, self.addrs, flatten=False) return sorted(sizes, key=lambda x: x[1])[0][0]
Get address of the slave environment manager with the smallest number of agents.
def __get_user(self): storage = object.__getattribute__(self, '_LazyUser__storage') user = getattr(self.__auth, 'get_user')() setattr(storage, self.__user_name, user) return user
Return the real user object.
def getUserSignupDate(self): userinfo = self.getUserInfo() timestamp = int(float(userinfo["signupTimeSec"])) return time.strftime("%m/%d/%Y %H:%M", time.gmtime(timestamp))
Returns the human readable date of when the user signed up for google reader.
def reset_position(self): self.pos = 0 self.col = 0 self.row = 1 self.eos = 0
Reset all current positions.
def find(self, name): collectors = self.get_collectors() for collector in collectors: if name.lower() == collector['name'].lower(): self.collector_id = collector['id'] return collector return {'status': 'No results found.'}
Returns a dict of collector's details if found. Args: name (str): name of collector searching for
def to_tensor(self): a_shape = shape_list(self.a) b_shape = shape_list(self.b) inner_dim = b_shape[1] result_dim = b_shape[0] flat_a = tf.reshape(self.a, [-1, inner_dim]) product = tf.matmul(flat_a, self.b, transpose_b=True) product_shape = a_shape[:-1] + [result_dim] product = tf.reshape(product, product_shape) product.set_shape(self.a.get_shape().as_list()[:-1] + [self.b.get_shape()[0]]) return product
Convert to Tensor.
def patchFile(filename, replacements): patched = Utility.readFile(filename) for key in replacements: patched = patched.replace(key, replacements[key]) Utility.writeFile(filename, patched)
Applies the supplied list of replacements to a file
def score(self, test_X, test_Y): with self.tf_graph.as_default(): with tf.Session() as self.tf_session: self.tf_saver.restore(self.tf_session, self.model_path) feed = { self.input_data: test_X, self.input_labels: test_Y, self.keep_prob: 1 } return self.accuracy.eval(feed)
Compute the mean accuracy over the test set. Parameters ---------- test_X : array_like, shape (n_samples, n_features) Test data. test_Y : array_like, shape (n_samples, n_features) Test labels. Returns ------- float : mean accuracy over the test set
def _create_table(self, table_name, column_types, primary=None, nullable=()): require_string(table_name, "table name") require_iterable_of(column_types, tuple, name="rows") if primary is not None: require_string(primary, "primary") require_iterable_of(nullable, str, name="nullable") column_decls = [] for column_name, column_type in column_types: decl = "%s %s" % (column_name, column_type) if column_name == primary: decl += " UNIQUE PRIMARY KEY" if column_name not in nullable: decl += " NOT NULL" column_decls.append(decl) column_decl_str = ", ".join(column_decls) create_table_sql = \ "CREATE TABLE %s (%s)" % (table_name, column_decl_str) self.execute_sql(create_table_sql)
Creates a sqlite3 table from the given metadata. Parameters ---------- column_types : list of (str, str) pairs First element of each tuple is the column name, second element is the sqlite3 type primary : str, optional Which column is the primary key nullable : iterable, optional Names of columns which have null values
def get_lastfm(method, lastfm_key='', **kwargs): if not lastfm_key: if 'lastfm_key' not in CONFIG or not CONFIG['lastfm_key']: logger.warning('No lastfm key configured') return '' else: lastfm_key = CONFIG['lastfm_key'] url = 'http://ws.audioscrobbler.com/2.0/?method={}&api_key={}&format=json' url = url.format(method, lastfm_key) for key in kwargs: url += '&{}={}'.format(key, kwargs[key]) response = get_url(url, parser='json') if 'error' in response: logger.error('Error number %d in lastfm query: %s', response['error'], response['message']) return '' return response
Request the specified method from the lastfm api.
def genms(self, scans=[]): if len(scans): scanstr = string.join([str(ss) for ss in sorted(scans)], ',') else: scanstr = self.allstr print 'Splitting out all cal scans (%s) with 1s int time' % scanstr newname = ps.sdm2ms(self.sdmfile, self.sdmfile.rstrip('/')+'.ms', scanstr, inttime='1') return newname
Generate an MS that contains all calibrator scans with 1 s integration time.
def _join_keys_v1(left, right): if left.endswith(':') or '::' in left: raise ValueError("Can't join a left string ending in ':' or containing '::'") return u"{}::{}".format(_encode_v1(left), _encode_v1(right))
Join two keys into a format separable by using _split_keys_v1.
def load_jws_from_request(req): current_app.logger.info("loading request with headers: %s" % req.headers) if (("content-type" in req.headers and "application/jose" in req.headers['content-type']) or ("Content-Type" in req.headers and "application/jose" in req.headers['Content-Type'])): path = urlparse.urlsplit(req.url).path for rule in current_app.url_map.iter_rules(): if path == rule.rule and req.method in rule.methods: dedata = req.get_data().decode('utf8') bp = current_app.bitjws.basepath req.jws_header, req.jws_payload = \ bitjws.validate_deserialize(dedata, requrl=bp + rule.rule) break
This function performs almost entirely bitjws authentication tasks. If valid bitjws message and signature headers are found, then the request will be assigned 'jws_header' and 'jws_payload' attributes. :param req: The flask request to load the jwt claim set from.
def _to_api_value(self, attribute_type, value): if not value: return None if isinstance(attribute_type, properties.Instance): return value.to_api() if isinstance(attribute_type, properties.List): return self._parse_api_value_list(value) return attribute_type.serialize(value)
Return a parsed value for the API.
def end_body(self): if self.write_copy_script: self.write( '<textarea id="c" class="invisible"></textarea>' '<script>' 'function cp(t){' 'var c=document.getElementById("c");' 'c.value=t;' 'c.select();' 'try{document.execCommand("copy")}' 'catch(e){}}' '</script>' ) self.write('</div>{}</body></html>', self._script)
Ends the whole document. This should be called the last
def add_root_bank(self, bank_id): if self._catalog_session is not None: return self._catalog_session.add_root_catalog(catalog_id=bank_id) return self._hierarchy_session.add_root(id_=bank_id)
Adds a root bank. arg: bank_id (osid.id.Id): the ``Id`` of a bank raise: AlreadyExists - ``bank_id`` is already in hierarchy raise: NotFound - ``bank_id`` not found raise: NullArgument - ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.*
def derive_annotations(self, annotations): cls = type(self) return cls( self[0], self[1], self[2], self[3], annotations, self[5] )
Derives a new event from this one setting the ``annotations`` attribute. Args: annotations: (Sequence[Union[amazon.ion.symbols.SymbolToken, unicode]]): The annotations associated with the derived event. Returns: IonEvent: The newly generated event.
def update_billing_info(self, billing_info): url = urljoin(self._url, '/billing_info') response = billing_info.http_request(url, 'PUT', billing_info, {'Content-Type': 'application/xml; charset=utf-8'}) if response.status == 200: pass elif response.status == 201: billing_info._url = response.getheader('Location') else: billing_info.raise_http_error(response) response_xml = response.read() logging.getLogger('recurly.http.response').debug(response_xml) billing_info.update_from_element(ElementTree.fromstring(response_xml))
Change this account's billing information to the given `BillingInfo`.
def whois_domains(self, domains): api_name = 'opendns-whois-domain' fmt_url_path = u'whois/{0}' return self._multi_get(api_name, fmt_url_path, domains)
Calls WHOIS domain end point Args: domains: An enumerable of domains Returns: A dict of {domain: domain_result}
def _build_url(self, api_call): if self.api_version in ('1.13.0', '1.13.0+update.1', '1.13.0+update.2'): if '/' not in api_call: return "{0}/{1}/index.json".format(self.site_url, api_call) return "{0}/{1}.json".format(self.site_url, api_call)
Build request url. Parameters: api_call (str): Base API Call. Returns: Complete url (str).
def detail(self, detail=None, ret_r=False): if detail or ret_r: self._detail = detail return self return self._detail
code's detail
def noam_norm(x, epsilon=1.0, name=None): with tf.name_scope(name, default_name="noam_norm", values=[x]): shape = x.get_shape() ndims = len(shape) return (tf.nn.l2_normalize(x, ndims - 1, epsilon=epsilon) * tf.sqrt( to_float(shape[-1])))
One version of layer normalization.
def callConfirmed(RepeatIndicator_presence=0, BearerCapability_presence=0, BearerCapability_presence1=0, Cause_presence=0, CallControlCapabilities_presence=0): a = TpPd(pd=0x3) b = MessageType(mesType=0x8) packet = a / b if RepeatIndicator_presence is 1: c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0) packet = packet / c if BearerCapability_presence is 1: d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0) packet = packet / d if BearerCapability_presence1 is 1: e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0) packet = packet / e if Cause_presence is 1: f = CauseHdr(ieiC=0x08, eightBitC=0x0) packet = packet / f if CallControlCapabilities_presence is 1: g = CallControlCapabilitiesHdr(ieiCCC=0x15, eightBitCCC=0x0) packet = packet / g return packet
CALL CONFIRMED Section 9.3.2
def _restore_stdout(self): if self.buffer: if self._mirror_output: output = sys.stdout.getvalue() error = sys.stderr.getvalue() if output: if not output.endswith('\n'): output += '\n' self._original_stdout.write(STDOUT_LINE % output) if error: if not error.endswith('\n'): error += '\n' self._original_stderr.write(STDERR_LINE % error) sys.stdout = self._original_stdout sys.stderr = self._original_stderr self._stdout_buffer.seek(0) self._stdout_buffer.truncate() self._stderr_buffer.seek(0) self._stderr_buffer.truncate()
Unhook stdout and stderr if buffering is enabled.
def before_request(self, request, method, url, headers): parts = urllib.parse.urlsplit(url) audience = urllib.parse.urlunsplit( (parts.scheme, parts.netloc, parts.path, "", "")) token = self._get_jwt_for_audience(audience) self.apply(headers, token=token)
Performs credential-specific before request logic. Args: request (Any): Unused. JWT credentials do not need to make an HTTP request to refresh. method (str): The request's HTTP method. url (str): The request's URI. This is used as the audience claim when generating the JWT. headers (Mapping): The request's headers.
def selectReceivePath(self, paths): logger.debug("%s", paths) if not paths: path = os.path.basename(self.userPath) + '/Anon' try: path = [p for p in paths if not p.startswith("/")][0] except IndexError: path = os.path.relpath(list(paths)[0], self.userPath) return self._fullPath(path)
From a set of source paths, recommend a destination path. The paths are relative or absolute, in a source Store. The result will be absolute, suitable for this destination Store.
def play_Track(self, track):
    """Convert a Track object to MIDI events and write them to the track_data."""
    if hasattr(track, 'name'):
        self.set_track_name(track.name)
    self.delay = 0
    # Only schedule a program change when the instrument carries an
    # explicit instrument number.
    _missing = object()
    instrument_nr = getattr(track.instrument, 'instrument_nr', _missing)
    if instrument_nr is not _missing:
        self.change_instrument = True
        self.instrument = instrument_nr
    for bar in track:
        self.play_Bar(bar)
Convert a Track object to MIDI events and write them to the track_data.
def init_drivers(enable_debug_driver=False):
    """Initialize all the drivers.

    Appends every driver from DRIVERS to CLASSES; DebugDriver is only
    included when enable_debug_driver is set.
    """
    for candidate in DRIVERS:
        try:
            if enable_debug_driver or candidate != DebugDriver:
                CLASSES.append(candidate)
        except Exception:
            # Best-effort: one misbehaving driver must not block the rest.
            continue
Initialize all the drivers.
def _register_info(self, server):
    """Write a TensorBoardInfo file and arrange for its cleanup.

    Args:
        server: The result of `self._make_server()`.
    """
    parsed_url = urllib.parse.urlparse(server.get_url())
    info = manager.TensorBoardInfo(
        version=version.VERSION,
        start_time=int(time.time()),
        port=parsed_url.port,
        pid=os.getpid(),
        path_prefix=self.flags.path_prefix,
        logdir=self.flags.logdir,
        db=self.flags.db,
        cache_key=self.cache_key,
    )
    # Make sure the info file disappears when the process exits.
    atexit.register(manager.remove_info_file)
    manager.write_info_file(info)
Write a TensorBoardInfo file and arrange for its cleanup. Args: server: The result of `self._make_server()`.
def __live_receivers(signal):
    """Return all signal handlers that are currently still alive for the
    input `signal`.

    Args:
        signal: A signal name.

    Returns:
        A list of callable receivers for the input signal.
    """
    with __lock:
        __purge()
        # __purge() has just dropped dead weak references, so each
        # remaining deref yields a live callable.
        return [ref() for ref in __receivers[signal]]
Return all signal handlers that are currently still alive for the input `signal`. Args: signal: A signal name. Returns: A list of callable receivers for the input signal.
def tunable(self, obj):
    """Populate self.tune from obj's optional 'tune' section.

    Copies every known mount tunable, normalising Vault-style duration
    strings to seconds, and mirrors an optional top-level 'description'.
    """
    self.tune = dict()
    if 'tune' in obj:
        for entry in MOUNT_TUNABLES:
            key = entry[0]
            map_val(self.tune, obj['tune'], key)
            if key in self.tune and is_vault_time(self.tune[key]):
                # Normalise Vault durations (e.g. "1h") to seconds.
                self.tune[key] = vault_time_to_s(self.tune[key])
    if 'description' in obj:
        self.tune['description'] = obj['description']
A tunable resource maps against a backend...
def rotation(cls, angle, pivot=None):
    """Create a rotation transform at the specified angle, optionally
    about the specified pivot point.

    :param angle: Rotation angle in degrees
    :type angle: float
    :param pivot: Point to rotate about, if omitted the rotation is
        about the origin.
    :type pivot: sequence
    :rtype: Affine
    """
    ca, sa = cos_sin_deg(angle)
    if pivot is None:
        members = (ca, sa, 0.0, -sa, ca, 0.0, 0.0, 0.0, 1.0)
    else:
        px, py = pivot
        # Rotation about a pivot: translate pivot to origin, rotate,
        # translate back; the combined offsets land in the third column.
        members = (
            ca, sa, px - px * ca + py * sa,
            -sa, ca, py - px * sa - py * ca,
            0.0, 0.0, 1.0,
        )
    return tuple.__new__(cls, members)
Create a rotation transform at the specified angle, optionally about the specified pivot point. :param angle: Rotation angle in degrees :type angle: float :param pivot: Point to rotate about, if omitted the rotation is about the origin. :type pivot: sequence :rtype: Affine
def get_dexseq_gff(config, default=None):
    """Return the DEXSeq flattened annotation file for this genome.

    Some older versions of the genomes ship the DEXSeq file with a
    ".gff" extension instead of ".gff3", so this looks for either one
    next to the configured file.  Returns None when no candidate exists.
    """
    dexseq_gff = tz.get_in(tz.get_in(['dexseq_gff', 'keys'], LOOKUPS, {}),
                           config, None)
    if not dexseq_gff:
        return None
    gtf_file = get_gtf_file(config)
    if gtf_file:
        base_dir = os.path.dirname(gtf_file)
    else:
        base_dir = os.path.dirname(dexseq_gff)
    # Bug fix: splitext on the full path kept the original directory, so
    # os.path.join(base_dir, base + ext) produced a duplicated path for
    # relative inputs (and silently ignored base_dir for absolute ones).
    base, _ = os.path.splitext(os.path.basename(dexseq_gff))
    for ext in (".gff", ".gff3"):
        candidate = os.path.join(base_dir, base + ext)
        if file_exists(candidate):
            return candidate
    return None
some older versions of the genomes have the DEXseq gff file as gff instead of gff3, so this handles that by looking for either one
def _force_float(v): try: return float(v) except Exception as exc: return float('nan') logger.warning('Failed to convert {} to float with {} error. Using 0 instead.'.format(v, exc))
Converts given argument to float. On fail logs a warning and returns float('nan'). Args: v (any): value to convert to float Returns: float: converted v, or float('nan') if conversion failed.
def compose(*funcs: Callable) -> Callable:
    """Compose multiple functions right to left.

    compose()(x) == x
    compose(f)(x) == f(x)
    compose(g, f)(x) == g(f(x))
    compose(h, g, f)(x) == h(g(f(x)))

    The rightmost function receives the original arguments; every other
    function receives the previous result.  A composition of zero
    functions gives back the identity function.

    Returns the composed function.
    """
    def composed(*args, **kwargs):
        ordered = funcs[::-1]
        if not ordered:
            identity = lambda x: x
            return identity(*args, **kwargs)
        value = ordered[0](*args, **kwargs)
        for fn in ordered[1:]:
            value = fn(value)
        return value
    return composed
Compose multiple functions right to left. Composes zero or more functions into a functional composition. The functions are composed right to left. A composition of zero functions gives back the identity function. compose()(x) == x compose(f)(x) == f(x) compose(g, f)(x) == g(f(x)) compose(h, g, f)(x) == h(g(f(x))) ... Returns the composed function.
def generichash_blake2b_init(key=b'', salt=b'', person=b'',
                             digest_size=crypto_generichash_BYTES):
    """Create a new initialized blake2b hash state.

    :param key: must be at most
        :py:data:`.crypto_generichash_KEYBYTES_MAX` long
    :type key: bytes
    :param salt: must be at most
        :py:data:`.crypto_generichash_SALTBYTES` long;
        will be zero-padded if needed
    :type salt: bytes
    :param person: must be at most
        :py:data:`.crypto_generichash_PERSONALBYTES` long;
        will be zero-padded if needed
    :type person: bytes
    :param digest_size: must be at most
        :py:data:`.crypto_generichash_BYTES_MAX`; the default digest
        size is :py:data:`.crypto_generichash_BYTES`
    :type digest_size: int
    :return: an initialized :py:class:`.Blake2State`
    :rtype: object
    """
    _checkparams(digest_size, key, salt, person)

    state = Blake2State(digest_size)

    # ffi.new allocates zero-initialized buffers, so shorter salt/person
    # values are implicitly zero-padded to the full parameter length.
    salt_buf = ffi.new("unsigned char []", crypto_generichash_SALTBYTES)
    person_buf = ffi.new("unsigned char []", crypto_generichash_PERSONALBYTES)
    ffi.memmove(salt_buf, salt, len(salt))
    ffi.memmove(person_buf, person, len(person))

    rc = lib.crypto_generichash_blake2b_init_salt_personal(
        state._statebuf, key, len(key), digest_size, salt_buf, person_buf)
    ensure(rc == 0, 'Unexpected failure', raising=exc.RuntimeError)
    return state
Create a new initialized blake2b hash state :param key: must be at most :py:data:`.crypto_generichash_KEYBYTES_MAX` long :type key: bytes :param salt: must be at most :py:data:`.crypto_generichash_SALTBYTES` long; will be zero-padded if needed :type salt: bytes :param person: must be at most :py:data:`.crypto_generichash_PERSONALBYTES` long: will be zero-padded if needed :type person: bytes :param digest_size: must be at most :py:data:`.crypto_generichash_BYTES_MAX`; the default digest size is :py:data:`.crypto_generichash_BYTES` :type digest_size: int :return: a initialized :py:class:`.Blake2State` :rtype: object
def __validInterval(self, start, finish):
    """Check if the interval is correct.

    An interval is correct if it has less than 1001 users.  A correct
    interval is appended to the private intervals list; an oversized one
    is split in two and each half re-checked.

    :param start: start date of the interval.
    :type start: datetime.date.
    :param finish: finish date of the interval.
    :type finish: datetime.date.
    """
    start_str = start.strftime("%Y-%m-%d")
    finish_str = finish.strftime("%Y-%m-%d")
    data = self.__readAPI(self.__getURL(1, start_str, finish_str))
    if data["total_count"] >= 1000:
        # The API result count appears capped at 1000 per query, so
        # bisect the date range and recurse on both halves.
        middle = start + (finish - start) / 2
        self.__validInterval(start, middle)
        self.__validInterval(middle, finish)
    else:
        self.__intervals.append([start_str, finish_str])
        self.__logger.info("New valid interval: " + start_str +
                           " to " + finish_str)
Check if the interval is correct. An interval is correct if it has less than 1001 users. If the interval is correct, it will be added to '_intervals' attribute. Else, interval will be split in two news intervals and these intervals will be checked. :param start: start date of the interval. :type start: datetime.date. :param finish: finish date of the interval. :type finish: datetime.date.
def save_assets(self, dest_path):
    """Save plot assets alongside dest_path.

    Some plots may have assets, like bitmap files, which need to be
    saved alongside the rendered plot file.  Each subplot saves its own
    assets with a per-index suffix.

    :param dest_path: path of the main output file.
    """
    for index, subplot in enumerate(self.subplots):
        subplot.save_assets(dest_path, suffix='_%d' % index)
Save plot assets alongside dest_path. Some plots may have assets, like bitmap files, which need to be saved alongside the rendered plot file. :param dest_path: path of the main output file.
def _AssertDataIsList(key, lst):
    """Assert that lst contains flat list data of strings.

    Args:
        key: name used in the error message.
        lst: value expected to be a list or tuple of str.

    Raises:
        NotAListError: if lst is not a list or tuple.
        ElementNotAStringError: if any element is not a string.
    """
    if not isinstance(lst, (list, tuple)):
        raise NotAListError('%s must be a list' % key)
    for element in lst:
        if not isinstance(element, str):
            # Bug fix: the format string was never interpolated — the
            # values were passed as a second positional argument instead
            # of being applied with the % operator.
            raise ElementNotAStringError(
                'Unsupported list element %s found in %s' % (element, lst))
Assert that lst contains list data and is not structured.
def pub_connect(self):
    """Create and connect this thread's zmq socket.

    If a publisher socket already exists "pub_close" is called before
    creating and connecting a new socket.
    """
    if self.pub_sock:
        self.pub_close()
    context = zmq.Context.instance()
    self._sock_data.sock = context.socket(zmq.PUSH)
    # NOTE(review): pub_sock presumably aliases self._sock_data.sock
    # (thread-local storage) — confirm against the property definition.
    self.pub_sock.setsockopt(zmq.LINGER, -1)
    if self.opts.get('ipc_mode', '') == 'tcp':
        pull_uri = 'tcp://127.0.0.1:{0}'.format(
            self.opts.get('tcp_master_publish_pull', 4514)
        )
    else:
        pull_uri = 'ipc://{0}'.format(
            os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')
        )
    log.debug("Connecting to pub server: %s", pull_uri)
    self.pub_sock.connect(pull_uri)
    return self._sock_data.sock
Create and connect this thread's zmq socket. If a publisher socket already exists "pub_close" is called before creating and connecting a new socket.
def unlock():
    """Unlocks the candidate configuration.

    CLI Example:

    .. code-block:: bash

        salt 'device_name' junos.unlock
    """
    conn = __proxy__['junos.conn']()
    ret = {'out': True}
    try:
        conn.cu.unlock()
    except jnpr.junos.exception.UnlockError as exception:
        ret['out'] = False
        ret['message'] = \
            'Could not unlock configuration due to : "{0}"'.format(exception)
    else:
        ret['message'] = "Successfully unlocked the configuration."
    return ret
Unlocks the candidate configuration. CLI Example: .. code-block:: bash salt 'device_name' junos.unlock
def resource(self, uri, methods=frozenset({'GET'}), host=None,
             strict_slashes=None, stream=False, version=None,
             name=None, **kwargs):
    """Create a blueprint resource route from a decorated function.

    :param uri: endpoint at which the route will be accessible.
    :param methods: list of acceptable HTTP methods.
    :param host:
    :param strict_slashes:
    :param version:
    :param name: user defined route name for url_for
    :return: function or class instance

    Accepts any keyword argument that will be passed to the app resource.
    """
    # Fall back to the blueprint-wide slash policy when unspecified.
    slashes = self.strict_slashes if strict_slashes is None else strict_slashes

    def decorator(handler):
        route = FutureRoute(handler, uri, methods, host, slashes,
                            stream, version, name)
        self.resources.append((route, kwargs))
        return handler

    return decorator
Create a blueprint resource route from a decorated function. :param uri: endpoint at which the route will be accessible. :param methods: list of acceptable HTTP methods. :param host: :param strict_slashes: :param version: :param name: user defined route name for url_for :return: function or class instance Accepts any keyword argument that will be passed to the app resource.
def job_success_message(self, job, queue, job_result):
    """Return the message to log when a job is successful."""
    queue_name = queue._cached_name
    job_id = job.pk.get()
    identifier = job._cached_identifier
    return '[%s|%s|%s] success, in %s' % (
        queue_name, job_id, identifier, job.duration)
Return the message to log when a job is successful
def generate(self, ps):
    """Select an alternative for this main target, by finding all
    alternatives which requirements are satisfied by 'properties' and
    picking the one with longest requirements set.

    Returns the result of calling 'generate' on that alternative.
    """
    assert isinstance(ps, property_set.PropertySet)
    self.manager_.targets().start_building(self)

    expanded = ps.expand()
    result = GenerateResult()
    # Generate once per property set produced by the default build.
    for prop_set in self.apply_default_build(expanded):
        result.extend(self.__generate_really(prop_set))

    self.manager_.targets().end_building(self)
    return result
Select an alternative for this main target, by finding all alternatives which requirements are satisfied by 'properties' and picking the one with longest requirements set. Returns the result of calling 'generate' on that alternative.
def image(random=random, width=800, height=600, https=False, *args, **kwargs):
    """Generate the address of a placeholder image.

    The bigger the requested image, the longer the placeholder text
    (noun, thing, or sentence); the host is picked at random between
    dummyimage.com and placekitten.com.
    """
    # Choose placeholder text length by total image size.
    if width + height > 2000:
        text_source = sentence
    elif width + height > 300:
        text_source = thing
    else:
        text_source = noun
    scheme_suffix = "s" if https else ""
    if random.choice([True, False]):
        return "http{s}://dummyimage.com/{width}x{height}/292929/e3e3e3&text={text}".format(
            s=scheme_suffix, width=width, height=height,
            text=text_source(random=random))
    return "http{s}://placekitten.com/{width}/{height}".format(
        s=scheme_suffix, width=width, height=height)
Generate the address of a placeholder image. >>> mock_random.seed(0) >>> image(random=mock_random) 'http://dummyimage.com/800x600/292929/e3e3e3&text=mighty poop' >>> image(random=mock_random, width=60, height=60) 'http://placekitten.com/60/60' >>> image(random=mock_random, width=1920, height=1080) 'http://dummyimage.com/1920x1080/292929/e3e3e3&text=To get to Westeros, you need to go to Britchestown, then drive west.' >>> image(random=mock_random, https=True, width=1920, height=1080) 'https://dummyimage.com/1920x1080/292929/e3e3e3&text=East Mysteryhall is in Westeros.'
def scan_dir(self, path):
    r"""Scan a directory on disk for color table files and add them to
    the registry.

    Parameters
    ----------
    path : str
        The path to the directory with the color tables
    """
    pattern = os.path.join(path, '*' + TABLE_EXT)
    for fname in glob.glob(pattern):
        if not os.path.isfile(fname):
            continue
        table_name = os.path.splitext(os.path.basename(fname))[0]
        with open(fname, 'r') as fobj:
            try:
                self.add_colortable(fobj, table_name)
                log.debug('Added colortable from file: %s', fname)
            except RuntimeError:
                # An unparsable table is skipped, not fatal.
                log.info('Skipping unparsable file: %s', fname)
r"""Scan a directory on disk for color table files and add them to the registry. Parameters ---------- path : str The path to the directory with the color tables
def loads(cls, value):
    """Returns mapping type deserialized `value`.

    A mapping wrapped as ``{cls.sentinel: payload}`` is unwrapped to its
    payload; anything else is returned unchanged.
    """
    is_wrapped = len(value) == 1 and cls.sentinel in value
    return value[cls.sentinel] if is_wrapped else value
Returns the deserialized `value`; a single-entry mapping keyed by the sentinel is unwrapped to its payload, anything else is returned unchanged.
def _get_compressed_vlan_list(self, pvlan_ids): if not pvlan_ids: return [] pvlan_list = list(pvlan_ids) pvlan_list.sort() compressed_list = [] begin = -1 prev_vlan = -1 for port_vlan in pvlan_list: if prev_vlan == -1: prev_vlan = port_vlan else: if (port_vlan - prev_vlan) == 1: if begin == -1: begin = prev_vlan prev_vlan = port_vlan else: if begin == -1: compressed_list.append(str(prev_vlan)) else: compressed_list.append("%d-%d" % (begin, prev_vlan)) begin = -1 prev_vlan = port_vlan if begin == -1: compressed_list.append(str(prev_vlan)) else: compressed_list.append("%s-%s" % (begin, prev_vlan)) return compressed_list
Generate a compressed vlan list ready for XML using a vlan set. Sample Use Case: Input vlan set: -------------- 1 - s = set([11, 50, 25, 30, 15, 16, 3, 8, 2, 1]) 2 - s = set([87, 11, 50, 25, 30, 15, 16, 3, 8, 2, 1, 88]) Returned compressed XML list: ---------------------------- 1 - compressed_list = ['1-3', '8', '11', '15-16', '25', '30', '50'] 2 - compressed_list = ['1-3', '8', '11', '15-16', '25', '30', '50', '87-88']
def main():
    """Entry point invoked by the script installed by setuptools."""
    parser.name('tinman')
    parser.description(__desc__)
    arg_parser = parser.get()
    arg_parser.add_argument('-p', '--path',
                            action='store',
                            dest='path',
                            help='Path to prepend to the Python system path')
    helper.start(Controller)
Invoked by the script installed by setuptools.
def copy_abiext(self, inext, outext):
    """Copy the Abinit file with extension inext to a new file with
    extension outext.

    Returns 0 on success.  Raises RuntimeError if no file with the
    requested extension exists or its name contains no '_' separator.
    """
    infile = self.has_abiext(inext)
    if not infile:
        raise RuntimeError('no file with extension %s in %s' % (inext, self))

    # The new name keeps everything up to the last underscore.
    sep = infile.rfind('_')
    if sep == -1:
        raise RuntimeError('Extension %s could not be detected in file %s'
                           % (inext, infile))

    outfile = infile[:sep] + '_' + outext
    shutil.copy(infile, outfile)
    return 0
Copy the Abinit file with extension inext to a new file withw extension outext
def total_msg_recv(self):
    """Returns total number of UPDATE, NOTIFICATION and ROUTE_REFRESH
    messages received from this peer."""
    counters = (PeerCounterNames.RECV_UPDATES,
                PeerCounterNames.RECV_REFRESH,
                PeerCounterNames.RECV_NOTIFICATION)
    return sum(self.get_count(counter) for counter in counters)
Returns total number of UPDATE, NOTIFICATION and ROUTE_REFRESH messages received from this peer.
def walk_dependencies(root, visitor):
    """Call visitor on root and all dependencies reachable from it, in
    depth-first pre-order.

    Args:
        root (component): component function or class
        visitor (function): signature is `func(component, parent)`.
            The call on root is `visitor(root, None)`.
    """
    def _walk(parent):
        for dep in get_dependencies(parent):
            visitor(dep, parent)
            _walk(dep)

    visitor(root, None)
    _walk(root)
Call visitor on root and all dependencies reachable from it in depth-first pre-order. Args: root (component): component function or class visitor (function): signature is `func(component, parent)`. The call on root is `visitor(root, None)`.
def _bytes_to_human(self, B): KB = float(1024) MB = float(KB ** 2) GB = float(KB ** 3) TB = float(KB ** 4) if B < KB: return '{0} B'.format(B) B = float(B) if KB <= B < MB: return '{0:.2f} KB'.format(B/KB) elif MB <= B < GB: return '{0:.2f} MB'.format(B/MB) elif GB <= B < TB: return '{0:.2f} GB'.format(B/GB) elif TB <= B: return '{0:.2f} TB'.format(B/TB)
Return the given bytes as a human friendly KB, MB, GB, or TB string
def draw_rendered_map(self, surf):
    """Draw the rendered pixels."""
    render_map = self._obs.observation.render_data.map
    rgb_array = features.Feature.unpack_rgb_image(render_map)
    surf.blit_np_array(rgb_array)
Draw the rendered pixels.