code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def construct_stable_id( parent_context, polymorphic_type, relative_char_offset_start, relative_char_offset_end, ): doc_id, _, parent_doc_char_start, _ = split_stable_id(parent_context.stable_id) start = parent_doc_char_start + relative_char_offset_start end = parent_doc_char_start + relativ...
Construct a stable ID for a Context given its parent and its character offsets relative to the parent.
def get(self, sid): return ChannelContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
Constructs a ChannelContext :param sid: The sid :returns: twilio.rest.chat.v1.service.channel.ChannelContext :rtype: twilio.rest.chat.v1.service.channel.ChannelContext
def create_unsigned_transaction(cls, *, nonce: int, gas_price: int, gas: int, to: Address, value: int, ...
Proxy for instantiating an unsigned transaction for this VM.
def artUrl(self): art = self.firstAttr('art', 'grandparentArt') return self._server.url(art, includeToken=True) if art else None
Return the first art url, starting with the most specific one for that item.
def extendMarkdown(self, md, md_globals=None): if any( x not in md.treeprocessors for x in self.REQUIRED_EXTENSION_INTERNAL_NAMES): raise RuntimeError( "The attr_cols markdown extension depends the following" " extensions which must pre...
Initializes markdown extension components.
def side_task(pipe, *side_jobs): assert iterable(pipe), 'side_task needs the first argument to be iterable' for sj in side_jobs: assert callable(sj), 'all side_jobs need to be functions, not {}'.format(sj) side_jobs = (lambda i:i ,) + side_jobs for i in map(pipe, *side_jobs): yield i[0]
allows you to run a function in a pipeline without affecting the data
def _extract_axes_for_slice(self, axes): return {self._AXIS_SLICEMAP[i]: a for i, a in zip(self._AXIS_ORDERS[self._AXIS_LEN - len(axes):], axes)}
Return the slice dictionary for these axes.
def check_exists_repositories(repo): pkg_list = "PACKAGES.TXT" if repo == "sbo": pkg_list = "SLACKBUILDS.TXT" if check_for_local_repos(repo) is True: pkg_list = "PACKAGES.TXT" return "" if not os.path.isfile("{0}{1}{2}".format( _meta_.lib_path, repo, "_repo/{0}".forma...
Check if repositories exist by the PACKAGES.TXT file
def keyframe(self, index): index = int(index) if index < 0: index %= len(self) if self._keyframe.index == index: return if index == 0: self._keyframe = self.pages[0] return if self._indexed or index < len(self.pages): pa...
Set current keyframe. Load TiffPage from file if necessary.
def multi_replace(instr, search_list=[], repl_list=None): repl_list = [''] * len(search_list) if repl_list is None else repl_list for ser, repl in zip(search_list, repl_list): instr = instr.replace(ser, repl) return instr
Does a string replace with a list of search and replacements TODO: rename
def from_data(cls, data): if not data.shape[1] == 3: raise ValueError("Gyroscope data must have shape (N, 3)") instance = cls() instance.data = data return instance
Create gyroscope stream from data array Parameters ------------------- data : (N, 3) ndarray Data array of angular velocities (rad/s) Returns ------------------- GyroStream Stream object
def fetch_hg_push_log(repo_name, repo_url): newrelic.agent.add_custom_parameter("repo_name", repo_name) process = HgPushlogProcess() process.run(repo_url + '/json-pushes/?full=1&version=2', repo_name)
Run a HgPushlog etl process
def _update(self): aps = [] for k, v in self.records.items(): recall, prec = self._recall_prec(v, self.counts[k]) ap = self._average_precision(recall, prec) aps.append(ap) if self.num is not None and k < (self.num - 1): self.sum_metric[k] =...
update num_inst and sum_metric
def send(self, message): message = message.SerializeToString() self.socket.sendall(struct.pack('!I', len(message)) + message) length = struct.unpack('!I', self.socket.recv(4))[0] response = riemann_client.riemann_pb2.Msg() response.ParseFromString(socket_recvall(self.socket, leng...
Sends a message to a Riemann server and returns its response :param message: The message to send to the Riemann server :returns: The response message from Riemann :raises RiemannError: if the server returns an error
def environment_failure(self, error): log.exception( 'Failed to create environment for %s: %s', self.__class__.__name__, get_error_message(error) ) self.shutdown(error)
Log environment failure for the daemon and exit with the error code. :param error: :return:
def get_process_token(): token = wintypes.HANDLE() res = process.OpenProcessToken( process.GetCurrentProcess(), process.TOKEN_ALL_ACCESS, token) if not res > 0: raise RuntimeError("Couldn't get process token") return token
Get the current process token
def sociallogin_from_response(self, request, response): from allauth.socialaccount.models import SocialLogin, SocialAccount adapter = get_adapter(request) uid = self.extract_uid(response) extra_data = self.extract_extra_data(response) common_fields = self.extract_common_fields(re...
Instantiates and populates a `SocialLogin` model based on the data retrieved in `response`. The method does NOT save the model to the DB. Data for `SocialLogin` will be extracted from `response` with the help of the `.extract_uid()`, `.extract_extra_data()`, `.extract_common_fie...
def contains(self, other): return (self.alt == other.alt and self.prefix.endswith(other.prefix) and self.suffix.startswith(other.suffix))
Is the other VariantSequence a subsequence of this one? The two sequences must agree on the alt nucleotides, the prefix of the longer must contain the prefix of the shorter, and the suffix of the longer must contain the suffix of the shorter.
def add_lv_load_area_group(self, lv_load_area_group): if lv_load_area_group not in self.lv_load_area_groups(): self._lv_load_area_groups.append(lv_load_area_group)
Adds a LV load_area group to _lv_load_area_groups if not already existing.
def debug(self, request, message, extra_tags='', fail_silently=False): add(self.target_name, request, constants.DEBUG, message, extra_tags=extra_tags, fail_silently=fail_silently)
Add a message with the ``DEBUG`` level.
def create_linked_data_element_from_resource(self, resource): mp = self.__mp_reg.find_or_create_mapping(Link) return mp.data_element_class.create_from_resource(resource)
Returns a new linked data element for the given resource object. :returns: object implementing :class:`ILinkedDataElement`.
def isPackage(self, dotted_name, extrapath=None): candidate = self.isModule(dotted_name + '.__init__', extrapath) if candidate: candidate = candidate[:-len(".__init__")] return candidate
Is ``dotted_name`` the name of a package?
def _set_alarm(self, status, home_id): response = self._request( MINUT_HOMES_URL + "/{}".format(home_id), request_type='PUT', json={'alarm_status': status}) return response.get('alarm_status', '') == status
Set alarm status.
def on_change(self, *callbacks): for callback in callbacks: if callback in self._callbacks: continue _check_callback(callback, ('event',)) self._callbacks[callback] = callback
Provide callbacks to invoke if the document or any Model reachable from its roots changes.
def ismount(self, path): path = make_string_path(path) if not path: return False normed_path = self.filesystem.absnormpath(path) sep = self.filesystem._path_separator(path) if self.filesystem.is_windows_fs: if self.filesystem.alternative_path_separator is ...
Return true if the given path is a mount point. Args: path: Path to filesystem object to be checked Returns: `True` if path is a mount point added to the fake file system. Under Windows also returns True for drive and UNC roots (independent of their exis...
def parse_literal(x): if isinstance(x, list): return [parse_literal(y) for y in x] elif isinstance(x, (bytes, str)): try: return int(x) except ValueError: try: return float(x) except ValueError: return x else: ...
return the smallest possible data type for a string or list of strings Parameters ---------- x: str or list a string to be parsed Returns ------- int, float or str the parsing result Examples -------- >>> isinstance(parse_literal('1.5'), float) True ...
def get(self, type: Type[T], query: Mapping[str, Any]) -> T: LOGGER.info("Getting SourceHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._get_types[type] except KeyError: try: LOGGER.info("Building new SourceHandlers for \"{type}\"...
Gets a query from the data pipeline. 1) Extracts the query the sequence of data sources. 2) Inserts the result into the data sinks (if appropriate). 3) Transforms the result into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Arg...
def count_samples(ns_run, **kwargs): r kwargs.pop('logw', None) kwargs.pop('simulate', None) if kwargs: raise TypeError('Unexpected **kwargs: {0}'.format(kwargs)) return ns_run['logl'].shape[0]
r"""Number of samples in run. Unlike most estimators this does not require log weights, but for convenience will not throw an error if they are specified. Parameters ---------- ns_run: dict Nested sampling run dict (see the data_processing module docstring for more details). R...
def _get(self, key): self._populate_cache() if key not in self._cache: raise AttributeError("DataField has no member {}".format(key)) return self._cache[key]
Return given key from cache.
def stop_monitoring(self) -> None: self._context.optimisation_finished = True self._on_iteration() if self._print_summary: self.print_summary()
The recommended way of using Monitor is opening it with the `with` statement. In this case the user doesn't need to call this function explicitly. Otherwise the function should be called when the optimisation is done. The function sets the optimisation completed flag in the monitoring context a...
def arch(self): if self.method in ('buildArch', 'createdistrepo', 'livecd'): return self.params[2] if self.method in ('createrepo', 'runroot'): return self.params[1] if self.method == 'createImage': return self.params[3] if self.method == 'indirectioni...
Return an architecture for this task. :returns: an arch string (eg "noarch", or "ppc64le"), or None if this task has no architecture associated with it.
def find_existing(self): sg = self.consul.find_secgroup(self.name) current = sg.rules log.debug('Current rules: %s' % current) log.debug('Intended rules: %s' % self.rules) exp_rules = [] for rule in self.rules: exp = ( rule[A.secgroup.PROTO...
Finds existing rule in secgroup. Populates ``self.create_these_rules`` and ``self.delete_these_rules``.
def _build_callback(self, config): wrapped = config['callback'] plugins = self.plugins + config['apply'] skip = config['skip'] try: for plugin in reversed(plugins): if True in skip: break if plugin in skip or type(plugin) in skip: continue ...
Apply plugins to a route and return a new callable.
def signature(self): iexec, execmod = self.context.parser.tree_find(self.context.el_name, self.context.module, "executables") if iexec is None: iexec, execmod = self.context.parser.tree_find(self.context.el_name, self.context.module, ...
Gets completion or call signature information for the current cursor.
def long_to_bytes(N, blocksize=1): bytestring = hex(N) bytestring = bytestring[2:] if bytestring.startswith('0x') else bytestring bytestring = bytestring[:-1] if bytestring.endswith('L') else bytestring bytestring = '0' + bytestring if (len(bytestring) % 2) != 0 else bytestring bytestring = binascii...
Given an input integer ``N``, ``long_to_bytes`` returns the representation of ``N`` in bytes. If ``blocksize`` is greater than ``1`` then the output string will be right justified and then padded with zero-bytes, such that the return values length is a multiple of ``blocksize``.
def wait_for_and_dismiss_alert(driver, timeout=settings.LARGE_TIMEOUT): alert = wait_for_and_switch_to_alert(driver, timeout) alert_text = alert.text alert.dismiss() return alert_text
Wait for and dismiss an alert. Returns the text from the alert. @Params driver - the webdriver object (required) timeout - the time to wait for the alert in seconds
def update_reading_events(readings, event_record): for i in range(event_record.start_interval - 1, event_record.end_interval): readings[i] = Reading( t_start=readings[i].t_start, t_end=readings[i].t_end, read_value=readings[i].read_value, uom=readings[i].uom, ...
Updates readings from a 300 row to reflect any events found in a subsequent 400 row
def _data(self): self.wait_to_read() hdl = NDArrayHandle() check_call(_LIB.MXNDArrayGetDataNDArray(self.handle, ctypes.byref(hdl))) return NDArray(hdl)
A deep copy NDArray of the data array associated with the BaseSparseNDArray. This function blocks. Do not use it in performance critical code.
def up(ctx, instance_id): session = create_session(ctx.obj['AWS_PROFILE_NAME']) ec2 = session.resource('ec2') try: instance = ec2.Instance(instance_id) instance.start() except botocore.exceptions.ClientError as e: click.echo("Invalid instance ID {0} ({1})".format(instance_id, e),...
Start EC2 instance
def validate_candidates(self): async def slave_task(addr, candidates): r_manager = await self.env.connect(addr) return await r_manager.validate_candidates(candidates) self._log(logging.DEBUG, "Validating {} candidates" .format(len(self.candidates))) cand...
Validate current candidates. This method validates the current candidate list in all the agents in the environment (or underlying slave environments) and replaces the current :attr:`candidates` with the list of validated candidates. The artifact candidates must be hashable and have a :...
def get_objective_search_session(self): if not self.supports_objective_search(): raise Unimplemented() try: from . import sessions except ImportError: raise OperationFailed() try: session = sessions.ObjectiveSearchSession(runtime=self._runt...
Gets the OsidSession associated with the objective search service. return: (osid.learning.ObjectiveSearchSession) - an ObjectiveSearchSession raise: OperationFailed - unable to complete request raise: Unimplemented - supports_objective_search() is false complia...
def list(self, entity=None): uri = "/%s" % self.uri_base if entity: uri = "%s?entityId=%s" % (uri, utils.get_id(entity)) resp, resp_body = self._list(uri, return_raw=True) return resp_body
Returns a dictionary of data, optionally filtered for a given entity.
def _WaitForStartup(self, deadline): start = time.time() sleep = 0.05 def Elapsed(): return time.time() - start while True: try: response, _ = self._http.request(self._host) if response.status == 200: logging.info('emulator responded after %f seconds', Elapsed()) ...
Waits for the emulator to start. Args: deadline: deadline in seconds Returns: True if the emulator responds within the deadline, False otherwise.
def main(config, host, port, logfile, debug, daemon, uid, gid, pidfile, umask, rundir): _main(**locals())
Main entry point for running a socket server from the commandline. This method will read in options from the commandline and call the L{config.init_config} method to get everything setup. Then, depending on whether deamon mode was specified or not, the process may be forked (or not) and the server will b...
def getAvailableTemplates(self): try: adapters = getAdapters((self.context, ), IGetStickerTemplates) except ComponentLookupError: logger.info("No IGetStickerTemplates adapters found.") adapters = None templates = [] if adapters is not None: ...
Returns an array with the templates of stickers available. Each array item is a dictionary with the following structure: {'id': <template_id>, 'title': <template_title>, 'selected': True/False}
def available_cpu_count() -> int: try: match = re.search(r'(?m)^Cpus_allowed:\s*(.*)$', open('/proc/self/status').read()) if match: res = bin(int(match.group(1).replace(',', ''), 16)).count('1') if res > 0: return res except IOErr...
Get the number of available CPUs. Number of available virtual or physical CPUs on this system, i.e. user/real as output by time(1) when called with an optimally scaling userspace-only program. Returns: Number of available CPUs.
def instruction_ROL_register(self, opcode, register): a = register.value r = self.ROL(a) register.set(r)
Rotate accumulator left
def get_params(names): params = {} for name in names: value = request.form.get(name) or request.files.get(name) if value is not None: params[name] = value return params
Return a dictionary with params from request. TODO: I think we don't use it anymore and it should be removed before someone gets hurt.
def parse(self, src): self.cssBuilder.beginStylesheet() try: src = cssSpecial.cleanupCSS(src) try: src, stylesheet = self._parseStylesheet(src) except self.ParseError as err: err.setFullCSSSource(src) raise final...
Parses CSS string source using the current cssBuilder. Use for embedded stylesheets.
def lan(self, move: Move) -> str: return self._algebraic(move, long=True)
Gets the long algebraic notation of the given move in the context of the current position.
def unshift(self, chunk): if chunk: self._pos -= len(chunk) self._unconsumed.append(chunk)
Pushes a chunk of data back into the internal buffer. This is useful in certain situations where a stream is being consumed by code that needs to "un-consume" some amount of data that it has optimistically pulled out of the source, so that the data can be passed on to some other party.
def _parse(): if not args["reparse"]: settings.use_filesystem_cache = False c = CodeParser() if args["verbose"]: c.verbose = True if args["reparse"]: c.reparse(args["source"]) else: c.parse(args["source"]) return c
Parses the specified Fortran source file from which the wrappers will be constructed for ctypes.
def predict(self, inputs: np.ndarray) -> np.ndarray: return self.sess.run(self.out_var, {self.inp_var: inputs})
Run on multiple inputs
def was_modified_since(header=None, mtime=0, size=0): header_mtime = modified_since(header, size) if header_mtime and header_mtime <= mtime: return False return True
Check if an item was modified since the user last downloaded it :param header: the value of the ``If-Modified-Since`` header. If this is ``None``, simply return ``True`` :param mtime: the modification time of the item in question. :param size: the size of the item.
def _close_window(self, window): if window == self.active_window: self.close_active_window() else: original_active_window = self.active_window self.close_active_window() self.active_window = original_active_window
Close this window.
def _find_global(self, module, func): if module == __name__: if func == '_unpickle_call_error' or func == 'CallError': return _unpickle_call_error elif func == '_unpickle_sender': return self._unpickle_sender elif func == '_unpickle_context': ...
Return the class implementing `module_name.class_name` or raise `StreamError` if the module is not whitelisted.
def _index_document(self, document, force=False): query = text( ) self.execute(query, **document)
Adds dataset document to the index.
def save(self, path): self.clip.write_videofile(path, audio_fps=self.clip.audio.fps)
Save source video to file. Args: path (str): Filename to save to. Notes: Saves entire source video to file, not just currently selected frames.
def _check_authentication(self, request, request_args, request_kwargs): try: is_valid, status, reasons = self._verify( request, request_args=request_args, request_kwargs=request_kwargs, ) except Exception as e: logger.de...
Checks a request object to determine if that request contains a valid, and authenticated JWT. It returns a tuple: 1. Boolean whether the request is authenticated with a valid JWT 2. HTTP status code 3. Reasons (if any) for a potential authentication failure
def recruit(self): if not self.networks(full=False): self.log("All networks full: closing recruitment", "-----") self.recruiter.close_recruitment()
Recruit participants to the experiment as needed. This method runs whenever a participant successfully completes the experiment (participants who fail to finish successfully are automatically replaced). By default it recruits 1 participant at a time until all networks are full.
def sort_batch_by_length(tensor: torch.Tensor, sequence_lengths: torch.Tensor): if not isinstance(tensor, torch.Tensor) or not isinstance(sequence_lengths, torch.Tensor): raise ConfigurationError("Both the tensor and sequence lengths must be torch.Tensors.") sorted_sequence_lengths, permutation_index = ...
Sort a batch first tensor by some specified lengths. Parameters ---------- tensor : torch.FloatTensor, required. A batch first Pytorch tensor. sequence_lengths : torch.LongTensor, required. A tensor representing the lengths of some dimension of the tensor which we want to sort b...
def _sim_texture(r1, r2): return sum([min(a, b) for a, b in zip(r1["hist_t"], r2["hist_t"])])
calculate the sum of histogram intersection of texture
def snappy_encode(payload, xerial_compatible=False, xerial_blocksize=32 * 1024): if not has_snappy(): raise NotImplementedError("Snappy codec is not available") if xerial_compatible: def _chunker(): for i in range(0, len(payload), xerial_blocksize): ...
Compress the given data with the Snappy algorithm. :param bytes payload: Data to compress. :param bool xerial_compatible: If set then the stream is broken into length-prefixed blocks in a fashion compatible with the xerial snappy library. The format winds up being:: +-----...
def _trim_zeros_float(str_floats, na_rep='NaN'): trimmed = str_floats def _is_number(x): return (x != na_rep and not x.endswith('inf')) def _cond(values): finite = [x for x in values if _is_number(x)] return (len(finite) > 0 and all(x.endswith('0') for x in finite) and ...
Trims zeros, leaving just one before the decimal points if need be.
def SendSerializedMessage(self, message): try: ba = Helper.ToArray(message) ba2 = binascii.unhexlify(ba) self.bytes_out += len(ba2) self.transport.write(ba2) except Exception as e: logger.debug(f"Could not send serialized message {e}")
Send the `message` to the remote client. Args: message (neo.Network.Message):
def async_do(self, size=10): if hasattr(self._session, '_async_jobs'): logging.info("Executing asynchronous %s jobs found in queue by using %s threads..." % ( len(self._session._async_jobs), size)) threaded_requests.map(self._session._async_jobs, size=size)
Execute all asynchronous jobs and wait for them to finish. By default it will run on 10 threads. :param size: number of threads to run on.
def designspace(self): if self._designspace_is_complete: return self._designspace self._designspace_is_complete = True list(self.masters) self.to_designspace_axes() self.to_designspace_sources() self.to_designspace_instances() self.to_designspace_famil...
Get a designspace Document instance that links the masters together and holds instance data.
def __assert_false(returned): result = "Pass" if isinstance(returned, str): try: returned = bool(returned) except ValueError: raise try: assert (returned is False), "{0} not False".format(returned) except AssertionError ...
Test if a boolean is False
def import_module(module_fqname, superclasses=None): module_name = module_fqname.rpartition(".")[-1] module = __import__(module_fqname, globals(), locals(), [module_name]) modules = [class_ for cname, class_ in inspect.getmembers(module, inspect.isclass) if class_.__module__ ==...
Imports the module module_fqname and returns a list of defined classes from that module. If superclasses is defined then the classes returned will be subclasses of the specified superclass or superclasses. If superclasses is plural it must be a tuple of classes.
def cancel(self): if self.done(): return False self._client.cancel_operation(self._operation.name) return True
If last Operation's value of `done` is true, returns false; otherwise, issues OperationsClient.cancel_operation and returns true.
def on_message(self, message): if message.address != self._address: return if isinstance(message, velbus.ChannelNamePart1Message) or isinstance(message, velbus.ChannelNamePart1Message2): self._process_channel_name_message(1, message) elif isinstance(message, velbus.Channe...
Process received message
def workspace_cli(ctx, directory, mets_basename, backup): ctx.obj = WorkspaceCtx(os.path.abspath(directory), mets_basename, automatic_backup=backup)
Working with workspace
def sync_in(self, force=False): self.log('---- Sync In ----') self.dstate = self.STATES.BUILDING for path_name in self.source_fs.listdir(): f = self.build_source_files.instance_from_name(path_name) if not f: self.warn('Ignoring unknown file: {}'.format(pat...
Synchronize from files to records, and records to objects
def execute_replay() -> None: files = glob.glob('./replay/toDo/*') sorted_files = sorted(files, key=os.path.getctime) if not sorted_files: LOG.debug('Found %s, beginning execution.', sorted_files) for command_file in sorted_files: with open(command_file, 'r') as command: ...
Execute all commands. For every command that is found in replay/toDo, execute each of them and move the file to the replay/archive directory.
def posterior_samples(self, X, size=10, full_cov=False, Y_metadata=None, likelihood=None, **predict_kwargs): return self.posterior_samples_f(X, size, full_cov=full_cov, **predict_kwargs)
Samples the posterior GP at the points X, equivalent to posterior_samples_f due to the absence of a likelihood.
def check_mapping(self, m): if 'name' not in m: self.pr_dbg("Missing %s" % "name") return False for x in ['analyzed', 'indexed', 'type', 'scripted', 'count']: if x not in m or m[x] == "": self.pr_dbg("Missing %s" % x) self.pr_dbg("Full ...
Assert minimum set of fields in cache, does not validate contents
def eval(e, amplitude, e_0, alpha, e_cutoff, beta): xx = e / e_0 return amplitude * xx ** (-alpha) * np.exp(-(e / e_cutoff) ** beta)
One dimensional power law with an exponential cutoff model function
def pop(self, k, d=_POP_DEFAULT): if d is _POP_DEFAULT: return self._ingredients.pop(k) else: return self._ingredients.pop(k, d)
Pop an ingredient off of this shelf.
def get(self, r): if r is None: return None if r.lower() == '(sp)' and self.stack: return self.stack[-1] if r[:1] == '(': return self.mem[r[1:-1]] r = r.lower() if is_number(r): return str(valnum(r)) if not is_register(r): ...
Returns precomputed value of the given expression
def target(self): target = self.path or '/' if self.query: target = '{}?{}'.format(target, self.query) return target
The "target" i.e. local part of the URL, consisting of the path and query.
def query_mxrecords(self): import dns.resolver logging.info('Resolving DNS query...') answers = dns.resolver.query(self.domain, 'MX') addresses = [answer.exchange.to_text() for answer in answers] logging.info( '{} records found:\n{}'.format( len(addres...
Looks up for the MX DNS records of the recipient SMTP server
def _encode(self): data = ByteBuffer() if not hasattr(self, '__fields__'): return data.tostring() for field in self.__fields__: field.encode(self, data) return data.tostring()
Encode the message and return a bytestring.
def eye(root=None, zodb_uri=None, port=8080): if root is not None: root_factory = lambda request: Node(root) elif zodb_uri is not None: if '://' not in zodb_uri: zodb_uri = 'file://' + os.path.abspath(zodb_uri) from repoze.zodbconn.finder import PersistentApplicationF...
Serves a WSGI app to browse objects based on a root object or ZODB URI.
def postprocess_citedReferences(self, entry): if type(entry.citedReferences) is not list: entry.citedReferences = [entry.citedReferences]
If only a single cited reference was found, ensure that ``citedReferences`` is nonetheless a list.
def get_status(self, device_id): devices = self.get_devices() if devices != False: for device in devices: if device['door'] == device_id: return device['status'] return False
List only MyQ garage door devices.
def binarize(self, threshold=0): if not self.is_binarized(): self.pianoroll = (self.pianoroll > threshold)
Binarize the pianoroll. Parameters ---------- threshold : int or float A threshold used to binarize the pianorolls. Defaults to zero.
def _parse(value, strict=True): pattern = r'(?:(?P<hours>\d+):)?(?P<minutes>\d+):(?P<seconds>\d+)' match = re.match(pattern, value) if not match: raise ValueError('Invalid duration value: %s' % value) hours = safe_int(match.group('hours')) minutes = safe_int(match.group('minutes')) secon...
Preliminary duration value parser strict=True (by default) raises StrictnessError if either hours, minutes or seconds in duration value exceed allowed values
def _set_magic_constants(self): real_path = os.path.realpath(self._source_filename) self._replace['__FILE__'] = "'%s'" % real_path self._replace['__ROUTINE__'] = "'%s'" % self._routine_name self._replace['__DIR__'] = "'%s'" % os.path.dirname(real_path)
Adds magic constants to replace list.
def load_files(filenames,multiproc=False,**kwargs): filenames = np.atleast_1d(filenames) logger.debug("Loading %s files..."%len(filenames)) kwargs = [dict(filename=f,**kwargs) for f in filenames] if multiproc: from multiprocessing import Pool processes = multiproc if multiproc > 0 else N...
Load a set of FITS files with kwargs.
def _should_split_cell(cls, cell_text: str) -> bool: if ', ' in cell_text or '\n' in cell_text or '/' in cell_text: return True return False
Checks whether the cell should be split. We're just doing the same thing that SEMPRE did here.
def _input_stmt(self, stmt: Statement, sctx: SchemaContext) -> None: self.get_child("input")._handle_substatements(stmt, sctx)
Handle RPC or action input statement.
def on_invite(self, connection, event): sender = self.get_nick(event.source) invited = self.get_nick(event.target) channel = event.arguments[0] if invited == self._nickname: logging.info("! I am invited to %s by %s", channel, sender) connection.join(channel) ...
Got an invitation to a channel
def RegisterPlugin(self, report_plugin_cls): name = report_plugin_cls.__name__ if name in self.plugins: raise RuntimeError("Can't register two report plugins with the same " "name. In particular, can't register the same " "report plugin twice: %r" % name) ...
Registers a report plugin for use in the GRR UI.
def get_uninvoiced_hours(entries, billable=None): statuses = ('invoiced', 'not-invoiced') if billable is not None: billable = (billable.lower() == u'billable') entries = [e for e in entries if e.activity.billable == billable] hours = sum([e.hours for e in entries if e.status not in statuses]...
Given an iterable of entries, return the total hours that have not been invoiced. If billable is passed as 'billable' or 'nonbillable', limit to the corresponding entries.
def format_output(self, rendered_widgets): ret = [u'<ul class="formfield">'] for i, field in enumerate(self.fields): label = self.format_label(field, i) help_text = self.format_help_text(field, i) ret.append(u'<li>%s %s %s</li>' % ( label, rendered_wid...
This output will yield all widgets grouped in an unordered list
def resource_urls(request): url_parsed = urlparse(settings.SEARCH_URL) defaults = dict( APP_NAME=__description__, APP_VERSION=__version__, SITE_URL=settings.SITE_URL.rstrip('/'), SEARCH_TYPE=settings.SEARCH_TYPE, SEARCH_URL=settings.SEARCH_URL, SEARCH_IP='%s://%s:...
Global values to pass to templates
def list_dvportgroups(dvs=None, portgroup_names=None, service_instance=None): ret_dict = [] proxy_type = get_proxy_type() if proxy_type == 'esxdatacenter': datacenter = __salt__['esxdatacenter.get_details']()['datacenter'] dc_ref = _get_proxy_target(service_instance) elif proxy_type == '...
Returns a list of distributed virtual switch portgroups. The list can be filtered by the portgroup names or by the DVS. dvs Name of the DVS containing the portgroups. Default value is None. portgroup_names List of portgroup names to look for. If None, all portgroups are ret...
def get_kwargs(self): return {k: v for k, v in vars(self).items() if k not in self._ignored}
Return kwargs from attached attributes.
def is_transaction_expired(transaction, block_number): is_update_expired = ( isinstance(transaction, ContractSendChannelUpdateTransfer) and transaction.expiration < block_number ) if is_update_expired: return True is_secret_register_expired = ( isinstance(transaction, Con...
True if transaction cannot be mined because it has expired. Some transactions are time dependent, e.g. the secret registration must be done before the lock expiration, and the update transfer must be done before the settlement window is over. If the current block is higher than any of these expirations...
def parse(self, file_name): self.object = self.parsed_class() with open(file_name, encoding='utf-8') as f: self.parse_str(f.read()) return self.object
Parse entire file and return relevant object. :param file_name: File path :type file_name: str :return: Parsed object