code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def cells(self):
    """Return the total number of cells in the MOC.

    Cells from every order are counted equally; iterating ``self`` yields
    ``(order, cells)`` pairs.
    """
    return sum(len(order_cells) for _order, order_cells in self)
The number of cells in the MOC. This gives the total number of cells at all orders, with cells from every order counted equally. >>> m = MOC(0, (1, 2)) >>> m.cells 2
def _pull_content_revision_parent(self): if self._revision_id is None: query_params = { "prop": "extracts|revisions", "explaintext": "", "rvprop": "ids", } query_params.update(self.__title_query_param()) request = se...
combine the pulling of these three properties
def getBottomLeft(self): x1 = float(self.get_x1()) x2 = float(self.get_x2()) y1 = float(self.get_y1()) y2 = float(self.get_y2()) if x1 < x2: if y1 < y2: return (x1, y1) else: return (x1, y2) else: if y1 <...
Retrieves the bottom left coordinate of the line as a tuple. Coordinates must be numbers.
def getExtensions(self, extname='SCI', section=None): if section is None: numext = 0 section = [] for hdu in self._image: if 'extname' in hdu.header and hdu.header['extname'] == extname: section.append(hdu.header['extver']) else: ...
Return the list of EXTVER values for extensions with name specified in extname.
def simple_swap(ins: Instruction) -> Instruction: try: rule = ins.details['transform']['simple_swap'] except KeyError: return ins replacement_ins = opcode_table[rule['op']] return Instruction( replacement_ins['mnemonic'], replacement_ins['op'], [Operand( ...
Replaces one instruction with another based on the transform rules in the bytecode definitions. This can help simplify your code as it reduces the overall number of instructions. For example, `aload_0` will become `aload 0`. :param ins: Instruction to potentially modify. :return: Potentially modifi...
def get_jvm_options(self): ret = [] for opt in self.get_options().options: ret.extend(safe_shlex_split(opt)) if (self.get_options().debug or self.get_options().is_flagged('debug_port') or self.get_options().is_flagged('debug_args')): debug_port = self.get_options().debug_port ...
Return the options to run this JVM with. These are options to the JVM itself, such as -Dfoo=bar, -Xmx=1g, -XX:-UseParallelGC and so on. Thus named because get_options() already exists (and returns this object's Pants options).
def find_experiment_export(app_id): cwd = os.getcwd() data_filename = "{}-data.zip".format(app_id) path_to_data = os.path.join(cwd, "data", data_filename) if os.path.exists(path_to_data): try: Data(path_to_data) except IOError: from dallinger import logger ...
Attempt to find a zipped export of an experiment with the ID provided and return its path. Returns None if not found. Search order: 1. local "data" subdirectory 2. user S3 bucket 3. Dallinger S3 bucket
def isidentifier(s, dotted=False):
    """A function equivalent to the str.isidentifier method on Py3.

    With ``dotted=True``, every dot-separated component of ``s`` must itself
    be a valid identifier.
    """
    if dotted:
        return all(isidentifier(part) for part in s.split('.'))
    if PY3:
        return s.isidentifier()
    # Python 2 has no str.isidentifier; emulate it with a regex.
    import re
    return bool(re.match(r"[a-zA-Z_][a-zA-Z0-9_]*$", s))
A function equivalent to the str.isidentifier method on Py3
def minWidth(self): frags = self.frags nFrags = len(frags) if not nFrags: return 0 if nFrags == 1: f = frags[0] fS = f.fontSize fN = f.fontName words = hasattr(f, 'text') and split(f.text, ' ') or f.words func = lambda w, fS=fS,...
Attempt to determine a minimum sensible width
def nested_update(d, u):
    """Merge nested dict ``u`` into ``d`` recursively, in place.

    Mapping values in ``u`` are merged into the corresponding (possibly
    missing) sub-dict of ``d``; all other values overwrite.

    Args:
        d: dict to update.
        u: dict with the updates to apply.

    Returns:
        The updated dict ``d``.
    """
    # collections.Mapping was deprecated since 3.3 and removed in Python 3.10;
    # the abstract base class lives in collections.abc.
    from collections.abc import Mapping
    for k, v in u.items():
        if isinstance(v, Mapping):
            d[k] = nested_update(d.get(k, {}), v)
        else:
            d[k] = v
    return d
Merge two nested dicts. Nested dicts are sometimes used for representing various recursive structures. When updating such a structure, it may be convenient to present the updated data as a corresponding recursive structure. This function will then apply the update. Args: d: dict dict tha...
def scope(self, key):
    """Apply the name scope to a key.

    Parameters
    ----------
    key : string

    Returns
    -------
    ``name/key`` if ``name`` is not ``None``; otherwise, ``key``.
    """
    prefix = self.name
    return key if prefix is None else '{:s}/{:s}'.format(prefix, key)
Apply the name scope to a key Parameters ---------- key : string Returns ------- `name/key` if `name` is not `None`; otherwise, `key`.
def props_to_image(regionprops, shape, prop):
    r"""Create an image with each region colored according to ``prop``.

    Parameters
    ----------
    regionprops : list
        Region property objects (e.g. from ``regionprops_3d``).
    shape : tuple
        Shape of the output image.
    prop : str
        Property used to color each region; ``'convex'`` uses the convex mask.
    """
    im = sp.zeros(shape=shape)
    for region in regionprops:
        # 'convex' selects the filled convex hull mask instead of the raw mask.
        mask = region.convex_image if prop == 'convex' else region.image
        im[bbox_to_slices(region.bbox)] += mask * region[prop]
    return im
r""" Creates an image with each region colored according the specified ``prop``, as obtained by ``regionprops_3d``. Parameters ---------- regionprops : list This is a list of properties for each region that is computed by PoreSpy's ``regionprops_3D`` or Skimage's ``regionsprops``. ...
def href(self):
    """Return URL of the resource.

    Collection (non-root) paths get a trailing ``s`` appended.

    :rtype: str
    """
    path = self.path
    url = self.session.base_url + str(path)
    if path.is_collection and not path.is_root:
        url += 's'
    return url
Return URL of the resource :rtype: str
def mean(self, only_valid=True) -> ErrorValue: if not only_valid: intensity = self.intensity error = self.error else: intensity = self.intensity[self.mask] error = self.error[self.mask] return ErrorValue(intensity.mean(), ...
Calculate the mean of the pixels, not counting the masked ones if only_valid is True.
def Recv(self): size = struct.unpack(_STRUCT_FMT, self._ReadN(_STRUCT_LEN))[0] if size > MAX_SIZE: raise ProtocolError("Expected size to be at most %d, got %d" % (MAX_SIZE, size)) with self._read_lock: buf = self._ReadN(size) ...
Accept a message from Fleetspeak. Returns: A tuple (common_pb2.Message, size of the message in bytes). Raises: ProtocolError: If we receive unexpected data from Fleetspeak.
def is_field_visible(self, field):
    """Check if the field is visible.

    Hides the ``Client`` field on Client objects and the ``Batch`` field on
    Batch objects; everything else is visible.
    """
    name = field.getName()
    portal_type = self.context.portal_type
    if name == "Client" and portal_type in ("Client", ):
        return False
    if name == "Batch" and portal_type in ("Batch", ):
        return False
    return True
Check if the field is visible
def qteIsMiniApplet(self, obj):
    """Test if instance ``obj`` is a mini applet.

    |Args|

    * ``obj`` (**object**): object to test.

    |Returns|

    * **bool**: whether or not ``obj`` is the mini applet.
    """
    try:
        return obj._qteAdmin.isMiniApplet
    except AttributeError:
        # Anything without the admin attribute is not a mini applet.
        return False
Test if instance ``obj`` is a mini applet. |Args| * ``obj`` (**object**): object to test. |Returns| * **bool**: whether or not ``obj`` is the mini applet. |Raises| * **None**
async def try_sending(self,msg,timeout_secs, max_attempts): if timeout_secs is None: timeout_secs = self.timeout if max_attempts is None: max_attempts = self.retry_count attempts = 0 while attempts < max_attempts: if msg.seq_num not in self.message: re...
Coroutine used to send message to the device when a response or ack is needed. This coroutine will try to send up to max_attempts time the message, waiting timeout_secs for an answer. If no answer is received, it will consider that the device is no longer accessible and will unregister it. ...
def pull(self):
    """Pull the repo from its remote, preserving user changes.

    Clones (initializes) on first use, otherwise updates in place.
    """
    if os.path.exists(self.repo_dir):
        yield from self.update()
    else:
        yield from self.initialize_repo()
Pull selected repo from a remote git repository, while preserving user changes
def clean_start_time(self): start = self.cleaned_data.get('start_time') if not start: return start active_entries = self.user.timepiece_entries.filter( start_time__gte=start, end_time__isnull=True) for entry in active_entries: output = ('The start time...
Make sure that the start time doesn't come before the active entry
def render(file):
    """Generate the result HTML.

    Reads a notebook from ``file``, converts it with nbconvert's ``basic``
    template and returns ``(body, resources)``.
    """
    fp = file.open()
    raw = fp.read()
    fp.close()
    notebook = nbformat.reads(raw.decode('utf-8'), as_version=4)
    exporter = HTMLExporter()
    exporter.template_file = 'basic'
    body, resources = exporter.from_notebook_node(notebook)
    return body, resources
Generate the result HTML.
def blend(self, other, ratio=0.5): keep = 1.0 - ratio if not self.space == other.space: raise Exception("Colors must belong to the same color space.") values = tuple(((u * keep) + (v * ratio) for u, v in zip(self.values, other.values))) return self.__class__(self....
Blend this color with another color in the same color space. By default, blends the colors half-and-half (ratio: 0.5). :param Color other: The color to blend. :param float ratio: How much to blend (0 -> 1). :rtype: Color :returns: A new spectra.Color
def build_wheel(source_dir, wheel_dir, config_settings=None): if config_settings is None: config_settings = {} requires, backend = _load_pyproject(source_dir) hooks = Pep517HookCaller(source_dir, backend) with BuildEnvironment() as env: env.pip_install(requires) reqs = hooks.get_...
Build a wheel from a source directory using PEP 517 hooks. :param str source_dir: Source directory containing pyproject.toml :param str wheel_dir: Target directory to create wheel in :param dict config_settings: Options to pass to build backend This is a blocking function which will run pip in a subpr...
def generate_docs(self): if self.dst.style['out'] == 'numpydoc' and self.dst.numpydoc.first_line is not None: self.first_line = self.dst.numpydoc.first_line self._set_desc() self._set_params() self._set_return() self._set_raises() self._set_other() sel...
Generates the output docstring
def save_cPkl(fpath, data, verbose=None, n=None):
    """Save ``data`` to a pickled file (protocol 2) with optional verbosity."""
    verbose = _rectify_verb_write(verbose)
    if verbose:
        print('[util_io] * save_cPkl(%r, data)' % (util_path.tail(fpath, n=n),))
    with open(fpath, 'wb') as out:
        pickle.dump(data, out, protocol=2)
Saves data to a pickled file with optional verbosity
def usable_id(cls, id):
    """Retrieve id from input which can be num or id.

    Non-numeric (or falsy) input is reported via ``cls.error`` and yields
    ``None``.
    """
    try:
        numeric_id = int(id)
    except Exception:
        numeric_id = None
    if not numeric_id:
        cls.error('unknown identifier %s' % id)
    return numeric_id
Retrieve id from input which can be num or id.
def raw_response(self, cursor_id=None): if self.flags & 1: if cursor_id is None: raise ProtocolError("No cursor id for getMore operation") msg = "Cursor not found, cursor id: %d" % (cursor_id,) errobj = {"ok": 0, "errmsg": msg, "code": 43} raise Cu...
Check the response header from the database, without decoding BSON. Check the response for errors and unpack. Can raise CursorNotFound, NotMasterError, ExecutionTimeout, or OperationFailure. :Parameters: - `cursor_id` (optional): cursor_id we sent to get this response - ...
def list_feeds(self):
    """Return a list of all feed names (the sections of the data file)."""
    parser = configparser.ConfigParser()
    parser.read(self.data_filename)
    return parser.sections()
Output a list of all feed names
def link_files(files: set, workspace_src_dir: str, common_parent: str, conf): norm_dir = normpath(workspace_src_dir) base_dir = '' if common_parent: common_parent = normpath(common_parent) base_dir = commonpath(list(files) + [common_parent]) if base_dir != common_paren...
Sync the list of files and directories in `files` to destination directory specified by `workspace_src_dir`. "Sync" in the sense that every file given in `files` will be hard-linked under `workspace_src_dir` after this function returns, and no other files will exist under `workspace_src_dir`. F...
def _is_flag_group(obj):
    """Return True if ``obj`` is an h5py.Group that looks like it contains a
    flag, i.e. it has ``active`` and ``known`` datasets."""
    if not isinstance(obj, h5py.Group):
        return False
    active = obj.get("active")
    known = obj.get("known")
    return isinstance(active, h5py.Dataset) and isinstance(known, h5py.Dataset)
Returns `True` if `obj` is an `h5py.Group` that looks like it contains a flag
def fetch_new_id(self, ): parent = self.get_parent() if parent: others = parent._children else: others = [r for r in self.get_root()._reftracks if r.get_parent() is None] others = [r for r in others if r != self and r.get_typ() ...
Return a new id for the given reftrack to be set on the refobject The id can identify reftracks that share the same parent, type and element. :returns: A new id :rtype: int :raises: None
def _finish_transaction_with_retry(self, command_name, explict_retry): try: return self._finish_transaction(command_name, explict_retry) except ServerSelectionTimeoutError: raise except ConnectionFailure as exc: try: return self._finish_transac...
Run commit or abort with one retry after any retryable error. :Parameters: - `command_name`: Either "commitTransaction" or "abortTransaction". - `explict_retry`: True when this is an explict commit retry attempt, ie the application called session.commit_transaction() twice.
def host_report_msg(hostname, module_name, result, oneline): failed = utils.is_failed(result) msg = '' if module_name in [ 'command', 'shell', 'raw' ] and 'ansible_job_id' not in result and result.get('parsed',True) != False: if not failed: msg = command_generic_msg(hostname, result, one...
summarize the JSON results for a particular host
def get_values(feature, properties):
    """Return all values of the given feature specified by the given property set."""
    # Normalize the feature to the grist form "<feature>".
    if feature[0] != '<':
        feature = '<' + feature + '>'
    return [replace_grist(p, '') for p in properties if get_grist(p) == feature]
Returns all values of the given feature specified by the given property set.
def get_all_queues(self):
    """Get information about all queues in the cluster.

    Returns
    -------
    queues : list of Queue
    """
    resp = self._call('getAllQueues', proto.Empty())
    queues = []
    for pb in resp.queues:
        queues.append(Queue.from_protobuf(pb))
    return queues
Get information about all queues in the cluster. Returns ------- queues : list of Queue Examples -------- >>> client.get_all_queues() [Queue<name='default', percent_used=0.00>, Queue<name='myqueue', percent_used=5.00>, Queue<name='child1', perc...
def sort2groups(array, gpat=['_R1','_R2']): groups = [REGroup(gp) for gp in gpat] unmatched = [] for item in array: matched = False for m in groups: if m.match(item): matched = True break if not matched: unmatched.append(item) return [sorted(m.list) for m i...
Sort an array of strings to groups by patterns
def maybe_start_recording(tokens, index):
    """Return a new _CommentedLineRecorder when it is time to record.

    Returns None when ``tokens[index]`` is not really a comment.
    """
    if not _is_really_comment(tokens, index):
        return None
    return _CommentedLineRecorder(index, tokens[index].line)
Return a new _CommentedLineRecorder when it is time to record.
def create(self, edgeList=None, excludeEdges=None, networkName=None, nodeList=None, source=None, verbose=False): network=check_network(self,source, verbose=verbose) PARAMS=set_param(["edgeList","excludeEdges","networkName","nodeList","source"], \ [edgeList,excludeEdges,networkName,nodeList,netwo...
Create a new network from a list of nodes and edges in an existing source network. The SUID of the network and view are returned. :param edgeList (string, optional): Specifies a list of edges. The keywords all, selected, or unselected can be used to specify edges by their select...
def start_single(self, typ, scol):
    """Start a new single of type ``typ`` at source column ``scol``.

    The new Single is registered on this group and returned.
    """
    self.starting_single = True
    new_single = Single(typ=typ, group=self, indent=(scol - self.level))
    self.single = new_single
    self.singles.append(new_single)
    return new_single
Start a new single
def log_value(self, name, value, step=None): if isinstance(value, six.string_types): raise TypeError('"value" should be a number, got {}' .format(type(value))) value = float(value) self._check_step(step) tf_name = self._ensure_tf_name(name) ...
Log new value for given name on given step. Args: name (str): name of the variable (it will be converted to a valid tensorflow summary name). value (float): this is a real number to be logged as a scalar. step (int): non-negative integer used for visualizatio...
def services(namespace='default', **kwargs): cfg = _setup_conn(**kwargs) try: api_instance = kubernetes.client.CoreV1Api() api_response = api_instance.list_namespaced_service(namespace) return [srv['metadata']['name'] for srv in api_response.to_dict().get('items')] except (ApiExcepti...
Return a list of kubernetes services defined in the namespace CLI Examples:: salt '*' kubernetes.services salt '*' kubernetes.services namespace=default
def call(self, name, request=None, **params): if name not in self.resources: raise exceptions.HttpError( 'Unknown method \'%s\'' % name, status=status.HTTP_501_NOT_IMPLEMENTED) request = request or HttpRequest() resource = self.resources[name] ...
Call resource by ``Api`` name. :param name: The resource's name (short form) :param request: django.http.Request instance :param **params: Params for a resource's call :return object: Result of resource's execution
def dtype_repr(dtype): dtype = np.dtype(dtype) if dtype == np.dtype(int): return "'int'" elif dtype == np.dtype(float): return "'float'" elif dtype == np.dtype(complex): return "'complex'" elif dtype.shape: return "('{}', {})".format(dtype.base, dtype.shape) else:...
Stringify ``dtype`` for ``repr`` with default for int and float.
def path_dwim(basedir, given):
    """Make relative paths work like folks expect.

    Absolute paths are returned unchanged, ``~/`` paths are expanded, and
    anything else is joined onto ``basedir``.
    """
    if given.startswith("/"):
        return given
    if given.startswith("~/"):
        return os.path.expanduser(given)
    return os.path.join(basedir, given)
make relative paths work like folks expect.
def get_valid_location(location):
    """Check if the given location represents a valid cellular component.

    Known names are returned as-is; otherwise the reverse mapping is tried.
    Raises InvalidLocationError for unknown locations.
    """
    if location is None or cellular_components.get(location) is not None:
        return location
    loc = cellular_components_reverse.get(location)
    if loc is None:
        raise InvalidLocationError(location)
    return loc
Check if the given location represents a valid cellular component.
def check_for_local_repos(repo):
    """Check if repository is local.

    :param repo: repository name to look up in the default repositories.
    :return: True when the repo's URL is a local ``file:///`` path,
        False otherwise (previously fell through returning None; explicit
        False is truthiness-compatible for all callers).
    """
    repos_dict = Repo().default_repository()
    if repo in repos_dict:
        return repos_dict[repo].startswith("file:///")
    return False
Check if repository is local
def speak(self, text): if not self.is_valid_string(text): raise Exception("%s is not ISO-8859-1 compatible." % (text)) if len(text) > 1023: lines = self.word_wrap(text, width=1023) for line in lines: self.queue.put("S%s" % (line)) else: ...
The main function to convert text into speech.
def generate_accounts(seeds):
    """Create private keys and addresses for all seeds."""
    accounts = {}
    for seed in seeds:
        key = sha3(seed)
        accounts[seed] = {
            'privatekey': encode_hex(key),
            'address': encode_hex(privatekey_to_address(key)),
        }
    return accounts
Create private keys and addresses for all seeds.
def check_yamls(cls, dap): problems = list() for yaml in dap.assistants_and_snippets: path = yaml + '.yaml' parsed_yaml = YamlLoader.load_yaml_by_path(dap._get_file(path, prepend=True)) if parsed_yaml: try: yaml_checker.check(path, ...
Check that all assistants and snippets are valid. Return list of DapProblems.
def add_json(self, json_obj, **kwargs):
    """Add a json-serializable Python dict as a json file to IPFS.

    Parameters
    ----------
    json_obj : dict
        A json-serializable Python object.
    """
    serialized = encoding.Json().encode(json_obj)
    return self.add_bytes(serialized, **kwargs)
Adds a json-serializable Python dict as a json file to IPFS. .. code-block:: python >>> c.add_json({'one': 1, 'two': 2, 'three': 3}) 'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob' Parameters ---------- json_obj : dict A json-serializable Python di...
def flux_up(self, fluxUpBottom, emission=None):
    """Compute upwelling radiative flux at interfaces between layers.

    Inputs:
        * fluxUpBottom: upwelling flux at the bottom boundary
        * emission: emission from atmospheric levels; defaults to zero
    """
    if emission is None:
        emission = np.zeros_like(self.absorptivity)
    stacked = np.concatenate((emission, np.atleast_1d(fluxUpBottom)), axis=-1)
    return np.squeeze(matrix_multiply(self.Tup, stacked[..., np.newaxis]))
Compute upwelling radiative flux at interfaces between layers. Inputs: * fluxUpBottom: flux up at bottom * emission: emission from atmospheric levels (N) defaults to zero if not given Returns: * vector of upwelling radiative flux between levels (N+...
def get_name(self, **values) -> str: if not values and self.name: return self.name if values: for ck, cvs in _sorted_items(self.compounds): if ck in cvs and ck in values: continue comp_values = [values.pop(cv, getattr(self, cv))...
Get a new name string from this object's name values. :param values: Variable keyword arguments where the **key** should refer to a field on this object that will use the provided **value** to build the new name.
def read_data_from_bin_file(fileName):
    """Load a Saleae-exported .bin file and parse its contents.

    Returns the per-channel data, the length of one channel, the number of
    channels and the sample time, as produced by ``read_data_from_bytes``.
    """
    with open(fileName, mode='rb') as fh:
        raw = fh.read()
    ChannelData, LenOf1Channel, NumOfChannels, SampleTime = read_data_from_bytes(raw)
    return ChannelData, LenOf1Channel, NumOfChannels, SampleTime
Loads the binary data stored in a binary file and extracts the data for each channel that was saved, along with the sample rate and length of the data array. Parameters ---------- fileName : str path to the .bin file exported from the saleae da...
def tickerId(self, contract_identifier): symbol = contract_identifier if isinstance(symbol, Contract): symbol = self.contractString(symbol) for tickerId in self.tickerIds: if symbol == self.tickerIds[tickerId]: return tickerId else: tic...
returns the tickerId for the symbol or sets one if it doesn't exits
def disqus_sso_script(context): settings = context["settings"] public_key = getattr(settings, "COMMENTS_DISQUS_API_PUBLIC_KEY", "") secret_key = getattr(settings, "COMMENTS_DISQUS_API_SECRET_KEY", "") user = context["request"].user if public_key and secret_key and user.is_authenticated(): co...
Provides a generic context variable which adds single-sign-on support to DISQUS if ``COMMENTS_DISQUS_API_PUBLIC_KEY`` and ``COMMENTS_DISQUS_API_SECRET_KEY`` are specified.
def is_bytes(string):
    """Check if a string is a bytes instance.

    :param Union[str, bytes] string: A string that may be string or bytes like
    :return: Whether the provided string is a bytes type or not
    :rtype: bool
    """
    if six.PY3:
        return isinstance(string, (bytes, memoryview, bytearray))
    if six.PY2:
        # `buffer` only exists on Python 2; never evaluated on Python 3.
        return isinstance(string, (buffer, bytearray))
    return False
Check if a string is a bytes instance :param Union[str, bytes] string: A string that may be string or bytes like :return: Whether the provided string is a bytes type or not :rtype: bool
def __process_acl(self, load, auth_list): if 'eauth' not in load: return auth_list fstr = '{0}.process_acl'.format(load['eauth']) if fstr not in self.auth: return auth_list try: return self.auth[fstr](auth_list, self.opts) except Exception as e...
Allows eauth module to modify the access list right before it'll be applied to the request. For example ldap auth module expands entries
def get_db(cls):
    """Return the database for the collection.

    Uses ``cls._db`` when set, otherwise the client's default database.
    """
    db_name = cls._db
    if not db_name:
        return cls._client.get_default_database()
    return getattr(cls._client, db_name)
Return the database for the collection
def divide(x1, x2, output_shape=None, name=None): output_shape = convert_to_shape(output_shape) if not isinstance(x2, Tensor): return ScalarMultiplyOperation(x1, 1.0 / x2).outputs[0] with tf.name_scope(name, default_name="divide"): x1, x2 = binary_arguments_to_tensors(x1, x2) return multiply(x1, recip...
Binary division with broadcasting. Args: x1: a Tensor x2: a Tensor output_shape: an optional Shape name: an optional string Returns: a Tensor
def is_valid_data(obj):
    """Check if data is JSON serializable.

    Falsy objects are considered valid without attempting serialization.
    """
    if not obj:
        return True
    try:
        json.dumps(obj, default=datetime_encoder)
    except (TypeError, UnicodeDecodeError):
        return False
    return True
Check if data is JSON serializable.
def human_readable(self, dense_repr: Sequence[Sequence[int]]) -> List[List[str]]: transcripts = [] for dense_r in dense_repr: non_empty_phonemes = [phn_i for phn_i in dense_r if phn_i != 0] transcript = self.corpus.indices_to_labels(non_empty_phonemes) transcripts.app...
Returns a human readable version of a dense representation of either a hypothesis or a reference transcription to facilitate simple manual inspection.
def get_config(config, default_config): if not config: logging.warning('Using default config: %s', default_config) config = default_config try: with open(config, 'r') as config_file: return yaml.load(config_file) except (yaml.reader.ReaderError, yaml.parser.Pa...
Load configuration from file if in config, else use default
def clean_email(self): if get_user_model().objects.filter( Q(email__iexact=self.cleaned_data['email']) | Q(email_unconfirmed__iexact=self.cleaned_data['email'])): raise forms.ValidationError(_(u'This email address is already ' 'in use. Please supply a ...
Validate that the email address is unique.
def totalNumberOfTiles(self, minZoom=None, maxZoom=None): "Return the total number of tiles for this instance extent" nbTiles = 0 minZoom = minZoom or 0 if maxZoom: maxZoom = maxZoom + 1 else: maxZoom = len(self.RESOLUTIONS) for zoom in xrange(minZ...
Return the total number of tiles for this instance extent
def _check(peers): if not isinstance(peers, list): return False for peer in peers: if not isinstance(peer, six.string_types): return False if not HAS_NETADDR: return True ip_only_peers = [] for peer in peers: try: ip_only_peers.append(six.text_...
Checks whether the input is a valid list of peers and transforms domain names into IP Addresses
def x_runtime(f, *args, **kwargs):
    """X-Runtime Flask Response Decorator.

    Stamps the elapsed wall time (seconds) on the response's ``X-Runtime``
    header.
    """
    started = now()
    response = f(*args, **kwargs)
    elapsed = now() - started
    response.headers['X-Runtime'] = '{0}s'.format(Decimal(str(elapsed)))
    return response
X-Runtime Flask Response Decorator.
def found(self): if 'ids' in self.kwargs: cid = self.kwargs['query']['collection']['eq'] return len(self.items_by_id(self.kwargs['ids'], cid)) kwargs = { 'page': 1, 'limit': 0 } kwargs.update(self.kwargs) results = self.query(**kwar...
Small query to determine total number of hits
def locate_private_alleles(*acs):
    """Locate alleles that are found only in a single population.

    Parameters
    ----------
    *acs : array_like, int, shape (n_variants, n_alleles)
        Allele counts arrays from each population.

    Returns
    -------
    loc : ndarray, bool, shape (n_variants, n_alleles)
        True where exactly one population carries the allele.
    """
    acs = [asarray_ndim(ac, 2) for ac in acs]
    check_dim0_aligned(*acs)
    acs = ensure_dim1_aligned(*acs)
    stacked = np.dstack(acs)
    n_pops_with_allele = np.sum(stacked > 0, axis=2)
    return n_pops_with_allele == 1
Locate alleles that are found only in a single population. Parameters ---------- *acs : array_like, int, shape (n_variants, n_alleles) Allele counts arrays from each population. Returns ------- loc : ndarray, bool, shape (n_variants, n_alleles) Boolean array where elements are ...
def gradfunc(self, p):
    """The gradient-computing function that gets passed to the optimizers,
    if needed.

    Sets the stochastics from parameter vector ``p``, fills ``self.grad``
    element-wise via ``self.diff`` and returns -1 times the gradient.
    """
    # NOTE(review): uses xrange, so this is Python 2 code; presumably
    # self.len is the number of stochastic parameters -- TODO confirm.
    self._set_stochastics(p)
    for i in xrange(self.len):
        self.grad[i] = self.diff(i)
    return -1 * self.grad
The gradient-computing function that gets passed to the optimizers, if needed.
def function_call_with_timeout(fun_name, fun_args, secs=5): from multiprocessing import Process, Queue p = Process(target=fun_name, args=tuple(fun_args)) p.start() curr_secs = 0 no_timeout = False if secs == 0: no_timeout = True else: timeout = secs while p.is_alive() and not no_timeout:...
Run a Python function with a timeout. No interprocess communication or return values are handled. Setting secs to 0 gives infinite timeout.
def __flush(self, async=True): rh = self.rh messages = list(self.messages) stream_notices = list(self.stream_notices) self.stream_notices = [] self.messages = [] args = (rh, messages, stream_notices) if async: self.hub.threadPool.execute_named(self.__i...
Flushes messages through current HttpRequest and closes it. It assumes a current requesthandler and requires a lock on self.lock
def recv_exit_status(self, command, timeout=10, get_pty=False): status = None self.last_command = command stdin, stdout, stderr = self.cli.exec_command(command, get_pty=get_pty) if stdout and stderr and stdin: for _ in range(timeout): if stdout.channel.exit_st...
Execute a command and get its return value @param command: command to execute @type command: str @param timeout: command execution timeout @type timeout: int @param get_pty: get pty @type get_pty: bool @return: the exit code of the process or None in case of t...
def has_object_permission(self, request, view, obj): if not self.object_permissions: return True serializer_class = view.get_serializer_class() model_class = serializer_class.Meta.model action_method_name = None if hasattr(view, 'action'): action = self._g...
Overrides the standard function and figures out methods to call for object permissions.
def parse_if(self): node = result = nodes.If(lineno=self.stream.expect('name:if').lineno) while 1: node.test = self.parse_tuple(with_condexpr=False) node.body = self.parse_statements(('name:elif', 'name:else', 'name:endif')) ...
Parse an if construct.
def get_user_if_exists(strategy, details, user=None, *args, **kwargs): if user: return {'is_new': False} try: username = details.get('username') return { 'is_new': False, 'user': User.objects.get(username=username) } except User.DoesNotExist: p...
Return a User with the given username iff the User exists.
def get_next_invalid_time_from_t(self, timestamp): if not self.is_time_valid(timestamp): return timestamp t_day = self.get_next_invalid_day(timestamp) if timestamp < t_day: sec_from_morning = self.get_next_future_timerange_invalid(t_day) else: sec_from...
Get next invalid time for time range :param timestamp: time we compute from :type timestamp: int :return: timestamp of the next invalid time (LOCAL TIME) :rtype: int
def stelab(pobj, vobs):
    """Correct the apparent position of an object for stellar aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/stelab_c.html

    :param pobj: Position of an object with respect to the observer.
    :param vobs: Velocity of the observer.
    """
    position = stypes.toDoubleVector(pobj)
    velocity = stypes.toDoubleVector(vobs)
    corrected = stypes.emptyDoubleVector(3)
    libspice.stelab_c(position, velocity, corrected)
    return stypes.cVectorToPython(corrected)
Correct the apparent position of an object for stellar aberration. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/stelab_c.html :param pobj: Position of an object with respect to the observer. :type pobj: 3-Element Array of floats :param vobs: Velocity of the observer with...
def configure(self, **configs): configs = self._deprecate_configs(**configs) self._config = {} for key in self.DEFAULT_CONFIG: self._config[key] = configs.pop(key, self.DEFAULT_CONFIG[key]) if configs: raise KafkaConfigurationError('Unknown configuration key(s): '...
Configure the consumer instance Configuration settings can be passed to constructor, otherwise defaults will be used: Keyword Arguments: bootstrap_servers (list): List of initial broker nodes the consumer should contact to bootstrap initial cluster metadata. This d...
def create_shared(self, name, ref):
    """For the app backends to create the GLShared object.

    Parameters
    ----------
    name : str
        The name.
    ref : object
        The reference.

    Raises RuntimeError when the shared object was already created.
    """
    if self._shared is not None:
        raise RuntimeError('Can only set_shared once.')
    self._shared = GLShared(name, ref)
For the app backends to create the GLShared object. Parameters ---------- name : str The name. ref : object The reference.
def compute_fw_at_frac_max_1d_simple(Y, xc, X=None, f=0.5): yy = np.asarray(Y) if yy.ndim != 1: raise ValueError('array must be 1-d') if yy.size == 0: raise ValueError('array is empty') if X is None: xx = np.arange(yy.shape[0]) else: xx = X xpix = coor_to_pix_1d(x...
Compute the full width at fraction f of the maximum
def upload(self, filename, filedata=None, filepath=None, **kwargs): if filepath is None and filedata is None: raise GitlabUploadError("No file contents or path specified") if filedata is not None and filepath is not None: raise GitlabUploadError("File contents and file path speci...
Upload the specified file into the project. .. note:: Either ``filedata`` or ``filepath`` *MUST* be specified. Args: filename (str): The name of the file being uploaded filedata (bytes): The raw data of the file being uploaded filepath (str): The path t...
def updateColormap(self):
    """Update the current colormap according to the stored image settings."""
    args = self.imgArgs
    lut = args['lut']
    if lut is not None:
        self.img.setLookupTable(lut)
    self.img.setLevels(args['levels'])
Updates the currently colormap accoring to stored settings
def _resume_ssl_session( server_info: ServerConnectivityInfo, ssl_version_to_use: OpenSslVersionEnum, ssl_session: Optional[nassl._nassl.SSL_SESSION] = None, should_enable_tls_ticket: bool = False ) -> nassl._nassl.SSL_SESSION: ssl_connection = server_info.get...
Connect to the server and returns the session object that was assigned for that connection. If ssl_session is given, tries to resume that session.
def create(self, networkipv4s):
    """Create network-ipv4's via the v3 API.

    :param networkipv4s: List containing networkipv4's desired to be
        created on database.
    """
    payload = {'networks': networkipv4s}
    return super(ApiNetworkIPv4, self).post('api/v3/networkv4/', payload)
Method to create network-ipv4's :param networkipv4s: List containing networkipv4's desired to be created on database :return: None
def remover(self, id_groupl3): if not is_valid_int_param(id_groupl3): raise InvalidParameterError( u'The identifier of Group L3 is invalid or was not informed.') url = 'groupl3/' + str(id_groupl3) + '/' code, xml = self.submit(None, 'DELETE', url) return self....
Remove Group L3 from by the identifier. :param id_groupl3: Identifier of the Group L3. Integer value and greater than zero. :return: None :raise InvalidParameterError: The identifier of Group L3 is null and invalid. :raise GrupoL3NaoExisteError: Group L3 not registered. :raise...
def to_dict(self, properties=True): nodes = {} for node in self.nodes(): nd = { 'label': node.pred.short_form(), 'edges': self.edges(node.nodeid) } if node.lnk is not None: nd['lnk'] = {'from': node.cfrom, 'to': node.cto...
Encode the Eds as a dictionary suitable for JSON serialization.
def axis(self) -> Callable[[Any], Any]:
    """Determine the axis-extraction function based on the hist type."""
    return hist_axis_func(axis_type=self.axis_type)
Determine the axis to return based on the hist type.
def _forceInt(x,y,z,dens,b2,c2,i,glx=None,glw=None): def integrand(s): t= 1/s**2.-1. return dens(numpy.sqrt(x**2./(1.+t)+y**2./(b2+t)+z**2./(c2+t)))\ *(x/(1.+t)*(i==0)+y/(b2+t)*(i==1)+z/(c2+t)*(i==2))\ /numpy.sqrt((1.+(b2-1.)*s**2.)*(1.+(c2-1.)*s**2.)) if glx is None: ...
Integral that gives the force in x,y,z
def _calculate_solar_time(self, hour, eq_of_time, is_solar_time): if is_solar_time: return hour return ( (hour * 60 + eq_of_time + 4 * math.degrees(self._longitude) - 60 * self.time_zone) % 1440) / 60
Calculate Solar time for an hour.
def fromLatex(tex, *args, **kwargs):
    """Create a TreeOfContents abstraction from a LaTeX string.

    :param str tex: Latex
    :return: TreeOfContents object
    """
    parsed = TexSoup(tex)
    return TOC('[document]', source=parsed,
               descendants=list(parsed.descendants), *args, **kwargs)
Creates abstraction using Latex :param str tex: Latex :return: TreeOfContents object
def remove_overlap(self, begin, end=None):
    """Remove all intervals overlapping the given point or range.

    With only ``begin`` given, removes intervals containing that point;
    with ``end`` as well, removes intervals overlapping [begin, end).
    """
    if end is None:
        matches = self.at(begin)
    else:
        matches = self.overlap(begin, end)
    for interval in matches:
        self.remove(interval)
Removes all intervals overlapping the given point or range. Completes in O((r+m)*log n) time, where: * n = size of the tree * m = number of matches * r = size of the search range (this is 1 for a point)
def str_deps(self):
    """Return the string representation of the dependencies of the node."""
    lines = ["Dependencies of node %s:" % str(self)]
    for i, dep in enumerate(self.deps):
        lines.append("%d) %s, status=%s" % (i, dep.info, str(dep.status)))
    return "\n".join(lines)
Return the string representation of the dependencies of the node.
async def execute(self, keys=[], args=[], client=None): "Execute the script, passing any required ``args``" if client is None: client = self.registered_client args = tuple(keys) + tuple(args) if isinstance(client, BasePipeline): client.scripts.add(self) tr...
Execute the script, passing any required ``args``
def pubmed_url(args=sys.argv[1:], resolve_doi=True, out=sys.stdout): parser = argparse.ArgumentParser( description='Get a publication URL using a PubMed ID or PubMed URL') parser.add_argument('query', help='PubMed ID or PubMed URL') parser.add_argument( '-d', '--doi', action='store_false', h...
Get a publication URL via the command line using a PubMed ID or PubMed URL
def geo(self):
    """General image geo information.

    Returns
    -------
    dict
        Keys ``xmin``, ``xmax``, ``xres``, ``rotation_x``, ``ymin``,
        ``ymax``, ``yres``, ``rotation_y``.
    """
    keys = ['xmin', 'xres', 'rotation_x', 'ymax', 'rotation_y', 'yres']
    out = dict(zip(keys, self.raster.GetGeoTransform()))
    # Derive the opposite corners from the origin, resolution and size.
    out['xmax'] = out['xmin'] + out['xres'] * self.cols
    out['ymin'] = out['ymax'] + out['yres'] * self.rows
    return out
General image geo information. Returns ------- dict a dictionary with keys `xmin`, `xmax`, `xres`, `rotation_x`, `ymin`, `ymax`, `yres`, `rotation_y`
def cyvcf_add_filter(rec, name):
    """Add a FILTER value to a cyvcf2 record, if not already present."""
    filters = rec.FILTER.split(";") if rec.FILTER else []
    if name not in filters:
        filters.append(name)
        rec.FILTER = filters
    return rec
Add a FILTER value to a cyvcf2 record
def cache_hash(*a, **kw): def cache_str(o): if isinstance(o, (types.FunctionType, types.BuiltinFunctionType, types.MethodType, types.BuiltinMethodType, types.UnboundMethodType)): return getattr(o, 'func_name', 'func') if isinstance(o, d...
Try to hash an arbitrary object for caching.
def get_soql_fields(soql): soql_fields = re.search('(?<=select)(?s)(.*)(?=from)', soql, re.IGNORECASE) soql_fields = re.sub(' ', '', soql_fields.group()) soql_fields = re.sub('\t', '', soql_fields) fields = re.split(',|\n|\r|', soql_fields) fields = [field for field in fields if field != ''] ret...
Gets queried columns names.
def _one_hidden(self, l:int)->Tensor:
    "Return one hidden state."
    # The last layer outputs at embedding size; earlier layers at n_hid.
    layer_size = self.n_hid if l != self.n_layers - 1 else self.emb_sz
    nh = layer_size // self.n_dir
    return one_param(self).new(1, self.bs, nh).zero_()
Return one hidden state.
def read(self):
    """Read some number of messages via the base Client, signaling
    distribute-ready when needed."""
    result = Client.read(self)
    if self.needs_distribute_ready():
        self.distribute_ready()
    return result
Read some number of messages