Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
4,300
def weighted_mean_and_std(values, weights): average = np.average(values, weights=weights, axis=0) variance = np.dot(weights, (values - average) ** 2) / weights.sum() return (average, np.sqrt(variance))
Returns the weighted average and standard deviation. values, weights -- numpy ndarrays with the same shape.
4,301
def compute_plot_size(plot): if isinstance(plot, GridBox): ndmapping = NdMapping({(x, y): fig for fig, y, x in plot.children}, kdims=[, ]) cols = ndmapping.groupby() rows = ndmapping.groupby() width = sum([max([compute_plot_size(f)[0] for f in col]) for col in cols]) hei...
Computes the size of bokeh models that make up a layout such as figures, rows, columns, widgetboxes and Plot.
4,302
def create_protocol(self): self.sessions += 1 protocol = self.protocol_factory(self) protocol.copy_many_times_events(self) return protocol
Create a new protocol via the :attr:`protocol_factory` This method increase the count of :attr:`sessions` and build the protocol passing ``self`` as the producer.
4,303
def integrate(self, wavelengths=None, **kwargs): if in kwargs: self._validate_flux_unit(kwargs[], wav_only=True) x = self._validate_wavelengths(wavelengths) try: m = self.model.integral exc...
Perform integration. This uses any analytical integral that the underlying model has (i.e., ``self.model.integral``). If unavailable, it uses the default fall-back integrator set in the ``default_integrator`` configuration item. If wavelengths are provided, flux or throughput i...
4,304
async def get_guild_count(self, bot_id: int=None): if bot_id is None: bot_id = self.bot_id return await self.http.get_guild_count(bot_id)
This function is a coroutine. Gets a guild count from discordbots.org Parameters ========== bot_id: int[Optional] The bot_id of the bot you want to lookup. Defaults to the Bot provided in Client init Returns ======= stats: dict ...
4,305
def pr0_to_likelihood_array(outcomes, pr0): pr0 = pr0[np.newaxis, ...] pr1 = 1 - pr0 if len(np.shape(outcomes)) == 0: outcomes = np.array(outcomes)[None] return np.concatenate([ pr0 if outcomes[idx] == 0 else pr1 for idx ...
Assuming a two-outcome measurement with probabilities given by the array ``pr0``, returns an array of the form expected to be returned by ``likelihood`` method. :param numpy.ndarray outcomes: Array of integers indexing outcomes. :param numpy.ndarray pr0: Array of shape ``(n_mode...
4,306
def search_grouping(stmt, name): mod = stmt.i_orig_module while stmt is not None: if name in stmt.i_groupings: g = stmt.i_groupings[name] if (mod is not None and mod != g.i_orig_module and g.i_orig_module.keyword == ): ...
Search for a grouping in scope First search the hierarchy, then the module and its submodules.
4,307
def parse_file_name_starting_position(self): groups = mod_re.findall(, self.file_name) assert groups and len(groups) == 1 and len(groups[0]) == 4, .format(self.file_name) groups = groups[0] if groups[0] == : latitude = float(groups[1]) else: la...
Returns (latitude, longitude) of lower left point of the file
4,308
def connect(servers=None, framed_transport=False, timeout=None, retry_time=60, recycle=None, round_robin=None, max_retries=3): if servers is None: servers = [DEFAULT_SERVER] return ThreadLocalConnection(servers, framed_transport, timeout, retry_time, recycle, max_retries=max_re...
Constructs a single ElasticSearch connection. Connects to a randomly chosen server on the list. If the connection fails, it will attempt to connect to each server on the list in turn until one succeeds. If it is unable to find an active server, it will throw a NoServerAvailable exception. Failing ...
4,309
def assert_credentials_match(self, verifier, authc_token, account): cred_type = authc_token.token_info[] try: verifier.verify_credentials(authc_token, account[]) except IncorrectCredentialsException: updated_account = self.update_failed_attempt(authc_token, acco...
:type verifier: authc_abcs.CredentialsVerifier :type authc_token: authc_abcs.AuthenticationToken :type account: account_abcs.Account :returns: account_abcs.Account :raises IncorrectCredentialsException: when authentication fails, includin...
4,310
def console_exec(thread_id, frame_id, expression, dbg): frame = dbg.find_frame(thread_id, frame_id) is_multiline = expression.count() > 1 expression = str(expression.replace(, )) updated_globals = {} updated_globals.update(frame.f_globals) updated_globals.update(frame.f_loc...
returns 'False' in case expression is partially correct
4,311
def item_properties(self, handle): logger.debug("Getting properties for handle: {}".format(handle)) properties = { : self.get_size_in_bytes(handle), : self.get_utc_timestamp(handle), : self.get_hash(handle), : self.get_relpath(handle) } ...
Return properties of the item with the given handle.
4,312
def _set_show_system_info(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=show_system_info.show_system_info, is_leaf=True, yang_name="show-system-info", rest_name="show-system-info", parent=self, path_helper=self._path_helper, extmethods=self._extmeth...
Setter method for show_system_info, mapped from YANG variable /brocade_ras_ext_rpc/show_system_info (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_system_info is considered as a private method. Backends looking to populate this variable should do so via calli...
4,313
def linspace(self, start, stop, n): if n == 1: return [start] L = [0.0] * n nm1 = n - 1 nm1inv = 1.0 / nm1 for i in range(n): L[i] = nm1inv * (start*(nm1 - i) + stop*i) return L
Simple replacement for numpy linspace
4,314
def report(self): data = for sample in self.metadata: if sample[self.analysistype].primers != : sample[self.analysistype].report = os.path.join(sample[self.analysistype].reportdir, ...
Create reports of the findings
4,315
def distribution_compatible(dist, supported_tags=None): if supported_tags is None: supported_tags = get_supported() package = Package.from_href(dist.location) if not package: return False return package.compatible(supported_tags)
Is this distribution compatible with the given interpreter/platform combination? :param supported_tags: A list of tag tuples specifying which tags are supported by the platform in question. :returns: True if the distribution is compatible, False if it is unrecognized or incompatible.
4,316
def add_empty_fields(untl_dict): for element in UNTL_XML_ORDER: if element not in untl_dict: try: py_object = PYUNTL_DISPATCH[element]( content=, qualifier=, ) except: ...
Add empty values if UNTL fields don't have values.
4,317
def parse_task_declaration(self, declaration_subAST): String file_nameString file_namefile_nameString var_name = self.parse_declaration_name(declaration_subAST.attr("name")) var_type = self.parse_declaration_type(declaration_subAST.attr("type")) var_expressn = self.parse_declaration_expr...
Parses the declaration section of the WDL task AST subtree. Examples: String my_name String your_name Int two_chains_i_mean_names = 0 :param declaration_subAST: Some subAST representing a task declaration like: 'String file_name' :ret...
4,318
def indent(lines, amount, ch=): padding = amount * ch return padding + ( + padding).join(lines.split())
Indent the lines in a string by padding each one with proper number of pad characters
4,319
def _gassist_any(self,dg,dt,dt2,name,na=None,nodiag=False,memlimit=-1): if self.lib is None: raise ValueError("Not initialized.") import numpy as np from .auto import ftype_np,gtype_np from .types import isint if dg.dtype.char!=gtype_np: raise ValueError(+dg.dtype.char++gtype_np) if dt.dtype.char!=ftype_np ...
Calculates probability of gene i regulating gene j with genotype data assisted method, with the recommended combination of multiple tests. dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data. Entry dg[i,j] is genotype i's value for sample j. Each value must be among 0,1,...,na. Genotype i must ...
4,320
def main(path_dir, requirements_name): click.echo("\nWARNING: Uninstall libs it\nREMINDER: After uninstall libs, update your requirements file.\nUse the `pip freeze > requirements.txt` command.\n\nList of installed libs and your dependencies added on project\nrequirements that are not being used:\n') chec...
Console script for imports.
4,321
def NotificationsPost(self, parameters): if self.__SenseApiCall__(, , parameters = parameters): return True else: self.__error__ = "api call unsuccessful" return False
Create a notification on CommonSense. If successful the result, including the notification_id, can be obtained from getResponse(), and should be a json string. @param parameters (dictionary) - Dictionary containing the notification to create. @note - ...
4,322
def add_job(self, job): self.cur.execute("INSERT INTO jobs VALUES(?,?,?,?,?)", ( job["id"], job["description"], job["last-run"], job["next-run"], job["last-run-result"])) return True
Adds a new job into the cache. :param dict job: The job dictionary :returns: True
4,323
def _geolocation_extract(response): body = response.json() if response.status_code in (200, 404): return body try: error = body["error"]["errors"][0]["reason"] except KeyError: error = None if response.status_code == 403: raise exceptions._OverQueryLimit(respon...
Mimics the exception handling logic in ``client._get_body``, but for geolocation which uses a different response format.
4,324
def get_ips_by_equipment_and_environment(self, equip_nome, id_ambiente): if id_ambiente is None: raise InvalidParameterError( u) url = + str(equip_nome) + + str(id_ambiente) code, xml = self.submit(None, , url) return self.response(code, xml)
Search Group Equipment from by the identifier. :param id_egroup: Identifier of the Group Equipment. Integer value and greater than zero. :return: Dictionary with the following structure: :: {'equipaments': [{'nome': < name_equipament >, 'grupos': < id_group >, ...
4,325
def dict_to_numpy_array(d): return fromarrays(d.values(), np.dtype([(str(k), v.dtype) for k, v in d.items()]))
Convert a dict of 1d array to a numpy recarray
4,326
def _set_backreferences(self, context, items, **kwargs): if initializing: return uid = api.get_uid(context) raw = self.getRaw(context) or [] if isinstance(raw, basestring): raw = [raw, ] cur = set(raw) ...
Set the back references on the linked items This will set an annotation storage on the referenced items which point to the current context.
4,327
def save(self_or_cls, obj, basename, fmt=, key={}, info={}, options=None, **kwargs): if info or key: raise Exception() if isinstance(obj, (Plot, NdWidget)): plot = obj else: with StoreOptions.options(obj, options, **kwargs): plot = se...
Save a HoloViews object to file, either using an explicitly supplied format or to the appropriate default.
4,328
def validate_args(args): if not os.path.isdir(args.directory): print "Directory {} does not exist".format(args.directory) sys.exit(5) return args
Call all required validation functions :param args: :return:
4,329
def run(path, code=None, params=None, **meta): if in params: ignore_decorators = params[] else: ignore_decorators = None check_source_args = (code, path, ignore_decorators) if THIRD_ARG else (code, path) return [{ : e.line, ...
pydocstyle code checking. :return list: List of errors.
4,330
def _variant_po_to_dict(tokens) -> CentralDogma: dsl = FUNC_TO_DSL.get(tokens[FUNCTION]) if dsl is None: raise ValueError(.format(tokens)) return dsl( namespace=tokens[NAMESPACE], name=tokens[NAME], variants=[ _variant_to_dsl_helper(variant_tokens) ...
Convert a PyParsing data dictionary to a central dogma abundance (i.e., Protein, RNA, miRNA, Gene). :type tokens: ParseResult
4,331
def _filter_insane_successors(self, successors): old_successors = successors[::] successors = [ ] for i, suc in enumerate(old_successors): if suc.solver.symbolic(suc.ip): i + 1, len(old_successors), ...
Throw away all successors whose target doesn't make sense This method is called after we resolve an indirect jump using an unreliable method (like, not through one of the indirect jump resolvers, but through either pure concrete execution or backward slicing) to filter out the obviously incorre...
4,332
def stop_process(self): if self.process is not None: self._user_stop = True self.process.kill() self.setReadOnly(True) self._running = False
Stop the process (by killing it).
4,333
def csv_row_cleaner(rows): result = [] for row in rows: check_empty = len(exclude_empty_values(row)) > 1 check_set = len(set(exclude_empty_values(row))) > 1 check_last_allready = (result and result[-1] == row) if check_empty and check_set and n...
Clean row checking: - Not empty row. - >=1 element different in a row. - row allready in cleaned row result.
4,334
def _gather_from_files(self, config): command_file = config.get_help_files() cache_path = os.path.join(config.get_config_dir(), ) cols = _get_window_columns() with open(os.path.join(cache_path, command_file), ) as help_file: data = json.load(help_file) self....
gathers from the files in a way that is convienent to use
4,335
def changeTo(self, path): dictionary = DictSingle(Pair(, StringSingle(path))) self.value = [dictionary]
change value Args: path (str): the new environment path
4,336
def print_stack_trace(proc_obj, count=None, color=, opts={}): "Print count entries of the stack trace" if count is None: n=len(proc_obj.stack) else: n=min(len(proc_obj.stack), count) try: for i in range(n): print_stack_entry(proc_obj, i, color=color, opts=opts) ex...
Print count entries of the stack trace
4,337
def open_project(self, path=None, restart_consoles=True, save_previous_files=True): self.switch_to_plugin() if path is None: basedir = get_home_dir() path = getexistingdirectory(parent=self, caption=_("O...
Open the project located in `path`
4,338
def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer, parse_record_fn, num_epochs=1, num_gpus=None, examples_per_epoch=None, dtype=tf.float32): dataset = dataset.prefetch(buffer_size=batch_size) if is_training: mlpe...
Given a Dataset with raw records, return an iterator over the records. Args: dataset: A Dataset representing raw records is_training: A boolean denoting whether the input is for training. batch_size: The number of samples per batch. shuffle_buffer: The buffer size to use when shuffling records. A lar...
4,339
def object_info(lcc_server, objectid, db_collection_id): privatesharedobject-%s-info.pngobjectids variability if available - `varinfo`: variability comments, variability features, type tags, period and epoch information if available - `neighbors`: information on the neighboring objects o...
This gets information on a single object from the LCC-Server. Returns a dict with all of the available information on an object, including finding charts, comments, object type and variability tags, and period-search results (if available). If you have an LCC-Server API key present in `~/.astrobase/lc...
4,340
def get(url, params=None, **kwargs): r kwargs.setdefault(, True) return request(, url, params=params, **kwargs)
r"""Sends a GET request. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object ...
4,341
def cfg_convert(self, value): rest = value m = self.WORD_PATTERN.match(rest) if m is None: raise ValueError("Unable to convert %r" % value) else: rest = rest[m.end():] d = self.config[m.groups()[0]] while rest: ...
Default converter for the cfg:// protocol.
4,342
def get(self, key): data = r_kv.get(key) return build_response(dict(data=data, code=200))
Get a key-value from storage according to the key name.
4,343
def compounding(start, stop, compound): def clip(value): return max(value, stop) if (start > stop) else min(value, stop) curr = float(start) while True: yield clip(curr) curr *= compound
Yield an infinite series of compounding values. Each time the generator is called, a value is produced by multiplying the previous value by the compound rate. EXAMPLE: >>> sizes = compounding(1., 10., 1.5) >>> assert next(sizes) == 1. >>> assert next(sizes) == 1 * 1.5 >>> assert nex...
4,344
def assert_equals(actual, expected, ignore_order=False, ignore_index=False, all_close=False): equals_, reason = equals(actual, expected, ignore_order, ignore_index, all_close, _return_reason=True) assert equals_, .format(reason, actual.to_string(), expected.to_string())
Assert 2 series are equal. Like ``assert equals(series1, series2, ...)``, but with better hints at where the series differ. See `equals` for detailed parameter doc. Parameters ---------- actual : ~pandas.Series expected : ~pandas.Series ignore_order : bool ignore_index : bool a...
4,345
def collection_choices(): from invenio_collections.models import Collection return [(0, _())] + [ (c.id, c.name) for c in Collection.query.all() ]
Return collection choices.
4,346
def complain(error): if callable(error): if DEVELOP: raise error() elif DEVELOP: raise error else: logger.warn_err(error)
Raises in develop; warns in release.
4,347
def transcode_to_utf8(filename, encoding): tmp = tempfile.TemporaryFile() for line in io.open(filename, encoding=encoding): tmp.write(line.strip().encode()) tmp.seek(0) return tmp
Convert a file in some other encoding into a temporary file that's in UTF-8.
4,348
def download(self): self.housekeeping() self.rippled_history() if self.resampling_frequencies is not None: self.find_markets() self.resample_time_series()
Walk from the current ledger index to the genesis ledger index, and download transactions from rippled.
4,349
def bg_compensate(img, sigma, splinepoints, scale): from PIL import Image import pylab from matplotlib.image import pil_to_array from centrosome.filter import canny import matplotlib img = Image.open(img) if img.mode==: imgdata = np.fromstring(img.tostrin...
Reads file, subtracts background. Returns [compensated image, background].
4,350
def actually_possibly_award(self, **state): user = state["user"] force_timestamp = state.pop("force_timestamp", None) awarded = self.award(**state) if awarded is None: return if awarded.level is None: assert len(self.levels) == 1 award...
Does the actual work of possibly awarding a badge.
4,351
def run_qsnp(align_bams, items, ref_file, assoc_files, region=None, out_file=None): if utils.file_exists(out_file): return out_file paired = get_paired_bams(align_bams, items) if paired.normal_bam: region_files = [] regions = _clean_regions(items, region) if...
Run qSNP calling on paired tumor/normal.
4,352
def fill_phenotype_calls(self,phenotypes=None,inplace=False): if phenotypes is None: phenotypes = list(self[].unique()) def _get_calls(label,phenos): d = dict([(x,0) for x in phenos]) if label!=label: return d d[label] = 1 return d if in...
Set the phenotype_calls according to the phenotype names
4,353
def _export_project_file(project, path, z, include_images, keep_compute_id, allow_all_nodes, temporary_dir): images = [] with open(path) as f: topology = json.load(f) if "topology" in topology: if "nodes" in topology["topology"]: for node in topology["topology"]["nod...
Take a project file (.gns3) and patch it for the export We rename the .gns3 project.gns3 to avoid the task to the client to guess the file name :param path: Path of the .gns3
4,354
def version(self, path, postmap=None, **params): q = httpd.merge_query(path, postmap) ans = { : taskforce_version, : .join(str(x) for x in sys.version_info[:3]), } ans[] = { : platform.system(), } ...
Return the taskforce version. Supports standard options.
4,355
def resolve(self, value=None): if self.matcher: self._init_matcher() matcher = self.evaluate() try: value = self._transform(value) self._assertion(matcher, value) except AssertionError as ex: raise...
Resolve the current expression against the supplied value
4,356
def list_ip(self, instance_id): output = self.client.describe_instances(InstanceIds=[instance_id]) output = output.get("Reservations")[0].get("Instances")[0] ips = {} ips[] = output.get("PrivateIpAddress") ips[] = output.get("PublicIpAddress") return ips
Add all IPs
4,357
def create_role(self, **kwargs): role = self.role_model(**kwargs) return self.put(role)
Creates and returns a new role from the given parameters.
4,358
def reset_password(self, token): expired, invalid, user = \ self.security_utils_service.reset_password_token_status(token) if invalid: self.flash( _(), category=) return self.redirect() elif expired: self.se...
View function verify a users reset password token from the email we sent to them. It also handles the form for them to set a new password. Supports html and json requests.
4,359
def element_abund_marco(i_decay, stable_isotope_list, stable_isotope_identifier, mass_fractions_array_not_decayed, mass_fractions_array_decayed): global elem_abund elem_abund = np.zeros(z_bismuth) global ele...
Given an array of isotopic abundances not decayed and a similar array of isotopic abundances not decayed, here elements abundances, and production factors for elements are calculated
4,360
def _grow(growth, walls, target, i, j, steps, new_steps, res): growth[:] = 0 if target[i, j]: res[0] = 1 res[1] = i res[2] = j return step = 1 s0, s1 = growth.shape step_len = 1 new_step_ind = 0 steps[new_step_ind, 0] = i...
fills [res] with [distance to next position where target == 1, x coord., y coord. of that position in target] using region growth i,j -> pixel position growth -> a work array, needed to measure the distance steps, new_steps -> current and last posit...
4,361
def lchisqprob(chisq,df): BIG = 20.0 def ex(x): BIG = 20.0 if x < -BIG: return 0.0 else: return math.exp(x) if chisq <=0 or df < 1: return 1.0 a = 0.5 * chisq if df%2 == 0: even = 1 else: even = 0 if df > 1: ...
Returns the (1-tailed) probability value associated with the provided chi-square value and df. Adapted from chisq.c in Gary Perlman's |Stat. Usage: lchisqprob(chisq,df)
4,362
def drop_constant_column_levels(df): columns = df.columns constant_levels = [i for i, level in enumerate(columns.levels) if len(level) <= 1] constant_levels.reverse() for i in constant_levels: columns = columns.droplevel(i) df.columns = columns
drop the levels of a multi-level column dataframe which are constant operates in place
4,363
def bridge_to_vlan(br): * cmd = .format(br) result = __salt__[](cmd) if result[] != 0: return False return int(result[])
Returns the VLAN ID of a bridge. Args: br: A string - bridge name Returns: VLAN ID of the bridge. The VLAN ID is 0 if the bridge is not a fake bridge. If the bridge does not exist, False is returned. CLI Example: .. code-block:: bash salt '*' openvswitch.bridge_to_p...
4,364
def start_action(logger=None, action_type="", _serializers=None, **fields): parent = current_action() if parent is None: return startTask(logger, action_type, _serializers, **fields) else: action = parent.child(logger, action_type, _serializers) action._start(fields) ret...
Create a child L{Action}, figuring out the parent L{Action} from execution context, and log the start message. You can use the result as a Python context manager, or use the L{Action.finish} API to explicitly finish it. with start_action(logger, "yourapp:subsystem:dosomething", ...
4,365
async def _async_get_sshable_ips(self, ip_addresses): async def _async_ping(ip_address): try: reader, writer = await asyncio.wait_for( asyncio.open_connection(ip_address, 22), timeout=5) except (OSError, TimeoutError): return ...
Return list of all IP address that could be pinged.
4,366
def get_section_by_rva(self, rva): for section in self.sections: if section.contains_rva(rva): return section return None
Get the section containing the given address.
4,367
def _resolve_plt(self, addr, irsb, indir_jump): if self.project.loader.all_elf_objects: if not any([ addr in obj.reverse_plt for obj in self.project.loader.all_elf_objects ]): return False if not irsb.has_statements: irsb ...
Determine if the IRSB at the given address is a PLT stub. If it is, concretely execute the basic block to resolve the jump target. :param int addr: Address of the block. :param irsb: The basic block. :param IndirectJump indir_jump: The IndirectJump inst...
4,368
def set_orthogonal_selection(self, selection, value, fields=None): if self._read_only: err_read_only() if not self._cache_metadata: self._load_metadata_nosync() indexer = OrthogonalIndexer(selection, self) self._set_selectio...
Modify data via a selection for each dimension of the array. Parameters ---------- selection : tuple A selection for each dimension of the array. May be any combination of int, slice, integer array or Boolean array. value : scalar or array-like Value ...
4,369
def ParseFileObject(self, parser_mediator, file_object): volume = pyfsntfs.volume() try: volume.open_file_object(file_object) except IOError as exception: parser_mediator.ProduceExtractionWarning( .format(exception)) try: usn_change_journal = volume.get_usn_change_journ...
Parses a NTFS $UsnJrnl metadata file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object.
4,370
def get_category(self, id, **data): return self.get("/categories/{0}/".format(id), data=data)
GET /categories/:id/ Gets a :format:`category` by ID as ``category``.
4,371
def log_response(response: str, trim_log_values: bool = False, **kwargs: Any) -> None: return log_(response, response_logger, logging.INFO, trim=trim_log_values, **kwargs)
Log a response
4,372
def on_reset_compat_defaults_clicked(self, bnt): self.settings.general.reset() self.settings.general.reset() self.reload_erase_combos()
Reset default values to compat_{backspace,delete} dconf keys. The default values are retrivied from the guake.schemas file.
4,373
def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): name = referred_cls.__name__.lower() + "_ref" return name
Overriding naming schemes.
4,374
def credit_note(request, note_id, access_code=None): note_id = int(note_id) current_note = CreditNoteController.for_id_or_404(note_id) apply_form = forms.ApplyCreditNoteForm( current_note.credit_note.invoice.user, request.POST or None, prefix="apply_note" ) refund_for...
Displays a credit note. If ``request`` is a ``POST`` request, forms for applying or refunding a credit note will be processed. This view requires a login, and the logged in user must be staff. Arguments: note_id (castable to int): The ID of the credit note to view. Returns: rende...
4,375
def n_members(self): if self.is_finite: return reduce(mul, [domain.n_members for domain in self._domains], 1) else: return np.inf
Returns the number of members in the domain if it `is_finite`, otherwise, returns `np.inf`. :type: ``int`` or ``np.inf``
4,376
def inter_event_time_distribution(self, u=None, v=None): dist = {} if u is None: first = True delta = None for ext in self.stream_interactions(): if first: delta = ext first = False ...
Return the distribution of inter event time. If u and v are None the dynamic graph intere event distribution is returned. If u is specified the inter event time distribution of interactions involving u is returned. If u and v are specified the inter event time distribution of (u, v) interactions...
4,377
def create(self, to, from_, method=values.unset, fallback_url=values.unset, fallback_method=values.unset, status_callback=values.unset, status_callback_event=values.unset, status_callback_method=values.unset, send_digits=values.unset, timeout=values.unset, rec...
Create a new CallInstance :param unicode to: Phone number, SIP address, or client identifier to call :param unicode from_: Twilio number from which to originate the call :param unicode method: HTTP method to use to fetch TwiML :param unicode fallback_url: Fallback URL in case of error ...
4,378
def dataset_path(cache=None, cachefile="~/.io3d_cache.yaml", get_root=False): local_data_dir = local_dir if cachefile is not None: cache = cachef.CacheFile(cachefile) if cache is not None: local_data_dir = cache.get_or_save_default("local_dataset_dir", local_dir) if get_r...
Get dataset path. :param cache: CacheFile object :param cachefile: cachefile path, default '~/.io3d_cache.yaml' :return: path to dataset
4,379
def is_dsub_operation(cls, op): if not cls.is_pipelines_operation(op): return False for name in [, , ]: if not cls.get_operation_label(op, name): return False return True
Determine if a pipelines operation is a dsub request. We don't have a rigorous way to identify an operation as being submitted by dsub. Our best option is to check for certain fields that have always been part of dsub operations. - labels: job-id, job-name, and user-id have always existed - envs: ...
4,380
def run_nested(self, nlive_init=500, maxiter_init=None, maxcall_init=None, dlogz_init=0.01, logl_max_init=np.inf, nlive_batch=500, wt_function=None, wt_kwargs=None, maxiter_batch=None, maxcall_batch=None, maxiter=None, maxcall=None, maxbatch=No...
**The main dynamic nested sampling loop.** After an initial "baseline" run using a constant number of live points, dynamically allocates additional (nested) samples to optimize a specified weight function until a specified stopping criterion is reached. Parameters ---------- ...
4,381
def get_objective_bank_admin_session(self, proxy, *args, **kwargs): if not self.supports_objective_bank_admin(): raise Unimplemented() try: from . import sessions except ImportError: raise OperationFailed() proxy = self._convert_proxy(proxy) ...
Gets the OsidSession associated with the objective bank administration service. :param proxy: a proxy :type proxy: ``osid.proxy.Proxy`` :return: an ``ObjectiveBankAdminSession`` :rtype: ``osid.learning.ObjectiveBankAdminSession`` :raise: ``NullArgument`` -- ``proxy`` is ``null``...
4,382
def _find_max_lag(x, rho_limit=0.05, maxmaxlag=20000, verbose=0): acv = autocov(x) rho = acv[0, 1] / acv[0, 0] lam = -1. / np.log(abs(rho)) maxlag = int(np.floor(3. * lam)) + 1 jump = int(np.ceil(0.01 * lam)) + 1 T = len(x) while ((abs(rho) > rho_limit) & (maxl...
Automatically find an appropriate maximum lag to calculate IAT
4,383
def run_ipython_notebook(notebook_str): from runipy.notebook_runner import NotebookRunner import nbformat import logging log_format = log_datefmt = logging.basicConfig( level=logging.INFO, format=log_format, datefmt=log_datefmt ) print() n...
References: https://github.com/paulgb/runipy >>> from utool.util_ipynb import * # NOQA
4,384
def _calc(self, y, w): if self.discrete: self.lclass_ids = weights.lag_categorical(w, self.class_ids, ties="tryself") else: ly = weights.lag_spatial(w, y) self.lclass_ids, self.lag_cutoffs, self.m = self._...
Helper to estimate spatial lag conditioned Markov transition probability matrices based on maximum likelihood techniques.
4,385
def Ctrl_C(self, delay=0): self._delay(delay) self.add(Command("KeyDown", % (BoardKey.Ctrl, 1))) self.add(Command("KeyPress", % (BoardKey.C, 1))) self.add(Command("KeyUp", % (BoardKey.Ctrl, 1)))
Ctrl + C shortcut.
4,386
def move_dirty_lock_file(dirty_lock_file, sm_path): if dirty_lock_file is not None \ and not dirty_lock_file == os.path.join(sm_path, dirty_lock_file.split(os.sep)[-1]): logger.debug("Move dirty lock from root tmp folder {0} to state machine folder {1}" "".format(dirty_...
Move the dirt_lock file to the sm_path and thereby is not found by auto recovery of backup anymore
4,387
def marv(ctx, config, loglevel, logfilter, verbosity): if config is None: cwd = os.path.abspath(os.path.curdir) while cwd != os.path.sep: config = os.path.join(cwd, ) if os.path.exists(config): break cwd = os.path.dirname(cwd) else: ...
Manage a Marv site
4,388
def network_info(host=None, admin_username=None, admin_password=None, module=None): inv = inventory(host=host, admin_username=admin_username, admin_password=admin_password) if inv is None: cmd = {} cmd[] = -1 cm...
Return Network Configuration CLI Example: .. code-block:: bash salt dell dracr.network_info
4,389
def guess_mime_type(url): (mimetype, _mimeencoding) = mimetypes.guess_type(url) if not mimetype: ext = os.path.splitext(url)[1] mimetype = _MIME_TYPES.get(ext) _logger.debug("mimetype({}): {}".format(url, mimetype)) if not mimetype: mimetype = "application/octet-stream" ...
Use the mimetypes module to lookup the type for an extension. This function also adds some extensions required for HTML5
4,390
def generate_confirmation_token(self, user):
    """Generate a unique confirmation token for the given user.

    The payload combines the user's id (stringified) with a hash of the
    user's email address, then serializes it with the security module's
    confirmation serializer.

    :param user: the user to generate a confirmation token for
    :return: the serialized confirmation token
    """
    payload = [str(user.id), self.hash_data(user.email)]
    serializer = self.security.confirm_serializer
    return serializer.dumps(payload)
Generates a unique confirmation token for the specified user. :param user: The user to work with
4,391
def plot_slab(slab, ax, scale=0.8, repeat=5, window=1.5, draw_unit_cell=True, decay=0.2, adsorption_sites=True): orig_slab = slab.copy() slab = reorient_z(slab) orig_cell = slab.lattice.matrix.copy() if repeat: slab.make_supercell([repeat, repeat, 1]) coords = np.array(sor...
Function that helps visualize the slab in a 2-D plot, for convenient viewing of output of AdsorbateSiteFinder. Args: slab (slab): Slab object to be visualized ax (axes): matplotlib axes with which to visualize scale (float): radius scaling for sites repeat (int): number of repea...
4,392
def _setup_cgroups(self, my_cpus, memlimit, memory_nodes, cgroup_values): logging.debug("Setting up cgroups for run.") subsystems = [BLKIO, CPUACCT, FREEZER, MEMORY] + self._cgroup_subsystems if my_cpus is not None or memory_nodes is not None: subsystems.append(CPU...
This method creates the CGroups for the following execution. @param my_cpus: None or a list of the CPU cores to use @param memlimit: None or memory limit in bytes @param memory_nodes: None or a list of memory nodes of a NUMA system to use @param cgroup_values: dict of additional values t...
4,393
def query_extensions(self, extension_query, account_token=None, account_token_header=None): query_parameters = {} if account_token is not None: query_parameters[] = self._serialize.query(, account_token, ) content = self._serialize.body(extension_query, ) response = ...
QueryExtensions. [Preview API] :param :class:`<ExtensionQuery> <azure.devops.v5_1.gallery.models.ExtensionQuery>` extension_query: :param str account_token: :param String account_token_header: Header to pass the account token :rtype: :class:`<ExtensionQueryResult> <azure.devops.v...
4,394
def splits(self):
    """Return the SplitsAggregate for this book, building it lazily.

    The aggregate is constructed on first access and cached on the
    instance; subsequent calls return the cached object.
    """
    cached = self.__splits_aggregate
    if not cached:
        cached = SplitsAggregate(self.book)
        self.__splits_aggregate = cached
    return cached
Splits
4,395
def encode(self): tftpassert(self.filename, "filename required in initial packet") tftpassert(self.mode, "mode required in initial packet") filename = self.filename mode = self.mode if not isinstance(filename, bytes): filename = filename.encode() ...
Encode the packet's buffer from the instance variables.
4,396
def iMath(image, operation, *args): if operation not in _iMathOps: raise ValueError() imagedim = image.dimension outimage = image.clone() args = [imagedim, outimage, operation, image] + [a for a in args] processed_args = _int_antsProcessArguments(args) libfn = utils.get_lib_fn() ...
Perform various (often mathematical) operations on the input image/s. Additional parameters should be specific for each operation. See the the full iMath in ANTs, on which this function is based. ANTsR function: `iMath` Arguments --------- image : ANTsImage input object, usually antsIm...
4,397
def validate_single_matching_uri(all_blockchain_uris: List[str], w3: Web3) -> str: matching_uris = [ uri for uri in all_blockchain_uris if check_if_chain_matches_chain_uri(w3, uri) ] if not matching_uris: raise ValidationError("Package has no matching URIs on chain.") elif len(matc...
Return a single block URI after validating that it is the *only* URI in all_blockchain_uris that matches the w3 instance.
4,398
def _uniqualize(d): abcb pt = copy.deepcopy(d) seqs_for_del =[] vset = set({}) for k in pt: vset.add(pt[k]) tslen = vset.__len__() freq = {} for k in pt: v = pt[k] if(v in freq): freq[v] = freq[v] + 1 seqs_for_del.append(k) else: ...
d = {1:'a',2:'b',3:'c',4:'b'} _uniqualize(d)
4,399
def _onDocstring( self, docstr, line ): " Memorizes a function/class/module docstring " if self.objectsStack: self.objectsStack[ -1 ].docstring = \ Docstring( trim_docstring( docstr ), line ) return self.docstring = Docstring( trim_docstr...
Memorizes a function/class/module docstring