Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
374,400
def resync_package(ctx, opts, owner, repo, slug, skip_errors): click.echo( "Resynchonising the %(slug)s package ... " % {"slug": click.style(slug, bold=True)}, nl=False, ) context_msg = "Failed to resynchronise package!" with handle_api_exceptions( ctx, opts=opts, context_msg=context_msg, reraise_on_error=skip_errors ): with maybe_spinner(opts): api_resync_package(owner=owner, repo=repo, identifier=slug) click.secho("OK", fg="green")
Resynchronise a package.
374,401
def exit(self): if self.gdb_process: self.gdb_process.terminate() self.gdb_process.communicate() self.gdb_process = None return None
Terminate gdb process Returns: None
374,402
def _open_ftp(self): ftp = self.fs._open_ftp() ftp.voidcmd(str("TYPE I")) return ftp
Open an ftp object for the file.
374,403
def _modify_new_lines(code_to_modify, offset, code_to_insert): new_list = list(code_to_modify.co_lnotab) if not new_list: return None bytecode_delta = len(code_to_insert) byte_increments = code_to_modify.co_lnotab[0::2] line_increments = code_to_modify.co_lnotab[1::2] if offset == 0: new_list[0] += bytecode_delta else: addr = 0 it = zip(byte_increments, line_increments) for i, (byte_incr, _line_incr) in enumerate(it): addr += byte_incr if addr == offset: new_list[i * 2] += bytecode_delta break return bytes(new_list)
Update new lines: the bytecode inserted should be the last instruction of the previous line. :return: bytes sequence of code with updated lines offsets
374,404
def _cast_dict(self, data_dict): for key, value in data_dict.iteritems(): data_dict[key] = self._cast_value(value) if in data_dict: del data_dict[] return data_dict
Internal method that makes sure any dictionary elements are properly cast into the correct types, instead of just treating everything like a string from the csv file. Args: data_dict: dictionary containing bro log data. Returns: Cleaned Data dict.
374,405
def renderHTTP(self, context): request = IRequest(context) if request.isSecure(): renderer = self.wrappedResource else: renderer = _SecureWrapper(self.urlGenerator, self.wrappedResource) return renderer.renderHTTP(context)
Render the wrapped resource if HTTPS is already being used, otherwise invoke a helper which may generate a redirect.
374,406
def update(self, data, key): og_data = self.read() og_data[key] = data self.write(og_data)
Update a key's value's in a JSON file.
374,407
def populate_parallel_text(extract_dir: str, file_sets: List[Tuple[str, str, str]], dest_prefix: str, keep_separate: bool, head_n: int = 0): source_out = None target_out = None lines_written = 0 if not keep_separate: source_dest = dest_prefix + SUFFIX_SRC_GZ target_dest = dest_prefix + SUFFIX_TRG_GZ logging.info("Populate: %s %s", source_dest, target_dest) source_out = gzip.open(source_dest, "wt", encoding="utf-8") target_out = gzip.open(target_dest, "wt", encoding="utf-8") for i, (source_fname, target_fname, text_type) in enumerate(file_sets): if keep_separate: if source_out: source_out.close() if target_out: target_out.close() source_dest = dest_prefix + str(i) + "." + SUFFIX_SRC_GZ target_dest = dest_prefix + str(i) + "." + SUFFIX_TRG_GZ logging.info("Populate: %s %s", source_dest, target_dest) source_out = gzip.open(source_dest, "wt", encoding="utf-8") target_out = gzip.open(target_dest, "wt", encoding="utf-8") for source_line, target_line in zip( plain_text_iter(os.path.join(extract_dir, source_fname), text_type, DATA_SRC), plain_text_iter(os.path.join(extract_dir, target_fname), text_type, DATA_TRG)): if head_n > 0 and lines_written >= head_n: if keep_separate: lines_written = 0 break source_out.write("{}\n".format(source_line)) target_out.write("{}\n".format(target_line)) lines_written += 1 source_out.close() target_out.close()
Create raw parallel train, dev, or test files with a given prefix. :param extract_dir: Directory where raw files (inputs) are extracted. :param file_sets: Sets of files to use. :param dest_prefix: Prefix for output files. :param keep_separate: True if each file set (source-target pair) should have its own file (used for test sets). :param head_n: If N>0, use only the first N lines (used in test mode).
374,408
def show(self): hyper_combos = itertools.product(*list(self.hyper_params.values())) if not self.models: c_values = [[idx + 1, list(val)] for idx, val in enumerate(hyper_combos)] print(H2OTwoDimTable( col_header=[, + .join(list(self.hyper_params.keys())) + ], table_header= + self.model.__class__.__name__, cell_values=c_values)) else: print(self.sorted_metric_table())
Print models sorted by metric.
374,409
def jplace_split(self, original_jplace, cluster_dict): output_hash = {} for placement in original_jplace[]: alias_placements_list = [] nm_dict = {} p = placement[] if in placement.keys(): nm = placement[] elif in placement.keys(): nm = placement[] else: raise Exception("Unexpected jplace format: Either or are expected as keys in placement jplace .JSON file") for nm_entry in nm: nm_list = [] placement_read_name, plval = nm_entry read_alias_idx = placement_read_name.split()[-1] read_name = .join(placement_read_name.split()[:-1]) read_cluster = cluster_dict[read_alias_idx][read_name] for read in read_cluster: nm_list.append([read.name, plval]) if read_alias_idx not in nm_dict: nm_dict[read_alias_idx] = nm_list else: nm_dict[read_alias_idx] += nm_entry for alias_idx, nm_list in nm_dict.iteritems(): placement_hash = {: p, : nm_list} if alias_idx not in output_hash: output_hash[alias_idx] = [placement_hash] else: output_hash[alias_idx].append(placement_hash) return output_hash
To make GraftM more efficient, reads are dereplicated and merged into one file prior to placement using pplacer. This function separates the single jplace file produced by this process into the separate jplace files, one per input file (if multiple were provided) and backfills abundance (re-replicates?) into the placement file so analyses can be done using the placement files. Parameters ---------- original_jplace : dict (json) json .jplace file from the pplacer step. cluster_dict : dict dictionary stores information on pre-placement clustering Returns ------- A dict containing placement hashes to write to new jplace file. Each key represents a file alias
374,410
def bytes_from_readable_size(C, size, suffix=): s = re.split("^([0-9\.]+)\s*([%s]?)%s?" % (.join(C.SIZE_UNITS), suffix), size, flags=re.I) bytes, unit = round(float(s[1])), s[2].upper() while unit in C.SIZE_UNITS and C.SIZE_UNITS.index(unit) > 0: bytes *= 1024 unit = C.SIZE_UNITS[C.SIZE_UNITS.index(unit) - 1] return bytes
given a readable_size (as produced by File.readable_size()), return the number of bytes.
374,411
def count_variants_barplot(data): keys = OrderedDict() keys[] = {: } keys[] = {: } keys[] = {: } keys[] = {: } keys[] = {: } keys[] = {: } keys[] = {: } keys[] = {: } plot_conf = { : , : , : , : } return bargraph.plot(data, keys, plot_conf)
Return HTML for the Variant Counts barplot
374,412
def unescape(b, encoding): return string_literal_re.sub( lambda m: unescape_string_literal(m.group(), encoding), b )
Unescape all string and unicode literals in bytes.
374,413
def _rescale(self, bands): self.output("Rescaling", normal=True, arrow=True) for key, band in enumerate(bands): self.output("band %s" % self.bands[key], normal=True, color=, indent=1) bands[key] = sktransform.rescale(band, 2) bands[key] = (bands[key] * 65535).astype() return bands
Rescale bands
374,414
def execute(self): scenario_name = self._command_args[] role_name = os.getcwd().split(os.sep)[-1] role_directory = util.abs_path(os.path.join(os.getcwd(), os.pardir)) msg = .format(scenario_name) LOG.info(msg) molecule_directory = config.molecule_directory( os.path.join(role_directory, role_name)) scenario_directory = os.path.join(molecule_directory, scenario_name) scenario_base_directory = os.path.dirname(scenario_directory) if os.path.isdir(scenario_directory): msg = ( ).format(scenario_name) util.sysexit_with_message(msg) scenario_base_directory = os.path.join(role_directory, role_name) templates = [ .format(**self._command_args), .format(**self._command_args), ] for template in templates: self._process_templates(template, self._command_args, scenario_base_directory) self._process_templates(, self._command_args, role_directory) role_directory = os.path.join(role_directory, role_name) msg = .format( scenario_directory) LOG.success(msg)
Execute the actions necessary to perform a `molecule init scenario` and returns None. :return: None
374,415
def run_doxygen(folder): try: retcode = subprocess.call("cd %s; make doxygen" % folder, shell=True) if retcode < 0: sys.stderr.write("doxygen terminated by signal %s" % (-retcode)) except OSError as e: sys.stderr.write("doxygen execution failed: %s" % e)
Run the doxygen make command in the designated folder.
374,416
def l2norm_squared(a): value = 0 for i in xrange(a.shape[1]): value += np.dot(a[:,i],a[:,i]) return value
L2 normalize squared
374,417
def close_right(self): current_widget = self.widget(self.tab_under_menu()) index = self.indexOf(current_widget) if self._try_close_dirty_tabs(tab_range=range(index + 1, self.count())): while True: widget = self.widget(self.count() - 1) if widget != current_widget: self.remove_tab(self.count() - 1) else: break
Closes every editors tabs on the left of the current one.
374,418
def h(self): r if np.size(self._h) > 1: assert np.size(self._h) == self.n_modelparams return self._h else: return self._h * np.ones(self.n_modelparams)
r""" Returns the step size to be used in numerical differentiation with respect to the model parameters. The step size is given as a vector with length ``n_modelparams`` so that each model parameter can be weighted independently.
374,419
def persist_database(metamodel, path, mode=): with open(path, mode) as f: for kind in sorted(metamodel.metaclasses.keys()): metaclass = metamodel.metaclasses[kind] s = serialize_class(metaclass.clazz) f.write(s) for index_name, attribute_names in metaclass.indices.items(): attribute_names = .join(attribute_names) s = % (index_name, metaclass.kind, attribute_names) f.write(s) for ass in sorted(metamodel.associations, key=lambda x: x.rel_id): s = serialize_association(ass) f.write(s) for inst in metamodel.instances: s = serialize_instance(inst) f.write(s)
Persist all instances, class definitions and association definitions in a *metamodel* by serializing them and saving to a *path* on disk.
374,420
def crop(img, center, sz, mode=): center = np.array(center) sz = np.array(sz) istart = (center - sz / 2.).astype() iend = istart + sz imsz = img.shape[:2] if np.any(istart < 0) or np.any(iend > imsz): padwidth = [(np.minimum(0, istart[0]), np.maximum(0, iend[0]-imsz[0])), (np.minimum(0, istart[1]), np.maximum(0, iend[1]-imsz[1]))] padwidth += [(0, 0)] * (len(img.shape) - 2) img = np.pad(img, padwidth, mode=mode) istart = (np.maximum(0, istart[0]), np.maximum(0, istart[1])) return img[istart[0]:istart[0]+sz[0], istart[1]:istart[1]+sz[1]] return img[istart[0]:iend[0], istart[1]:iend[1]]
crop sz from ij as center :param img: :param center: ij :param sz: :param mode: :return:
374,421
def _notify_exit_thread(self, event): dwThreadId = event.get_tid() if self._has_thread_id(dwThreadId): self._del_thread(dwThreadId) return True
Notify the termination of a thread. This is done automatically by the L{Debug} class, you shouldn't need to call it yourself. @type event: L{ExitThreadEvent} @param event: Exit thread event. @rtype: bool @return: C{True} to call the user-defined handle, C{False} otherwise.
374,422
def availableRoles(self): s registration is not role-specific. ' eventRoles = self.eventrole_set.filter(capacity__gt=0) if eventRoles.count() > 0: return [x.role for x in eventRoles] elif isinstance(self,Series): return self.classDescription.danceTypeLevel.danceType.roles.all() return []
Returns the set of roles for this event. Since roles are not always custom specified for event, this looks for the set of available roles in multiple places. If no roles are found, then the method returns an empty list, in which case it can be assumed that the event's registration is not role-specific.
374,423
def create_thumbnail(uuid, thumbnail_width): size = thumbnail_width +
Create the thumbnail for an image.
374,424
def size(args): p = OptionParser(size.__doc__) opts, args = p.parse_args(args) if len(args) < 1: sys.exit(not p.print_help()) total_size = total_numrecords = 0 for f in args: cur_size = cur_numrecords = 0 for rec in iter_fastq(f): if not rec: break cur_numrecords += 1 cur_size += len(rec) print(" ".join(str(x) for x in \ (op.basename(f), cur_numrecords, cur_size))) total_numrecords += cur_numrecords total_size += cur_size if len(args) > 1: print(" ".join(str(x) for x in \ ("Total", total_numrecords, total_size)))
%prog size fastqfile Find the total base pairs in a list of fastq files
374,425
def chk_col_numbers(line_num, num_cols, tax_id_col, id_col, symbol_col): bad_col = if tax_id_col >= num_cols: bad_col = elif id_col >= num_cols: bad_col = elif symbol_col >= num_cols: bad_col = if bad_col: raise Exception( % (line_num, bad_col))
Check that none of the input column numbers is out of range. (Instead of defining this function, we could depend on Python's built-in IndexError exception for this issue, but the IndexError exception wouldn't include line number information, which is helpful for users to find exactly which line is the culprit.)
374,426
def get_common_password_hash(self, salt): password = self._password if password is None: raise SRPException() return self.hash(salt, self.hash(self._user, password, joiner=))
x = H(s | H(I | ":" | P)) :param int salt: :rtype: int
374,427
def guess_version_by_running_live_package( pkg_key, default="?" ): try: m = import_module(pkg_key) except ImportError: return default else: return getattr(m, "__version__", default)
Guess the version of a pkg when pip doesn't provide it. :param str pkg_key: key of the package :param str default: default version to return if unable to find :returns: version :rtype: string
374,428
def list(region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) buckets = conn.list_buckets() if not bool(buckets.get()): log.warning() if in buckets: del buckets[] return buckets except ClientError as e: return {: __utils__[](e)}
List all buckets owned by the authenticated sender of the request. Returns list of buckets CLI Example: .. code-block:: yaml Owner: {...} Buckets: - {...} - {...}
374,429
def manage_request_types_view(request): request_types = RequestType.objects.all() return render_to_response(, { : "Admin - Manage Request Types", : request_types }, context_instance=RequestContext(request))
Manage requests. Display a list of request types with links to edit them. Also display a link to add a new request type. Restricted to presidents and superadmins.
374,430
def pip_search(self, search_string=None): extra_args = [, search_string] return self._call_pip(name=, extra_args=extra_args, callback=self._pip_search)
Search for pip packages in PyPI matching `search_string`.
374,431
def first_rec(ofile, Rec, file_type): keylist = [] opened = False while not opened: try: pmag_out = open(ofile, ) opened = True except IOError: time.sleep(1) outstring = "tab \t" + file_type + "\n" pmag_out.write(outstring) keystring = "" for key in list(Rec.keys()): keystring = keystring + + key.strip() keylist.append(key) keystring = keystring + pmag_out.write(keystring[1:]) pmag_out.close() return keylist
opens the file ofile as a magic template file with headers as the keys to Rec
374,432
def draw_on_image(self, image, color=(0, 255, 0), alpha=1.0, size=3, copy=True, raise_if_out_of_image=False): if copy: image = np.copy(image) if image.ndim == 2: assert ia.is_single_number(color), ( "Got a 2D image. Expected then to be a single number, " "but got %s." % (str(color),)) elif image.ndim == 3 and ia.is_single_number(color): color = [color] * image.shape[-1] input_dtype = image.dtype alpha_color = color if alpha < 0.01: return image elif alpha > 0.99: alpha = 1 else: image = image.astype(np.float32, copy=False) alpha_color = alpha * np.array(color) height, width = image.shape[0:2] y, x = self.y_int, self.x_int x1 = max(x - size//2, 0) x2 = min(x + 1 + size//2, width) y1 = max(y - size//2, 0) y2 = min(y + 1 + size//2, height) x1_clipped, x2_clipped = np.clip([x1, x2], 0, width) y1_clipped, y2_clipped = np.clip([y1, y2], 0, height) x1_clipped_ooi = (x1_clipped < 0 or x1_clipped >= width) x2_clipped_ooi = (x2_clipped < 0 or x2_clipped >= width+1) y1_clipped_ooi = (y1_clipped < 0 or y1_clipped >= height) y2_clipped_ooi = (y2_clipped < 0 or y2_clipped >= height+1) x_ooi = (x1_clipped_ooi and x2_clipped_ooi) y_ooi = (y1_clipped_ooi and y2_clipped_ooi) x_zero_size = (x2_clipped - x1_clipped) < 1 y_zero_size = (y2_clipped - y1_clipped) < 1 if not x_ooi and not y_ooi and not x_zero_size and not y_zero_size: if alpha == 1: image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] = color else: image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] = ( (1 - alpha) * image[y1_clipped:y2_clipped, x1_clipped:x2_clipped] + alpha_color ) else: if raise_if_out_of_image: raise Exception( "Cannot draw keypoint x=%.8f, y=%.8f on image with " "shape %s." % (y, x, image.shape)) if image.dtype.name != input_dtype.name: if input_dtype.name == "uint8": image = np.clip(image, 0, 255, out=image) image = image.astype(input_dtype, copy=False) return image
Draw the keypoint onto a given image. The keypoint is drawn as a square. Parameters ---------- image : (H,W,3) ndarray The image onto which to draw the keypoint. color : int or list of int or tuple of int or (3,) ndarray, optional The RGB color of the keypoint. If a single int ``C``, then that is equivalent to ``(C,C,C)``. alpha : float, optional The opacity of the drawn keypoint, where ``1.0`` denotes a fully visible keypoint and ``0.0`` an invisible one. size : int, optional The size of the keypoint. If set to ``S``, each square will have size ``S x S``. copy : bool, optional Whether to copy the image before drawing the keypoint. raise_if_out_of_image : bool, optional Whether to raise an exception if the keypoint is outside of the image. Returns ------- image : (H,W,3) ndarray Image with drawn keypoint.
374,433
def push_notification_devices_destroy_many(self, data, **kwargs): "https://developer.zendesk.com/rest_api/docs/core/push_notification_devices api_path = "/api/v2/push_notification_devices/destroy_many.json" return self.call(api_path, method="POST", data=data, **kwargs)
https://developer.zendesk.com/rest_api/docs/core/push_notification_devices#bulk-unregister-push-notification-devices
374,434
def _set_redist_rip(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=redist_rip.redist_rip, is_container=, presence=False, yang_name="redist-rip", rest_name="redist-rip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u}}, namespace=, defining_module=, yang_type=, is_config=False) except (TypeError, ValueError): raise ValueError({ : , : "container", : , }) self.__redist_rip = t if hasattr(self, ): self._set()
Setter method for redist_rip, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6/redist_rip (container) If this variable is read-only (config: false) in the source YANG file, then _set_redist_rip is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_redist_rip() directly.
374,435
def clean_inputs(data): if not utils.get_in(data, ("config", "algorithm", "variant_regions_orig")): data["config"]["algorithm"]["variant_regions_orig"] = dd.get_variant_regions(data) clean_vr = clean_file(dd.get_variant_regions(data), data, prefix="cleaned-") merged_vr = merge_overlaps(clean_vr, data) data["config"]["algorithm"]["variant_regions"] = clean_vr data["config"]["algorithm"]["variant_regions_merged"] = merged_vr if dd.get_coverage(data): if not utils.get_in(data, ("config", "algorithm", "coverage_orig")): data["config"]["algorithm"]["coverage_orig"] = dd.get_coverage(data) clean_cov_bed = clean_file(dd.get_coverage(data), data, prefix="cov-", simple=True) merged_cov_bed = merge_overlaps(clean_cov_bed, data) data["config"]["algorithm"]["coverage"] = clean_cov_bed data["config"]["algorithm"]["coverage_merged"] = merged_cov_bed if in get_svcallers(data): seq2c_ready_bed = prep_seq2c_bed(data) if not seq2c_ready_bed: logger.warning("Can't run Seq2C without a svregions or variant_regions BED file") else: data["config"]["algorithm"]["seq2c_bed_ready"] = seq2c_ready_bed elif regions.get_sv_bed(data): dd.set_sv_regions(data, clean_file(regions.get_sv_bed(data), data, prefix="svregions-")) return data
Clean BED input files to avoid overlapping segments that cause downstream issues. Per-merges inputs to avoid needing to call multiple times during later parallel steps.
374,436
def Decompress(self, compressed_data): try: if hasattr(lzma, ): uncompressed_data = self._lzma_decompressor.decompress( compressed_data, 0) else: uncompressed_data = self._lzma_decompressor.decompress(compressed_data) remaining_compressed_data = getattr( self._lzma_decompressor, , b) except (EOFError, IOError, LZMAError) as exception: raise errors.BackEndError(( ).format(exception)) return uncompressed_data, remaining_compressed_data
Decompresses the compressed data. Args: compressed_data (bytes): compressed data. Returns: tuple(bytes, bytes): uncompressed data and remaining compressed data. Raises: BackEndError: if the XZ compressed stream cannot be decompressed.
374,437
def GetDatabaseAccount(self, url_connection=None): if url_connection is None: url_connection = self.url_connection initial_headers = dict(self.default_headers) headers = base.GetHeaders(self, initial_headers, , , , , {}) request = request_object._RequestObject(, documents._OperationType.Read, url_connection) result, self.last_response_headers = self.__Get(, request, headers) database_account = documents.DatabaseAccount() database_account.DatabasesLink = database_account.MediaLink = if (http_constants.HttpHeaders.MaxMediaStorageUsageInMB in self.last_response_headers): database_account.MaxMediaStorageUsageInMB = ( self.last_response_headers[ http_constants.HttpHeaders.MaxMediaStorageUsageInMB]) if (http_constants.HttpHeaders.CurrentMediaStorageUsageInMB in self.last_response_headers): database_account.CurrentMediaStorageUsageInMB = ( self.last_response_headers[ http_constants.HttpHeaders.CurrentMediaStorageUsageInMB]) database_account.ConsistencyPolicy = result.get(constants._Constants.UserConsistencyPolicy) if constants._Constants.WritableLocations in result: database_account._WritableLocations = result[constants._Constants.WritableLocations] if constants._Constants.ReadableLocations in result: database_account._ReadableLocations = result[constants._Constants.ReadableLocations] if constants._Constants.EnableMultipleWritableLocations in result: database_account._EnableMultipleWritableLocations = result[constants._Constants.EnableMultipleWritableLocations] self._useMultipleWriteLocations = self.connection_policy.UseMultipleWriteLocations and database_account._EnableMultipleWritableLocations return database_account
Gets database account info. :return: The Database Account. :rtype: documents.DatabaseAccount
374,438
def count_leaves(x): if hasattr(x, ): x = list(x.values()) if hasattr(x, ): return sum(map(count_leaves, x)) return 1
Return the number of non-sequence items in a given recursive sequence.
374,439
def runCLI(): args = docopt(__doc__, version=) try: check_arguments(args) command_list = [, , ] select = itemgetter(, , ) selectedCommand = command_list[select(args).index(True)] cmdClass = get_command_class(selectedCommand) obj = cmdClass(args) obj.execute_command() except POSSIBLE_EXCEPTIONS as e: print(, e, )
The starting point for the execution of the Scrapple command line tool. runCLI uses the docstring as the usage description for the scrapple command. \ The class for the required command is selected by a dynamic dispatch, and the \ command is executed through the execute_command() method of the command class.
374,440
def delete_namespaced_deployment(self, name, namespace, **kwargs): kwargs[] = True if kwargs.get(): return self.delete_namespaced_deployment_with_http_info(name, namespace, **kwargs) else: (data) = self.delete_namespaced_deployment_with_http_info(name, namespace, **kwargs) return data
delete_namespaced_deployment # noqa: E501 delete a Deployment # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_deployment(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Deployment (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. 
:param V1DeleteOptions body: :return: V1Status If the method is called asynchronously, returns the request thread.
374,441
def wrap_handler(self, handler, context_switcher): context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler)) context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler))
Enable/Disable handler.
374,442
def convert_parameters(self, request, *args, **kwargs): args = list(args) urlparam_i = 0 parameters = self.view_parameters.get(request.method.lower()) or self.view_parameters.get(None) if parameters is not None: for parameter_i, parameter in enumerate(parameters): if parameter_i == 0 or parameter.kind is inspect.Parameter.VAR_POSITIONAL or parameter.kind is inspect.Parameter.VAR_KEYWORD: pass elif parameter.name in kwargs: kwargs[parameter.name] = self.convert_value(kwargs[parameter.name], parameter, request) elif parameter_i - 1 < len(args): args[parameter_i - 1] = self.convert_value(args[parameter_i - 1], parameter, request) elif urlparam_i < len(request.dmp.urlparams): kwargs[parameter.name] = self.convert_value(request.dmp.urlparams[urlparam_i], parameter, request) urlparam_i += 1 elif parameter.default is not inspect.Parameter.empty: kwargs[parameter.name] = self.convert_value(parameter.default, parameter, request) else: kwargs[parameter.name] = self.convert_value(None, parameter, request) return args, kwargs
Iterates the urlparams and converts them according to the type hints in the current view function. This is the primary function of the class.
374,443
def metadata_to_buffers(metadata): results = [] for key, value in metadata.items(): assert len(key) < 256 assert len(value) < 2 ** 32 results.extend([ struct.pack(, len(key)), key, struct.pack(, len(value)), value, ]) return results
Transform a dict of metadata into a sequence of buffers. :param metadata: The metadata, as a dict. :returns: A list of buffers.
374,444
def fit_overlays(self, text, run_matchers=None, **kw): self._maybe_run_matchers(text, run_matchers) for i in self._list_match.fit_overlay(text, **kw): yield i
First all matchers will run and then I will try to combine them. Use run_matchers to force running(True) or not running(False) the matchers. See ListMatcher for arguments.
374,445
def groupfinder(userid, request): if userid and hasattr(request, "user") and request.user: groups = ["group:%s" % g.id for g in request.user.groups] return groups return []
Default groupfinder implementaion for pyramid applications :param userid: :param request: :return:
374,446
def update(self, pbar): if pbar.seconds_elapsed < 2e-6 or pbar.currval < 2e-6: scaled = power = 0 else: speed = pbar.currval / pbar.seconds_elapsed power = int(math.log(speed, 1000)) scaled = speed / 1000.**power return self._format % (scaled, self.prefixes[power], self.unit)
Updates the widget with the current SI prefixed speed.
374,447
def uniform_random_global_network(loc=2000, scale=250, n=100): arr = (np.random.normal(loc, scale, n)).astype(int) return pd.DataFrame(data={: arr, : uniform_random_global_points(n), : uniform_random_global_points(n)})
Returns an array of `n` uniformally randomly distributed `shapely.geometry.Point` objects.
374,448
def search_profiles( self, parent, request_metadata, profile_query=None, page_size=None, offset=None, disable_spell_check=None, order_by=None, case_sensitive_sort=None, histogram_queries=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): if "search_profiles" not in self._inner_api_calls: self._inner_api_calls[ "search_profiles" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.search_profiles, default_retry=self._method_configs["SearchProfiles"].retry, default_timeout=self._method_configs["SearchProfiles"].timeout, client_info=self._client_info, ) request = profile_service_pb2.SearchProfilesRequest( parent=parent, request_metadata=request_metadata, profile_query=profile_query, page_size=page_size, offset=offset, disable_spell_check=disable_spell_check, order_by=order_by, case_sensitive_sort=case_sensitive_sort, histogram_queries=histogram_queries, ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["search_profiles"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="histogram_query_results", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
Searches for profiles within a tenant. For example, search by raw queries "software engineer in Mountain View" or search by structured filters (location filter, education filter, etc.). See ``SearchProfilesRequest`` for more information. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ProfileServiceClient() >>> >>> parent = client.tenant_path('[PROJECT]', '[TENANT]') >>> >>> # TODO: Initialize `request_metadata`: >>> request_metadata = {} >>> >>> # Iterate over all results >>> for element in client.search_profiles(parent, request_metadata): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.search_profiles(parent, request_metadata).pages: ... for element in page: ... # process element ... pass Args: parent (str): Required. The resource name of the tenant to search within. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, "projects/api-test-project/tenants/foo". request_metadata (Union[dict, ~google.cloud.talent_v4beta1.types.RequestMetadata]): Required. The meta information collected about the profile search user. This is used to improve the search quality of the service. These values are provided by users, and must be precise and consistent. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.RequestMetadata` profile_query (Union[dict, ~google.cloud.talent_v4beta1.types.ProfileQuery]): Optional. Search query to execute. See ``ProfileQuery`` for more details. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.ProfileQuery` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page streaming is performed per-page, this determines the maximum number of resources in a page. offset (int): Optional. An integer that specifies the current offset (that is, starting result) in search results. This field is only considered if ``page_token`` is unset. The maximum allowed value is 5000. Otherwise an error is thrown. For example, 0 means to search from the first profile, and 10 means to search from the 11th profile. This can be used for pagination, for example pageSize = 10 and offset = 10 means to search from the second page. disable_spell_check (bool): Optional. This flag controls the spell-check feature. If ``false``, the service attempts to correct a misspelled query. For example, "enginee" is corrected to "engineer". order_by (str): Optional. The criteria that determines how search results are sorted. Defaults is "relevance desc" if no value is specified. Supported options are: - "relevance desc": By descending relevance, as determined by the API algorithms. - "update\_date desc": Sort by ``Profile.update_date`` in descending order (recently updated profiles first). - "create\_date desc": Sort by ``Profile.create_date`` in descending order (recently created profiles first). - "first\_name": Sort by ``PersonStrcuturedName.given_name`` in ascending order. - "first\_name desc": Sort by ``PersonStrcuturedName.given_name`` in descending order. - "last\_name": Sort by ``PersonStrcuturedName.family_name`` in ascending order. - "last\_name desc": Sort by ``PersonStrcuturedName.family_name`` in ascending order. case_sensitive_sort (bool): Optional. When sort by field is based on alphabetical order, sort values case sensitively (based on ASCII) when the value is set to true. Default value is case in-sensitive sort (false). histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): Optional. A list of expressions specifies histogram requests against matching profiles for ``SearchProfilesRequest``. 
The expression syntax looks like a function definition with optional parameters. Function syntax: function\_name(histogram\_facet[, list of buckets]) Data types: - Histogram facet: facet names with format [a-zA-Z][a-zA-Z0-9\_]+. - String: string like "any string with backslash escape for quote(")." - Number: whole number and floating point number like 10, -1 and -0.01. - List: list of elements with comma(,) separator surrounded by square brackets. For example, [1, 2, 3] and ["one", "two", "three"]. Built-in constants: - MIN (minimum number similar to java Double.MIN\_VALUE) - MAX (maximum number similar to java Double.MAX\_VALUE) Built-in functions: - bucket(start, end[, label]) Bucket build-in function creates a bucket with range of \`start, end). Note that the end is exclusive. For example, bucket(1, MAX, "positive number") or bucket(1, 10). Histogram Facets: - admin1: Admin1 is a global placeholder for referring to state, province, or the particular term a country uses to define the geographic structure below the country level. Examples include states codes such as "CA", "IL", "NY", and provinces, such as "BC". - locality: Locality is a global placeholder for referring to city, town, or the particular term a country uses to define the geographic structure below the admin1 level. Examples include city names such as "Mountain View" and "New York". - extended\_locality: Extended locality is concatenated version of admin1 and locality with comma separator. For example, "Mountain View, CA" and "New York, NY". - postal\_code: Postal code of profile which follows locale code. - country: Country code (ISO-3166-1 alpha-2 code) of profile, such as US, JP, GB. - job\_title: Normalized job titles specified in EmploymentHistory. - company\_name: Normalized company name of profiles to match on. - institution: The school name. For example, "MIT", "University of California, Berkeley" - degree: Highest education degree in ISCED code. 
Each value in degree covers specific level of education, without any expansion to upper nor lower levels of education degree. - experience\_in\_months: experience in months. 0 means 0 month to 1 month (exclusive). - application\_date: The application date specifies application start dates. See [ApplicationDateFilter\` for more details. - application\_outcome\_reason: The application outcome reason specifies the outcome reasons of job application. See ``ApplicationOutcomeReasonFilter`` for more details. - application\_last\_stage: The application last stage specifies the last stage of job application. See ``ApplicationLastStageFilter`` for more details. - application\_job\_title: The application job title specifies the job applied for in the application. See ``ApplicationJobFilter`` for more details. - application\_status: The application status specifies the status of job application. See ``ApplicationStatusFilter`` for more details. - hirable\_status: Hirable status specifies the profile's hirable status. - string\_custom\_attribute: String custom attributes. Values can be accessed via square bracket notation like string\_custom\_attribute["key1"]. - numeric\_custom\_attribute: Numeric custom attributes. Values can be accessed via square bracket notation like numeric\_custom\_attribute["key1"]. Example expressions: - count(admin1) - count(experience\_in\_months, [bucket(0, 12, "1 year"), bucket(12, 36, "1-3 years"), bucket(36, MAX, "3+ years")]) - count(string\_custom\_attribute["assigned\_recruiter"]) - count(numeric\_custom\_attribute["favorite\_number"], [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative")]) If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.HistogramQuery` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.talent_v4beta1.types.HistogramQueryResult` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
374,449
def fail(self, message, status=500, **kw): self.request.response.setStatus(status) result = {"success": False, "errors": message, "status": status} result.update(kw) return result
Set a JSON error object and a status to the response
374,450
def load_structure_from_file(context: InstaloaderContext, filename: str) -> JsonExportable: compressed = filename.endswith() if compressed: fp = lzma.open(filename, ) else: fp = open(filename, ) json_structure = json.load(fp) fp.close() if in json_structure and in json_structure and \ in json_structure[]: node_type = json_structure[][] if node_type == "Post": return Post(context, json_structure[]) elif node_type == "Profile": return Profile(context, json_structure[]) elif node_type == "StoryItem": return StoryItem(context, json_structure[]) else: raise InvalidArgumentException("{}: Not an Instaloader JSON.".format(filename)) elif in json_structure: return Post.from_shortcode(context, json_structure[]) else: raise InvalidArgumentException("{}: Not an Instaloader JSON.".format(filename))
Loads a :class:`Post`, :class:`Profile` or :class:`StoryItem` from a '.json' or '.json.xz' file that has been saved by :func:`save_structure_to_file`. :param context: :attr:`Instaloader.context` linked to the new object, used for additional queries if neccessary. :param filename: Filename, ends in '.json' or '.json.xz'
374,451
def parse(cls, value, default=_no_default): if isinstance(value, cls): return value elif isinstance(value, int): e = cls._make_value(value) else: if not value: e = cls._make_value(0) else: r = 0 for k in value.split(","): v = cls._name_to_member.get(k, _no_default) if v is _no_default: if default is _no_default: raise _create_invalid_value_error(cls, value) else: return default r |= v.value e = cls._make_value(r) if not e.is_valid(): if default is _no_default: raise _create_invalid_value_error(cls, value) return default return e
Parses a flag integer or string into a Flags instance. Accepts the following types: - Members of this enum class. These are returned directly. - Integers. These are converted directly into a Flags instance with the given name. - Strings. The function accepts a comma-delimited list of flag names, corresponding to members of the enum. These are all ORed together. Examples: >>> class Car(Flags): ... is_big = 1 ... has_wheels = 2 >>> Car.parse(1) Car.is_big >>> Car.parse(3) Car.parse('has_wheels,is_big') >>> Car.parse('is_big,has_wheels') Car.parse('has_wheels,is_big')
374,452
def usedoc(other): docstring def inner(fnc): fnc.__doc__ = fnc.__doc__ or getattr(other, ) return fnc return inner
Decorator which copies __doc__ of given object into decorated one. Usage: >>> def fnc1(): ... """docstring""" ... pass >>> @usedoc(fnc1) ... def fnc2(): ... pass >>> fnc2.__doc__ 'docstring'collections.abc.D :param other: anything with a __doc__ attribute :type other: any :returns: decorator function :rtype: callable
374,453
def get(name, function=None): if function is not None: if hasattr(function, Settings.FUNCTION_SETTINGS_NAME): if name in getattr(function, Settings.FUNCTION_SETTINGS_NAME): return getattr(function, Settings.FUNCTION_SETTINGS_NAME)[name] return Settings.__global_setting_values[name]
Get a setting. `name` should be the name of the setting to look for. If the optional argument `function` is passed, this will look for a value local to the function before retrieving the global value.
374,454
def verify_fft_options(opt, parser): if len(opt.fft_backends) > 0: _all_backends = get_backend_names() for backend in opt.fft_backends: if backend not in _all_backends: parser.error("Backend {0} is not available".format(backend)) for backend in get_backend_modules(): try: backend.verify_fft_options(opt, parser) except AttributeError: pass
Parses the FFT options and verifies that they are reasonable. Parameters ---------- opt : object Result of parsing the CLI with OptionParser, or any object with the required attributes. parser : object OptionParser instance.
374,455
def get_closed_indices(self): state = self.conn.cluster.state() status = self.status() indices_metadata = set(state[][].keys()) indices_status = set(status[].keys()) return indices_metadata.difference(indices_status)
Get all closed indices.
374,456
def _get_gs_path(): path = os.environ.get("PATH", os.defpath) for dir in path.split(os.pathsep): for name in ("gs", "gs.exe", "gswin32c.exe"): g = os.path.join(dir, name) if os.path.exists(g): return g raise Exception("Ghostscript not found. path=%s" % str(path))
Guess where the Ghostscript executable is and return its absolute path name.
374,457
def update_where(self, res, depth=0, since=None, **kwargs): "Like update() but uses WHERE-style args" fetch = lambda: self._fetcher.fetch_all_latest(res, 0, kwargs, since=since) self._update(res, fetch, depth)
Like update() but uses WHERE-style args
374,458
def subst_path(self, path, target=None, source=None): if not SCons.Util.is_List(path): path = [path] def s(obj): try: get = obj.get except AttributeError: obj = SCons.Util.to_String_for_subst(obj) else: obj = get() return obj r = [] for p in path: if SCons.Util.is_String(p): p = self.subst(p, target=target, source=source, conv=s) if SCons.Util.is_List(p): if len(p) == 1: p = p[0] else: p = .join(map(SCons.Util.to_String_for_subst, p)) else: p = s(p) r.append(p) return r
Substitute a path list, turning EntryProxies into Nodes and leaving Nodes (and other objects) as-is.
374,459
def thermal_conductivity_Magomedov(T, P, ws, CASRNs, k_w=None): rs values consistently. Examples -------- >>> thermal_conductivity_Magomedov(293., 1E6, [.25], [], k_w=0.59827) 0.548654049375 References ---------- .. [1] Magomedov, U. B. "The Thermal Conductivity of Binary and Multicomponent Aqueous Solutions of Inorganic Substances at High Parameters of State." High Temperature 39, no. 2 (March 1, 2001): 221-26. doi:10.1023/A:1017518731726. k_w correlation must be providedAi']) sum1 += Ai*(ws[i] + 2E-4*ws[i]**3) return k_w*(1 - sum1) - 2E-8*P*T*sum(ws)
r'''Calculate the thermal conductivity of an aqueous mixture of electrolytes using the form proposed by Magomedov [1]_. Parameters are loaded by the function as needed. Function will fail if an electrolyte is not in the database. .. math:: \lambda = \lambda_w\left[ 1 - \sum_{i=1}^n A_i (w_i + 2\times10^{-4} w_i^3)\right] - 2\times10^{-8} PT\sum_{i=1}^n w_i Parameters ---------- T : float Temperature of liquid [K] P : float Pressure of the liquid [Pa] ws : array Weight fractions of liquid components other than water CASRNs : array CAS numbers of the liquid components other than water k_w : float Liquid thermal condiuctivity or pure water at T and P, [W/m/K] Returns ------- kl : float Liquid thermal condiuctivity, [W/m/K] Notes ----- Range from 273 K to 473 K, P from 0.1 MPa to 100 MPa. C from 0 to 25 mass%. Internal untis are MPa for pressure and weight percent. An example is sought for this function. It is not possible to reproduce the author's values consistently. Examples -------- >>> thermal_conductivity_Magomedov(293., 1E6, [.25], ['7758-94-3'], k_w=0.59827) 0.548654049375 References ---------- .. [1] Magomedov, U. B. "The Thermal Conductivity of Binary and Multicomponent Aqueous Solutions of Inorganic Substances at High Parameters of State." High Temperature 39, no. 2 (March 1, 2001): 221-26. doi:10.1023/A:1017518731726.
374,460
def all_pairs(seq1, seq2=None): if seq2 is None: seq2 = seq1 for item1 in seq1: for item2 in seq2: yield (item1, item2)
Yields all pairs drawn from ``seq1`` and ``seq2``. If ``seq2`` is ``None``, ``seq2 = seq1``. >>> stop_at.ed(all_pairs(xrange(100000), xrange(100000)), 8) ((0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (0, 7))
374,461
def set(self, prefix, url, obj): if not self.cache_dir: return filename = self._get_cache_file(prefix, url) try: os.makedirs(os.path.join(self.cache_dir, prefix)) except OSError: pass with open(filename, ) as file: pickle.dump(obj, file)
Add an object into the cache
374,462
def widget_from_django_field(cls, f, default=widgets.Widget): result = default internal_type = "" if callable(getattr(f, "get_internal_type", None)): internal_type = f.get_internal_type() if internal_type in cls.WIDGETS_MAP: result = cls.WIDGETS_MAP[internal_type] if isinstance(result, str): result = getattr(cls, result)(f) else: try: from django.contrib.postgres.fields import ArrayField, JSONField except ImportError: class ArrayField: pass class JSONField: pass if isinstance(f, ArrayField): return widgets.SimpleArrayWidget elif isinstance(f, JSONField): return widgets.JSONWidget return result
Returns the widget that would likely be associated with each Django type. Includes mapping of Postgres Array and JSON fields. In the case that psycopg2 is not installed, we consume the error and process the field regardless.
374,463
def floating_ip_disassociate(self, server_name, floating_ip): nt_ks = self.compute_conn server_ = self.server_by_name(server_name) server = nt_ks.servers.get(server_.__dict__[]) server.remove_floating_ip(floating_ip) return self.floating_ip_list()[floating_ip]
Disassociate a floating IP from server .. versionadded:: 2016.3.0
374,464
def arr_astype(arr_type): b>Hi4f8 def f_astype(arr): return arr.astype(arr_type) f_astype.__name__ = "arr_astype_" + str(arr_type) return f_astype
Change dtype of array. Parameters: arr_type : str, np.dtype Character codes (e.g. 'b', '>H'), type strings (e.g. 'i4', 'f8'), Python types (e.g. float, int) and numpy dtypes (e.g. np.uint32) are allowed. Returns: array : np.array
374,465
def _init_display(self): self._command([ self.CMD_SSD1306_DISPLAY_OFF, self.CMD_SSD1306_SET_SCROLL_DEACTIVE, self.CMD_SSD1306_SET_MULTIPLEX_RATIO, 0x3F, self.CMD_SSD1306_SET_DISPLAY_OFFSET, 0x00, self.CMD_SSD1306_SET_DISPLAY_START_LINE, self.CMD_SSD1306_SET_COM_PINS, (0x02 | 0x10), self.CMD_SSD1306_SET_CONTRAST, 0x7F, self.CMD_SSD1306_ENTIRE_DISPLAY_ON_0, self.CMD_SSD1306_NORMAL_DISPLAY, self.CMD_SSD1306_SET_CLOCK_DIVIDE_RATIO, 0x80, self.CMD_SSD1306_CHARGE_PUMP, 0x14, self.CMD_SSD1306_SET_MEM_ADDR_MODE, 0x01, self.CMD_SSD1306_SCAN_DIRECTION_INC if self._mirror_v else self.CMD_SSD1306_SCAN_DIRECTION_DEC, self.CMD_SSD1306_SET_SEGMENT_REMAP_0 if self._mirror_h else self.CMD_SSD1306_SET_SEGMENT_REMAP_1, ])
! \~english Initialize the SSD1306 display chip \~chinese 初始化SSD1306显示芯片
374,466
def _discovery(self): data = self.cluster_nodes() self.cluster_name = data["cluster_name"] for _, nodedata in list(data["nodes"].items()): server = nodedata[].replace("]", "").replace("inet[", "http:/") if server not in self.servers: self.servers.append(server) self._init_connection() return self.servers
Find other servers asking nodes to given server
374,467
def get(self, key, default=NoDefault): key = normalize_key(key) if default is NoDefault: defaults = [] else: defaults = [default] for options in self.options: try: value = options[key] except KeyError: continue if isinstance(value, Default): defaults.append(value.value) continue else: return value if defaults: return defaults[0] return NoDefault
Retrieve a value from its key. Retrieval steps are: 1) Normalize the key 2) For each option group: a) Retrieve the value at that key b) If no value exists, continue c) If the value is an instance of 'Default', continue d) Otherwise, return the value 3) If no option had a non-default value for the key, return the first Default() option for the key (or :arg:`default`).
374,468
def drilldown_tree(self, session=None, json=False, json_fields=None): if not session: session = object_session(self) return self.get_tree( session, json=json, json_fields=json_fields, query=self._drilldown_query )
This method generate a branch from a tree, begining with current node. For example: node7.drilldown_tree() .. code:: level Nested sets example 1 1(1)22 --------------------- _______________|_________|_________ | | | | | | 2 2(2)5 6(4)11 | 12(7)21 | | ^ | ^ | 3 3(3)4 7(5)8 9(6)10 | 13(8)16 17(10)20 | | | | | 4 | 14(9)15 18(11)19 | | | --------------------- Example in tests: * :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_drilldown_tree`
374,469
def authenticate(self): if request.headers.get(, ).startswith(): in_token = base64.b64decode(request.headers[][10:]) try: creds = current_app.extensions[][] except KeyError: raise RuntimeError() ctx = gssapi.SecurityContext(creds=creds, usage=) out_token = ctx.step(in_token) if ctx.complete: username = ctx._inquire(initiator_name=True).initiator_name return str(username), out_token return None, None
Attempts to authenticate the user if a token was provided.
374,470
def make_gui(self): self.option_window = Toplevel() self.option_window.protocol("WM_DELETE_WINDOW", self.on_exit) self.canvas_frame = tk.Frame(self, height=500) self.option_frame = tk.Frame(self.option_window, height=300) self.canvas_frame.pack(side=tk.LEFT, fill=tk.BOTH, expand=True) self.option_frame.pack(side=tk.RIGHT, fill=None, expand=False) self.make_options_frame() self.make_canvas_frame() self.disable_singlecolor()
Setups the general structure of the gui, the first function called
374,471
def check_if_ready(self): try: results = self.manager.check(self.results_id) except exceptions.ResultsNotReady as e: self._is_ready = False self._not_ready_exception = e except exceptions.ResultsExpired as e: self._is_ready = True self._expired_exception = e else: failures = self.get_failed_requests(results) members = self.get_new_members(results) self.results = self.__class__.Results(list(members), list(failures)) self._is_ready = True self._not_ready_exception = None
Check for and fetch the results if ready.
374,472
def update_distant_reference(self, ref): self.validate_reference_data(ref["data"]) self._zotero_lib.update_item(ref)
Validate and update the reference in Zotero. Existing fields not present will be left unmodified.
374,473
def show_tracebacks(self): if self.broker.tracebacks: print(file=self.stream) print("Tracebacks:", file=self.stream) for t in self.broker.tracebacks.values(): print(t, file=self.stream)
Show tracebacks
374,474
def get_release_environment(self, project, release_id, environment_id): route_values = {} if project is not None: route_values[] = self._serialize.url(, project, ) if release_id is not None: route_values[] = self._serialize.url(, release_id, ) if environment_id is not None: route_values[] = self._serialize.url(, environment_id, ) response = self._send(http_method=, location_id=, version=, route_values=route_values) return self._deserialize(, response)
GetReleaseEnvironment. [Preview API] Get a release environment. :param str project: Project ID or project name :param int release_id: Id of the release. :param int environment_id: Id of the release environment. :rtype: :class:`<ReleaseEnvironment> <azure.devops.v5_1.release.models.ReleaseEnvironment>`
374,475
def get_filter_func(patterns, prefix): prefix_len = len(prefix.strip(os.path.sep)) + 1 if any(i[1] for i in patterns): def _exclusion_func(tarinfo): name = tarinfo.name[prefix_len:] exclude = False for match_str, is_negative in patterns: if is_negative: if not exclude: continue if match_str.match(name) is not None: exclude = False elif exclude: continue elif match_str.match(name) is not None: exclude = True if exclude: return None return tarinfo else: exclusions = [i[0] for i in patterns] def _exclusion_func(tarinfo): name = tarinfo.name[prefix_len:] if any(match_str.match(name) is not None for match_str in exclusions): return None return tarinfo return _exclusion_func
Provides a filter function that can be used as filter argument on ``tarfile.add``. Generates the filter based on the patterns and prefix provided. Patterns should be a list of tuples. Each tuple consists of a compiled RegEx pattern and a boolean, indicating if it is an ignore entry or a negative exclusion (i.e. an exemption from exclusions). The prefix is used to match relative paths inside the tar file, and is removed from every entry passed into the functions. Note that all names passed into the returned function must be paths under the provided prefix. This condition is not checked! :param patterns: List of patterns and negative indicator. :type patterns: list[(__RegEx, bool)] :param prefix: Prefix to strip from all file names passed in. Leading and trailing path separators are removed. :type prefix: unicode | str :return: tarinfo.TarInfo -> tarinfo.TarInfo | NoneType
374,476
def add_header_info(data_api, struct_inflator): struct_inflator.set_header_info(data_api.r_free, data_api.r_work, data_api.resolution, data_api.title, data_api.deposition_date, data_api.release_date, data_api.experimental_methods)
Add ancilliary header information to the structure. :param data_api the interface to the decoded data :param struct_inflator the interface to put the data into the client object
374,477
def cmdloop(self, intro=None): self.preloop() if self.use_rawinput and self.completekey: try: import readline self.old_completer = readline.get_completer() readline.set_completer(self.complete) readline.parse_and_bind(self.completekey + ) except ImportError: pass try: if intro is not None: self.intro = intro if self.intro: self.stdout.write(str(self.intro)+"\n") stop = None while not stop: if self.cmdqueue: line = self.cmdqueue.pop(0) else: if self.use_rawinput: try: if sys.version_info[0] == 2: line = raw_input(self.prompt) else: line = input(self.prompt) except EOFError: line = except KeyboardInterrupt: line = else: self.stdout.write(self.prompt) self.stdout.flush() line = self.stdin.readline() if not len(line): line = else: line = line.rstrip() line = self.precmd(line) stop = self.onecmd(line) stop = self.postcmd(stop, line) self.postloop() finally: if self.use_rawinput and self.completekey: try: import readline readline.set_completer(self.old_completer) except ImportError: pass
Override the command loop to handle Ctrl-C.
374,478
def accessibles(self, roles=None): return [org[] for org in self.get_accessibles(self.request, roles=roles)]
Returns the list of *slugs* for which the accounts are accessibles by ``request.user`` filtered by ``roles`` if present.
374,479
def update_firewall_rule(firewall_rule, protocol=None, action=None, name=None, description=None, ip_version=None, source_ip_address=None, destination_ip_address=None, source_port=None, destination_port=None, shared=None, enabled=None, profile=None): * conn = _auth(profile) return conn.update_firewall_rule(firewall_rule, protocol, action, name, description, ip_version, source_ip_address, destination_ip_address, source_port, destination_port, shared, enabled)
Update a firewall rule CLI Example: .. code-block:: bash salt '*' neutron.update_firewall_rule firewall_rule protocol=PROTOCOL action=ACTION name=NAME description=DESCRIPTION ip_version=IP_VERSION source_ip_address=SOURCE_IP_ADDRESS destination_ip_address=DESTINATION_IP_ADDRESS source_port=SOURCE_PORT destination_port=DESTINATION_PORT shared=SHARED enabled=ENABLED :param firewall_rule: ID or name of firewall rule to update. :param protocol: Protocol for the firewall rule, choose "tcp","udp","icmp" or "None". (Optional) :param action: Action for the firewall rule, choose "allow" or "deny". (Optional) :param name: Name for the firewall rule. (Optional) :param description: Description for the firewall rule. (Optional) :param ip_version: IP protocol version, default: 4. (Optional) :param source_ip_address: Source IP address or subnet. (Optional) :param destination_ip_address: Destination IP address or subnet. (Optional) :param source_port: Source port (integer in [1, 65535] or range in a:b). (Optional) :param destination_port: Destination port (integer in [1, 65535] or range in a:b). (Optional) :param shared: Set shared to True, default: False. (Optional) :param enabled: To enable this rule, default: True. (Optional) :param profile: Profile to build on (Optional)
374,480
def get_version(brain_or_object): obj = get_object(brain_or_object) if not is_versionable(obj): return None return getattr(aq_base(obj), "version_id", 0)
Get the version of the current object :param brain_or_object: A single catalog brain or content object :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain :returns: The current version of the object, or None if not available :rtype: int or None
374,481
def removeSessionWithKey(self, key): self.store.query( PersistentSession, PersistentSession.sessionKey == key).deleteFromStore()
Remove a persistent session, if it exists. @type key: L{bytes} @param key: The persistent session identifier.
374,482
def _method_complete(self, result): if isinstance(result, PrettyTensor): self._head = result return self elif isinstance(result, Loss): return result elif isinstance(result, PrettyTensorTupleMixin): self._head = result[0] return result else: self._head = self._head.with_tensor(result) return self
Called after an extention method with the result.
374,483
def _load_actor_from_local(self, driver_id, function_descriptor): module_name, class_name = (function_descriptor.module_name, function_descriptor.class_name) try: module = importlib.import_module(module_name) actor_class = getattr(module, class_name) if isinstance(actor_class, ray.actor.ActorClass): return actor_class._modified_class else: return actor_class except Exception: logger.exception( "Failed to load actor_class %s.".format(class_name)) raise Exception( "Actor {} failed to be imported from local code.".format( class_name))
Load actor class from local code.
374,484
def locked_get(self): credentials = None _helpers.validate_file(self._filename) try: f = open(self._filename, ) content = f.read() f.close() except IOError: return credentials try: credentials = client.Credentials.new_from_json(content) credentials.set_store(self) except ValueError: pass return credentials
Retrieve Credential from file. Returns: oauth2client.client.Credentials Raises: IOError if the file is a symbolic link.
374,485
def _wraptext(self, text, indent=0, width=0): return .join(self._wrap(par, indent, width) for par in text)
Shorthand for '\n'.join(self._wrap(par, indent, width) for par in text).
374,486
def customFilter(self, filterFunc):
    """Apply a custom predicate and return the matching elements.

    @param filterFunc <lambda/function> - callable receiving one item;
        a truthy return keeps the item, a falsy return drops it.

    @return - A QueryableList of the same concrete type containing only
        the items for which ``filterFunc`` returned truthy.
    """
    matches = self.__class__()
    for element in filter(filterFunc, self):
        matches.append(element)
    return matches
customFilter - Apply a custom filter to elements and return a QueryableList of matches @param filterFunc <lambda/function> - A lambda/function that is passed an item, and returns True if the item matches (will be returned), otherwise False. @return - A QueryableList object of the same type, with only the matching objects returned.
374,487
def clear_processes(self):
    """Empty this snapshot: clear every L{Process} (which drops its
    L{Thread} and L{Module} objects) and reset the process registry.
    """
    for process in self.iter_processes():
        process.clear()
    self.__processDict = dict()
Removes all L{Process}, L{Thread} and L{Module} objects in this snapshot.
374,488
def populate_field_list(self, excluded_fields=None):
    """Repopulate the field list widget with the layer's numeric fields.

    :param excluded_fields: List of field names that should be excluded.
        NOTE(review): after defaulting, this parameter is never consulted
        in the visible body -- confirm whether filtering was intended.
    :type excluded_fields: list
    """
    if excluded_fields is None:
        excluded_fields = []
    # Drop any previously listed fields before repopulating.
    self.field_list.clear()
    for field in self.layer.fields():
        # Only fields with a numeric QVariant type are offered.
        if field.type() not in qvariant_numbers:
            continue
        # Passing the list widget as parent already inserts the item;
        # the explicit addItem() below appears redundant -- confirm.
        field_item = QListWidgetItem(self.field_list)
        field_item.setFlags(
            Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled)
        field_item.setData(Qt.UserRole, field.name())
        field_item.setText(field.name())
        self.field_list.addItem(field_item)
Helper to add the layer's fields to the list. :param excluded_fields: List of fields to be excluded. :type excluded_fields: list
374,489
def runif(self, seed=None):
    """Generate one column of uniform random numbers in [0, 1) with the
    same data layout (row count) as this frame.

    :param int seed: seed for the random number generator; None maps to
        the backend sentinel -1 (non-deterministic seed).
    :returns: single-column H2OFrame of doubles sampled uniformly from [0,1).
    """
    rng_seed = -1 if seed is None else seed
    frame = H2OFrame._expr(expr=ExprNode("h2o.runif", self, rng_seed))
    # Pre-populate the cached shape: one column, same row count as source.
    frame._ex._cache.ncols = 1
    frame._ex._cache.nrows = self.nrow
    return frame
Generate a column of random numbers drawn from a uniform distribution [0,1) and having the same data layout as the source frame. :param int seed: seed for the random number generator. :returns: Single-column H2OFrame filled with doubles sampled uniformly from [0,1).
374,490
def collect_ansible_classes():
    """Run the playbook via main() under a trace hook and record, in the
    global ANSIBLE_CLASSES dict, the class of every object whose method
    calls are observed and whose class looks ansible-related.

    NOTE(review): several string literals below were stripped during
    extraction (the empty comparisons/subscripts are syntax errors) --
    presumably the original compared ``event`` to 'call', looked up
    'self' in the frame locals, and matched 'ansible' in the class repr.
    Restore the literals before use. Python 2 code (print statement).
    """
    def trace_calls(frame, event, arg):
        # Only interested in one trace event kind (literal stripped).
        if event != :
            return
        try:
            _locals = inspect.getargvalues(frame).locals
            if not in _locals:
                return
            _class = _locals[].__class__
            _class_repr = repr(_class)
            if not in _class_repr:
                return
            # Key on the class object itself; the value is a dummy flag.
            ANSIBLE_CLASSES[_class] = True
        except (AttributeError, TypeError):
            pass
    print "Gathering classes"
    sys.settrace(trace_calls)
    main()
Run playbook and collect classes of ansible that are run.
374,491
def logspace(self,bins=None,units=None,conversion_function=convert_time,resolution=None,end_at_end=True):
    """Return logarithmically spaced bin edges covering this range.

    An explicit ``bins`` sequence (list or ndarray) is returned as-is and
    overrides ``resolution``; when ``bins`` is None the bin count is
    derived from ``resolution`` via self.len().

    NOTE(review): the right operand of ``units != `` below was stripped
    during extraction (currently a syntax error) -- the original compared
    ``units`` against some sentinel unit name. Restore before use.
    """
    # An explicit bins sequence short-circuits everything else.
    if type(bins) in [list, np.ndarray]:
        return bins
    # Convert the range limits into the requested units.
    min = conversion_function(self.min,from_units=self.units,to_units=units)
    max = conversion_function(self.max,from_units=self.units,to_units=units)
    if units is None:
        units = self.units
    if resolution is None:
        resolution = 1.0
    if bins is None:
        bins = self.len(resolution=resolution,units=units,conversion_function=conversion_function)
    # When ending at the range end, generate one extra edge and drop it.
    if units != and end_at_end:
        return np.logspace(np.log10(min),np.log10(max),bins+1)[:-1]
    return np.logspace(np.log10(min),np.log10(max),bins)
If ``bins`` is given explicitly, it overrides ``resolution``.
374,492
def send_await(self, msg, deadline=None):
    """Send ``msg`` and block until exactly one reply arrives.

    Like :meth:`send_async`, but waits synchronously (`persist=False`)
    for a single reply delivered within `deadline` seconds.

    :param mitogen.core.Message msg: The message to send.
    :param float deadline: Seconds before timing out, or None to wait
        indefinitely.
    :returns: The deserialized (unpickled) reply payload.
    :raises TimeoutError: No reply arrived before `deadline` elapsed.
    """
    receiver = self.send_async(msg)
    # Blocks here; receiver.get raises on timeout.
    response = receiver.get(deadline)
    data = response.unpickle()
    # NOTE(review): the debug format-string literal was stripped during
    # extraction -- this line is currently a syntax error.
    _vv and IOLOG.debug(, self, data)
    return data
Like :meth:`send_async`, but expect a single reply (`persist=False`) delivered within `deadline` seconds. :param mitogen.core.Message msg: The message. :param float deadline: If not :data:`None`, seconds before timing out waiting for a reply. :returns: Deserialized reply. :raises TimeoutError: No message was received and `deadline` passed.
374,493
def _head_object(s3_conn, bucket, key): try: return s3_conn.head_object(Bucket=bucket, Key=key) except botocore.exceptions.ClientError as e: if e.response[][] == : return None else: raise
Retrieve information about an object in S3 if it exists. Args: s3_conn (botocore.client.S3): S3 connection to use for operations. bucket (str): name of the bucket containing the key. key (str): name of the key to lookup. Returns: dict: S3 object information, or None if the object does not exist. See the AWS documentation for explanation of the contents. Raises: botocore.exceptions.ClientError: any error from boto3 other than key not found is passed through.
374,494
def getAllData(self, temp=True, accel=True, gyro=True):
    """! Read every enabled sensor and bundle the readings into one dict.

    @param temp: True - include the temperature reading under "temp"
    @param accel: True - include the accelerometer reading under "accel"
    @param gyro: True - include the gyroscope reading under "gyro"
    @return a dictionary with one key per enabled sensor
    @retval {} nothing was read (all sensors disabled)
    """
    readings = {}
    if temp:
        readings["temp"] = self.getTemp()
    if accel:
        # Scaled (non-raw) accelerometer data, matching the original call.
        readings["accel"] = self.getAccelData(raw=False)
    if gyro:
        readings["gyro"] = self.getGyroData()
    return readings
! Get all the available data. @param temp: True - Allow to return Temperature data @param accel: True - Allow to return Accelerometer data @param gyro: True - Allow to return Gyroscope data @return a dictionary data @retval {} Did not read any data @retval {"temp":32.3,"accel":{"x":0.45634,"y":0.2124,"z":1.334},"gyro":{"x":0.45634,"y":0.2124,"z":1.334}} Returned all data
374,495
def refresh(self):
    """Refetch this instance's data from the API and overwrite the local
    attributes with the server's current values.

    NOTE(review): the URL format string and every dictionary key in this
    method were stripped during extraction (the empty operands and
    subscripts below are syntax errors). Restore the literals before use.
    """
    response = requests.get( % (API_BASE_URL, self.name))
    attributes = response.json()
    # Rebuild the derived wrapper objects from the raw payload.
    self.ancestors = [Category(name) for name in attributes[]]
    self.contents = WikiText(attributes[], attributes[])
    self.description = attributes[]
    self.guides = []
    for guide in attributes[]:
        self.guides.append(Guide(guide[]))
    flags = dict(attributes[]).values()
    self.flags = [Flag.from_id(flag[]) for flag in flags]
    # Image is optional in the payload.
    self.image = Image(attributes[][]) if attributes[] else None
    self.locale = attributes[]
    self.title = attributes[]
Refetch instance data from the API.
374,496
def _raise_unrecoverable_error_payplug(self, exception):
    """Wrap ``exception`` in an exceptions.ClientError whose message says
    the error most likely originates on PayPlug's side.

    NOTE(review): the message string literals were stripped during
    extraction -- the concatenation below is currently a syntax error.

    :param exception: Exception that caused the ClientError.
    :type exception: Exception
    :raise exceptions.ClientError: always.
    """
    message = ( + repr(exception) + )
    raise exceptions.ClientError(message, client_exception=exception)
Raises an exceptions.ClientError with a message telling that the error probably comes from PayPlug. :param exception: Exception that caused the ClientError. :type exception: Exception :raise exceptions.ClientError
374,497
def GetLastKey(self, voice=1):
    """Return the most recent key signature (musical key, not an index).

    Searches backwards through the given voice for a KeyNode. Falls back
    to this object's own ``key`` attribute when the voice is missing or
    contains no key node, and returns None when neither exists.

    :param voice: index of the child voice to search (default 1).
    """
    voice_obj = self.GetChild(voice)
    if voice_obj is not None:
        key = BackwardSearch(KeyNode, voice_obj, 1)
        if key is not None:
            return key
    # The original duplicated this fallback verbatim in two branches
    # (voice missing vs. no key found); collapsed into one path here.
    if hasattr(self, "key"):
        return self.key
key as in musical key, not index
374,498
def receive_message(self, message, data):
    """Handle one multizone channel message; return True when handled.

    NOTE(review): several dictionary-key string literals below were
    stripped during extraction (the empty subscripts are syntax errors);
    the apparent intent of each lookup is noted inline -- confirm against
    the upstream source before use.

    :param message: the raw cast message (not inspected here).
    :param data: decoded payload; dispatched on data[MESSAGE_TYPE].
    :returns: True if the message type was recognised, else False.
    """
    if data[MESSAGE_TYPE] == TYPE_DEVICE_ADDED:
        # Presumably data['device']['deviceId'] and ['name'] -- confirm.
        uuid = data[][]
        name = data[][]
        self._add_member(uuid, name)
        return True
    if data[MESSAGE_TYPE] == TYPE_DEVICE_REMOVED:
        uuid = data[]
        self._remove_member(uuid)
        return True
    if data[MESSAGE_TYPE] == TYPE_DEVICE_UPDATED:
        # Updates are handled the same way as additions.
        uuid = data[][]
        name = data[][]
        self._add_member(uuid, name)
        return True
    if data[MESSAGE_TYPE] == TYPE_MULTIZONE_STATUS:
        # Full membership snapshot: diff against current members.
        members = data[][]
        members = \
            {member[]: member[] for member in members}
        removed_members = \
            list(set(self._members.keys())-set(members.keys()))
        added_members = list(set(members.keys())-set(self._members.keys()))
        _LOGGER.debug("(%s) Added members %s, Removed members: %s",
                      self._uuid, added_members, removed_members)
        for uuid in removed_members:
            self._remove_member(uuid)
        for uuid in added_members:
            self._add_member(uuid, members[uuid])
        # Copy the listener list so callbacks may unregister safely.
        for listener in list(self._status_listeners):
            listener.multizone_status_received()
        return True
    if data[MESSAGE_TYPE] == TYPE_SESSION_UPDATED:
        # Acknowledged but intentionally ignored.
        return True
    if data[MESSAGE_TYPE] == TYPE_CASTING_GROUPS:
        # Acknowledged but intentionally ignored.
        return True
    return False
Called when a multizone message is received.
374,499
def format_hexdump(arg):
    """Convert a bytes object to a hexdump string.

    Each output line is:
    <offset, 4-byte> <16 hex bytes separated by 1 space> <16 ascii chars>

    Reconstructed: the original's separator/padding/offset literals were
    stripped during extraction; the surviving "%s %s %s" template and the
    documented format above ground the choices below -- verify against
    the upstream source.

    :param arg: the bytes to dump.
    :returns: the multi-line hexdump as a single string.
    """
    line = ''
    for i in range(0, len(arg), 16):
        if i > 0:
            line += '\n'
        chunk = arg[i:i + 16]
        hex_chunk = hexlify(chunk).decode()
        # Re-group the hex digits into space-separated byte pairs.
        hex_line = ' '.join(hex_chunk[j:j + 2]
                            for j in range(0, len(hex_chunk), 2))
        # Pad a short final line so the ascii column stays aligned
        # (3 chars per byte minus the trailing separator).
        if len(hex_line) < (3 * 16) - 1:
            hex_line += ' ' * (((3 * 16) - 1) - len(hex_line))
        ascii_line = ''.join(_convert_to_ascii(x) for x in chunk)
        offset_line = '%04x' % i
        line += "%s %s %s" % (offset_line, hex_line, ascii_line)
    return line
Convert the bytes object to a hexdump. The output format will be: <offset, 4-byte> <16-bytes of output separated by 1 space> <16 ascii characters>