code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def scan_and_reimport(mod_type: str) -> List[Tuple[str, str]]:
    """Scan the folder for modules of *mod_type* and (re)load each one.

    Returns a list of (module_name, error_message) pairs for failures.
    """
    mod_enabled, mod_disabled = get_modules(mod_type)
    failures = []
    for name in mod_enabled + mod_disabled:
        # Already-imported modules are reloaded; new ones are loaded fresh.
        if name in sys.modules:
            message = safe_reload(sys.modules[name])
        else:
            message = safe_load(name)
        if message is not None:
            failures.append((name, message))
    return failures
Scans folder for modules.
def add_source(self, source):
    """Add a geocoding service to this instance.

    *source* is a (service_name, kwargs) pair; the named service class is
    looked up and instantiated with the given kwargs.
    """
    service_cls = self._get_service_by_name(source[0])
    self._sources.append(service_cls(**source[1]))
Add a geocoding service to this instance.
def _read_frame(self):
    """Headquarters for frame reading: dispatch on the configured engine."""
    reader_names = {
        'scapy': '_scapy_read_frame',
        'dpkt': '_dpkt_read_frame',
        'pyshark': '_pyshark_read_frame',
    }
    # Resolve lazily so only the selected reader attribute is touched.
    attr = reader_names.get(self._exeng, '_default_read_frame')
    return getattr(self, attr)()
Headquarters for frame reader.
def refs_to(cls, sha1, repo):
    """Return the shorthand names of all refs pointing to the given SHA1."""
    names = []
    for ref_name in repo.listall_references():
        symref = repo.lookup_reference(ref_name)
        # Resolve symbolic refs down to the commit they target.
        commit = repo.get(symref.resolve().target)
        if commit.hex == sha1:
            names.append(symref.shorthand)
    return names
Returns all refs pointing to the given SHA1.
def stmt_lambda_proc(self, inputstring, **kwargs):
    """Insert statement-lambda definitions above the lines that use them."""
    # One compiled word-boundary pattern per registered statement lambda.
    patterns = [
        compile_regex(r"\b%s\b" % (self.stmt_lambda_name(i),))
        for i in range(len(self.stmt_lambdas))
    ]
    lines = []
    for line in inputstring.splitlines():
        for idx, pattern in enumerate(patterns):
            if pattern.search(line):
                # Emit the definition at the line's indent level, then
                # continue matching against the dedented remainder.
                indent, line = split_leading_indent(line)
                lines.append(indent + self.stmt_lambdas[idx])
        lines.append(line)
    return "\n".join(lines)
Add statement lambda definitions.
def pydeps2reqs(deps):
    """Convert a pydeps dependency mapping into a requirements listing."""
    reqs = defaultdict(set)
    for k, v in list(deps.items()):
        p = v['path']
        # NOTE(review): sys.real_prefix exists only under legacy virtualenv —
        # confirm this module always runs inside one.
        if p and not p.startswith(sys.real_prefix):
            # Only keep site-packages entries from the active environment.
            if p.startswith(sys.prefix) and 'site-packages' in p:
                if not p.endswith('.pyd'):
                    if '/win32/' in p.replace('\\', '/'):
                        # pywin32 modules collapse into one 'win32' requirement.
                        reqs['win32'] |= set(v['imported_by'])
                    else:
                        # Use the top-level package name as the requirement name.
                        name = k.split('.', 1)[0]
                        if name not in skiplist:
                            reqs[name] |= set(v['imported_by'])
    if '_dummy' in reqs:
        del reqs['_dummy']
    return '\n'.join(dep2req(name, reqs[name]) for name in sorted(reqs))
Convert a deps instance into requirements.
def _write_reads(reads, prefix):
    """Write the .ma, .fasta and real-position .txt files for *reads*."""
    out_ma = prefix + ".ma"
    out_fasta = prefix + ".fasta"
    out_real = prefix + ".txt"
    with open(out_ma, 'w') as ma_handle:
        # NOTE(review): every print() uses end="" so records are NOT
        # newline-separated — confirm this is the intended file format.
        print("id\tseq\tsample", file=ma_handle, end="")
        with open(out_fasta, 'w') as fa_handle:
            with open(out_real, 'w') as read_handle:
                for idx, r in enumerate(reads):
                    # Read names encode metadata fields separated by '_'.
                    info = r.split("_")
                    print("seq_%s\t%s\t%s" % (idx, reads[r][0], reads[r][1]),
                          file=ma_handle, end="")
                    print(">seq_%s\n%s" % (idx, reads[r][0]),
                          file=fa_handle, end="")
                    print("%s\t%s\t%s\t%s\t%s\t%s\t%s" %
                          (idx, r, reads[r][0], reads[r][1],
                           info[1], info[2], info[3]),
                          file=read_handle, end="")
Write fasta file, ma file and real position
def cross_v3(vec_a, vec_b):
    """Return the cross product of *vec_a* and *vec_b* as a new Vec3."""
    cx = vec_a.y * vec_b.z - vec_a.z * vec_b.y
    cy = vec_a.z * vec_b.x - vec_a.x * vec_b.z
    cz = vec_a.x * vec_b.y - vec_a.y * vec_b.x
    return Vec3(cx, cy, cz)
Return the cross product of vec_a and vec_b.
def purge_portlets(portal):
    """Remove listed portlets from both columns, then re-import the profile."""
    logger.info("Purging portlets ...")

    def remove_portlets(context_portlet):
        # Walk the portlet mapping of one column and delete only the
        # portlets explicitly listed in PORTLETS_TO_PURGE.
        mapping = portal.restrictedTraverse(context_portlet)
        for key in mapping.keys():
            if key not in PORTLETS_TO_PURGE:
                logger.info("Skipping portlet: '{}'".format(key))
                continue
            logger.info("Removing portlet: '{}'".format(key))
            del mapping[key]

    remove_portlets("++contextportlets++plone.leftcolumn")
    remove_portlets("++contextportlets++plone.rightcolumn")
    # Re-run the portlets import step to restore the default configuration.
    setup = portal.portal_setup
    setup.runImportStepFromProfile(profile, 'portlets')
    logger.info("Purging portlets [DONE]")
Remove old portlets, leaving only the Navigation portlet.
def _from_dict(cls, _dict):
    """Initialize a MetricResponse object from a json dictionary."""
    args = {}
    if 'aggregations' in _dict:
        # Recursively deserialize each aggregation entry.
        args['aggregations'] = [
            MetricAggregation._from_dict(x) for x in (_dict.get('aggregations'))
        ]
    return cls(**args)
Initialize a MetricResponse object from a json dictionary.
def _writeToTransport(self, data):
    """Write *data* to the transport and reschedule the heartbeat."""
    self.transport.writeData(data)
    # Any outgoing traffic counts as activity, so push the heartbeat back.
    self.heartbeater.schedule()
Frame the array-like thing and write it.
def mapping_to_str(mapping):
    """Serialize *mapping* as "<k1=v1,k2=v2,...>" with per-key header encoding."""
    entries = []
    for key, value in mapping.items():
        entries.append(key + "=" + serialize_for_header(key, value))
    return "<" + ",".join(entries) + ">"
Convert mapping to string
def _update_throughput(self, tablename, read, write, index):
    """Update the throughput on a table or global index and wait until done.

    A value of "*" (or anything resolving to <= 0) keeps the current
    read/write throughput.
    """
    def get_desc():
        # Always refresh so we observe the table's live status.
        desc = self.describe(tablename, refresh=True, require=True)
        if index is not None:
            return desc.global_indexes[index]
        return desc
    desc = get_desc()

    def num_or_star(value):
        return 0 if value == "*" else resolve(value)
    read = num_or_star(read)
    write = num_or_star(write)
    # Non-positive means "keep the existing throughput".
    if read <= 0:
        read = desc.read_throughput
    if write <= 0:
        write = desc.write_throughput
    throughput = Throughput(read, write)
    kwargs = {}
    if index:
        kwargs["global_indexes"] = {index: throughput}
    else:
        kwargs["throughput"] = throughput
    self.connection.update_table(tablename, **kwargs)
    # Poll until DynamoDB finishes applying the update.
    desc = get_desc()
    while desc.status == "UPDATING":
        time.sleep(5)
        desc = get_desc()
Update the throughput on a table or index
def run(self):
    """Run the database seeds."""
    # Register the factory for User, then create 50 seeded users.
    self.factory.register(User, self.users_factory)
    self.factory(User, 50).create()
Run the database seeds.
def validate_auth_mechanism(option, value):
    """Validate the authMechanism URI option.

    Accepts any mechanism in MECHANISMS, plus 'CRAM-MD5'.
    """
    # NOTE(review): 'CRAM-MD5' is accepted but omitted from the error
    # message listing — confirm that is intentional.
    if value not in MECHANISMS and value != 'CRAM-MD5':
        raise ValueError("%s must be in %s" % (option, tuple(MECHANISMS)))
    return value
Validate the authMechanism URI option.
def _resolve_user_group_names(opts):
    """Resolve user and group names in uid=/gid= mount opts to numeric ids.

    Other options pass through untouched, except that octal-escaped
    spaces ('\\040') are normalized in every option.
    """
    name_id_opts = {'uid': 'user.info', 'gid': 'group.info'}
    for ind, opt in enumerate(opts):
        # Split once instead of three times per option.
        parts = opt.split('=')
        _param = parts[0]
        # Guard on len(parts): the original indexed parts[1] unconditionally
        # and crashed on a bare 'uid'/'gid' option without '='.
        if _param in name_id_opts and len(parts) > 1:
            _givenid = parts[1]
            _id = _givenid
            # Only look the name up when it is not already numeric.
            if not re.match('[0-9]+$', _givenid):
                _info = __salt__[name_id_opts[_param]](_givenid)
                if _info and _param in _info:
                    _id = _info[_param]
            opts[ind] = _param + '=' + six.text_type(_id)
        opts[ind] = opts[ind].replace('\\040', '\\ ')
    return opts
Resolve user and group names in related opts
def btc_is_singlesig_segwit(privkey_info):
    """Is the given key bundle a p2sh-p2wpkh (single-sig segwit) key bundle?"""
    try:
        jsonschema.validate(privkey_info, PRIVKEY_MULTISIG_SCHEMA)
        # More than one key means a true multisig bundle, not single-sig.
        if len(privkey_info['private_keys']) > 1:
            return False
        return privkey_info.get('segwit', False)
    except ValidationError:
        # Not shaped like a multisig/segwit bundle at all.
        return False
Is the given key bundle a p2sh-p2wpkh key bundle?
def _key_to_pb(self, pb):
    """Internal helper to copy the key into a protobuf.

    When no key is set yet, an incomplete reference (kind only) is
    written instead.
    """
    key = self._key
    if key is None:
        # No key assigned: emit an incomplete reference for this kind.
        pairs = [(self._get_kind(), None)]
        ref = key_module._ReferenceFromPairs(pairs, reference=pb.mutable_key())
    else:
        ref = key.reference()
        pb.mutable_key().CopyFrom(ref)
    group = pb.mutable_entity_group()
    # Only complete keys (with an id) contribute an entity-group element.
    if key is not None and key.id():
        elem = ref.path().element(0)
        if elem.id() or elem.name():
            group.add_element().CopyFrom(elem)
Internal helper to copy the key into a protobuf.
def state(self, state: str) -> None:
    """Update the state of the event and notify every registered callback."""
    self._state = state
    # Fire all subscribers so observers see the new state.
    for notify in self._callbacks:
        notify()
Update state of event.
def print_with_header(header, message, color, indent=0):
    """Print a blank line, then an indented colorized header and message.

    Use one of the wrapper functions below for printing, not this one.
    """
    print()
    padding = ' ' * indent
    line = padding + color + BOLD + header + ENDC + color + message + ENDC
    print(line)
Use one of the functions below for printing, not this one.
def format_measure(measure):
    """Get format and units for data coming from profiler task.

    Integers are returned as text unchanged; floats are scaled into
    ns/us/ms/sec/min/hours and formatted.
    """
    measure = abs(measure)
    if isinstance(measure, int):
        return to_text_string(measure)
    # NOTE(review): values <= 1e-9 fall through every branch and are
    # returned unformatted — confirm this is intended.
    if 1.e-9 < measure <= 1.e-6:
        measure = u"{0:.2f} ns".format(measure / 1.e-9)
    elif 1.e-6 < measure <= 1.e-3:
        measure = u"{0:.2f} us".format(measure / 1.e-6)
    elif 1.e-3 < measure <= 1:
        measure = u"{0:.2f} ms".format(measure / 1.e-3)
    elif 1 < measure <= 60:
        measure = u"{0:.2f} sec".format(measure)
    elif 60 < measure <= 3600:
        # NOTE(review): divmod by 3600 always yields m == 0 in this range,
        # so the divmod-by-60 branch below is what actually runs — looks
        # suspicious; confirm against the upstream project.
        m, s = divmod(measure, 3600)
        if s > 60:
            m, s = divmod(measure, 60)
            s = to_text_string(s).split(".")[-1]
        measure = u"{0:.0f}.{1:.2s} min".format(m, s)
    else:
        h, m = divmod(measure, 3600)
        if m > 60:
            m /= 60
        measure = u"{0:.0f}h:{1:.0f}min".format(h, m)
    return measure
Get format and units for data coming from profiler task.
def atomic_write(path, mode):
    """Writes to path atomically, by writing to temp file and renaming it.

    Context manager: yields the open file handle; the rename only happens
    after the with-block completes without raising.
    """
    # Unique temp name so concurrent writers never collide.
    tmp_path = "%s%s_%s" % (path, constants.INCOMPLETE_SUFFIX, uuid.uuid4().hex)
    with tf.io.gfile.GFile(tmp_path, mode) as file_:
        yield file_
    tf.io.gfile.rename(tmp_path, path, overwrite=True)
Writes to path atomically, by writing to temp file and renaming it.
def rigthgen(self, value=0):
    """Generate rows to fill right pixels in int mode.

    Yields, forever, a fresh array of nplanes_right * width entries
    filled with *value*.
    """
    while True:
        yield self.newarray(self.nplanes_right * self.width, value)
Generate rows to fill right pixels in int mode
def __create_file_name(self, message_no):
    """Build '<output_prefix>_<message_no>.xml' inside the current directory."""
    filename = '{0}_{1}.xml'.format(self.output_prefix, message_no)
    return os.path.join(os.getcwd(), filename)
Create the filename to save to
def purge_bad_timestamp_files(file_list):
    """Given a list of image files, find bad frames, remove them and modify file_list.

    Deletes every frame up to and including the last bad timestamp, and
    returns the trimmed list.

    Raises:
        Exception: if a bad frame occurs at or beyond MAX_INITIAL_BAD_FRAMES.
    """
    MAX_INITIAL_BAD_FRAMES = 15
    bad_ts = Kinect.detect_bad_timestamps(Kinect.timestamps_from_file_list(file_list))
    if not bad_ts:
        return file_list
    last_bad = max(bad_ts)
    if last_bad >= MAX_INITIAL_BAD_FRAMES:
        # Keep the message in sync with the constant instead of hard-coding 15.
        raise Exception('Only %d initial bad frames are allowed, but last bad frame is %d'
                        % (MAX_INITIAL_BAD_FRAMES, last_bad))
    for i in range(last_bad + 1):
        os.remove(file_list[i])
    return file_list[last_bad + 1:]
Given a list of image files, find bad frames, remove them and modify file_list
def _get_geneid2nt(nts):
    """Map GeneID -> namedtuple, keeping the first occurrence and reporting dups."""
    geneid2nt = {}
    for ntd in nts:
        if ntd.GeneID in geneid2nt:
            # Duplicates are reported but never overwrite the first entry.
            print("DUPLICATE GeneID FOUND {N:9} {SYM}".format(N=ntd.GeneID, SYM=ntd.Symbol))
        else:
            geneid2nt[ntd.GeneID] = ntd
    return geneid2nt
Get geneid2nt given a list of namedtuples.
def once(ctx, name):
    """Run kibitzr checks once and exit with the application's status code."""
    from kibitzr.app import Application
    app = Application()
    # Propagate the run result as the process exit code.
    sys.exit(app.run(once=True, log_level=ctx.obj['log_level'], names=name))
Run kibitzr checks once and exit
def load_genotypes(self):
    """This really just initializes the file by opening it up.

    Opens the (optionally gzipped) tped file, then applies missingness
    filters.
    """
    if DataParser.compressed_pedigree:
        # NOTE(review): gzip opens in binary mode while the plain file
        # opens in text mode — confirm downstream readers handle both.
        self.genotype_file = gzip.open("%s.gz" % self.tped_file, 'rb')
    else:
        self.genotype_file = open(self.tped_file)
    self.filter_missing()
This really just initializes the file by opening it up.
def process_request(self, request_object):
    """Fetch the requested entity and wrap it in a success response."""
    entity = request_object.entity_cls.get(request_object.identifier)
    return ResponseSuccess(Status.SUCCESS, entity)
Fetch Resource and return Entity
def write(write_entry: FILE_WRITE_ENTRY):
    """Writes the contents of the specified file entry to its destination path."""
    output_path = environ.paths.clean(write_entry.path)
    # Ensure the parent directory exists before writing.
    make_output_directory(output_path)
    writer.write_file(output_path, write_entry.contents)
Writes the contents of the specified file entry to its destination path.
def install_timers(config, context):
    """Create and start the timers specified by the plugin configuration.

    Returns the started Timer objects so callers can cancel them later.
    """
    timers = []
    if config.get('capture_timeout_warnings'):
        # Warn when a configured fraction of the Lambda time budget is
        # consumed, and raise an error 0.5s before the actual timeout.
        timeout_threshold = config.get('timeout_warning_threshold')
        time_remaining = context.get_remaining_time_in_millis() / 1000
        timers.append(Timer(time_remaining * timeout_threshold,
                            timeout_warning, (config, context)))
        timers.append(Timer(max(time_remaining - .5, 0), timeout_error, [config]))
    if config.get('capture_memory_warnings'):
        timers.append(Timer(.5, memory_warning, (config, context)))
    for t in timers:
        t.start()
    return timers
Create the timers as specified by the plugin configuration.
def keys_to_string(data):
    """Function to convert all the unicode keys in string keys.

    Recurses through nested dicts, re-inserting each value under the
    utf-8 encoded form of its key.
    """
    if isinstance(data, dict):
        for key in list(data.keys()):
            if isinstance(key, six.string_types):
                value = data[key]
                # Recurse first so nested dict keys are converted too.
                val = keys_to_string(value)
                del data[key]
                # NOTE(review): on Python 3 this turns str keys into bytes
                # keys — presumably written for Python 2; confirm.
                data[key.encode("utf8", "ignore")] = val
    return data
Function to convert all the unicode keys in string keys
def _get_technologies():
    """Return a formatted summary string of connman's known technologies."""
    tech = ''
    technologies = pyconnman.ConnManager().get_technologies()
    for path, params in technologies:
        # Powered/Connected come back as ints; render them as booleans.
        tech += '{0}\n\tName = {1}\n\tType = {2}\n\tPowered = {3}\n\tConnected = {4}\n'.format(
            path, params['Name'], params['Type'], params['Powered'] == 1, params['Connected'] == 1)
    return tech
Returns the technologies of connman
def can_write(self):
    """Check whether the current user may modify the context (write the field)."""
    sm = getSecurityManager()
    if sm.checkPermission(permissions.ModifyPortalContent, self.context):
        return True
    return False
Check if the field is writeable
def _no_answer_do_retry(self, pk, pattern):
    """Resend a packet that we have not gotten an answer to yet."""
    logger.info('Resending for pattern %s', pattern)
    # Mark as a resend so the link keeps waiting for the same reply pattern.
    self.send_packet(pk, expected_reply=pattern, resend=True)
Resend packets that we have not gotten answers to
def ib_group_member_list(self, group_id):
    """GET the membership list of an IB group; return (status_code, response)."""
    req_hook = 'pod/v1/admin/group/' + group_id + '/membership/list'
    status_code, response = self.__rest__.GET_query(req_hook, None)
    self.logger.debug('%s: %s' % (status_code, response))
    return status_code, response
ib group member list
def deserialize(self, value, **kwargs):
    """Deserialize every item of the list.

    Scalars are promoted to single-item lists when allow_scalar is set.
    Item-level failures are collected (tagged with their index) and
    raised together as a single ValidationError.
    """
    if self.allow_scalar and not isinstance(value, (list, tuple)):
        value = [value]
    value = super(List, self).deserialize(value)
    result = []
    errors = []
    for index, val in enumerate(value):
        try:
            result.append(self.item_type.deserialize(val, **kwargs))
        except ValidationError as exc:
            # Remember where the failure happened for error reporting.
            exc.index = index
            errors.append(exc)
    if errors:
        raise ValidationError(errors)
    return result
Deserialize every item of the list.
def from_gene_ids(cls, gene_ids: List[str]):
    """Initialize an instance from gene IDs by wrapping each in an ExpGene."""
    return cls.from_genes([ExpGene(gene_id) for gene_id in gene_ids])
Initialize instance from gene IDs.
def change_password(self, new_password):
    """Changes password and sends the password_changed signal."""
    self.set_password(new_password)
    self.save()
    # Notify listeners (e.g. session invalidation) about the change.
    password_changed.send(sender=self.__class__, user=self)
Changes password and sends a signal
def put_text(self, key, text):
    """Put the text into the storage associated with the key (used as a path)."""
    with open(key, "w") as handle:
        handle.write(text)
Put the text into the storage associated with the key.
def identify_core(core):
    """Identify the polynomial argument and dispatch to its handler.

    Raises:
        TypeError: if *core* matches none of the supported types.
    """
    # Map each accepted type to its handler; first isinstance match wins.
    for datatype, identifier in {
            int: _identify_scaler,
            numpy.int8: _identify_scaler,
            numpy.int16: _identify_scaler,
            numpy.int32: _identify_scaler,
            numpy.int64: _identify_scaler,
            float: _identify_scaler,
            numpy.float16: _identify_scaler,
            numpy.float32: _identify_scaler,
            numpy.float64: _identify_scaler,
            chaospy.poly.base.Poly: _identify_poly,
            dict: _identify_dict,
            numpy.ndarray: _identify_iterable,
            list: _identify_iterable,
            tuple: _identify_iterable,
    }.items():
        if isinstance(core, datatype):
            return identifier(core)
    raise TypeError(
        "Poly arg: 'core' is not a valid type " + repr(core))
Identify the polynomial argument.
def chmod(self, tarinfo, targetpath):
    """Set file permissions of targetpath according to tarinfo.

    Raises:
        ExtractError: chained to the original OSError, when chmod fails.
    """
    if hasattr(os, 'chmod'):
        try:
            os.chmod(targetpath, tarinfo.mode)
        except EnvironmentError as e:
            # Chain the original cause instead of silently discarding it.
            raise ExtractError("could not change mode") from e
Set file permissions of targetpath according to tarinfo.
def getDiscountedBulkPrice(self):
    """Compute discounted bulk price (excl. VAT) using the member discount."""
    price = float(self.getBulkPrice() or 0)
    discount = float(self.bika_setup.getMemberDiscount() or 0)
    return price - price * discount / 100
Compute discounted bulk discount excl. VAT
def add_func(self, transmute_func, transmute_context):
    """Register a transmute function's swagger definition under each of its paths."""
    swagger_path = transmute_func.get_swagger_path(transmute_context)
    for path in transmute_func.paths:
        self.add_path(path, swagger_path)
add a transmute function's swagger definition to the spec
def reset(self):
    """Command the PCAN driver to reset the bus; True when it reports OK."""
    return self.m_objPCANBasic.Reset(self.m_PcanHandle) == PCAN_ERROR_OK
Command the PCAN driver to reset the bus after an error.
def getErrorResponse(self, errorCode, errorDescr):
    """Record the error attributes on this object and return it.

    Marks the response flag as "yes" so callers know an answer is set.
    """
    self.errorCode, self.errorDescr, self.response = errorCode, errorDescr, "yes"
    return self
This method sets error attributes of an external method object.
def _compile(self, target, results_dir, source):
    """Compile the given C++ source to an object file under results_dir."""
    obj = self._objpath(target, results_dir, source)
    safe_mkdir_for(obj)
    abs_source = os.path.join(get_buildroot(), source)
    # Each library dependency contributes its base dir as an include path.
    include_dirs = []
    for dep in target.dependencies:
        if self.is_library(dep):
            include_dirs.extend([os.path.join(get_buildroot(), dep.target_base)])
    cmd = [self.cpp_toolchain.compiler]
    cmd.extend(['-c'])
    cmd.extend(('-I{0}'.format(i) for i in include_dirs))
    cmd.extend(['-o' + obj, abs_source])
    cmd.extend(self.get_options().cc_options)
    with self.context.new_workunit(name='cpp-compile', labels=[WorkUnitLabel.COMPILER]) as workunit:
        self.run_command(cmd, workunit)
    self.context.log.info('Built c++ object: {0}'.format(obj))
Compile given source to an object file.
def _parse_template_or_argument(self):
    """Parse a template or argument at the head of the wikicode string.

    Consumes the run of '{' braces, then greedily tries an argument
    ('{{{') before a template ('{{'), emitting literal braces for
    whatever cannot be matched.
    """
    self._head += 2
    braces = 2
    # Count any extra opening braces beyond the first two.
    while self._read() == "{":
        self._head += 1
        braces += 1
    has_content = False
    self._push()
    while braces:
        if braces == 1:
            # A single leftover '{' can only be literal text.
            return self._emit_text_then_stack("{")
        if braces == 2:
            try:
                self._parse_template(has_content)
            except BadRoute:
                return self._emit_text_then_stack("{{")
            break
        # Three or more braces: prefer an argument, then a template,
        # then fall back to literal braces.
        try:
            self._parse_argument()
            braces -= 3
        except BadRoute:
            try:
                self._parse_template(has_content)
                braces -= 2
            except BadRoute:
                return self._emit_text_then_stack("{" * braces)
        if braces:
            has_content = True
            self._head += 1
    self._emit_all(self._pop())
    if self._context & contexts.FAIL_NEXT:
        self._context ^= contexts.FAIL_NEXT
Parse a template or argument at the head of the wikicode string.
def calc_secondary_parameters(self):
    """Determine the values of the secondary parameters `a` and `b`."""
    # Both parameters share the same 2*sqrt(d) denominator.
    denom = 2. * self.d ** .5
    self.a = self.x / denom
    self.b = self.u / denom
Determine the values of the secondary parameters `a` and `b`.
def _gather(self, *args, **kwargs):
    """Generator over the converted results of the actor's collected replies."""
    propagate = kwargs.pop('propagate', True)
    # Lazily convert each raw reply as the caller iterates.
    return (self.to_python(reply, propagate=propagate)
            for reply in self.actor._collect_replies(*args, **kwargs))
Generator over the results
def compress(samples, run_parallel):
    """Perform compression of output files for long term storage.

    Samples whose archive config requests CRAM are converted in parallel;
    all other samples pass through unchanged.
    """
    to_cram = []
    finished = []
    for data in [x[0] for x in samples]:
        if "cram" in dd.get_archive(data) or "cram-lossless" in dd.get_archive(data):
            to_cram.append([data])
        else:
            finished.append([data])
    crammed = run_parallel("archive_to_cram", to_cram)
    return finished + crammed
Perform compression of output files for long term storage.
def run(agent_id, force=False):
    """Runs the livesync agent(s).

    With no agent_id every agent runs; agents without a backend, or
    without an initial export (unless *force*), are skipped.
    (Python 2 module: uses print statements.)
    """
    if agent_id is None:
        agent_list = LiveSyncAgent.find_all()
    else:
        agent = LiveSyncAgent.find_first(id=agent_id)
        if agent is None:
            print 'No such agent'
            return
        agent_list = [agent]
    for agent in agent_list:
        if agent.backend is None:
            print cformat('Skipping agent: %{red!}{}%{reset} (backend not found)').format(agent.name)
            continue
        if not agent.initial_data_exported and not force:
            print cformat('Skipping agent: %{red!}{}%{reset} (initial export not performed)').format(agent.name)
            continue
        print cformat('Running agent: %{white!}{}%{reset}').format(agent.name)
        try:
            agent.create_backend().run()
            db.session.commit()
        except:
            # Roll back the transaction before letting the error propagate.
            db.session.rollback()
            raise
Runs the livesync agent
def from_dict(cls, d):
    """Returns a SlabEntry by reading in a dictionary representation."""
    structure = SlabEntry.from_dict(d["structure"])
    energy = SlabEntry.from_dict(d["energy"])
    miller_index = d["miller_index"]
    label = d["label"]
    coverage = d["coverage"]
    adsorbates = d["adsorbates"]
    # Bug fix: the original read `self.clean_entry`, but `self` does not
    # exist in this (class)method scope — take the value from the dict.
    clean_entry = d["clean_entry"]
    return SlabEntry(structure, energy, miller_index, label=label,
                     coverage=coverage, adsorbates=adsorbates,
                     clean_entry=clean_entry)
Returns a SlabEntry by reading in an dictionary
def use_comparative_comment_view(self):
    """Pass through to provider CommentLookupSession.use_comparative_comment_view."""
    self._object_views['comment'] = COMPARATIVE
    for session in self._get_provider_sessions():
        # Not every provider session supports this view; skip those.
        try:
            session.use_comparative_comment_view()
        except AttributeError:
            pass
Pass through to provider CommentLookupSession.use_comparative_comment_view
def _copy(self):
    """Creates a deep copy of this request (URI, headers, body parts list)."""
    copied_uri = Uri(self.uri.scheme, self.uri.host, self.uri.port,
                     self.uri.path, self.uri.query.copy())
    new_request = HttpRequest(uri=copied_uri, method=self.method,
                              headers=self.headers.copy())
    # Copy the list itself; the individual parts are shared.
    new_request._body_parts = self._body_parts[:]
    return new_request
Creates a deep copy of this request.
def add_synonym(self, other):
    """Merge *other*'s synonyms into ours and share the single combined list.

    Every word in a group of synonyms ends up referencing the same list.
    """
    combined = self.synonyms
    combined.extend(other.synonyms)
    other.synonyms = combined
Every word in a group of synonyms shares the same list.
def meth_wdl(args):
    """Retrieve WDL for given version of a repository method."""
    r = fapi.get_repository_method(args.namespace, args.method, args.snapshot_id, True)
    # Fail loudly on anything other than HTTP 200.
    fapi._check_response_code(r, 200)
    return r.text
Retrieve WDL for given version of a repository method
def _instantiate_app(self, target_cls, kwargs):
    """For App targets, convert BundleAdaptor to BundleProps before instantiating."""
    parse_context = ParseContext(kwargs['address'].spec_path, dict())
    bundleprops_factory = Bundle(parse_context)
    # Replace each adaptor in-place with its resolved bundle props.
    kwargs['bundles'] = [
        bundleprops_factory.create_bundle_props(bundle)
        for bundle in kwargs['bundles']
    ]
    return target_cls(build_graph=self, **kwargs)
For App targets, convert BundleAdaptor to BundleProps.
def addBiosample(self, biosample):
    """Register *biosample* in the id map, id list and name map of this dataset."""
    biosample_id = biosample.getId()
    self._biosampleIdMap[biosample_id] = biosample
    self._biosampleIds.append(biosample_id)
    self._biosampleNameMap[biosample.getName()] = biosample
Adds the specified biosample to this dataset.
def send_text(hwnd, txt):
    """Sends the text 'txt' to the window handle hwnd using SendMessage.

    Newlines are translated into Return key down/up events.
    """
    try:
        for c in txt:
            if c == '\n':
                win32api.SendMessage(hwnd, win32con.WM_KEYDOWN, win32con.VK_RETURN, 0)
                win32api.SendMessage(hwnd, win32con.WM_KEYUP, win32con.VK_RETURN, 0)
            else:
                win32api.SendMessage(hwnd, win32con.WM_CHAR, ord(c), 0)
    except Exception as ex:
        # Best-effort: report failures instead of raising.
        print('error calling SendMessage ' + str(ex))
sends the text 'txt' to the window handle hwnd using SendMessage
def from_eocube(eocube, ji):
    """Create an EOCubeChunk at chunk index *ji* from an EOCube's settings."""
    return EOCubeChunk(ji, eocube.df_layers, eocube.chunksize, eocube.wdir)
Create a EOCubeChunk object from an EOCube object.
def close(self):
    """Close the ACE process and return the process's exit code.

    Drains stdout to capture tsdb run-info lines before waiting on the
    process.
    """
    self.run_info['end'] = datetime.now()
    self._p.stdin.close()
    for line in self._p.stdout:
        if line.startswith('NOTE: tsdb run:'):
            self._read_run_info(line)
        else:
            logging.debug('ACE cleanup: {}'.format(line.rstrip()))
    retval = self._p.wait()
    return retval
Close the ACE process and return the process's exit code.
def _clean_dirty(self, obj=None):
    """Recursively clean the dirty flags on self (or *obj*) and all children."""
    obj = obj or self
    obj.__dict__['_dirty_attributes'].clear()
    obj._dirty = False
    for key, val in vars(obj).items():
        if isinstance(val, BaseObject):
            self._clean_dirty(val)
        else:
            # Duck-typed children may expose their own _clean_dirty.
            func = getattr(val, '_clean_dirty', None)
            if callable(func):
                func()
Recursively clean self and all child objects.
def in_scope(self, scope: str):
    """Context manager to handle current scope.

    Temporarily resolves *scope* against the current resolution scope
    and restores the previous scope on exit, even on error.
    """
    old_scope = self.resolution_scope
    self.resolution_scope = urlparse.urljoin(old_scope, scope)
    try:
        yield
    finally:
        self.resolution_scope = old_scope
Context manager to handle current scope.
def _validate_config():
    """Validate azurefs config, return False if it doesn't validate."""
    if not isinstance(__opts__['azurefs'], list):
        log.error('azurefs configuration is not formed as a list, skipping azurefs')
        return False
    for container in __opts__['azurefs']:
        if not isinstance(container, dict):
            log.error(
                'One or more entries in the azurefs configuration list are '
                'not formed as a dict. Skipping azurefs: %s', container
            )
            return False
        # Both account_name and container_name are mandatory per container.
        if 'account_name' not in container or 'container_name' not in container:
            log.error(
                'An azurefs container configuration is missing either an '
                'account_name or a container_name: %s', container
            )
            return False
    return True
Validate azurefs config, return False if it doesn't validate
def remove_null(obj):
    """Reads through a list or set and strips any null values, in place.

    Sets have ``None`` removed; lists drop every item failing
    ``is_not_null``. Any other object is returned unchanged.
    """
    if isinstance(obj, set):
        # discard() is a no-op when absent, replacing the old try/except.
        obj.discard(None)
    elif isinstance(obj, list):
        # Bug fix: the original removed items while iterating the same
        # list, which skips the element following each removal. Rebuild
        # in place so callers holding a reference still see the result.
        obj[:] = [item for item in obj if is_not_null(item)]
    return obj
reads through a list or set and strips any null values
def _extract_gaussian_gradient_magnitude(image, mask = slice(None), sigma = 1, voxelspacing = None):
    """Internal, single-image version of `gaussian_gradient_magnitude`."""
    if voxelspacing is None:
        voxelspacing = [1.] * image.ndim
    # Scale sigma per-axis by the voxel spacing.
    sigma = _create_structure_array(sigma, voxelspacing)
    return _extract_intensities(scipy_gaussian_gradient_magnitude(image, sigma), mask)
Internal, single-image version of `gaussian_gradient_magnitude`.
def ReadIntoObject(buff, index, value_obj, length=0):
    """Reads all tags until the next end group and store in the value_obj.

    Unknown tags are preserved (keyed by an integer counter) so they can
    be re-serialized; repeated fields accumulate into their wrapped list.
    """
    raw_data = value_obj.GetRawData()
    count = 0
    for (encoded_tag, encoded_length, encoded_field) in SplitBuffer(
            buff, index=index, length=length):
        type_info_obj = value_obj.type_infos_by_encoded_tag.get(encoded_tag)
        wire_format = (encoded_tag, encoded_length, encoded_field)
        if type_info_obj is None:
            # Unknown field: keep the raw wire data under a numeric key.
            raw_data[count] = (None, wire_format, None)
            count += 1
        elif type_info_obj.__class__ is ProtoList:
            value_obj.Get(type_info_obj.name).wrapped_list.append((None, wire_format))
        else:
            raw_data[type_info_obj.name] = (None, wire_format, type_info_obj)
    value_obj.SetRawData(raw_data)
Reads all tags until the next end group and store in the value_obj.
def _format_base_path(self, api_name):
    """Format the base path name: app name, suffixed when the API differs."""
    if self.app_name == api_name:
        return self.app_name
    return '{0}-{1}'.format(self.app_name, api_name)
Format the base path name.
def save_activity(self, activity_form, *args, **kwargs):
    """Create or update the activity depending on the form's update flag."""
    handler = (self.update_activity if activity_form.is_for_update()
               else self.create_activity)
    return handler(activity_form, *args, **kwargs)
Pass through to provider ActivityAdminSession.update_activity
def _join_strings(x):
    """Join the first adjacent pair of 'Str' elements found in list *x*.

    Returns None when a pair was merged (caller should rescan), True when
    no adjacent 'Str' pair remains.
    """
    for i in range(len(x) - 1):
        first, second = x[i], x[i + 1]
        if first['t'] == 'Str' == second['t']:
            first['c'] += second['c']
            del x[i + 1]
            return None
    return True
Joins adjacent Str elements found in the element list 'x'.
def Regions(self, skip_mapped_files=False, skip_shared_regions=False, skip_executable_regions=False, skip_readonly_regions=False):
    """Returns an iterator over the readable (start, length) regions.

    Parses /proc/<pid>/maps; the skip_* flags filter file-backed, shared,
    executable and read-only mappings respectively.
    """
    try:
        maps_file = open("/proc/" + str(self.pid) + "/maps", "r")
    except OSError as e:
        raise process_error.ProcessError(e)
    with maps_file:
        for line in maps_file:
            m = self.maps_re.match(line)
            if not m:
                continue
            start = int(m.group(1), 16)
            end = int(m.group(2), 16)
            region_protec = m.group(3)
            # A non-zero inode means the mapping is file-backed.
            inode = int(m.group(6))
            if "r" in region_protec:
                if skip_mapped_files and inode != 0:
                    continue
                if skip_shared_regions and "s" in region_protec:
                    continue
                if skip_executable_regions and "x" in region_protec:
                    continue
                if skip_readonly_regions and "w" not in region_protec:
                    continue
                yield start, end - start
Returns an iterator over the readable regions for this process.
def rdf_suffix(fmt: str) -> str:
    """Map the RDF format *fmt* to its registered suffix ('rdf' if unknown)."""
    return next(
        (suffix for suffix, fmt_name in SUFFIX_FORMAT_MAP.items() if fmt_name == fmt),
        'rdf',
    )
Map the RDF format to the appropriate suffix.
def add_cmd_output(self, cmds, suggest_filename=None,
                   root_symlink=None, timeout=300, stderr=True,
                   chroot=True, runat=None, env=None, binary=False,
                   sizelimit=None, pred=None):
    """Run a program or a list of programs and collect the output."""
    if isinstance(cmds, six.string_types):
        cmds = [cmds]
    # A single filename/symlink cannot apply to several commands.
    if len(cmds) > 1 and (suggest_filename or root_symlink):
        self._log_warn("ambiguous filename or symlink for command list")
    if sizelimit is None:
        sizelimit = self.get_option("log_size")
    for cmd in cmds:
        self._add_cmd_output(cmd, suggest_filename=suggest_filename,
                             root_symlink=root_symlink, timeout=timeout,
                             stderr=stderr, chroot=chroot, runat=runat,
                             env=env, binary=binary, sizelimit=sizelimit,
                             pred=pred)
Run a program or a list of programs and collect the output
def _get_relationship_cell_val(self, obj, column):
    """Return the value to insert in a relationship cell.

    Handles scalar relationships, indexed access into list
    relationships, and newline-joined rendering of whole lists.
    """
    val = ""
    key = column['key']
    related_key = column.get('related_key', None)
    related_obj = getattr(obj, key, None)
    if related_obj is None:
        return ""
    if column['__col__'].uselist:
        if related_key is not None:
            if column.get('index') is not None:
                # A specific element of the list was requested.
                if len(related_obj) > column['index']:
                    rel_obj = related_obj[column['index']]
                    val = self._get_formatted_val(
                        rel_obj,
                        related_key,
                        column,
                    )
            else:
                # Render every related object, one per line.
                _vals = []
                for rel_obj in related_obj:
                    _vals.append(
                        self._get_formatted_val(
                            rel_obj,
                            related_key,
                            column,
                        )
                    )
                val = '\n'.join(_vals)
    else:
        if related_key is not None:
            val = self._get_formatted_val(related_obj, related_key, column)
    return val
Return the value to insert in a relationship cell
def listen_tta(self, target, timeout):
    """Listen as Type A Target is not supported by this device."""
    info = "{device} does not support listen as Type A Target"
    raise nfc.clf.UnsupportedTargetError(info.format(device=self))
Listen as Type A Target is not supported.
def process(self):
    """Receive a request from the socket, execute it and send the response.

    Each failure mode is logged separately; on any failure an empty
    response is sent so the client is always answered.
    """
    response = None
    try:
        payload = self.receive()
        method, args, ref = self.parse(payload)
        response = self.execute(method, args, ref)
    except AuthenticateError as exception:
        logging.error(
            'Service error while authenticating request: {}'
            .format(exception), exc_info=1)
    except AuthenticatorInvalidSignature as exception:
        logging.error(
            'Service error while authenticating request: {}'
            .format(exception), exc_info=1)
    except DecodeError as exception:
        logging.error(
            'Service error while decoding request: {}'
            .format(exception), exc_info=1)
    except RequestParseError as exception:
        logging.error(
            'Service error while parsing request: {}'
            .format(exception), exc_info=1)
    else:
        logging.debug('Service received payload: {}'.format(payload))
    # Always answer the client, even when handling failed.
    if response:
        self.send(response)
    else:
        self.send('')
Receive data from socket and process request
def setup_logging(self):
    """Setup of application logging: config file when usable, else defaults."""
    cfg = self.options.logging_config
    # The config file only applies when given, existing, and not a dry run.
    use_file = len(cfg) > 0 and os.path.isfile(cfg) and not self.options.dry_run
    if use_file:
        Logger.configure_by_file(cfg)
        return
    if self.options.dry_run:
        logging_format = "%(name)s - %(message)s"
    else:
        logging_format = "%(asctime)-15s - %(name)s - %(message)s"
    Logger.configure_default(logging_format, self.logging_level)
Setup of application logging.
def _count_classified_pixels(self):
    """Count the pixels belonging to each classified class.

    Builds an array of per-(mask-type, mask, class) counts and appends
    it to the running pixel_classification_counts.
    """
    class_values = self.class_dictionary.values()
    # For every class value, count its predicted pixels inside each truth mask.
    classification_count = np.array([[[np.count_nonzero(prediction[np.nonzero(mask)] == class_val)
                                       for prediction, mask in zip(self.classification_masks, masktype)]
                                      for masktype in self.truth_masks]
                                     for class_val in class_values])
    # Reorder axes so class is last and mask index precedes it.
    classification_count = np.moveaxis(classification_count, 0, -1)
    classification_count = np.moveaxis(classification_count, 0, -2)
    if self.pixel_classification_counts is None:
        self.pixel_classification_counts = np.copy(classification_count)
    else:
        self.pixel_classification_counts = np.concatenate((self.pixel_classification_counts, classification_count))
Count the pixels belonging to each classified class.
def remove_all_cts_records_by(file_name, crypto_idfp):
    """Remove all cts records set by player with CRYPTO_IDFP; save in place."""
    db = XonoticDB.load_path(file_name)
    db.remove_all_cts_records_by(crypto_idfp)
    db.save(file_name)
Remove all cts records set by player with CRYPTO_IDFP
def nic_name_to_host(nic_name):
    """Translate a network-interface name into its IPv4 address (host) string."""
    from netifaces import ifaddresses, AF_INET
    # Fall back to a placeholder entry when the NIC has no IPv4 address.
    addresses = ifaddresses(nic_name).setdefault(AF_INET, [{'addr': 'No IP addr'}])
    return addresses[0]['addr']
helper function to translate the name of a network card into a valid host name
def clean(self):
    """Return cleaned data; 'verified' derives from the verification hash.

    A user counts as verified once no 'verification_hash' remains on the
    underlying resource.
    """
    result = super(User, self).clean()
    result['verified'] = 'verification_hash' not in self._resource
    return result
Verified value is derived from whether user has a verification hash
def exists(self):
    """Whether this path exists (missing path or non-directory parent -> False)."""
    try:
        self.stat()
    except OSError as e:
        if e.errno in (ENOENT, ENOTDIR):
            return False
        # Any other OS error is unexpected; let it propagate.
        raise
    return True
Whether this path exists.
def _safe_run_theta(input_file, out_dir, output_ext, args, data):
    """Run THetA, catching and continuing on any errors.

    Returns the output file path, or None when skipped (a skip marker
    exists, or THetA reports the sample is unsuitable).
    """
    out_file = os.path.join(out_dir, _split_theta_ext(input_file) + output_ext)
    skip_file = out_file + ".skipped"
    if utils.file_exists(skip_file):
        return None
    if not utils.file_exists(out_file):
        with file_transaction(data, out_dir) as tx_out_dir:
            utils.safe_makedir(tx_out_dir)
            cmd = _get_cmd("RunTHetA.py") + args + \
                  [input_file, "--NUM_PROCESSES", dd.get_cores(data), "--FORCE", "-d", tx_out_dir]
            try:
                do.run(cmd, "Run THetA to calculate purity", log_error=False)
            except subprocess.CalledProcessError as msg:
                # These failures are expected for some samples: drop a
                # marker file so reruns skip them quickly.
                if ("Number of intervals must be greater than 1" in str(msg) or
                        "This sample isn't a good candidate for THetA analysis" in str(msg)):
                    with open(os.path.join(tx_out_dir, os.path.basename(skip_file)), "w") as out_handle:
                        out_handle.write("Expected TheTA failure, skipping")
                    return None
                else:
                    raise
    return out_file
Run THetA, catching and continuing on any errors.
def _clone_reverses(self, old_reverses):
    """Clones all the reverse-related objects that were previously gathered.

    old_reverses maps a relation kind ('m2m' vs FK) to {name: (field_name,
    sub_objs)} entries — presumably gathered by a companion method; confirm.
    """
    for ctype, reverses in old_reverses.items():
        for parts in reverses.values():
            sub_objs = parts[1]
            field_name = parts[0]
            attrs = {}
            for sub_obj in sub_objs:
                # For FK reverses, point the clones at this (new) instance;
                # the attrs are computed once from the first sub object.
                if ctype != 'm2m' and not attrs:
                    field = sub_obj._meta.get_field(field_name)
                    attrs = {
                        field.column: getattr(self, field.rel.field_name)
                    }
                sub_obj._clone(**attrs)
            if ctype == 'm2m':
                setattr(self, field_name, sub_objs)
Clones all the objects that were previously gathered.
def input(self, prompt, default=None, show_default=True):
    """Provide a command prompt (delegates to click.prompt)."""
    return click.prompt(prompt, default=default, show_default=show_default)
Provide a command prompt.
def instance_signals_and_handlers(cls, instance):
    """Calculate per-instance signals and handlers.

    Returns a copy of the class signals together with the handler
    mapping built for *instance*.
    """
    return (
        cls._signals.copy(),
        cls._build_instance_handler_mapping(instance, cls._signal_handlers),
    )
Calculate per-instance signals and handlers.
def build_extra_headers(request, proxyMode, orgaMode, currentOrga):
    """Build the dict of extra headers forwarded with the request.

    Includes user properties (proxy mode), organization details (orga
    mode), the remote address, and selected pass-through HTTP headers.
    """
    things_to_add = {}
    if proxyMode:
        # Forward the configured user properties.
        for prop in settings.PIAPI_USERDATA:
            if hasattr(request.user, prop):
                things_to_add['user_' + prop] = getattr(request.user, prop)
    if orgaMode:
        things_to_add['orga_pk'] = currentOrga.pk
        things_to_add['orga_name'] = currentOrga.name
        things_to_add['orga_codops'] = currentOrga.ebu_codops
    # NOTE(review): baseURI is not defined in this scope — presumably a
    # module-level constant; confirm.
    things_to_add['base_url'] = baseURI
    if request and hasattr(request, 'META'):
        if 'REMOTE_ADDR' in request.META:
            things_to_add['remote-addr'] = request.META['REMOTE_ADDR']
        # Default False avoids AttributeError when the setting is absent.
        if 'HTTP_X_FORWARDED_FOR' in request.META and getattr(settings, 'HONOR_X_FORWARDED_FOR', False):
            things_to_add['remote-addr'] = request.META['HTTP_X_FORWARDED_FOR']
        for meta_header, dest_header in [
                ('HTTP_IF_NONE_MATCH', 'If-None-Match'),
                ('HTTP_ORIGIN', 'Origin'),
                # Bug fix: the key contained a lowercase 't'
                # ('HTTP_ACCESS_CONtROL_REQUEST_METHOD'), so the CORS
                # preflight method header was never forwarded — WSGI META
                # keys are fully upper-cased.
                ('HTTP_ACCESS_CONTROL_REQUEST_METHOD', 'Access-Control-Request-Method'),
                ('HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'Access-Control-Request-Headers')]:
            if meta_header in request.META:
                things_to_add[dest_header] = request.META[meta_header]
    return things_to_add
Build the list of extra headers
def iter_links(operations, page):
    """Generate links for an iterable of operations on a starting page.

    Each entry in *operations* is an ``(operation, ns, rule, func)``
    tuple; only the first two are used to build the link.
    """
    for operation, ns, _rule, _func in operations:
        yield Link.for_(
            operation=operation,
            ns=ns,
            type=ns.subject_name,
            qs=page.to_items(),
        )
Generate links for an iterable of operations on a starting page.
def install_pip(env, requirements):
    """Install pip and its requirements using setuptools.

    Wraps any unexpected failure in EnvironmentSetupError, while letting
    KeyboardInterrupt/SystemExit propagate untouched.
    """
    try:
        cache_folder = config.installation_cache_folder()
        easy_install_options = setuptools_install_options(cache_folder)
        if cache_folder is not None:
            zip_eggs_in_folder(cache_folder)
        command = ["-m", "easy_install"] + easy_install_options + requirements
        env.execute(command)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        raise EnvironmentSetupError("pip installation failed.")
Install pip and its requirements using setuptools.
def __read_device(self):
    """Read the state of the gamepad.

    Returns the XinputState on success, None when the device is not
    connected, and raises RuntimeError for any other XInput error code.
    """
    state = XinputState()
    result = self.manager.xinput.XInputGetState(
        self.__device_number, ctypes.byref(state))
    if result == XINPUT_ERROR_SUCCESS:
        return state
    if result == XINPUT_ERROR_DEVICE_NOT_CONNECTED:
        # A disconnected pad is an expected condition, not an error.
        return None
    raise RuntimeError(
        "Unknown error %d attempting to get state of device %d" % (
            result, self.__device_number))
Read the state of the gamepad.
def routeDefault(self, request, year=None):
    """Route a request to the default calendar view.

    The view is taken from the ``view`` query parameter (falling back to
    ``self.default_view``): "L"/"list" serves the upcoming list,
    "W"/"weekly" the weekly view, anything else the monthly view.
    """
    view = request.GET.get('view', self.default_view)
    if view in ("L", "list"):
        return self.serveUpcoming(request)
    if view in ("W", "weekly"):
        return self.serveWeek(request, year)
    return self.serveMonth(request, year)
Route a request to the default calendar view.
def add_if_none_match(self):
    """Add the If-None-Match option to the request."""
    opt = Option()
    opt.number = defines.OptionRegistry.IF_NONE_MATCH.number
    # If-None-Match is an empty (value-less) CoAP option.
    opt.value = None
    self.add_option(opt)
Add the if-none-match option to the request.
def _get_qsize(tuning, width): names = [x.to_shorthand() for x in tuning.tuning] basesize = len(max(names)) + 3 barsize = ((width - basesize) - 2) - 1 return max(0, int(barsize / 4.5))
Return a reasonable quarter note size for 'tuning' and 'width'.
def instances(exp=".*"):
    """Filter list of machines matching an expression.

    Returns the EC2 nodes that have tags, for which ``ip(node)`` is
    truthy, and whose "Name" tag matches the regular expression *exp*.
    Nodes without a "Name" tag are skipped.
    """
    expression = re.compile(exp)
    matching = []
    for node in ec2_instances():
        if not (node.tags and ip(node)):
            continue
        name = node.tags.get("Name")
        # Check for a missing tag explicitly instead of letting
        # expression.match(None) raise TypeError and swallowing it,
        # as the original did.
        if name is not None and expression.match(name):
            matching.append(node)
    return matching
Filter list of machines matching an expression
def _get_parents_from_parts(kwargs):
    """Get the parents given all the children parameters.

    For every child level ``i`` this records the slash-terminated path of
    everything *before* that level as ``child_parent_<i>``, and finally
    stores the accumulated path as ``resource_parent``. Segments are
    appended in order: ``<type>/<name>/``, then per level an optional
    ``providers/<namespace>/`` followed by ``<child_type_i>/<child_name_i>/``.
    Mutates and returns *kwargs*.
    """
    parent_builder = []
    if kwargs['last_child_num'] is not None:
        parent_builder.append('{type}/{name}/'.format(**kwargs))
        # Intermediate levels 1 .. last-1: record the parent path seen so
        # far, then extend it with this level's own type/name segment.
        for index in range(1, kwargs['last_child_num']):
            child_namespace = kwargs.get('child_namespace_{}'.format(index))
            if child_namespace is not None:
                parent_builder.append('providers/{}/'.format(child_namespace))
            kwargs['child_parent_{}'.format(index)] = ''.join(parent_builder)
            # Double .format(): the first pass injects the index into the
            # placeholder names, the second resolves them from kwargs.
            parent_builder.append(
                '{{child_type_{0}}}/{{child_name_{0}}}/'
                .format(index).format(**kwargs))
        # The last child level only contributes its (optional) provider
        # namespace to the parent path; its own type/name is the resource.
        child_namespace = kwargs.get(
            'child_namespace_{}'.format(kwargs['last_child_num']))
        if child_namespace is not None:
            parent_builder.append('providers/{}/'.format(child_namespace))
        kwargs['child_parent_{}'.format(kwargs['last_child_num'])] = \
            ''.join(parent_builder)
    kwargs['resource_parent'] = ''.join(parent_builder) if kwargs['name'] else None
    return kwargs
Get the parents given all the children parameters.
def layerize(begin_update=None, predict=None, *args, **kwargs):
    """Wrap a function into a layer.

    Usable directly (``layerize(func)``) or as a decorator factory
    (``layerize()`` / ``layerize(predict=p)``); both paths produce a
    FunctionLayer around the wrapped ``begin_update`` callback.
    """
    if begin_update is not None:
        return FunctionLayer(begin_update, predict=predict, *args, **kwargs)

    def wrapper(begin_update):
        # Bug fix: the decorator-factory path used to drop `predict`
        # entirely. Forward it only when given, so the no-predict case
        # keeps the original call shape.
        if predict is not None:
            return FunctionLayer(begin_update, predict=predict, *args, **kwargs)
        return FunctionLayer(begin_update, *args, **kwargs)

    return wrapper
Wrap a function into a layer
def _nacm_default_deny_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Set NACM default access from a ``default-deny-*`` statement."""
    if not hasattr(self, 'default_deny'):
        # Nothing to do for receivers without the attribute.
        return
    keyword = stmt.keyword
    if keyword == "default-deny-all":
        self.default_deny = DefaultDeny.all
    elif keyword == "default-deny-write":
        self.default_deny = DefaultDeny.write
Set NACM default access.
def flow(self) -> FlowField:
    "Access the flow-field grid after applying queued affine and coord transforms."
    if self._affine_mat is not None:
        # Fold the pending affine matrix into the flow grid, then clear it.
        self._flow = _affine_inv_mult(self._flow, self._affine_mat)
        self._affine_mat = None
        self.transformed = True
    if self.flow_func:
        # Queued coord transforms run in reverse order, then the queue
        # is drained.
        for func in reversed(self.flow_func):
            self._flow = func(self._flow)
        self.transformed = True
        self.flow_func = []
    return self._flow
Access the flow-field grid after applying queued affine and coord transforms.
def ReplaceHomoglyphs(s):
    """Returns s with unicode homoglyphs replaced by ascii equivalents.

    Characters with a known ASCII look-alike are mapped through the table
    below; other ASCII characters pass through unchanged; anything else
    is rendered as its unicode-escape sequence (or '?' as a last resort).
    """
    # Bug fix: the table previously listed the no-break space twice, as
    # both '\xa0' and '\u00a0' (the same codepoint); one entry suffices.
    homoglyphs = {
        '\u00a0': ' ',
        '\u00a9': '(C)',
        '\u00ae': '(R)',
        # NOTE(review): maps 'ã' to the empty string — looks deliberate,
        # but worth confirming; preserved as-is.
        '\u00e3': '',
        '\u2014': '-',
        '\u2018': "'",
        '\u2019': "'",
        '\u201c': '"',
        '\u201d': '"',
        '\u2026': '...',
        '\u2e3a': '-',
    }

    def _ReplaceOne(c):
        equiv = homoglyphs.get(c)
        if equiv is not None:
            return equiv
        try:
            c.encode('ascii')
            return c
        except UnicodeError:
            pass
        try:
            return c.encode('unicode-escape').decode('ascii')
        except UnicodeError:
            return '?'

    return ''.join([_ReplaceOne(c) for c in s])
Returns s with unicode homoglyphs replaced by ascii equivalents.