text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def make_copy(klass, inst, func=None, argv=None, extra_argv=None, copy_sig=True):
    """Build a fresh partial-application wrapper modeled on an existing one.

    Any of the wrapped function, the saved argument mapping, and the saved
    extra positional arguments may be overridden; pieces not supplied are
    copied from ``inst``.

    :param inst: The partial instance being copied
    :param func: Optional replacement for the wrapped function
    :param argv: Optional replacement for the saved argument values
    :param extra_argv: Optional replacement for the saved extra positionals
    :param copy_sig: When True, also copy the original's signature
    :rtype: New partial wrapper instance
    """
    wrapped = func if func else inst.func
    clone = klass(wrapped)

    # Copy the containers so the clone never shares mutable state with inst.
    source_argv = argv if argv else inst.argv
    clone.argv = source_argv.copy()
    source_extra = extra_argv if extra_argv else inst.extra_argv
    clone.extra_argv = list(source_extra)

    if copy_sig:
        clone.__sig_from_partial(inst)
    return clone
[ "def", "make_copy", "(", "klass", ",", "inst", ",", "func", "=", "None", ",", "argv", "=", "None", ",", "extra_argv", "=", "None", ",", "copy_sig", "=", "True", ")", ":", "dest", "=", "klass", "(", "func", "or", "inst", ".", "func", ")", "dest", ...
39.545455
21.181818
def get_line_relative_to_node(self, target_node: ast.AST, offset: int) -> str:
    """Return the source line located ``offset`` lines after ``target_node``.

    The index is computed relative to this Function's own starting line, so
    the result is taken from ``self.lines``.

    Raises:
        IndexError: when ``offset`` takes the request out of bounds of this
            Function's lines.
    """
    relative_index = target_node.lineno - self.node.lineno
    return self.lines[relative_index + offset]
[ "def", "get_line_relative_to_node", "(", "self", ",", "target_node", ":", "ast", ".", "AST", ",", "offset", ":", "int", ")", "->", "str", ":", "return", "self", ".", "lines", "[", "target_node", ".", "lineno", "-", "self", ".", "node", ".", "lineno", "...
42.857143
20.285714
def sendFuture(self, future):
    """Send a Future to be executed remotely."""
    # Work on a shallow copy so the caller's Future keeps its runtime state.
    outgoing = copy.copy(future)
    outgoing.greenlet = None
    outgoing.children = {}

    def transmit():
        # Ship the task id and the pickled Future over the socket.
        self.socket.send_multipart([
            TASK,
            pickle.dumps(outgoing.id, pickle.HIGHEST_PROTOCOL),
            pickle.dumps(outgoing, pickle.HIGHEST_PROTOCOL),
        ])

    try:
        if shared.getConst(hash(outgoing.callable), timeout=0):
            # Enforce name reference passing if already shared
            outgoing.callable = SharedElementEncapsulation(hash(outgoing.callable))
        transmit()
    except (pickle.PicklingError, TypeError) as e:
        # If element not picklable, pickle its name
        # TODO: use its fully qualified name
        scoop.logger.warn("Pickling Error: {0}".format(e))
        outgoing.callable = hash(outgoing.callable)
        transmit()
[ "def", "sendFuture", "(", "self", ",", "future", ")", ":", "future", "=", "copy", ".", "copy", "(", "future", ")", "future", ".", "greenlet", "=", "None", "future", ".", "children", "=", "{", "}", "try", ":", "if", "shared", ".", "getConst", "(", "...
42.92
18.2
def LayerTree_loadSnapshot(self, tiles):
    """Invoke the ``LayerTree.loadSnapshot`` protocol method.

    Function path: LayerTree.loadSnapshot
        Domain: LayerTree
        Method name: loadSnapshot

    :param tiles: (array) An array of tiles composing the snapshot.
    :returns: 'snapshotId' (type: SnapshotId) -> The id of the snapshot.

    Description: Returns the snapshot identifier.
    """
    assert isinstance(tiles, (list, tuple)), (
        "Argument 'tiles' must be of type '['list', 'tuple']'. Received type: '%s'"
        % type(tiles))
    return self.synchronous_command('LayerTree.loadSnapshot', tiles=tiles)
[ "def", "LayerTree_loadSnapshot", "(", "self", ",", "tiles", ")", ":", "assert", "isinstance", "(", "tiles", ",", "(", "list", ",", "tuple", ")", ")", ",", "\"Argument 'tiles' must be of type '['list', 'tuple']'. Received type: '%s'\"", "%", "type", "(", "tiles", ")"...
31.05
20.65
def RANSAC(model_func, eval_func, data, num_points, num_iter, threshold, recalculate=False):
    """Apply RANSAC.

    The best model is the one whose consensus set (points scoring below
    ``threshold``) is largest.  Each candidate model is fitted from
    ``num_points`` randomly chosen samples; with ``recalculate`` the winning
    model is refitted from its full consensus set before being returned.

    Parameters
    ------------
    model_func: Takes a data parameter of size DxK where K is the number of
        points needed to construct the model and returns the model (Mx1 vector)
    eval_func: Takes a model parameter (Lx1) and one or more data points
        (DxC, C>=1) and calculates the score of the point(s) relative to the
        selected model
    data : array (DxN) where D is dimensionality and N number of samples
    """
    best_model = None
    best_count = 0
    best_consensus = []
    candidate_idx = list(range(data.shape[1]))

    for _ in range(num_iter):
        # Random sample: shuffle the index pool and take the first K entries.
        np.random.shuffle(candidate_idx)
        sample = data[:, candidate_idx[:num_points]]
        model = model_func(sample)

        scores = eval_func(model, data)
        assert scores.ndim == 1
        assert scores.size == data.shape[1]

        inliers = np.flatnonzero(scores < threshold)
        if len(inliers) > best_count:
            best_model = model
            best_count = len(inliers)
            best_consensus = inliers

    # Recalculate using current consensus set?
    if recalculate and len(best_consensus) > 0:
        best_model = model_func(data[:, best_consensus])

    return (best_model, best_consensus)
[ "def", "RANSAC", "(", "model_func", ",", "eval_func", ",", "data", ",", "num_points", ",", "num_iter", ",", "threshold", ",", "recalculate", "=", "False", ")", ":", "M", "=", "None", "max_consensus", "=", "0", "all_idx", "=", "list", "(", "range", "(", ...
43.081081
27.810811
def as_dict(self):
    """Serialize this attribute-filter object into a plain python dictionary.

    Only boolean attributes and nested filters of the same class are kept;
    dunder members and methods are ignored.  Nested filters are serialized
    recursively.
    """
    result = dict()
    for attr_name, attr_value in inspect.getmembers(self):
        # Skip dunder members and bound methods outright.
        skip = attr_name.startswith('__') or inspect.ismethod(attr_value)
        if skip:
            continue
        if isinstance(attr_value, bool):
            result[attr_name] = attr_value
        elif isinstance(attr_value, self.__class__):
            result[attr_name] = attr_value.as_dict()
    return result
[ "def", "as_dict", "(", "self", ")", ":", "output_dictionary", "=", "dict", "(", ")", "for", "attribute_name", ",", "type_instance", "in", "inspect", ".", "getmembers", "(", "self", ")", ":", "if", "attribute_name", ".", "startswith", "(", "'__'", ")", "or"...
32.666667
23.888889
def parse_directives(lexer: Lexer, is_const: bool) -> List[DirectiveNode]:
    """Directives[Const]: Directive[?Const]+"""
    nodes: List[DirectiveNode] = []
    # Keep consuming directives while the lexer is positioned on an '@'.
    while peek(lexer, TokenKind.AT):
        nodes.append(parse_directive(lexer, is_const))
    return nodes
[ "def", "parse_directives", "(", "lexer", ":", "Lexer", ",", "is_const", ":", "bool", ")", "->", "List", "[", "DirectiveNode", "]", ":", "directives", ":", "List", "[", "DirectiveNode", "]", "=", "[", "]", "append", "=", "directives", ".", "append", "whil...
42.285714
10.714286
def read_df_or_series_from_csv(desired_type: Type[pd.DataFrame], file_path: str, encoding: str,
                               logger: Logger, **kwargs) -> pd.DataFrame:
    """
    Helper method to read a dataframe from a csv file. By default this is well
    suited for a dataframe with headers in the first row, for example a
    parameter dataframe.

    :param desired_type: requested output type (pd.Series triggers the
        single-column path below, anything else a plain read_csv)
    :param file_path: csv file to read
    :param encoding: text encoding passed to pandas
    :param logger: unused here, kept for parser-chain interface compatibility
    :param kwargs: forwarded to ``pd.read_csv``
    :return: the parsed DataFrame (or Series)
    """
    if desired_type is not pd.Series:
        return pd.read_csv(file_path, encoding=encoding, **kwargs)

    # Series path: read a row- or column-oriented csv as a one-column frame.
    # squeeze=True only works row-oriented, so instead we default index_col=0
    # and expect a row-oriented frame to be converted downstream by the
    # df-to-series converter.
    if 'index_col' in kwargs:
        one_col_df = pd.read_csv(file_path, encoding=encoding, **kwargs)
    else:
        one_col_df = pd.read_csv(file_path, encoding=encoding, index_col=0, **kwargs)

    if one_col_df.shape[1] != 1:
        raise Exception('Cannot build a series from this csv: it has more than two columns (one index + one value).'
                        ' Probably the parsing chain $read_df_or_series_from_csv => single_row_or_col_df_to_series$'
                        'will work, though.')
    return one_col_df[one_col_df.columns[0]]
[ "def", "read_df_or_series_from_csv", "(", "desired_type", ":", "Type", "[", "pd", ".", "DataFrame", "]", ",", "file_path", ":", "str", ",", "encoding", ":", "str", ",", "logger", ":", "Logger", ",", "*", "*", "kwargs", ")", "->", "pd", ".", "DataFrame", ...
52.060606
34.848485
def project_new_folder(object_id, input_params=None, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/newFolder API method.

    :param object_id: the ``project-xxxx`` ID whose folder tree is modified
    :param input_params: request payload dict (defaults to an empty dict)
    :param always_retry: whether the HTTP layer may retry the request

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Folders-and-Deletion#API-method%3A-%2Fclass-xxxx%2FnewFolder
    """
    # Fix the mutable-default-argument pitfall: the old `input_params={}`
    # shared one module-level dict across every call; build a fresh one here.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/newFolder' % object_id, input_params, always_retry=always_retry, **kwargs)
[ "def", "project_new_folder", "(", "object_id", ",", "input_params", "=", "{", "}", ",", "always_retry", "=", "True", ",", "*", "*", "kwargs", ")", ":", "return", "DXHTTPRequest", "(", "'/%s/newFolder'", "%", "object_id", ",", "input_params", ",", "always_retry...
54.857143
35.714286
def _find_install_targets(name=None,
                          version=None,
                          pkgs=None,
                          sources=None,
                          skip_suggestions=False,
                          pkg_verify=False,
                          normalize=True,
                          ignore_epoch=False,
                          reinstall=False,
                          refresh=False,
                          **kwargs):
    '''
    Inspect the arguments to pkg.installed and discover what packages need to
    be installed. Return a dict of desired packages

    NOTE: on early exit this returns a state-style result dict
    (name/changes/result/comment); on the normal path it returns the 7-tuple
    (desired, targets, to_unpurge, to_reinstall, altered_files, warnings,
    was_refreshed) consumed by the caller.
    '''
    was_refreshed = False

    # pkgs and sources are mutually exclusive ways of naming packages.
    if all((pkgs, sources)):
        return {'name': name,
                'changes': {},
                'result': False,
                'comment': 'Only one of "pkgs" and "sources" is permitted.'}

    # dict for packages that fail pkg.verify and their altered files
    altered_files = {}
    # Get the ignore_types list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) \
            and any(x.get('ignore_types') is not None
                    for x in pkg_verify
                    if isinstance(x, _OrderedDict)
                    and 'ignore_types' in x):
        ignore_types = next(x.get('ignore_types')
                            for x in pkg_verify
                            if 'ignore_types' in x)
    else:
        ignore_types = []

    # Get the verify_options list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) \
            and any(x.get('verify_options') is not None
                    for x in pkg_verify
                    if isinstance(x, _OrderedDict)
                    and 'verify_options' in x):
        verify_options = next(x.get('verify_options')
                              for x in pkg_verify
                              if 'verify_options' in x)
    else:
        verify_options = []

    if __grains__['os'] == 'FreeBSD':
        kwargs['with_origin'] = True

    if salt.utils.platform.is_windows():
        # Windows requires a refresh to establish a pkg db if refresh=True, so
        # add it to the kwargs.
        kwargs['refresh'] = refresh

    resolve_capabilities = kwargs.get('resolve_capabilities', False) and 'pkg.list_provides' in __salt__
    try:
        cur_pkgs = __salt__['pkg.list_pkgs'](versions_as_list=True, **kwargs)
        # and/or short-circuit: an empty dict when capabilities aren't resolved
        cur_prov = resolve_capabilities and __salt__['pkg.list_provides'](**kwargs) or dict()
    except CommandExecutionError as exc:
        return {'name': name,
                'changes': {},
                'result': False,
                'comment': exc.strerror}

    if salt.utils.platform.is_windows() and kwargs.pop('refresh', False):
        # We already refreshed when we called pkg.list_pkgs
        was_refreshed = True
        refresh = False

    if any((pkgs, sources)):
        if pkgs:
            desired = _repack_pkgs(pkgs, normalize=normalize)
        elif sources:
            desired = __salt__['pkg_resource.pack_sources'](
                sources,
                normalize=normalize,
            )

        if not desired:
            # Badly-formatted SLS
            return {'name': name,
                    'changes': {},
                    'result': False,
                    'comment': 'Invalidly formatted \'{0}\' parameter. See '
                               'minion log.'.format('pkgs' if pkgs
                                                    else 'sources')}
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
    else:
        # Single-package invocation via `name` (and optionally `version`).
        if salt.utils.platform.is_windows():
            pkginfo = _get_package_info(name, saltenv=kwargs['saltenv'])
            if not pkginfo:
                return {'name': name,
                        'changes': {},
                        'result': False,
                        'comment': 'Package {0} not found in the '
                                   'repository.'.format(name)}
            if version is None:
                version = _get_latest_pkg_version(pkginfo)

        if normalize:
            _normalize_name = \
                __salt__.get('pkg.normalize_name', lambda pkgname: pkgname)
            desired = {_normalize_name(name): version}
        else:
            desired = {name: version}

        to_unpurge = _find_unpurge_targets(desired, **kwargs)

        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
        origin = bool(re.search('/', name))

        if __grains__['os'] == 'FreeBSD' and origin:
            cver = [k for k, v in six.iteritems(cur_pkgs)
                    if v['origin'] == name]
        else:
            cver = cur_pkgs.get(name, [])

        if name not in to_unpurge:
            if version and version in cver \
                    and not reinstall \
                    and not pkg_verify:
                # The package is installed and is the correct version
                return {'name': name,
                        'changes': {},
                        'result': True,
                        'comment': 'Version {0} of package \'{1}\' is already '
                                   'installed'.format(version, name)}

            # if cver is not an empty string, the package is already installed
            elif cver and version is None \
                    and not reinstall \
                    and not pkg_verify:
                # The package is installed
                return {'name': name,
                        'changes': {},
                        'result': True,
                        'comment': 'Package {0} is already '
                                   'installed'.format(name)}

    version_spec = False
    if not sources:
        # Check for alternate package names if strict processing is not
        # enforced. Takes extra time. Disable for improved performance
        if not skip_suggestions:
            # Perform platform-specific pre-flight checks
            not_installed = dict([
                (name, version)
                for name, version in desired.items()
                if not (name in cur_pkgs and
                        (version is None or
                         _fulfills_version_string(cur_pkgs[name], version)))
            ])
            if not_installed:
                try:
                    problems = _preflight_check(not_installed, **kwargs)
                except CommandExecutionError:
                    # best-effort: suggestions are advisory only
                    pass
                else:
                    comments = []
                    if problems.get('no_suggest'):
                        comments.append(
                            'The following package(s) were not found, and no '
                            'possible matches were found in the package db: '
                            '{0}'.format(
                                ', '.join(sorted(problems['no_suggest']))
                            )
                        )
                    if problems.get('suggest'):
                        for pkgname, suggestions in \
                                six.iteritems(problems['suggest']):
                            comments.append(
                                'Package \'{0}\' not found (possible matches: '
                                '{1})'.format(pkgname, ', '.join(suggestions))
                            )
                    if comments:
                        if len(comments) > 1:
                            comments.append('')
                        return {'name': name,
                                'changes': {},
                                'result': False,
                                'comment': '. '.join(comments).rstrip()}

    # Resolve the latest package version for any packages with "latest" in the
    # package version
    wants_latest = [] \
        if sources \
        else [x for x, y in six.iteritems(desired) if y == 'latest']
    if wants_latest:
        resolved_latest = __salt__['pkg.latest_version'](*wants_latest,
                                                         refresh=refresh,
                                                         **kwargs)
        if len(wants_latest) == 1:
            resolved_latest = {wants_latest[0]: resolved_latest}
        if refresh:
            was_refreshed = True
            refresh = False

        # pkg.latest_version returns an empty string when the package is
        # up-to-date. So check the currently-installed packages. If found, the
        # resolved latest version will be the currently installed one from
        # cur_pkgs. If not found, then the package doesn't exist and the
        # resolved latest version will be None.
        for key in resolved_latest:
            if not resolved_latest[key]:
                if key in cur_pkgs:
                    resolved_latest[key] = cur_pkgs[key][-1]
                else:
                    resolved_latest[key] = None
        # Update the desired versions with the ones we resolved
        desired.update(resolved_latest)

    # Find out which packages will be targeted in the call to pkg.install
    targets = {}
    to_reinstall = {}
    problems = []
    warnings = []
    failed_verify = False
    for package_name, version_string in six.iteritems(desired):
        cver = cur_pkgs.get(package_name, [])
        if resolve_capabilities and not cver and package_name in cur_prov:
            cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])

        # Package not yet installed, so add to targets
        if not cver:
            targets[package_name] = version_string
            continue
        if sources:
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            elif 'lowpkg.bin_pkg_info' not in __salt__:
                continue
            # Metadata parser is available, cache the file and derive the
            # package's name and version
            err = 'Unable to cache {0}: {1}'
            try:
                cached_path = __salt__['cp.cache_file'](version_string,
                                                        saltenv=kwargs['saltenv'])
            except CommandExecutionError as exc:
                problems.append(err.format(version_string, exc))
                continue
            if not cached_path:
                problems.append(err.format(version_string, 'file not found'))
                continue
            elif not os.path.exists(cached_path):
                problems.append('{0} does not exist on minion'.format(version_string))
                continue
            source_info = __salt__['lowpkg.bin_pkg_info'](cached_path)
            if source_info is None:
                warnings.append('Failed to parse metadata for {0}'.format(version_string))
                continue
            else:
                verstr = source_info['version']
        else:
            verstr = version_string
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            if not __salt__['pkg_resource.check_extra_requirements'](package_name, version_string):
                targets[package_name] = version_string
                continue
            # No version specified and pkg is installed
            elif __salt__['pkg_resource.version_clean'](version_string) is None:
                if (not reinstall) and pkg_verify:
                    try:
                        verify_result = __salt__['pkg.verify'](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                continue

        version_fulfilled = False
        allow_updates = bool(not sources and kwargs.get('allow_updates'))
        try:
            version_fulfilled = _fulfills_version_string(cver,
                                                         verstr,
                                                         ignore_epoch=ignore_epoch,
                                                         allow_updates=allow_updates)
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue

        # Compare desired version against installed version.
        version_spec = True
        if not version_fulfilled:
            if reinstall:
                to_reinstall[package_name] = version_string
            else:
                version_conditions = _parse_version_string(version_string)
                if pkg_verify and any(oper == '==' for oper, version in version_conditions):
                    try:
                        verify_result = __salt__['pkg.verify'](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs)
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                else:
                    log.debug(
                        'Current version (%s) did not match desired version '
                        'specification (%s), adding to installation targets',
                        cver, version_string
                    )
                    targets[package_name] = version_string

    if failed_verify:
        problems.append(failed_verify)

    if problems:
        return {'name': name,
                'changes': {},
                'result': False,
                'comment': ' '.join(problems)}

    if not any((targets, to_unpurge, to_reinstall)):
        # All specified packages are installed
        msg = 'All specified packages are already installed{0}'
        msg = msg.format(' and are at the desired version' if
                         version_spec and not sources else '')
        ret = {'name': name,
               'changes': {},
               'result': True,
               'comment': msg}
        if warnings:
            ret.setdefault('warnings', []).extend(warnings)
        return ret

    return (desired, targets, to_unpurge, to_reinstall, altered_files,
            warnings, was_refreshed)
[ "def", "_find_install_targets", "(", "name", "=", "None", ",", "version", "=", "None", ",", "pkgs", "=", "None", ",", "sources", "=", "None", ",", "skip_suggestions", "=", "False", ",", "pkg_verify", "=", "False", ",", "normalize", "=", "True", ",", "ign...
41.385965
18.836257
def main():
    '''main is the entrypoint to the sregistry client. The flow works to first
       to determine the subparser in use based on the command. The command then
       imports the correct main (files imported in this folder) associated
       with the action of choice. When the client is imported, it is actually
       importing a return of the function get_client() under sregistry/main,
       which plays the job of "sniffing" the environment to determine what
       flavor of client the user wants to activate. Installed within a
       singularity image, this start up style maps well to Standard Container
       Integration Format (SCIF) apps, where each client is a different
       entrypoint activated based on the environment variables.
    '''
    from sregistry.main import Client as cli

    parser = get_parser()
    subparsers = get_subparsers(parser)

    def help(return_code=0):
        '''print help, including the software version and active client
           and exit with return code.
        '''
        version = sregistry.__version__
        name = cli.client_name
        print("\nSingularity Registry Global Client v%s [%s]" % (version, name))
        parser.print_help()
        sys.exit(return_code)

    # If the user didn't provide any arguments, show the full help
    if len(sys.argv) == 1:
        help()

    try:
        args = parser.parse_args()
    except:  # NOTE(review): bare except — silently exits 0 on ANY parse failure
        sys.exit(0)

    # NOTE(review): this sets DEBUG-level messages when --debug was NOT given;
    # the condition looks inverted — confirm the intended default.
    if args.debug is False:
        os.environ['MESSAGELEVEL'] = "DEBUG"

    # Show the version and exit
    if args.command == "version":
        print(sregistry.__version__)
        sys.exit(0)

    from sregistry.logger import bot

    # Does the user want a shell?
    # Dispatch: each command re-binds `main` to that subcommand's entrypoint.
    if args.command == "add":
        from .add import main
    elif args.command == "backend":
        from .backend import main
    elif args.command == "build":
        from .build import main
    elif args.command == "get":
        from .get import main
    elif args.command == "delete":
        from .delete import main
    elif args.command == "inspect":
        from .inspect import main
    elif args.command == "images":
        from .images import main
    elif args.command == "labels":
        from .labels import main
    elif args.command == "mv":
        from .mv import main
    elif args.command == "push":
        from .push import main
    elif args.command == "pull":
        from .pull import main
    elif args.command == "rename":
        from .rename import main
    elif args.command == "rm":
        from .rm import main
    elif args.command == "rmi":
        from .rmi import main
    elif args.command == "search":
        from .search import main
    elif args.command == "share":
        from .share import main
    elif args.command == "shell":
        from .shell import main

    # Pass on to the correct parser
    return_code = 0
    try:
        main(args=args, parser=parser, subparser=subparsers[args.command])
        sys.exit(return_code)
    except UnboundLocalError:
        # No branch above imported `main`, i.e. the command was unrecognized.
        return_code = 1

    help(return_code)
[ "def", "main", "(", ")", ":", "from", "sregistry", ".", "main", "import", "Client", "as", "cli", "parser", "=", "get_parser", "(", ")", "subparsers", "=", "get_subparsers", "(", "parser", ")", "def", "help", "(", "return_code", "=", "0", ")", ":", "'''...
37.012987
21.584416
def dump(self, filename):
    """
    Dumps statistics.

    @param filename: filename where stats will be dumped, filename is
        created and must not exist prior to this call.
    @type filename: string
    """
    # O_EXCL guarantees we never clobber an existing file; O_NOFOLLOW refuses
    # to write through a symlink (defends against symlink attacks).
    flags = os.O_WRONLY | os.O_CREAT | os.O_NOFOLLOW | os.O_EXCL
    # 0600 was a Python-2-only octal literal; 0o600 is the portable spelling.
    fd = os.open(filename, flags, 0o600)
    try:
        # os.write() requires bytes on Python 3; the original passed str.
        os.write(fd, str(self).encode('utf-8'))
    finally:
        # Always release the descriptor, even if the write fails (old code
        # leaked the fd on a write error).
        os.close(fd)
[ "def", "dump", "(", "self", ",", "filename", ")", ":", "flags", "=", "os", ".", "O_WRONLY", "|", "os", ".", "O_CREAT", "|", "os", ".", "O_NOFOLLOW", "|", "os", ".", "O_EXCL", "fd", "=", "os", ".", "open", "(", "filename", ",", "flags", ",", "0600...
33.5
16.5
def get_link_density(node, node_text=None):
    """
    Computes the ratio for text in given node and text in links contained in
    the node. It is computed from number of characters in the texts.

    :parameter Element node: HTML element in which links density is computed.
    :parameter string node_text: Text content of given node if it was obtained
        before.
    :returns float: Returns value of computed 0 <= density <= 1, where 0 means
        no links and 1 means that node contains only links.
    """
    text = node.text_content() if node_text is None else node_text
    text = normalize_whitespace(text.strip())
    total_chars = len(text)
    if not total_chars:
        return 0.0

    anchor_chars = sum(_get_normalized_text_length(a) for a in node.findall(".//a"))
    # Give 50 bonus chars worth of length for each img.
    # Tweaking this 50 down a notch should help if we hit false positives.
    image_bonus = 50 * len(node.findall(".//img"))
    anchor_chars = max(0, anchor_chars - image_bonus)

    return anchor_chars / total_chars
[ "def", "get_link_density", "(", "node", ",", "node_text", "=", "None", ")", ":", "if", "node_text", "is", "None", ":", "node_text", "=", "node", ".", "text_content", "(", ")", "node_text", "=", "normalize_whitespace", "(", "node_text", ".", "strip", "(", "...
36.724138
18.172414
def get_links(html, outformat):
    """Return a list of reference links from the html.

    Parameters
    ----------
    html : str
    outformat : int
        the output format of the citations

    Returns
    -------
    List[str]
        the links to the references
    """
    # Pick the capture pattern that matches the requested citation format.
    if outformat == FORMAT_BIBTEX:
        pattern = re.compile(r'<a href="https://scholar.googleusercontent.com(/scholar\.bib\?[^"]*)')
    elif outformat == FORMAT_ENDNOTE:
        pattern = re.compile(r'<a href="https://scholar.googleusercontent.com(/scholar\.enw\?[^"]*)"')
    elif outformat == FORMAT_REFMAN:
        pattern = re.compile(r'<a href="https://scholar.googleusercontent.com(/scholar\.ris\?[^"]*)"')
    elif outformat == FORMAT_WENXIANWANG:
        pattern = re.compile(r'<a href="https://scholar.googleusercontent.com(/scholar\.ral\?[^"]*)"')

    raw_links = pattern.findall(html)
    # escape html entities
    entity_pattern = '&(%s);' % '|'.join(name2codepoint)
    decode_entity = lambda m: chr(name2codepoint[m.group(1)])
    return [re.sub(entity_pattern, decode_entity, link) for link in raw_links]
[ "def", "get_links", "(", "html", ",", "outformat", ")", ":", "if", "outformat", "==", "FORMAT_BIBTEX", ":", "refre", "=", "re", ".", "compile", "(", "r'<a href=\"https://scholar.googleusercontent.com(/scholar\\.bib\\?[^\"]*)'", ")", "elif", "outformat", "==", "FORMAT_...
36.857143
24.25
def minimac(args):
    """
    %prog batchminimac input.txt

    Use MINIMAC3 to impute vcf on all chromosomes.
    """
    p = OptionParser(minimac.__doc__)
    p.set_home("shapeit")
    p.set_home("minimac")
    p.set_outfile()
    p.set_chr()
    p.set_ref()
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    txtfile, = args
    ref = opts.ref
    # MakeManager accumulates shell commands as a Makefile-style dependency
    # graph; nothing is executed until mm.write() emits it.
    mm = MakeManager()
    pf = txtfile.split(".")[0]
    allrawvcf = []
    alloutvcf = []
    chrs = opts.chr.split(",")
    for x in chrs:
        px = CM[x]
        chrvcf = pf + ".{0}.vcf".format(px)
        # Step 1: extract this chromosome into its own vcf.
        if txtfile.endswith(".vcf"):
            cmd = "vcftools --vcf {0} --chr {1}".format(txtfile, x)
            cmd += " --out {0}.{1} --recode".format(pf, px)
            cmd += " && mv {0}.{1}.recode.vcf {2}".format(pf, px, chrvcf)
        else:  # 23andme
            cmd = "python -m jcvi.formats.vcf from23andme {0} {1}".format(txtfile, x)
            cmd += " --ref {0}".format(ref)
        mm.add(txtfile, chrvcf, cmd)

        # Step 2: lift over the raw per-chromosome vcf to hg38.
        chrvcf_hg38 = pf + ".{0}.23andme.hg38.vcf".format(px)
        minimac_liftover(mm, chrvcf, chrvcf_hg38, opts)
        allrawvcf.append(chrvcf_hg38)

        # Step 3: impute — X, Y/MT, and autosomes take different paths.
        minimacvcf = "{0}.{1}.minimac.dose.vcf".format(pf, px)
        if x == "X":
            minimac_X(mm, x, chrvcf, opts)
        elif x in ["Y", "MT"]:
            cmd = "python -m jcvi.variation.impute passthrough"
            cmd += " {0} {1}".format(chrvcf, minimacvcf)
            mm.add(chrvcf, minimacvcf, cmd)
        else:
            minimac_autosome(mm, x, chrvcf, opts)

        # keep the best line for multi-allelic markers
        uniqvcf = "{0}.{1}.minimac.uniq.vcf".format(pf, px)
        cmd = "python -m jcvi.formats.vcf uniq {0} > {1}".format(minimacvcf, uniqvcf)
        mm.add(minimacvcf, uniqvcf, cmd)

        # Step 4: lift over the imputed vcf to hg38 as well.
        minimacvcf_hg38 = "{0}.{1}.minimac.hg38.vcf".format(pf, px)
        minimac_liftover(mm, uniqvcf, minimacvcf_hg38, opts)
        alloutvcf.append(minimacvcf_hg38)

    # Concatenate per-chromosome outputs only when more than one was built.
    if len(allrawvcf) > 1:
        rawhg38vcfgz = pf + ".all.23andme.hg38.vcf.gz"
        cmd = "vcf-concat {0} | bgzip > {1}".format(" ".join(allrawvcf), rawhg38vcfgz)
        mm.add(allrawvcf, rawhg38vcfgz, cmd)

    if len(alloutvcf) > 1:
        outhg38vcfgz = pf + ".all.minimac.hg38.vcf.gz"
        cmd = "vcf-concat {0} | bgzip > {1}".format(" ".join(alloutvcf), outhg38vcfgz)
        mm.add(alloutvcf, outhg38vcfgz, cmd)

    mm.write()
[ "def", "minimac", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "minimac", ".", "__doc__", ")", "p", ".", "set_home", "(", "\"shapeit\"", ")", "p", ".", "set_home", "(", "\"minimac\"", ")", "p", ".", "set_outfile", "(", ")", "p", ".", "set_ch...
33.430556
19.125
def hook_key(key, callback, suppress=False):
    """
    Hooks key up and key down events for a single key. Returns the event
    handler created. To remove a hooked key use `unhook_key(key)` or
    `unhook_key(handler)`.

    Note: this function shares state with hotkeys, so `clear_all_hotkeys`
    affects it aswell.
    """
    _listener.start_if_necessary()
    if suppress:
        table = _listener.blocking_keys
    else:
        table = _listener.nonblocking_keys
    codes = key_to_scan_codes(key)
    for code in codes:
        table[code].append(callback)

    def remove_():
        # Drop every registry alias, then detach the callback per scan code.
        del _hooks[callback]
        del _hooks[key]
        del _hooks[remove_]
        for code in codes:
            table[code].remove(callback)

    # Register the remover under all three handles so any of them can unhook.
    _hooks[callback] = _hooks[key] = _hooks[remove_] = remove_
    return remove_
[ "def", "hook_key", "(", "key", ",", "callback", ",", "suppress", "=", "False", ")", ":", "_listener", ".", "start_if_necessary", "(", ")", "store", "=", "_listener", ".", "blocking_keys", "if", "suppress", "else", "_listener", ".", "nonblocking_keys", "scan_co...
34.826087
16.565217
def WriteEventBody(self, event):
    """Writes the body of an event to the output.

    Args:
      event (EventObject): event.
    """
    formatted = NativePythonFormatterHelper.GetFormattedEventObject(event)
    self._output_writer.Write(formatted)
[ "def", "WriteEventBody", "(", "self", ",", "event", ")", ":", "output_string", "=", "NativePythonFormatterHelper", ".", "GetFormattedEventObject", "(", "event", ")", "self", ".", "_output_writer", ".", "Write", "(", "output_string", ")" ]
31.5
16
def _cleanly_slice_encoded_string(encoded_string, length_limit): """ Takes a byte string (a UTF-8 encoded string) and splits it into two pieces such that the first slice is no longer than argument `length_limit`, then returns a tuple containing the first slice and remainder of the byte string, respectively. The first slice may actually be shorter than `length_limit`, because this ensures that the string does not get split in the middle of a multi-byte character. This works because the first byte in a multi-byte unicode character encodes how many bytes compose that character, so we can determine empirically if we are splitting in the middle of the character and correct for that. You can read more about how this works here: https://en.wikipedia.org/wiki/UTF-8#Description :param encoded_string: The encoded string to split in two :param length_limit: The maximum length allowed for the first slice of the string :return: A tuple of (slice, remaining) """ sliced, remaining = encoded_string[:length_limit], encoded_string[length_limit:] try: sliced.decode('utf-8') except UnicodeDecodeError as e: sliced, remaining = sliced[:e.start], sliced[e.start:] + remaining return sliced, remaining
[ "def", "_cleanly_slice_encoded_string", "(", "encoded_string", ",", "length_limit", ")", ":", "sliced", ",", "remaining", "=", "encoded_string", "[", ":", "length_limit", "]", ",", "encoded_string", "[", "length_limit", ":", "]", "try", ":", "sliced", ".", "deco...
55.666667
37.25
def validate_ldap(self):
    """Validate the username/password data against ldap directory.

    On success the matching user record is stored on ``self.user``; on
    failure ``self.user`` is cleared and field errors are recorded on both
    the username and password fields.

    :return: True when LDAP authentication succeeded, False otherwise.
    """
    # BUG FIX: the summary above used to sit as a bare (no-op) string
    # statement AFTER the logging call, so the method had no docstring.
    logging.debug('Validating LDAPLoginForm against LDAP')
    ldap_mgr = current_app.ldap3_login_manager
    username = self.username.data
    password = self.password.data
    result = ldap_mgr.authenticate(username, password)
    if result.status == AuthenticationResponseStatus.success:
        self.user = ldap_mgr._save_user(
            result.user_dn,
            result.user_id,
            result.user_info,
            result.user_groups
        )
        return True
    else:
        self.user = None
        self.username.errors.append('Invalid Username/Password.')
        self.password.errors.append('Invalid Username/Password.')
        return False
[ "def", "validate_ldap", "(", "self", ")", ":", "logging", ".", "debug", "(", "'Validating LDAPLoginForm against LDAP'", ")", "ldap_mgr", "=", "current_app", ".", "ldap3_login_manager", "username", "=", "self", ".", "username", ".", "data", "password", "=", "self",...
35.347826
19
def initialize_concept_scheme(rdf, cs, label, language, set_modified):
    """Initialize a concept scheme.

    If the scheme carries no rdfs:label or skos:prefLabel, add *label*
    (tagged with *language*) when one was supplied, warning otherwise.
    When *set_modified* is true, replace any dct:modified value with the
    current UTC timestamp.
    """
    # Gather every existing label so we only add one when none exist.
    existing_labels = list(rdf.objects(cs, RDFS.label))
    existing_labels += list(rdf.objects(cs, SKOS.prefLabel))
    if not existing_labels:
        if not label:
            logging.warning(
                "Concept scheme has no label(s). "
                "Use --label option to set the concept scheme label.")
        else:
            logging.info(
                "Unlabeled concept scheme detected. Setting label to '%s'" %
                label)
            rdf.add((cs, RDFS.label, Literal(label, language)))

    if set_modified:
        # ISO-8601 UTC timestamp, seconds precision, explicit 'Z' suffix.
        curdate = datetime.datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
        rdf.remove((cs, DCTERMS.modified, None))
        rdf.add((cs, DCTERMS.modified, Literal(curdate, datatype=XSD.dateTime)))
[ "def", "initialize_concept_scheme", "(", "rdf", ",", "cs", ",", "label", ",", "language", ",", "set_modified", ")", ":", "# check whether the concept scheme is unlabeled, and label it if possible", "labels", "=", "list", "(", "rdf", ".", "objects", "(", "cs", ",", "...
44.478261
22.043478
def get_last_modified_unix_sec():
    """Return the last-modified unix timestamp of the file named by the
    ``path`` query argument, or a client error when it is missing/unreadable."""
    path = request.args.get("path")
    # Guard clause: bail out early when no usable file path was supplied.
    if not (path and os.path.isfile(path)):
        return client_error({"message": "File not found: %s" % path, "path": path})
    try:
        mtime = os.path.getmtime(path)
        return jsonify({"path": path, "last_modified_unix_sec": mtime})
    except Exception as e:
        # Surface stat failures (permissions, races) as a client error.
        return client_error({"message": "%s" % e, "path": path})
[ "def", "get_last_modified_unix_sec", "(", ")", ":", "path", "=", "request", ".", "args", ".", "get", "(", "\"path\"", ")", "if", "path", "and", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "try", ":", "last_modified", "=", "os", ".", "pat...
38
22.153846
def _from_p12_keyfile_contents(cls, service_account_email, private_key_pkcs12,
                               private_key_password=None, scopes='',
                               token_uri=oauth2client.GOOGLE_TOKEN_URI,
                               revoke_uri=oauth2client.GOOGLE_REVOKE_URI):
    """Factory constructor from in-memory PKCS#12 key material.

    Args:
        service_account_email: string, The email associated with the
                               service account.
        private_key_pkcs12: string, The contents of a PKCS#12 keyfile.
        private_key_password: string, (Optional) Password for the PKCS#12
                              private key. Defaults to ``notasecret``.
        scopes: List or string, (Optional) Scopes to use when acquiring
                an access token.
        token_uri: string, URI for the token endpoint.
        revoke_uri: string, URI for the revoke endpoint.

    Returns:
        ServiceAccountCredentials, a credentials object created from
        the keyfile.

    Raises:
        NotImplementedError if pyOpenSSL is not installed / not the
        active crypto library.
    """
    # PKCS#12 handling requires the pyOpenSSL-backed signer.
    if crypt.Signer is not crypt.OpenSSLSigner:
        raise NotImplementedError(_PKCS12_ERROR)
    password = (_PASSWORD_DEFAULT if private_key_password is None
                else private_key_password)
    signer = crypt.Signer.from_string(private_key_pkcs12, password)
    credentials = cls(service_account_email, signer, scopes=scopes,
                      token_uri=token_uri, revoke_uri=revoke_uri)
    # Stash the raw key material so the credentials can be re-serialized.
    credentials._private_key_pkcs12 = private_key_pkcs12
    credentials._private_key_password = password
    return credentials
[ "def", "_from_p12_keyfile_contents", "(", "cls", ",", "service_account_email", ",", "private_key_pkcs12", ",", "private_key_password", "=", "None", ",", "scopes", "=", "''", ",", "token_uri", "=", "oauth2client", ".", "GOOGLE_TOKEN_URI", ",", "revoke_uri", "=", "oau...
50.731707
23.463415
def name(self):
    """Array name following the h5py convention (always a leading slash),
    or None when no path is set."""
    if not self.path:
        return None
    path = self.path
    # h5py names are absolute: prefix a slash when missing.
    return path if path[0] == '/' else '/' + path
[ "def", "name", "(", "self", ")", ":", "if", "self", ".", "path", ":", "# follow h5py convention: add leading slash", "name", "=", "self", ".", "path", "if", "name", "[", "0", "]", "!=", "'/'", ":", "name", "=", "'/'", "+", "name", "return", "name", "re...
30.555556
14
def _set_ldp_params(self, v, load=False):
    """
    Setter method for ldp_params, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface/ldp_params (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ldp_params is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ldp_params() directly.
    """
    # NOTE(review): auto-generated pyangbind setter — keep edits mechanical.
    # Let a wrapped value coerce itself before we build the container type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the supplied value in the generated YANG container class,
        # carrying the YANG metadata/extensions required by the backend.
        t = YANGDynClass(v,base=ldp_params.ldp_params, is_container='container', presence=False, yang_name="ldp-params", rest_name="ldp-params", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure LDP parameters', u'cli-full-command': None, u'cli-full-no': None, u'cli-add-mode': None, u'cli-mode-name': u'config-router-mpls-interface-$(interface-name)-ldp-params'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise as ValueError with the structured error payload the
        # generated API contract expects.
        raise ValueError({
            'error-string': """ldp_params must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=ldp_params.ldp_params, is_container='container', presence=False, yang_name="ldp-params", rest_name="ldp-params", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure LDP parameters', u'cli-full-command': None, u'cli-full-no': None, u'cli-add-mode': None, u'cli-mode-name': u'config-router-mpls-interface-$(interface-name)-ldp-params'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })

    self.__ldp_params = t
    # Notify the parent object (when present) that a child changed.
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_ldp_params", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "bas...
87.909091
42
def upd_doc(self, doc, index_update=True, label_guesser_update=True):
    """
    Update a modified document in the search index and/or label guesser.
    The index writer and label-guesser updater are created lazily on
    first use and reused afterwards.
    """
    if index_update and not self.index_writer:
        self.index_writer = self.index.writer()
    if label_guesser_update and not self.label_guesser_updater:
        self.label_guesser_updater = self.label_guesser.get_updater()
    logger.info("Updating modified doc: %s" % doc)
    if index_update:
        self._update_doc_in_index(self.index_writer, doc)
    if label_guesser_update:
        self.label_guesser_updater.upd_doc(doc)
[ "def", "upd_doc", "(", "self", ",", "doc", ",", "index_update", "=", "True", ",", "label_guesser_update", "=", "True", ")", ":", "if", "not", "self", ".", "index_writer", "and", "index_update", ":", "self", ".", "index_writer", "=", "self", ".", "index", ...
45.538462
14
def set(self, level=None):
    """
    Set the default log level

    If the level is not specified environment variable DEBUG is used
    with the following meaning::

        DEBUG=0 ... LOG_WARN (default)
        DEBUG=1 ... LOG_INFO
        DEBUG=2 ... LOG_DEBUG
        DEBUG=3 ... LOG_DETAILS
        DEBUG=4 ... LOG_DATA
        DEBUG=5 ... LOG_ALL (log all messages)
    """
    # If level specified, use given
    if level is not None:
        Logging._level = level
    # Otherwise attempt to detect from the environment
    else:
        try:
            Logging._level = Logging.MAPPING[int(os.environ["DEBUG"])]
        # BUG FIX: the original caught `StandardError`, which exists only
        # in Python 2 (NameError on Python 3). Catch the concrete failure
        # modes instead: DEBUG unset (KeyError), DEBUG not an integer
        # (ValueError), or a level outside MAPPING (LookupError covers
        # both KeyError and IndexError).
        except (ValueError, LookupError):
            Logging._level = logging.WARN
    self.logger.setLevel(Logging._level)
[ "def", "set", "(", "self", ",", "level", "=", "None", ")", ":", "# If level specified, use given", "if", "level", "is", "not", "None", ":", "Logging", ".", "_level", "=", "level", "# Otherwise attempt to detect from the environment", "else", ":", "try", ":", "Lo...
33.25
13.083333
def visual_search( self, accept_language=None, content_type=None, user_agent=None, client_id=None, client_ip=None, location=None, market=None, safe_search=None, set_lang=None, knowledge_request=None, image=None, custom_headers=None, raw=False, **operation_config): """Visual Search API lets you discover insights about an image such as visually similar images, shopping sources, and related searches. The API can also perform text recognition, identify entities (people, places, things), return other topical content for the user to explore, and more. For more information, see [Visual Search Overview](https://docs.microsoft.com/azure/cognitive-services/bing-visual-search/overview). :param accept_language: A comma-delimited list of one or more languages to use for user interface strings. The list is in decreasing order of preference. For additional information, including expected format, see [RFC2616](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). This header and the [setLang](https://docs.microsoft.com/en-us/rest/api/cognitiveservices/bing-visual-search-api-v7-reference#setlang) query parameter are mutually exclusive; do not specify both. If you set this header, you must also specify the [cc](https://docs.microsoft.com/en-us/rest/api/cognitiveservices/bing-visual-search-api-v7-reference#cc) query parameter. To determine the market to return results for, Bing uses the first supported language it finds from the list and combines it with the cc parameter value. If the list does not include a supported language, Bing finds the closest language and market that supports the request or it uses an aggregated or default market for the results. To determine the market that Bing used, see the BingAPIs-Market header. Use this header and the cc query parameter only if you specify multiple languages. 
Otherwise, use the [mkt](https://docs.microsoft.com/en-us/rest/api/cognitiveservices/bing-visual-search-api-v7-reference#mkt) and [setLang](https://docs.microsoft.com/en-us/rest/api/cognitiveservices/bing-visual-search-api-v7-reference#setlang) query parameters. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Any links to Bing.com properties in the response objects apply the specified language. :type accept_language: str :param content_type: Must be set to multipart/form-data and include a boundary parameter (for example, multipart/form-data; boundary=<boundary string>). For more details, see [Content form types]( https://docs.microsoft.com/en-us/azure/cognitive-services/bing-visual-search/overview#content-form-types). :type content_type: str :param user_agent: The user agent originating the request. Bing uses the user agent to provide mobile users with an optimized experience. Although optional, you are encouraged to always specify this header. The user-agent should be the same string that any commonly used browser sends. For information about user agents, see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). The following are examples of user-agent strings. Windows Phone: Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 822). Android: Mozilla / 5.0 (Linux; U; Android 2.3.5; en - us; SCH - I500 Build / GINGERBREAD) AppleWebKit / 533.1 (KHTML; like Gecko) Version / 4.0 Mobile Safari / 533.1. iPhone: Mozilla / 5.0 (iPhone; CPU iPhone OS 6_1 like Mac OS X) AppleWebKit / 536.26 (KHTML; like Gecko) Mobile / 10B142 iPhone4; 1 BingWeb / 3.03.1428.20120423. PC: Mozilla / 5.0 (Windows NT 6.3; WOW64; Trident / 7.0; Touch; rv:11.0) like Gecko. iPad: Mozilla / 5.0 (iPad; CPU OS 7_0 like Mac OS X) AppleWebKit / 537.51.1 (KHTML, like Gecko) Version / 7.0 Mobile / 11A465 Safari / 9537.53. 
:type user_agent: str :param client_id: Bing uses this header to provide users with consistent behavior across Bing API calls. Bing often flights new features and improvements, and it uses the client ID as a key for assigning traffic on different flights. If you do not use the same client ID for a user across multiple requests, then Bing may assign the user to multiple conflicting flights. Being assigned to multiple conflicting flights can lead to an inconsistent user experience. For example, if the second request has a different flight assignment than the first, the experience may be unexpected. Also, Bing can use the client ID to tailor web results to that client ID’s search history, providing a richer experience for the user. Bing also uses this header to help improve result rankings by analyzing the activity generated by a client ID. The relevance improvements help with better quality of results delivered by Bing APIs and in turn enables higher click-through rates for the API consumer. IMPORTANT: Although optional, you should consider this header required. Persisting the client ID across multiple requests for the same end user and device combination enables 1) the API consumer to receive a consistent user experience, and 2) higher click-through rates via better quality of results from the Bing APIs. Each user that uses your application on the device must have a unique, Bing generated client ID. If you do not include this header in the request, Bing generates an ID and returns it in the X-MSEdge-ClientID response header. The only time that you should NOT include this header in a request is the first time the user uses your app on that device. Use the client ID for each Bing API request that your app makes for this user on the device. Persist the client ID. To persist the ID in a browser app, use a persistent HTTP cookie to ensure the ID is used across all sessions. Do not use a session cookie. 
For other apps such as mobile apps, use the device's persistent storage to persist the ID. The next time the user uses your app on that device, get the client ID that you persisted. Bing responses may or may not include this header. If the response includes this header, capture the client ID and use it for all subsequent Bing requests for the user on that device. ATTENTION: You must ensure that this Client ID is not linkable to any authenticatable user account information. If you include the X-MSEdge-ClientID, you must not include cookies in the request. :type client_id: str :param client_ip: The IPv4 or IPv6 address of the client device. The IP address is used to discover the user's location. Bing uses the location information to determine safe search behavior. Although optional, you are encouraged to always specify this header and the X-Search-Location header. Do not obfuscate the address (for example, by changing the last octet to 0). Obfuscating the address results in the location not being anywhere near the device's actual location, which may result in Bing serving erroneous results. :type client_ip: str :param location: A semicolon-delimited list of key/value pairs that describe the client's geographical location. Bing uses the location information to determine safe search behavior and to return relevant local content. Specify the key/value pair as <key>:<value>. The following are the keys that you use to specify the user's location. lat (required): The latitude of the client's location, in degrees. The latitude must be greater than or equal to -90.0 and less than or equal to +90.0. Negative values indicate southern latitudes and positive values indicate northern latitudes. long (required): The longitude of the client's location, in degrees. The longitude must be greater than or equal to -180.0 and less than or equal to +180.0. Negative values indicate western longitudes and positive values indicate eastern longitudes. 
re (required): The radius, in meters, which specifies the horizontal accuracy of the coordinates. Pass the value returned by the device's location service. Typical values might be 22m for GPS/Wi-Fi, 380m for cell tower triangulation, and 18,000m for reverse IP lookup. ts (optional): The UTC UNIX timestamp of when the client was at the location. (The UNIX timestamp is the number of seconds since January 1, 1970.) head (optional): The client's relative heading or direction of travel. Specify the direction of travel as degrees from 0 through 360, counting clockwise relative to true north. Specify this key only if the sp key is nonzero. sp (optional): The horizontal velocity (speed), in meters per second, that the client device is traveling. alt (optional): The altitude of the client device, in meters. are (optional): The radius, in meters, that specifies the vertical accuracy of the coordinates. Specify this key only if you specify the alt key. Although many of the keys are optional, the more information that you provide, the more accurate the location results are. Although optional, you are encouraged to always specify the user's geographical location. Providing the location is especially important if the client's IP address does not accurately reflect the user's physical location (for example, if the client uses VPN). For optimal results, you should include this header and the X-MSEdge-ClientIP header, but at a minimum, you should include this header. :type location: str :param market: The market where the results come from. Typically, mkt is the country where the user is making the request from. However, it could be a different country if the user is not located in a country where Bing delivers results. The market must be in the form <language code>-<country code>. For example, en-US. The string is case insensitive. 
For a list of possible market values, see [Market Codes](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-visual-search/supported-countries-markets). NOTE: If known, you are encouraged to always specify the market. Specifying the market helps Bing route the request and return an appropriate and optimal response. If you specify a market that is not listed in [Market Codes](https://docs.microsoft.com/en-us/azure/cognitive-services/bing-visual-search/supported-countries-markets), Bing uses a best fit market code based on an internal mapping that is subject to change. :type market: str :param safe_search: Filter the image results in actions with type 'VisualSearch' for adult content. The following are the possible filter values. Off: May return images with adult content. Moderate: Do not return images with adult content. Strict: Do not return images with adult content. The default is Moderate. If the request comes from a market that Bing's adult policy requires that safeSearch is set to Strict, Bing ignores the safeSearch value and uses Strict. If you use the site: filter in the knowledge request, there is the chance that the response may contain adult content regardless of what the safeSearch query parameter is set to. Use site: only if you are aware of the content on the site and your scenario supports the possibility of adult content. Possible values include: 'Off', 'Moderate', 'Strict' :type safe_search: str or ~azure.cognitiveservices.search.visualsearch.models.SafeSearch :param set_lang: The language to use for user interface strings. Specify the language using the ISO 639-1 2-letter language code. For example, the language code for English is EN. The default is EN (English). Although optional, you should always specify the language. Typically, you set setLang to the same language specified by mkt unless the user wants the user interface strings displayed in a different language. 
A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Also, any links to Bing.com properties in the response objects apply the specified language. :type set_lang: str :param knowledge_request: The form data is a JSON object that identifies the image using an insights token or URL to the image. The object may also include an optional crop area that identifies an area of interest in the image. The insights token and URL are mutually exclusive – do not specify both. You may specify knowledgeRequest form data and image form data in the same request only if knowledgeRequest form data specifies the cropArea field only (it must not include an insights token or URL). :type knowledge_request: str :param image: The form data is an image binary. The Content-Disposition header's name parameter must be set to "image". You must specify an image binary if you do not use knowledgeRequest form data to specify the image; you may not use both forms to specify an image. You may specify knowledgeRequest form data and image form data in the same request only if knowledgeRequest form data specifies the cropArea field only (it must not include an insights token or URL). :type image: Generator :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: ImageKnowledge or ClientRawResponse if raw=true :rtype: ~azure.cognitiveservices.search.visualsearch.models.ImageKnowledge or ~msrest.pipeline.ClientRawResponse :raises: :class:`ErrorResponseException<azure.cognitiveservices.search.visualsearch.models.ErrorResponseException>` """ # Construct URL url = self.visual_search.metadata['url'] path_format_arguments = { 'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} if market is not None: query_parameters['mkt'] = self._serialize.query("market", market, 'str') if safe_search is not None: query_parameters['safeSearch'] = self._serialize.query("safe_search", safe_search, 'str') if set_lang is not None: query_parameters['setLang'] = self._serialize.query("set_lang", set_lang, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'multipart/form-data' if custom_headers: header_parameters.update(custom_headers) header_parameters['X-BingApis-SDK'] = self._serialize.header("self.x_bing_apis_sdk", self.x_bing_apis_sdk, 'str') if accept_language is not None: header_parameters['Accept-Language'] = self._serialize.header("accept_language", accept_language, 'str') if content_type is not None: header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') if user_agent is not None: header_parameters['User-Agent'] = self._serialize.header("user_agent", user_agent, 'str') if client_id is not None: header_parameters['X-MSEdge-ClientID'] = self._serialize.header("client_id", client_id, 'str') if client_ip is not None: header_parameters['X-MSEdge-ClientIP'] = self._serialize.header("client_ip", client_ip, 'str') if location is not None: header_parameters['X-Search-Location'] = self._serialize.header("location", location, 'str') # Construct form data 
form_data_content = { 'knowledgeRequest': knowledge_request, 'image': image, } # Construct and send request request = self._client.post(url, query_parameters, header_parameters, form_content=form_data_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('ImageKnowledge', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
[ "def", "visual_search", "(", "self", ",", "accept_language", "=", "None", ",", "content_type", "=", "None", ",", "user_agent", "=", "None", ",", "client_id", "=", "None", ",", "client_ip", "=", "None", ",", "location", "=", "None", ",", "market", "=", "N...
65.407407
33.011111
def create(self, pools):
    """Create server pools in the database.

    :param pools: List containing pool's desired to be created on database
    :return: None
    """
    payload = {'server_pools': pools}
    return super(ApiPool, self).post('api/v3/pool/', payload)
[ "def", "create", "(", "self", ",", "pools", ")", ":", "data", "=", "{", "'server_pools'", ":", "pools", "}", "return", "super", "(", "ApiPool", ",", "self", ")", ".", "post", "(", "'api/v3/pool/'", ",", "data", ")" ]
27.6
18.6
def _log(message): """ Logs a message. :param str message: The log message. :rtype: None """ # @todo Replace with log package. print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + ' ' + str(message), flush=True)
[ "def", "_log", "(", "message", ")", ":", "# @todo Replace with log package.", "print", "(", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", ")", ")", "+", "' '", "+", "str", "(", "message", ")", ",", "flush", "=", ...
26.9
20.5
def allow_unregister(self, plugin_override=True):
    """Return True when students can unregister from the course.

    Plugins get first say via the 'course_allow_unregister' hook; when no
    plugin answers (or *plugin_override* is False) the course's own
    setting is used.
    """
    hook_results = self._hook_manager.call_hook(
        'course_allow_unregister', course=self,
        default=self._allow_unregister)
    if hook_results and plugin_override:
        return hook_results[0]
    return self._allow_unregister
[ "def", "allow_unregister", "(", "self", ",", "plugin_override", "=", "True", ")", ":", "vals", "=", "self", ".", "_hook_manager", ".", "call_hook", "(", "'course_allow_unregister'", ",", "course", "=", "self", ",", "default", "=", "self", ".", "_allow_unregist...
78.5
31.75
def from_str(cls, s):
    """Parse *s* and build an import object from the first parsed statement.

    Raises AssertionError when the statement is not of the AST node type
    this class expects.
    """
    node = ast.parse(s).body[0]
    if not isinstance(node, cls._expected_ast_type):
        raise AssertionError(
            'Expected ast of type {!r} but got {!r}'.format(
                cls._expected_ast_type, node
            )
        )
    return cls(node)
[ "def", "from_str", "(", "cls", ",", "s", ")", ":", "ast_obj", "=", "ast", ".", "parse", "(", "s", ")", ".", "body", "[", "0", "]", "if", "not", "isinstance", "(", "ast_obj", ",", "cls", ".", "_expected_ast_type", ")", ":", "raise", "AssertionError", ...
36.090909
13.636364
def repo(
    state, host, source, target,
    branch='master', pull=True, rebase=False,
    user=None, group=None, use_ssh_user=False,
    ssh_keyscan=False,
):
    '''
    Clone/pull git repositories.

    + source: the git source URL
    + target: target directory to clone to
    + branch: branch to pull/checkout
    + pull: pull any changes for the branch
    + rebase: when pulling, use ``--rebase``
    + user: chown files to this user after
    + group: chown files to this group after
    + ssh_keyscan: keyscan the remote host if not in known_hosts before clone/pull
    + [DEPRECATED] use_ssh_user: whether to use the SSH user to clone/pull

    SSH user (deprecated, please use ``preserve_sudo_env``):
        This is an old hack from pyinfra <0.4 which did not support the global
        kwarg ``preserve_sudo_env``. It does the following:

        * makes the target directory writeable by all
        * clones/pulls w/o sudo as the connecting SSH user
        * removes other/group write permissions - unless group is defined, in
          which case only other
    '''

    # NOTE(review): this is a pyinfra operation — it yields sub-operations
    # and commands rather than executing anything itself.
    if use_ssh_user:
        logger.warning(
            'Use of `use_ssh_user` is deprecated, please use `preserve_sudo_env` instead.',
        )

    # Ensure our target directory exists
    yield files.directory(state, host, target)

    # If we're going to chown this after clone/pull, and we're sudo'd, we need to make the
    # directory writeable by the SSH user
    if use_ssh_user:
        yield chmod(target, 'go+w', recursive=True)

    # Do we need to scan for the remote host key?
    if ssh_keyscan:
        # Attempt to parse the domain from the git repository
        # (only matches user@host style SSH remotes).
        domain = re.match(r'^[a-zA-Z0-9]+@([0-9a-zA-Z\.\-]+)', source)

        if domain:
            yield ssh.keyscan(state, host, domain.group(1))
        else:
            raise OperationError(
                'Could not parse domain (to SSH keyscan) from: {0}'.format(source),
            )

    # Store git commands for directory prefix
    git_commands = []
    # A target containing a .git directory is treated as an existing clone.
    is_repo = host.fact.directory('/'.join((target, '.git')))

    # Cloning new repo?
    if not is_repo:
        git_commands.append('clone {0} --branch {1} .'.format(source, branch))

    # Ensuring existing repo
    else:
        current_branch = host.fact.git_branch(target)
        if current_branch != branch:
            git_commands.append('fetch')  # fetch to ensure we have the branch locally
            git_commands.append('checkout {0}'.format(branch))

        if pull:
            if rebase:
                git_commands.append('pull --rebase')
            else:
                git_commands.append('pull')

    # Attach prefixes for directory
    command_prefix = 'cd {0} && git'.format(target)
    git_commands = [
        '{0} {1}'.format(command_prefix, command)
        for command in git_commands
    ]

    if use_ssh_user:
        # Wrap each command so it runs as the SSH user, not via sudo.
        git_commands = [
            {
                'command': command,
                'sudo': False,
                'sudo_user': False,
            }
            for command in git_commands
        ]

    for cmd in git_commands:
        yield cmd

    if use_ssh_user:
        # Remove write permissions from other or other+group when no group
        yield chmod(
            target,
            'o-w' if group else 'go-w',
            recursive=True,
        )

    # Apply any user or group
    if user or group:
        yield chown(target, user, group, recursive=True)
[ "def", "repo", "(", "state", ",", "host", ",", "source", ",", "target", ",", "branch", "=", "'master'", ",", "pull", "=", "True", ",", "rebase", "=", "False", ",", "user", "=", "None", ",", "group", "=", "None", ",", "use_ssh_user", "=", "False", "...
31.575472
22.330189
def luhn_checksum(number, chars=DIGITS):
    '''
    Calculates the Luhn checksum for `number`

    :param number: string or int
    :param chars: string

    >>> luhn_checksum(1234)
    4
    '''
    base = len(chars)
    total = 0
    # Walk the digits right-to-left; every second digit is doubled and its
    # base-`base` digit sum added (the classic Luhn doubling rule).
    for position, symbol in enumerate(reversed(str(number))):
        digit = chars.index(symbol)
        if position % 2:
            quotient, remainder = divmod(digit * 2, base)
            total += quotient + remainder
        else:
            total += digit
    return total % base
[ "def", "luhn_checksum", "(", "number", ",", "chars", "=", "DIGITS", ")", ":", "length", "=", "len", "(", "chars", ")", "number", "=", "[", "chars", ".", "index", "(", "n", ")", "for", "n", "in", "reversed", "(", "str", "(", "number", ")", ")", "]...
22.529412
22.882353
def attribute_iterator(self, mapped_class=None, key=None):
    """Iterate over all mapped attributes for the given mapped class and
    attribute key.

    See :method:`get_attribute_map` for details.
    """
    # Delegate to the internal iterator; `yield from` keeps this a
    # generator just like the original explicit loop.
    yield from self._attribute_iterator(mapped_class, key)
[ "def", "attribute_iterator", "(", "self", ",", "mapped_class", "=", "None", ",", "key", "=", "None", ")", ":", "for", "attr", "in", "self", ".", "_attribute_iterator", "(", "mapped_class", ",", "key", ")", ":", "yield", "attr" ]
45.428571
18.857143
def get_input_grads(self, merge_multi_context=True):
    """Get the gradients with respect to the inputs of the module.

    Parameters
    ----------
    merge_multi_context : bool
        Defaults to ``True``. Under data-parallelism, gradients are
        collected from multiple devices; ``True`` merges them so they
        appear to come from a single executor.

    Returns
    -------
    list of NDArray (merged) or list of list of NDArray (per device).
    """
    # Callers must have requested input gradients at bind time.
    assert self.inputs_need_grad
    if not merge_multi_context:
        return self.input_grad_arrays
    return _merge_multi_context(self.input_grad_arrays, self.data_layouts)
[ "def", "get_input_grads", "(", "self", ",", "merge_multi_context", "=", "True", ")", ":", "assert", "self", ".", "inputs_need_grad", "if", "merge_multi_context", ":", "return", "_merge_multi_context", "(", "self", ".", "input_grad_arrays", ",", "self", ".", "data_...
43.047619
24.333333
def delete_documents(self):
    """Delete every indexed document, keyed by its primary-key field."""
    pk_field = str(self._primary_key)
    for stored in self._whoosh.searcher().documents():
        # Skip documents that somehow lack the primary-key field.
        if pk_field not in stored:
            continue
        self._whoosh.delete_by_term(pk_field, str(stored[pk_field]))
[ "def", "delete_documents", "(", "self", ")", ":", "pk", "=", "str", "(", "self", ".", "_primary_key", ")", "for", "doc", "in", "self", ".", "_whoosh", ".", "searcher", "(", ")", ".", "documents", "(", ")", ":", "if", "pk", "in", "doc", ":", "doc_pk...
34.75
9
def _insert(self, name, value, timestamp, intervals, **kwargs):
    '''
    Insert the value into every configured interval series.

    :param name: name of the timeseries
    :param value: value to record
    :param timestamp: base timestamp for the insert
    :param intervals: offset count passed to ``_normalize_timestamps``;
        the value is written once per normalized timestamp
    :param kwargs: ``pipeline`` -- externally-managed Redis pipeline;
        when supplied, the caller is responsible for executing it.
        ``ttl_batch`` -- passed through to ``_insert_data``.
    '''
    # Reuse the caller's pipeline when given, otherwise open our own
    # (non-transactional) one and execute it at the end.
    if 'pipeline' in kwargs:
        pipe = kwargs.get('pipeline')
    else:
        pipe = self._client.pipeline(transaction=False)
    # NOTE(review): `iteritems` implies this module targets Python 2.
    for interval, config in self._intervals.iteritems():
        timestamps = self._normalize_timestamps(timestamp, intervals, config)
        for tstamp in timestamps:
            self._insert_data(name, value, tstamp, interval, config, pipe,
                              ttl_batch=kwargs.get('ttl_batch'))
    # Only flush the pipeline if we created it ourselves.
    if 'pipeline' not in kwargs:
        pipe.execute()
[ "def", "_insert", "(", "self", ",", "name", ",", "value", ",", "timestamp", ",", "intervals", ",", "*", "*", "kwargs", ")", ":", "if", "'pipeline'", "in", "kwargs", ":", "pipe", "=", "kwargs", ".", "get", "(", "'pipeline'", ")", "else", ":", "pipe", ...
32.352941
21.764706
def _get_to_one_relationship_value(self, obj, column):
    """
    Compute the value produced for a many-to-one relationship.

    :param obj obj: the instance being serialized
    :param dict column: the column description dictionary
    :returns: the rendered value for the related object, or "" when
        there is no related object
    """
    related = getattr(obj, column['__col__'].key)
    if not related:
        return ""
    related_key = column.get('related_key', None)
    if related_key is None:
        return column['__prop__'].compile_obj(related)
    return self._get_formatted_val(related, related_key, column)
[ "def", "_get_to_one_relationship_value", "(", "self", ",", "obj", ",", "column", ")", ":", "related_key", "=", "column", ".", "get", "(", "'related_key'", ",", "None", ")", "related", "=", "getattr", "(", "obj", ",", "column", "[", "'__col__'", "]", ".", ...
34.75
15.15
def add_rec_new(self, k, val):
    """Recursively register ``val`` (and its children), then bind it to ``k``.

    Args:
        k (str): The name of the variable to assign.
        val (LispVal): The value to be added and assigned.

    Returns:
        LispVal: The added value.
    """
    # Register the value tree first, then expose it under the given name.
    self.rec_new(val)
    self[k] = val
    return val
[ "def", "add_rec_new", "(", "self", ",", "k", ",", "val", ")", ":", "self", ".", "rec_new", "(", "val", ")", "self", "[", "k", "]", "=", "val", "return", "val" ]
27.642857
18.214286
def _update_param(self):
    r"""Update algorithm parameters.

    Applies the optional update callables (when set) to refresh the
    ``beta`` and ``lambda`` parameter values.
    """
    # `isinstance(x, type(None))` in the original is just an identity
    # test against None.
    if self._beta_update is not None:
        self._beta = self._beta_update(self._beta)
    if self._lambda_update is not None:
        self._lambda = self._lambda_update(self._lambda)
[ "def", "_update_param", "(", "self", ")", ":", "# Update the gamma parameter.", "if", "not", "isinstance", "(", "self", ".", "_beta_update", ",", "type", "(", "None", ")", ")", ":", "self", ".", "_beta", "=", "self", ".", "_beta_update", "(", "self", ".", ...
30.866667
20.333333
def __last_commit(self):
    """
    Retrieve the most recent commit message (with ``svn info``)

    Returns:
        tuple: (datestr, (revno, user, None, desc)), or None when
        ``svn info`` produces no output.

        $ svn info
        Path: .
        URL: http://python-dlp.googlecode.com/svn/trunk/layercake-python
        Repository Root: http://python-dlp.googlecode.com/svn
        Repository UUID: d0ad5f6e-b329-0410-b51c-492c9c4f233d
        Revision: 378
        Node Kind: directory
        Schedule: normal
        Last Changed Author: chimezie
        Last Changed Rev: 378
        Last Changed Date: 2011-05-01 01:31:38 -0500 (Sun, 01 May 2011)
    """
    cmd = ['svn', 'info']
    op = self.sh(cmd, shell=False)
    if not op:
        return None
    # Lines 7-9 of `svn info` output: author, revision, date.
    # NOTE(review): relies on the exact line layout (and locale) of
    # `svn info` output -- confirm against the targeted svn version.
    author, rev, datestr = op.split('\n')[7:10]
    author = author.split(': ', 1)[1].strip()
    rev = rev.split(': ', 1)[1].strip()
    # Strip the trailing "(Sun, 01 May 2011)" portion of the date.
    datestr = datestr.split(': ', 1)[1].split('(', 1)[0].strip()
    return datestr, (rev, author, None, None)
[ "def", "__last_commit", "(", "self", ")", ":", "cmd", "=", "[", "'svn'", ",", "'info'", "]", "op", "=", "self", ".", "sh", "(", "cmd", ",", "shell", "=", "False", ")", "if", "not", "op", ":", "return", "None", "author", ",", "rev", ",", "datestr"...
35.321429
17.392857
def current_url_name(context):
    """
    Return the namespaced name of the current URL, or False when the
    request did not resolve.

    Example usage:

        {% current_url_name as url_name %}

        <a href="#"{% if url_name == 'myapp:home' %} class="active"{% endif %}">Home</a>

    """
    match = context.request.resolver_match
    if not match:
        return False
    return "{}:{}".format(match.namespace, match.url_name)
[ "def", "current_url_name", "(", "context", ")", ":", "url_name", "=", "False", "if", "context", ".", "request", ".", "resolver_match", ":", "url_name", "=", "\"{}:{}\"", ".", "format", "(", "context", ".", "request", ".", "resolver_match", ".", "namespace", ...
29.888889
21.555556
def get_video_end_time(video_file):
    """Get video end time in seconds

    Reads the creation timestamp from the video's metadata via
    ``FFProbe`` and parses it with one of two known formats.
    Returns None when the file is missing or the timestamp cannot be
    read or parsed.

    NOTE(review): despite the name, this returns the *creation*
    timestamp as a ``datetime``, not an end time in seconds -- confirm
    the intended contract.
    """
    if not os.path.isfile(video_file):
        print("Error, video file {} does not exist".format(video_file))
        return None
    try:
        time_string = FFProbe(video_file).video[0].creation_time
        # Some containers use an alternate timestamp layout; try both.
        try:
            creation_time = datetime.datetime.strptime(
                time_string, TIME_FORMAT)
        except:
            creation_time = datetime.datetime.strptime(
                time_string, TIME_FORMAT_2)
    except:
        # Bare except: deliberately best-effort -- any probe/parse
        # failure yields None. (Consider narrowing the exception types.)
        return None
    return creation_time
[ "def", "get_video_end_time", "(", "video_file", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "video_file", ")", ":", "print", "(", "\"Error, video file {} does not exist\"", ".", "format", "(", "video_file", ")", ")", "return", "None", "try", ...
34.3125
16.75
def set_copyright(self, copyright_):
    """Sets the copyright.

    arg:    copyright (string): the new copyright
    raise:  InvalidArgument - ``copyright`` is invalid
    raise:  NoAccess - ``Metadata.isReadOnly()`` is ``true``
    raise:  NullArgument - ``copyright`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.repository.AssetForm.set_title_template
    metadata = self.get_copyright_metadata()
    self._my_map['copyright'] = self._get_display_text(copyright_, metadata)
[ "def", "set_copyright", "(", "self", ",", "copyright_", ")", ":", "# Implemented from template for osid.repository.AssetForm.set_title_template", "self", ".", "_my_map", "[", "'copyright'", "]", "=", "self", ".", "_get_display_text", "(", "copyright_", ",", "self", ".",...
46.666667
23.916667
def getschemas(cls):
    """Get inner schemas by name.

    :return: schema members of ``cls`` keyed by attribute name, in
        the (sorted) order ``inspect.getmembers`` yields them.
    :rtype: OrderedDict
    """
    members = getmembers(cls, lambda member: isinstance(member, Schema))
    # getmembers already returns (name, value) pairs, which is exactly
    # what the OrderedDict constructor consumes.
    return OrderedDict(members)
[ "def", "getschemas", "(", "cls", ")", ":", "members", "=", "getmembers", "(", "cls", ",", "lambda", "member", ":", "isinstance", "(", "member", ",", "Schema", ")", ")", "result", "=", "OrderedDict", "(", ")", "for", "name", ",", "member", "in", "member...
23.5
19.357143
def update_cache(self, data=None):
    """Refresh the cache timestamp and persist, optionally storing data.

    Call with new ``data``, or set ``self.cache_data`` yourself before
    calling with no argument.
    """
    if data:
        self.cache_data = data
    self.cache_updated = timezone.now()
    self.save()
[ "def", "update_cache", "(", "self", ",", "data", "=", "None", ")", ":", "if", "data", ":", "self", ".", "cache_data", "=", "data", "self", ".", "cache_updated", "=", "timezone", ".", "now", "(", ")", "self", ".", "save", "(", ")" ]
33
8.571429
def assign(name, value):
    '''
    Assign a single sysctl parameter for this minion

    CLI Example:

    .. code-block:: bash

        salt '*' sysctl.assign net.ipv4.ip_forward 1
    '''
    value = six.text_type(value)
    # sysctl names use '.' separators while /proc/sys paths use '/';
    # swap the two characters so dots inside path components survive.
    if six.PY3:
        tran_tab = name.translate(''.maketrans('./', '/.'))
    else:
        if isinstance(name, unicode):  # pylint: disable=incompatible-py3-code,undefined-variable
            trans_args = {ord('/'): u'.', ord('.'): u'/'}
        else:
            trans_args = string.maketrans('./', '/.')
        tran_tab = name.translate(trans_args)
    sysctl_file = '/proc/sys/{0}'.format(tran_tab)
    if not os.path.exists(sysctl_file):
        raise CommandExecutionError('sysctl {0} does not exist'.format(name))
    ret = {}
    cmd = 'sysctl -w {0}="{1}"'.format(name, value)
    data = __salt__['cmd.run_all'](cmd, python_shell=False)
    out = data['stdout']
    err = data['stderr']

    # Example:
    #    # sysctl -w net.ipv4.tcp_rmem="4096 87380 16777216"
    #    net.ipv4.tcp_rmem = 4096 87380 16777216
    regex = re.compile(r'^{0}\s+=\s+{1}$'.format(re.escape(name), re.escape(value)))

    # sysctl echoes "<name> = <value>" on success; anything else (or an
    # "Invalid argument" on stderr) is treated as failure.
    if not regex.match(out) or 'Invalid argument' in six.text_type(err):
        if data['retcode'] != 0 and err:
            error = err
        else:
            error = out
        raise CommandExecutionError('sysctl -w failed: {0}'.format(error))
    new_name, new_value = out.split(' = ', 1)
    ret[new_name] = new_value
    return ret
[ "def", "assign", "(", "name", ",", "value", ")", ":", "value", "=", "six", ".", "text_type", "(", "value", ")", "if", "six", ".", "PY3", ":", "tran_tab", "=", "name", ".", "translate", "(", "''", ".", "maketrans", "(", "'./'", ",", "'/.'", ")", "...
31.933333
23.844444
def get_all_handlers(self) -> T.Dict[str, T.List[T.Callable]]:
    """Returns a dict with event names as keys and lists of registered
    handlers as values.

    The handler lists are copies, so mutating them does not affect the
    registry.
    """
    return {event: list(handlers)
            for event, handlers in self._events.items()}
[ "def", "get_all_handlers", "(", "self", ")", "->", "T", ".", "Dict", "[", "str", ",", "T", ".", "List", "[", "T", ".", "Callable", "]", "]", ":", "events", "=", "{", "}", "for", "event", ",", "handlers", "in", "self", ".", "_events", ".", "items"...
37.5
14.5
def map_axes(dim_vars, reverse_map=False):
    """
    axis name -> [dimension names]
    dimension name -> [axis_name], length 0 if reverse_map
    """
    known_axes = ('X', 'Y', 'Z', 'T')
    mapping = defaultdict(list)
    for name, var in dim_vars.items():
        axis = getattr(var, 'axis', '')
        if not axis:
            continue
        axis = axis.upper()
        if axis not in known_axes:
            continue
        if reverse_map:
            mapping[name].append(axis)
        else:
            mapping[axis].append(name)
    return dict(mapping)
[ "def", "map_axes", "(", "dim_vars", ",", "reverse_map", "=", "False", ")", ":", "ret_val", "=", "defaultdict", "(", "list", ")", "axes", "=", "[", "'X'", ",", "'Y'", ",", "'Z'", ",", "'T'", "]", "for", "k", ",", "v", "in", "dim_vars", ".", "items",...
24.952381
13.904762
def get_parent_book_ids(self, book_id):
    """Gets the parent ``Ids`` of the given book.

    arg:    book_id (osid.id.Id): a book ``Id``
    return: (osid.id.IdList) - the parent ``Ids`` of the book
    raise:  NotFound - ``book_id`` is not found
    raise:  NullArgument - ``book_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchySession.get_parent_bin_ids
    catalog_session = self._catalog_session
    if catalog_session is None:
        return self._hierarchy_session.get_parents(id_=book_id)
    return catalog_session.get_parent_catalog_ids(catalog_id=book_id)
[ "def", "get_parent_book_ids", "(", "self", ",", "book_id", ")", ":", "# Implemented from template for", "# osid.resource.BinHierarchySession.get_parent_bin_ids", "if", "self", ".", "_catalog_session", "is", "not", "None", ":", "return", "self", ".", "_catalog_session", "....
47
17.588235
def ssn(self, min_age=16, max_age=90):
    """
    Returns 11 character Estonian personal identity code (isikukood, IK).

    Age of person is between ``min_age`` and ``max_age`` years, based on
    the local computer date. Sex is assigned randomly.

    An Estonian Personal identification code consists of 11 digits,
    generally given without any whitespace or other delimiters. The form
    is GYYMMDDSSSC, where G shows sex and century of birth (odd number
    male, even number female, 1-2 19th century, 3-4 20th century, 5-6
    21st century), SSS is a serial number separating persons born on the
    same date and C a checksum.

    https://en.wikipedia.org/wiki/National_identification_number#Estonia
    """
    # Keep the RNG call order (age, century digit, serial) so seeded
    # generators produce the same sequence as before.
    rng = self.generator.random
    days_old = rng.randrange(min_age * 365, max_age * 365)
    birthday = datetime.date.today() - datetime.timedelta(days=days_old)
    if birthday.year < 2000:
        century_digit = rng.choice(('3', '4'))
    elif birthday.year < 2100:
        century_digit = rng.choice(('5', '6'))
    else:
        century_digit = rng.choice(('7', '8'))
    ik = century_digit + '{:02d}{:02d}{:02d}'.format(
        birthday.year % 100, birthday.month, birthday.day)
    ik += str(rng.randrange(0, 999)).zfill(3)
    return ik + str(checksum([int(ch) for ch in ik]))
[ "def", "ssn", "(", "self", ",", "min_age", "=", "16", ",", "max_age", "=", "90", ")", ":", "age", "=", "datetime", ".", "timedelta", "(", "days", "=", "self", ".", "generator", ".", "random", ".", "randrange", "(", "min_age", "*", "365", ",", "max_...
47.033333
20.366667
def __start(self):  # pragma: no cover
    """Starts the real-time engine that captures tasks."""
    assert not self.dispatcher_thread
    worker = threading.Thread(target=self.__run_dispatcher,
                              name='clearly-dispatcher')
    worker.daemon = True
    self.dispatcher_thread = worker
    self.running = True  # graceful shutdown
    worker.start()
[ "def", "__start", "(", "self", ")", ":", "# pragma: no cover", "assert", "not", "self", ".", "dispatcher_thread", "self", ".", "dispatcher_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "__run_dispatcher", ",", "name", "=", "'clearl...
42.6
17.2
def register(**kwargs):
    """Class decorator that registers a ``Notification`` subclass with
    the global ``site_notifications`` registry.
    """
    def _wrapper(notification_cls):
        if issubclass(notification_cls, (Notification,)):
            site_notifications.register(notification_cls=notification_cls)
            return notification_cls
        raise RegisterNotificationError(
            f"Wrapped class must be a 'Notification' class. "
            f"Got '{notification_cls.__name__}'"
        )
    return _wrapper
[ "def", "register", "(", "*", "*", "kwargs", ")", ":", "def", "_wrapper", "(", "notification_cls", ")", ":", "if", "not", "issubclass", "(", "notification_cls", ",", "(", "Notification", ",", ")", ")", ":", "raise", "RegisterNotificationError", "(", "f\"Wrapp...
30.4
19.4
def _load_features_from_images(self, images, names=None):
    """ Load feature image data from image files.

    Args:
        images: A list of image filenames.
        names: An optional list of strings to use as the feature names.
            Must be in the same order as the images.

    Raises:
        ValueError: If ``names`` is provided but its length does not
            match ``images``. (ValueError subclasses Exception, so
            callers catching the previous generic Exception still work.)
    """
    if names is not None and len(names) != len(images):
        raise ValueError(
            "Lists of feature names and images must be of same length!")
    # Fall back to the filenames themselves as feature names.
    self.feature_names = names if names is not None else images
    self.feature_images = imageutils.load_imgs(images, self.masker)
[ "def", "_load_features_from_images", "(", "self", ",", "images", ",", "names", "=", "None", ")", ":", "if", "names", "is", "not", "None", "and", "len", "(", "names", ")", "!=", "len", "(", "images", ")", ":", "raise", "Exception", "(", "\"Lists of featur...
46.461538
19.846154
def get_xy_steps(bbox, h_dim):
    r"""Return meshgrid spacing based on bounding box.

    bbox: dictionary
        Dictionary containing coordinates for corners of study area.
    h_dim: integer
        Horizontal resolution in meters.

    Returns
    -------
    x_steps, (X, ) ndarray
        Number of grids in x dimension.
    y_steps: (Y, ) ndarray
        Number of grids in y dimension.
    """
    x_range, y_range = get_xy_range(bbox)
    # Round the extents up so the grid always covers the whole bbox.
    return int(np.ceil(x_range / h_dim)), int(np.ceil(y_range / h_dim))
[ "def", "get_xy_steps", "(", "bbox", ",", "h_dim", ")", ":", "x_range", ",", "y_range", "=", "get_xy_range", "(", "bbox", ")", "x_steps", "=", "np", ".", "ceil", "(", "x_range", "/", "h_dim", ")", "y_steps", "=", "np", ".", "ceil", "(", "y_range", "/"...
24.681818
17.090909
def install_wic(self, wic_slot_id, wic):
    """
    Installs a WIC on this adapter.

    :param wic_slot_id: WIC slot ID (integer)
    :param wic: WIC instance
    """
    self._wics[wic_slot_id] = wic

    # Dynamips numbers WIC ports at multiples of 16 plus the port index:
    # WIC1 port 1 = 16, WIC1 port 2 = 17
    # WIC2 port 1 = 32, WIC2 port 2 = 33
    # WIC3 port 1 = 48, WIC3 port 2 = 49
    first_port = 16 * (wic_slot_id + 1)
    for offset in range(wic.interfaces):
        self._ports[first_port + offset] = None
[ "def", "install_wic", "(", "self", ",", "wic_slot_id", ",", "wic", ")", ":", "self", ".", "_wics", "[", "wic_slot_id", "]", "=", "wic", "# Dynamips WICs ports start on a multiple of 16 + port number", "# WIC1 port 1 = 16, WIC1 port 2 = 17", "# WIC2 port 1 = 32, WIC2 port 2 = ...
32.777778
11
def _is_expired(self, key):
    """Check if a key is expired. If so, delete the key and return True."""
    if not hasattr(self, '_index'):
        return False  # haven't initalized yet, so don't bother
    try:
        deadline = self._index[key]
    except KeyError:
        # First sighting: record a deadline, or None for "never expires".
        self._index[key] = int(time() + self.timeout) if self.timeout else None
        return False
    if deadline is not None and deadline < time():
        del self[key]  # key expired, so delete it from container
        return True
    return False
[ "def", "_is_expired", "(", "self", ",", "key", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_index'", ")", ":", "return", "False", "# haven't initalized yet, so don't bother", "try", ":", "timeout", "=", "self", ".", "_index", "[", "key", "]", "ex...
37.1875
14.375
def long2rfc1924(l):
    """Convert a network byte order 128-bit integer to an rfc1924 IPv6 address.

    >>> long2rfc1924(ip2long('1080::8:800:200C:417A'))
    '4)+k&C#VzJ4br>0wv%Yp'
    >>> long2rfc1924(ip2long('::'))
    '00000000000000000000'
    >>> long2rfc1924(MAX_IP)
    '=r54lj&NUUO~Hi%c2ym0'

    :param l: Network byte order 128-bit integer.
    :type l: int
    :returns: RFC 1924 IPv6 address
    :raises: TypeError
    """
    if not (MIN_IP <= l <= MAX_IP):
        raise TypeError(
            "expected int between %d and %d inclusive" % (MIN_IP, MAX_IP))
    # Repeated base-85 division, emitting least-significant digits first.
    digits = []
    remaining = l
    while remaining > 85:
        remaining, digit = divmod(remaining, 85)
        digits.append(_RFC1924_ALPHABET[digit])
    digits.append(_RFC1924_ALPHABET[remaining])
    return ''.join(reversed(digits)).zfill(20)
[ "def", "long2rfc1924", "(", "l", ")", ":", "if", "MAX_IP", "<", "l", "or", "l", "<", "MIN_IP", ":", "raise", "TypeError", "(", "\"expected int between %d and %d inclusive\"", "%", "(", "MIN_IP", ",", "MAX_IP", ")", ")", "o", "=", "[", "]", "r", "=", "l...
26.035714
18.214286
def requires_columns(required_cols):
    """Decorator that raises a `MalformedResultsError` when any of
    `required_cols` is missing from the matches of the `Results` object
    bearing the decorated method.

    :param required_cols: names of required columns
    :type required_cols: `list` of `str`
    """
    def dec(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            actual_cols = list(args[0]._matches.columns)
            missing_cols = ['"{}"'.format(col) for col in required_cols
                            if col not in actual_cols]
            if missing_cols:
                raise MalformedResultsError(
                    constants.MISSING_REQUIRED_COLUMNS_ERROR.format(
                        ', '.join(missing_cols)))
            return f(*args, **kwargs)
        return decorated_function
    return dec
[ "def", "requires_columns", "(", "required_cols", ")", ":", "def", "dec", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "decorated_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "actual_cols", "=", "list", "(", "args", "[", ...
38.291667
14.583333
def parse(file_contents, file_name):
    '''
    Takes the contents of a file assumed to be a jinja2 template and
    tries to parse it.

    Args:
        file_contents (str): File contents of a jinja file
        file_name (str): Name reported in any error message.

    Returns:
        str: "" when the template parses cleanly, otherwise an error
        report naming the file and describing the parse failure.
    '''
    env = Environment()
    try:
        env.parse(file_contents)
    except Exception as exc_value:
        return ("ERROR: Jinja2 Template File: {0}".format(file_name)
                + repr(exc_value) + '\n')
    return ""
[ "def", "parse", "(", "file_contents", ",", "file_name", ")", ":", "env", "=", "Environment", "(", ")", "result", "=", "\"\"", "try", ":", "env", ".", "parse", "(", "file_contents", ")", "except", "Exception", ":", "_", ",", "exc_value", ",", "_", "=", ...
26.652174
24.304348
def _asciify_dict(data):
    """ Ascii-fies dict keys and values

    Recursively walks ``data``: unicode keys/values are accent-stripped
    and encoded to UTF-8 bytes; nested lists/dicts are handled via
    ``_asciify_list`` / ``_asciify_dict``. (Python 2 only: relies on
    ``iteritems`` and the ``unicode`` type.)
    """
    ret = {}
    for key, value in data.iteritems():
        if isinstance(key, unicode):
            key = _remove_accents(key)
            key = key.encode('utf-8')
        # # note new if
        if isinstance(value, unicode):
            value = _remove_accents(value)
            value = value.encode('utf-8')
        elif isinstance(value, list):
            value = _asciify_list(value)
        elif isinstance(value, dict):
            value = _asciify_dict(value)
        ret[key] = value
    return ret
[ "def", "_asciify_dict", "(", "data", ")", ":", "ret", "=", "{", "}", "for", "key", ",", "value", "in", "data", ".", "iteritems", "(", ")", ":", "if", "isinstance", "(", "key", ",", "unicode", ")", ":", "key", "=", "_remove_accents", "(", "key", ")"...
33.470588
7.058824
def update_geometry(self):
    """
    Updates the Widget geometry.

    :return: Method success.
    :rtype: bool
    """
    contents_rect = self.__editor.contentsRect()
    self.setGeometry(contents_rect.left(),
                     contents_rect.top(),
                     self.get_width(),
                     contents_rect.height())
    return True
[ "def", "update_geometry", "(", "self", ")", ":", "self", ".", "setGeometry", "(", "self", ".", "__editor", ".", "contentsRect", "(", ")", ".", "left", "(", ")", ",", "self", ".", "__editor", ".", "contentsRect", "(", ")", ".", "top", "(", ")", ",", ...
29.307692
16.384615
def _install_gatk_jar(name, fname, manifest, system_config, toolplus_dir):
    """Install a jar for GATK or associated tools like MuTect.

    Copies ``fname`` into a versioned directory under ``toolplus_dir``
    and records its location in the system config and the manifest.

    :param name: tool name (e.g. "gatk", "mutect")
    :param fname: path to the jar file supplied via --toolplus
    :param manifest: manifest file to record the installed version in
    :param system_config: system config file to point at the jar dir
    :param toolplus_dir: root directory for --toolplus installs
    :raises ValueError: if ``fname`` is not a ``.jar`` file
    """
    if not fname.endswith(".jar"):
        raise ValueError("--toolplus argument for %s expects a jar file: %s" % (name, fname))
    version = get_gatk_jar_version(name, fname)
    # Layout: <toolplus_dir>/<name>/<version>/<jar basename>
    store_dir = utils.safe_makedir(os.path.join(toolplus_dir, name, version))
    shutil.copyfile(fname, os.path.join(store_dir, os.path.basename(fname)))
    _update_system_file(system_config, name, {"dir": store_dir})
    _update_manifest(manifest, name, version)
[ "def", "_install_gatk_jar", "(", "name", ",", "fname", ",", "manifest", ",", "system_config", ",", "toolplus_dir", ")", ":", "if", "not", "fname", ".", "endswith", "(", "\".jar\"", ")", ":", "raise", "ValueError", "(", "\"--toolplus argument for %s expects a jar f...
57.9
20.2
def printoptions():
    '''print paver options.

    Prettified by json. `long_description` is removed
    '''
    serialized = json.dumps(environment.options, indent=4, sort_keys=True,
                            skipkeys=True, cls=MyEncoder)
    print(serialized)
[ "def", "printoptions", "(", ")", ":", "x", "=", "json", ".", "dumps", "(", "environment", ".", "options", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ",", "skipkeys", "=", "True", ",", "cls", "=", "MyEncoder", ")", "print", "(", "x", ")...
24
16
def _prevent_default_initializer_splitting(self, item, indent_amt):
    """Prevent splitting between a default initializer.

    When there is a default initializer, it's best to keep it all on
    the same line. It's nicer and more readable, even if it goes
    over the maximum allowable line length. This goes back along the
    current line to determine if we have a default initializer, and,
    if so, to remove extraneous whitespaces and add a line
    break/indent before it if needed.

    :param item: the token about to be emitted
    :param indent_amt: indentation used if a break must be inserted
    """
    if unicode(item) == '=':
        # This is the assignment in the initializer. Just remove spaces for
        # now.
        self._delete_whitespace()
        return

    # Only act when the previous token is the '=' of a default
    # initializer and the token before it exists.
    if (not self._prev_item or not self._prev_prev_item or
            unicode(self._prev_item) != '='):
        return

    self._delete_whitespace()
    prev_prev_index = self._lines.index(self._prev_prev_item)

    if (
        isinstance(self._lines[prev_prev_index - 1], self._Indent) or
        self.fits_on_current_line(item.size + 1)
    ):
        # The default initializer is already the only item on this line.
        # Don't insert a newline here.
        return

    # Replace the space with a newline/indent combo.
    if isinstance(self._lines[prev_prev_index - 1], self._Space):
        del self._lines[prev_prev_index - 1]

    # Re-look up the index: the deletion above may have shifted it.
    self.add_line_break_at(self._lines.index(self._prev_prev_item),
                           indent_amt)
[ "def", "_prevent_default_initializer_splitting", "(", "self", ",", "item", ",", "indent_amt", ")", ":", "if", "unicode", "(", "item", ")", "==", "'='", ":", "# This is the assignment in the initializer. Just remove spaces for", "# now.", "self", ".", "_delete_whitespace",...
39.578947
23.210526
def get_digest_keys(self):
    """Returns a list of the type choices

    One entry per grid column: the cell value from the header row
    (row index ``self.has_header``), falling back to the first known
    digest type when the cell is empty. (Python 2: uses ``xrange``
    and indexes ``dict.keys()``.)
    """
    digest_keys = []
    for col in xrange(self.GetNumberCols()):
        digest_key = self.GetCellValue(self.has_header, col)
        if digest_key == "":
            # Empty header cell: default to the first digest type.
            digest_key = self.digest_types.keys()[0]
        digest_keys.append(digest_key)
    return digest_keys
[ "def", "get_digest_keys", "(", "self", ")", ":", "digest_keys", "=", "[", "]", "for", "col", "in", "xrange", "(", "self", ".", "GetNumberCols", "(", ")", ")", ":", "digest_key", "=", "self", ".", "GetCellValue", "(", "self", ".", "has_header", ",", "co...
33.272727
16.545455
def page(self, course, error="", post=False):
    """ Get all data and display the page

    :param course: course whose registered users and staff are listed
    :param error: error message surfaced in the rendered template
    :param post: whether the request was a POST (passed to the template)
    :returns: the rendered student-list page, or a CSV export when the
        request carries a ``csv`` parameter
    """
    # Registered users sorted by realname; staff are prepended and win
    # on duplicate usernames via OrderedDict construction order.
    users = sorted(list(
        self.user_manager.get_users_info(self.user_manager.get_course_registered_users(course, False)).items()),
        key=lambda k: k[1][0] if k[1] is not None else "")

    users = OrderedDict(sorted(list(self.user_manager.get_users_info(course.get_staff()).items()),
                               key=lambda k: k[1][0] if k[1] is not None else "") + users)

    # Skeleton statistics row per user; cached course data is merged in
    # below when available.
    user_data = OrderedDict([(username, {
        "username": username, "realname": user[0] if user is not None else "",
        "email": user[1] if user is not None else "", "total_tasks": 0,
        "task_grades": {"answer": 0, "match": 0}, "task_succeeded": 0, "task_tried": 0, "total_tries": 0,
        "grade": 0, "url": self.submission_url_generator(username)}) for username, user in users.items()])

    for username, data in self.user_manager.get_course_caches(list(users.keys()), course).items():
        user_data[username].update(data if data is not None else {})

    if "csv" in web.input():
        return make_csv(user_data)

    return self.template_helper.get_renderer().course_admin.student_list(course, list(user_data.values()), error, post)
[ "def", "page", "(", "self", ",", "course", ",", "error", "=", "\"\"", ",", "post", "=", "False", ")", ":", "users", "=", "sorted", "(", "list", "(", "self", ".", "user_manager", ".", "get_users_info", "(", "self", ".", "user_manager", ".", "get_course_...
61.095238
40.666667
def get_stp_mst_detail_output_msti_msti_bridge_id(self, **kwargs):
    """Auto Generated Code

    Builds the get_stp_mst_detail/output/msti request element with the
    given ``instance_id`` and ``msti_bridge_id`` keys and hands it to
    the callback (``self._callback`` unless one is supplied).
    """
    detail = ET.Element("get_stp_mst_detail")
    output = ET.SubElement(detail, "output")
    msti = ET.SubElement(output, "msti")
    instance_id_key = ET.SubElement(msti, "instance-id")
    instance_id_key.text = kwargs.pop('instance_id')
    msti_bridge_id = ET.SubElement(msti, "msti-bridge-id")
    msti_bridge_id.text = kwargs.pop('msti_bridge_id')

    callback = kwargs.pop('callback', self._callback)
    return callback(detail)
[ "def", "get_stp_mst_detail_output_msti_msti_bridge_id", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "get_stp_mst_detail", "=", "ET", ".", "Element", "(", "\"get_stp_mst_detail\"", ")", "config", "=...
44.533333
14.733333
def expand_template(template, namespace): """ Expand the given (preparsed) template. Currently, only Tempita templates are supported. @param template: The template, in preparsed form, or as a string (which then will be preparsed). @param namespace: Custom namespace that is added to the predefined defaults and takes precedence over those. @return: The expanded template. @raise LoggableError: In case of typical errors during template execution. """ # Create helper namespace formatters = dict((name[4:], method) for name, method in globals().items() if name.startswith("fmt_") ) helpers = Bunch() helpers.update(formatters) # Default templating namespace variables = dict(h=helpers, c=config.custom_template_helpers) variables.update(formatters) # redundant, for backwards compatibility # Provided namespace takes precedence variables.update(namespace) # Expand template try: template = preparse(template) return template.substitute(**variables) except (AttributeError, ValueError, NameError, TypeError) as exc: hint = '' if "column" in str(exc): try: col = int(str(exc).split("column")[1].split()[0]) except (TypeError, ValueError): pass else: hint = "%svVv\n" % (' ' * (col+4)) content = getattr(template, "content", template) raise error.LoggableError("%s: %s in template:\n%s%s" % ( type(exc).__name__, exc, hint, "\n".join("%3d: %s" % (i+1, line) for i, line in enumerate(content.splitlines())) ))
[ "def", "expand_template", "(", "template", ",", "namespace", ")", ":", "# Create helper namespace", "formatters", "=", "dict", "(", "(", "name", "[", "4", ":", "]", ",", "method", ")", "for", "name", ",", "method", "in", "globals", "(", ")", ".", "items"...
37.704545
20.090909
def draw(self):
    """Draws the dragger at the current mouse location.

    Should be called in every frame. Does nothing while the dragger is
    not visible.
    """
    if not self.visible:
        return
    # Pick the surface matching the current state, then blit it once.
    if not self.isEnabled:
        surface = self.surfaceDisabled
    elif self.dragging:
        surface = self.surfaceDown
    elif self.mouseOver:
        surface = self.surfaceOver
    else:
        surface = self.surfaceUp
    self.window.blit(surface, self.rect)
[ "def", "draw", "(", "self", ")", ":", "if", "not", "self", ".", "visible", ":", "return", "if", "self", ".", "isEnabled", ":", "# Draw the dragger's current appearance to the window.\r", "if", "self", ".", "dragging", ":", "self", ".", "window", ".", "blit", ...
31.904762
19.238095
def delete_process_work_item_type_rule(self, process_id, wit_ref_name, rule_id):
    """DeleteProcessWorkItemTypeRule.
    [Preview API] Removes a rule from the work item type in the process.
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str rule_id: The ID of the rule
    """
    # Serialize each supplied argument into its URL route slot.
    route_values = {}
    for route_key, arg_name, value in (
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('ruleId', 'rule_id', rule_id)):
        if value is not None:
            route_values[route_key] = self._serialize.url(arg_name, value, 'str')
    self._send(http_method='DELETE',
               location_id='76fe3432-d825-479d-a5f6-983bbb78b4f3',
               version='5.0-preview.2',
               route_values=route_values)
[ "def", "delete_process_work_item_type_rule", "(", "self", ",", "process_id", ",", "wit_ref_name", ",", "rule_id", ")", ":", "route_values", "=", "{", "}", "if", "process_id", "is", "not", "None", ":", "route_values", "[", "'processId'", "]", "=", "self", ".", ...
54.166667
19.5
def blend(self, other, percent=0.5):
    """blend this color with the other one.

    Args:
      :other: the grapefruit.Color to blend with this one.
      :percent: weight of this color in the blend (the other color
        receives ``1 - percent``).

    Returns:
      A grapefruit.Color instance which is the result of blending
      this color on the other one.

    >>> c1 = Color.from_rgb(1, 0.5, 0, 0.2)
    >>> c2 = Color.from_rgb(1, 1, 1, 0.6)
    >>> c3 = c1.blend(c2)
    >>> c3
    Color(1.0, 0.75, 0.5, 0.4)

    """
    other_weight = 1.0 - percent
    blended_rgb = tuple((mine * percent) + (theirs * other_weight)
                        for mine, theirs in zip(self.__rgb, other.__rgb))
    blended_alpha = (self.__a * percent) + (other.__a * other_weight)
    return Color(blended_rgb, 'rgb', blended_alpha, self.__wref)
[ "def", "blend", "(", "self", ",", "other", ",", "percent", "=", "0.5", ")", ":", "dest", "=", "1.0", "-", "percent", "rgb", "=", "tuple", "(", "(", "(", "u", "*", "percent", ")", "+", "(", "v", "*", "dest", ")", "for", "u", ",", "v", "in", ...
28.318182
19.409091
def get_variant_genotypes(self, variant):
    """Get the genotypes from a well formed variant instance.

    Args:
        marker (Variant): A Variant instance.

    Returns:
        A list of Genotypes instance containing a pointer to the
        variant as well as a vector of encoded genotypes. An empty
        list when the variant is not found in the index.

    Raises:
        NotImplementedError: when the IMPUTE2 file has no index.
        ValueError: when the variant's chromosome has no IMPUTE2
            integer mapping.
    """
    if not self.has_index:
        raise NotImplementedError("Not implemented when IMPUTE2 file is "
                                  "not indexed (see genipe)")

    # Find the variant in the index
    try:
        impute2_chrom = CHROM_STR_TO_INT[variant.chrom.name]
    except KeyError:
        raise ValueError(
            "Invalid chromosome ('{}') for IMPUTE2.".format(variant.chrom)
        )
    variant_info = self._impute2_index[
        (self._impute2_index.chrom == impute2_chrom) &
        (self._impute2_index.pos == variant.pos)
    ]

    if variant_info.shape[0] == 0:
        # Position absent from the index: log and return nothing.
        logging.variant_not_found(variant)
        return []

    elif variant_info.shape[0] == 1:
        return self._get_biallelic_variant(variant, variant_info)

    else:
        # Multiple index rows at one position => multi-allelic site.
        return self._get_multialleic_variant(variant, variant_info)
[ "def", "get_variant_genotypes", "(", "self", ",", "variant", ")", ":", "if", "not", "self", ".", "has_index", ":", "raise", "NotImplementedError", "(", "\"Not implemented when IMPUTE2 file is \"", "\"not indexed (see genipe)\"", ")", "# Find the variant in the index", "try"...
32.837838
22.297297
def _server_enable():
    """Toggle pull-request processing according to the CLI flags.

    Reads the ``enable``/``disable`` command-line flags, updates the
    persisted ``enabled`` flag, and saves the database only when the
    value actually changed.
    """
    previous = db["enabled"] if "enabled" in db else None
    if args["disable"]:
        db["enabled"] = False
        okay("Disabled the CI server. No pull requests will be processed.")
    if args["enable"]:
        db["enabled"] = True
        okay("Enabled the CI server. Pull request monitoring online.")
    # Persist only on an actual state change.
    if previous != db["enabled"]:
        _save_db()
[ "def", "_server_enable", "(", ")", ":", "prev", "=", "None", "if", "\"enabled\"", "not", "in", "db", "else", "db", "[", "\"enabled\"", "]", "if", "args", "[", "\"disable\"", "]", ":", "db", "[", "\"enabled\"", "]", "=", "False", "okay", "(", "\"Disable...
33.25
19.125
def retrieveVals(self):
    """Fetch Lighttpd server statistics and feed the configured graphs."""
    info = LighttpdInfo(self._host, self._port,
                        self._user, self._password,
                        self._statuspath, self._ssl)
    stats = info.getServerStats()
    if self.hasGraph('lighttpd_access'):
        self.setGraphVal('lighttpd_access', 'reqs', stats['Total Accesses'])
    if self.hasGraph('lighttpd_bytes'):
        # Stats report kBytes; the graph expects bytes.
        self.setGraphVal('lighttpd_bytes', 'bytes',
                         stats['Total kBytes'] * 1000)
    if self.hasGraph('lighttpd_servers'):
        # Busy / idle / max worker counts share one graph.
        for field, key in (('busy', 'BusyServers'),
                           ('idle', 'IdleServers'),
                           ('max', 'MaxServers')):
            self.setGraphVal('lighttpd_servers', field, stats[key])
[ "def", "retrieveVals", "(", "self", ")", ":", "lighttpdInfo", "=", "LighttpdInfo", "(", "self", ".", "_host", ",", "self", ".", "_port", ",", "self", ".", "_user", ",", "self", ".", "_password", ",", "self", ".", "_statuspath", ",", "self", ".", "_ssl"...
56.4
18.6
def _addPort(n: LNode, lp: LPort, intf: Interface, reverseDirection=False):
    """Create an LPort for *intf* under *lp*, recursing into sub-interfaces.

    Registers the new port in the node's origin-object mapping when one
    is maintained, and returns the created LPort.
    """
    origin = originObjOfPort(intf)
    direction = PortTypeFromDir(intf._direction)
    if reverseDirection:
        direction = PortType.opposite(direction)

    port = LPort(lp, direction, lp.side, name=intf._name)
    port.originObj = origin
    # Nested interfaces become child ports of the one just created.
    for sub_intf in (intf._interfaces or ()):
        _addPort(n, port, sub_intf, reverseDirection=reverseDirection)

    lp.children.append(port)
    port.parent = lp
    if n._node2lnode is not None:
        n._node2lnode[origin] = port
    return port
[ "def", "_addPort", "(", "n", ":", "LNode", ",", "lp", ":", "LPort", ",", "intf", ":", "Interface", ",", "reverseDirection", "=", "False", ")", ":", "origin", "=", "originObjOfPort", "(", "intf", ")", "d", "=", "intf", ".", "_direction", "d", "=", "Po...
26.4
14.32
def detect_global_table_updates(record):
    """Return True when a MODIFY event only touched DDB Global Table fields.

    Such records are pure replication noise (no application data change)
    and should be skipped by callers.

    :param record: the DynamoDB stream record to inspect.
    :return: True if the old and new images are identical once the
        Global-Table-specific fields are stripped, False otherwise.
    """
    # Only MODIFY events can be GT-only updates.
    if record['eventName'] != 'MODIFY':
        return False
    # Pop off the GT bookkeeping fields, then compare canonical JSON dumps.
    before = remove_global_dynamo_specific_fields(record['dynamodb']['OldImage'])
    after = remove_global_dynamo_specific_fields(record['dynamodb']['NewImage'])
    return json.dumps(before, sort_keys=True) == json.dumps(after, sort_keys=True)
[ "def", "detect_global_table_updates", "(", "record", ")", ":", "# This only affects MODIFY events.", "if", "record", "[", "'eventName'", "]", "==", "'MODIFY'", ":", "# Need to compare the old and new images to check for GT specific changes only (just pop off the GT fields)", "old_ima...
43.117647
25.764706
def rectangle(cls, vertices, **kwargs):
    """Build an axis-aligned rectangle from two opposite corners.

    Parameters
    ----------
    vertices : array-like
        The ``[x, y]`` positions of the bottom-left and top-right corners.
    kwargs
        Other keyword arguments are forwarded to the |Shape| constructor.
    """
    bl, tr = vertices
    # Derive the remaining two corners, keeping counter-clockwise order.
    corners = [bl, [tr[0], bl[1]], tr, [bl[0], tr[1]]]
    return cls(corners, **kwargs)
[ "def", "rectangle", "(", "cls", ",", "vertices", ",", "*", "*", "kwargs", ")", ":", "bottom_left", ",", "top_right", "=", "vertices", "top_left", "=", "[", "bottom_left", "[", "0", "]", ",", "top_right", "[", "1", "]", "]", "bottom_right", "=", "[", ...
39.466667
19.266667
def list():
    """Print the available input formats, one per line."""
    choice_len = max(len(key) for key in _input_choices)
    tmpl = " {:<%d}: {}\n" % choice_len
    lines = [tmpl.format(name, info[0])
             for name, info in six.iteritems(_input_choices)]
    click.echo(''.join(lines))
[ "def", "list", "(", ")", ":", "choice_len", "=", "max", "(", "map", "(", "len", ",", "_input_choices", ".", "keys", "(", ")", ")", ")", "tmpl", "=", "\" {:<%d}: {}\\n\"", "%", "choice_len", "text", "=", "''", ".", "join", "(", "map", "(", "lambda", ...
30
15.111111
def write(proto_dataset_uri, input):
    """Populate a proto dataset's readme from YAML on a file or stdin.

    To stream content from stdin use "-", e.g.

    echo "desc: my data" | dtool readme write <DS_URI> -
    """
    dataset = dtoolcore.ProtoDataSet.from_uri(uri=proto_dataset_uri)
    _validate_and_put_readme(dataset, input.read())
[ "def", "write", "(", "proto_dataset_uri", ",", "input", ")", ":", "proto_dataset", "=", "dtoolcore", ".", "ProtoDataSet", ".", "from_uri", "(", "uri", "=", "proto_dataset_uri", ")", "_validate_and_put_readme", "(", "proto_dataset", ",", "input", ".", "read", "("...
31.636364
16.454545
def add_edge(self, node1_name, node2_name, edge_length=DEFAULT_EDGE_LENGTH):
    """Attach a new child node to an existing parent with the given edge length.

    Addition is refused when the parent is absent or the child already exists.

    :param node1_name: name of the existing parent node
    :param node2_name: name of the child node to create
    :param edge_length: branch length of the new edge
    :return: nothing, the tree is changed in place
    :raises ValueError: if the parent node IS NOT present in the tree,
        or the child node IS already present in the tree
    """
    if not self.__has_node(name=node1_name):
        raise ValueError("Can not add an edge to a non-existing node {name}".format(name=node1_name))
    if self.__has_node(name=node2_name):
        raise ValueError("Can not add an edge to already existing node {name}".format(name=node2_name))
    # Tree topology changes invalidate the cached multicolors.
    self.multicolors_are_up_to_date = False
    parent = self.__get_node_by_name(name=node1_name)
    parent.add_child(name=node2_name, dist=edge_length)
[ "def", "add_edge", "(", "self", ",", "node1_name", ",", "node2_name", ",", "edge_length", "=", "DEFAULT_EDGE_LENGTH", ")", ":", "if", "not", "self", ".", "__has_node", "(", "name", "=", "node1_name", ")", ":", "raise", "ValueError", "(", "\"Can not add an edge...
60.888889
29.277778
def add_angles(self, indexes, deg=False, cossin=False, periodic=True):
    """Register an angle feature for the given atom triplets.

    Parameters
    ----------
    indexes : np.ndarray, shape=(num_pairs, 3), dtype=int
        Triplets of atom indices defining each angle.
    deg : bool, optional, default = False
        If True, report angles in degrees instead of radians.
    cossin : bool, optional, default = False
        If True, return each angle as the pair (sin(x), cos(x)); useful
        when computing means (e.g. TICA/PCA, clustering) in that space.
    periodic : bool, optional, default = True
        If True and the trajectory carries unitcell information, apply
        the minimum image convention to angles crossing periodic images.
    """
    from .angles import AngleFeature
    checked = self._check_indices(indexes, pair_n=3)
    feature = AngleFeature(self.topology, checked, deg=deg, cossin=cossin,
                           periodic=periodic)
    self.__add_feature(feature)
[ "def", "add_angles", "(", "self", ",", "indexes", ",", "deg", "=", "False", ",", "cossin", "=", "False", ",", "periodic", "=", "True", ")", ":", "from", ".", "angles", "import", "AngleFeature", "indexes", "=", "self", ".", "_check_indices", "(", "indexes...
45.076923
18.769231
def get_oauth_url(self):
    """ Build the request URL with OAuth 1.0a query parameters appended. """
    params = OrderedDict()
    if "?" in self.url:
        base_url = self.url[:self.url.find("?")]
        # Preserve any query parameters already present on the URL.
        for key, value in parse_qsl(urlparse(self.url).query):
            params[key] = value
    else:
        base_url = self.url

    params["oauth_consumer_key"] = self.consumer_key
    params["oauth_timestamp"] = self.timestamp
    params["oauth_nonce"] = self.generate_nonce()
    params["oauth_signature_method"] = "HMAC-SHA256"
    # The signature must cover every other parameter, so compute it last.
    params["oauth_signature"] = self.generate_oauth_signature(params, base_url)

    return "%s?%s" % (base_url, urlencode(params))
[ "def", "get_oauth_url", "(", "self", ")", ":", "params", "=", "OrderedDict", "(", ")", "if", "\"?\"", "in", "self", ".", "url", ":", "url", "=", "self", ".", "url", "[", ":", "self", ".", "url", ".", "find", "(", "\"?\"", ")", "]", "for", "key", ...
34.7
18.75
def handle_backend_response(self, orig_request, backend_request,
                            response_status, response_headers,
                            response_body, method_config, start_response):
    """Transform a backend response into the body returned to the client.

    This calls start_response and returns the response body.

    Args:
        orig_request: An ApiRequest, the original request from the user.
        backend_request: An ApiRequest, the transformed request that was
            sent to the backend handler.
        response_status: A string, the status from the response.
        response_headers: A dict, the headers from the response.
        response_body: A string, the body of the response.
        method_config: A dict, the API config of the method to be called.
        start_response: A function with semantics defined in PEP-333.

    Returns:
        A string containing the response body.
    """
    # A non-JSON content type means the backend replied with an error
    # message; treat the body as that message and wrap it in a JSON error.
    for header_name, header_value in response_headers:
        if (header_name.lower() == 'content-type' and
                not header_value.lower().startswith('application/json')):
            return self.fail_request(orig_request,
                                     'Non-JSON reply: %s' % response_body,
                                     start_response)

    self.check_error_response(response_body, response_status)

    # Empty REST responses become HTTP 204 replies.
    empty_response = self.check_empty_response(orig_request, method_config,
                                               start_response)
    if empty_response is not None:
        return empty_response

    body = self.transform_rest_response(response_body)
    cors_handler = self._create_cors_handler(orig_request)
    return util.send_wsgi_response(response_status, response_headers, body,
                                   start_response, cors_handler=cors_handler)
[ "def", "handle_backend_response", "(", "self", ",", "orig_request", ",", "backend_request", ",", "response_status", ",", "response_headers", ",", "response_body", ",", "method_config", ",", "start_response", ")", ":", "# Verify that the response is json. If it isn't treat, t...
45.906977
23.465116
def uniformly_refine_triangulation(self, faces=False, trisect=False):
    """Return point coordinates defining a refined triangulation.

    By default every edge is bisected; ``trisect`` splits edges at the
    one-third point instead, and ``faces`` adds face centroids.
    """
    if faces:
        return self._add_face_centroids()
    if trisect:
        return self._add_tripoints(ratio=0.333333)
    return self._add_midpoints()
[ "def", "uniformly_refine_triangulation", "(", "self", ",", "faces", "=", "False", ",", "trisect", "=", "False", ")", ":", "if", "faces", ":", "x_v1", ",", "y_v1", "=", "self", ".", "_add_face_centroids", "(", ")", "else", ":", "if", "not", "trisect", ":"...
29.5
22.25
def QPSK_BEP(tx_data, rx_data, Ncorr=1024, Ntransient=0):
    """
    Count bit errors between a transmitted and received QPSK signal.
    Time delay between streams is detected as well as ambiquity resolution
    due to carrier phase lock offsets of :math:`k*\\frac{\\pi}{4}`, k=0,1,2,3.
    The ndarray tx_data is Tx +/-1 symbols as complex numbers I + j*Q.
    The ndarray rx_data is Rx +/-1 symbols as complex numbers I + j*Q.
    Note: Ncorr needs to be even

    Returns
    -------
    (symbol count, I-bit errors, Q-bit errors, symbol errors)
    """
    # Remove Ntransient symbols
    tx_data = tx_data[Ntransient:]
    rx_data = rx_data[Ntransient:]
    # Correlate the first Ncorr symbols at four possible phase rotations
    tx_fft_conj = np.conj(np.fft.fft(tx_data, Ncorr))
    R = []
    for rot in (1, 1j, -1, -1j):
        Rk = np.fft.ifft(np.fft.fft(rot * rx_data, Ncorr) * tx_fft_conj)
        # Place the zero lag value in the center of the array
        R.append(np.fft.fftshift(Rk))
    peaks = np.array([np.max(Rk.real) for Rk in R])
    kmax = int(np.argmax(peaks))
    Rmax = peaks[kmax]
    # Correlation lag value is zero at the center of the array.
    # BUGFIX: use integer division (Ncorr // 2) and an int() conversion --
    # the original Ncorr / 2 produced a float under Python 3, making
    # taumax a float and breaking the slice operations below.
    lagmax = np.where(R[kmax].real == Rmax)[0] - Ncorr // 2
    taumax = int(lagmax[0])
    print('kmax = %d, taumax = %d' % (kmax, taumax))
    # Count bit and symbol errors over the entire input ndarrays.
    # Begin by making tx and rx length equal and applying the detected
    # phase rotation to rx.
    if taumax < 0:
        tx_data = tx_data[-taumax:]
        tx_data = tx_data[:min(len(tx_data), len(rx_data))]
        rx_data = 1j**kmax * rx_data[:len(tx_data)]
    else:
        rx_data = 1j**kmax * rx_data[taumax:]
        rx_data = rx_data[:min(len(tx_data), len(rx_data))]
        tx_data = tx_data[:len(rx_data)]
    # Convert +/-1 symbol components to 0/1 bits
    S_count = len(tx_data)
    tx_I = np.int16((tx_data.real + 1) / 2)
    tx_Q = np.int16((tx_data.imag + 1) / 2)
    rx_I = np.int16((rx_data.real + 1) / 2)
    rx_Q = np.int16((rx_data.imag + 1) / 2)
    I_errors = tx_I ^ rx_I
    Q_errors = tx_Q ^ rx_Q
    # A symbol error occurs when I or Q or both are in error
    S_errors = I_errors | Q_errors
    return S_count, np.sum(I_errors), np.sum(Q_errors), np.sum(S_errors)
[ "def", "QPSK_BEP", "(", "tx_data", ",", "rx_data", ",", "Ncorr", "=", "1024", ",", "Ntransient", "=", "0", ")", ":", "#Remove Ntransient symbols", "tx_data", "=", "tx_data", "[", "Ntransient", ":", "]", "rx_data", "=", "rx_data", "[", "Ntransient", ":", "]...
40.897059
15.161765
def start(path=None, host=None, port=None, color=None, cors=None, detach=False, nolog=False):
    """start web server"""
    if detach:
        # Re-launch ourselves in the background, dropping the -d flag and
        # silencing console logging in the child.
        sys.argv.append('--no-log')
        del sys.argv[sys.argv.index('-d')]
        cmd = sys.executable + ' ' + ' '.join([sys.argv[0], 'start'] + sys.argv[1:])
        if os.name == 'nt':
            cmd = 'start /B %s' % cmd
        else:
            cmd = '%s &' % cmd
        os.system(cmd)
    else:
        if path:
            path = os.path.abspath(path)
        cfg = app.config
        # Explicit arguments win over existing config, which wins over defaults.
        cfg['PATH_HTML'] = first_value(path, cfg.get('PATH_HTML', None), os.getcwd())
        cfg['HOST'] = first_value(host, cfg.get('HOST', None), '0.0.0.0')
        cfg['PORT'] = int(first_value(port, cfg.get('PORT', None), 5001))
        app.logger.setLevel(logging.DEBUG)
        cfg['historylog'] = HistoryHandler()
        app.logger.addHandler(cfg['historylog'])
        if not nolog:
            app.logger.addHandler(StreamHandler())
        if cors:
            CORS(app)
        app.run(host=cfg['HOST'], port=cfg['PORT'], threaded=True)
[ "def", "start", "(", "path", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "color", "=", "None", ",", "cors", "=", "None", ",", "detach", "=", "False", ",", "nolog", "=", "False", ")", ":", "if", "detach", ":", "sys", "....
43.148148
18.592593
def load(self):
    """Extract tabular data as |TableData| instances from a MediaWiki text object.

    |load_source_desc_text|

    :return: Loaded table data iterator.
        |load_table_name_desc|

        ===================  ==============================================
        Format specifier     Value after the replacement
        ===================  ==============================================
        ``%(filename)s``     ``""``
        ``%(key)s``          | This replaced to:
                             | **(1)** ``caption`` mark of the table
                             | **(2)** ``%(format_name)s%(format_id)s``
                             | if ``caption`` mark not included
                             | in the table.
        ``%(format_name)s``  ``"mediawiki"``
        ``%(format_id)s``    |format_id_desc|
        ``%(global_id)s``    |global_id|
        ===================  ==============================================
    :rtype: |TableData| iterator
    :raises pytablereader.DataError:
        If the MediaWiki data is invalid or empty.
    """
    self._validate()
    self._logger.logging_load()

    source_formatter = MediaWikiTableFormatter(self.source)
    source_formatter.accept(self)
    return source_formatter.to_table_data()
[ "def", "load", "(", "self", ")", ":", "self", ".", "_validate", "(", ")", "self", ".", "_logger", ".", "logging_load", "(", ")", "formatter", "=", "MediaWikiTableFormatter", "(", "self", ".", "source", ")", "formatter", ".", "accept", "(", "self", ")", ...
38.657143
18.942857
def buffered_read(fh, lock, offsets, bytecounts, buffersize=None):
    """Yield file segments, reading them in batches of ~buffersize bytes.

    The file handle is only touched while *lock* is held; buffered
    segments are yielded outside the lock.
    """
    if buffersize is None:
        buffersize = 2**26
    total = len(offsets)
    index = 0
    while index < total:
        batch = []
        with lock:
            filled = 0
            # Read segments until the buffer budget is spent.
            while filled < buffersize and index < total:
                fh.seek(offsets[index])
                count = bytecounts[index]
                batch.append(fh.read(count))
                filled += count
                index += 1
        yield from batch
[ "def", "buffered_read", "(", "fh", ",", "lock", ",", "offsets", ",", "bytecounts", ",", "buffersize", "=", "None", ")", ":", "if", "buffersize", "is", "None", ":", "buffersize", "=", "2", "**", "26", "length", "=", "len", "(", "offsets", ")", "i", "=...
33.047619
12.809524
def extract(self, content, output):
    """Try to extract tables from an invoice.

    For each configured table, the ``start``/``end`` regexes delimit the
    table body in ``content``; each line of the body is matched against
    the ``body`` regex and any named groups are written into ``output``
    (existing keys are never overwritten).

    Returns ``None`` as soon as a date field fails to parse; otherwise
    ``output`` is mutated in place and nothing is returned.
    """
    for table in self['tables']:
        # First apply default options, then overlay this table's settings;
        # the merged dict replaces the raw config for the rest of the loop.
        plugin_settings = DEFAULT_OPTIONS.copy()
        plugin_settings.update(table)
        table = plugin_settings

        # Validate settings
        # NOTE(review): asserts are stripped under `python -O`; presumably
        # configs are trusted here -- confirm before relying on these.
        assert 'start' in table, 'Table start regex missing'
        assert 'end' in table, 'Table end regex missing'
        assert 'body' in table, 'Table body regex missing'

        start = re.search(table['start'], content)
        end = re.search(table['end'], content)
        if not start or not end:
            logger.warning('no table body found - start %s, end %s', start, end)
            continue

        # Body is everything strictly between the start and end matches.
        table_body = content[start.end(): end.start()]

        for line in re.split(table['line_separator'], table_body):
            # if the line has empty lines in it , skip them
            if not line.strip('').strip('\n') or not line:
                continue
            match = re.search(table['body'], line)
            if match:
                for field, value in match.groupdict().items():
                    # If a field name already exists, do not overwrite it
                    if field in output:
                        continue

                    # Field-name conventions select the parser: date-like
                    # names go through parse_date, amount-like through
                    # parse_number, everything else is stored verbatim.
                    if field.startswith('date') or field.endswith('date'):
                        output[field] = self.parse_date(value)
                        if not output[field]:
                            # A bad date aborts the whole extraction.
                            logger.error("Date parsing failed on date '%s'", value)
                            return None
                    elif field.startswith('amount'):
                        output[field] = self.parse_number(value)
                    else:
                        output[field] = value
            # NOTE(review): this debug line runs for every non-empty line,
            # matched or not -- the message suggests it was meant for the
            # no-match case only; confirm intent.
            logger.debug('ignoring *%s* because it doesn\'t match anything', line)
[ "def", "extract", "(", "self", ",", "content", ",", "output", ")", ":", "for", "table", "in", "self", "[", "'tables'", "]", ":", "# First apply default options.", "plugin_settings", "=", "DEFAULT_OPTIONS", ".", "copy", "(", ")", "plugin_settings", ".", "update...
39.26087
20.326087
def mercado(self):
    """Fetch the market status for the current round.

    Returns:
        A cartolafc.Mercado instance describing the market status of
        the current round.
    """
    endpoint = '{api_url}/mercado/status'.format(api_url=self._api_url)
    payload = self._request(endpoint)
    return Mercado.from_dict(payload)
[ "def", "mercado", "(", "self", ")", ":", "url", "=", "'{api_url}/mercado/status'", ".", "format", "(", "api_url", "=", "self", ".", "_api_url", ")", "data", "=", "self", ".", "_request", "(", "url", ")", "return", "Mercado", ".", "from_dict", "(", "data"...
33.7
22.2
def make_call_types(f, globals_d):
    # type: (Callable, Dict) -> Tuple[Dict[str, Anno], Anno]
    """Build the call_types mapping that describes what arguments to pass to f.

    Args:
        f: The function to inspect for argument names (without self)
        globals_d: A dictionary of globals to lookup annotation definitions in

    Returns:
        A tuple of (ordered arg-name -> Anno mapping, return Anno or None).
    """
    spec = getargspec(f)
    args = [name for name in spec.args if name != "self"]

    # Pair each trailing argument with its default value, if any.
    defaults = {}  # type: Dict[str, Any]
    if spec.defaults:
        for name, value in zip(args[-len(spec.defaults):], spec.defaults):
            defaults[name] = value

    if getattr(f, "__annotations__", None):
        annotations = f.__annotations__
    else:
        # Fall back to string annotations built from a type comment.
        annotations = make_annotations(f, globals_d)

    call_types = OrderedDict()  # type: Dict[str, Anno]
    for name in args:
        anno = anno_with_default(annotations[name],
                                 defaults.get(name, NO_DEFAULT))
        assert isinstance(anno, Anno), \
            "Argument %r has type %r which is not an Anno" % (name, anno)
        call_types[name] = anno

    return_type = anno_with_default(annotations.get("return", None))
    if return_type is Any:
        return_type = Anno("Any return value", Any, "return")
    assert return_type is None or isinstance(return_type, Anno), \
        "Return has type %r which is not an Anno" % (return_type,)
    return call_types, return_type
[ "def", "make_call_types", "(", "f", ",", "globals_d", ")", ":", "# type: (Callable, Dict) -> Tuple[Dict[str, Anno], Anno]", "arg_spec", "=", "getargspec", "(", "f", ")", "args", "=", "[", "k", "for", "k", "in", "arg_spec", ".", "args", "if", "k", "!=", "\"self...
39.351351
20.675676
def cmd_startstop(options):
    """Start or Stop the specified instance.

    Finds instances that match args and the instance-state expected by
    the command, determines the target instance, performs the action on
    it, and displays the returned state transition.

    Args:
        options (object): contains args and data from parser.
    """
    # The expected current state is the opposite of the command.
    expected_state = {"start": "stopped", "stop": "running"}
    options.inst_state = expected_state[options.command]
    debg.dprint("toggle set state: ", options.inst_state)

    (i_info, param_str) = gather_data(options)
    (tar_inst, tar_idx) = determine_inst(i_info, param_str, options.command)
    response = awsc.startstop(tar_inst, options.command)

    # Pick the response key that matches the command issued.
    filt = {"start": "StartingInstances",
            "stop": "StoppingInstances"}[options.command]
    resp = {}
    for idx, term in enumerate(('CurrentState', 'PreviousState')):
        resp[idx] = response[filt][0][term]['Name']
    print("Current State: {}{}{} - Previous State: {}{}{}\n".
          format(C_STAT[resp[0]], resp[0], C_NORM,
                 C_STAT[resp[1]], resp[1], C_NORM))
[ "def", "cmd_startstop", "(", "options", ")", ":", "statelu", "=", "{", "\"start\"", ":", "\"stopped\"", ",", "\"stop\"", ":", "\"running\"", "}", "options", ".", "inst_state", "=", "statelu", "[", "options", ".", "command", "]", "debg", ".", "dprint", "(",...
44.076923
20.192308
def visitObjectExpr(self, ctx: jsgParser.ObjectExprContext):
    """ objectExpr: OBRACE membersDef? CBRACE
                    OBRACE (LEXER_ID_REF | ANY)? MAPSTO valueType ebnfSuffix? CBRACE
    """
    if not self._name:
        self._name = self._context.anon_id()
    if ctx.membersDef():
        # Plain member-list form: recurse into the children.
        self.visitChildren(ctx)
    elif ctx.MAPSTO():
        # Map form: optional key token, a value type, optional EBNF suffix.
        if ctx.LEXER_ID_REF():
            self._map_name_type = as_token(ctx)
        # Any and absent mean the same thing
        self._map_valuetype = JSGValueType(self._context, ctx.valueType())
        if ctx.ebnfSuffix():
            self._map_ebnf = JSGEbnf(self._context, ctx.ebnfSuffix())
[ "def", "visitObjectExpr", "(", "self", ",", "ctx", ":", "jsgParser", ".", "ObjectExprContext", ")", ":", "if", "not", "self", ".", "_name", ":", "self", ".", "_name", "=", "self", ".", "_context", ".", "anon_id", "(", ")", "if", "ctx", ".", "membersDef...
45.866667
15
def watchdog_handler(self):
    """Stop the watchdog and signal a connection reset after it expires."""
    _LOGGING.debug('%s Watchdog expired. Resetting connection.', self.name)
    self.watchdog.stop()
    # Signal the reset thread to rebuild the connection.
    self.reset_thrd.set()
[ "def", "watchdog_handler", "(", "self", ")", ":", "_LOGGING", ".", "debug", "(", "'%s Watchdog expired. Resetting connection.'", ",", "self", ".", "name", ")", "self", ".", "watchdog", ".", "stop", "(", ")", "self", ".", "reset_thrd", ".", "set", "(", ")" ]
43.4
15
def _get_matching_dns_entry_ids(self, identifier=None, rtype=None, name=None, content=None): """Return a list of DNS entries that match the given criteria.""" record_ids = [] if not identifier: records = self._list_records(rtype, name, content) record_ids = [record['id'] for record in records] else: record_ids.append(identifier) return record_ids
[ "def", "_get_matching_dns_entry_ids", "(", "self", ",", "identifier", "=", "None", ",", "rtype", "=", "None", ",", "name", "=", "None", ",", "content", "=", "None", ")", ":", "record_ids", "=", "[", "]", "if", "not", "identifier", ":", "records", "=", ...
45.1
16.4
def getStyleCount(self, verbose=None):
    """Return the number of Visual Styles available in the current session.

    :param verbose: print more
    :returns: 200: successful operation
    """
    return api(url=self.___url + 'styles/count', method="GET",
               verbose=verbose, parse_params=False)
[ "def", "getStyleCount", "(", "self", ",", "verbose", "=", "None", ")", ":", "response", "=", "api", "(", "url", "=", "self", ".", "___url", "+", "'styles/count'", ",", "method", "=", "\"GET\"", ",", "verbose", "=", "verbose", ",", "parse_params", "=", ...
30.818182
22.454545
def disable_host_svc_notifications(self, host):
    """Disable notifications for every service attached to a host.

    Format of the line that triggers function call::

    DISABLE_HOST_SVC_NOTIFICATIONS;<host_name>

    :param host: host to edit
    :type host: alignak.objects.host.Host
    :return: None
    """
    known_services = self.daemon.services
    for service_id in host.services:
        if service_id not in known_services:
            continue
        service = known_services[service_id]
        self.disable_svc_notifications(service)
        self.send_an_element(service.get_update_status_brok())
[ "def", "disable_host_svc_notifications", "(", "self", ",", "host", ")", ":", "for", "service_id", "in", "host", ".", "services", ":", "if", "service_id", "in", "self", ".", "daemon", ".", "services", ":", "service", "=", "self", ".", "daemon", ".", "servic...
39.2
14.466667
def b58check_unpack(b58_s):
    """Decode a base58-check string.

    Takes in a base 58 check string and returns: the version byte, the
    original encoded binary string, and the checksum.

    :param b58_s: the base58-check encoded string
    :return: a (version_byte, encoded_value, checksum) tuple of byte strings
    :raises ValueError: if the embedded checksum does not match
    """
    # Each leading '1' in base58 encodes one leading zero byte.
    num_leading_zeros = len(re.match(r'^1*', b58_s).group(0))
    # convert from b58 to b16
    hex_s = change_charset(b58_s, B58_KEYSPACE, HEX_KEYSPACE)
    # if an odd number of hex characters are present, add a zero to the front
    if len(hex_s) % 2 == 1:
        hex_s = "0" + hex_s
    # convert from b16 to b2
    bin_s = unhexlify(hex_s)
    # add in the leading zeros
    # BUGFIX: unhexlify() returns bytes, so the padding must be a bytes
    # literal -- prepending a str ('\x00') raises TypeError on Python 3.
    bin_s = b'\x00' * num_leading_zeros + bin_s
    # make sure the newly calculated checksum equals the embedded checksum
    newly_calculated_checksum = bin_checksum(bin_s[:-4])
    embedded_checksum = bin_s[-4:]
    if not (newly_calculated_checksum == embedded_checksum):
        raise ValueError('b58check value has an invalid checksum')
    # return values: 1 version byte, payload, 4 checksum bytes
    version_byte = bin_s[:1]
    encoded_value = bin_s[1:-4]
    checksum = bin_s[-4:]
    return version_byte, encoded_value, checksum
[ "def", "b58check_unpack", "(", "b58_s", ")", ":", "num_leading_zeros", "=", "len", "(", "re", ".", "match", "(", "r'^1*'", ",", "b58_s", ")", ".", "group", "(", "0", ")", ")", "# convert from b58 to b16", "hex_s", "=", "change_charset", "(", "b58_s", ",", ...
42.5
14.708333
def move(self, from_id, to_uuid):
    """Move an identity into a unique identity.

    Moves the identity identified by <from_id> into the unique identity
    <to_uuid>. When <from_id> equals <to_uuid> and that unique identity
    does not exist, a new unique identity is created and the identity is
    detached from its current one and attached to the new one. When
    <to_uuid> is already the unique identity of <from_id>, or either
    value is None or empty, nothing happens.

    :param from_id: identifier of the identity set to be moved
    :param to_uuid: identifier of the unique identity where 'from_id'
        will be moved
    """
    # Empty arguments are a silent no-op.
    if not from_id or not to_uuid:
        return CMD_SUCCESS

    try:
        api.move_identity(self.db, from_id, to_uuid)
        self.display('move.tmpl', from_id=from_id, to_uuid=to_uuid)
    except NotFoundError as exc:
        self.error(str(exc))
        return exc.code

    return CMD_SUCCESS
[ "def", "move", "(", "self", ",", "from_id", ",", "to_uuid", ")", ":", "if", "not", "from_id", "or", "not", "to_uuid", ":", "return", "CMD_SUCCESS", "try", ":", "api", ".", "move_identity", "(", "self", ".", "db", ",", "from_id", ",", "to_uuid", ")", ...
38.1
22.166667