repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
infothrill/python-dyndnsc
dyndnsc/detector/dnswanip.py
find_ip
def find_ip(family=AF_INET, flavour="opendns"): """Find the publicly visible IP address of the current system. This uses public DNS infrastructure that implement a special DNS "hack" to return the IP address of the requester rather than some other address. :param family: address family, optional, default AF_INET (ipv4) :param flavour: selector for public infrastructure provider, optional """ flavours = { "opendns": { AF_INET: { "@": ("resolver1.opendns.com", "resolver2.opendns.com"), "qname": "myip.opendns.com", "rdtype": "A", }, AF_INET6: { "@": ("resolver1.ipv6-sandbox.opendns.com", "resolver2.ipv6-sandbox.opendns.com"), "qname": "myip.opendns.com", "rdtype": "AAAA", }, }, } flavour = flavours["opendns"] resolver = dns.resolver.Resolver() # specify the custom nameservers to be used (as IPs): resolver.nameservers = [next(iter(resolve(h, family=family))) for h in flavour[family]["@"]] answers = resolver.query(qname=flavour[family]["qname"], rdtype=flavour[family]["rdtype"]) for rdata in answers: return rdata.address return None
python
def find_ip(family=AF_INET, flavour="opendns"):
    """Find the publicly visible IP address of the current system.

    This uses public DNS infrastructure that implement a special DNS
    "hack" to return the IP address of the requester rather than some
    other address.

    :param family: address family, optional, default AF_INET (ipv4)
    :param flavour: selector for public infrastructure provider, optional
    """
    flavours = {
        "opendns": {
            AF_INET: {
                "@": ("resolver1.opendns.com", "resolver2.opendns.com"),
                "qname": "myip.opendns.com",
                "rdtype": "A",
            },
            AF_INET6: {
                "@": ("resolver1.ipv6-sandbox.opendns.com",
                      "resolver2.ipv6-sandbox.opendns.com"),
                "qname": "myip.opendns.com",
                "rdtype": "AAAA",
            },
        },
    }
    # BUG FIX: honour the flavour argument. The original reassigned
    # flavour = flavours["opendns"], silently ignoring the parameter.
    provider = flavours[flavour]
    resolver = dns.resolver.Resolver()
    # specify the custom nameservers to be used (as IPs):
    resolver.nameservers = [next(iter(resolve(h, family=family)))
                            for h in provider[family]["@"]]
    answers = resolver.query(qname=provider[family]["qname"],
                             rdtype=provider[family]["rdtype"])
    for rdata in answers:
        # Only the first answer is of interest.
        return rdata.address
    return None
[ "def", "find_ip", "(", "family", "=", "AF_INET", ",", "flavour", "=", "\"opendns\"", ")", ":", "flavours", "=", "{", "\"opendns\"", ":", "{", "AF_INET", ":", "{", "\"@\"", ":", "(", "\"resolver1.opendns.com\"", ",", "\"resolver2.opendns.com\"", ")", ",", "\"...
Find the publicly visible IP address of the current system. This uses public DNS infrastructure that implement a special DNS "hack" to return the IP address of the requester rather than some other address. :param family: address family, optional, default AF_INET (ipv4) :param flavour: selector for public infrastructure provider, optional
[ "Find", "the", "publicly", "visible", "IP", "address", "of", "the", "current", "system", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/dnswanip.py#L19-L51
train
44,200
infothrill/python-dyndnsc
dyndnsc/detector/dnswanip.py
IPDetector_DnsWanIp.detect
def detect(self): """ Detect the WAN IP of the current process through DNS. Depending on the 'family' option, either ipv4 or ipv6 resolution is carried out. :return: ip address """ theip = find_ip(family=self.opts_family) self.set_current_value(theip) return theip
python
def detect(self):
    """
    Detect the WAN IP of the current process through DNS.

    Depending on the 'family' option, either ipv4 or ipv6 resolution is
    carried out.

    :return: ip address
    """
    # Delegate resolution to the module-level helper, then record the
    # result before handing it back to the caller.
    detected_ip = find_ip(family=self.opts_family)
    self.set_current_value(detected_ip)
    return detected_ip
[ "def", "detect", "(", "self", ")", ":", "theip", "=", "find_ip", "(", "family", "=", "self", ".", "opts_family", ")", "self", ".", "set_current_value", "(", "theip", ")", "return", "theip" ]
Detect the WAN IP of the current process through DNS. Depending on the 'family' option, either ipv4 or ipv6 resolution is carried out. :return: ip address
[ "Detect", "the", "WAN", "IP", "of", "the", "current", "process", "through", "DNS", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/dnswanip.py#L76-L87
train
44,201
infothrill/python-dyndnsc
dyndnsc/common/dynamiccli.py
parse_cmdline_args
def parse_cmdline_args(args, classes): """ Parse all updater and detector related arguments from args. Returns a list of ("name", { "k": "v"}) :param args: argparse arguments """ if args is None: raise ValueError("args must not be None") parsed_args = {} for kls in classes: prefix = kls.configuration_key_prefix() name = kls.configuration_key if getattr(args, "%s_%s" % (prefix, name), False): logging.debug( "Gathering initargs for '%s.%s'", prefix, name) initargs = {} for arg_name in kls.init_argnames(): val = getattr(args, "%s_%s_%s" % (prefix, name, arg_name)) if val is not None: initargs[arg_name] = val if prefix not in parsed_args: parsed_args[prefix] = [] parsed_args[prefix].append((name, initargs)) return parsed_args
python
def parse_cmdline_args(args, classes):
    """
    Parse all updater and detector related arguments from args.

    Returns a dict mapping each configuration key prefix to a list of
    ("name", {"arg": "value"}) tuples, one entry per enabled class.
    (The original docstring claimed a list was returned; the code has
    always returned a dict.)

    :param args: argparse arguments
    :param classes: iterable of classes exposing the dynamic cli protocol
    :raises ValueError: if args is None
    """
    if args is None:
        raise ValueError("args must not be None")
    parsed_args = {}
    for kls in classes:
        prefix = kls.configuration_key_prefix()
        name = kls.configuration_key
        # Only gather init args for classes explicitly enabled on the cli:
        if getattr(args, "%s_%s" % (prefix, name), False):
            logging.debug(
                "Gathering initargs for '%s.%s'", prefix, name)
            initargs = {}
            for arg_name in kls.init_argnames():
                val = getattr(args, "%s_%s_%s" % (prefix, name, arg_name))
                if val is not None:
                    initargs[arg_name] = val
            # setdefault replaces the manual "if prefix not in" dance:
            parsed_args.setdefault(prefix, []).append((name, initargs))
    return parsed_args
[ "def", "parse_cmdline_args", "(", "args", ",", "classes", ")", ":", "if", "args", "is", "None", ":", "raise", "ValueError", "(", "\"args must not be None\"", ")", "parsed_args", "=", "{", "}", "for", "kls", "in", "classes", ":", "prefix", "=", "kls", ".", ...
Parse all updater and detector related arguments from args. Returns a list of ("name", { "k": "v"}) :param args: argparse arguments
[ "Parse", "all", "updater", "and", "detector", "related", "arguments", "from", "args", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/dynamiccli.py#L11-L37
train
44,202
infothrill/python-dyndnsc
dyndnsc/common/dynamiccli.py
DynamicCliMixin.register_arguments
def register_arguments(cls, parser): """Register command line options. Implement this method for normal options behavior with protection from OptionConflictErrors. If you override this method and want the default --$name option(s) to be registered, be sure to call super(). """ if hasattr(cls, "_dont_register_arguments"): return prefix = cls.configuration_key_prefix() cfgkey = cls.configuration_key parser.add_argument("--%s-%s" % (prefix, cfgkey), action="store_true", dest="%s_%s" % (prefix, cfgkey), default=False, help="%s: %s" % (cls.__name__, cls.help())) args = cls.init_argnames() defaults = cls._init_argdefaults() for arg in args[0:len(args) - len(defaults)]: parser.add_argument("--%s-%s-%s" % (prefix, cfgkey, arg), dest="%s_%s_%s" % (prefix, cfgkey, arg), help="") for i, arg in enumerate(args[len(args) - len(defaults):]): parser.add_argument("--%s-%s-%s" % (prefix, cfgkey, arg), dest="%s_%s_%s" % (prefix, cfgkey, arg), default=defaults[i], help="default: %(default)s")
python
def register_arguments(cls, parser):
    """Register command line options.

    Implement this method for normal options behavior with protection from
    OptionConflictErrors. If you override this method and want the default
    --$name option(s) to be registered, be sure to call super().
    """
    if hasattr(cls, "_dont_register_arguments"):
        return
    prefix = cls.configuration_key_prefix()
    cfgkey = cls.configuration_key
    # Boolean switch that enables this updater/detector on the cli:
    parser.add_argument(
        "--%s-%s" % (prefix, cfgkey),
        action="store_true",
        dest="%s_%s" % (prefix, cfgkey),
        default=False,
        help="%s: %s" % (cls.__name__, cls.help()))
    argnames = cls.init_argnames()
    defaults = cls._init_argdefaults()
    # Defaults align with the *last* len(defaults) init args.
    split_at = len(argnames) - len(defaults)
    # Mandatory init arguments (no default value available):
    for argname in argnames[:split_at]:
        parser.add_argument(
            "--%s-%s-%s" % (prefix, cfgkey, argname),
            dest="%s_%s_%s" % (prefix, cfgkey, argname),
            help="")
    # Optional init arguments, paired explicitly with their defaults:
    for argname, default in zip(argnames[split_at:], defaults):
        parser.add_argument(
            "--%s-%s-%s" % (prefix, cfgkey, argname),
            dest="%s_%s_%s" % (prefix, cfgkey, argname),
            default=default,
            help="default: %(default)s")
[ "def", "register_arguments", "(", "cls", ",", "parser", ")", ":", "if", "hasattr", "(", "cls", ",", "\"_dont_register_arguments\"", ")", ":", "return", "prefix", "=", "cls", ".", "configuration_key_prefix", "(", ")", "cfgkey", "=", "cls", ".", "configuration_k...
Register command line options. Implement this method for normal options behavior with protection from OptionConflictErrors. If you override this method and want the default --$name option(s) to be registered, be sure to call super().
[ "Register", "command", "line", "options", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/dynamiccli.py#L60-L87
train
44,203
softwarefactory-project/rdopkg
rdopkg/actions/distgit/actions.py
tag_patches_branch
def tag_patches_branch(package, local_patches_branch, patches_branch, force=False, push=False): """ Tag the local_patches_branch with this package's NVR. """ vr = specfile.Spec().get_vr(epoch=False) nvr_tag = package + '-' + vr tag_cmd = ['tag', nvr_tag, local_patches_branch] if force: tag_cmd.append('-f') git(*tag_cmd) if push: patches_remote = patches_branch.partition('/')[0] git('push', patches_remote, nvr_tag) else: print('Not pushing tag. Run "git push patches %s" by hand.' % nvr_tag)
python
def tag_patches_branch(package, local_patches_branch, patches_branch,
                       force=False, push=False):
    """
    Tag the local_patches_branch with this package's NVR.
    """
    # Build the NVR tag name from the package name plus Version-Release
    # parsed out of the specfile (epoch deliberately omitted).
    nvr_tag = "%s-%s" % (package, specfile.Spec().get_vr(epoch=False))
    cmd = ['tag', nvr_tag, local_patches_branch]
    if force:
        cmd += ['-f']
    git(*cmd)
    if not push:
        print('Not pushing tag. Run "git push patches %s" by hand.' % nvr_tag)
        return
    # The remote name is the part of the branch before the first '/':
    remote = patches_branch.partition('/')[0]
    git('push', remote, nvr_tag)
[ "def", "tag_patches_branch", "(", "package", ",", "local_patches_branch", ",", "patches_branch", ",", "force", "=", "False", ",", "push", "=", "False", ")", ":", "vr", "=", "specfile", ".", "Spec", "(", ")", ".", "get_vr", "(", "epoch", "=", "False", ")"...
Tag the local_patches_branch with this package's NVR.
[ "Tag", "the", "local_patches_branch", "with", "this", "package", "s", "NVR", "." ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/actions/distgit/actions.py#L1014-L1027
train
44,204
infothrill/python-dyndnsc
dyndnsc/common/load.py
load_class
def load_class(module_name, class_name): """Return class object specified by module name and class name. Return None if module failed to be imported. :param module_name: string module name :param class_name: string class name """ try: plugmod = import_module(module_name) except Exception as exc: warn("Importing built-in plugin %s.%s raised an exception: %r" % (module_name, class_name, repr(exc)), ImportWarning) return None else: return getattr(plugmod, class_name)
python
def load_class(module_name, class_name):
    """Return class object specified by module name and class name.

    Return None if module failed to be imported.

    :param module_name: string module name
    :param class_name: string class name
    """
    try:
        plugmod = import_module(module_name)
    except Exception as exc:
        # BUG FIX: %r already calls repr(); the original passed repr(exc),
        # double-quoting the message ("ValueError('x')" rendered as
        # "\"ValueError('x')\"").
        warn("Importing built-in plugin %s.%s raised an exception: %r"
             % (module_name, class_name, exc), ImportWarning)
        return None
    else:
        return getattr(plugmod, class_name)
[ "def", "load_class", "(", "module_name", ",", "class_name", ")", ":", "try", ":", "plugmod", "=", "import_module", "(", "module_name", ")", "except", "Exception", "as", "exc", ":", "warn", "(", "\"Importing built-in plugin %s.%s raised an exception: %r\"", "%", "(",...
Return class object specified by module name and class name. Return None if module failed to be imported. :param module_name: string module name :param class_name: string class name
[ "Return", "class", "object", "specified", "by", "module", "name", "and", "class", "name", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/load.py#L9-L24
train
44,205
infothrill/python-dyndnsc
dyndnsc/common/load.py
find_class
def find_class(name, classes): """Return class in ``classes`` identified by configuration key ``name``.""" name = name.lower() cls = next((c for c in classes if c.configuration_key == name), None) if cls is None: raise ValueError("No class named '%s' could be found" % name) return cls
python
def find_class(name, classes):
    """Return class in ``classes`` identified by configuration key ``name``."""
    # Lookup is case-insensitive: configuration keys are lower-case.
    wanted = name.lower()
    for candidate in classes:
        if candidate.configuration_key == wanted:
            return candidate
    raise ValueError("No class named '%s' could be found" % wanted)
[ "def", "find_class", "(", "name", ",", "classes", ")", ":", "name", "=", "name", ".", "lower", "(", ")", "cls", "=", "next", "(", "(", "c", "for", "c", "in", "classes", "if", "c", ".", "configuration_key", "==", "name", ")", ",", "None", ")", "if...
Return class in ``classes`` identified by configuration key ``name``.
[ "Return", "class", "in", "classes", "identified", "by", "configuration", "key", "name", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/load.py#L27-L33
train
44,206
GreenBuildingRegistry/yaml-config
yamlconf/utils.py
decamel
def decamel(string): """"Split CamelCased words. CamelCase -> Camel Case, dromedaryCase -> dromedary Case. """ regex = re.compile(r'(\B[A-Z][a-z]*)') return regex.sub(r' \1', string)
python
def decamel(string):
    """Split CamelCased words.

    CamelCase -> Camel Case, dromedaryCase -> dromedary Case.

    (Fixes a docstring typo: the original opened with four quotes,
    leaking a stray '"' into the docstring text.)
    """
    # \B keeps a capital at a word start (position 0) from being split off.
    regex = re.compile(r'(\B[A-Z][a-z]*)')
    return regex.sub(r' \1', string)
[ "def", "decamel", "(", "string", ")", ":", "regex", "=", "re", ".", "compile", "(", "r'(\\B[A-Z][a-z]*)'", ")", "return", "regex", ".", "sub", "(", "r' \\1'", ",", "string", ")" ]
Split CamelCased words. CamelCase -> Camel Case, dromedaryCase -> dromedary Case.
[ "Split", "CamelCased", "words", "." ]
3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10
https://github.com/GreenBuildingRegistry/yaml-config/blob/3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10/yamlconf/utils.py#L48-L54
train
44,207
GreenBuildingRegistry/yaml-config
yamlconf/utils.py
decamel_to_snake
def decamel_to_snake(string): """Convert to lower case, join camel case with underscore. CamelCase -> camel_case. Camel Case -> camel_case. """ strings = [decamel(word) if not word.isupper() else word.lower() for word in string.split()] return "_".join([snake(dstring)for dstring in strings])
python
def decamel_to_snake(string):
    """Convert to lower case, join camel case with underscore.

    CamelCase -> camel_case. Camel Case -> camel_case.
    """
    pieces = []
    for word in string.split():
        # Fully upper-cased words (acronyms) are just lowered; everything
        # else is first split at its camel humps.
        pieces.append(word.lower() if word.isupper() else decamel(word))
    return "_".join(snake(piece) for piece in pieces)
[ "def", "decamel_to_snake", "(", "string", ")", ":", "strings", "=", "[", "decamel", "(", "word", ")", "if", "not", "word", ".", "isupper", "(", ")", "else", "word", ".", "lower", "(", ")", "for", "word", "in", "string", ".", "split", "(", ")", "]",...
Convert to lower case, join camel case with underscore. CamelCase -> camel_case. Camel Case -> camel_case.
[ "Convert", "to", "lower", "case", "join", "camel", "case", "with", "underscore", ".", "CamelCase", "-", ">", "camel_case", ".", "Camel", "Case", "-", ">", "camel_case", "." ]
3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10
https://github.com/GreenBuildingRegistry/yaml-config/blob/3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10/yamlconf/utils.py#L57-L63
train
44,208
softwarefactory-project/rdopkg
rdopkg/actionmods/rdoinfo.py
info_file
def info_file(distro=None): """Return default distroinfo info file""" if not distro: distro = cfg['DISTRO'] info_file_conf = distro.upper() + 'INFO_FILE' try: return cfg[info_file_conf] except KeyError: raise exception.InvalidUsage( why="Couldn't find config option %s for distro: %s" % (info_file_conf, distro))
python
def info_file(distro=None):
    """Return default distroinfo info file"""
    # Fall back to the configured default distro when none was given.
    distro = distro or cfg['DISTRO']
    conf_key = distro.upper() + 'INFO_FILE'
    try:
        return cfg[conf_key]
    except KeyError:
        raise exception.InvalidUsage(
            why="Couldn't find config option %s for distro: %s" % (
                conf_key, distro))
[ "def", "info_file", "(", "distro", "=", "None", ")", ":", "if", "not", "distro", ":", "distro", "=", "cfg", "[", "'DISTRO'", "]", "info_file_conf", "=", "distro", ".", "upper", "(", ")", "+", "'INFO_FILE'", "try", ":", "return", "cfg", "[", "info_file_...
Return default distroinfo info file
[ "Return", "default", "distroinfo", "info", "file" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/actionmods/rdoinfo.py#L87-L97
train
44,209
softwarefactory-project/rdopkg
rdopkg/actionmods/rdoinfo.py
get_distroinfo
def get_distroinfo(distro=None): """Get DistroInfo initialized from configuration""" if not distro: distro = cfg['DISTRO'] _info_file = info_file(distro) # prefer git fetcher if available git_info_url_conf = distro.upper() + 'INFO_REPO' try: remote_git_info = cfg[git_info_url_conf] return DistroInfo(_info_file, remote_git_info=remote_git_info) except KeyError: pass # try raw remote fetcher remote_info_url_conf = distro.upper() + 'INFO_RAW_URL' try: remote_info = cfg[remote_info_url_conf] return DistroInfo(_info_file, remote_info=remote_info) except KeyError: raise exception.InvalidUsage( why="Couldn't find config option %s or %s for distro: %s" % (git_info_url_conf, remote_info_url_conf, distro))
python
def get_distroinfo(distro=None):
    """Get DistroInfo initialized from configuration

    :param distro: distro name; defaults to the configured DISTRO
    :raises exception.InvalidUsage: when neither the git nor the raw-URL
        config option exists for the distro
    """
    if not distro:
        distro = cfg['DISTRO']
    _info_file = info_file(distro)
    git_conf = distro.upper() + 'INFO_REPO'
    raw_conf = distro.upper() + 'INFO_RAW_URL'
    # prefer git fetcher if available.
    # BUG FIX: keep only the cfg lookup inside the try -- the original
    # also wrapped the DistroInfo() construction, so a KeyError raised
    # *inside* DistroInfo was silently swallowed and misrouted to the
    # raw-URL fallback.
    try:
        remote_git_info = cfg[git_conf]
    except KeyError:
        pass
    else:
        return DistroInfo(_info_file, remote_git_info=remote_git_info)
    # try raw remote fetcher
    try:
        remote_info = cfg[raw_conf]
    except KeyError:
        raise exception.InvalidUsage(
            why="Couldn't find config option %s or %s for distro: %s" % (
                git_conf, raw_conf, distro))
    return DistroInfo(_info_file, remote_info=remote_info)
[ "def", "get_distroinfo", "(", "distro", "=", "None", ")", ":", "if", "not", "distro", ":", "distro", "=", "cfg", "[", "'DISTRO'", "]", "_info_file", "=", "info_file", "(", "distro", ")", "# prefer git fetcher if available", "git_info_url_conf", "=", "distro", ...
Get DistroInfo initialized from configuration
[ "Get", "DistroInfo", "initialized", "from", "configuration" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/actionmods/rdoinfo.py#L100-L120
train
44,210
softwarefactory-project/rdopkg
rdopkg/utils/specfile.py
spec_fn
def spec_fn(spec_dir='.'): """ Return the filename for a .spec file in this directory. """ specs = [f for f in os.listdir(spec_dir) if os.path.isfile(f) and f.endswith('.spec')] if not specs: raise exception.SpecFileNotFound() if len(specs) != 1: raise exception.MultipleSpecFilesFound() return specs[0]
python
def spec_fn(spec_dir='.'):
    """
    Return the filename for a .spec file in this directory.

    :param spec_dir: directory to search (default: current directory)
    :raises exception.SpecFileNotFound: when no .spec file is present
    :raises exception.MultipleSpecFilesFound: when more than one is found
    """
    # BUG FIX: isfile() must test the path *inside* spec_dir; the original
    # checked the bare name against the current working directory, so any
    # spec_dir other than '.' never found its spec file.
    specs = [f for f in os.listdir(spec_dir)
             if os.path.isfile(os.path.join(spec_dir, f))
             and f.endswith('.spec')]
    if not specs:
        raise exception.SpecFileNotFound()
    if len(specs) != 1:
        raise exception.MultipleSpecFilesFound()
    return specs[0]
[ "def", "spec_fn", "(", "spec_dir", "=", "'.'", ")", ":", "specs", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "spec_dir", ")", "if", "os", ".", "path", ".", "isfile", "(", "f", ")", "and", "f", ".", "endswith", "(", "'.spec'", "...
Return the filename for a .spec file in this directory.
[ "Return", "the", "filename", "for", "a", ".", "spec", "file", "in", "this", "directory", "." ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/utils/specfile.py#L78-L88
train
44,211
softwarefactory-project/rdopkg
rdopkg/utils/specfile.py
Spec.get_patches_ignore_regex
def get_patches_ignore_regex(self): """Returns a string representing a regex for filtering out patches This string is parsed from a comment in the specfile that contains the word filter-out followed by an equal sign. For example, a comment as such: # patches_ignore=(regex) would mean this method returns the string '(regex)' Only a very limited subset of characters are accepted so no fancy stuff like matching groups etc. """ match = re.search(r'# *patches_ignore=([\w *.+?[\]|{,}\-_]+)', self.txt) if not match: return None regex_string = match.group(1) try: return re.compile(regex_string) except Exception: return None
python
def get_patches_ignore_regex(self):
    """Return a compiled regex for filtering out patches.

    The pattern is parsed from a specfile comment of the form:

        # patches_ignore=(regex)

    Despite the historical wording ("returns a string"), the compiled
    regex object is returned; None is returned when no pattern is found
    or the pattern fails to compile.

    Only a very limited subset of characters are accepted so no fancy
    stuff like matching groups etc.
    """
    match = re.search(r'# *patches_ignore=([\w *.+?[\]|{,}\-_]+)',
                      self.txt)
    if not match:
        return None
    regex_string = match.group(1)
    try:
        return re.compile(regex_string)
    except Exception:
        # An unparsable pattern is treated the same as no pattern at all.
        return None
[ "def", "get_patches_ignore_regex", "(", "self", ")", ":", "match", "=", "re", ".", "search", "(", "r'# *patches_ignore=([\\w *.+?[\\]|{,}\\-_]+)'", ",", "self", ".", "txt", ")", "if", "not", "match", ":", "return", "None", "regex_string", "=", "match", ".", "g...
Returns a string representing a regex for filtering out patches This string is parsed from a comment in the specfile that contains the word filter-out followed by an equal sign. For example, a comment as such: # patches_ignore=(regex) would mean this method returns the string '(regex)' Only a very limited subset of characters are accepted so no fancy stuff like matching groups etc.
[ "Returns", "a", "string", "representing", "a", "regex", "for", "filtering", "out", "patches" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/utils/specfile.py#L305-L327
train
44,212
softwarefactory-project/rdopkg
rdopkg/utils/specfile.py
Spec.get_vr
def get_vr(self, epoch=None): """get VR string from .spec Version, Release and Epoch epoch is None: prefix epoch if present (default) epoch is True: prefix epoch even if not present (0:) epoch is False: omit epoch even if present """ version = self.get_tag('Version', expand_macros=True) e = None if epoch is None or epoch: try: e = self.get_tag('Epoch') except exception.SpecFileParseError: pass if epoch is None and e: epoch = True if epoch: if not e: e = '0' version = '%s:%s' % (e, version) release = self.get_tag('Release') release = re.sub(r'%\{?\??dist\}?$', '', release) release = self.expand_macro(release) if release: return '%s-%s' % (version, release) return version
python
def get_vr(self, epoch=None):
    """get VR string from .spec Version, Release and Epoch

    epoch is None: prefix epoch if present (default)
    epoch is True: prefix epoch even if not present (0:)
    epoch is False: omit epoch even if present
    """
    version = self.get_tag('Version', expand_macros=True)
    e = None
    if epoch is None or epoch:
        # Epoch is optional in a specfile; its absence is not an error.
        try:
            e = self.get_tag('Epoch')
        except exception.SpecFileParseError:
            pass
    if epoch is None and e:
        epoch = True
    if epoch:
        # Missing/empty epoch is rendered as the conventional "0:".
        version = '%s:%s' % (e or '0', version)
    # Strip a trailing %dist / %{?dist} macro, then expand what remains.
    release = self.expand_macro(
        re.sub(r'%\{?\??dist\}?$', '', self.get_tag('Release')))
    if not release:
        return version
    return '%s-%s' % (version, release)
[ "def", "get_vr", "(", "self", ",", "epoch", "=", "None", ")", ":", "version", "=", "self", ".", "get_tag", "(", "'Version'", ",", "expand_macros", "=", "True", ")", "e", "=", "None", "if", "epoch", "is", "None", "or", "epoch", ":", "try", ":", "e",...
get VR string from .spec Version, Release and Epoch epoch is None: prefix epoch if present (default) epoch is True: prefix epoch even if not present (0:) epoch is False: omit epoch even if present
[ "get", "VR", "string", "from", ".", "spec", "Version", "Release", "and", "Epoch" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/utils/specfile.py#L583-L608
train
44,213
softwarefactory-project/rdopkg
rdopkg/utils/specfile.py
Spec.get_nvr
def get_nvr(self, epoch=None): """get NVR string from .spec Name, Version, Release and Epoch""" name = self.get_tag('Name', expand_macros=True) vr = self.get_vr(epoch=epoch) return '%s-%s' % (name, vr)
python
def get_nvr(self, epoch=None):
    """get NVR string from .spec Name, Version, Release and Epoch"""
    # Name and the already-assembled VR are simply glued with a dash.
    return '%s-%s' % (self.get_tag('Name', expand_macros=True),
                      self.get_vr(epoch=epoch))
[ "def", "get_nvr", "(", "self", ",", "epoch", "=", "None", ")", ":", "name", "=", "self", ".", "get_tag", "(", "'Name'", ",", "expand_macros", "=", "True", ")", "vr", "=", "self", ".", "get_vr", "(", "epoch", "=", "epoch", ")", "return", "'%s-%s'", ...
get NVR string from .spec Name, Version, Release and Epoch
[ "get", "NVR", "string", "from", ".", "spec", "Name", "Version", "Release", "and", "Epoch" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/utils/specfile.py#L610-L614
train
44,214
infothrill/python-dyndnsc
dyndnsc/common/detect_ip.py
detect_ip
def detect_ip(kind): """ Detect IP address. kind can be: IPV4 - returns IPv4 address IPV6_ANY - returns any IPv6 address (no preference) IPV6_PUBLIC - returns public IPv6 address IPV6_TMP - returns temporary IPV6 address (privacy extensions) This function either returns an IP address (str) or raises a GetIpException. """ if kind not in (IPV4, IPV6_PUBLIC, IPV6_TMP, IPV6_ANY): raise ValueError("invalid kind specified") # We create an UDP socket and connect it to a public host. # We query the OS to know what our address is. # No packet will really be sent since we are using UDP. af = socket.AF_INET if kind == IPV4 else socket.AF_INET6 s = socket.socket(af, socket.SOCK_DGRAM) try: if kind in [IPV6_PUBLIC, IPV6_TMP, ]: # caller wants some specific kind of IPv6 address (not IPV6_ANY) try: if kind == IPV6_PUBLIC: preference = socket.IPV6_PREFER_SRC_PUBLIC elif kind == IPV6_TMP: preference = socket.IPV6_PREFER_SRC_TMP s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_ADDR_PREFERENCES, preference) except socket.error as e: if e.errno == errno.ENOPROTOOPT: raise GetIpException("Kernel doesn't support IPv6 address preference") else: raise GetIpException("Unable to set IPv6 address preference: %s" % e) try: outside_ip = OUTSIDE_IPV4 if kind == IPV4 else OUTSIDE_IPV6 s.connect((outside_ip, 9)) except (socket.error, socket.gaierror) as e: raise GetIpException(str(e)) ip = s.getsockname()[0] finally: s.close() return ip
python
def detect_ip(kind):
    """
    Detect IP address.

    kind can be:
    IPV4 - returns IPv4 address
    IPV6_ANY - returns any IPv6 address (no preference)
    IPV6_PUBLIC - returns public IPv6 address
    IPV6_TMP - returns temporary IPV6 address (privacy extensions)

    This function either returns an IP address (str)
    or raises a GetIpException.
    """
    if kind not in (IPV4, IPV6_PUBLIC, IPV6_TMP, IPV6_ANY):
        raise ValueError("invalid kind specified")
    # We create an UDP socket and connect it to a public host.
    # We query the OS to know what our address is.
    # No packet will really be sent since we are using UDP.
    family = socket.AF_INET if kind == IPV4 else socket.AF_INET6
    sock = socket.socket(family, socket.SOCK_DGRAM)
    try:
        if kind in (IPV6_PUBLIC, IPV6_TMP):
            # caller wants some specific kind of IPv6 address (not IPV6_ANY)
            try:
                preference = (socket.IPV6_PREFER_SRC_PUBLIC
                              if kind == IPV6_PUBLIC
                              else socket.IPV6_PREFER_SRC_TMP)
                sock.setsockopt(socket.IPPROTO_IPV6,
                                socket.IPV6_ADDR_PREFERENCES, preference)
            except socket.error as e:
                if e.errno == errno.ENOPROTOOPT:
                    raise GetIpException(
                        "Kernel doesn't support IPv6 address preference")
                else:
                    raise GetIpException(
                        "Unable to set IPv6 address preference: %s" % e)
        try:
            target = OUTSIDE_IPV4 if kind == IPV4 else OUTSIDE_IPV6
            sock.connect((target, 9))
        except (socket.error, socket.gaierror) as e:
            raise GetIpException(str(e))
        ip = sock.getsockname()[0]
    finally:
        # Always release the socket, even on the failure paths above.
        sock.close()
    return ip
[ "def", "detect_ip", "(", "kind", ")", ":", "if", "kind", "not", "in", "(", "IPV4", ",", "IPV6_PUBLIC", ",", "IPV6_TMP", ",", "IPV6_ANY", ")", ":", "raise", "ValueError", "(", "\"invalid kind specified\"", ")", "# We create an UDP socket and connect it to a public ho...
Detect IP address. kind can be: IPV4 - returns IPv4 address IPV6_ANY - returns any IPv6 address (no preference) IPV6_PUBLIC - returns public IPv6 address IPV6_TMP - returns temporary IPV6 address (privacy extensions) This function either returns an IP address (str) or raises a GetIpException.
[ "Detect", "IP", "address", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/detect_ip.py#L51-L97
train
44,215
softwarefactory-project/rdopkg
rdopkg/actionmods/cbsbuild.py
setup_kojiclient
def setup_kojiclient(profile): """Setup koji client session """ opts = koji.read_config(profile) for k, v in opts.iteritems(): opts[k] = os.path.expanduser(v) if type(v) is str else v kojiclient = koji.ClientSession(opts['server'], opts=opts) kojiclient.ssl_login(opts['cert'], None, opts['serverca']) return kojiclient
python
def setup_kojiclient(profile):
    """Setup koji client session

    :param profile: name of the koji configuration profile to read
    :return: an ssl-authenticated koji ClientSession
    """
    opts = koji.read_config(profile)
    # BUG FIX: dict.iteritems() is Python 2 only; items() works on both.
    for k, v in opts.items():
        # Expand '~' in any string-valued option (e.g. cert/ca paths);
        # isinstance() replaces the non-idiomatic `type(v) is str` check.
        opts[k] = os.path.expanduser(v) if isinstance(v, str) else v
    kojiclient = koji.ClientSession(opts['server'], opts=opts)
    kojiclient.ssl_login(opts['cert'], None, opts['serverca'])
    return kojiclient
[ "def", "setup_kojiclient", "(", "profile", ")", ":", "opts", "=", "koji", ".", "read_config", "(", "profile", ")", "for", "k", ",", "v", "in", "opts", ".", "iteritems", "(", ")", ":", "opts", "[", "k", "]", "=", "os", ".", "path", ".", "expanduser"...
Setup koji client session
[ "Setup", "koji", "client", "session" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/actionmods/cbsbuild.py#L31-L39
train
44,216
softwarefactory-project/rdopkg
rdopkg/actionmods/cbsbuild.py
retrieve_sources
def retrieve_sources(): """Retrieve sources using spectool """ spectool = find_executable('spectool') if not spectool: log.warn('spectool is not installed') return try: specfile = spec_fn() except Exception: return cmd = [spectool, "-g", specfile] output = subprocess.check_output(' '.join(cmd), shell=True) log.warn(output)
python
def retrieve_sources(): """Retrieve sources using spectool """ spectool = find_executable('spectool') if not spectool: log.warn('spectool is not installed') return try: specfile = spec_fn() except Exception: return cmd = [spectool, "-g", specfile] output = subprocess.check_output(' '.join(cmd), shell=True) log.warn(output)
[ "def", "retrieve_sources", "(", ")", ":", "spectool", "=", "find_executable", "(", "'spectool'", ")", "if", "not", "spectool", ":", "log", ".", "warn", "(", "'spectool is not installed'", ")", "return", "try", ":", "specfile", "=", "spec_fn", "(", ")", "exce...
Retrieve sources using spectool
[ "Retrieve", "sources", "using", "spectool" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/actionmods/cbsbuild.py#L91-L105
train
44,217
softwarefactory-project/rdopkg
rdopkg/actionmods/cbsbuild.py
create_srpm
def create_srpm(dist='el7'): """Create an srpm Requires that sources are available in local directory dist: set package dist tag (default: el7) """ if not RPM_AVAILABLE: raise RpmModuleNotAvailable() path = os.getcwd() try: specfile = spec_fn() spec = Spec(specfile) except Exception: return rpmdefines = ["--define 'dist .{}'".format(dist), "--define '_sourcedir {}'".format(path), "--define '_srcrpmdir {}'".format(path)] rpm.addMacro('_sourcedir', '.{}'.format(dist)) # FIXME: needs to be fixed in Spec rpm.addMacro('dist', '.{}'.format(dist)) module_name = spec.get_tag('Name', True) version = spec.get_tag('Version', True) release = spec.get_tag('Release', True) srpm = os.path.join(path, "{}-{}-{}.src.rpm".format(module_name, version, release)) # See if we need to build the srpm if os.path.exists(srpm): log.warn('Srpm found, rewriting it.') cmd = ['rpmbuild'] cmd.extend(rpmdefines) cmd.extend(['--nodeps', '-bs', specfile]) output = subprocess.check_output(' '.join(cmd), shell=True) log.warn(output) srpm = output.split()[1] return srpm
python
def create_srpm(dist='el7'): """Create an srpm Requires that sources are available in local directory dist: set package dist tag (default: el7) """ if not RPM_AVAILABLE: raise RpmModuleNotAvailable() path = os.getcwd() try: specfile = spec_fn() spec = Spec(specfile) except Exception: return rpmdefines = ["--define 'dist .{}'".format(dist), "--define '_sourcedir {}'".format(path), "--define '_srcrpmdir {}'".format(path)] rpm.addMacro('_sourcedir', '.{}'.format(dist)) # FIXME: needs to be fixed in Spec rpm.addMacro('dist', '.{}'.format(dist)) module_name = spec.get_tag('Name', True) version = spec.get_tag('Version', True) release = spec.get_tag('Release', True) srpm = os.path.join(path, "{}-{}-{}.src.rpm".format(module_name, version, release)) # See if we need to build the srpm if os.path.exists(srpm): log.warn('Srpm found, rewriting it.') cmd = ['rpmbuild'] cmd.extend(rpmdefines) cmd.extend(['--nodeps', '-bs', specfile]) output = subprocess.check_output(' '.join(cmd), shell=True) log.warn(output) srpm = output.split()[1] return srpm
[ "def", "create_srpm", "(", "dist", "=", "'el7'", ")", ":", "if", "not", "RPM_AVAILABLE", ":", "raise", "RpmModuleNotAvailable", "(", ")", "path", "=", "os", ".", "getcwd", "(", ")", "try", ":", "specfile", "=", "spec_fn", "(", ")", "spec", "=", "Spec",...
Create an srpm Requires that sources are available in local directory dist: set package dist tag (default: el7)
[ "Create", "an", "srpm", "Requires", "that", "sources", "are", "available", "in", "local", "directory" ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/actionmods/cbsbuild.py#L108-L148
train
44,218
infothrill/python-dyndnsc
dyndnsc/updater/afraid.py
compute_auth_key
def compute_auth_key(userid, password): """ Compute the authentication key for freedns.afraid.org. This is the SHA1 hash of the string b'userid|password'. :param userid: ascii username :param password: ascii password :return: ascii authentication key (SHA1 at this point) """ import sys if sys.version_info >= (3, 0): return hashlib.sha1(b"|".join((userid.encode("ascii"), # noqa: S303 password.encode("ascii")))).hexdigest() return hashlib.sha1("|".join((userid, password))).hexdigest()
python
def compute_auth_key(userid, password): """ Compute the authentication key for freedns.afraid.org. This is the SHA1 hash of the string b'userid|password'. :param userid: ascii username :param password: ascii password :return: ascii authentication key (SHA1 at this point) """ import sys if sys.version_info >= (3, 0): return hashlib.sha1(b"|".join((userid.encode("ascii"), # noqa: S303 password.encode("ascii")))).hexdigest() return hashlib.sha1("|".join((userid, password))).hexdigest()
[ "def", "compute_auth_key", "(", "userid", ",", "password", ")", ":", "import", "sys", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ")", ":", "return", "hashlib", ".", "sha1", "(", "b\"|\"", ".", "join", "(", "(", "userid", ".", "encode...
Compute the authentication key for freedns.afraid.org. This is the SHA1 hash of the string b'userid|password'. :param userid: ascii username :param password: ascii password :return: ascii authentication key (SHA1 at this point)
[ "Compute", "the", "authentication", "key", "for", "freedns", ".", "afraid", ".", "org", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/updater/afraid.py#L60-L74
train
44,219
infothrill/python-dyndnsc
dyndnsc/updater/afraid.py
records
def records(credentials, url="https://freedns.afraid.org/api/"): """ Yield the dynamic DNS records associated with this account. :param credentials: an AfraidCredentials instance :param url: the service URL """ params = {"action": "getdyndns", "sha": credentials.sha} req = requests.get( url, params=params, headers=constants.REQUEST_HEADERS_DEFAULT, timeout=60) for record_line in (line.strip() for line in req.text.splitlines() if len(line.strip()) > 0): yield AfraidDynDNSRecord(*record_line.split("|"))
python
def records(credentials, url="https://freedns.afraid.org/api/"): """ Yield the dynamic DNS records associated with this account. :param credentials: an AfraidCredentials instance :param url: the service URL """ params = {"action": "getdyndns", "sha": credentials.sha} req = requests.get( url, params=params, headers=constants.REQUEST_HEADERS_DEFAULT, timeout=60) for record_line in (line.strip() for line in req.text.splitlines() if len(line.strip()) > 0): yield AfraidDynDNSRecord(*record_line.split("|"))
[ "def", "records", "(", "credentials", ",", "url", "=", "\"https://freedns.afraid.org/api/\"", ")", ":", "params", "=", "{", "\"action\"", ":", "\"getdyndns\"", ",", "\"sha\"", ":", "credentials", ".", "sha", "}", "req", "=", "requests", ".", "get", "(", "url...
Yield the dynamic DNS records associated with this account. :param credentials: an AfraidCredentials instance :param url: the service URL
[ "Yield", "the", "dynamic", "DNS", "records", "associated", "with", "this", "account", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/updater/afraid.py#L77-L89
train
44,220
infothrill/python-dyndnsc
dyndnsc/updater/afraid.py
update
def update(url): """ Update remote DNS record by requesting its special endpoint URL. This automatically picks the IP address using the HTTP connection: it is not possible to specify the IP address explicitly. :param url: URL to retrieve for triggering the update :return: IP address """ req = requests.get( url, headers=constants.REQUEST_HEADERS_DEFAULT, timeout=60) req.close() # Response must contain an IP address, or else we can't parse it. # Also, the IP address in the response is the newly assigned IP address. ipregex = re.compile(r"\b(?P<ip>(?:[0-9]{1,3}\.){3}[0-9]{1,3})\b") ipmatch = ipregex.search(req.text) if ipmatch: return str(ipaddress(ipmatch.group("ip"))) LOG.error("couldn't parse the server's response '%s'", req.text) return None
python
def update(url): """ Update remote DNS record by requesting its special endpoint URL. This automatically picks the IP address using the HTTP connection: it is not possible to specify the IP address explicitly. :param url: URL to retrieve for triggering the update :return: IP address """ req = requests.get( url, headers=constants.REQUEST_HEADERS_DEFAULT, timeout=60) req.close() # Response must contain an IP address, or else we can't parse it. # Also, the IP address in the response is the newly assigned IP address. ipregex = re.compile(r"\b(?P<ip>(?:[0-9]{1,3}\.){3}[0-9]{1,3})\b") ipmatch = ipregex.search(req.text) if ipmatch: return str(ipaddress(ipmatch.group("ip"))) LOG.error("couldn't parse the server's response '%s'", req.text) return None
[ "def", "update", "(", "url", ")", ":", "req", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "constants", ".", "REQUEST_HEADERS_DEFAULT", ",", "timeout", "=", "60", ")", "req", ".", "close", "(", ")", "# Response must contain an IP address, or...
Update remote DNS record by requesting its special endpoint URL. This automatically picks the IP address using the HTTP connection: it is not possible to specify the IP address explicitly. :param url: URL to retrieve for triggering the update :return: IP address
[ "Update", "remote", "DNS", "record", "by", "requesting", "its", "special", "endpoint", "URL", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/updater/afraid.py#L92-L112
train
44,221
infothrill/python-dyndnsc
dyndnsc/updater/afraid.py
AfraidCredentials.sha
def sha(self): """Return sha, lazily compute if not done yet.""" if self._sha is None: self._sha = compute_auth_key(self.userid, self.password) return self._sha
python
def sha(self): """Return sha, lazily compute if not done yet.""" if self._sha is None: self._sha = compute_auth_key(self.userid, self.password) return self._sha
[ "def", "sha", "(", "self", ")", ":", "if", "self", ".", "_sha", "is", "None", ":", "self", ".", "_sha", "=", "compute_auth_key", "(", "self", ".", "userid", ",", "self", ".", "password", ")", "return", "self", ".", "_sha" ]
Return sha, lazily compute if not done yet.
[ "Return", "sha", "lazily", "compute", "if", "not", "done", "yet", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/updater/afraid.py#L53-L57
train
44,222
infothrill/python-dyndnsc
dyndnsc/common/subject.py
Subject.register_observer
def register_observer(self, observer, events=None): """Register a listener function. :param observer: external listener function :param events: tuple or list of relevant events (default=None) """ if events is not None and not isinstance(events, (tuple, list)): events = (events,) if observer in self._observers: LOG.warning("Observer '%r' already registered, overwriting for events" " %r", observer, events) self._observers[observer] = events
python
def register_observer(self, observer, events=None): """Register a listener function. :param observer: external listener function :param events: tuple or list of relevant events (default=None) """ if events is not None and not isinstance(events, (tuple, list)): events = (events,) if observer in self._observers: LOG.warning("Observer '%r' already registered, overwriting for events" " %r", observer, events) self._observers[observer] = events
[ "def", "register_observer", "(", "self", ",", "observer", ",", "events", "=", "None", ")", ":", "if", "events", "is", "not", "None", "and", "not", "isinstance", "(", "events", ",", "(", "tuple", ",", "list", ")", ")", ":", "events", "=", "(", "events...
Register a listener function. :param observer: external listener function :param events: tuple or list of relevant events (default=None)
[ "Register", "a", "listener", "function", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/subject.py#L17-L29
train
44,223
infothrill/python-dyndnsc
dyndnsc/common/subject.py
Subject.notify_observers
def notify_observers(self, event=None, msg=None): """Notify observers.""" for observer, events in list(self._observers.items()): # LOG.debug("trying to notify the observer") if events is None or event is None or event in events: try: observer(self, event, msg) except (Exception,) as ex: # pylint: disable=broad-except self.unregister_observer(observer) errmsg = "Exception in message dispatch: Handler '{0}' unregistered for event '{1}' ".format( observer.__class__.__name__, event) LOG.error(errmsg, exc_info=ex)
python
def notify_observers(self, event=None, msg=None): """Notify observers.""" for observer, events in list(self._observers.items()): # LOG.debug("trying to notify the observer") if events is None or event is None or event in events: try: observer(self, event, msg) except (Exception,) as ex: # pylint: disable=broad-except self.unregister_observer(observer) errmsg = "Exception in message dispatch: Handler '{0}' unregistered for event '{1}' ".format( observer.__class__.__name__, event) LOG.error(errmsg, exc_info=ex)
[ "def", "notify_observers", "(", "self", ",", "event", "=", "None", ",", "msg", "=", "None", ")", ":", "for", "observer", ",", "events", "in", "list", "(", "self", ".", "_observers", ".", "items", "(", ")", ")", ":", "# LOG.debug(\"trying to notify the obse...
Notify observers.
[ "Notify", "observers", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/common/subject.py#L31-L42
train
44,224
infothrill/python-dyndnsc
dyndnsc/detector/socket_ip.py
IPDetector_Socket.detect
def detect(self): """Detect the IP address.""" if self.opts_family == AF_INET6: kind = IPV6_PUBLIC else: # 'INET': kind = IPV4 theip = None try: theip = detect_ip(kind) except GetIpException: LOG.exception("socket detector raised an exception:") self.set_current_value(theip) return theip
python
def detect(self): """Detect the IP address.""" if self.opts_family == AF_INET6: kind = IPV6_PUBLIC else: # 'INET': kind = IPV4 theip = None try: theip = detect_ip(kind) except GetIpException: LOG.exception("socket detector raised an exception:") self.set_current_value(theip) return theip
[ "def", "detect", "(", "self", ")", ":", "if", "self", ".", "opts_family", "==", "AF_INET6", ":", "kind", "=", "IPV6_PUBLIC", "else", ":", "# 'INET':", "kind", "=", "IPV4", "theip", "=", "None", "try", ":", "theip", "=", "detect_ip", "(", "kind", ")", ...
Detect the IP address.
[ "Detect", "the", "IP", "address", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/socket_ip.py#L32-L44
train
44,225
infothrill/python-dyndnsc
dyndnsc/detector/iface.py
IPDetector_Iface._detect
def _detect(self): """Use the netifaces module to detect ifconfig information.""" theip = None try: if self.opts_family == AF_INET6: addrlist = netifaces.ifaddresses(self.opts_iface)[netifaces.AF_INET6] else: addrlist = netifaces.ifaddresses(self.opts_iface)[netifaces.AF_INET] except ValueError as exc: LOG.error("netifaces choked while trying to get network interface" " information for interface '%s'", self.opts_iface, exc_info=exc) else: # now we have a list of addresses as returned by netifaces for pair in addrlist: try: detip = ipaddress(pair["addr"]) except (TypeError, ValueError) as exc: LOG.debug("Found invalid IP '%s' on interface '%s'!?", pair["addr"], self.opts_iface, exc_info=exc) continue if self.netmask is not None: if detip in self.netmask: theip = pair["addr"] else: continue else: theip = pair["addr"] break # we use the first IP found # theip can still be None at this point! self.set_current_value(theip) return theip
python
def _detect(self): """Use the netifaces module to detect ifconfig information.""" theip = None try: if self.opts_family == AF_INET6: addrlist = netifaces.ifaddresses(self.opts_iface)[netifaces.AF_INET6] else: addrlist = netifaces.ifaddresses(self.opts_iface)[netifaces.AF_INET] except ValueError as exc: LOG.error("netifaces choked while trying to get network interface" " information for interface '%s'", self.opts_iface, exc_info=exc) else: # now we have a list of addresses as returned by netifaces for pair in addrlist: try: detip = ipaddress(pair["addr"]) except (TypeError, ValueError) as exc: LOG.debug("Found invalid IP '%s' on interface '%s'!?", pair["addr"], self.opts_iface, exc_info=exc) continue if self.netmask is not None: if detip in self.netmask: theip = pair["addr"] else: continue else: theip = pair["addr"] break # we use the first IP found # theip can still be None at this point! self.set_current_value(theip) return theip
[ "def", "_detect", "(", "self", ")", ":", "theip", "=", "None", "try", ":", "if", "self", ".", "opts_family", "==", "AF_INET6", ":", "addrlist", "=", "netifaces", ".", "ifaddresses", "(", "self", ".", "opts_iface", ")", "[", "netifaces", ".", "AF_INET6", ...
Use the netifaces module to detect ifconfig information.
[ "Use", "the", "netifaces", "module", "to", "detect", "ifconfig", "information", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/iface.py#L66-L96
train
44,226
softwarefactory-project/rdopkg
features/environment.py
clean_tempdir
def clean_tempdir(context, scenario): """ Clean up temporary test dirs for passed tests. Leave failed test dirs for manual inspection. """ tempdir = getattr(context, 'tempdir', None) if tempdir and scenario.status == 'passed': shutil.rmtree(tempdir) del(context.tempdir)
python
def clean_tempdir(context, scenario): """ Clean up temporary test dirs for passed tests. Leave failed test dirs for manual inspection. """ tempdir = getattr(context, 'tempdir', None) if tempdir and scenario.status == 'passed': shutil.rmtree(tempdir) del(context.tempdir)
[ "def", "clean_tempdir", "(", "context", ",", "scenario", ")", ":", "tempdir", "=", "getattr", "(", "context", ",", "'tempdir'", ",", "None", ")", "if", "tempdir", "and", "scenario", ".", "status", "==", "'passed'", ":", "shutil", ".", "rmtree", "(", "tem...
Clean up temporary test dirs for passed tests. Leave failed test dirs for manual inspection.
[ "Clean", "up", "temporary", "test", "dirs", "for", "passed", "tests", "." ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/features/environment.py#L6-L16
train
44,227
infothrill/python-dyndnsc
dyndnsc/detector/rand.py
RandomIPGenerator.is_reserved_ip
def is_reserved_ip(self, ip): """Check if the given ip address is in a reserved ipv4 address space. :param ip: ip address :return: boolean """ theip = ipaddress(ip) for res in self._reserved_netmasks: if theip in ipnetwork(res): return True return False
python
def is_reserved_ip(self, ip): """Check if the given ip address is in a reserved ipv4 address space. :param ip: ip address :return: boolean """ theip = ipaddress(ip) for res in self._reserved_netmasks: if theip in ipnetwork(res): return True return False
[ "def", "is_reserved_ip", "(", "self", ",", "ip", ")", ":", "theip", "=", "ipaddress", "(", "ip", ")", "for", "res", "in", "self", ".", "_reserved_netmasks", ":", "if", "theip", "in", "ipnetwork", "(", "res", ")", ":", "return", "True", "return", "False...
Check if the given ip address is in a reserved ipv4 address space. :param ip: ip address :return: boolean
[ "Check", "if", "the", "given", "ip", "address", "is", "in", "a", "reserved", "ipv4", "address", "space", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/rand.py#L52-L62
train
44,228
infothrill/python-dyndnsc
dyndnsc/detector/rand.py
RandomIPGenerator.random_public_ip
def random_public_ip(self): """Return a randomly generated, public IPv4 address. :return: ip address """ randomip = random_ip() while self.is_reserved_ip(randomip): randomip = random_ip() return randomip
python
def random_public_ip(self): """Return a randomly generated, public IPv4 address. :return: ip address """ randomip = random_ip() while self.is_reserved_ip(randomip): randomip = random_ip() return randomip
[ "def", "random_public_ip", "(", "self", ")", ":", "randomip", "=", "random_ip", "(", ")", "while", "self", ".", "is_reserved_ip", "(", "randomip", ")", ":", "randomip", "=", "random_ip", "(", ")", "return", "randomip" ]
Return a randomly generated, public IPv4 address. :return: ip address
[ "Return", "a", "randomly", "generated", "public", "IPv4", "address", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/rand.py#L64-L72
train
44,229
infothrill/python-dyndnsc
dyndnsc/detector/rand.py
IPDetector_Random.detect
def detect(self): """Detect IP and return it.""" for theip in self.rips: LOG.debug("detected %s", str(theip)) self.set_current_value(str(theip)) return str(theip)
python
def detect(self): """Detect IP and return it.""" for theip in self.rips: LOG.debug("detected %s", str(theip)) self.set_current_value(str(theip)) return str(theip)
[ "def", "detect", "(", "self", ")", ":", "for", "theip", "in", "self", ".", "rips", ":", "LOG", ".", "debug", "(", "\"detected %s\"", ",", "str", "(", "theip", ")", ")", "self", ".", "set_current_value", "(", "str", "(", "theip", ")", ")", "return", ...
Detect IP and return it.
[ "Detect", "IP", "and", "return", "it", "." ]
2196d48aa6098da9835a7611fbdb0b5f0fbf51e4
https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/rand.py#L102-L107
train
44,230
softwarefactory-project/rdopkg
rdopkg/guess.py
patches_base_ref
def patches_base_ref(default=exception.CantGuess): """Return a git reference to patches branch base. Returns first part of .spec's patches_base is found, otherwise return Version(+%{milestone}). """ ref = None try: spec = specfile.Spec() ref, _ = spec.get_patches_base(expand_macros=True) if ref: ref, _ = tag2version(ref) else: ref = spec.get_tag('Version', expand_macros=True) milestone = spec.get_milestone() if milestone: ref += milestone if not ref: raise exception.CantGuess(msg="got empty .spec Version") except Exception as ex: if default is exception.CantGuess: raise exception.CantGuess( what="current package version", why=str(ex)) else: return default tag_style = version_tag_style(ref) return version2tag(ref, tag_style=tag_style)
python
def patches_base_ref(default=exception.CantGuess): """Return a git reference to patches branch base. Returns first part of .spec's patches_base is found, otherwise return Version(+%{milestone}). """ ref = None try: spec = specfile.Spec() ref, _ = spec.get_patches_base(expand_macros=True) if ref: ref, _ = tag2version(ref) else: ref = spec.get_tag('Version', expand_macros=True) milestone = spec.get_milestone() if milestone: ref += milestone if not ref: raise exception.CantGuess(msg="got empty .spec Version") except Exception as ex: if default is exception.CantGuess: raise exception.CantGuess( what="current package version", why=str(ex)) else: return default tag_style = version_tag_style(ref) return version2tag(ref, tag_style=tag_style)
[ "def", "patches_base_ref", "(", "default", "=", "exception", ".", "CantGuess", ")", ":", "ref", "=", "None", "try", ":", "spec", "=", "specfile", ".", "Spec", "(", ")", "ref", ",", "_", "=", "spec", ".", "get_patches_base", "(", "expand_macros", "=", "...
Return a git reference to patches branch base. Returns first part of .spec's patches_base is found, otherwise return Version(+%{milestone}).
[ "Return", "a", "git", "reference", "to", "patches", "branch", "base", "." ]
2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c
https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/guess.py#L23-L50
train
44,231
jbeluch/xbmcswift2
xbmcswift2/cli/console.py
display_listitems
def display_listitems(items, url): '''Displays a list of items along with the index to enable a user to select an item. ''' if (len(items) == 2 and items[0].get_label() == '..' and items[1].get_played()): display_video(items) else: label_width = get_max_len(item.get_label() for item in items) num_width = len(str(len(items))) output = [] for i, item in enumerate(items): output.append('[%s] %s (%s)' % ( str(i).rjust(num_width), item.get_label().ljust(label_width), item.get_path())) line_width = get_max_len(output) output.append('-' * line_width) header = [ '', '=' * line_width, 'Current URL: %s' % url, '-' * line_width, '%s %s Path' % ('#'.center(num_width + 2), 'Label'.ljust(label_width)), '-' * line_width, ] print '\n'.join(header + output)
python
def display_listitems(items, url): '''Displays a list of items along with the index to enable a user to select an item. ''' if (len(items) == 2 and items[0].get_label() == '..' and items[1].get_played()): display_video(items) else: label_width = get_max_len(item.get_label() for item in items) num_width = len(str(len(items))) output = [] for i, item in enumerate(items): output.append('[%s] %s (%s)' % ( str(i).rjust(num_width), item.get_label().ljust(label_width), item.get_path())) line_width = get_max_len(output) output.append('-' * line_width) header = [ '', '=' * line_width, 'Current URL: %s' % url, '-' * line_width, '%s %s Path' % ('#'.center(num_width + 2), 'Label'.ljust(label_width)), '-' * line_width, ] print '\n'.join(header + output)
[ "def", "display_listitems", "(", "items", ",", "url", ")", ":", "if", "(", "len", "(", "items", ")", "==", "2", "and", "items", "[", "0", "]", ".", "get_label", "(", ")", "==", "'..'", "and", "items", "[", "1", "]", ".", "get_played", "(", ")", ...
Displays a list of items along with the index to enable a user to select an item.
[ "Displays", "a", "list", "of", "items", "along", "with", "the", "index", "to", "enable", "a", "user", "to", "select", "an", "item", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/console.py#L20-L49
train
44,232
jbeluch/xbmcswift2
xbmcswift2/cli/console.py
display_video
def display_video(items): '''Prints a message for a playing video and displays the parent listitem. ''' parent_item, played_item = items title_line = 'Playing Media %s (%s)' % (played_item.get_label(), played_item.get_path()) parent_line = '[0] %s (%s)' % (parent_item.get_label(), parent_item.get_path()) line_width = get_max_len([title_line, parent_line]) output = [ '-' * line_width, title_line, '-' * line_width, parent_line, ] print '\n'.join(output)
python
def display_video(items): '''Prints a message for a playing video and displays the parent listitem. ''' parent_item, played_item = items title_line = 'Playing Media %s (%s)' % (played_item.get_label(), played_item.get_path()) parent_line = '[0] %s (%s)' % (parent_item.get_label(), parent_item.get_path()) line_width = get_max_len([title_line, parent_line]) output = [ '-' * line_width, title_line, '-' * line_width, parent_line, ] print '\n'.join(output)
[ "def", "display_video", "(", "items", ")", ":", "parent_item", ",", "played_item", "=", "items", "title_line", "=", "'Playing Media %s (%s)'", "%", "(", "played_item", ".", "get_label", "(", ")", ",", "played_item", ".", "get_path", "(", ")", ")", "parent_line...
Prints a message for a playing video and displays the parent listitem.
[ "Prints", "a", "message", "for", "a", "playing", "video", "and", "displays", "the", "parent", "listitem", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/console.py#L52-L70
train
44,233
jbeluch/xbmcswift2
xbmcswift2/cli/console.py
get_user_choice
def get_user_choice(items): '''Returns the selected item from provided items or None if 'q' was entered for quit. ''' choice = raw_input('Choose an item or "q" to quit: ') while choice != 'q': try: item = items[int(choice)] print # Blank line for readability between interactive views return item except ValueError: # Passed something that cound't be converted with int() choice = raw_input('You entered a non-integer. Choice must be an' ' integer or "q": ') except IndexError: # Passed an integer that was out of range of the list of urls choice = raw_input('You entered an invalid integer. Choice must be' ' from above url list or "q": ') return None
python
def get_user_choice(items): '''Returns the selected item from provided items or None if 'q' was entered for quit. ''' choice = raw_input('Choose an item or "q" to quit: ') while choice != 'q': try: item = items[int(choice)] print # Blank line for readability between interactive views return item except ValueError: # Passed something that cound't be converted with int() choice = raw_input('You entered a non-integer. Choice must be an' ' integer or "q": ') except IndexError: # Passed an integer that was out of range of the list of urls choice = raw_input('You entered an invalid integer. Choice must be' ' from above url list or "q": ') return None
[ "def", "get_user_choice", "(", "items", ")", ":", "choice", "=", "raw_input", "(", "'Choose an item or \"q\" to quit: '", ")", "while", "choice", "!=", "'q'", ":", "try", ":", "item", "=", "items", "[", "int", "(", "choice", ")", "]", "print", "# Blank line ...
Returns the selected item from provided items or None if 'q' was entered for quit.
[ "Returns", "the", "selected", "item", "from", "provided", "items", "or", "None", "if", "q", "was", "entered", "for", "quit", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/console.py#L73-L91
train
44,234
mozilla/amo-validator
validator/unicodehelper.py
decode
def decode(data): """ Decode data employing some charset detection and including unicode BOM stripping. """ if isinstance(data, unicode): return data # Detect standard unicode BOMs. for bom, encoding in UNICODE_BOMS: if data.startswith(bom): return data[len(bom):].decode(encoding, errors='ignore') # Try straight UTF-8. try: return data.decode('utf-8') except UnicodeDecodeError: pass # Test for various common encodings. for encoding in COMMON_ENCODINGS: try: return data.decode(encoding) except UnicodeDecodeError: pass # Anything else gets filtered. return NON_ASCII_FILTER.sub('', data).decode('ascii', errors='replace')
python
def decode(data): """ Decode data employing some charset detection and including unicode BOM stripping. """ if isinstance(data, unicode): return data # Detect standard unicode BOMs. for bom, encoding in UNICODE_BOMS: if data.startswith(bom): return data[len(bom):].decode(encoding, errors='ignore') # Try straight UTF-8. try: return data.decode('utf-8') except UnicodeDecodeError: pass # Test for various common encodings. for encoding in COMMON_ENCODINGS: try: return data.decode(encoding) except UnicodeDecodeError: pass # Anything else gets filtered. return NON_ASCII_FILTER.sub('', data).decode('ascii', errors='replace')
[ "def", "decode", "(", "data", ")", ":", "if", "isinstance", "(", "data", ",", "unicode", ")", ":", "return", "data", "# Detect standard unicode BOMs.", "for", "bom", ",", "encoding", "in", "UNICODE_BOMS", ":", "if", "data", ".", "startswith", "(", "bom", "...
Decode data employing some charset detection and including unicode BOM stripping.
[ "Decode", "data", "employing", "some", "charset", "detection", "and", "including", "unicode", "BOM", "stripping", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/unicodehelper.py#L22-L50
train
44,235
mozilla/amo-validator
validator/contextgenerator.py
ContextGenerator.get_context
def get_context(self, line=1, column=0): 'Returns a tuple containing the context for a line' line -= 1 # The line is one-based # If there is no data in the file, there can be no context. datalen = len(self.data) if datalen <= line: return None build = [self.data[line]] # Add surrounding lines if they're available. There must always be # three elements in the context. if line > 0: build.insert(0, self.data[line - 1]) else: build.insert(0, None) if line < datalen - 1: build.append(self.data[line + 1]) else: build.append(None) leading_counts = [] # Count whitespace to determine how much needs to be stripped. lstrip_count = INFINITY for line in build: # Don't count empty/whitespace-only lines. if line is None or not line.strip(): leading_counts.append(lstrip_count) continue # Isolate the leading whitespace. ws_count = len(line) - len(line.lstrip()) leading_counts.append(ws_count) if ws_count < lstrip_count: lstrip_count = ws_count # If all of the lines were skipped over, it means everything was # whitespace. if lstrip_count == INFINITY: return ('', '', '') for lnum in range(3): # Skip edge lines. if not build[lnum]: continue line = build[lnum].strip() # Empty lines stay empty. if not line: build[lnum] = '' continue line = self._format_line(line, column=column, rel_line=lnum) line = '%s%s' % (' ' * (leading_counts[lnum] - lstrip_count), line) build[lnum] = line # Return the final output as a tuple. return tuple(build)
python
def get_context(self, line=1, column=0): 'Returns a tuple containing the context for a line' line -= 1 # The line is one-based # If there is no data in the file, there can be no context. datalen = len(self.data) if datalen <= line: return None build = [self.data[line]] # Add surrounding lines if they're available. There must always be # three elements in the context. if line > 0: build.insert(0, self.data[line - 1]) else: build.insert(0, None) if line < datalen - 1: build.append(self.data[line + 1]) else: build.append(None) leading_counts = [] # Count whitespace to determine how much needs to be stripped. lstrip_count = INFINITY for line in build: # Don't count empty/whitespace-only lines. if line is None or not line.strip(): leading_counts.append(lstrip_count) continue # Isolate the leading whitespace. ws_count = len(line) - len(line.lstrip()) leading_counts.append(ws_count) if ws_count < lstrip_count: lstrip_count = ws_count # If all of the lines were skipped over, it means everything was # whitespace. if lstrip_count == INFINITY: return ('', '', '') for lnum in range(3): # Skip edge lines. if not build[lnum]: continue line = build[lnum].strip() # Empty lines stay empty. if not line: build[lnum] = '' continue line = self._format_line(line, column=column, rel_line=lnum) line = '%s%s' % (' ' * (leading_counts[lnum] - lstrip_count), line) build[lnum] = line # Return the final output as a tuple. return tuple(build)
[ "def", "get_context", "(", "self", ",", "line", "=", "1", ",", "column", "=", "0", ")", ":", "line", "-=", "1", "# The line is one-based", "# If there is no data in the file, there can be no context.", "datalen", "=", "len", "(", "self", ".", "data", ")", "if", ...
Returns a tuple containing the context for a line
[ "Returns", "a", "tuple", "containing", "the", "context", "for", "a", "line" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/contextgenerator.py#L19-L82
train
44,236
mozilla/amo-validator
validator/contextgenerator.py
ContextGenerator._format_line
def _format_line(self, data, column=0, rel_line=1): 'Formats a line from the data to be the appropriate length' line_length = len(data) if line_length > 140: if rel_line == 0: # Trim from the beginning data = '... %s' % data[-140:] elif rel_line == 1: # Trim surrounding the error position if column < 70: data = '%s ...' % data[:140] elif column > line_length - 70: data = '... %s' % data[-140:] else: data = '... %s ...' % data[column - 70:column + 70] elif rel_line == 2: # Trim from the end data = '%s ...' % data[:140] data = unicodehelper.decode(data) return data
python
def _format_line(self, data, column=0, rel_line=1): 'Formats a line from the data to be the appropriate length' line_length = len(data) if line_length > 140: if rel_line == 0: # Trim from the beginning data = '... %s' % data[-140:] elif rel_line == 1: # Trim surrounding the error position if column < 70: data = '%s ...' % data[:140] elif column > line_length - 70: data = '... %s' % data[-140:] else: data = '... %s ...' % data[column - 70:column + 70] elif rel_line == 2: # Trim from the end data = '%s ...' % data[:140] data = unicodehelper.decode(data) return data
[ "def", "_format_line", "(", "self", ",", "data", ",", "column", "=", "0", ",", "rel_line", "=", "1", ")", ":", "line_length", "=", "len", "(", "data", ")", "if", "line_length", ">", "140", ":", "if", "rel_line", "==", "0", ":", "# Trim from the beginni...
Formats a line from the data to be the appropriate length
[ "Formats", "a", "line", "from", "the", "data", "to", "be", "the", "appropriate", "length" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/contextgenerator.py#L84-L106
train
44,237
mozilla/amo-validator
validator/contextgenerator.py
ContextGenerator.get_line
def get_line(self, position): 'Returns the line number that the given string position is found on' datalen = len(self.data) count = len(self.data[0]) line = 1 while count < position: if line >= datalen: break count += len(self.data[line]) + 1 line += 1 return line
python
def get_line(self, position): 'Returns the line number that the given string position is found on' datalen = len(self.data) count = len(self.data[0]) line = 1 while count < position: if line >= datalen: break count += len(self.data[line]) + 1 line += 1 return line
[ "def", "get_line", "(", "self", ",", "position", ")", ":", "datalen", "=", "len", "(", "self", ".", "data", ")", "count", "=", "len", "(", "self", ".", "data", "[", "0", "]", ")", "line", "=", "1", "while", "count", "<", "position", ":", "if", ...
Returns the line number that the given string position is found on
[ "Returns", "the", "line", "number", "that", "the", "given", "string", "position", "is", "found", "on" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/contextgenerator.py#L108-L120
train
44,238
jbeluch/xbmcswift2
xbmcswift2/mockxbmc/utils.py
load_addon_strings
def load_addon_strings(addon, filename): '''This is not an official XBMC method, it is here to faciliate mocking up the other methods when running outside of XBMC.''' def get_strings(fn): xml = parse(fn) strings = dict((tag.getAttribute('id'), tag.firstChild.data) for tag in xml.getElementsByTagName('string')) #strings = {} #for tag in xml.getElementsByTagName('string'): #strings[tag.getAttribute('id')] = tag.firstChild.data return strings addon._strings = get_strings(filename)
python
def load_addon_strings(addon, filename): '''This is not an official XBMC method, it is here to faciliate mocking up the other methods when running outside of XBMC.''' def get_strings(fn): xml = parse(fn) strings = dict((tag.getAttribute('id'), tag.firstChild.data) for tag in xml.getElementsByTagName('string')) #strings = {} #for tag in xml.getElementsByTagName('string'): #strings[tag.getAttribute('id')] = tag.firstChild.data return strings addon._strings = get_strings(filename)
[ "def", "load_addon_strings", "(", "addon", ",", "filename", ")", ":", "def", "get_strings", "(", "fn", ")", ":", "xml", "=", "parse", "(", "fn", ")", "strings", "=", "dict", "(", "(", "tag", ".", "getAttribute", "(", "'id'", ")", ",", "tag", ".", "...
This is not an official XBMC method, it is here to faciliate mocking up the other methods when running outside of XBMC.
[ "This", "is", "not", "an", "official", "XBMC", "method", "it", "is", "here", "to", "faciliate", "mocking", "up", "the", "other", "methods", "when", "running", "outside", "of", "XBMC", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/mockxbmc/utils.py#L4-L14
train
44,239
jbeluch/xbmcswift2
xbmcswift2/mockxbmc/utils.py
get_addon_id
def get_addon_id(addonxml): '''Parses an addon id from the given addon.xml filename.''' xml = parse(addonxml) addon_node = xml.getElementsByTagName('addon')[0] return addon_node.getAttribute('id')
python
def get_addon_id(addonxml): '''Parses an addon id from the given addon.xml filename.''' xml = parse(addonxml) addon_node = xml.getElementsByTagName('addon')[0] return addon_node.getAttribute('id')
[ "def", "get_addon_id", "(", "addonxml", ")", ":", "xml", "=", "parse", "(", "addonxml", ")", "addon_node", "=", "xml", ".", "getElementsByTagName", "(", "'addon'", ")", "[", "0", "]", "return", "addon_node", ".", "getAttribute", "(", "'id'", ")" ]
Parses an addon id from the given addon.xml filename.
[ "Parses", "an", "addon", "id", "from", "the", "given", "addon", ".", "xml", "filename", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/mockxbmc/utils.py#L17-L21
train
44,240
jbeluch/xbmcswift2
xbmcswift2/mockxbmc/utils.py
get_addon_name
def get_addon_name(addonxml): '''Parses an addon name from the given addon.xml filename.''' xml = parse(addonxml) addon_node = xml.getElementsByTagName('addon')[0] return addon_node.getAttribute('name')
python
def get_addon_name(addonxml): '''Parses an addon name from the given addon.xml filename.''' xml = parse(addonxml) addon_node = xml.getElementsByTagName('addon')[0] return addon_node.getAttribute('name')
[ "def", "get_addon_name", "(", "addonxml", ")", ":", "xml", "=", "parse", "(", "addonxml", ")", "addon_node", "=", "xml", ".", "getElementsByTagName", "(", "'addon'", ")", "[", "0", "]", "return", "addon_node", ".", "getAttribute", "(", "'name'", ")" ]
Parses an addon name from the given addon.xml filename.
[ "Parses", "an", "addon", "name", "from", "the", "given", "addon", ".", "xml", "filename", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/mockxbmc/utils.py#L24-L28
train
44,241
jbeluch/xbmcswift2
xbmcswift2/mockxbmc/xbmc.py
_create_dir
def _create_dir(path): '''Creates necessary directories for the given path or does nothing if the directories already exist. ''' try: os.makedirs(path) except OSError, exc: if exc.errno == errno.EEXIST: pass else: raise
python
def _create_dir(path): '''Creates necessary directories for the given path or does nothing if the directories already exist. ''' try: os.makedirs(path) except OSError, exc: if exc.errno == errno.EEXIST: pass else: raise
[ "def", "_create_dir", "(", "path", ")", ":", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", ",", "exc", ":", "if", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ":", "pass", "else", ":", "raise" ]
Creates necessary directories for the given path or does nothing if the directories already exist.
[ "Creates", "necessary", "directories", "for", "the", "given", "path", "or", "does", "nothing", "if", "the", "directories", "already", "exist", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/mockxbmc/xbmc.py#L11-L21
train
44,242
jbeluch/xbmcswift2
xbmcswift2/mockxbmc/xbmc.py
translatePath
def translatePath(path): '''Creates folders in the OS's temp directory. Doesn't touch any possible XBMC installation on the machine. Attempting to do as little work as possible to enable this function to work seamlessly. ''' valid_dirs = ['xbmc', 'home', 'temp', 'masterprofile', 'profile', 'subtitles', 'userdata', 'database', 'thumbnails', 'recordings', 'screenshots', 'musicplaylists', 'videoplaylists', 'cdrips', 'skin', ] assert path.startswith('special://'), 'Not a valid special:// path.' parts = path.split('/')[2:] assert len(parts) > 1, 'Need at least a single root directory' assert parts[0] in valid_dirs, '%s is not a valid root dir.' % parts[0] # We don't want to swallow any potential IOErrors here, so only makedir for # the root dir, the user is responsible for making any further child dirs _create_dir(os.path.join(TEMP_DIR, parts[0])) return os.path.join(TEMP_DIR, *parts)
python
def translatePath(path): '''Creates folders in the OS's temp directory. Doesn't touch any possible XBMC installation on the machine. Attempting to do as little work as possible to enable this function to work seamlessly. ''' valid_dirs = ['xbmc', 'home', 'temp', 'masterprofile', 'profile', 'subtitles', 'userdata', 'database', 'thumbnails', 'recordings', 'screenshots', 'musicplaylists', 'videoplaylists', 'cdrips', 'skin', ] assert path.startswith('special://'), 'Not a valid special:// path.' parts = path.split('/')[2:] assert len(parts) > 1, 'Need at least a single root directory' assert parts[0] in valid_dirs, '%s is not a valid root dir.' % parts[0] # We don't want to swallow any potential IOErrors here, so only makedir for # the root dir, the user is responsible for making any further child dirs _create_dir(os.path.join(TEMP_DIR, parts[0])) return os.path.join(TEMP_DIR, *parts)
[ "def", "translatePath", "(", "path", ")", ":", "valid_dirs", "=", "[", "'xbmc'", ",", "'home'", ",", "'temp'", ",", "'masterprofile'", ",", "'profile'", ",", "'subtitles'", ",", "'userdata'", ",", "'database'", ",", "'thumbnails'", ",", "'recordings'", ",", ...
Creates folders in the OS's temp directory. Doesn't touch any possible XBMC installation on the machine. Attempting to do as little work as possible to enable this function to work seamlessly.
[ "Creates", "folders", "in", "the", "OS", "s", "temp", "directory", ".", "Doesn", "t", "touch", "any", "possible", "XBMC", "installation", "on", "the", "machine", ".", "Attempting", "to", "do", "as", "little", "work", "as", "possible", "to", "enable", "this...
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/mockxbmc/xbmc.py#L37-L56
train
44,243
jbeluch/xbmcswift2
xbmcswift2/plugin.py
Plugin._parse_request
def _parse_request(self, url=None, handle=None): '''Handles setup of the plugin state, including request arguments, handle, mode. This method never needs to be called directly. For testing, see plugin.test() ''' # To accomdate self.redirect, we need to be able to parse a full url as # well if url is None: url = sys.argv[0] if len(sys.argv) == 3: url += sys.argv[2] if handle is None: handle = sys.argv[1] return Request(url, handle)
python
def _parse_request(self, url=None, handle=None): '''Handles setup of the plugin state, including request arguments, handle, mode. This method never needs to be called directly. For testing, see plugin.test() ''' # To accomdate self.redirect, we need to be able to parse a full url as # well if url is None: url = sys.argv[0] if len(sys.argv) == 3: url += sys.argv[2] if handle is None: handle = sys.argv[1] return Request(url, handle)
[ "def", "_parse_request", "(", "self", ",", "url", "=", "None", ",", "handle", "=", "None", ")", ":", "# To accomdate self.redirect, we need to be able to parse a full url as", "# well", "if", "url", "is", "None", ":", "url", "=", "sys", ".", "argv", "[", "0", ...
Handles setup of the plugin state, including request arguments, handle, mode. This method never needs to be called directly. For testing, see plugin.test()
[ "Handles", "setup", "of", "the", "plugin", "state", "including", "request", "arguments", "handle", "mode", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/plugin.py#L194-L209
train
44,244
jbeluch/xbmcswift2
xbmcswift2/plugin.py
Plugin.register_module
def register_module(self, module, url_prefix): '''Registers a module with a plugin. Requires a url_prefix that will then enable calls to url_for. :param module: Should be an instance `xbmcswift2.Module`. :param url_prefix: A url prefix to use for all module urls, e.g. '/mymodule' ''' module._plugin = self module._url_prefix = url_prefix for func in module._register_funcs: func(self, url_prefix)
python
def register_module(self, module, url_prefix): '''Registers a module with a plugin. Requires a url_prefix that will then enable calls to url_for. :param module: Should be an instance `xbmcswift2.Module`. :param url_prefix: A url prefix to use for all module urls, e.g. '/mymodule' ''' module._plugin = self module._url_prefix = url_prefix for func in module._register_funcs: func(self, url_prefix)
[ "def", "register_module", "(", "self", ",", "module", ",", "url_prefix", ")", ":", "module", ".", "_plugin", "=", "self", "module", ".", "_url_prefix", "=", "url_prefix", "for", "func", "in", "module", ".", "_register_funcs", ":", "func", "(", "self", ",",...
Registers a module with a plugin. Requires a url_prefix that will then enable calls to url_for. :param module: Should be an instance `xbmcswift2.Module`. :param url_prefix: A url prefix to use for all module urls, e.g. '/mymodule'
[ "Registers", "a", "module", "with", "a", "plugin", ".", "Requires", "a", "url_prefix", "that", "will", "then", "enable", "calls", "to", "url_for", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/plugin.py#L211-L222
train
44,245
jbeluch/xbmcswift2
xbmcswift2/plugin.py
Plugin.cached_route
def cached_route(self, url_rule, name=None, options=None, TTL=None): '''A decorator to add a route to a view and also apply caching. The url_rule, name and options arguments are the same arguments for the route function. The TTL argument if given will passed along to the caching decorator. ''' route_decorator = self.route(url_rule, name=name, options=options) if TTL: cache_decorator = self.cached(TTL) else: cache_decorator = self.cached() def new_decorator(func): return route_decorator(cache_decorator(func)) return new_decorator
python
def cached_route(self, url_rule, name=None, options=None, TTL=None): '''A decorator to add a route to a view and also apply caching. The url_rule, name and options arguments are the same arguments for the route function. The TTL argument if given will passed along to the caching decorator. ''' route_decorator = self.route(url_rule, name=name, options=options) if TTL: cache_decorator = self.cached(TTL) else: cache_decorator = self.cached() def new_decorator(func): return route_decorator(cache_decorator(func)) return new_decorator
[ "def", "cached_route", "(", "self", ",", "url_rule", ",", "name", "=", "None", ",", "options", "=", "None", ",", "TTL", "=", "None", ")", ":", "route_decorator", "=", "self", ".", "route", "(", "url_rule", ",", "name", "=", "name", ",", "options", "=...
A decorator to add a route to a view and also apply caching. The url_rule, name and options arguments are the same arguments for the route function. The TTL argument if given will passed along to the caching decorator.
[ "A", "decorator", "to", "add", "a", "route", "to", "a", "view", "and", "also", "apply", "caching", ".", "The", "url_rule", "name", "and", "options", "arguments", "are", "the", "same", "arguments", "for", "the", "route", "function", ".", "The", "TTL", "ar...
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/plugin.py#L224-L238
train
44,246
jbeluch/xbmcswift2
xbmcswift2/plugin.py
Plugin.run
def run(self, test=False): '''The main entry point for a plugin.''' self._request = self._parse_request() log.debug('Handling incoming request for %s', self.request.path) items = self._dispatch(self.request.path) # Close any open storages which will persist them to disk if hasattr(self, '_unsynced_storages'): for storage in self._unsynced_storages.values(): log.debug('Saving a %s storage to disk at "%s"', storage.file_format, storage.filename) storage.close() return items
python
def run(self, test=False): '''The main entry point for a plugin.''' self._request = self._parse_request() log.debug('Handling incoming request for %s', self.request.path) items = self._dispatch(self.request.path) # Close any open storages which will persist them to disk if hasattr(self, '_unsynced_storages'): for storage in self._unsynced_storages.values(): log.debug('Saving a %s storage to disk at "%s"', storage.file_format, storage.filename) storage.close() return items
[ "def", "run", "(", "self", ",", "test", "=", "False", ")", ":", "self", ".", "_request", "=", "self", ".", "_parse_request", "(", ")", "log", ".", "debug", "(", "'Handling incoming request for %s'", ",", "self", ".", "request", ".", "path", ")", "items",...
The main entry point for a plugin.
[ "The", "main", "entry", "point", "for", "a", "plugin", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/plugin.py#L328-L341
train
44,247
jbeluch/xbmcswift2
xbmcswift2/cli/cli.py
main
def main(): '''The entry point for the console script xbmcswift2. The 'xbcmswift2' script is command bassed, so the second argument is always the command to execute. Each command has its own parser options and usages. If no command is provided or the -h flag is used without any other commands, the general help message is shown. ''' parser = OptionParser() if len(sys.argv) == 1: parser.set_usage(USAGE) parser.error('At least one command is required.') # spy sys.argv[1] in order to use correct opts/args command = sys.argv[1] if command == '-h': parser.set_usage(USAGE) opts, args = parser.parse_args() if command not in COMMANDS.keys(): parser.error('Invalid command') # We have a proper command, set the usage and options list according to the # specific command manager = COMMANDS[command] if hasattr(manager, 'option_list'): for args, kwargs in manager.option_list: parser.add_option(*args, **kwargs) if hasattr(manager, 'usage'): parser.set_usage(manager.usage) opts, args = parser.parse_args() # Since we are calling a specific comamnd's manager, we no longer need the # actual command in sys.argv so we slice from position 1 manager.run(opts, args[1:])
python
def main(): '''The entry point for the console script xbmcswift2. The 'xbcmswift2' script is command bassed, so the second argument is always the command to execute. Each command has its own parser options and usages. If no command is provided or the -h flag is used without any other commands, the general help message is shown. ''' parser = OptionParser() if len(sys.argv) == 1: parser.set_usage(USAGE) parser.error('At least one command is required.') # spy sys.argv[1] in order to use correct opts/args command = sys.argv[1] if command == '-h': parser.set_usage(USAGE) opts, args = parser.parse_args() if command not in COMMANDS.keys(): parser.error('Invalid command') # We have a proper command, set the usage and options list according to the # specific command manager = COMMANDS[command] if hasattr(manager, 'option_list'): for args, kwargs in manager.option_list: parser.add_option(*args, **kwargs) if hasattr(manager, 'usage'): parser.set_usage(manager.usage) opts, args = parser.parse_args() # Since we are calling a specific comamnd's manager, we no longer need the # actual command in sys.argv so we slice from position 1 manager.run(opts, args[1:])
[ "def", "main", "(", ")", ":", "parser", "=", "OptionParser", "(", ")", "if", "len", "(", "sys", ".", "argv", ")", "==", "1", ":", "parser", ".", "set_usage", "(", "USAGE", ")", "parser", ".", "error", "(", "'At least one command is required.'", ")", "#...
The entry point for the console script xbmcswift2. The 'xbcmswift2' script is command bassed, so the second argument is always the command to execute. Each command has its own parser options and usages. If no command is provided or the -h flag is used without any other commands, the general help message is shown.
[ "The", "entry", "point", "for", "the", "console", "script", "xbmcswift2", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/cli.py#L40-L76
train
44,248
mozilla/amo-validator
validator/outputhandlers/shellcolors.py
OutputHandler.write
def write(self, text): 'Uses curses to print in the fanciest way possible.' # Add color to the terminal. if not self.no_color: text = self.colorize_text(text) else: pattern = re.compile('\<\<[A-Z]*?\>\>') text = pattern.sub('', text) text += '\n' self.buffer.write(text) return self
python
def write(self, text): 'Uses curses to print in the fanciest way possible.' # Add color to the terminal. if not self.no_color: text = self.colorize_text(text) else: pattern = re.compile('\<\<[A-Z]*?\>\>') text = pattern.sub('', text) text += '\n' self.buffer.write(text) return self
[ "def", "write", "(", "self", ",", "text", ")", ":", "# Add color to the terminal.", "if", "not", "self", ".", "no_color", ":", "text", "=", "self", ".", "colorize_text", "(", "text", ")", "else", ":", "pattern", "=", "re", ".", "compile", "(", "'\\<\\<[A...
Uses curses to print in the fanciest way possible.
[ "Uses", "curses", "to", "print", "in", "the", "fanciest", "way", "possible", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/outputhandlers/shellcolors.py#L73-L87
train
44,249
TurboGears/gearbox
gearbox/commands/serve.py
_turn_sigterm_into_systemexit
def _turn_sigterm_into_systemexit(): # pragma: no cover """ Attempts to turn a SIGTERM exception into a SystemExit exception. """ try: import signal except ImportError: return def handle_term(signo, frame): raise SystemExit signal.signal(signal.SIGTERM, handle_term)
python
def _turn_sigterm_into_systemexit(): # pragma: no cover """ Attempts to turn a SIGTERM exception into a SystemExit exception. """ try: import signal except ImportError: return def handle_term(signo, frame): raise SystemExit signal.signal(signal.SIGTERM, handle_term)
[ "def", "_turn_sigterm_into_systemexit", "(", ")", ":", "# pragma: no cover", "try", ":", "import", "signal", "except", "ImportError", ":", "return", "def", "handle_term", "(", "signo", ",", "frame", ")", ":", "raise", "SystemExit", "signal", ".", "signal", "(", ...
Attempts to turn a SIGTERM exception into a SystemExit exception.
[ "Attempts", "to", "turn", "a", "SIGTERM", "exception", "into", "a", "SystemExit", "exception", "." ]
df496ab28050ce6a4cc4c502488f5c5812f2baff
https://github.com/TurboGears/gearbox/blob/df496ab28050ce6a4cc4c502488f5c5812f2baff/gearbox/commands/serve.py#L624-L634
train
44,250
TurboGears/gearbox
gearbox/commands/serve.py
wsgiref_server_runner
def wsgiref_server_runner(wsgi_app, global_conf, **kw): # pragma: no cover """ Entry point for wsgiref's WSGI server Additional parameters: ``certfile``, ``keyfile`` Optional SSL certificate file and host key file names. You can generate self-signed test files as follows: $ openssl genrsa 1024 > keyfile $ chmod 400 keyfile $ openssl req -new -x509 -nodes -sha1 -days 365 \\ -key keyfile > certfile $ chmod 400 certfile The file names should contain full paths. """ from wsgiref.simple_server import make_server, WSGIServer host = kw.get('host', '0.0.0.0') port = int(kw.get('port', 8080)) threaded = asbool(kw.get('wsgiref.threaded', False)) server_class = WSGIServer certfile = kw.get('wsgiref.certfile') keyfile = kw.get('wsgiref.keyfile') scheme = 'http' if certfile and keyfile: """ based on code from nullege: description='Dropbox REST API Client with more consistent responses.', author='Rick van Hattem', author_email='Rick@Wol.ph', url='http://wol.ph/', """ import ssl class SecureWSGIServer(WSGIServer): def get_request(self): socket, client_address = WSGIServer.get_request(self) socket = ssl.wrap_socket(socket, server_side=True, certfile=certfile, keyfile=keyfile) return socket, client_address port = int(kw.get('port', 4443)) server_class = SecureWSGIServer if threaded: from SocketServer import ThreadingMixIn class GearboxWSGIServer(ThreadingMixIn, server_class): pass server_type = 'Threaded' else: class GearboxWSGIServer(server_class): pass server_type = 'Standard' server = make_server(host, port, wsgi_app, server_class=GearboxWSGIServer) if certfile and keyfile: server_type += ' Secure' scheme += 's' ServeCommand.out('Starting %s HTTP server on %s://%s:%s' % (server_type, scheme, host, port)) server.serve_forever()
python
def wsgiref_server_runner(wsgi_app, global_conf, **kw): # pragma: no cover """ Entry point for wsgiref's WSGI server Additional parameters: ``certfile``, ``keyfile`` Optional SSL certificate file and host key file names. You can generate self-signed test files as follows: $ openssl genrsa 1024 > keyfile $ chmod 400 keyfile $ openssl req -new -x509 -nodes -sha1 -days 365 \\ -key keyfile > certfile $ chmod 400 certfile The file names should contain full paths. """ from wsgiref.simple_server import make_server, WSGIServer host = kw.get('host', '0.0.0.0') port = int(kw.get('port', 8080)) threaded = asbool(kw.get('wsgiref.threaded', False)) server_class = WSGIServer certfile = kw.get('wsgiref.certfile') keyfile = kw.get('wsgiref.keyfile') scheme = 'http' if certfile and keyfile: """ based on code from nullege: description='Dropbox REST API Client with more consistent responses.', author='Rick van Hattem', author_email='Rick@Wol.ph', url='http://wol.ph/', """ import ssl class SecureWSGIServer(WSGIServer): def get_request(self): socket, client_address = WSGIServer.get_request(self) socket = ssl.wrap_socket(socket, server_side=True, certfile=certfile, keyfile=keyfile) return socket, client_address port = int(kw.get('port', 4443)) server_class = SecureWSGIServer if threaded: from SocketServer import ThreadingMixIn class GearboxWSGIServer(ThreadingMixIn, server_class): pass server_type = 'Threaded' else: class GearboxWSGIServer(server_class): pass server_type = 'Standard' server = make_server(host, port, wsgi_app, server_class=GearboxWSGIServer) if certfile and keyfile: server_type += ' Secure' scheme += 's' ServeCommand.out('Starting %s HTTP server on %s://%s:%s' % (server_type, scheme, host, port)) server.serve_forever()
[ "def", "wsgiref_server_runner", "(", "wsgi_app", ",", "global_conf", ",", "*", "*", "kw", ")", ":", "# pragma: no cover", "from", "wsgiref", ".", "simple_server", "import", "make_server", ",", "WSGIServer", "host", "=", "kw", ".", "get", "(", "'host'", ",", ...
Entry point for wsgiref's WSGI server Additional parameters: ``certfile``, ``keyfile`` Optional SSL certificate file and host key file names. You can generate self-signed test files as follows: $ openssl genrsa 1024 > keyfile $ chmod 400 keyfile $ openssl req -new -x509 -nodes -sha1 -days 365 \\ -key keyfile > certfile $ chmod 400 certfile The file names should contain full paths.
[ "Entry", "point", "for", "wsgiref", "s", "WSGI", "server" ]
df496ab28050ce6a4cc4c502488f5c5812f2baff
https://github.com/TurboGears/gearbox/blob/df496ab28050ce6a4cc4c502488f5c5812f2baff/gearbox/commands/serve.py#L638-L704
train
44,251
TurboGears/gearbox
gearbox/commands/serve.py
cherrypy_server_runner
def cherrypy_server_runner( app, global_conf=None, host='127.0.0.1', port=None, ssl_pem=None, protocol_version=None, numthreads=None, server_name=None, max=None, request_queue_size=None, timeout=None ): # pragma: no cover """ Entry point for CherryPy's WSGI server Serves the specified WSGI app via CherryPyWSGIServer. ``app`` The WSGI 'application callable'; multiple WSGI applications may be passed as (script_name, callable) pairs. ``host`` This is the ipaddress to bind to (or a hostname if your nameserver is properly configured). This defaults to 127.0.0.1, which is not a public interface. ``port`` The port to run on, defaults to 8080 for HTTP, or 4443 for HTTPS. This can be a string or an integer value. ``ssl_pem`` This an optional SSL certificate file (via OpenSSL) You can generate a self-signed test PEM certificate file as follows: $ openssl genrsa 1024 > host.key $ chmod 400 host.key $ openssl req -new -x509 -nodes -sha1 -days 365 \\ -key host.key > host.cert $ cat host.cert host.key > host.pem $ chmod 400 host.pem ``protocol_version`` The protocol used by the server, by default ``HTTP/1.1``. ``numthreads`` The number of worker threads to create. ``server_name`` The string to set for WSGI's SERVER_NAME environ entry. ``max`` The maximum number of queued requests. (defaults to -1 = no limit). ``request_queue_size`` The 'backlog' argument to socket.listen(); specifies the maximum number of queued connections. ``timeout`` The timeout in seconds for accepted connections. """ is_ssl = False if ssl_pem: port = port or 4443 is_ssl = True if not port: if ':' in host: host, port = host.split(':', 1) else: port = 8080 bind_addr = (host, int(port)) kwargs = {} for var_name in ('numthreads', 'max', 'request_queue_size', 'timeout'): var = locals()[var_name] if var is not None: kwargs[var_name] = int(var) server = None try: # Try to import from newer CherryPy releases. 
import cheroot.wsgi as wsgiserver server = wsgiserver.Server(bind_addr, app, server_name=server_name, **kwargs) except ImportError: # Nope. Try to import from older CherryPy releases. # We might just take another ImportError here. Oh well. from cherrypy import wsgiserver server = wsgiserver.CherryPyWSGIServer(bind_addr, app, server_name=server_name, **kwargs) server.ssl_certificate = server.ssl_private_key = ssl_pem if protocol_version: server.protocol = protocol_version try: protocol = is_ssl and 'https' or 'http' if host == '0.0.0.0': print('serving on 0.0.0.0:%s view at %s://127.0.0.1:%s' % (port, protocol, port)) else: print('serving on %s://%s:%s' % (protocol, host, port)) server.start() except (KeyboardInterrupt, SystemExit): server.stop() return server
python
def cherrypy_server_runner( app, global_conf=None, host='127.0.0.1', port=None, ssl_pem=None, protocol_version=None, numthreads=None, server_name=None, max=None, request_queue_size=None, timeout=None ): # pragma: no cover """ Entry point for CherryPy's WSGI server Serves the specified WSGI app via CherryPyWSGIServer. ``app`` The WSGI 'application callable'; multiple WSGI applications may be passed as (script_name, callable) pairs. ``host`` This is the ipaddress to bind to (or a hostname if your nameserver is properly configured). This defaults to 127.0.0.1, which is not a public interface. ``port`` The port to run on, defaults to 8080 for HTTP, or 4443 for HTTPS. This can be a string or an integer value. ``ssl_pem`` This an optional SSL certificate file (via OpenSSL) You can generate a self-signed test PEM certificate file as follows: $ openssl genrsa 1024 > host.key $ chmod 400 host.key $ openssl req -new -x509 -nodes -sha1 -days 365 \\ -key host.key > host.cert $ cat host.cert host.key > host.pem $ chmod 400 host.pem ``protocol_version`` The protocol used by the server, by default ``HTTP/1.1``. ``numthreads`` The number of worker threads to create. ``server_name`` The string to set for WSGI's SERVER_NAME environ entry. ``max`` The maximum number of queued requests. (defaults to -1 = no limit). ``request_queue_size`` The 'backlog' argument to socket.listen(); specifies the maximum number of queued connections. ``timeout`` The timeout in seconds for accepted connections. """ is_ssl = False if ssl_pem: port = port or 4443 is_ssl = True if not port: if ':' in host: host, port = host.split(':', 1) else: port = 8080 bind_addr = (host, int(port)) kwargs = {} for var_name in ('numthreads', 'max', 'request_queue_size', 'timeout'): var = locals()[var_name] if var is not None: kwargs[var_name] = int(var) server = None try: # Try to import from newer CherryPy releases. 
import cheroot.wsgi as wsgiserver server = wsgiserver.Server(bind_addr, app, server_name=server_name, **kwargs) except ImportError: # Nope. Try to import from older CherryPy releases. # We might just take another ImportError here. Oh well. from cherrypy import wsgiserver server = wsgiserver.CherryPyWSGIServer(bind_addr, app, server_name=server_name, **kwargs) server.ssl_certificate = server.ssl_private_key = ssl_pem if protocol_version: server.protocol = protocol_version try: protocol = is_ssl and 'https' or 'http' if host == '0.0.0.0': print('serving on 0.0.0.0:%s view at %s://127.0.0.1:%s' % (port, protocol, port)) else: print('serving on %s://%s:%s' % (protocol, host, port)) server.start() except (KeyboardInterrupt, SystemExit): server.stop() return server
[ "def", "cherrypy_server_runner", "(", "app", ",", "global_conf", "=", "None", ",", "host", "=", "'127.0.0.1'", ",", "port", "=", "None", ",", "ssl_pem", "=", "None", ",", "protocol_version", "=", "None", ",", "numthreads", "=", "None", ",", "server_name", ...
Entry point for CherryPy's WSGI server Serves the specified WSGI app via CherryPyWSGIServer. ``app`` The WSGI 'application callable'; multiple WSGI applications may be passed as (script_name, callable) pairs. ``host`` This is the ipaddress to bind to (or a hostname if your nameserver is properly configured). This defaults to 127.0.0.1, which is not a public interface. ``port`` The port to run on, defaults to 8080 for HTTP, or 4443 for HTTPS. This can be a string or an integer value. ``ssl_pem`` This an optional SSL certificate file (via OpenSSL) You can generate a self-signed test PEM certificate file as follows: $ openssl genrsa 1024 > host.key $ chmod 400 host.key $ openssl req -new -x509 -nodes -sha1 -days 365 \\ -key host.key > host.cert $ cat host.cert host.key > host.pem $ chmod 400 host.pem ``protocol_version`` The protocol used by the server, by default ``HTTP/1.1``. ``numthreads`` The number of worker threads to create. ``server_name`` The string to set for WSGI's SERVER_NAME environ entry. ``max`` The maximum number of queued requests. (defaults to -1 = no limit). ``request_queue_size`` The 'backlog' argument to socket.listen(); specifies the maximum number of queued connections. ``timeout`` The timeout in seconds for accepted connections.
[ "Entry", "point", "for", "CherryPy", "s", "WSGI", "server" ]
df496ab28050ce6a4cc4c502488f5c5812f2baff
https://github.com/TurboGears/gearbox/blob/df496ab28050ce6a4cc4c502488f5c5812f2baff/gearbox/commands/serve.py#L729-L840
train
44,252
TurboGears/gearbox
gearbox/commands/serve.py
ServeCommand.get_fixed_argv
def get_fixed_argv(self): # pragma: no cover """Get proper arguments for re-running the command. This is primarily for fixing some issues under Windows. First, there was a bug in Windows when running an executable located at a path with a space in it. This has become a non-issue with current versions of Python and Windows, so we don't take measures like adding quotes or calling win32api.GetShortPathName() as was necessary in former times. Second, depending on whether gearbox was installed as an egg or a wheel under Windows, it is run as a .py or an .exe stub. In the first case, we need to run it through the interpreter. On other operating systems, we can re-run the command as is. """ argv = sys.argv[:] if sys.platform == 'win32' and argv[0].endswith('.py'): argv.insert(0, sys.executable) return argv
python
def get_fixed_argv(self): # pragma: no cover """Get proper arguments for re-running the command. This is primarily for fixing some issues under Windows. First, there was a bug in Windows when running an executable located at a path with a space in it. This has become a non-issue with current versions of Python and Windows, so we don't take measures like adding quotes or calling win32api.GetShortPathName() as was necessary in former times. Second, depending on whether gearbox was installed as an egg or a wheel under Windows, it is run as a .py or an .exe stub. In the first case, we need to run it through the interpreter. On other operating systems, we can re-run the command as is. """ argv = sys.argv[:] if sys.platform == 'win32' and argv[0].endswith('.py'): argv.insert(0, sys.executable) return argv
[ "def", "get_fixed_argv", "(", "self", ")", ":", "# pragma: no cover", "argv", "=", "sys", ".", "argv", "[", ":", "]", "if", "sys", ".", "platform", "==", "'win32'", "and", "argv", "[", "0", "]", ".", "endswith", "(", "'.py'", ")", ":", "argv", ".", ...
Get proper arguments for re-running the command. This is primarily for fixing some issues under Windows. First, there was a bug in Windows when running an executable located at a path with a space in it. This has become a non-issue with current versions of Python and Windows, so we don't take measures like adding quotes or calling win32api.GetShortPathName() as was necessary in former times. Second, depending on whether gearbox was installed as an egg or a wheel under Windows, it is run as a .py or an .exe stub. In the first case, we need to run it through the interpreter. On other operating systems, we can re-run the command as is.
[ "Get", "proper", "arguments", "for", "re", "-", "running", "the", "command", "." ]
df496ab28050ce6a4cc4c502488f5c5812f2baff
https://github.com/TurboGears/gearbox/blob/df496ab28050ce6a4cc4c502488f5c5812f2baff/gearbox/commands/serve.py#L328-L348
train
44,253
mozilla/amo-validator
validator/xpi.py
XPIManager.package_contents
def package_contents(self): 'Returns a dictionary of file information' if self.contents_cache: return self.contents_cache # Get a list of ZipInfo objects. files = self.zf.infolist() out_files = {} # Iterate through each file in the XPI. for file_ in files: file_doc = {'name': file_.filename, 'size': file_.file_size, 'name_lower': file_.filename.lower()} file_doc['extension'] = file_doc['name_lower'].split('.')[-1] out_files[file_.filename] = file_doc self.contents_cache = out_files return out_files
python
def package_contents(self): 'Returns a dictionary of file information' if self.contents_cache: return self.contents_cache # Get a list of ZipInfo objects. files = self.zf.infolist() out_files = {} # Iterate through each file in the XPI. for file_ in files: file_doc = {'name': file_.filename, 'size': file_.file_size, 'name_lower': file_.filename.lower()} file_doc['extension'] = file_doc['name_lower'].split('.')[-1] out_files[file_.filename] = file_doc self.contents_cache = out_files return out_files
[ "def", "package_contents", "(", "self", ")", ":", "if", "self", ".", "contents_cache", ":", "return", "self", ".", "contents_cache", "# Get a list of ZipInfo objects.", "files", "=", "self", ".", "zf", ".", "infolist", "(", ")", "out_files", "=", "{", "}", "...
Returns a dictionary of file information
[ "Returns", "a", "dictionary", "of", "file", "information" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/xpi.py#L40-L62
train
44,254
mozilla/amo-validator
validator/xpi.py
XPIManager.write
def write(self, name, data): """Write a blob of data to the XPI manager.""" if isinstance(data, StringIO): self.zf.writestr(name, data.getvalue()) else: self.zf.writestr(name, to_utf8(data))
python
def write(self, name, data): """Write a blob of data to the XPI manager.""" if isinstance(data, StringIO): self.zf.writestr(name, data.getvalue()) else: self.zf.writestr(name, to_utf8(data))
[ "def", "write", "(", "self", ",", "name", ",", "data", ")", ":", "if", "isinstance", "(", "data", ",", "StringIO", ")", ":", "self", ".", "zf", ".", "writestr", "(", "name", ",", "data", ".", "getvalue", "(", ")", ")", "else", ":", "self", ".", ...
Write a blob of data to the XPI manager.
[ "Write", "a", "blob", "of", "data", "to", "the", "XPI", "manager", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/xpi.py#L70-L75
train
44,255
mozilla/amo-validator
validator/xpi.py
XPIManager.write_file
def write_file(self, name, path=None): """Write the contents of a file from the disk to the XPI.""" if path is None: path = name self.zf.write(path, name)
python
def write_file(self, name, path=None): """Write the contents of a file from the disk to the XPI.""" if path is None: path = name self.zf.write(path, name)
[ "def", "write_file", "(", "self", ",", "name", ",", "path", "=", "None", ")", ":", "if", "path", "is", "None", ":", "path", "=", "name", "self", ".", "zf", ".", "write", "(", "path", ",", "name", ")" ]
Write the contents of a file from the disk to the XPI.
[ "Write", "the", "contents", "of", "a", "file", "from", "the", "disk", "to", "the", "XPI", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/xpi.py#L77-L83
train
44,256
jbeluch/xbmcswift2
xbmcswift2/common.py
enum
def enum(*args, **kwargs): '''An enum class to mirror XBMC constatns. All args and kwargs.keys are added as atrrs on the returned object. >>> States = enum('NEW_JERSEY', NY='NEW_YORK') >>> States.NY 'NEW_YORK' >>> States.NEW_JERSEY 'NEW_JERSEY' >>> States._fields ['NY', 'NEW_JERSEY'] ''' kwargs.update((arg, arg) for arg in args) kwargs['_fields'] = kwargs.keys() return type('Enum', (), kwargs)
python
def enum(*args, **kwargs): '''An enum class to mirror XBMC constatns. All args and kwargs.keys are added as atrrs on the returned object. >>> States = enum('NEW_JERSEY', NY='NEW_YORK') >>> States.NY 'NEW_YORK' >>> States.NEW_JERSEY 'NEW_JERSEY' >>> States._fields ['NY', 'NEW_JERSEY'] ''' kwargs.update((arg, arg) for arg in args) kwargs['_fields'] = kwargs.keys() return type('Enum', (), kwargs)
[ "def", "enum", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "update", "(", "(", "arg", ",", "arg", ")", "for", "arg", "in", "args", ")", "kwargs", "[", "'_fields'", "]", "=", "kwargs", ".", "keys", "(", ")", "return", "ty...
An enum class to mirror XBMC constatns. All args and kwargs.keys are added as atrrs on the returned object. >>> States = enum('NEW_JERSEY', NY='NEW_YORK') >>> States.NY 'NEW_YORK' >>> States.NEW_JERSEY 'NEW_JERSEY' >>> States._fields ['NY', 'NEW_JERSEY']
[ "An", "enum", "class", "to", "mirror", "XBMC", "constatns", ".", "All", "args", "and", "kwargs", ".", "keys", "are", "added", "as", "atrrs", "on", "the", "returned", "object", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/common.py#L29-L43
train
44,257
jbeluch/xbmcswift2
xbmcswift2/common.py
clean_dict
def clean_dict(dct): '''Returns a dict where items with a None value are removed''' return dict((key, val) for key, val in dct.items() if val is not None)
python
def clean_dict(dct): '''Returns a dict where items with a None value are removed''' return dict((key, val) for key, val in dct.items() if val is not None)
[ "def", "clean_dict", "(", "dct", ")", ":", "return", "dict", "(", "(", "key", ",", "val", ")", "for", "key", ",", "val", "in", "dct", ".", "items", "(", ")", "if", "val", "is", "not", "None", ")" ]
Returns a dict where items with a None value are removed
[ "Returns", "a", "dict", "where", "items", "with", "a", "None", "value", "are", "removed" ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/common.py#L50-L52
train
44,258
jbeluch/xbmcswift2
xbmcswift2/common.py
pickle_dict
def pickle_dict(items): '''Returns a new dictionary where values which aren't instances of basestring are pickled. Also, a new key '_pickled' contains a comma separated list of keys corresponding to the pickled values. ''' ret = {} pickled_keys = [] for key, val in items.items(): if isinstance(val, basestring): ret[key] = val else: pickled_keys.append(key) ret[key] = pickle.dumps(val) if pickled_keys: ret['_pickled'] = ','.join(pickled_keys) return ret
python
def pickle_dict(items): '''Returns a new dictionary where values which aren't instances of basestring are pickled. Also, a new key '_pickled' contains a comma separated list of keys corresponding to the pickled values. ''' ret = {} pickled_keys = [] for key, val in items.items(): if isinstance(val, basestring): ret[key] = val else: pickled_keys.append(key) ret[key] = pickle.dumps(val) if pickled_keys: ret['_pickled'] = ','.join(pickled_keys) return ret
[ "def", "pickle_dict", "(", "items", ")", ":", "ret", "=", "{", "}", "pickled_keys", "=", "[", "]", "for", "key", ",", "val", "in", "items", ".", "items", "(", ")", ":", "if", "isinstance", "(", "val", ",", "basestring", ")", ":", "ret", "[", "key...
Returns a new dictionary where values which aren't instances of basestring are pickled. Also, a new key '_pickled' contains a comma separated list of keys corresponding to the pickled values.
[ "Returns", "a", "new", "dictionary", "where", "values", "which", "aren", "t", "instances", "of", "basestring", "are", "pickled", ".", "Also", "a", "new", "key", "_pickled", "contains", "a", "comma", "separated", "list", "of", "keys", "corresponding", "to", "...
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/common.py#L55-L70
train
44,259
jbeluch/xbmcswift2
xbmcswift2/common.py
unpickle_args
def unpickle_args(items): '''Takes a dict and unpickles values whose keys are found in '_pickled' key. >>> unpickle_args({'_pickled': ['foo']. 'foo': ['I3%0A.']}) {'foo': 3} ''' # Technically there can be more than one _pickled value. At this point # we'll just use the first one pickled= items.pop('_pickled', None) if pickled is None: return items pickled_keys = pickled[0].split(',') ret = {} for key, vals in items.items(): if key in pickled_keys: ret[key] = [pickle.loads(val) for val in vals] else: ret[key] = vals return ret
python
def unpickle_args(items): '''Takes a dict and unpickles values whose keys are found in '_pickled' key. >>> unpickle_args({'_pickled': ['foo']. 'foo': ['I3%0A.']}) {'foo': 3} ''' # Technically there can be more than one _pickled value. At this point # we'll just use the first one pickled= items.pop('_pickled', None) if pickled is None: return items pickled_keys = pickled[0].split(',') ret = {} for key, vals in items.items(): if key in pickled_keys: ret[key] = [pickle.loads(val) for val in vals] else: ret[key] = vals return ret
[ "def", "unpickle_args", "(", "items", ")", ":", "# Technically there can be more than one _pickled value. At this point", "# we'll just use the first one", "pickled", "=", "items", ".", "pop", "(", "'_pickled'", ",", "None", ")", "if", "pickled", "is", "None", ":", "ret...
Takes a dict and unpickles values whose keys are found in '_pickled' key. >>> unpickle_args({'_pickled': ['foo']. 'foo': ['I3%0A.']}) {'foo': 3}
[ "Takes", "a", "dict", "and", "unpickles", "values", "whose", "keys", "are", "found", "in", "_pickled", "key", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/common.py#L73-L93
train
44,260
jbeluch/xbmcswift2
xbmcswift2/common.py
unpickle_dict
def unpickle_dict(items): '''Returns a dict pickled with pickle_dict''' pickled_keys = items.pop('_pickled', '').split(',') ret = {} for key, val in items.items(): if key in pickled_keys: ret[key] = pickle.loads(val) else: ret[key] = val return ret
python
def unpickle_dict(items): '''Returns a dict pickled with pickle_dict''' pickled_keys = items.pop('_pickled', '').split(',') ret = {} for key, val in items.items(): if key in pickled_keys: ret[key] = pickle.loads(val) else: ret[key] = val return ret
[ "def", "unpickle_dict", "(", "items", ")", ":", "pickled_keys", "=", "items", ".", "pop", "(", "'_pickled'", ",", "''", ")", ".", "split", "(", "','", ")", "ret", "=", "{", "}", "for", "key", ",", "val", "in", "items", ".", "items", "(", ")", ":"...
Returns a dict pickled with pickle_dict
[ "Returns", "a", "dict", "pickled", "with", "pickle_dict" ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/common.py#L95-L104
train
44,261
jbeluch/xbmcswift2
xbmcswift2/common.py
download_page
def download_page(url, data=None): '''Returns the response for the given url. The optional data argument is passed directly to urlopen.''' conn = urllib2.urlopen(url, data) resp = conn.read() conn.close() return resp
python
def download_page(url, data=None): '''Returns the response for the given url. The optional data argument is passed directly to urlopen.''' conn = urllib2.urlopen(url, data) resp = conn.read() conn.close() return resp
[ "def", "download_page", "(", "url", ",", "data", "=", "None", ")", ":", "conn", "=", "urllib2", ".", "urlopen", "(", "url", ",", "data", ")", "resp", "=", "conn", ".", "read", "(", ")", "conn", ".", "close", "(", ")", "return", "resp" ]
Returns the response for the given url. The optional data argument is passed directly to urlopen.
[ "Returns", "the", "response", "for", "the", "given", "url", ".", "The", "optional", "data", "argument", "is", "passed", "directly", "to", "urlopen", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/common.py#L107-L113
train
44,262
TurboGears/gearbox
gearbox/commandmanager.py
CommandManager.load_commands
def load_commands(self, namespace): """Load all the commands from an entrypoint""" for ep in pkg_resources.iter_entry_points(namespace): LOG.debug('found command %r', ep.name) cmd_name = (ep.name.replace('_', ' ') if self.convert_underscores else ep.name) self.commands[cmd_name] = ep return
python
def load_commands(self, namespace): """Load all the commands from an entrypoint""" for ep in pkg_resources.iter_entry_points(namespace): LOG.debug('found command %r', ep.name) cmd_name = (ep.name.replace('_', ' ') if self.convert_underscores else ep.name) self.commands[cmd_name] = ep return
[ "def", "load_commands", "(", "self", ",", "namespace", ")", ":", "for", "ep", "in", "pkg_resources", ".", "iter_entry_points", "(", "namespace", ")", ":", "LOG", ".", "debug", "(", "'found command %r'", ",", "ep", ".", "name", ")", "cmd_name", "=", "(", ...
Load all the commands from an entrypoint
[ "Load", "all", "the", "commands", "from", "an", "entrypoint" ]
df496ab28050ce6a4cc4c502488f5c5812f2baff
https://github.com/TurboGears/gearbox/blob/df496ab28050ce6a4cc4c502488f5c5812f2baff/gearbox/commandmanager.py#L47-L55
train
44,263
TurboGears/gearbox
gearbox/commandmanager.py
CommandManager.find_command
def find_command(self, argv): """Given an argument list, find a command and return the processor and any remaining arguments. """ search_args = argv[:] name = '' while search_args: if search_args[0].startswith('-'): name = '%s %s' % (name, search_args[0]) raise ValueError('Invalid command %r' % name) next_val = search_args.pop(0) name = '%s %s' % (name, next_val) if name else next_val if name in self.commands: cmd_ep = self.commands[name] if hasattr(cmd_ep, 'resolve'): cmd_factory = cmd_ep.resolve() else: # NOTE(dhellmann): Some fake classes don't take # require as an argument. Yay? arg_spec = inspect.getargspec(cmd_ep.load) if 'require' in arg_spec[0]: cmd_factory = cmd_ep.load(require=False) else: cmd_factory = cmd_ep.load() return (cmd_factory, name, search_args) else: raise ValueError('Unknown command %r' % next(iter(argv), ''))
python
def find_command(self, argv): """Given an argument list, find a command and return the processor and any remaining arguments. """ search_args = argv[:] name = '' while search_args: if search_args[0].startswith('-'): name = '%s %s' % (name, search_args[0]) raise ValueError('Invalid command %r' % name) next_val = search_args.pop(0) name = '%s %s' % (name, next_val) if name else next_val if name in self.commands: cmd_ep = self.commands[name] if hasattr(cmd_ep, 'resolve'): cmd_factory = cmd_ep.resolve() else: # NOTE(dhellmann): Some fake classes don't take # require as an argument. Yay? arg_spec = inspect.getargspec(cmd_ep.load) if 'require' in arg_spec[0]: cmd_factory = cmd_ep.load(require=False) else: cmd_factory = cmd_ep.load() return (cmd_factory, name, search_args) else: raise ValueError('Unknown command %r' % next(iter(argv), ''))
[ "def", "find_command", "(", "self", ",", "argv", ")", ":", "search_args", "=", "argv", "[", ":", "]", "name", "=", "''", "while", "search_args", ":", "if", "search_args", "[", "0", "]", ".", "startswith", "(", "'-'", ")", ":", "name", "=", "'%s %s'",...
Given an argument list, find a command and return the processor and any remaining arguments.
[ "Given", "an", "argument", "list", "find", "a", "command", "and", "return", "the", "processor", "and", "any", "remaining", "arguments", "." ]
df496ab28050ce6a4cc4c502488f5c5812f2baff
https://github.com/TurboGears/gearbox/blob/df496ab28050ce6a4cc4c502488f5c5812f2baff/gearbox/commandmanager.py#L63-L89
train
44,264
mozilla/amo-validator
validator/submain.py
prepare_package
def prepare_package(err, path, expectation=0, for_appversions=None, timeout=-1): """Prepares a file-based package for validation. timeout is the number of seconds before validation is aborted. If timeout is -1 then no timeout checking code will run. """ package = None try: # Test that the package actually exists. I consider this Tier 0 # since we may not even be dealing with a real file. if not os.path.isfile(path): err.error(('main', 'prepare_package', 'not_found'), 'The package could not be found') return # Pop the package extension. package_extension = os.path.splitext(path)[1] package_extension = package_extension.lower() def timeout_handler(signum, frame): raise validator.ValidationTimeout(timeout) if timeout != -1: signal.signal(signal.SIGALRM, timeout_handler) signal.setitimer(signal.ITIMER_REAL, timeout) if package_extension == '.xml': test_search(err, path, expectation) elif package_extension not in ('.xpi', '.jar'): err.error(('main', 'prepare_package', 'unrecognized'), 'The package is not of a recognized type.') else: package = open(path, 'rb') test_package(err, package, path, expectation, for_appversions) err.metadata['is_extension'] = err.detected_type == PACKAGE_EXTENSION except validator.ValidationTimeout: err.system_error( msg_id='validation_timeout', message='Validation has timed out', signing_severity='high', description=('Validation was unable to complete in the allotted ' 'time. This is most likely due to the size or ' 'complexity of your add-on.', 'This timeout has been logged, but please consider ' 'filing an issue report here: ' 'https://bit.ly/1POrYYU'), exc_info=sys.exc_info()) except Exception: err.system_error(exc_info=sys.exc_info()) finally: # Remove timers and signal handlers regardless of whether # we've completed successfully or the timer has fired. if timeout != -1: signal.setitimer(signal.ITIMER_REAL, 0) signal.signal(signal.SIGALRM, signal.SIG_DFL) if package: package.close() decorator.cleanup()
python
def prepare_package(err, path, expectation=0, for_appversions=None, timeout=-1): """Prepares a file-based package for validation. timeout is the number of seconds before validation is aborted. If timeout is -1 then no timeout checking code will run. """ package = None try: # Test that the package actually exists. I consider this Tier 0 # since we may not even be dealing with a real file. if not os.path.isfile(path): err.error(('main', 'prepare_package', 'not_found'), 'The package could not be found') return # Pop the package extension. package_extension = os.path.splitext(path)[1] package_extension = package_extension.lower() def timeout_handler(signum, frame): raise validator.ValidationTimeout(timeout) if timeout != -1: signal.signal(signal.SIGALRM, timeout_handler) signal.setitimer(signal.ITIMER_REAL, timeout) if package_extension == '.xml': test_search(err, path, expectation) elif package_extension not in ('.xpi', '.jar'): err.error(('main', 'prepare_package', 'unrecognized'), 'The package is not of a recognized type.') else: package = open(path, 'rb') test_package(err, package, path, expectation, for_appversions) err.metadata['is_extension'] = err.detected_type == PACKAGE_EXTENSION except validator.ValidationTimeout: err.system_error( msg_id='validation_timeout', message='Validation has timed out', signing_severity='high', description=('Validation was unable to complete in the allotted ' 'time. This is most likely due to the size or ' 'complexity of your add-on.', 'This timeout has been logged, but please consider ' 'filing an issue report here: ' 'https://bit.ly/1POrYYU'), exc_info=sys.exc_info()) except Exception: err.system_error(exc_info=sys.exc_info()) finally: # Remove timers and signal handlers regardless of whether # we've completed successfully or the timer has fired. if timeout != -1: signal.setitimer(signal.ITIMER_REAL, 0) signal.signal(signal.SIGALRM, signal.SIG_DFL) if package: package.close() decorator.cleanup()
[ "def", "prepare_package", "(", "err", ",", "path", ",", "expectation", "=", "0", ",", "for_appversions", "=", "None", ",", "timeout", "=", "-", "1", ")", ":", "package", "=", "None", "try", ":", "# Test that the package actually exists. I consider this Tier 0", ...
Prepares a file-based package for validation. timeout is the number of seconds before validation is aborted. If timeout is -1 then no timeout checking code will run.
[ "Prepares", "a", "file", "-", "based", "package", "for", "validation", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/submain.py#L35-L100
train
44,265
mozilla/amo-validator
validator/submain.py
populate_chrome_manifest
def populate_chrome_manifest(err, xpi_package): "Loads the chrome.manifest if it's present" if 'chrome.manifest' in xpi_package: chrome_data = xpi_package.read('chrome.manifest') chrome = ChromeManifest(chrome_data, 'chrome.manifest') chrome_recursion_buster = set() # Handle the case of manifests linked from the manifest. def get_linked_manifest(path, from_path, from_chrome, from_triple): if path in chrome_recursion_buster: err.warning( err_id=('submain', 'populate_chrome_manifest', 'recursion'), warning='Linked manifest recursion detected.', description='A chrome registration file links back to ' 'itself. This can cause a multitude of ' 'issues.', filename=path) return # Make sure the manifest is properly linked if path not in xpi_package: err.notice( err_id=('submain', 'populate_chrome_manifest', 'linkerr'), notice='Linked manifest could not be found.', description=('A linked manifest file could not be found ' 'in the package.', 'Path: %s' % path), filename=from_path, line=from_triple['line'], context=from_chrome.context) return chrome_recursion_buster.add(path) manifest = ChromeManifest(xpi_package.read(path), path) for triple in manifest.triples: yield triple if triple['subject'] == 'manifest': subpath = triple['predicate'] # If the path is relative, make it relative to the current # file. if not subpath.startswith('/'): subpath = '%s/%s' % ( '/'.join(path.split('/')[:-1]), subpath) subpath = subpath.lstrip('/') for subtriple in get_linked_manifest( subpath, path, manifest, triple): yield subtriple chrome_recursion_buster.discard(path) chrome_recursion_buster.add('chrome.manifest') # Search for linked manifests in the base manifest. for extra_manifest in chrome.get_triples(subject='manifest'): # When one is found, add its triples to our own. 
for triple in get_linked_manifest(extra_manifest['predicate'], 'chrome.manifest', chrome, extra_manifest): chrome.triples.append(triple) chrome_recursion_buster.discard('chrome.manifest') # Create a reference so we can get the chrome manifest later, but make # it pushable so we don't run chrome manifests in JAR files. err.save_resource('chrome.manifest', chrome, pushable=True) # Create a non-pushable reference for tests that need to access the # chrome manifest from within JAR files. err.save_resource('chrome.manifest_nopush', chrome, pushable=False)
python
def populate_chrome_manifest(err, xpi_package): "Loads the chrome.manifest if it's present" if 'chrome.manifest' in xpi_package: chrome_data = xpi_package.read('chrome.manifest') chrome = ChromeManifest(chrome_data, 'chrome.manifest') chrome_recursion_buster = set() # Handle the case of manifests linked from the manifest. def get_linked_manifest(path, from_path, from_chrome, from_triple): if path in chrome_recursion_buster: err.warning( err_id=('submain', 'populate_chrome_manifest', 'recursion'), warning='Linked manifest recursion detected.', description='A chrome registration file links back to ' 'itself. This can cause a multitude of ' 'issues.', filename=path) return # Make sure the manifest is properly linked if path not in xpi_package: err.notice( err_id=('submain', 'populate_chrome_manifest', 'linkerr'), notice='Linked manifest could not be found.', description=('A linked manifest file could not be found ' 'in the package.', 'Path: %s' % path), filename=from_path, line=from_triple['line'], context=from_chrome.context) return chrome_recursion_buster.add(path) manifest = ChromeManifest(xpi_package.read(path), path) for triple in manifest.triples: yield triple if triple['subject'] == 'manifest': subpath = triple['predicate'] # If the path is relative, make it relative to the current # file. if not subpath.startswith('/'): subpath = '%s/%s' % ( '/'.join(path.split('/')[:-1]), subpath) subpath = subpath.lstrip('/') for subtriple in get_linked_manifest( subpath, path, manifest, triple): yield subtriple chrome_recursion_buster.discard(path) chrome_recursion_buster.add('chrome.manifest') # Search for linked manifests in the base manifest. for extra_manifest in chrome.get_triples(subject='manifest'): # When one is found, add its triples to our own. 
for triple in get_linked_manifest(extra_manifest['predicate'], 'chrome.manifest', chrome, extra_manifest): chrome.triples.append(triple) chrome_recursion_buster.discard('chrome.manifest') # Create a reference so we can get the chrome manifest later, but make # it pushable so we don't run chrome manifests in JAR files. err.save_resource('chrome.manifest', chrome, pushable=True) # Create a non-pushable reference for tests that need to access the # chrome manifest from within JAR files. err.save_resource('chrome.manifest_nopush', chrome, pushable=False)
[ "def", "populate_chrome_manifest", "(", "err", ",", "xpi_package", ")", ":", "if", "'chrome.manifest'", "in", "xpi_package", ":", "chrome_data", "=", "xpi_package", ".", "read", "(", "'chrome.manifest'", ")", "chrome", "=", "ChromeManifest", "(", "chrome_data", ",...
Loads the chrome.manifest if it's present
[ "Loads", "the", "chrome", ".", "manifest", "if", "it", "s", "present" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/submain.py#L279-L354
train
44,266
mozilla/amo-validator
validator/typedetection.py
detect_type
def detect_type(err, install_rdf=None, xpi_package=None): """Determines the type of add-on being validated based on install.rdf, file extension, and other properties.""" # The types in the install.rdf don't pair up 1:1 with the type # system that we're using for expectations and the like. This is # to help translate between the two. translated_types = {'2': PACKAGE_EXTENSION, '4': PACKAGE_THEME, '8': PACKAGE_LANGPACK, '32': PACKAGE_MULTI, '64': PACKAGE_DICTIONARY, # New "experiment" types: see bug 1220097 and # https://github.com/mozilla/addons-server/issues/3315 '128': PACKAGE_EXTENSION, '256': PACKAGE_EXTENSION,} # If we're missing our install.rdf file, we can try to make some # assumptions. if install_rdf is None: types = {'xpi': PACKAGE_DICTIONARY} err.notice(('typedetection', 'detect_type', 'missing_install_rdf'), 'install.rdf was not found.', 'The type should be determined by install.rdf if present. ' "If it isn't, we still need to know the type.") # If we know what the file type might be, return it. if xpi_package.extension in types: return types[xpi_package.extension] # Otherwise, we're out of luck :( else: return None # Attempt to locate the <em:type> node in the RDF doc. type_uri = install_rdf.uri('type') type_ = install_rdf.get_object(None, type_uri) # Dictionaries are weird too, they might not have the obligatory # em:type. We can assume that if they have a /dictionaries/ folder, # they are a dictionary because even if they aren't, dictionaries # have an extraordinarily strict set of rules and file filters that # must be passed. It's so crazy secure that it's cool if we use it # as kind of a fallback. 
if any(file_ for file_ in xpi_package if file_.startswith('dictionaries/')): if type_ != '64': err.error(('typedetection', 'dictionary_valid_type', 'invalid_em_type'), 'Invalid <em:type> value.', 'The package appears to be a dictionary but does not have ' 'the correct <em:type> set in the install manifest.') return PACKAGE_DICTIONARY if type_ is not None: if type_ in translated_types: err.save_resource('is_multipackage', type_ == '32', pushable=True) # Make sure we translate back to the normalized version return translated_types[type_] else: err.error(('typedetection', 'detect_type', 'invalid_em_type'), 'Invalid <em:type> value.', 'The only valid values for <em:type> are 2, 4, 8, and ' '32. Any other values are either invalid or deprecated.', 'install.rdf') return else: err.notice( err_id=('typedetection', 'detect_type', 'no_em:type'), notice='No <em:type> element found in install.rdf', description="It isn't always required, but it is the most reliable " 'method for determining add-on type.', filename='install.rdf') # There's no type element, so the spec says that it's either a # theme or an extension. At this point, we know that it isn't # a dictionary, language pack, or multiple extension pack. extensions = {'jar': '4', 'xpi': '2'} # If the package's extension is listed in the [tiny] extension # dictionary, then just return that. We'll validate against that # add-on type's layout later. Better to false positive than to false # negative. if xpi_package.extension in extensions: # Make sure it gets translated back to the normalized version install_rdf_type = extensions[xpi_package.extension] return translated_types[install_rdf_type]
python
def detect_type(err, install_rdf=None, xpi_package=None): """Determines the type of add-on being validated based on install.rdf, file extension, and other properties.""" # The types in the install.rdf don't pair up 1:1 with the type # system that we're using for expectations and the like. This is # to help translate between the two. translated_types = {'2': PACKAGE_EXTENSION, '4': PACKAGE_THEME, '8': PACKAGE_LANGPACK, '32': PACKAGE_MULTI, '64': PACKAGE_DICTIONARY, # New "experiment" types: see bug 1220097 and # https://github.com/mozilla/addons-server/issues/3315 '128': PACKAGE_EXTENSION, '256': PACKAGE_EXTENSION,} # If we're missing our install.rdf file, we can try to make some # assumptions. if install_rdf is None: types = {'xpi': PACKAGE_DICTIONARY} err.notice(('typedetection', 'detect_type', 'missing_install_rdf'), 'install.rdf was not found.', 'The type should be determined by install.rdf if present. ' "If it isn't, we still need to know the type.") # If we know what the file type might be, return it. if xpi_package.extension in types: return types[xpi_package.extension] # Otherwise, we're out of luck :( else: return None # Attempt to locate the <em:type> node in the RDF doc. type_uri = install_rdf.uri('type') type_ = install_rdf.get_object(None, type_uri) # Dictionaries are weird too, they might not have the obligatory # em:type. We can assume that if they have a /dictionaries/ folder, # they are a dictionary because even if they aren't, dictionaries # have an extraordinarily strict set of rules and file filters that # must be passed. It's so crazy secure that it's cool if we use it # as kind of a fallback. 
if any(file_ for file_ in xpi_package if file_.startswith('dictionaries/')): if type_ != '64': err.error(('typedetection', 'dictionary_valid_type', 'invalid_em_type'), 'Invalid <em:type> value.', 'The package appears to be a dictionary but does not have ' 'the correct <em:type> set in the install manifest.') return PACKAGE_DICTIONARY if type_ is not None: if type_ in translated_types: err.save_resource('is_multipackage', type_ == '32', pushable=True) # Make sure we translate back to the normalized version return translated_types[type_] else: err.error(('typedetection', 'detect_type', 'invalid_em_type'), 'Invalid <em:type> value.', 'The only valid values for <em:type> are 2, 4, 8, and ' '32. Any other values are either invalid or deprecated.', 'install.rdf') return else: err.notice( err_id=('typedetection', 'detect_type', 'no_em:type'), notice='No <em:type> element found in install.rdf', description="It isn't always required, but it is the most reliable " 'method for determining add-on type.', filename='install.rdf') # There's no type element, so the spec says that it's either a # theme or an extension. At this point, we know that it isn't # a dictionary, language pack, or multiple extension pack. extensions = {'jar': '4', 'xpi': '2'} # If the package's extension is listed in the [tiny] extension # dictionary, then just return that. We'll validate against that # add-on type's layout later. Better to false positive than to false # negative. if xpi_package.extension in extensions: # Make sure it gets translated back to the normalized version install_rdf_type = extensions[xpi_package.extension] return translated_types[install_rdf_type]
[ "def", "detect_type", "(", "err", ",", "install_rdf", "=", "None", ",", "xpi_package", "=", "None", ")", ":", "# The types in the install.rdf don't pair up 1:1 with the type", "# system that we're using for expectations and the like. This is", "# to help translate between the two.", ...
Determines the type of add-on being validated based on install.rdf, file extension, and other properties.
[ "Determines", "the", "type", "of", "add", "-", "on", "being", "validated", "based", "on", "install", ".", "rdf", "file", "extension", "and", "other", "properties", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/typedetection.py#L4-L99
train
44,267
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
setup_options
def setup_options(opts): '''Takes any actions necessary based on command line options''' if opts.quiet: logger.log.setLevel(logging.WARNING) logger.GLOBAL_LOG_LEVEL = logging.WARNING if opts.verbose: logger.log.setLevel(logging.DEBUG) logger.GLOBAL_LOG_LEVEL = logging.DEBUG
python
def setup_options(opts): '''Takes any actions necessary based on command line options''' if opts.quiet: logger.log.setLevel(logging.WARNING) logger.GLOBAL_LOG_LEVEL = logging.WARNING if opts.verbose: logger.log.setLevel(logging.DEBUG) logger.GLOBAL_LOG_LEVEL = logging.DEBUG
[ "def", "setup_options", "(", "opts", ")", ":", "if", "opts", ".", "quiet", ":", "logger", ".", "log", ".", "setLevel", "(", "logging", ".", "WARNING", ")", "logger", ".", "GLOBAL_LOG_LEVEL", "=", "logging", ".", "WARNING", "if", "opts", ".", "verbose", ...
Takes any actions necessary based on command line options
[ "Takes", "any", "actions", "necessary", "based", "on", "command", "line", "options" ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L55-L63
train
44,268
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
get_addon_module_name
def get_addon_module_name(addonxml_filename): '''Attempts to extract a module name for the given addon's addon.xml file. Looks for the 'xbmc.python.pluginsource' extension node and returns the addon's filename without the .py suffix. ''' try: xml = ET.parse(addonxml_filename).getroot() except IOError: sys.exit('Cannot find an addon.xml file in the current working ' 'directory. Please run this command from the root directory ' 'of an addon.') try: plugin_source = (ext for ext in xml.findall('extension') if ext.get('point') == 'xbmc.python.pluginsource').next() except StopIteration: sys.exit('ERROR, no pluginsource in addonxml') return plugin_source.get('library').split('.')[0]
python
def get_addon_module_name(addonxml_filename): '''Attempts to extract a module name for the given addon's addon.xml file. Looks for the 'xbmc.python.pluginsource' extension node and returns the addon's filename without the .py suffix. ''' try: xml = ET.parse(addonxml_filename).getroot() except IOError: sys.exit('Cannot find an addon.xml file in the current working ' 'directory. Please run this command from the root directory ' 'of an addon.') try: plugin_source = (ext for ext in xml.findall('extension') if ext.get('point') == 'xbmc.python.pluginsource').next() except StopIteration: sys.exit('ERROR, no pluginsource in addonxml') return plugin_source.get('library').split('.')[0]
[ "def", "get_addon_module_name", "(", "addonxml_filename", ")", ":", "try", ":", "xml", "=", "ET", ".", "parse", "(", "addonxml_filename", ")", ".", "getroot", "(", ")", "except", "IOError", ":", "sys", ".", "exit", "(", "'Cannot find an addon.xml file in the cur...
Attempts to extract a module name for the given addon's addon.xml file. Looks for the 'xbmc.python.pluginsource' extension node and returns the addon's filename without the .py suffix.
[ "Attempts", "to", "extract", "a", "module", "name", "for", "the", "given", "addon", "s", "addon", ".", "xml", "file", ".", "Looks", "for", "the", "xbmc", ".", "python", ".", "pluginsource", "extension", "node", "and", "returns", "the", "addon", "s", "fil...
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L66-L84
train
44,269
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
once
def once(plugin, parent_stack=None): '''A run mode for the CLI that runs the plugin once and exits.''' plugin.clear_added_items() items = plugin.run() # if update_listing=True, we need to remove the last url from the parent # stack if parent_stack and plugin._update_listing: del parent_stack[-1] # if we have parent items, include the most recent in the display if parent_stack: items.insert(0, parent_stack[-1]) display_listitems(items, plugin.request.url) return items
python
def once(plugin, parent_stack=None): '''A run mode for the CLI that runs the plugin once and exits.''' plugin.clear_added_items() items = plugin.run() # if update_listing=True, we need to remove the last url from the parent # stack if parent_stack and plugin._update_listing: del parent_stack[-1] # if we have parent items, include the most recent in the display if parent_stack: items.insert(0, parent_stack[-1]) display_listitems(items, plugin.request.url) return items
[ "def", "once", "(", "plugin", ",", "parent_stack", "=", "None", ")", ":", "plugin", ".", "clear_added_items", "(", ")", "items", "=", "plugin", ".", "run", "(", ")", "# if update_listing=True, we need to remove the last url from the parent", "# stack", "if", "parent...
A run mode for the CLI that runs the plugin once and exits.
[ "A", "run", "mode", "for", "the", "CLI", "that", "runs", "the", "plugin", "once", "and", "exits", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L147-L162
train
44,270
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
interactive
def interactive(plugin): '''A run mode for the CLI that runs the plugin in a loop based on user input. ''' items = [item for item in once(plugin) if not item.get_played()] parent_stack = [] # Keep track of parents so we can have a '..' option selected_item = get_user_choice(items) while selected_item is not None: if parent_stack and selected_item == parent_stack[-1]: # User selected the parent item, remove from list parent_stack.pop() else: # User selected non parent item, add current url to parent stack parent_stack.append(ListItem.from_dict(label='..', path=plugin.request.url)) patch_plugin(plugin, selected_item.get_path()) items = [item for item in once(plugin, parent_stack=parent_stack) if not item.get_played()] selected_item = get_user_choice(items)
python
def interactive(plugin): '''A run mode for the CLI that runs the plugin in a loop based on user input. ''' items = [item for item in once(plugin) if not item.get_played()] parent_stack = [] # Keep track of parents so we can have a '..' option selected_item = get_user_choice(items) while selected_item is not None: if parent_stack and selected_item == parent_stack[-1]: # User selected the parent item, remove from list parent_stack.pop() else: # User selected non parent item, add current url to parent stack parent_stack.append(ListItem.from_dict(label='..', path=plugin.request.url)) patch_plugin(plugin, selected_item.get_path()) items = [item for item in once(plugin, parent_stack=parent_stack) if not item.get_played()] selected_item = get_user_choice(items)
[ "def", "interactive", "(", "plugin", ")", ":", "items", "=", "[", "item", "for", "item", "in", "once", "(", "plugin", ")", "if", "not", "item", ".", "get_played", "(", ")", "]", "parent_stack", "=", "[", "]", "# Keep track of parents so we can have a '..' op...
A run mode for the CLI that runs the plugin in a loop based on user input.
[ "A", "run", "mode", "for", "the", "CLI", "that", "runs", "the", "plugin", "in", "a", "loop", "based", "on", "user", "input", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L165-L185
train
44,271
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
crawl
def crawl(plugin): '''Performs a breadth-first crawl of all possible routes from the starting path. Will only visit a URL once, even if it is referenced multiple times in a plugin. Requires user interaction in between each fetch. ''' # TODO: use OrderedSet? paths_visited = set() paths_to_visit = set(item.get_path() for item in once(plugin)) while paths_to_visit and continue_or_quit(): path = paths_to_visit.pop() paths_visited.add(path) # Run the new listitem patch_plugin(plugin, path) new_paths = set(item.get_path() for item in once(plugin)) # Filter new items by checking against urls_visited and # urls_tovisit paths_to_visit.update(path for path in new_paths if path not in paths_visited)
python
def crawl(plugin): '''Performs a breadth-first crawl of all possible routes from the starting path. Will only visit a URL once, even if it is referenced multiple times in a plugin. Requires user interaction in between each fetch. ''' # TODO: use OrderedSet? paths_visited = set() paths_to_visit = set(item.get_path() for item in once(plugin)) while paths_to_visit and continue_or_quit(): path = paths_to_visit.pop() paths_visited.add(path) # Run the new listitem patch_plugin(plugin, path) new_paths = set(item.get_path() for item in once(plugin)) # Filter new items by checking against urls_visited and # urls_tovisit paths_to_visit.update(path for path in new_paths if path not in paths_visited)
[ "def", "crawl", "(", "plugin", ")", ":", "# TODO: use OrderedSet?", "paths_visited", "=", "set", "(", ")", "paths_to_visit", "=", "set", "(", "item", ".", "get_path", "(", ")", "for", "item", "in", "once", "(", "plugin", ")", ")", "while", "paths_to_visit"...
Performs a breadth-first crawl of all possible routes from the starting path. Will only visit a URL once, even if it is referenced multiple times in a plugin. Requires user interaction in between each fetch.
[ "Performs", "a", "breadth", "-", "first", "crawl", "of", "all", "possible", "routes", "from", "the", "starting", "path", ".", "Will", "only", "visit", "a", "URL", "once", "even", "if", "it", "is", "referenced", "multiple", "times", "in", "a", "plugin", "...
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L188-L209
train
44,272
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
RunCommand.run
def run(opts, args): '''The run method for the 'run' command. Executes a plugin from the command line. ''' setup_options(opts) mode = Modes.ONCE if len(args) > 0 and hasattr(Modes, args[0].upper()): _mode = args.pop(0).upper() mode = getattr(Modes, _mode) url = None if len(args) > 0: # A url was specified url = args.pop(0) plugin_mgr = PluginManager.load_plugin_from_addonxml(mode, url) plugin_mgr.run()
python
def run(opts, args): '''The run method for the 'run' command. Executes a plugin from the command line. ''' setup_options(opts) mode = Modes.ONCE if len(args) > 0 and hasattr(Modes, args[0].upper()): _mode = args.pop(0).upper() mode = getattr(Modes, _mode) url = None if len(args) > 0: # A url was specified url = args.pop(0) plugin_mgr = PluginManager.load_plugin_from_addonxml(mode, url) plugin_mgr.run()
[ "def", "run", "(", "opts", ",", "args", ")", ":", "setup_options", "(", "opts", ")", "mode", "=", "Modes", ".", "ONCE", "if", "len", "(", "args", ")", ">", "0", "and", "hasattr", "(", "Modes", ",", "args", "[", "0", "]", ".", "upper", "(", ")",...
The run method for the 'run' command. Executes a plugin from the command line.
[ "The", "run", "method", "for", "the", "run", "command", ".", "Executes", "a", "plugin", "from", "the", "command", "line", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L35-L52
train
44,273
jbeluch/xbmcswift2
xbmcswift2/cli/app.py
PluginManager.run
def run(self): '''This method runs the the plugin in the appropriate mode parsed from the command line options. ''' handle = 0 handlers = { Modes.ONCE: once, Modes.CRAWL: crawl, Modes.INTERACTIVE: interactive, } handler = handlers[self.mode] patch_sysargv(self.url or 'plugin://%s/' % self.plugin.id, handle) return handler(self.plugin)
python
def run(self): '''This method runs the the plugin in the appropriate mode parsed from the command line options. ''' handle = 0 handlers = { Modes.ONCE: once, Modes.CRAWL: crawl, Modes.INTERACTIVE: interactive, } handler = handlers[self.mode] patch_sysargv(self.url or 'plugin://%s/' % self.plugin.id, handle) return handler(self.plugin)
[ "def", "run", "(", "self", ")", ":", "handle", "=", "0", "handlers", "=", "{", "Modes", ".", "ONCE", ":", "once", ",", "Modes", ".", "CRAWL", ":", "crawl", ",", "Modes", ".", "INTERACTIVE", ":", "interactive", ",", "}", "handler", "=", "handlers", ...
This method runs the the plugin in the appropriate mode parsed from the command line options.
[ "This", "method", "runs", "the", "the", "plugin", "in", "the", "appropriate", "mode", "parsed", "from", "the", "command", "line", "options", "." ]
0e7a3642499554edc8265fdf1ba6c5ee567daa78
https://github.com/jbeluch/xbmcswift2/blob/0e7a3642499554edc8265fdf1ba6c5ee567daa78/xbmcswift2/cli/app.py#L117-L129
train
44,274
orlnub123/cleverbot.py
cleverbot/async_/cleverbot.py
Cleverbot.conversation
def conversation(self, name=None, **kwargs): """Make a new conversation. Arguments: name: The key for the dictionary the conversation will be stored as in conversations. If None the conversation will be stored as a list instead. Mixing both types results in an error. **kwargs: Keyword arguments to pass into the new conversation. These accept the same arguments as Cleverbot. Returns: The new conversation. """ convo = Conversation(self, **kwargs) super().conversation(name, convo) return convo
python
def conversation(self, name=None, **kwargs): """Make a new conversation. Arguments: name: The key for the dictionary the conversation will be stored as in conversations. If None the conversation will be stored as a list instead. Mixing both types results in an error. **kwargs: Keyword arguments to pass into the new conversation. These accept the same arguments as Cleverbot. Returns: The new conversation. """ convo = Conversation(self, **kwargs) super().conversation(name, convo) return convo
[ "def", "conversation", "(", "self", ",", "name", "=", "None", ",", "*", "*", "kwargs", ")", ":", "convo", "=", "Conversation", "(", "self", ",", "*", "*", "kwargs", ")", "super", "(", ")", ".", "conversation", "(", "name", ",", "convo", ")", "retur...
Make a new conversation. Arguments: name: The key for the dictionary the conversation will be stored as in conversations. If None the conversation will be stored as a list instead. Mixing both types results in an error. **kwargs: Keyword arguments to pass into the new conversation. These accept the same arguments as Cleverbot. Returns: The new conversation.
[ "Make", "a", "new", "conversation", "." ]
83aa45fc2582c30d8646372d9e09756525af931f
https://github.com/orlnub123/cleverbot.py/blob/83aa45fc2582c30d8646372d9e09756525af931f/cleverbot/async_/cleverbot.py#L73-L88
train
44,275
mozilla/amo-validator
validator/validate.py
validate
def validate(path, format='json', approved_applications=None, determined=True, listed=True, expectation=PACKAGE_ANY, for_appversions=None, overrides=None, timeout=-1, compat_test=False, **kw): """ Perform validation in one easy step! `path`: *Required* A file system path to the package to be validated. `format`: The format to return the results in. Defaults to "json". Currently, any other format will simply return the error bundle. `approved_applications`: Path to the list of approved application versions `determined`: If set to `False`, validation will halt at the end of the first tier that raises errors. `listed`: Whether the app is headed for the app marketplace or AMO. Defaults to `True`. `expectation`: The type of package that should be expected. Must be a symbolic constant from validator.constants (i.e.: validator.constants.PACKAGE_*). Defaults to PACKAGE_ANY. `for_appversions`: A dict of app GUIDs referencing lists of versions. Determines which version-dependant tests should be run. `timeout`: Number of seconds before aborting addon validation, or -1 to run with no timeout. `compat_tests`: A flag to signal the validator to skip tests which should not be run during compatibility bumps. Defaults to `False`. """ bundle = ErrorBundle(listed=listed, determined=determined, overrides=overrides, for_appversions=for_appversions) bundle.save_resource('is_compat_test', compat_test) if approved_applications is None: approved_applications = os.path.join(os.path.dirname(__file__), 'app_versions.json') if isinstance(approved_applications, types.StringTypes): # Load up the target applications if the approved applications is a # path (string). with open(approved_applications) as approved_apps: apps = json.load(approved_apps) elif isinstance(approved_applications, dict): # If the lists of approved applications are already in a dict, just use # that instead of trying to pull from a file. 
apps = approved_applications else: raise ValueError('Unknown format for `approved_applications`.') constants.APPROVED_APPLICATIONS.clear() constants.APPROVED_APPLICATIONS.update(apps) submain.prepare_package(bundle, path, expectation, for_appversions=for_appversions, timeout=timeout) return format_result(bundle, format)
python
def validate(path, format='json', approved_applications=None, determined=True, listed=True, expectation=PACKAGE_ANY, for_appversions=None, overrides=None, timeout=-1, compat_test=False, **kw): """ Perform validation in one easy step! `path`: *Required* A file system path to the package to be validated. `format`: The format to return the results in. Defaults to "json". Currently, any other format will simply return the error bundle. `approved_applications`: Path to the list of approved application versions `determined`: If set to `False`, validation will halt at the end of the first tier that raises errors. `listed`: Whether the app is headed for the app marketplace or AMO. Defaults to `True`. `expectation`: The type of package that should be expected. Must be a symbolic constant from validator.constants (i.e.: validator.constants.PACKAGE_*). Defaults to PACKAGE_ANY. `for_appversions`: A dict of app GUIDs referencing lists of versions. Determines which version-dependant tests should be run. `timeout`: Number of seconds before aborting addon validation, or -1 to run with no timeout. `compat_tests`: A flag to signal the validator to skip tests which should not be run during compatibility bumps. Defaults to `False`. """ bundle = ErrorBundle(listed=listed, determined=determined, overrides=overrides, for_appversions=for_appversions) bundle.save_resource('is_compat_test', compat_test) if approved_applications is None: approved_applications = os.path.join(os.path.dirname(__file__), 'app_versions.json') if isinstance(approved_applications, types.StringTypes): # Load up the target applications if the approved applications is a # path (string). with open(approved_applications) as approved_apps: apps = json.load(approved_apps) elif isinstance(approved_applications, dict): # If the lists of approved applications are already in a dict, just use # that instead of trying to pull from a file. 
apps = approved_applications else: raise ValueError('Unknown format for `approved_applications`.') constants.APPROVED_APPLICATIONS.clear() constants.APPROVED_APPLICATIONS.update(apps) submain.prepare_package(bundle, path, expectation, for_appversions=for_appversions, timeout=timeout) return format_result(bundle, format)
[ "def", "validate", "(", "path", ",", "format", "=", "'json'", ",", "approved_applications", "=", "None", ",", "determined", "=", "True", ",", "listed", "=", "True", ",", "expectation", "=", "PACKAGE_ANY", ",", "for_appversions", "=", "None", ",", "overrides"...
Perform validation in one easy step! `path`: *Required* A file system path to the package to be validated. `format`: The format to return the results in. Defaults to "json". Currently, any other format will simply return the error bundle. `approved_applications`: Path to the list of approved application versions `determined`: If set to `False`, validation will halt at the end of the first tier that raises errors. `listed`: Whether the app is headed for the app marketplace or AMO. Defaults to `True`. `expectation`: The type of package that should be expected. Must be a symbolic constant from validator.constants (i.e.: validator.constants.PACKAGE_*). Defaults to PACKAGE_ANY. `for_appversions`: A dict of app GUIDs referencing lists of versions. Determines which version-dependant tests should be run. `timeout`: Number of seconds before aborting addon validation, or -1 to run with no timeout. `compat_tests`: A flag to signal the validator to skip tests which should not be run during compatibility bumps. Defaults to `False`.
[ "Perform", "validation", "in", "one", "easy", "step!" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/validate.py#L14-L83
train
44,276
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.system_error
def system_error(self, msg_id=None, message=None, description=None, validation_timeout=False, exc_info=None, **kw): """Add an error message for an unexpected exception in validator code, and move it to the front of the error message list. If `exc_info` is supplied, the error will be logged. If the error is a validation timeout, it is re-raised unless `msg_id` is "validation_timeout".""" if exc_info: if (isinstance(exc_info[1], validator.ValidationTimeout) and msg_id != 'validation_timeout'): # These should always propagate to the top-level exception # handler, and be reported only once. raise exc_info[1] log.error('Unexpected error during validation: %s: %s' % (exc_info[0].__name__, exc_info[1]), exc_info=exc_info) full_id = ('validator', 'unexpected_exception') if msg_id: full_id += (msg_id,) self.error(full_id, message or 'An unexpected error has occurred.', description or ('Validation was unable to complete successfully due ' 'to an unexpected error.', 'The error has been logged, but please consider ' 'filing an issue report here: ' 'https://bit.ly/1POrYYU'), tier=1, **kw) # Move the error message to the beginning of the list. self.errors.insert(0, self.errors.pop())
python
def system_error(self, msg_id=None, message=None, description=None, validation_timeout=False, exc_info=None, **kw): """Add an error message for an unexpected exception in validator code, and move it to the front of the error message list. If `exc_info` is supplied, the error will be logged. If the error is a validation timeout, it is re-raised unless `msg_id` is "validation_timeout".""" if exc_info: if (isinstance(exc_info[1], validator.ValidationTimeout) and msg_id != 'validation_timeout'): # These should always propagate to the top-level exception # handler, and be reported only once. raise exc_info[1] log.error('Unexpected error during validation: %s: %s' % (exc_info[0].__name__, exc_info[1]), exc_info=exc_info) full_id = ('validator', 'unexpected_exception') if msg_id: full_id += (msg_id,) self.error(full_id, message or 'An unexpected error has occurred.', description or ('Validation was unable to complete successfully due ' 'to an unexpected error.', 'The error has been logged, but please consider ' 'filing an issue report here: ' 'https://bit.ly/1POrYYU'), tier=1, **kw) # Move the error message to the beginning of the list. self.errors.insert(0, self.errors.pop())
[ "def", "system_error", "(", "self", ",", "msg_id", "=", "None", ",", "message", "=", "None", ",", "description", "=", "None", ",", "validation_timeout", "=", "False", ",", "exc_info", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "exc_info", ":", ...
Add an error message for an unexpected exception in validator code, and move it to the front of the error message list. If `exc_info` is supplied, the error will be logged. If the error is a validation timeout, it is re-raised unless `msg_id` is "validation_timeout".
[ "Add", "an", "error", "message", "for", "an", "unexpected", "exception", "in", "validator", "code", "and", "move", "it", "to", "the", "front", "of", "the", "error", "message", "list", ".", "If", "exc_info", "is", "supplied", "the", "error", "will", "be", ...
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L124-L159
train
44,277
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.drop_message
def drop_message(self, message): """Drop the given message object from the appropriate message list. Returns True if the message was found, otherwise False.""" for type_ in 'errors', 'warnings', 'notices': list_ = getattr(self, type_) if message in list_: list_.remove(message) if 'signing_severity' in message: self.signing_summary[message['signing_severity']] -= 1 return True return False
python
def drop_message(self, message): """Drop the given message object from the appropriate message list. Returns True if the message was found, otherwise False.""" for type_ in 'errors', 'warnings', 'notices': list_ = getattr(self, type_) if message in list_: list_.remove(message) if 'signing_severity' in message: self.signing_summary[message['signing_severity']] -= 1 return True return False
[ "def", "drop_message", "(", "self", ",", "message", ")", ":", "for", "type_", "in", "'errors'", ",", "'warnings'", ",", "'notices'", ":", "list_", "=", "getattr", "(", "self", ",", "type_", ")", "if", "message", "in", "list_", ":", "list_", ".", "remov...
Drop the given message object from the appropriate message list. Returns True if the message was found, otherwise False.
[ "Drop", "the", "given", "message", "object", "from", "the", "appropriate", "message", "list", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L161-L174
train
44,278
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.set_tier
def set_tier(self, tier): 'Updates the tier and ending tier' self.tier = tier if tier > self.ending_tier: self.ending_tier = tier
python
def set_tier(self, tier): 'Updates the tier and ending tier' self.tier = tier if tier > self.ending_tier: self.ending_tier = tier
[ "def", "set_tier", "(", "self", ",", "tier", ")", ":", "self", ".", "tier", "=", "tier", "if", "tier", ">", "self", ".", "ending_tier", ":", "self", ".", "ending_tier", "=", "tier" ]
Updates the tier and ending tier
[ "Updates", "the", "tier", "and", "ending", "tier" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L176-L180
train
44,279
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle._save_message
def _save_message(self, stack, type_, message, context=None, from_merge=False): 'Stores a message in the appropriate message stack.' uid = uuid.uuid4().hex message['uid'] = uid # Get the context for the message (if there's a context available) if context is not None: if isinstance(context, tuple): message['context'] = context else: message['context'] = ( context.get_context(line=message['line'], column=message['column'])) else: message['context'] = None if self.package_stack: if not isinstance(message['file'], list): message['file'] = [message['file']] message['file'] = self.package_stack + message['file'] # Test that if for_appversions is set that we're only applying to # supported add-ons. THIS IS THE LAST FILTER BEFORE THE MESSAGE IS # ADDED TO THE STACK! if message['for_appversions']: if not self.supports_version(message['for_appversions']): if self.instant: print '(Instant error discarded)' self._print_message(type_, message, verbose=True) return elif self.version_requirements: # If there was no for_appversions but there were version # requirements detailed in the decorator, use the ones from the # decorator. message['for_appversions'] = self.version_requirements # Save the message to the stack. stack.append(message) # Mark the tier that the error occurred at. if message['tier'] is None: message['tier'] = self.tier # Build out the compatibility summary if possible. if message['compatibility_type'] and not from_merge: self.compat_summary['%ss' % message['compatibility_type']] += 1 # Build out the message tree entry. if message['id']: tree = self.message_tree last_id = None for eid in message['id']: if last_id is not None: tree = tree[last_id] if eid not in tree: tree[eid] = {'__errors': 0, '__warnings': 0, '__notices': 0, '__messages': []} tree[eid]['__%s' % type_] += 1 last_id = eid tree[last_id]['__messages'].append(uid) # If instant mode is turned on, output the message immediately. if self.instant: self._print_message(type_, message, verbose=True)
python
def _save_message(self, stack, type_, message, context=None, from_merge=False): 'Stores a message in the appropriate message stack.' uid = uuid.uuid4().hex message['uid'] = uid # Get the context for the message (if there's a context available) if context is not None: if isinstance(context, tuple): message['context'] = context else: message['context'] = ( context.get_context(line=message['line'], column=message['column'])) else: message['context'] = None if self.package_stack: if not isinstance(message['file'], list): message['file'] = [message['file']] message['file'] = self.package_stack + message['file'] # Test that if for_appversions is set that we're only applying to # supported add-ons. THIS IS THE LAST FILTER BEFORE THE MESSAGE IS # ADDED TO THE STACK! if message['for_appversions']: if not self.supports_version(message['for_appversions']): if self.instant: print '(Instant error discarded)' self._print_message(type_, message, verbose=True) return elif self.version_requirements: # If there was no for_appversions but there were version # requirements detailed in the decorator, use the ones from the # decorator. message['for_appversions'] = self.version_requirements # Save the message to the stack. stack.append(message) # Mark the tier that the error occurred at. if message['tier'] is None: message['tier'] = self.tier # Build out the compatibility summary if possible. if message['compatibility_type'] and not from_merge: self.compat_summary['%ss' % message['compatibility_type']] += 1 # Build out the message tree entry. if message['id']: tree = self.message_tree last_id = None for eid in message['id']: if last_id is not None: tree = tree[last_id] if eid not in tree: tree[eid] = {'__errors': 0, '__warnings': 0, '__notices': 0, '__messages': []} tree[eid]['__%s' % type_] += 1 last_id = eid tree[last_id]['__messages'].append(uid) # If instant mode is turned on, output the message immediately. if self.instant: self._print_message(type_, message, verbose=True)
[ "def", "_save_message", "(", "self", ",", "stack", ",", "type_", ",", "message", ",", "context", "=", "None", ",", "from_merge", "=", "False", ")", ":", "uid", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "message", "[", "'uid'", "]", "=", "uid...
Stores a message in the appropriate message stack.
[ "Stores", "a", "message", "in", "the", "appropriate", "message", "stack", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L186-L255
train
44,280
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.failed
def failed(self, fail_on_warnings=True): """Returns a boolean value describing whether the validation succeeded or not.""" return bool(self.errors) or (fail_on_warnings and bool(self.warnings))
python
def failed(self, fail_on_warnings=True): """Returns a boolean value describing whether the validation succeeded or not.""" return bool(self.errors) or (fail_on_warnings and bool(self.warnings))
[ "def", "failed", "(", "self", ",", "fail_on_warnings", "=", "True", ")", ":", "return", "bool", "(", "self", ".", "errors", ")", "or", "(", "fail_on_warnings", "and", "bool", "(", "self", ".", "warnings", ")", ")" ]
Returns a boolean value describing whether the validation succeeded or not.
[ "Returns", "a", "boolean", "value", "describing", "whether", "the", "validation", "succeeded", "or", "not", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L257-L261
train
44,281
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.get_resource
def get_resource(self, name): 'Retrieves an object that has been stored by another test.' if name in self.resources: return self.resources[name] elif name in self.pushable_resources: return self.pushable_resources[name] else: return False
python
def get_resource(self, name): 'Retrieves an object that has been stored by another test.' if name in self.resources: return self.resources[name] elif name in self.pushable_resources: return self.pushable_resources[name] else: return False
[ "def", "get_resource", "(", "self", ",", "name", ")", ":", "if", "name", "in", "self", ".", "resources", ":", "return", "self", ".", "resources", "[", "name", "]", "elif", "name", "in", "self", ".", "pushable_resources", ":", "return", "self", ".", "pu...
Retrieves an object that has been stored by another test.
[ "Retrieves", "an", "object", "that", "has", "been", "stored", "by", "another", "test", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L263-L271
train
44,282
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.save_resource
def save_resource(self, name, resource, pushable=False): 'Saves an object such that it can be used by other tests.' if pushable: self.pushable_resources[name] = resource else: self.resources[name] = resource
python
def save_resource(self, name, resource, pushable=False): 'Saves an object such that it can be used by other tests.' if pushable: self.pushable_resources[name] = resource else: self.resources[name] = resource
[ "def", "save_resource", "(", "self", ",", "name", ",", "resource", ",", "pushable", "=", "False", ")", ":", "if", "pushable", ":", "self", ".", "pushable_resources", "[", "name", "]", "=", "resource", "else", ":", "self", ".", "resources", "[", "name", ...
Saves an object such that it can be used by other tests.
[ "Saves", "an", "object", "such", "that", "it", "can", "be", "used", "by", "other", "tests", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L273-L279
train
44,283
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.push_state
def push_state(self, new_file=''): 'Saves the current error state to parse subpackages' self.subpackages.append({'detected_type': self.detected_type, 'message_tree': self.message_tree, 'resources': self.pushable_resources, 'metadata': self.metadata}) self.message_tree = {} self.pushable_resources = {} self.metadata = {'requires_chrome': False, 'listed': self.metadata.get('listed'), 'validator_version': validator.__version__} self.package_stack.append(new_file)
python
def push_state(self, new_file=''): 'Saves the current error state to parse subpackages' self.subpackages.append({'detected_type': self.detected_type, 'message_tree': self.message_tree, 'resources': self.pushable_resources, 'metadata': self.metadata}) self.message_tree = {} self.pushable_resources = {} self.metadata = {'requires_chrome': False, 'listed': self.metadata.get('listed'), 'validator_version': validator.__version__} self.package_stack.append(new_file)
[ "def", "push_state", "(", "self", ",", "new_file", "=", "''", ")", ":", "self", ".", "subpackages", ".", "append", "(", "{", "'detected_type'", ":", "self", ".", "detected_type", ",", "'message_tree'", ":", "self", ".", "message_tree", ",", "'resources'", ...
Saves the current error state to parse subpackages
[ "Saves", "the", "current", "error", "state", "to", "parse", "subpackages" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L286-L300
train
44,284
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.pop_state
def pop_state(self): 'Retrieves the last saved state and restores it.' # Save a copy of the current state. state = self.subpackages.pop() metadata = self.metadata # We only rebuild message_tree anyway. No need to restore. # Copy the existing state back into place self.detected_type = state['detected_type'] self.message_tree = state['message_tree'] self.pushable_resources = state['resources'] self.metadata = state['metadata'] name = self.package_stack.pop() self.metadata.setdefault('sub_packages', {})[name] = metadata
python
def pop_state(self): 'Retrieves the last saved state and restores it.' # Save a copy of the current state. state = self.subpackages.pop() metadata = self.metadata # We only rebuild message_tree anyway. No need to restore. # Copy the existing state back into place self.detected_type = state['detected_type'] self.message_tree = state['message_tree'] self.pushable_resources = state['resources'] self.metadata = state['metadata'] name = self.package_stack.pop() self.metadata.setdefault('sub_packages', {})[name] = metadata
[ "def", "pop_state", "(", "self", ")", ":", "# Save a copy of the current state.", "state", "=", "self", ".", "subpackages", ".", "pop", "(", ")", "metadata", "=", "self", ".", "metadata", "# We only rebuild message_tree anyway. No need to restore.", "# Copy the existing s...
Retrieves the last saved state and restores it.
[ "Retrieves", "the", "last", "saved", "state", "and", "restores", "it", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L302-L318
train
44,285
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.render_json
def render_json(self): 'Returns a JSON summary of the validation operation.' types = {0: 'unknown', 1: 'extension', 2: 'theme', 3: 'dictionary', 4: 'langpack', 5: 'search', 8: 'webapp'} output = {'detected_type': types[self.detected_type], 'ending_tier': self.ending_tier, 'success': not self.failed(), 'messages': [], 'errors': len(self.errors), 'warnings': len(self.warnings), 'notices': len(self.notices), 'message_tree': self.message_tree, 'compatibility_summary': self.compat_summary, 'signing_summary': self.signing_summary, 'metadata': self.metadata} messages = output['messages'] # Copy messages to the JSON output for error in self.errors: error['type'] = 'error' messages.append(error) for warning in self.warnings: warning['type'] = 'warning' messages.append(warning) for notice in self.notices: notice['type'] = 'notice' messages.append(notice) # Output the JSON. return json.dumps(output)
python
def render_json(self): 'Returns a JSON summary of the validation operation.' types = {0: 'unknown', 1: 'extension', 2: 'theme', 3: 'dictionary', 4: 'langpack', 5: 'search', 8: 'webapp'} output = {'detected_type': types[self.detected_type], 'ending_tier': self.ending_tier, 'success': not self.failed(), 'messages': [], 'errors': len(self.errors), 'warnings': len(self.warnings), 'notices': len(self.notices), 'message_tree': self.message_tree, 'compatibility_summary': self.compat_summary, 'signing_summary': self.signing_summary, 'metadata': self.metadata} messages = output['messages'] # Copy messages to the JSON output for error in self.errors: error['type'] = 'error' messages.append(error) for warning in self.warnings: warning['type'] = 'warning' messages.append(warning) for notice in self.notices: notice['type'] = 'notice' messages.append(notice) # Output the JSON. return json.dumps(output)
[ "def", "render_json", "(", "self", ")", ":", "types", "=", "{", "0", ":", "'unknown'", ",", "1", ":", "'extension'", ",", "2", ":", "'theme'", ",", "3", ":", "'dictionary'", ",", "4", ":", "'langpack'", ",", "5", ":", "'search'", ",", "8", ":", "...
Returns a JSON summary of the validation operation.
[ "Returns", "a", "JSON", "summary", "of", "the", "validation", "operation", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L320-L358
train
44,286
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.print_summary
def print_summary(self, verbose=False, no_color=False): 'Prints a summary of the validation process so far.' types = {0: 'Unknown', 1: 'Extension/Multi-Extension', 2: 'Full Theme', 3: 'Dictionary', 4: 'Language Pack', 5: 'Search Provider', 7: 'Subpackage', 8: 'App'} detected_type = types[self.detected_type] buffer = StringIO() self.handler = OutputHandler(buffer, no_color) # Make a neat little printout. self.handler.write('\n<<GREEN>>Summary:') \ .write('-' * 30) \ .write('Detected type: <<BLUE>>%s' % detected_type) \ .write('-' * 30) if self.failed(): self.handler.write('<<BLUE>>Test failed! Errors:') # Print out all the errors/warnings: for error in self.errors: self._print_message('<<RED>>Error:<<NORMAL>>\t', error, verbose) for warning in self.warnings: self._print_message('<<YELLOW>>Warning:<<NORMAL>> ', warning, verbose) else: self.handler.write('<<GREEN>>All tests succeeded!') if self.notices: for notice in self.notices: self._print_message(prefix='<<WHITE>>Notice:<<NORMAL>>\t', message=notice, verbose=verbose) if 'is_jetpack' in self.metadata and verbose: self.handler.write('\n') self.handler.write('<<GREEN>>Jetpack add-on detected.<<NORMAL>>\n' 'Identified files:') if 'jetpack_identified_files' in self.metadata: for filename, data in \ self.metadata['jetpack_identified_files'].items(): self.handler.write((' %s\n' % filename) + (' %s : %s' % data)) if 'jetpack_unknown_files' in self.metadata: self.handler.write('Unknown files:') for filename in self.metadata['jetpack_unknown_files']: self.handler.write(' %s' % filename) self.handler.write('\n') if self.unfinished: self.handler.write('<<RED>>Validation terminated early') self.handler.write('Errors during validation are preventing ' 'the validation process from completing.') self.handler.write('Use the <<YELLOW>>--determined<<NORMAL>> ' 'flag to ignore these errors.') self.handler.write('\n') return buffer.getvalue()
python
def print_summary(self, verbose=False, no_color=False): 'Prints a summary of the validation process so far.' types = {0: 'Unknown', 1: 'Extension/Multi-Extension', 2: 'Full Theme', 3: 'Dictionary', 4: 'Language Pack', 5: 'Search Provider', 7: 'Subpackage', 8: 'App'} detected_type = types[self.detected_type] buffer = StringIO() self.handler = OutputHandler(buffer, no_color) # Make a neat little printout. self.handler.write('\n<<GREEN>>Summary:') \ .write('-' * 30) \ .write('Detected type: <<BLUE>>%s' % detected_type) \ .write('-' * 30) if self.failed(): self.handler.write('<<BLUE>>Test failed! Errors:') # Print out all the errors/warnings: for error in self.errors: self._print_message('<<RED>>Error:<<NORMAL>>\t', error, verbose) for warning in self.warnings: self._print_message('<<YELLOW>>Warning:<<NORMAL>> ', warning, verbose) else: self.handler.write('<<GREEN>>All tests succeeded!') if self.notices: for notice in self.notices: self._print_message(prefix='<<WHITE>>Notice:<<NORMAL>>\t', message=notice, verbose=verbose) if 'is_jetpack' in self.metadata and verbose: self.handler.write('\n') self.handler.write('<<GREEN>>Jetpack add-on detected.<<NORMAL>>\n' 'Identified files:') if 'jetpack_identified_files' in self.metadata: for filename, data in \ self.metadata['jetpack_identified_files'].items(): self.handler.write((' %s\n' % filename) + (' %s : %s' % data)) if 'jetpack_unknown_files' in self.metadata: self.handler.write('Unknown files:') for filename in self.metadata['jetpack_unknown_files']: self.handler.write(' %s' % filename) self.handler.write('\n') if self.unfinished: self.handler.write('<<RED>>Validation terminated early') self.handler.write('Errors during validation are preventing ' 'the validation process from completing.') self.handler.write('Use the <<YELLOW>>--determined<<NORMAL>> ' 'flag to ignore these errors.') self.handler.write('\n') return buffer.getvalue()
[ "def", "print_summary", "(", "self", ",", "verbose", "=", "False", ",", "no_color", "=", "False", ")", ":", "types", "=", "{", "0", ":", "'Unknown'", ",", "1", ":", "'Extension/Multi-Extension'", ",", "2", ":", "'Full Theme'", ",", "3", ":", "'Dictionary...
Prints a summary of the validation process so far.
[ "Prints", "a", "summary", "of", "the", "validation", "process", "so", "far", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L360-L425
train
44,287
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle._flatten_list
def _flatten_list(self, data): 'Flattens nested lists into strings.' if data is None: return '' if isinstance(data, types.StringTypes): return data elif isinstance(data, (list, tuple)): return '\n'.join(self._flatten_list(x) for x in data)
python
def _flatten_list(self, data): 'Flattens nested lists into strings.' if data is None: return '' if isinstance(data, types.StringTypes): return data elif isinstance(data, (list, tuple)): return '\n'.join(self._flatten_list(x) for x in data)
[ "def", "_flatten_list", "(", "self", ",", "data", ")", ":", "if", "data", "is", "None", ":", "return", "''", "if", "isinstance", "(", "data", ",", "types", ".", "StringTypes", ")", ":", "return", "data", "elif", "isinstance", "(", "data", ",", "(", "...
Flattens nested lists into strings.
[ "Flattens", "nested", "lists", "into", "strings", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L427-L435
train
44,288
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle._print_message
def _print_message(self, prefix, message, verbose=True): 'Prints a message and takes care of all sorts of nasty code' # Load up the standard output. output = ['\n', prefix, message['message']] # We have some extra stuff for verbose mode. if verbose: verbose_output = [] # Detailed problem description. if message['description']: verbose_output.append( self._flatten_list(message['description'])) if message.get('signing_severity'): verbose_output.append( ('\tAutomated signing severity: %s' % message['signing_severity'])) if message.get('signing_help'): verbose_output.append( '\tSuggestions for passing automated signing:') verbose_output.append( self._flatten_list(message['signing_help'])) # Show the user what tier we're on verbose_output.append('\tTier:\t%d' % message['tier']) # If file information is available, output that as well. files = message['file'] if files is not None and files != '': fmsg = '\tFile:\t%s' # Nested files (subpackes) are stored in a list. if type(files) is list: if files[-1] == '': files[-1] = '(none)' verbose_output.append(fmsg % ' > '.join(files)) else: verbose_output.append(fmsg % files) # If there is a line number, that gets put on the end. if message['line']: verbose_output.append('\tLine:\t%s' % message['line']) if message['column'] and message['column'] != 0: verbose_output.append('\tColumn:\t%d' % message['column']) if message.get('context'): verbose_output.append('\tContext:') verbose_output.extend([('\t> %s' % x if x is not None else '\t>' + ('-' * 20)) for x in message['context']]) # Stick it in with the standard items. output.append('\n') output.append('\n'.join(verbose_output)) # Send the final output to the handler to be rendered. self.handler.write(u''.join(map(unicodehelper.decode, output)))
python
def _print_message(self, prefix, message, verbose=True): 'Prints a message and takes care of all sorts of nasty code' # Load up the standard output. output = ['\n', prefix, message['message']] # We have some extra stuff for verbose mode. if verbose: verbose_output = [] # Detailed problem description. if message['description']: verbose_output.append( self._flatten_list(message['description'])) if message.get('signing_severity'): verbose_output.append( ('\tAutomated signing severity: %s' % message['signing_severity'])) if message.get('signing_help'): verbose_output.append( '\tSuggestions for passing automated signing:') verbose_output.append( self._flatten_list(message['signing_help'])) # Show the user what tier we're on verbose_output.append('\tTier:\t%d' % message['tier']) # If file information is available, output that as well. files = message['file'] if files is not None and files != '': fmsg = '\tFile:\t%s' # Nested files (subpackes) are stored in a list. if type(files) is list: if files[-1] == '': files[-1] = '(none)' verbose_output.append(fmsg % ' > '.join(files)) else: verbose_output.append(fmsg % files) # If there is a line number, that gets put on the end. if message['line']: verbose_output.append('\tLine:\t%s' % message['line']) if message['column'] and message['column'] != 0: verbose_output.append('\tColumn:\t%d' % message['column']) if message.get('context'): verbose_output.append('\tContext:') verbose_output.extend([('\t> %s' % x if x is not None else '\t>' + ('-' * 20)) for x in message['context']]) # Stick it in with the standard items. output.append('\n') output.append('\n'.join(verbose_output)) # Send the final output to the handler to be rendered. self.handler.write(u''.join(map(unicodehelper.decode, output)))
[ "def", "_print_message", "(", "self", ",", "prefix", ",", "message", ",", "verbose", "=", "True", ")", ":", "# Load up the standard output.", "output", "=", "[", "'\\n'", ",", "prefix", ",", "message", "[", "'message'", "]", "]", "# We have some extra stuff for ...
Prints a message and takes care of all sorts of nasty code
[ "Prints", "a", "message", "and", "takes", "care", "of", "all", "sorts", "of", "nasty", "code" ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L437-L498
train
44,289
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.supports_version
def supports_version(self, guid_set): """ Returns whether a GUID set in for_appversions format is compatbile with the current supported applications list. """ # Don't let the test run if we haven't parsed install.rdf yet. if self.supported_versions is None: raise Exception('Early compatibility test run before install.rdf ' 'was parsed.') return self._compare_version(requirements=guid_set, support=self.supported_versions)
python
def supports_version(self, guid_set): """ Returns whether a GUID set in for_appversions format is compatbile with the current supported applications list. """ # Don't let the test run if we haven't parsed install.rdf yet. if self.supported_versions is None: raise Exception('Early compatibility test run before install.rdf ' 'was parsed.') return self._compare_version(requirements=guid_set, support=self.supported_versions)
[ "def", "supports_version", "(", "self", ",", "guid_set", ")", ":", "# Don't let the test run if we haven't parsed install.rdf yet.", "if", "self", ".", "supported_versions", "is", "None", ":", "raise", "Exception", "(", "'Early compatibility test run before install.rdf '", "'...
Returns whether a GUID set in for_appversions format is compatbile with the current supported applications list.
[ "Returns", "whether", "a", "GUID", "set", "in", "for_appversions", "format", "is", "compatbile", "with", "the", "current", "supported", "applications", "list", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L500-L512
train
44,290
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle._compare_version
def _compare_version(self, requirements, support): """ Return whether there is an intersection between a support applications GUID set and a set of supported applications. """ for guid in requirements: # If we support any of the GUIDs in the guid_set, test if any of # the provided versions for the GUID are supported. if (guid in support and any((detected_version in requirements[guid]) for detected_version in support[guid])): return True
python
def _compare_version(self, requirements, support): """ Return whether there is an intersection between a support applications GUID set and a set of supported applications. """ for guid in requirements: # If we support any of the GUIDs in the guid_set, test if any of # the provided versions for the GUID are supported. if (guid in support and any((detected_version in requirements[guid]) for detected_version in support[guid])): return True
[ "def", "_compare_version", "(", "self", ",", "requirements", ",", "support", ")", ":", "for", "guid", "in", "requirements", ":", "# If we support any of the GUIDs in the guid_set, test if any of", "# the provided versions for the GUID are supported.", "if", "(", "guid", "in",...
Return whether there is an intersection between a support applications GUID set and a set of supported applications.
[ "Return", "whether", "there", "is", "an", "intersection", "between", "a", "support", "applications", "GUID", "set", "and", "a", "set", "of", "supported", "applications", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L514-L526
train
44,291
mozilla/amo-validator
validator/errorbundler.py
ErrorBundle.discard_unused_messages
def discard_unused_messages(self, ending_tier): """ Delete messages from errors, warnings, and notices whose tier is greater than the ending tier. """ stacks = [self.errors, self.warnings, self.notices] for stack in stacks: for message in stack: if message['tier'] > ending_tier: stack.remove(message)
python
def discard_unused_messages(self, ending_tier): """ Delete messages from errors, warnings, and notices whose tier is greater than the ending tier. """ stacks = [self.errors, self.warnings, self.notices] for stack in stacks: for message in stack: if message['tier'] > ending_tier: stack.remove(message)
[ "def", "discard_unused_messages", "(", "self", ",", "ending_tier", ")", ":", "stacks", "=", "[", "self", ".", "errors", ",", "self", ".", "warnings", ",", "self", ".", "notices", "]", "for", "stack", "in", "stacks", ":", "for", "message", "in", "stack", ...
Delete messages from errors, warnings, and notices whose tier is greater than the ending tier.
[ "Delete", "messages", "from", "errors", "warnings", "and", "notices", "whose", "tier", "is", "greater", "than", "the", "ending", "tier", "." ]
0251bfbd7d93106e01ecdb6de5fcd1dc1a180664
https://github.com/mozilla/amo-validator/blob/0251bfbd7d93106e01ecdb6de5fcd1dc1a180664/validator/errorbundler.py#L528-L538
train
44,292
orlnub123/cleverbot.py
cleverbot/migrations.py
migrator
def migrator(state): """Tweaks will be lost for Cleverbot and its conversations.""" for tweak in ('tweak1', 'tweak2', 'tweak3'): del state[0][tweak] for convo in state[1]: if tweak in convo: del convo[tweak] return state
python
def migrator(state): """Tweaks will be lost for Cleverbot and its conversations.""" for tweak in ('tweak1', 'tweak2', 'tweak3'): del state[0][tweak] for convo in state[1]: if tweak in convo: del convo[tweak] return state
[ "def", "migrator", "(", "state", ")", ":", "for", "tweak", "in", "(", "'tweak1'", ",", "'tweak2'", ",", "'tweak3'", ")", ":", "del", "state", "[", "0", "]", "[", "tweak", "]", "for", "convo", "in", "state", "[", "1", "]", ":", "if", "tweak", "in"...
Tweaks will be lost for Cleverbot and its conversations.
[ "Tweaks", "will", "be", "lost", "for", "Cleverbot", "and", "its", "conversations", "." ]
83aa45fc2582c30d8646372d9e09756525af931f
https://github.com/orlnub123/cleverbot.py/blob/83aa45fc2582c30d8646372d9e09756525af931f/cleverbot/migrations.py#L65-L72
train
44,293
orlnub123/cleverbot.py
cleverbot/migrations.py
migrator
def migrator(state): """Nameless conversations will be lost.""" cleverbot_kwargs, convos_kwargs = state cb = Cleverbot(**cleverbot_kwargs) for convo_kwargs in convos_kwargs: cb.conversation(**convo_kwargs) return cb
python
def migrator(state): """Nameless conversations will be lost.""" cleverbot_kwargs, convos_kwargs = state cb = Cleverbot(**cleverbot_kwargs) for convo_kwargs in convos_kwargs: cb.conversation(**convo_kwargs) return cb
[ "def", "migrator", "(", "state", ")", ":", "cleverbot_kwargs", ",", "convos_kwargs", "=", "state", "cb", "=", "Cleverbot", "(", "*", "*", "cleverbot_kwargs", ")", "for", "convo_kwargs", "in", "convos_kwargs", ":", "cb", ".", "conversation", "(", "*", "*", ...
Nameless conversations will be lost.
[ "Nameless", "conversations", "will", "be", "lost", "." ]
83aa45fc2582c30d8646372d9e09756525af931f
https://github.com/orlnub123/cleverbot.py/blob/83aa45fc2582c30d8646372d9e09756525af931f/cleverbot/migrations.py#L76-L82
train
44,294
senaite/senaite.core.supermodel
src/senaite/core/supermodel/model.py
SuperModel.init_with_uid
def init_with_uid(self, uid): """Initialize with an UID """ self._uid = uid self._brain = None self._catalog = None self._instance = None
python
def init_with_uid(self, uid): """Initialize with an UID """ self._uid = uid self._brain = None self._catalog = None self._instance = None
[ "def", "init_with_uid", "(", "self", ",", "uid", ")", ":", "self", ".", "_uid", "=", "uid", "self", ".", "_brain", "=", "None", "self", ".", "_catalog", "=", "None", "self", ".", "_instance", "=", "None" ]
Initialize with an UID
[ "Initialize", "with", "an", "UID" ]
1819154332b8776f187aa98a2e299701983a0119
https://github.com/senaite/senaite.core.supermodel/blob/1819154332b8776f187aa98a2e299701983a0119/src/senaite/core/supermodel/model.py#L69-L75
train
44,295
senaite/senaite.core.supermodel
src/senaite/core/supermodel/model.py
SuperModel.init_with_brain
def init_with_brain(self, brain): """Initialize with a catalog brain """ self._uid = api.get_uid(brain) self._brain = brain self._catalog = self.get_catalog_for(brain) self._instance = None
python
def init_with_brain(self, brain): """Initialize with a catalog brain """ self._uid = api.get_uid(brain) self._brain = brain self._catalog = self.get_catalog_for(brain) self._instance = None
[ "def", "init_with_brain", "(", "self", ",", "brain", ")", ":", "self", ".", "_uid", "=", "api", ".", "get_uid", "(", "brain", ")", "self", ".", "_brain", "=", "brain", "self", ".", "_catalog", "=", "self", ".", "get_catalog_for", "(", "brain", ")", "...
Initialize with a catalog brain
[ "Initialize", "with", "a", "catalog", "brain" ]
1819154332b8776f187aa98a2e299701983a0119
https://github.com/senaite/senaite.core.supermodel/blob/1819154332b8776f187aa98a2e299701983a0119/src/senaite/core/supermodel/model.py#L77-L83
train
44,296
senaite/senaite.core.supermodel
src/senaite/core/supermodel/model.py
SuperModel.init_with_instance
def init_with_instance(self, instance): """Initialize with an instance object """ self._uid = api.get_uid(instance) self._brain = None self._catalog = self.get_catalog_for(instance) self._instance = instance
python
def init_with_instance(self, instance): """Initialize with an instance object """ self._uid = api.get_uid(instance) self._brain = None self._catalog = self.get_catalog_for(instance) self._instance = instance
[ "def", "init_with_instance", "(", "self", ",", "instance", ")", ":", "self", ".", "_uid", "=", "api", ".", "get_uid", "(", "instance", ")", "self", ".", "_brain", "=", "None", "self", ".", "_catalog", "=", "self", ".", "get_catalog_for", "(", "instance",...
Initialize with an instance object
[ "Initialize", "with", "an", "instance", "object" ]
1819154332b8776f187aa98a2e299701983a0119
https://github.com/senaite/senaite.core.supermodel/blob/1819154332b8776f187aa98a2e299701983a0119/src/senaite/core/supermodel/model.py#L85-L91
train
44,297
senaite/senaite.core.supermodel
src/senaite/core/supermodel/model.py
SuperModel.process_value
def process_value(self, value): """Process publication value """ # UID -> SuperModel if api.is_uid(value): return self.to_super_model(value) # Content -> SuperModel elif api.is_object(value): return self.to_super_model(value) # String -> Unicode elif isinstance(value, basestring): return safe_unicode(value).encode("utf-8") # DateTime -> DateTime elif isinstance(value, DateTime): return value # Process list values elif isinstance(value, (LazyMap, list, tuple)): return map(self.process_value, value) # Process dict values elif isinstance(value, (dict)): return {k: self.process_value(v) for k, v in value.iteritems()} # Process function elif safe_callable(value): return self.process_value(value()) # Always return the unprocessed value last return value
python
def process_value(self, value): """Process publication value """ # UID -> SuperModel if api.is_uid(value): return self.to_super_model(value) # Content -> SuperModel elif api.is_object(value): return self.to_super_model(value) # String -> Unicode elif isinstance(value, basestring): return safe_unicode(value).encode("utf-8") # DateTime -> DateTime elif isinstance(value, DateTime): return value # Process list values elif isinstance(value, (LazyMap, list, tuple)): return map(self.process_value, value) # Process dict values elif isinstance(value, (dict)): return {k: self.process_value(v) for k, v in value.iteritems()} # Process function elif safe_callable(value): return self.process_value(value()) # Always return the unprocessed value last return value
[ "def", "process_value", "(", "self", ",", "value", ")", ":", "# UID -> SuperModel", "if", "api", ".", "is_uid", "(", "value", ")", ":", "return", "self", ".", "to_super_model", "(", "value", ")", "# Content -> SuperModel", "elif", "api", ".", "is_object", "(...
Process publication value
[ "Process", "publication", "value" ]
1819154332b8776f187aa98a2e299701983a0119
https://github.com/senaite/senaite.core.supermodel/blob/1819154332b8776f187aa98a2e299701983a0119/src/senaite/core/supermodel/model.py#L200-L225
train
44,298
senaite/senaite.core.supermodel
src/senaite/core/supermodel/model.py
SuperModel.instance
def instance(self): """Content instance of the wrapped object """ if self._instance is None: logger.debug("SuperModel::instance: *Wakup object*") self._instance = api.get_object(self.brain) return self._instance
python
def instance(self): """Content instance of the wrapped object """ if self._instance is None: logger.debug("SuperModel::instance: *Wakup object*") self._instance = api.get_object(self.brain) return self._instance
[ "def", "instance", "(", "self", ")", ":", "if", "self", ".", "_instance", "is", "None", ":", "logger", ".", "debug", "(", "\"SuperModel::instance: *Wakup object*\"", ")", "self", ".", "_instance", "=", "api", ".", "get_object", "(", "self", ".", "brain", "...
Content instance of the wrapped object
[ "Content", "instance", "of", "the", "wrapped", "object" ]
1819154332b8776f187aa98a2e299701983a0119
https://github.com/senaite/senaite.core.supermodel/blob/1819154332b8776f187aa98a2e299701983a0119/src/senaite/core/supermodel/model.py#L234-L240
train
44,299