repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
openstack/pymod2pkg
pymod2pkg/__init__.py
default_ubuntu_tr
def default_ubuntu_tr(mod): """ Default translation function for Ubuntu based systems """ pkg = 'python-%s' % mod.lower() py2pkg = pkg py3pkg = 'python3-%s' % mod.lower() return (pkg, py2pkg, py3pkg)
python
def default_ubuntu_tr(mod): """ Default translation function for Ubuntu based systems """ pkg = 'python-%s' % mod.lower() py2pkg = pkg py3pkg = 'python3-%s' % mod.lower() return (pkg, py2pkg, py3pkg)
[ "def", "default_ubuntu_tr", "(", "mod", ")", ":", "pkg", "=", "'python-%s'", "%", "mod", ".", "lower", "(", ")", "py2pkg", "=", "pkg", "py3pkg", "=", "'python3-%s'", "%", "mod", ".", "lower", "(", ")", "return", "(", "pkg", ",", "py2pkg", ",", "py3pk...
Default translation function for Ubuntu based systems
[ "Default", "translation", "function", "for", "Ubuntu", "based", "systems" ]
f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a
https://github.com/openstack/pymod2pkg/blob/f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a/pymod2pkg/__init__.py#L87-L94
train
50,000
openstack/pymod2pkg
pymod2pkg/__init__.py
default_suse_tr
def default_suse_tr(mod): """ Default translation function for openSUSE, SLES, and other SUSE based systems Returns a tuple of 3 elements - the unversioned name, the python2 versioned name and the python3 versioned name. """ pkg = 'python-%s' % mod py2pkg = 'python2-%s' % mod py3pkg = 'python3-%s' % mod return (pkg, py2pkg, py3pkg)
python
def default_suse_tr(mod): """ Default translation function for openSUSE, SLES, and other SUSE based systems Returns a tuple of 3 elements - the unversioned name, the python2 versioned name and the python3 versioned name. """ pkg = 'python-%s' % mod py2pkg = 'python2-%s' % mod py3pkg = 'python3-%s' % mod return (pkg, py2pkg, py3pkg)
[ "def", "default_suse_tr", "(", "mod", ")", ":", "pkg", "=", "'python-%s'", "%", "mod", "py2pkg", "=", "'python2-%s'", "%", "mod", "py3pkg", "=", "'python3-%s'", "%", "mod", "return", "(", "pkg", ",", "py2pkg", ",", "py3pkg", ")" ]
Default translation function for openSUSE, SLES, and other SUSE based systems Returns a tuple of 3 elements - the unversioned name, the python2 versioned name and the python3 versioned name.
[ "Default", "translation", "function", "for", "openSUSE", "SLES", "and", "other", "SUSE", "based", "systems" ]
f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a
https://github.com/openstack/pymod2pkg/blob/f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a/pymod2pkg/__init__.py#L97-L108
train
50,001
openstack/pymod2pkg
pymod2pkg/__init__.py
module2package
def module2package(mod, dist, pkg_map=None, py_vers=('py',)): """Return a corresponding package name for a python module. mod: python module name dist: a linux distribution as returned by `platform.linux_distribution()[0]` pkg_map: a custom package mapping. None means autodetected based on the given dist parameter py_vers: a list of python versions the function should return. Default is 'py' which is the unversioned translation. Possible values are 'py', 'py2' and 'py3' """ if not pkg_map: pkg_map = get_pkg_map(dist) for rule in pkg_map: pkglist = rule(mod, dist) if pkglist: break else: tr_func = get_default_tr_func(dist) pkglist = tr_func(mod) output = [] for v in py_vers: if v == 'py': output.append(pkglist[0]) elif v == 'py2': output.append(pkglist[1]) elif v == 'py3': output.append(pkglist[2]) else: raise Exception('Invalid version "%s"' % (v)) if len(output) == 1: # just return a single value (backwards compatible) return output[0] else: return output
python
def module2package(mod, dist, pkg_map=None, py_vers=('py',)): """Return a corresponding package name for a python module. mod: python module name dist: a linux distribution as returned by `platform.linux_distribution()[0]` pkg_map: a custom package mapping. None means autodetected based on the given dist parameter py_vers: a list of python versions the function should return. Default is 'py' which is the unversioned translation. Possible values are 'py', 'py2' and 'py3' """ if not pkg_map: pkg_map = get_pkg_map(dist) for rule in pkg_map: pkglist = rule(mod, dist) if pkglist: break else: tr_func = get_default_tr_func(dist) pkglist = tr_func(mod) output = [] for v in py_vers: if v == 'py': output.append(pkglist[0]) elif v == 'py2': output.append(pkglist[1]) elif v == 'py3': output.append(pkglist[2]) else: raise Exception('Invalid version "%s"' % (v)) if len(output) == 1: # just return a single value (backwards compatible) return output[0] else: return output
[ "def", "module2package", "(", "mod", ",", "dist", ",", "pkg_map", "=", "None", ",", "py_vers", "=", "(", "'py'", ",", ")", ")", ":", "if", "not", "pkg_map", ":", "pkg_map", "=", "get_pkg_map", "(", "dist", ")", "for", "rule", "in", "pkg_map", ":", ...
Return a corresponding package name for a python module. mod: python module name dist: a linux distribution as returned by `platform.linux_distribution()[0]` pkg_map: a custom package mapping. None means autodetected based on the given dist parameter py_vers: a list of python versions the function should return. Default is 'py' which is the unversioned translation. Possible values are 'py', 'py2' and 'py3'
[ "Return", "a", "corresponding", "package", "name", "for", "a", "python", "module", "." ]
f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a
https://github.com/openstack/pymod2pkg/blob/f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a/pymod2pkg/__init__.py#L359-L396
train
50,002
openstack/pymod2pkg
pymod2pkg/__init__.py
module2upstream
def module2upstream(mod): """Return a corresponding OpenStack upstream name for a python module. mod -- python module name """ for rule in OPENSTACK_UPSTREAM_PKG_MAP: pkglist = rule(mod, dist=None) if pkglist: return pkglist[0] return mod
python
def module2upstream(mod): """Return a corresponding OpenStack upstream name for a python module. mod -- python module name """ for rule in OPENSTACK_UPSTREAM_PKG_MAP: pkglist = rule(mod, dist=None) if pkglist: return pkglist[0] return mod
[ "def", "module2upstream", "(", "mod", ")", ":", "for", "rule", "in", "OPENSTACK_UPSTREAM_PKG_MAP", ":", "pkglist", "=", "rule", "(", "mod", ",", "dist", "=", "None", ")", "if", "pkglist", ":", "return", "pkglist", "[", "0", "]", "return", "mod" ]
Return a corresponding OpenStack upstream name for a python module. mod -- python module name
[ "Return", "a", "corresponding", "OpenStack", "upstream", "name", "for", "a", "python", "module", "." ]
f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a
https://github.com/openstack/pymod2pkg/blob/f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a/pymod2pkg/__init__.py#L399-L408
train
50,003
openstack/pymod2pkg
pymod2pkg/__init__.py
main
def main(): """for resolving names from command line""" parser = argparse.ArgumentParser(description='Python module name to' 'package name') group = parser.add_mutually_exclusive_group() group.add_argument('--dist', help='distribution style ' '(default: %(default)s)', default=platform.linux_distribution()[0]) group.add_argument('--upstream', help='map to OpenStack project name', action='store_true') parser.add_argument('--pyver', help='Python versions to return. "py" is ' 'the unversioned name', action='append', choices=['py', 'py2', 'py3'], default=[]) parser.add_argument('modulename', help='python module name') args = vars(parser.parse_args()) pyversions = args['pyver'] if args['pyver'] else ['py'] if args['upstream']: print(module2upstream(args['modulename'])) else: pylist = module2package(args['modulename'], args['dist'], py_vers=pyversions) # When only 1 version is requested, it will be returned as a string, # for backwards compatibility. Else, it will be a list. if type(pylist) is list: print(' '.join(pylist)) else: print(pylist)
python
def main(): """for resolving names from command line""" parser = argparse.ArgumentParser(description='Python module name to' 'package name') group = parser.add_mutually_exclusive_group() group.add_argument('--dist', help='distribution style ' '(default: %(default)s)', default=platform.linux_distribution()[0]) group.add_argument('--upstream', help='map to OpenStack project name', action='store_true') parser.add_argument('--pyver', help='Python versions to return. "py" is ' 'the unversioned name', action='append', choices=['py', 'py2', 'py3'], default=[]) parser.add_argument('modulename', help='python module name') args = vars(parser.parse_args()) pyversions = args['pyver'] if args['pyver'] else ['py'] if args['upstream']: print(module2upstream(args['modulename'])) else: pylist = module2package(args['modulename'], args['dist'], py_vers=pyversions) # When only 1 version is requested, it will be returned as a string, # for backwards compatibility. Else, it will be a list. if type(pylist) is list: print(' '.join(pylist)) else: print(pylist)
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Python module name to'", "'package name'", ")", "group", "=", "parser", ".", "add_mutually_exclusive_group", "(", ")", "group", ".", "add_argument", "(", "...
for resolving names from command line
[ "for", "resolving", "names", "from", "command", "line" ]
f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a
https://github.com/openstack/pymod2pkg/blob/f9a2f02fbfa0b2cfcdb4a7494c9ddbd10859065a/pymod2pkg/__init__.py#L411-L440
train
50,004
InfoAgeTech/django-core
django_core/views/mixins/paging.py
PagingViewMixin.get_paging
def get_paging(self): """Gets the paging values passed through the query string params. * "p" for "page number" and * "ps" for "page size". :returns: tuple with the page being the first part and the page size being the second part. """ orig_page_num = self.page_num orig_page_size = self.page_size try: page_num = int(self.request.GET.get(self.page_kwarg or 'p')) if page_num < 1: page_num = orig_page_num except: page_num = orig_page_num try: orig_page_size = self.page_size page_size = int(self.request.GET.get(self.page_size_kwarg or 'ps')) if page_size < 1: page_size = orig_page_size except: page_size = orig_page_size return page_num, page_size
python
def get_paging(self): """Gets the paging values passed through the query string params. * "p" for "page number" and * "ps" for "page size". :returns: tuple with the page being the first part and the page size being the second part. """ orig_page_num = self.page_num orig_page_size = self.page_size try: page_num = int(self.request.GET.get(self.page_kwarg or 'p')) if page_num < 1: page_num = orig_page_num except: page_num = orig_page_num try: orig_page_size = self.page_size page_size = int(self.request.GET.get(self.page_size_kwarg or 'ps')) if page_size < 1: page_size = orig_page_size except: page_size = orig_page_size return page_num, page_size
[ "def", "get_paging", "(", "self", ")", ":", "orig_page_num", "=", "self", ".", "page_num", "orig_page_size", "=", "self", ".", "page_size", "try", ":", "page_num", "=", "int", "(", "self", ".", "request", ".", "GET", ".", "get", "(", "self", ".", "page...
Gets the paging values passed through the query string params. * "p" for "page number" and * "ps" for "page size". :returns: tuple with the page being the first part and the page size being the second part.
[ "Gets", "the", "paging", "values", "passed", "through", "the", "query", "string", "params", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/views/mixins/paging.py#L38-L68
train
50,005
dls-controls/annotypes
annotypes/_calltypes.py
make_call_types
def make_call_types(f, globals_d): # type: (Callable, Dict) -> Tuple[Dict[str, Anno], Anno] """Make a call_types dictionary that describes what arguments to pass to f Args: f: The function to inspect for argument names (without self) globals_d: A dictionary of globals to lookup annotation definitions in """ arg_spec = getargspec(f) args = [k for k in arg_spec.args if k != "self"] defaults = {} # type: Dict[str, Any] if arg_spec.defaults: default_args = args[-len(arg_spec.defaults):] for a, default in zip(default_args, arg_spec.defaults): defaults[a] = default if not getattr(f, "__annotations__", None): # Make string annotations from the type comment if there is one annotations = make_annotations(f, globals_d) else: annotations = f.__annotations__ call_types = OrderedDict() # type: Dict[str, Anno] for a in args: anno = anno_with_default(annotations[a], defaults.get(a, NO_DEFAULT)) assert isinstance(anno, Anno), \ "Argument %r has type %r which is not an Anno" % (a, anno) call_types[a] = anno return_type = anno_with_default(annotations.get("return", None)) if return_type is Any: return_type = Anno("Any return value", Any, "return") assert return_type is None or isinstance(return_type, Anno), \ "Return has type %r which is not an Anno" % (return_type,) return call_types, return_type
python
def make_call_types(f, globals_d): # type: (Callable, Dict) -> Tuple[Dict[str, Anno], Anno] """Make a call_types dictionary that describes what arguments to pass to f Args: f: The function to inspect for argument names (without self) globals_d: A dictionary of globals to lookup annotation definitions in """ arg_spec = getargspec(f) args = [k for k in arg_spec.args if k != "self"] defaults = {} # type: Dict[str, Any] if arg_spec.defaults: default_args = args[-len(arg_spec.defaults):] for a, default in zip(default_args, arg_spec.defaults): defaults[a] = default if not getattr(f, "__annotations__", None): # Make string annotations from the type comment if there is one annotations = make_annotations(f, globals_d) else: annotations = f.__annotations__ call_types = OrderedDict() # type: Dict[str, Anno] for a in args: anno = anno_with_default(annotations[a], defaults.get(a, NO_DEFAULT)) assert isinstance(anno, Anno), \ "Argument %r has type %r which is not an Anno" % (a, anno) call_types[a] = anno return_type = anno_with_default(annotations.get("return", None)) if return_type is Any: return_type = Anno("Any return value", Any, "return") assert return_type is None or isinstance(return_type, Anno), \ "Return has type %r which is not an Anno" % (return_type,) return call_types, return_type
[ "def", "make_call_types", "(", "f", ",", "globals_d", ")", ":", "# type: (Callable, Dict) -> Tuple[Dict[str, Anno], Anno]", "arg_spec", "=", "getargspec", "(", "f", ")", "args", "=", "[", "k", "for", "k", "in", "arg_spec", ".", "args", "if", "k", "!=", "\"self...
Make a call_types dictionary that describes what arguments to pass to f Args: f: The function to inspect for argument names (without self) globals_d: A dictionary of globals to lookup annotation definitions in
[ "Make", "a", "call_types", "dictionary", "that", "describes", "what", "arguments", "to", "pass", "to", "f" ]
31ab68a0367bb70ebd9898e8b9fa9405423465bd
https://github.com/dls-controls/annotypes/blob/31ab68a0367bb70ebd9898e8b9fa9405423465bd/annotypes/_calltypes.py#L44-L80
train
50,006
dls-controls/annotypes
annotypes/_calltypes.py
make_annotations
def make_annotations(f, globals_d=None): # type: (Callable, Dict) -> Dict[str, Any] """Create an annotations dictionary from Python2 type comments http://mypy.readthedocs.io/en/latest/python2.html Args: f: The function to examine for type comments globals_d: The globals dictionary to get type idents from. If not specified then make the annotations dict contain strings rather than the looked up objects """ locals_d = {} # type: Dict[str, Any] if globals_d is None: # If not given a globals_d then we should just populate annotations with # the strings in the type comment. globals_d = {} # The current approach is to use eval, which means manufacturing a # dict like object that will just echo the string back to you. This # has a number of complexities for somthing like numpy.number or # Callable[..., int], which are handled in EchoStr above, so it might be # better off as an ast.parse in the future... locals_d = EchoDict() lines, _ = inspect.getsourcelines(f) arg_spec = getargspec(f) args = list(arg_spec.args) if arg_spec.varargs is not None: args.append(arg_spec.varargs) if arg_spec.keywords is not None: args.append(arg_spec.keywords) it = iter(lines) types = [] # type: List found = None for token in tokenize.generate_tokens(lambda: next(it)): typ, string, start, end, line = token if typ == tokenize.COMMENT: found = type_re.match(string) if found: parts = found.groups() # (...) 
is used to represent all the args so far if parts[0] != "(...)": expr = parts[0].replace("*", "") try: ob = eval(expr, globals_d, locals_d) except Exception as e: raise ValueError( "Error evaluating %r: %s" % (expr, e)) if isinstance(ob, tuple): # We got more than one argument types += list(ob) else: # We got a single argument types.append(ob) if parts[1]: # Got a return, done try: ob = eval(parts[2], globals_d, locals_d) except Exception as e: raise ValueError( "Error evaluating %r: %s" % (parts[2], e)) if args and args[0] in ["self", "cls"]: # Allow the first argument to be inferred if len(args) == len(types) + 1: args = args[1:] assert len(args) == len(types), \ "Args %r Types %r length mismatch" % (args, types) ret = dict(zip(args, types)) ret["return"] = ob return ret if found: # If we have ever found a type comment, but not the return value, error raise ValueError("Got to the end of the function without seeing ->") return {}
python
def make_annotations(f, globals_d=None): # type: (Callable, Dict) -> Dict[str, Any] """Create an annotations dictionary from Python2 type comments http://mypy.readthedocs.io/en/latest/python2.html Args: f: The function to examine for type comments globals_d: The globals dictionary to get type idents from. If not specified then make the annotations dict contain strings rather than the looked up objects """ locals_d = {} # type: Dict[str, Any] if globals_d is None: # If not given a globals_d then we should just populate annotations with # the strings in the type comment. globals_d = {} # The current approach is to use eval, which means manufacturing a # dict like object that will just echo the string back to you. This # has a number of complexities for somthing like numpy.number or # Callable[..., int], which are handled in EchoStr above, so it might be # better off as an ast.parse in the future... locals_d = EchoDict() lines, _ = inspect.getsourcelines(f) arg_spec = getargspec(f) args = list(arg_spec.args) if arg_spec.varargs is not None: args.append(arg_spec.varargs) if arg_spec.keywords is not None: args.append(arg_spec.keywords) it = iter(lines) types = [] # type: List found = None for token in tokenize.generate_tokens(lambda: next(it)): typ, string, start, end, line = token if typ == tokenize.COMMENT: found = type_re.match(string) if found: parts = found.groups() # (...) 
is used to represent all the args so far if parts[0] != "(...)": expr = parts[0].replace("*", "") try: ob = eval(expr, globals_d, locals_d) except Exception as e: raise ValueError( "Error evaluating %r: %s" % (expr, e)) if isinstance(ob, tuple): # We got more than one argument types += list(ob) else: # We got a single argument types.append(ob) if parts[1]: # Got a return, done try: ob = eval(parts[2], globals_d, locals_d) except Exception as e: raise ValueError( "Error evaluating %r: %s" % (parts[2], e)) if args and args[0] in ["self", "cls"]: # Allow the first argument to be inferred if len(args) == len(types) + 1: args = args[1:] assert len(args) == len(types), \ "Args %r Types %r length mismatch" % (args, types) ret = dict(zip(args, types)) ret["return"] = ob return ret if found: # If we have ever found a type comment, but not the return value, error raise ValueError("Got to the end of the function without seeing ->") return {}
[ "def", "make_annotations", "(", "f", ",", "globals_d", "=", "None", ")", ":", "# type: (Callable, Dict) -> Dict[str, Any]", "locals_d", "=", "{", "}", "# type: Dict[str, Any]", "if", "globals_d", "is", "None", ":", "# If not given a globals_d then we should just populate an...
Create an annotations dictionary from Python2 type comments http://mypy.readthedocs.io/en/latest/python2.html Args: f: The function to examine for type comments globals_d: The globals dictionary to get type idents from. If not specified then make the annotations dict contain strings rather than the looked up objects
[ "Create", "an", "annotations", "dictionary", "from", "Python2", "type", "comments" ]
31ab68a0367bb70ebd9898e8b9fa9405423465bd
https://github.com/dls-controls/annotypes/blob/31ab68a0367bb70ebd9898e8b9fa9405423465bd/annotypes/_calltypes.py#L106-L178
train
50,007
InfoAgeTech/django-core
django_core/views/mixins/common.py
CommonSingleObjectViewMixin.get_object
def get_object(self, **kwargs): """Sometimes preprocessing of a view need to happen before the object attribute has been set for a view. In this case, just return the object if it has already been set when it's called down the road since there's no need to make another query. """ if hasattr(self, 'object') and self.object: return self.object obj = super(CommonSingleObjectViewMixin, self).get_object(**kwargs) self.object = obj return obj
python
def get_object(self, **kwargs): """Sometimes preprocessing of a view need to happen before the object attribute has been set for a view. In this case, just return the object if it has already been set when it's called down the road since there's no need to make another query. """ if hasattr(self, 'object') and self.object: return self.object obj = super(CommonSingleObjectViewMixin, self).get_object(**kwargs) self.object = obj return obj
[ "def", "get_object", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "hasattr", "(", "self", ",", "'object'", ")", "and", "self", ".", "object", ":", "return", "self", ".", "object", "obj", "=", "super", "(", "CommonSingleObjectViewMixin", ",", "...
Sometimes preprocessing of a view need to happen before the object attribute has been set for a view. In this case, just return the object if it has already been set when it's called down the road since there's no need to make another query.
[ "Sometimes", "preprocessing", "of", "a", "view", "need", "to", "happen", "before", "the", "object", "attribute", "has", "been", "set", "for", "a", "view", ".", "In", "this", "case", "just", "return", "the", "object", "if", "it", "has", "already", "been", ...
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/views/mixins/common.py#L6-L17
train
50,008
InfoAgeTech/django-core
django_core/utils/date_parsers.py
hex_timestamp_to_datetime
def hex_timestamp_to_datetime(hex_timestamp): """Converts hex timestamp to a datetime object. >>> hex_timestamp_to_datetime('558BBCF9') datetime.datetime(2015, 6, 25, 8, 34, 1) >>> hex_timestamp_to_datetime('0x558BBCF9') datetime.datetime(2015, 6, 25, 8, 34, 1) >>> datetime.fromtimestamp(0x558BBCF9) datetime.datetime(2015, 6, 25, 8, 34, 1) """ if not hex_timestamp.startswith('0x'): hex_timestamp = '0x{0}'.format(hex_timestamp) return datetime.fromtimestamp(int(hex_timestamp, 16))
python
def hex_timestamp_to_datetime(hex_timestamp): """Converts hex timestamp to a datetime object. >>> hex_timestamp_to_datetime('558BBCF9') datetime.datetime(2015, 6, 25, 8, 34, 1) >>> hex_timestamp_to_datetime('0x558BBCF9') datetime.datetime(2015, 6, 25, 8, 34, 1) >>> datetime.fromtimestamp(0x558BBCF9) datetime.datetime(2015, 6, 25, 8, 34, 1) """ if not hex_timestamp.startswith('0x'): hex_timestamp = '0x{0}'.format(hex_timestamp) return datetime.fromtimestamp(int(hex_timestamp, 16))
[ "def", "hex_timestamp_to_datetime", "(", "hex_timestamp", ")", ":", "if", "not", "hex_timestamp", ".", "startswith", "(", "'0x'", ")", ":", "hex_timestamp", "=", "'0x{0}'", ".", "format", "(", "hex_timestamp", ")", "return", "datetime", ".", "fromtimestamp", "("...
Converts hex timestamp to a datetime object. >>> hex_timestamp_to_datetime('558BBCF9') datetime.datetime(2015, 6, 25, 8, 34, 1) >>> hex_timestamp_to_datetime('0x558BBCF9') datetime.datetime(2015, 6, 25, 8, 34, 1) >>> datetime.fromtimestamp(0x558BBCF9) datetime.datetime(2015, 6, 25, 8, 34, 1)
[ "Converts", "hex", "timestamp", "to", "a", "datetime", "object", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/date_parsers.py#L10-L23
train
50,009
InfoAgeTech/django-core
django_core/utils/date_parsers.py
now_by_tz
def now_by_tz(tz='US/Central', ignoretz=True): """Gets the current datetime object by timezone. :param tz: is the timezone to get the date for. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the current datetime object by tz Examples: >>> now_by_tz('US/Pacific') 2011-09-28 10:06:01.130025 >>> now_by_tz('US/Pacific', False) 2011-09-28 10:06:01.130025-07:00 >>> now_by_tz(pytz.timezone('US/Central')) 2011-09-28 12:06:01.130025 >>> now_by_tz(pytz.timezone('US/Central'), False) 2011-09-28 12:06:01.130025-05:00 """ if isinstance(tz, string_types): tz = pytz.timezone(tz) if ignoretz: return datetime.now(tz).replace(tzinfo=None) return datetime.now(tz)
python
def now_by_tz(tz='US/Central', ignoretz=True): """Gets the current datetime object by timezone. :param tz: is the timezone to get the date for. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the current datetime object by tz Examples: >>> now_by_tz('US/Pacific') 2011-09-28 10:06:01.130025 >>> now_by_tz('US/Pacific', False) 2011-09-28 10:06:01.130025-07:00 >>> now_by_tz(pytz.timezone('US/Central')) 2011-09-28 12:06:01.130025 >>> now_by_tz(pytz.timezone('US/Central'), False) 2011-09-28 12:06:01.130025-05:00 """ if isinstance(tz, string_types): tz = pytz.timezone(tz) if ignoretz: return datetime.now(tz).replace(tzinfo=None) return datetime.now(tz)
[ "def", "now_by_tz", "(", "tz", "=", "'US/Central'", ",", "ignoretz", "=", "True", ")", ":", "if", "isinstance", "(", "tz", ",", "string_types", ")", ":", "tz", "=", "pytz", ".", "timezone", "(", "tz", ")", "if", "ignoretz", ":", "return", "datetime", ...
Gets the current datetime object by timezone. :param tz: is the timezone to get the date for. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the current datetime object by tz Examples: >>> now_by_tz('US/Pacific') 2011-09-28 10:06:01.130025 >>> now_by_tz('US/Pacific', False) 2011-09-28 10:06:01.130025-07:00 >>> now_by_tz(pytz.timezone('US/Central')) 2011-09-28 12:06:01.130025 >>> now_by_tz(pytz.timezone('US/Central'), False) 2011-09-28 12:06:01.130025-05:00
[ "Gets", "the", "current", "datetime", "object", "by", "timezone", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/date_parsers.py#L26-L53
train
50,010
InfoAgeTech/django-core
django_core/utils/date_parsers.py
tz_to_utc
def tz_to_utc(dt, tz, ignoretz=True): """Converts a datetime object from the specified timezone to a UTC datetime. :param tz: the timezone the datetime is currently in. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the datetime object by in UTC time. Examples: >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central') 2011-11-25 15:00:00 >>> tz_to_utc(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')) 2011-11-25 15:00:00 >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central', False) 2011-11-25 15:00:00+00:00 """ if isinstance(tz, string_types): tz = pytz.timezone(tz) dt = tz.localize(dt) dt = datetime.astimezone(dt, pytz.timezone('UTC')) if ignoretz: return dt.replace(tzinfo=None) return dt
python
def tz_to_utc(dt, tz, ignoretz=True): """Converts a datetime object from the specified timezone to a UTC datetime. :param tz: the timezone the datetime is currently in. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the datetime object by in UTC time. Examples: >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central') 2011-11-25 15:00:00 >>> tz_to_utc(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')) 2011-11-25 15:00:00 >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central', False) 2011-11-25 15:00:00+00:00 """ if isinstance(tz, string_types): tz = pytz.timezone(tz) dt = tz.localize(dt) dt = datetime.astimezone(dt, pytz.timezone('UTC')) if ignoretz: return dt.replace(tzinfo=None) return dt
[ "def", "tz_to_utc", "(", "dt", ",", "tz", ",", "ignoretz", "=", "True", ")", ":", "if", "isinstance", "(", "tz", ",", "string_types", ")", ":", "tz", "=", "pytz", ".", "timezone", "(", "tz", ")", "dt", "=", "tz", ".", "localize", "(", "dt", ")", ...
Converts a datetime object from the specified timezone to a UTC datetime. :param tz: the timezone the datetime is currently in. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the datetime object by in UTC time. Examples: >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central') 2011-11-25 15:00:00 >>> tz_to_utc(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')) 2011-11-25 15:00:00 >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central', False) 2011-11-25 15:00:00+00:00
[ "Converts", "a", "datetime", "object", "from", "the", "specified", "timezone", "to", "a", "UTC", "datetime", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/date_parsers.py#L56-L84
train
50,011
InfoAgeTech/django-core
django_core/utils/date_parsers.py
utc_to_tz
def utc_to_tz(dt, tz, ignoretz=True): """ Converts UTC datetime object to the specific timezone. :param dt: the UTC datetime object to convert. :param tz: the timezone to convert the UTC datetime object info. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the datetime object by in UTC time. Examples: >>> utc_to_tz(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')) 2011-11-25 03:00:00 >>> utc_to_tz(datetime(2011, 11, 25, 9), 'US/Central', False) 2011-11-25 03:00:00-06:00 """ if isinstance(tz, string_types): tz = pytz.timezone(tz) dt = pytz.utc.localize(dt) dt = dt.astimezone(tz) if ignoretz: return dt.replace(tzinfo=None) return dt
python
def utc_to_tz(dt, tz, ignoretz=True): """ Converts UTC datetime object to the specific timezone. :param dt: the UTC datetime object to convert. :param tz: the timezone to convert the UTC datetime object info. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the datetime object by in UTC time. Examples: >>> utc_to_tz(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')) 2011-11-25 03:00:00 >>> utc_to_tz(datetime(2011, 11, 25, 9), 'US/Central', False) 2011-11-25 03:00:00-06:00 """ if isinstance(tz, string_types): tz = pytz.timezone(tz) dt = pytz.utc.localize(dt) dt = dt.astimezone(tz) if ignoretz: return dt.replace(tzinfo=None) return dt
[ "def", "utc_to_tz", "(", "dt", ",", "tz", ",", "ignoretz", "=", "True", ")", ":", "if", "isinstance", "(", "tz", ",", "string_types", ")", ":", "tz", "=", "pytz", ".", "timezone", "(", "tz", ")", "dt", "=", "pytz", ".", "utc", ".", "localize", "(...
Converts UTC datetime object to the specific timezone. :param dt: the UTC datetime object to convert. :param tz: the timezone to convert the UTC datetime object info. tz can be passed as a string or as a timezone object. (i.e. 'US/Central' or pytz.timezone('US/Central'), etc) :param ignoretz: will ignore the timezone portion of the datetime object and tzinfo will be None. :return: the datetime object by in UTC time. Examples: >>> utc_to_tz(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')) 2011-11-25 03:00:00 >>> utc_to_tz(datetime(2011, 11, 25, 9), 'US/Central', False) 2011-11-25 03:00:00-06:00
[ "Converts", "UTC", "datetime", "object", "to", "the", "specific", "timezone", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/date_parsers.py#L87-L114
train
50,012
InfoAgeTech/django-core
django_core/decorators.py
turn_emails_off
def turn_emails_off(view_func): """Turns emails off so no emails will be sent.""" # Dummy email backend so no emails are sent. EMAIL_BACKEND_DUMMY = 'django.core.mail.backends.dummy.EmailBackend' def decorated(request, *args, **kwargs): orig_email_backend = settings.EMAIL_BACKEND settings.EMAIL_BACKEND = EMAIL_BACKEND_DUMMY response = view_func(request, *args, **kwargs) settings.EMAIL_BACKEND = orig_email_backend return response return decorated
python
def turn_emails_off(view_func): """Turns emails off so no emails will be sent.""" # Dummy email backend so no emails are sent. EMAIL_BACKEND_DUMMY = 'django.core.mail.backends.dummy.EmailBackend' def decorated(request, *args, **kwargs): orig_email_backend = settings.EMAIL_BACKEND settings.EMAIL_BACKEND = EMAIL_BACKEND_DUMMY response = view_func(request, *args, **kwargs) settings.EMAIL_BACKEND = orig_email_backend return response return decorated
[ "def", "turn_emails_off", "(", "view_func", ")", ":", "# Dummy email backend so no emails are sent.", "EMAIL_BACKEND_DUMMY", "=", "'django.core.mail.backends.dummy.EmailBackend'", "def", "decorated", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "...
Turns emails off so no emails will be sent.
[ "Turns", "emails", "off", "so", "no", "emails", "will", "be", "sent", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/decorators.py#L6-L20
train
50,013
InfoAgeTech/django-core
django_core/utils/file_utils.py
get_md5_for_file
def get_md5_for_file(file): """Get the md5 hash for a file. :param file: the file to get the md5 hash for """ md5 = hashlib.md5() while True: data = file.read(md5.block_size) if not data: break md5.update(data) return md5.hexdigest()
python
def get_md5_for_file(file): """Get the md5 hash for a file. :param file: the file to get the md5 hash for """ md5 = hashlib.md5() while True: data = file.read(md5.block_size) if not data: break md5.update(data) return md5.hexdigest()
[ "def", "get_md5_for_file", "(", "file", ")", ":", "md5", "=", "hashlib", ".", "md5", "(", ")", "while", "True", ":", "data", "=", "file", ".", "read", "(", "md5", ".", "block_size", ")", "if", "not", "data", ":", "break", "md5", ".", "update", "(",...
Get the md5 hash for a file. :param file: the file to get the md5 hash for
[ "Get", "the", "md5", "hash", "for", "a", "file", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/file_utils.py#L72-L87
train
50,014
InfoAgeTech/django-core
django_core/utils/file_utils.py
get_dict_from_json_file
def get_dict_from_json_file(path, encoding='utf-8'): """Gets a dict of data form a json file. :param path: the absolute path to the file :param encoding: the encoding the file is in """ with open(path, encoding=encoding) as data_file: return json.loads(data_file.read())
python
def get_dict_from_json_file(path, encoding='utf-8'): """Gets a dict of data form a json file. :param path: the absolute path to the file :param encoding: the encoding the file is in """ with open(path, encoding=encoding) as data_file: return json.loads(data_file.read())
[ "def", "get_dict_from_json_file", "(", "path", ",", "encoding", "=", "'utf-8'", ")", ":", "with", "open", "(", "path", ",", "encoding", "=", "encoding", ")", "as", "data_file", ":", "return", "json", ".", "loads", "(", "data_file", ".", "read", "(", ")",...
Gets a dict of data form a json file. :param path: the absolute path to the file :param encoding: the encoding the file is in
[ "Gets", "a", "dict", "of", "data", "form", "a", "json", "file", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/file_utils.py#L90-L97
train
50,015
InfoAgeTech/django-core
django_core/templatetags/html_tags.py
linebreaks_safe
def linebreaks_safe(value, autoescape=True): """ Adds linebreaks only for text that has a newline character. """ if isinstance(value, string_types) and '\n' in value: return linebreaks_filter(value, autoescape=autoescape) return value
python
def linebreaks_safe(value, autoescape=True): """ Adds linebreaks only for text that has a newline character. """ if isinstance(value, string_types) and '\n' in value: return linebreaks_filter(value, autoescape=autoescape) return value
[ "def", "linebreaks_safe", "(", "value", ",", "autoescape", "=", "True", ")", ":", "if", "isinstance", "(", "value", ",", "string_types", ")", "and", "'\\n'", "in", "value", ":", "return", "linebreaks_filter", "(", "value", ",", "autoescape", "=", "autoescape...
Adds linebreaks only for text that has a newline character.
[ "Adds", "linebreaks", "only", "for", "text", "that", "has", "a", "newline", "character", "." ]
9664a145473b75120bf71e1644e9c8086e7e8955
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/templatetags/html_tags.py#L11-L18
train
50,016
QualiSystems/cloudshell-networking-devices
cloudshell/devices/standards/base.py
AbstractResource.add_sub_resource
def add_sub_resource(self, relative_id, sub_resource): """Add sub resource""" existing_sub_resources = self.resources.get(sub_resource.RELATIVE_PATH_TEMPLATE, defaultdict(list)) existing_sub_resources[relative_id].append(sub_resource) self.resources.update({sub_resource.RELATIVE_PATH_TEMPLATE: existing_sub_resources})
python
def add_sub_resource(self, relative_id, sub_resource): """Add sub resource""" existing_sub_resources = self.resources.get(sub_resource.RELATIVE_PATH_TEMPLATE, defaultdict(list)) existing_sub_resources[relative_id].append(sub_resource) self.resources.update({sub_resource.RELATIVE_PATH_TEMPLATE: existing_sub_resources})
[ "def", "add_sub_resource", "(", "self", ",", "relative_id", ",", "sub_resource", ")", ":", "existing_sub_resources", "=", "self", ".", "resources", ".", "get", "(", "sub_resource", ".", "RELATIVE_PATH_TEMPLATE", ",", "defaultdict", "(", "list", ")", ")", "existi...
Add sub resource
[ "Add", "sub", "resource" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/standards/base.py#L30-L34
train
50,017
QualiSystems/cloudshell-networking-devices
cloudshell/devices/standards/base.py
AbstractResource.cloudshell_model_name
def cloudshell_model_name(self): """Return the name of the CloudShell model""" if self.shell_name: return "{shell_name}.{resource_model}".format(shell_name=self.shell_name, resource_model=self.RESOURCE_MODEL.replace(" ", "")) else: return self.RESOURCE_MODEL
python
def cloudshell_model_name(self): """Return the name of the CloudShell model""" if self.shell_name: return "{shell_name}.{resource_model}".format(shell_name=self.shell_name, resource_model=self.RESOURCE_MODEL.replace(" ", "")) else: return self.RESOURCE_MODEL
[ "def", "cloudshell_model_name", "(", "self", ")", ":", "if", "self", ".", "shell_name", ":", "return", "\"{shell_name}.{resource_model}\"", ".", "format", "(", "shell_name", "=", "self", ".", "shell_name", ",", "resource_model", "=", "self", ".", "RESOURCE_MODEL",...
Return the name of the CloudShell model
[ "Return", "the", "name", "of", "the", "CloudShell", "model" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/standards/base.py#L37-L43
train
50,018
eumis/pyviews
pyviews/compilation/parsing.py
parse_expression
def parse_expression(source: str) -> ExpressionSource: '''Returns tuple with expression type and expression body''' if not is_expression(source): msg = 'Expression is not valid. Expression should be matched with regular expression: {0}'\ .format(EXPRESSION_REGEX) raise ExpressionError(msg, source) if not source.startswith('{'): [type_, source] = source.split(':', 1) elif source.endswith('}}'): type_ = 'twoways' else: type_ = 'oneway' return (type_, source[1:-1])
python
def parse_expression(source: str) -> ExpressionSource: '''Returns tuple with expression type and expression body''' if not is_expression(source): msg = 'Expression is not valid. Expression should be matched with regular expression: {0}'\ .format(EXPRESSION_REGEX) raise ExpressionError(msg, source) if not source.startswith('{'): [type_, source] = source.split(':', 1) elif source.endswith('}}'): type_ = 'twoways' else: type_ = 'oneway' return (type_, source[1:-1])
[ "def", "parse_expression", "(", "source", ":", "str", ")", "->", "ExpressionSource", ":", "if", "not", "is_expression", "(", "source", ")", ":", "msg", "=", "'Expression is not valid. Expression should be matched with regular expression: {0}'", ".", "format", "(", "EXPR...
Returns tuple with expression type and expression body
[ "Returns", "tuple", "with", "expression", "type", "and", "expression", "body" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/compilation/parsing.py#L20-L32
train
50,019
internetarchive/doublethink
doublethink/rethinker.py
Rethinker._server_whitelist
def _server_whitelist(self): ''' Returns list of servers that have not errored in the last five minutes. If all servers have errored in the last five minutes, returns list with one item, the server that errored least recently. ''' whitelist = [] for server in self.servers: if (server not in self.last_error or self.last_error[server] < time.time() - self.PENALTY_BOX_TIME): whitelist.append(server) if not whitelist: whitelist.append(sorted( self.last_error.items(), key=lambda kv: kv[1])[0][0]) return whitelist
python
def _server_whitelist(self): ''' Returns list of servers that have not errored in the last five minutes. If all servers have errored in the last five minutes, returns list with one item, the server that errored least recently. ''' whitelist = [] for server in self.servers: if (server not in self.last_error or self.last_error[server] < time.time() - self.PENALTY_BOX_TIME): whitelist.append(server) if not whitelist: whitelist.append(sorted( self.last_error.items(), key=lambda kv: kv[1])[0][0]) return whitelist
[ "def", "_server_whitelist", "(", "self", ")", ":", "whitelist", "=", "[", "]", "for", "server", "in", "self", ".", "servers", ":", "if", "(", "server", "not", "in", "self", ".", "last_error", "or", "self", ".", "last_error", "[", "server", "]", "<", ...
Returns list of servers that have not errored in the last five minutes. If all servers have errored in the last five minutes, returns list with one item, the server that errored least recently.
[ "Returns", "list", "of", "servers", "that", "have", "not", "errored", "in", "the", "last", "five", "minutes", ".", "If", "all", "servers", "have", "errored", "in", "the", "last", "five", "minutes", "returns", "list", "with", "one", "item", "the", "server",...
f7fc7da725c9b572d473c717b3dad9af98a7a2b4
https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/rethinker.py#L141-L155
train
50,020
upsight/doctor
doctor/parsers.py
_parse_array
def _parse_array(value): """Coerce value into an list. :param str value: Value to parse. :returns: list or None if the value is not a JSON array :raises: TypeError or ValueError if value appears to be an array but can't be parsed as JSON. """ value = value.lstrip() if not value or value[0] not in _bracket_strings: return None return json.loads(value)
python
def _parse_array(value): """Coerce value into an list. :param str value: Value to parse. :returns: list or None if the value is not a JSON array :raises: TypeError or ValueError if value appears to be an array but can't be parsed as JSON. """ value = value.lstrip() if not value or value[0] not in _bracket_strings: return None return json.loads(value)
[ "def", "_parse_array", "(", "value", ")", ":", "value", "=", "value", ".", "lstrip", "(", ")", "if", "not", "value", "or", "value", "[", "0", "]", "not", "in", "_bracket_strings", ":", "return", "None", "return", "json", ".", "loads", "(", "value", "...
Coerce value into an list. :param str value: Value to parse. :returns: list or None if the value is not a JSON array :raises: TypeError or ValueError if value appears to be an array but can't be parsed as JSON.
[ "Coerce", "value", "into", "an", "list", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L22-L33
train
50,021
upsight/doctor
doctor/parsers.py
_parse_boolean
def _parse_boolean(value): """Coerce value into an bool. :param str value: Value to parse. :returns: bool or None if the value is not a boolean string. """ value = value.lower() if value in _true_strings: return True elif value in _false_strings: return False else: return None
python
def _parse_boolean(value): """Coerce value into an bool. :param str value: Value to parse. :returns: bool or None if the value is not a boolean string. """ value = value.lower() if value in _true_strings: return True elif value in _false_strings: return False else: return None
[ "def", "_parse_boolean", "(", "value", ")", ":", "value", "=", "value", ".", "lower", "(", ")", "if", "value", "in", "_true_strings", ":", "return", "True", "elif", "value", "in", "_false_strings", ":", "return", "False", "else", ":", "return", "None" ]
Coerce value into an bool. :param str value: Value to parse. :returns: bool or None if the value is not a boolean string.
[ "Coerce", "value", "into", "an", "bool", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L36-L48
train
50,022
upsight/doctor
doctor/parsers.py
_parse_object
def _parse_object(value): """Coerce value into a dict. :param str value: Value to parse. :returns: dict or None if the value is not a JSON object :raises: TypeError or ValueError if value appears to be an object but can't be parsed as JSON. """ value = value.lstrip() if not value or value[0] not in _brace_strings: return None return json.loads(value)
python
def _parse_object(value): """Coerce value into a dict. :param str value: Value to parse. :returns: dict or None if the value is not a JSON object :raises: TypeError or ValueError if value appears to be an object but can't be parsed as JSON. """ value = value.lstrip() if not value or value[0] not in _brace_strings: return None return json.loads(value)
[ "def", "_parse_object", "(", "value", ")", ":", "value", "=", "value", ".", "lstrip", "(", ")", "if", "not", "value", "or", "value", "[", "0", "]", "not", "in", "_brace_strings", ":", "return", "None", "return", "json", ".", "loads", "(", "value", ")...
Coerce value into a dict. :param str value: Value to parse. :returns: dict or None if the value is not a JSON object :raises: TypeError or ValueError if value appears to be an object but can't be parsed as JSON.
[ "Coerce", "value", "into", "a", "dict", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L51-L62
train
50,023
upsight/doctor
doctor/parsers.py
parse_value
def parse_value(value, allowed_types, name='value'): """Parse a value into one of a number of types. This function is used to coerce untyped HTTP parameter strings into an appropriate type. It tries to coerce the value into each of the allowed types, and uses the first that evaluates properly. Because this is coercing a string into multiple, potentially ambiguous, types, it tests things in the order of least ambiguous to most ambiguous: - The "null" type is checked first. If allowed, and the value is blank (""), None will be returned. - The "boolean" type is checked next. Values of "true" (case insensitive) are True, and values of "false" are False. - Numeric types are checked next -- first "integer", then "number". - The "array" type is checked next. A value is only considered a valid array if it begins with a "[" and can be parsed as JSON. - The "object" type is checked next. A value is only considered a valid object if it begins with a "{" and can be parsed as JSON. - The "string" type is checked last, since any value is a valid string. Unicode strings are encoded as UTF-8. :param str value: Parameter value. Example: "1" :param list allowed_types: Types that should be attempted. Example: ["integer", "null"] :param str name: Parameter name. If not specified, "value" is used. Example: "campaign_id" :returns: a tuple of a type string and coerced value :raises: ParseError if the value cannot be coerced to any of the types """ if not isinstance(value, str): raise ValueError('value for %r must be a string' % name) if isinstance(allowed_types, str): allowed_types = [allowed_types] # Note that the order of these type considerations is important. Because we # have an untyped value that may be one of any given number of types, we # need a consistent order of evaluation in cases when there is ambiguity # between types. 
if 'null' in allowed_types and value == '': return 'null', None # For all of these types, we'll pass the value to the function and it will # raise a TypeError or ValueError or return None if it can't be parsed as # the given type. for allowed_type, parser in _parser_funcs: if allowed_type in allowed_types: try: parsed_value = parser(value) if parsed_value is not None: return allowed_type, parsed_value except (TypeError, ValueError): # Ignore any errors, and continue trying other types pass raise ParseError('%s must be a valid type (%s)' % (name, ', '.join(allowed_types)))
python
def parse_value(value, allowed_types, name='value'): """Parse a value into one of a number of types. This function is used to coerce untyped HTTP parameter strings into an appropriate type. It tries to coerce the value into each of the allowed types, and uses the first that evaluates properly. Because this is coercing a string into multiple, potentially ambiguous, types, it tests things in the order of least ambiguous to most ambiguous: - The "null" type is checked first. If allowed, and the value is blank (""), None will be returned. - The "boolean" type is checked next. Values of "true" (case insensitive) are True, and values of "false" are False. - Numeric types are checked next -- first "integer", then "number". - The "array" type is checked next. A value is only considered a valid array if it begins with a "[" and can be parsed as JSON. - The "object" type is checked next. A value is only considered a valid object if it begins with a "{" and can be parsed as JSON. - The "string" type is checked last, since any value is a valid string. Unicode strings are encoded as UTF-8. :param str value: Parameter value. Example: "1" :param list allowed_types: Types that should be attempted. Example: ["integer", "null"] :param str name: Parameter name. If not specified, "value" is used. Example: "campaign_id" :returns: a tuple of a type string and coerced value :raises: ParseError if the value cannot be coerced to any of the types """ if not isinstance(value, str): raise ValueError('value for %r must be a string' % name) if isinstance(allowed_types, str): allowed_types = [allowed_types] # Note that the order of these type considerations is important. Because we # have an untyped value that may be one of any given number of types, we # need a consistent order of evaluation in cases when there is ambiguity # between types. 
if 'null' in allowed_types and value == '': return 'null', None # For all of these types, we'll pass the value to the function and it will # raise a TypeError or ValueError or return None if it can't be parsed as # the given type. for allowed_type, parser in _parser_funcs: if allowed_type in allowed_types: try: parsed_value = parser(value) if parsed_value is not None: return allowed_type, parsed_value except (TypeError, ValueError): # Ignore any errors, and continue trying other types pass raise ParseError('%s must be a valid type (%s)' % (name, ', '.join(allowed_types)))
[ "def", "parse_value", "(", "value", ",", "allowed_types", ",", "name", "=", "'value'", ")", ":", "if", "not", "isinstance", "(", "value", ",", "str", ")", ":", "raise", "ValueError", "(", "'value for %r must be a string'", "%", "name", ")", "if", "isinstance...
Parse a value into one of a number of types. This function is used to coerce untyped HTTP parameter strings into an appropriate type. It tries to coerce the value into each of the allowed types, and uses the first that evaluates properly. Because this is coercing a string into multiple, potentially ambiguous, types, it tests things in the order of least ambiguous to most ambiguous: - The "null" type is checked first. If allowed, and the value is blank (""), None will be returned. - The "boolean" type is checked next. Values of "true" (case insensitive) are True, and values of "false" are False. - Numeric types are checked next -- first "integer", then "number". - The "array" type is checked next. A value is only considered a valid array if it begins with a "[" and can be parsed as JSON. - The "object" type is checked next. A value is only considered a valid object if it begins with a "{" and can be parsed as JSON. - The "string" type is checked last, since any value is a valid string. Unicode strings are encoded as UTF-8. :param str value: Parameter value. Example: "1" :param list allowed_types: Types that should be attempted. Example: ["integer", "null"] :param str name: Parameter name. If not specified, "value" is used. Example: "campaign_id" :returns: a tuple of a type string and coerced value :raises: ParseError if the value cannot be coerced to any of the types
[ "Parse", "a", "value", "into", "one", "of", "a", "number", "of", "types", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L87-L145
train
50,024
upsight/doctor
doctor/parsers.py
parse_json
def parse_json(value: str, sig_params: List[inspect.Parameter] = None) -> dict: """Parse a value as JSON. This is just a wrapper around json.loads which re-raises any errors as a ParseError instead. :param str value: JSON string. :param dict sig_params: The logic function's signature parameters. :returns: the parsed JSON value """ try: loaded = json.loads(value) except Exception as e: message = 'Error parsing JSON: %r error: %s' % (value, e) logging.debug(message, exc_info=e) raise ParseError(message) if sig_params is not None: return map_param_names(loaded, sig_params) return loaded
python
def parse_json(value: str, sig_params: List[inspect.Parameter] = None) -> dict: """Parse a value as JSON. This is just a wrapper around json.loads which re-raises any errors as a ParseError instead. :param str value: JSON string. :param dict sig_params: The logic function's signature parameters. :returns: the parsed JSON value """ try: loaded = json.loads(value) except Exception as e: message = 'Error parsing JSON: %r error: %s' % (value, e) logging.debug(message, exc_info=e) raise ParseError(message) if sig_params is not None: return map_param_names(loaded, sig_params) return loaded
[ "def", "parse_json", "(", "value", ":", "str", ",", "sig_params", ":", "List", "[", "inspect", ".", "Parameter", "]", "=", "None", ")", "->", "dict", ":", "try", ":", "loaded", "=", "json", ".", "loads", "(", "value", ")", "except", "Exception", "as"...
Parse a value as JSON. This is just a wrapper around json.loads which re-raises any errors as a ParseError instead. :param str value: JSON string. :param dict sig_params: The logic function's signature parameters. :returns: the parsed JSON value
[ "Parse", "a", "value", "as", "JSON", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L148-L167
train
50,025
upsight/doctor
doctor/parsers.py
map_param_names
def map_param_names( req_params: dict, sig_params: List[inspect.Parameter]) -> dict: """Maps request param names to match logic function param names. If a doctor type defined a `param_name` attribute for the name of the parameter in the request, we should use that as the key when looking up the value for the request parameter. When we declare a type we can specify what the parameter name should be in the request that the annotated type should get mapped to. >>> from doctor.types import number >>> Latitude = number('The latitude', param_name='location.lat') >>> def my_logic(lat: Latitude): pass >>> request_params = {'location.lat': 45.2342343} In the above example doctor knows to pass the value at key `location.lat` to the logic function variable named `lat` since it's annotated by the `Latitude` type which specifies what the param_name is on the request. :param dict req_params: The parameters specified in the request. :param dict sig_params: The logic function's signature parameters. :returns: A dict of re-mapped params. """ new_request_params = {} for k, param in sig_params.items(): param_name = getattr(param.annotation, 'param_name', None) key = k if param_name is None else param_name if key in req_params: new_request_params[k] = req_params[key] return new_request_params
python
def map_param_names( req_params: dict, sig_params: List[inspect.Parameter]) -> dict: """Maps request param names to match logic function param names. If a doctor type defined a `param_name` attribute for the name of the parameter in the request, we should use that as the key when looking up the value for the request parameter. When we declare a type we can specify what the parameter name should be in the request that the annotated type should get mapped to. >>> from doctor.types import number >>> Latitude = number('The latitude', param_name='location.lat') >>> def my_logic(lat: Latitude): pass >>> request_params = {'location.lat': 45.2342343} In the above example doctor knows to pass the value at key `location.lat` to the logic function variable named `lat` since it's annotated by the `Latitude` type which specifies what the param_name is on the request. :param dict req_params: The parameters specified in the request. :param dict sig_params: The logic function's signature parameters. :returns: A dict of re-mapped params. """ new_request_params = {} for k, param in sig_params.items(): param_name = getattr(param.annotation, 'param_name', None) key = k if param_name is None else param_name if key in req_params: new_request_params[k] = req_params[key] return new_request_params
[ "def", "map_param_names", "(", "req_params", ":", "dict", ",", "sig_params", ":", "List", "[", "inspect", ".", "Parameter", "]", ")", "->", "dict", ":", "new_request_params", "=", "{", "}", "for", "k", ",", "param", "in", "sig_params", ".", "items", "(",...
Maps request param names to match logic function param names. If a doctor type defined a `param_name` attribute for the name of the parameter in the request, we should use that as the key when looking up the value for the request parameter. When we declare a type we can specify what the parameter name should be in the request that the annotated type should get mapped to. >>> from doctor.types import number >>> Latitude = number('The latitude', param_name='location.lat') >>> def my_logic(lat: Latitude): pass >>> request_params = {'location.lat': 45.2342343} In the above example doctor knows to pass the value at key `location.lat` to the logic function variable named `lat` since it's annotated by the `Latitude` type which specifies what the param_name is on the request. :param dict req_params: The parameters specified in the request. :param dict sig_params: The logic function's signature parameters. :returns: A dict of re-mapped params.
[ "Maps", "request", "param", "names", "to", "match", "logic", "function", "param", "names", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L180-L210
train
50,026
upsight/doctor
doctor/parsers.py
parse_form_and_query_params
def parse_form_and_query_params(req_params: dict, sig_params: dict) -> dict: """Uses the parameter annotations to coerce string params. This is used for HTTP requests, in which the form parameters are all strings, but need to be converted to the appropriate types before validating them. :param dict req_params: The parameters specified in the request. :param dict sig_params: The logic function's signature parameters. :returns: a dict of params parsed from the input dict. :raises TypeSystemError: If there are errors parsing values. """ # Importing here to prevent circular dependencies. from doctor.types import SuperType, UnionType errors = {} parsed_params = {} for param, value in req_params.items(): # Skip request variables not in the function signature. if param not in sig_params: continue # Skip coercing parameters not annotated by a doctor type. if not issubclass(sig_params[param].annotation, SuperType): continue # Check if the type has a custom parser for the parameter. custom_parser = sig_params[param].annotation.parser if custom_parser is not None: if not callable(custom_parser): warnings.warn( 'Parser `{}` is not callable, using default parser.'.format( custom_parser)) custom_parser = None try: if custom_parser is not None: parsed_params[param] = custom_parser(value) else: if issubclass(sig_params[param].annotation, UnionType): json_type = [ _native_type_to_json[_type.native_type] for _type in sig_params[param].annotation.types ] else: native_type = sig_params[param].annotation.native_type json_type = [_native_type_to_json[native_type]] # If the type is nullable, also add null as an allowed type. if sig_params[param].annotation.nullable: json_type.append('null') _, parsed_params[param] = parse_value(value, json_type) except ParseError as e: errors[param] = str(e) if errors: raise TypeSystemError(errors, errors=errors) return parsed_params
python
def parse_form_and_query_params(req_params: dict, sig_params: dict) -> dict: """Uses the parameter annotations to coerce string params. This is used for HTTP requests, in which the form parameters are all strings, but need to be converted to the appropriate types before validating them. :param dict req_params: The parameters specified in the request. :param dict sig_params: The logic function's signature parameters. :returns: a dict of params parsed from the input dict. :raises TypeSystemError: If there are errors parsing values. """ # Importing here to prevent circular dependencies. from doctor.types import SuperType, UnionType errors = {} parsed_params = {} for param, value in req_params.items(): # Skip request variables not in the function signature. if param not in sig_params: continue # Skip coercing parameters not annotated by a doctor type. if not issubclass(sig_params[param].annotation, SuperType): continue # Check if the type has a custom parser for the parameter. custom_parser = sig_params[param].annotation.parser if custom_parser is not None: if not callable(custom_parser): warnings.warn( 'Parser `{}` is not callable, using default parser.'.format( custom_parser)) custom_parser = None try: if custom_parser is not None: parsed_params[param] = custom_parser(value) else: if issubclass(sig_params[param].annotation, UnionType): json_type = [ _native_type_to_json[_type.native_type] for _type in sig_params[param].annotation.types ] else: native_type = sig_params[param].annotation.native_type json_type = [_native_type_to_json[native_type]] # If the type is nullable, also add null as an allowed type. if sig_params[param].annotation.nullable: json_type.append('null') _, parsed_params[param] = parse_value(value, json_type) except ParseError as e: errors[param] = str(e) if errors: raise TypeSystemError(errors, errors=errors) return parsed_params
[ "def", "parse_form_and_query_params", "(", "req_params", ":", "dict", ",", "sig_params", ":", "dict", ")", "->", "dict", ":", "# Importing here to prevent circular dependencies.", "from", "doctor", ".", "types", "import", "SuperType", ",", "UnionType", "errors", "=", ...
Uses the parameter annotations to coerce string params. This is used for HTTP requests, in which the form parameters are all strings, but need to be converted to the appropriate types before validating them. :param dict req_params: The parameters specified in the request. :param dict sig_params: The logic function's signature parameters. :returns: a dict of params parsed from the input dict. :raises TypeSystemError: If there are errors parsing values.
[ "Uses", "the", "parameter", "annotations", "to", "coerce", "string", "params", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/parsers.py#L213-L268
train
50,027
Workiva/furious
furious/handlers/webapp.py
AsyncJobHandler._handle_task
def _handle_task(self): """Pass request info to the async framework.""" headers = self.request.headers message = None try: status_code, output = process_async_task( headers, self.request.body) except AbortAndRestart as restart: # Async retry status code status_code = 549 message = 'Retry Async Task' output = str(restart) self.response.set_status(status_code, message) self.response.out.write(output)
python
def _handle_task(self): """Pass request info to the async framework.""" headers = self.request.headers message = None try: status_code, output = process_async_task( headers, self.request.body) except AbortAndRestart as restart: # Async retry status code status_code = 549 message = 'Retry Async Task' output = str(restart) self.response.set_status(status_code, message) self.response.out.write(output)
[ "def", "_handle_task", "(", "self", ")", ":", "headers", "=", "self", ".", "request", ".", "headers", "message", "=", "None", "try", ":", "status_code", ",", "output", "=", "process_async_task", "(", "headers", ",", "self", ".", "request", ".", "body", "...
Pass request info to the async framework.
[ "Pass", "request", "info", "to", "the", "async", "framework", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/handlers/webapp.py#L30-L45
train
50,028
QualiSystems/cloudshell-networking-devices
cloudshell/devices/autoload/autoload_migration_helper.py
migrate_autoload_details
def migrate_autoload_details(autoload_details, shell_name, shell_type): """ Migrate autoload details. Add namespace for attributes :param autoload_details: :param shell_name: :param shell_type: :return: """ mapping = {} for resource in autoload_details.resources: resource.model = "{shell_name}.{model}".format(shell_name=shell_name, model=resource.model) mapping[resource.relative_address] = resource.model for attribute in autoload_details.attributes: if not attribute.relative_address: # Root element attribute.attribute_name = "{shell_type}.{attr_name}".format(shell_type=shell_type, attr_name=attribute.attribute_name) else: attribute.attribute_name = "{model}.{attr_name}".format(model=mapping[attribute.relative_address], attr_name=attribute.attribute_name) return autoload_details
python
def migrate_autoload_details(autoload_details, shell_name, shell_type): """ Migrate autoload details. Add namespace for attributes :param autoload_details: :param shell_name: :param shell_type: :return: """ mapping = {} for resource in autoload_details.resources: resource.model = "{shell_name}.{model}".format(shell_name=shell_name, model=resource.model) mapping[resource.relative_address] = resource.model for attribute in autoload_details.attributes: if not attribute.relative_address: # Root element attribute.attribute_name = "{shell_type}.{attr_name}".format(shell_type=shell_type, attr_name=attribute.attribute_name) else: attribute.attribute_name = "{model}.{attr_name}".format(model=mapping[attribute.relative_address], attr_name=attribute.attribute_name) return autoload_details
[ "def", "migrate_autoload_details", "(", "autoload_details", ",", "shell_name", ",", "shell_type", ")", ":", "mapping", "=", "{", "}", "for", "resource", "in", "autoload_details", ".", "resources", ":", "resource", ".", "model", "=", "\"{shell_name}.{model}\"", "."...
Migrate autoload details. Add namespace for attributes :param autoload_details: :param shell_name: :param shell_type: :return:
[ "Migrate", "autoload", "details", ".", "Add", "namespace", "for", "attributes" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/autoload/autoload_migration_helper.py#L76-L99
train
50,029
horejsek/python-sqlpuzzle
sqlpuzzle/_common/sqlvalue.py
SqlValue._get_convert_method
def _get_convert_method(self): """ Get right method to convert of the value. """ for type_, method in self._map.items(): if type(self.value) is bool and type_ is not bool: continue if isinstance(self.value, type_): return method if is_sql_instance(self.value): return self._raw return self._undefined
python
def _get_convert_method(self): """ Get right method to convert of the value. """ for type_, method in self._map.items(): if type(self.value) is bool and type_ is not bool: continue if isinstance(self.value, type_): return method if is_sql_instance(self.value): return self._raw return self._undefined
[ "def", "_get_convert_method", "(", "self", ")", ":", "for", "type_", ",", "method", "in", "self", ".", "_map", ".", "items", "(", ")", ":", "if", "type", "(", "self", ".", "value", ")", "is", "bool", "and", "type_", "is", "not", "bool", ":", "conti...
Get right method to convert of the value.
[ "Get", "right", "method", "to", "convert", "of", "the", "value", "." ]
d3a42ed1b339b8eafddb8d2c28a3a5832b3998dd
https://github.com/horejsek/python-sqlpuzzle/blob/d3a42ed1b339b8eafddb8d2c28a3a5832b3998dd/sqlpuzzle/_common/sqlvalue.py#L68-L79
train
50,030
kmedian/korr
korr/flatten.py
flatten
def flatten(rho, pval, sortby="cor"): """Flatten correlation and p-value matrix Parameters: ----------- rho : ndarray Correlation Matrix pval : ndarray Matrix with p-values sortby : str sort the output table by - "cor" the highest absolute correlation coefficient - "pval" the lowest p-value Return: ------- tab : ndarray Table with (i, j, cor, pval) rows Example: -------- from korr import pearson, flatten rho, pval = pearson(X) tab = flatten(rho, pval, sortby="pval") tab.values """ n = rho.shape[0] idx = np.triu_indices(n, k=1) tab = pd.DataFrame( columns=['i', 'j', 'cor', 'pval'], data=np.c_[idx[0], idx[1], rho[idx], pval[idx]]) tab[['i', "j"]] = tab[['i', "j"]].astype(int) if sortby == "cor": tab['abscor'] = np.abs(tab['cor']) tab.sort_values(by='abscor', inplace=True, ascending=False) elif sortby == "pval": tab.sort_values(by='pval', inplace=True, ascending=True) return tab[["i", "j", "cor", "pval"]]
python
def flatten(rho, pval, sortby="cor"): """Flatten correlation and p-value matrix Parameters: ----------- rho : ndarray Correlation Matrix pval : ndarray Matrix with p-values sortby : str sort the output table by - "cor" the highest absolute correlation coefficient - "pval" the lowest p-value Return: ------- tab : ndarray Table with (i, j, cor, pval) rows Example: -------- from korr import pearson, flatten rho, pval = pearson(X) tab = flatten(rho, pval, sortby="pval") tab.values """ n = rho.shape[0] idx = np.triu_indices(n, k=1) tab = pd.DataFrame( columns=['i', 'j', 'cor', 'pval'], data=np.c_[idx[0], idx[1], rho[idx], pval[idx]]) tab[['i', "j"]] = tab[['i', "j"]].astype(int) if sortby == "cor": tab['abscor'] = np.abs(tab['cor']) tab.sort_values(by='abscor', inplace=True, ascending=False) elif sortby == "pval": tab.sort_values(by='pval', inplace=True, ascending=True) return tab[["i", "j", "cor", "pval"]]
[ "def", "flatten", "(", "rho", ",", "pval", ",", "sortby", "=", "\"cor\"", ")", ":", "n", "=", "rho", ".", "shape", "[", "0", "]", "idx", "=", "np", ".", "triu_indices", "(", "n", ",", "k", "=", "1", ")", "tab", "=", "pd", ".", "DataFrame", "(...
Flatten correlation and p-value matrix Parameters: ----------- rho : ndarray Correlation Matrix pval : ndarray Matrix with p-values sortby : str sort the output table by - "cor" the highest absolute correlation coefficient - "pval" the lowest p-value Return: ------- tab : ndarray Table with (i, j, cor, pval) rows Example: -------- from korr import pearson, flatten rho, pval = pearson(X) tab = flatten(rho, pval, sortby="pval") tab.values
[ "Flatten", "correlation", "and", "p", "-", "value", "matrix" ]
4eb86fc14b1fc1b69204069b7753d115b327c937
https://github.com/kmedian/korr/blob/4eb86fc14b1fc1b69204069b7753d115b327c937/korr/flatten.py#L5-L48
train
50,031
wallento/riscv-python-model
riscvmodel/types.py
Immediate.max
def max(self) -> int: """ Get the maximum value this immediate can have :return: Maximum value of this immediate """ if self.signed: v = (1 << (self.bits - 1)) - 1 else: v = (1 << self.bits) - 1 if self.lsb0: v = v - (v % 2) return v
python
def max(self) -> int: """ Get the maximum value this immediate can have :return: Maximum value of this immediate """ if self.signed: v = (1 << (self.bits - 1)) - 1 else: v = (1 << self.bits) - 1 if self.lsb0: v = v - (v % 2) return v
[ "def", "max", "(", "self", ")", "->", "int", ":", "if", "self", ".", "signed", ":", "v", "=", "(", "1", "<<", "(", "self", ".", "bits", "-", "1", ")", ")", "-", "1", "else", ":", "v", "=", "(", "1", "<<", "self", ".", "bits", ")", "-", ...
Get the maximum value this immediate can have :return: Maximum value of this immediate
[ "Get", "the", "maximum", "value", "this", "immediate", "can", "have" ]
51df07d16b79b143eb3d3c1e95bf26030c64a39b
https://github.com/wallento/riscv-python-model/blob/51df07d16b79b143eb3d3c1e95bf26030c64a39b/riscvmodel/types.py#L34-L46
train
50,032
wallento/riscv-python-model
riscvmodel/types.py
Immediate.set_from_bits
def set_from_bits(self, value: int): """ Set the immediate value from machine code bits. Those are not sign extended, so it will take care of the proper handling. :param value: Value to set the immediate to :type value: int """ if self.signed: value = -(value & self.tcmask) + (value & ~self.tcmask) self.set(value)
python
def set_from_bits(self, value: int): """ Set the immediate value from machine code bits. Those are not sign extended, so it will take care of the proper handling. :param value: Value to set the immediate to :type value: int """ if self.signed: value = -(value & self.tcmask) + (value & ~self.tcmask) self.set(value)
[ "def", "set_from_bits", "(", "self", ",", "value", ":", "int", ")", ":", "if", "self", ".", "signed", ":", "value", "=", "-", "(", "value", "&", "self", ".", "tcmask", ")", "+", "(", "value", "&", "~", "self", ".", "tcmask", ")", "self", ".", "...
Set the immediate value from machine code bits. Those are not sign extended, so it will take care of the proper handling. :param value: Value to set the immediate to :type value: int
[ "Set", "the", "immediate", "value", "from", "machine", "code", "bits", ".", "Those", "are", "not", "sign", "extended", "so", "it", "will", "take", "care", "of", "the", "proper", "handling", "." ]
51df07d16b79b143eb3d3c1e95bf26030c64a39b
https://github.com/wallento/riscv-python-model/blob/51df07d16b79b143eb3d3c1e95bf26030c64a39b/riscvmodel/types.py#L85-L95
train
50,033
wallento/riscv-python-model
riscvmodel/types.py
Immediate.randomize
def randomize(self): """ Randomize this immediate to a legal value """ self.value = randint(self.min(), self.max()) if self.lsb0: self.value = self.value - (self.value % 2)
python
def randomize(self): """ Randomize this immediate to a legal value """ self.value = randint(self.min(), self.max()) if self.lsb0: self.value = self.value - (self.value % 2)
[ "def", "randomize", "(", "self", ")", ":", "self", ".", "value", "=", "randint", "(", "self", ".", "min", "(", ")", ",", "self", ".", "max", "(", ")", ")", "if", "self", ".", "lsb0", ":", "self", ".", "value", "=", "self", ".", "value", "-", ...
Randomize this immediate to a legal value
[ "Randomize", "this", "immediate", "to", "a", "legal", "value" ]
51df07d16b79b143eb3d3c1e95bf26030c64a39b
https://github.com/wallento/riscv-python-model/blob/51df07d16b79b143eb3d3c1e95bf26030c64a39b/riscvmodel/types.py#L97-L103
train
50,034
Workiva/furious
furious/context/_local.py
_init
def _init(): """Initialize the furious context and registry. NOTE: Do not directly run this method. """ # If there is a context and it is initialized to this request, # return, otherwise reinitialize the _local_context. if (hasattr(_local_context, '_initialized') and _local_context._initialized == os.environ.get('REQUEST_ID_HASH')): return # Used to track the context object stack. _local_context.registry = [] # Used to provide easy access to the currently running Async job. _local_context._executing_async_context = None _local_context._executing_async = [] # So that we do not inadvertently reinitialize the local context. _local_context._initialized = os.environ.get('REQUEST_ID_HASH') return _local_context
python
def _init(): """Initialize the furious context and registry. NOTE: Do not directly run this method. """ # If there is a context and it is initialized to this request, # return, otherwise reinitialize the _local_context. if (hasattr(_local_context, '_initialized') and _local_context._initialized == os.environ.get('REQUEST_ID_HASH')): return # Used to track the context object stack. _local_context.registry = [] # Used to provide easy access to the currently running Async job. _local_context._executing_async_context = None _local_context._executing_async = [] # So that we do not inadvertently reinitialize the local context. _local_context._initialized = os.environ.get('REQUEST_ID_HASH') return _local_context
[ "def", "_init", "(", ")", ":", "# If there is a context and it is initialized to this request,", "# return, otherwise reinitialize the _local_context.", "if", "(", "hasattr", "(", "_local_context", ",", "'_initialized'", ")", "and", "_local_context", ".", "_initialized", "==", ...
Initialize the furious context and registry. NOTE: Do not directly run this method.
[ "Initialize", "the", "furious", "context", "and", "registry", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/context/_local.py#L50-L71
train
50,035
Workiva/furious
furious/processors.py
_handle_results
def _handle_results(options): """Process the results of executing the Async's target.""" results_processor = options.get('_process_results') if not results_processor: results_processor = _process_results processor_result = results_processor() if isinstance(processor_result, (Async, Context)): processor_result.start()
python
def _handle_results(options): """Process the results of executing the Async's target.""" results_processor = options.get('_process_results') if not results_processor: results_processor = _process_results processor_result = results_processor() if isinstance(processor_result, (Async, Context)): processor_result.start()
[ "def", "_handle_results", "(", "options", ")", ":", "results_processor", "=", "options", ".", "get", "(", "'_process_results'", ")", "if", "not", "results_processor", ":", "results_processor", "=", "_process_results", "processor_result", "=", "results_processor", "(",...
Process the results of executing the Async's target.
[ "Process", "the", "results", "of", "executing", "the", "Async", "s", "target", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/processors.py#L78-L86
train
50,036
Workiva/furious
furious/processors.py
encode_exception
def encode_exception(exception): """Encode exception to a form that can be passed around and serialized. This will grab the stack, then strip off the last two calls which are encode_exception and the function that called it. """ import sys return AsyncException(unicode(exception), exception.args, sys.exc_info(), exception)
python
def encode_exception(exception): """Encode exception to a form that can be passed around and serialized. This will grab the stack, then strip off the last two calls which are encode_exception and the function that called it. """ import sys return AsyncException(unicode(exception), exception.args, sys.exc_info(), exception)
[ "def", "encode_exception", "(", "exception", ")", ":", "import", "sys", "return", "AsyncException", "(", "unicode", "(", "exception", ")", ",", "exception", ".", "args", ",", "sys", ".", "exc_info", "(", ")", ",", "exception", ")" ]
Encode exception to a form that can be passed around and serialized. This will grab the stack, then strip off the last two calls which are encode_exception and the function that called it.
[ "Encode", "exception", "to", "a", "form", "that", "can", "be", "passed", "around", "and", "serialized", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/processors.py#L100-L110
train
50,037
Workiva/furious
furious/processors.py
_execute_callback
def _execute_callback(async, callback): """Execute the given callback or insert the Async callback, or if no callback is given return the async.result. """ from furious.async import Async if not callback: return async.result.payload if isinstance(callback, Async): return callback.start() return callback()
python
def _execute_callback(async, callback): """Execute the given callback or insert the Async callback, or if no callback is given return the async.result. """ from furious.async import Async if not callback: return async.result.payload if isinstance(callback, Async): return callback.start() return callback()
[ "def", "_execute_callback", "(", "async", ",", "callback", ")", ":", "from", "furious", ".", "async", "import", "Async", "if", "not", "callback", ":", "return", "async", ".", "result", ".", "payload", "if", "isinstance", "(", "callback", ",", "Async", ")",...
Execute the given callback or insert the Async callback, or if no callback is given return the async.result.
[ "Execute", "the", "given", "callback", "or", "insert", "the", "Async", "callback", "or", "if", "no", "callback", "is", "given", "return", "the", "async", ".", "result", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/processors.py#L130-L142
train
50,038
Workiva/furious
example/complex_workflow.py
complex_state_generator_bravo
def complex_state_generator_bravo(last_state=''): """Pick a state.""" from random import choice states = ['ALPHA', 'BRAVO', 'BRAVO', 'DONE'] if last_state: states.remove(last_state) # Slightly lower chances of previous state. state = choice(states) logging.info('Generating a state... %s', state) return state
python
def complex_state_generator_bravo(last_state=''): """Pick a state.""" from random import choice states = ['ALPHA', 'BRAVO', 'BRAVO', 'DONE'] if last_state: states.remove(last_state) # Slightly lower chances of previous state. state = choice(states) logging.info('Generating a state... %s', state) return state
[ "def", "complex_state_generator_bravo", "(", "last_state", "=", "''", ")", ":", "from", "random", "import", "choice", "states", "=", "[", "'ALPHA'", ",", "'BRAVO'", ",", "'BRAVO'", ",", "'DONE'", "]", "if", "last_state", ":", "states", ".", "remove", "(", ...
Pick a state.
[ "Pick", "a", "state", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/complex_workflow.py#L61-L73
train
50,039
Workiva/furious
example/complex_workflow.py
state_machine_success
def state_machine_success(): """A positive result! Iterate!""" from furious.async import Async from furious.context import get_current_async result = get_current_async().result if result == 'ALPHA': logging.info('Inserting continuation for state %s.', result) return Async(target=complex_state_generator_alpha, args=[result]) elif result == 'BRAVO': logging.info('Inserting continuation for state %s.', result) return Async(target=complex_state_generator_bravo, args=[result]) logging.info('Done working, stop now.')
python
def state_machine_success(): """A positive result! Iterate!""" from furious.async import Async from furious.context import get_current_async result = get_current_async().result if result == 'ALPHA': logging.info('Inserting continuation for state %s.', result) return Async(target=complex_state_generator_alpha, args=[result]) elif result == 'BRAVO': logging.info('Inserting continuation for state %s.', result) return Async(target=complex_state_generator_bravo, args=[result]) logging.info('Done working, stop now.')
[ "def", "state_machine_success", "(", ")", ":", "from", "furious", ".", "async", "import", "Async", "from", "furious", ".", "context", "import", "get_current_async", "result", "=", "get_current_async", "(", ")", ".", "result", "if", "result", "==", "'ALPHA'", "...
A positive result! Iterate!
[ "A", "positive", "result!", "Iterate!" ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/complex_workflow.py#L76-L91
train
50,040
upsight/doctor
doctor/docs/base.py
get_example_curl_lines
def get_example_curl_lines(method: str, url: str, params: dict, headers: dict) -> List[str]: """Render a cURL command for the given request. :param str method: HTTP request method (e.g. "GET"). :param str url: HTTP request URL. :param dict params: JSON body, for POST and PUT requests. :param dict headers: A dict of HTTP headers. :returns: list """ parts = ['curl {}'.format(pipes.quote(url))] parts.append('-X {}'.format(method)) for header in headers: parts.append("-H '{}: {}'".format(header, headers[header])) if method not in ('DELETE', 'GET'): # Don't append a json body if there are no params. if params: parts.append("-H 'Content-Type: application/json' -d") pretty_json = json.dumps(params, separators=(',', ': '), indent=4, sort_keys=True) # add indentation for the closing bracket of the json body json_lines = pretty_json.split('\n') json_lines[-1] = ' ' + json_lines[-1] pretty_json = '\n'.join(json_lines) parts.append(pipes.quote(pretty_json)) wrapped = [parts.pop(0)] for part in parts: if len(wrapped[-1]) + len(part) < 80: wrapped[-1] += ' ' + part else: wrapped[-1] += ' \\' wrapped.append(' ' + part) return wrapped
python
def get_example_curl_lines(method: str, url: str, params: dict, headers: dict) -> List[str]: """Render a cURL command for the given request. :param str method: HTTP request method (e.g. "GET"). :param str url: HTTP request URL. :param dict params: JSON body, for POST and PUT requests. :param dict headers: A dict of HTTP headers. :returns: list """ parts = ['curl {}'.format(pipes.quote(url))] parts.append('-X {}'.format(method)) for header in headers: parts.append("-H '{}: {}'".format(header, headers[header])) if method not in ('DELETE', 'GET'): # Don't append a json body if there are no params. if params: parts.append("-H 'Content-Type: application/json' -d") pretty_json = json.dumps(params, separators=(',', ': '), indent=4, sort_keys=True) # add indentation for the closing bracket of the json body json_lines = pretty_json.split('\n') json_lines[-1] = ' ' + json_lines[-1] pretty_json = '\n'.join(json_lines) parts.append(pipes.quote(pretty_json)) wrapped = [parts.pop(0)] for part in parts: if len(wrapped[-1]) + len(part) < 80: wrapped[-1] += ' ' + part else: wrapped[-1] += ' \\' wrapped.append(' ' + part) return wrapped
[ "def", "get_example_curl_lines", "(", "method", ":", "str", ",", "url", ":", "str", ",", "params", ":", "dict", ",", "headers", ":", "dict", ")", "->", "List", "[", "str", "]", ":", "parts", "=", "[", "'curl {}'", ".", "format", "(", "pipes", ".", ...
Render a cURL command for the given request. :param str method: HTTP request method (e.g. "GET"). :param str url: HTTP request URL. :param dict params: JSON body, for POST and PUT requests. :param dict headers: A dict of HTTP headers. :returns: list
[ "Render", "a", "cURL", "command", "for", "the", "given", "request", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L62-L94
train
50,041
upsight/doctor
doctor/docs/base.py
get_example_lines
def get_example_lines(headers: Dict[str, str], method: str, url: str, params: Dict[str, Any], response: str) -> List[str]: """Render a reStructuredText example for the given request and response. :param dict headers: A dict of HTTP headers. :param str method: HTTP request method (e.g. "GET"). :param str url: HTTP request URL. :param dict params: Form parameters, for POST and PUT requests. :param str response: Text response body. :returns: list """ lines = ['', 'Example Request:', '', '.. code-block:: bash', ''] lines.extend(prefix_lines( get_example_curl_lines(method, url, params, headers), ' ')) lines.extend(['', 'Example Response:', '']) try: # Try to parse and prettify the response as JSON. If it fails # (for whatever reason), we'll treat it as text instead. response = json.dumps(json.loads(response), indent=2, separators=(',', ': '), sort_keys=True) lines.extend(['.. code-block:: json', '']) except Exception: lines.extend(['.. code-block:: text', '']) lines.extend(prefix_lines(response, ' ')) return lines
python
def get_example_lines(headers: Dict[str, str], method: str, url: str, params: Dict[str, Any], response: str) -> List[str]: """Render a reStructuredText example for the given request and response. :param dict headers: A dict of HTTP headers. :param str method: HTTP request method (e.g. "GET"). :param str url: HTTP request URL. :param dict params: Form parameters, for POST and PUT requests. :param str response: Text response body. :returns: list """ lines = ['', 'Example Request:', '', '.. code-block:: bash', ''] lines.extend(prefix_lines( get_example_curl_lines(method, url, params, headers), ' ')) lines.extend(['', 'Example Response:', '']) try: # Try to parse and prettify the response as JSON. If it fails # (for whatever reason), we'll treat it as text instead. response = json.dumps(json.loads(response), indent=2, separators=(',', ': '), sort_keys=True) lines.extend(['.. code-block:: json', '']) except Exception: lines.extend(['.. code-block:: text', '']) lines.extend(prefix_lines(response, ' ')) return lines
[ "def", "get_example_lines", "(", "headers", ":", "Dict", "[", "str", ",", "str", "]", ",", "method", ":", "str", ",", "url", ":", "str", ",", "params", ":", "Dict", "[", "str", ",", "Any", "]", ",", "response", ":", "str", ")", "->", "List", "[",...
Render a reStructuredText example for the given request and response. :param dict headers: A dict of HTTP headers. :param str method: HTTP request method (e.g. "GET"). :param str url: HTTP request URL. :param dict params: Form parameters, for POST and PUT requests. :param str response: Text response body. :returns: list
[ "Render", "a", "reStructuredText", "example", "for", "the", "given", "request", "and", "response", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L97-L121
train
50,042
upsight/doctor
doctor/docs/base.py
get_object_reference
def get_object_reference(obj: Object) -> str: """Gets an object reference string from the obj instance. This adds the object type to ALL_RESOURCES so that it gets documented and returns a str which contains a sphinx reference to the documented object. :param obj: The Object instance. :returns: A sphinx docs reference str. """ resource_name = obj.title if resource_name is None: class_name = obj.__name__ resource_name = class_name_to_resource_name(class_name) ALL_RESOURCES[resource_name] = obj return ' See :ref:`resource-{}`.'.format( '-'.join(resource_name.split(' ')).lower().strip())
python
def get_object_reference(obj: Object) -> str: """Gets an object reference string from the obj instance. This adds the object type to ALL_RESOURCES so that it gets documented and returns a str which contains a sphinx reference to the documented object. :param obj: The Object instance. :returns: A sphinx docs reference str. """ resource_name = obj.title if resource_name is None: class_name = obj.__name__ resource_name = class_name_to_resource_name(class_name) ALL_RESOURCES[resource_name] = obj return ' See :ref:`resource-{}`.'.format( '-'.join(resource_name.split(' ')).lower().strip())
[ "def", "get_object_reference", "(", "obj", ":", "Object", ")", "->", "str", ":", "resource_name", "=", "obj", ".", "title", "if", "resource_name", "is", "None", ":", "class_name", "=", "obj", ".", "__name__", "resource_name", "=", "class_name_to_resource_name", ...
Gets an object reference string from the obj instance. This adds the object type to ALL_RESOURCES so that it gets documented and returns a str which contains a sphinx reference to the documented object. :param obj: The Object instance. :returns: A sphinx docs reference str.
[ "Gets", "an", "object", "reference", "string", "from", "the", "obj", "instance", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L124-L139
train
50,043
upsight/doctor
doctor/docs/base.py
get_array_items_description
def get_array_items_description(item: Array) -> str:
    """Returns a description for an array's items.

    :param item: The Array type whose items should be documented.
    :returns: A string documenting what type the array's items should be.
    """
    desc = ''
    if isinstance(item.items, list):
        # This means the type has a list of types where each position is
        # mapped to a different type.  Document what each type should be.
        desc = ''
        item_pos_template = (
            ' *Item {pos} must be*: {description}{enum}{ref}')
        # NOTE: the loop variable is named `pos_type` here; the original
        # shadowed the `item` parameter, which was harmless in this branch
        # but error-prone for future edits.
        for pos, pos_type in enumerate(item.items):
            _enum = ''
            ref = ''
            if issubclass(pos_type, Enum):
                _enum = ' Must be one of: `{}`'.format(pos_type.enum)
                if pos_type.case_insensitive:
                    _enum += ' (case-insensitive)'
                _enum += '.'
            elif issubclass(pos_type, Object):
                ref = get_object_reference(pos_type)
            desc += item_pos_template.format(
                pos=pos, description=pos_type.description, enum=_enum,
                ref=ref)
    else:
        # Otherwise just document the type assigned to `items`.
        desc = item.items.description
        _enum = ''
        ref = ''
        if issubclass(item.items, Enum):
            _enum = ' Must be one of: `{}`'.format(
                item.items.enum)
            if item.items.case_insensitive:
                _enum += ' (case-insensitive)'
            _enum += '.'
        elif issubclass(item.items, Object):
            ref = get_object_reference(item.items)
        desc = ' *Items must be*: {description}{enum}{ref}'.format(
            description=desc, enum=_enum, ref=ref)
    return desc
python
def get_array_items_description(item: Array) -> str: """Returns a description for an array's items. :param item: The Array type whose items should be documented. :returns: A string documenting what type the array's items should be. """ desc = '' if isinstance(item.items, list): # This means the type has a list of types where each position is # mapped to a different type. Document what each type should be. desc = '' item_pos_template = ( ' *Item {pos} must be*: {description}{enum}{ref}') for pos, item in enumerate(item.items): _enum = '' ref = '' if issubclass(item, Enum): _enum = ' Must be one of: `{}`'.format(item.enum) if item.case_insensitive: _enum += ' (case-insensitive)' _enum += '.' elif issubclass(item, Object): ref = get_object_reference(item) desc += item_pos_template.format( pos=pos, description=item.description, enum=_enum, ref=ref) else: # Otherwise just document the type assigned to `items`. desc = item.items.description _enum = '' ref = '' if issubclass(item.items, Enum): _enum = ' Must be one of: `{}`'.format( item.items.enum) if item.items.case_insensitive: _enum += ' (case-insensitive)' _enum += '.' elif issubclass(item.items, Object): ref = get_object_reference(item.items) desc = ' *Items must be*: {description}{enum}{ref}'.format( description=desc, enum=_enum, ref=ref) return desc
[ "def", "get_array_items_description", "(", "item", ":", "Array", ")", "->", "str", ":", "desc", "=", "''", "if", "isinstance", "(", "item", ".", "items", ",", "list", ")", ":", "# This means the type has a list of types where each position is", "# mapped to a differen...
Returns a description for an array's items. :param item: The Array type whose items should be documented. :returns: A string documenting what type the array's items should be.
[ "Returns", "a", "description", "for", "an", "array", "s", "items", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L142-L185
train
50,044
upsight/doctor
doctor/docs/base.py
get_json_types
def get_json_types(annotated_type: SuperType) -> List[str]:
    """Returns the json types for the provided annotated type.

    This handles special cases for when we encounter UnionType and an
    Array.  UnionType's will have all valid types returned.  An Array will
    document what the items type is by placing that value in brackets,
    e.g. `list[str]`.

    :param annotated_type: A subclass of SuperType.
    :returns: A list of json types.
    """
    if issubclass(annotated_type, UnionType):
        # Every member of the union is a valid type.
        return [str(t.native_type.__name__) for t in annotated_type.types]
    if not issubclass(annotated_type, Array):
        return [str(annotated_type.native_type.__name__)]
    # Array: include the item type(s) in brackets when items is defined.
    if annotated_type.items is None:
        return ['list']
    if isinstance(annotated_type.items, list):
        # items are different at each index.
        item_names = [
            str(t.native_type.__name__) for t in annotated_type.items]
        return ['list[{}]'.format(','.join(item_names))]
    # items are all of the same type.
    return ['list[{}]'.format(
        str(annotated_type.items.native_type.__name__))]
python
def get_json_types(annotated_type: SuperType) -> List[str]: """Returns the json types for the provided annotated type. This handles special cases for when we encounter UnionType and an Array. UnionType's will have all valid types returned. An Array will document what the items type is by placing that value in brackets, e.g. `list[str]`. :param annotated_type: A subclass of SuperType. :returns: A list of json types. """ types = [] if issubclass(annotated_type, UnionType): types = [str(t.native_type.__name__) for t in annotated_type.types] elif issubclass(annotated_type, Array): # Include the type of items in the list if items is defined. if annotated_type.items is not None: if not isinstance(annotated_type.items, list): # items are all of the same type. types.append('list[{}]'.format( str(annotated_type.items.native_type.__name__))) else: # items are different at each index. _types = [ str(t.native_type.__name__) for t in annotated_type.items] types.append('list[{}]'.format(','.join(_types))) else: types.append('list') else: types.append(str(annotated_type.native_type.__name__)) return types
[ "def", "get_json_types", "(", "annotated_type", ":", "SuperType", ")", "->", "List", "[", "str", "]", ":", "types", "=", "[", "]", "if", "issubclass", "(", "annotated_type", ",", "UnionType", ")", ":", "types", "=", "[", "str", "(", "t", ".", "native_t...
Returns the json types for the provided annotated type. This handles special cases for when we encounter UnionType and an Array. UnionType's will have all valid types returned. An Array will document what the items type is by placing that value in brackets, e.g. `list[str]`. :param annotated_type: A subclass of SuperType. :returns: A list of json types.
[ "Returns", "the", "json", "types", "for", "the", "provided", "annotated", "type", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L188-L218
train
50,045
upsight/doctor
doctor/docs/base.py
get_json_object_lines
def get_json_object_lines(annotation: ResourceAnnotation,
                          properties: Dict[str, Any], field: str,
                          url_params: Dict, request: bool = False,
                          object_property: bool = False) -> List[str]:
    """Generate documentation for the given object annotation.

    Properties are emitted in alphabetical order, with required request
    properties listed before optional ones.

    :param doctor.resource.ResourceAnnotation annotation: Annotation object
        for the associated handler method.
    :param dict properties: Maps property name to its annotated type.
    :param str field: Sphinx field type to use (e.g. '<json').
    :param list url_params: A list of url parameter strings.
    :param bool request: Whether the schema is for the request or not.
    :param bool object_property: If True it indicates this is a property of
        an object that we are documenting.  This is only set to True when
        called recursively when encountering a property that is an object in
        order to document the properties of it.
        NOTE(review): this parameter is not referenced anywhere in this
        body — confirm whether it is still needed by callers.
    :returns: list of strings, one for each line.
    """
    sig_params = annotation.logic._doctor_signature.parameters
    required_lines = []
    lines = []
    default_field = field
    for prop in sorted(properties.keys()):
        annotated_type = properties[prop]
        # If the property is a url parameter override the field to use
        # param so that it's not documented in the json body or query params.
        field = default_field
        if request and prop in url_params:
            field = 'param'
        types = get_json_types(annotated_type)
        description = annotated_type.description
        obj_ref = ''
        if issubclass(annotated_type, Object):
            obj_ref = get_object_reference(annotated_type)
        elif (issubclass(annotated_type, Array) and
                annotated_type.items is not None and
                not isinstance(annotated_type.items, list) and
                issubclass(annotated_type.items, Object)):
            # This means the type is an array of objects, so we want to
            # collect the object as a resource we can document later.
            obj_ref = get_object_reference(annotated_type.items)
        elif (issubclass(annotated_type, Array) and
                isinstance(annotated_type.items, list)):
            # This means the type is array and items is a list of types.
            # Iterate through each type to see if any are objects that we
            # can document.
            for item in annotated_type.items:
                if issubclass(item, Object):
                    # Note: we are just adding them to the global variable
                    # ALL_RESOURCES when calling the function below and not
                    # using the return value as this special case is handled
                    # below in documenting items of an array.
                    get_object_reference(item)
        # Document any enum.
        enum = ''
        if issubclass(annotated_type, Enum):
            enum = ' Must be one of: `{}`'.format(annotated_type.enum)
            if annotated_type.case_insensitive:
                enum += ' (case-insensitive)'
            enum += '.'
        # Document type(s) for an array's items.
        if (issubclass(annotated_type, Array) and
                annotated_type.items is not None):
            array_description = get_array_items_description(annotated_type)
            # Prevents creating a duplicate object reference link in the docs.
            if obj_ref in array_description:
                obj_ref = ''
            description += array_description
        # Document any default value (taken from the logic function's
        # signature, so only applicable on the request side).
        default = ''
        if (request and prop in sig_params and
                sig_params[prop].default != Signature.empty):
            default = ' (Defaults to `{}`) '.format(sig_params[prop].default)
        field_prop = prop
        # If this is a request param and the property is required
        # add required text and append lines to required_lines.  This
        # will make the required properties appear in alphabetical order
        # before the optional.
        line_template = (
            ':{field} {types} {prop}: {description}{enum}{default}{obj_ref}')
        if request and prop in annotation.params.required:
            description = '**Required**. ' + description
            required_lines.append(line_template.format(
                field=field, types=','.join(types), prop=field_prop,
                description=description, enum=enum, obj_ref=obj_ref,
                default=default))
        else:
            lines.append(line_template.format(
                field=field, types=','.join(types), prop=field_prop,
                description=description, enum=enum, obj_ref=obj_ref,
                default=default))
    return required_lines + lines
python
def get_json_object_lines(annotation: ResourceAnnotation, properties: Dict[str, Any], field: str, url_params: Dict, request: bool = False, object_property: bool = False) -> List[str]: """Generate documentation for the given object annotation. :param doctor.resource.ResourceAnnotation annotation: Annotation object for the associated handler method. :param str field: Sphinx field type to use (e.g. '<json'). :param list url_params: A list of url parameter strings. :param bool request: Whether the schema is for the request or not. :param bool object_property: If True it indicates this is a property of an object that we are documenting. This is only set to True when called recursively when encountering a property that is an object in order to document the properties of it. :returns: list of strings, one for each line. """ sig_params = annotation.logic._doctor_signature.parameters required_lines = [] lines = [] default_field = field for prop in sorted(properties.keys()): annotated_type = properties[prop] # If the property is a url parameter override the field to use # param so that it's not documented in the json body or query params. field = default_field if request and prop in url_params: field = 'param' types = get_json_types(annotated_type) description = annotated_type.description obj_ref = '' if issubclass(annotated_type, Object): obj_ref = get_object_reference(annotated_type) elif (issubclass(annotated_type, Array) and annotated_type.items is not None and not isinstance(annotated_type.items, list) and issubclass(annotated_type.items, Object)): # This means the type is an array of objects, so we want to # collect the object as a resource we can document later. obj_ref = get_object_reference(annotated_type.items) elif (issubclass(annotated_type, Array) and isinstance(annotated_type.items, list)): # This means the type is array and items is a list of types. Iterate # through each type to see if any are objects that we can document. 
for item in annotated_type.items: if issubclass(item, Object): # Note: we are just adding them to the global variable # ALL_RESOURCES when calling the function below and not # using the return value as this special case is handled # below in documenting items of an array. get_object_reference(item) # Document any enum. enum = '' if issubclass(annotated_type, Enum): enum = ' Must be one of: `{}`'.format(annotated_type.enum) if annotated_type.case_insensitive: enum += ' (case-insensitive)' enum += '.' # Document type(s) for an array's items. if (issubclass(annotated_type, Array) and annotated_type.items is not None): array_description = get_array_items_description(annotated_type) # Prevents creating a duplicate object reference link in the docs. if obj_ref in array_description: obj_ref = '' description += array_description # Document any default value. default = '' if (request and prop in sig_params and sig_params[prop].default != Signature.empty): default = ' (Defaults to `{}`) '.format(sig_params[prop].default) field_prop = prop # If this is a request param and the property is required # add required text and append lines to required_lines. This # will make the required properties appear in alphabetical order # before the optional. line_template = ( ':{field} {types} {prop}: {description}{enum}{default}{obj_ref}') if request and prop in annotation.params.required: description = '**Required**. ' + description required_lines.append(line_template.format( field=field, types=','.join(types), prop=field_prop, description=description, enum=enum, obj_ref=obj_ref, default=default)) else: lines.append(line_template.format( field=field, types=','.join(types), prop=field_prop, description=description, enum=enum, obj_ref=obj_ref, default=default)) return required_lines + lines
[ "def", "get_json_object_lines", "(", "annotation", ":", "ResourceAnnotation", ",", "properties", ":", "Dict", "[", "str", ",", "Any", "]", ",", "field", ":", "str", ",", "url_params", ":", "Dict", ",", "request", ":", "bool", "=", "False", ",", "object_pro...
Generate documentation for the given object annotation. :param doctor.resource.ResourceAnnotation annotation: Annotation object for the associated handler method. :param str field: Sphinx field type to use (e.g. '<json'). :param list url_params: A list of url parameter strings. :param bool request: Whether the schema is for the request or not. :param bool object_property: If True it indicates this is a property of an object that we are documenting. This is only set to True when called recursively when encountering a property that is an object in order to document the properties of it. :returns: list of strings, one for each line.
[ "Generate", "documentation", "for", "the", "given", "object", "annotation", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L221-L316
train
50,046
upsight/doctor
doctor/docs/base.py
get_json_lines
def get_json_lines(annotation: ResourceAnnotation, field: str, route: str,
                   request: bool = False) -> List:
    """Generate documentation lines for the given annotation.

    This only documents schemas of type "object", or type "list" where each
    "item" is an object.  Other types are ignored (but a warning is logged).

    :param doctor.resource.ResourceAnnotation annotation: Annotation object
        for the associated handler method.
    :param str field: Sphinx field type to use (e.g. '<json').
    :param str route: The route the annotation is attached to.
    :param bool request: Whether the resource annotation is for the request
        or not.
    :returns: list of strings, one for each line.
    """
    url_params = URL_PARAMS_RE.findall(route)
    if request:
        # If we defined a req_obj_type for the logic, use that type's
        # properties instead of the function signature.
        req_obj_type = annotation.logic._doctor_req_obj_type
        if req_obj_type:
            properties = req_obj_type.properties
        else:
            properties = {
                name: param.annotation
                for name, param in annotation.annotated_parameters.items()}
        return get_json_object_lines(annotation, properties, field,
                                     url_params, request)

    return_type = annotation.logic._doctor_signature.return_annotation
    # Check if our return annotation is a Response that supplied a type we
    # can use to document, e.g. def logic() -> Response[MyType].
    if issubclass(return_type, Response) and return_type.__args__ is not None:
        return_type = return_type.__args__[0]
    if issubclass(return_type, Array):
        if not issubclass(return_type.items, Object):
            return []
        properties = return_type.items.properties
        field += 'arr'
    elif issubclass(return_type, Object):
        properties = return_type.properties
    else:
        return []
    return get_json_object_lines(annotation, properties, field, url_params,
                                 request)
python
def get_json_lines(annotation: ResourceAnnotation, field: str, route: str, request: bool = False) -> List: """Generate documentation lines for the given annotation. This only documents schemas of type "object", or type "list" where each "item" is an object. Other types are ignored (but a warning is logged). :param doctor.resource.ResourceAnnotation annotation: Annotation object for the associated handler method. :param str field: Sphinx field type to use (e.g. '<json'). :param str route: The route the annotation is attached to. :param bool request: Whether the resource annotation is for the request or not. :returns: list of strings, one for each line. """ url_params = URL_PARAMS_RE.findall(route) if not request: return_type = annotation.logic._doctor_signature.return_annotation # Check if our return annotation is a Response that supplied a # type we can use to document. If so, use that type for api docs. # e.g. def logic() -> Response[MyType] if issubclass(return_type, Response): if return_type.__args__ is not None: return_type = return_type.__args__[0] if issubclass(return_type, Array): if issubclass(return_type.items, Object): properties = return_type.items.properties field += 'arr' else: return [] elif issubclass(return_type, Object): properties = return_type.properties else: return [] else: # If we defined a req_obj_type for the logic, use that type's # properties instead of the function signature. if annotation.logic._doctor_req_obj_type: properties = annotation.logic._doctor_req_obj_type.properties else: parameters = annotation.annotated_parameters properties = {k: p.annotation for k, p in parameters.items()} return get_json_object_lines(annotation, properties, field, url_params, request)
[ "def", "get_json_lines", "(", "annotation", ":", "ResourceAnnotation", ",", "field", ":", "str", ",", "route", ":", "str", ",", "request", ":", "bool", "=", "False", ")", "->", "List", ":", "url_params", "=", "URL_PARAMS_RE", ".", "findall", "(", "route", ...
Generate documentation lines for the given annotation. This only documents schemas of type "object", or type "list" where each "item" is an object. Other types are ignored (but a warning is logged). :param doctor.resource.ResourceAnnotation annotation: Annotation object for the associated handler method. :param str field: Sphinx field type to use (e.g. '<json'). :param str route: The route the annotation is attached to. :param bool request: Whether the resource annotation is for the request or not. :returns: list of strings, one for each line.
[ "Generate", "documentation", "lines", "for", "the", "given", "annotation", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L319-L362
train
50,047
upsight/doctor
doctor/docs/base.py
get_resource_object_doc_lines
def get_resource_object_doc_lines() -> List[str]:
    """Generate documentation lines for all collected resource objects.

    As API documentation is generated we keep a running list of objects used
    in request parameters and responses.  This section will generate
    documentation for each object and provide an inline reference in the API
    documentation.

    :returns: A list of lines required to generate the documentation.
    """
    # First loop through all resources and make sure to add any properties
    # that are objects and not already in `ALL_RESOURCES`.  We iterate over a
    # copy since we will be modifying the dict during the loop.
    # NOTE(review): `resource_name` from the outer loop is re-bound inside
    # the inner loop; the outer binding is never read afterwards so this is
    # harmless, but keep it in mind when editing.
    for resource_name, a_type in ALL_RESOURCES.copy().items():
        for prop_a_type in a_type.properties.values():
            if issubclass(prop_a_type, Object):
                resource_name = prop_a_type.title
                if resource_name is None:
                    class_name = prop_a_type.__name__
                    resource_name = class_name_to_resource_name(class_name)
                ALL_RESOURCES[resource_name] = prop_a_type
            elif (issubclass(prop_a_type, Array) and
                    prop_a_type.items is not None and
                    not isinstance(prop_a_type.items, list) and
                    issubclass(prop_a_type.items, Object)):
                # This means the type is an array of objects, so we want to
                # collect the object as a resource we can document later.
                resource_name = prop_a_type.items.title
                if resource_name is None:
                    class_name = prop_a_type.items.__name__
                    resource_name = class_name_to_resource_name(class_name)
                ALL_RESOURCES[resource_name] = prop_a_type.items

    # If we don't have any resources to document, just return.
    if not ALL_RESOURCES:
        return []

    lines = ['Resource Objects', '----------------']
    for resource_name in sorted(ALL_RESOURCES.keys()):
        a_type = ALL_RESOURCES[resource_name]
        # First add a reference to the resource
        resource_ref = '_resource-{}'.format(
            '-'.join(resource_name.lower().split(' ')))
        lines.extend(['.. {}:'.format(resource_ref), ''])
        # Add resource name heading
        lines.extend([resource_name, '#' * len(resource_name)])
        # Add resource description
        lines.extend([a_type.description, ''])
        # Only document attributes if it has properties defined.
        if a_type.properties:
            # Add attributes documentation.
            lines.extend(['Attributes', '**********'])
            for prop in a_type.properties:
                prop_a_type = a_type.properties[prop]
                description = a_type.properties[prop].description.strip()
                # Add any object reference if the property is an object or
                # an array of objects.
                obj_ref = ''
                if issubclass(prop_a_type, Object):
                    obj_ref = get_object_reference(prop_a_type)
                elif (issubclass(prop_a_type, Array) and
                        prop_a_type.items is not None and
                        not isinstance(prop_a_type.items, list) and
                        issubclass(prop_a_type.items, Object)):
                    # This means the type is an array of objects.
                    obj_ref = get_object_reference(prop_a_type.items)
                elif (issubclass(prop_a_type, Array) and
                        prop_a_type.items is not None):
                    description += get_array_items_description(prop_a_type)
                native_type = a_type.properties[prop].native_type.__name__
                if prop in a_type.required:
                    description = '**Required**. ' + description
                lines.append('* **{}** (*{}*) - {}{}'.format(
                    prop, native_type, description, obj_ref).strip())
            lines.append('')
        # Add example of object.
        lines.extend(['Example', '*******'])
        example = a_type.get_example()
        pretty_json = json.dumps(example, separators=(',', ': '), indent=4,
                                 sort_keys=True)
        pretty_json_lines = prefix_lines(pretty_json, ' ')
        lines.extend(['.. code-block:: json', ''])
        lines.extend(pretty_json_lines)
    return lines
python
def get_resource_object_doc_lines() -> List[str]: """Generate documentation lines for all collected resource objects. As API documentation is generated we keep a running list of objects used in request parameters and responses. This section will generate documentation for each object and provide an inline reference in the API documentation. :returns: A list of lines required to generate the documentation. """ # First loop through all resources and make sure to add any properties that # are objects and not already in `ALL_RESOURCES`. We iterate over a copy # since we will be modifying the dict during the loop. for resource_name, a_type in ALL_RESOURCES.copy().items(): for prop_a_type in a_type.properties.values(): if issubclass(prop_a_type, Object): resource_name = prop_a_type.title if resource_name is None: class_name = prop_a_type.__name__ resource_name = class_name_to_resource_name(class_name) ALL_RESOURCES[resource_name] = prop_a_type elif (issubclass(prop_a_type, Array) and prop_a_type.items is not None and not isinstance(prop_a_type.items, list) and issubclass(prop_a_type.items, Object)): # This means the type is an array of objects, so we want to # collect the object as a resource we can document later. resource_name = prop_a_type.items.title if resource_name is None: class_name = prop_a_type.items.__name__ resource_name = class_name_to_resource_name(class_name) ALL_RESOURCES[resource_name] = prop_a_type.items # If we don't have any resources to document, just return. if not ALL_RESOURCES: return [] lines = ['Resource Objects', '----------------'] for resource_name in sorted(ALL_RESOURCES.keys()): a_type = ALL_RESOURCES[resource_name] # First add a reference to the resource resource_ref = '_resource-{}'.format( '-'.join(resource_name.lower().split(' '))) lines.extend(['.. 
{}:'.format(resource_ref), '']) # Add resource name heading lines.extend([resource_name, '#' * len(resource_name)]) # Add resource description lines.extend([a_type.description, '']) # Only document attributes if it has properties defined. if a_type.properties: # Add attributes documentation. lines.extend(['Attributes', '**********']) for prop in a_type.properties: prop_a_type = a_type.properties[prop] description = a_type.properties[prop].description.strip() # Add any object reference if the property is an object or # an array of objects. obj_ref = '' if issubclass(prop_a_type, Object): obj_ref = get_object_reference(prop_a_type) elif (issubclass(prop_a_type, Array) and prop_a_type.items is not None and not isinstance(prop_a_type.items, list) and issubclass(prop_a_type.items, Object)): # This means the type is an array of objects. obj_ref = get_object_reference(prop_a_type.items) elif (issubclass(prop_a_type, Array) and prop_a_type.items is not None): description += get_array_items_description(prop_a_type) native_type = a_type.properties[prop].native_type.__name__ if prop in a_type.required: description = '**Required**. ' + description lines.append('* **{}** (*{}*) - {}{}'.format( prop, native_type, description, obj_ref).strip()) lines.append('') # Add example of object. lines.extend(['Example', '*******']) example = a_type.get_example() pretty_json = json.dumps(example, separators=(',', ': '), indent=4, sort_keys=True) pretty_json_lines = prefix_lines(pretty_json, ' ') lines.extend(['.. code-block:: json', '']) lines.extend(pretty_json_lines) return lines
[ "def", "get_resource_object_doc_lines", "(", ")", "->", "List", "[", "str", "]", ":", "# First loop through all resources and make sure to add any properties that", "# are objects and not already in `ALL_RESOURCES`. We iterate over a copy", "# since we will be modifying the dict during the...
Generate documentation lines for all collected resource objects. As API documentation is generated we keep a running list of objects used in request parameters and responses. This section will generate documentation for each object and provide an inline reference in the API documentation. :returns: A list of lines required to generate the documentation.
[ "Generate", "documentation", "lines", "for", "all", "collected", "resource", "objects", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L365-L448
train
50,048
upsight/doctor
doctor/docs/base.py
get_name
def get_name(value) -> str:
    """Return a best guess at the qualified name for a class or function.

    Builtins are returned without a module prefix; everything else is
    returned as ``module.name``.

    :param value: A class or function object.
    :type value: class or function
    :returns str:
    """
    # '__builtin__' is the Python 2 module name for builtins; on Python 3
    # it is 'builtins'.  Checking both means builtins like `len` never get
    # a redundant module prefix regardless of interpreter version.
    if value.__module__ in ('__builtin__', 'builtins'):
        return value.__name__
    return '.'.join((value.__module__, value.__name__))
python
def get_name(value) -> str: """Return a best guess at the qualified name for a class or function. :param value: A class or function object. :type value: class or function :returns str: """ if value.__module__ == '__builtin__': return value.__name__ else: return '.'.join((value.__module__, value.__name__))
[ "def", "get_name", "(", "value", ")", "->", "str", ":", "if", "value", ".", "__module__", "==", "'__builtin__'", ":", "return", "value", ".", "__name__", "else", ":", "return", "'.'", ".", "join", "(", "(", "value", ".", "__module__", ",", "value", "."...
Return a best guess at the qualified name for a class or function. :param value: A class or function object. :type value: class or function :returns str:
[ "Return", "a", "best", "guess", "at", "the", "qualified", "name", "for", "a", "class", "or", "function", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L451-L461
train
50,049
upsight/doctor
doctor/docs/base.py
normalize_route
def normalize_route(route: str) -> str:
    """Strip some of the ugly regexp characters from the given pattern.

    >>> normalize_route('^/user/<user_id:int>/?$')
    u'/user/(user_id:int)/'
    """
    # Drop the anchors and optional-trailing-slash marker, then swap the
    # angle-bracket parameter delimiters for parentheses.
    cleaned = str(route).lstrip('^').rstrip('$').rstrip('?')
    return cleaned.replace('<', '(').replace('>', ')')
python
def normalize_route(route: str) -> str: """Strip some of the ugly regexp characters from the given pattern. >>> normalize_route('^/user/<user_id:int>/?$') u'/user/(user_id:int)/' """ normalized_route = str(route).lstrip('^').rstrip('$').rstrip('?') normalized_route = normalized_route.replace('<', '(').replace('>', ')') return normalized_route
[ "def", "normalize_route", "(", "route", ":", "str", ")", "->", "str", ":", "normalized_route", "=", "str", "(", "route", ")", ".", "lstrip", "(", "'^'", ")", ".", "rstrip", "(", "'$'", ")", ".", "rstrip", "(", "'?'", ")", "normalized_route", "=", "no...
Strip some of the ugly regexp characters from the given pattern. >>> normalize_route('^/user/<user_id:int>/?$') u'/user/(user_id:int)/'
[ "Strip", "some", "of", "the", "ugly", "regexp", "characters", "from", "the", "given", "pattern", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L464-L472
train
50,050
upsight/doctor
doctor/docs/base.py
prefix_lines
def prefix_lines(lines, prefix): """Add the prefix to each of the lines. >>> prefix_lines(['foo', 'bar'], ' ') [' foo', ' bar'] >>> prefix_lines('foo\\nbar', ' ') [' foo', ' bar'] :param list or str lines: A string or a list of strings. If a string is passed, the string is split using splitlines(). :param str prefix: Prefix to add to the lines. Usually an indent. :returns: list """ if isinstance(lines, bytes): lines = lines.decode('utf-8') if isinstance(lines, str): lines = lines.splitlines() return [prefix + line for line in lines]
python
def prefix_lines(lines, prefix): """Add the prefix to each of the lines. >>> prefix_lines(['foo', 'bar'], ' ') [' foo', ' bar'] >>> prefix_lines('foo\\nbar', ' ') [' foo', ' bar'] :param list or str lines: A string or a list of strings. If a string is passed, the string is split using splitlines(). :param str prefix: Prefix to add to the lines. Usually an indent. :returns: list """ if isinstance(lines, bytes): lines = lines.decode('utf-8') if isinstance(lines, str): lines = lines.splitlines() return [prefix + line for line in lines]
[ "def", "prefix_lines", "(", "lines", ",", "prefix", ")", ":", "if", "isinstance", "(", "lines", ",", "bytes", ")", ":", "lines", "=", "lines", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "lines", ",", "str", ")", ":", "lines", "=", ...
Add the prefix to each of the lines. >>> prefix_lines(['foo', 'bar'], ' ') [' foo', ' bar'] >>> prefix_lines('foo\\nbar', ' ') [' foo', ' bar'] :param list or str lines: A string or a list of strings. If a string is passed, the string is split using splitlines(). :param str prefix: Prefix to add to the lines. Usually an indent. :returns: list
[ "Add", "the", "prefix", "to", "each", "of", "the", "lines", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L475-L492
train
50,051
upsight/doctor
doctor/docs/base.py
class_name_to_resource_name
def class_name_to_resource_name(class_name: str) -> str: """Converts a camel case class name to a resource name with spaces. >>> class_name_to_resource_name('FooBarObject') 'Foo Bar Object' :param class_name: The name to convert. :returns: The resource name. """ s = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', class_name) return re.sub('([a-z0-9])([A-Z])', r'\1 \2', s)
python
def class_name_to_resource_name(class_name: str) -> str: """Converts a camel case class name to a resource name with spaces. >>> class_name_to_resource_name('FooBarObject') 'Foo Bar Object' :param class_name: The name to convert. :returns: The resource name. """ s = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', class_name) return re.sub('([a-z0-9])([A-Z])', r'\1 \2', s)
[ "def", "class_name_to_resource_name", "(", "class_name", ":", "str", ")", "->", "str", ":", "s", "=", "re", ".", "sub", "(", "'(.)([A-Z][a-z]+)'", ",", "r'\\1 \\2'", ",", "class_name", ")", "return", "re", ".", "sub", "(", "'([a-z0-9])([A-Z])'", ",", "r'\\1 ...
Converts a camel case class name to a resource name with spaces. >>> class_name_to_resource_name('FooBarObject') 'Foo Bar Object' :param class_name: The name to convert. :returns: The resource name.
[ "Converts", "a", "camel", "case", "class", "name", "to", "a", "resource", "name", "with", "spaces", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L495-L505
train
50,052
upsight/doctor
doctor/docs/base.py
BaseDirective._prepare_env
def _prepare_env(self): # pragma: no cover """Setup the document's environment, if necessary.""" env = self.state.document.settings.env if not hasattr(env, self.directive_name): # Track places where we use this directive, so we can check for # outdated documents in the future. state = DirectiveState() setattr(env, self.directive_name, state) else: state = getattr(env, self.directive_name) return env, state
python
def _prepare_env(self): # pragma: no cover """Setup the document's environment, if necessary.""" env = self.state.document.settings.env if not hasattr(env, self.directive_name): # Track places where we use this directive, so we can check for # outdated documents in the future. state = DirectiveState() setattr(env, self.directive_name, state) else: state = getattr(env, self.directive_name) return env, state
[ "def", "_prepare_env", "(", "self", ")", ":", "# pragma: no cover", "env", "=", "self", ".", "state", ".", "document", ".", "settings", ".", "env", "if", "not", "hasattr", "(", "env", ",", "self", ".", "directive_name", ")", ":", "# Track places where we use...
Setup the document's environment, if necessary.
[ "Setup", "the", "document", "s", "environment", "if", "necessary", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L551-L561
train
50,053
upsight/doctor
doctor/docs/base.py
BaseDirective.run
def run(self): # pragma: no cover """Called by Sphinx to generate documentation for this directive.""" if self.directive_name is None: raise NotImplementedError('directive_name must be implemented by ' 'subclasses of BaseDirective') env, state = self._prepare_env() state.doc_names.add(env.docname) directive_name = '<{}>'.format(self.directive_name) node = nodes.section() node.document = self.state.document result = ViewList() for line in self._render_rst(): if line.startswith(HEADING_TOKEN): # Remove heading token, then append 2 lines, one with # the heading text, and the other with the dashes to # underline the heading. heading = line[HEADING_TOKEN_LENGTH:] result.append(heading, directive_name) result.append('-' * len(heading), directive_name) else: result.append(line, directive_name) nested_parse_with_titles(self.state, result, node) return node.children
python
def run(self): # pragma: no cover """Called by Sphinx to generate documentation for this directive.""" if self.directive_name is None: raise NotImplementedError('directive_name must be implemented by ' 'subclasses of BaseDirective') env, state = self._prepare_env() state.doc_names.add(env.docname) directive_name = '<{}>'.format(self.directive_name) node = nodes.section() node.document = self.state.document result = ViewList() for line in self._render_rst(): if line.startswith(HEADING_TOKEN): # Remove heading token, then append 2 lines, one with # the heading text, and the other with the dashes to # underline the heading. heading = line[HEADING_TOKEN_LENGTH:] result.append(heading, directive_name) result.append('-' * len(heading), directive_name) else: result.append(line, directive_name) nested_parse_with_titles(self.state, result, node) return node.children
[ "def", "run", "(", "self", ")", ":", "# pragma: no cover", "if", "self", ".", "directive_name", "is", "None", ":", "raise", "NotImplementedError", "(", "'directive_name must be implemented by '", "'subclasses of BaseDirective'", ")", "env", ",", "state", "=", "self", ...
Called by Sphinx to generate documentation for this directive.
[ "Called", "by", "Sphinx", "to", "generate", "documentation", "for", "this", "directive", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L642-L664
train
50,054
upsight/doctor
doctor/docs/base.py
BaseDirective.get_outdated_docs
def get_outdated_docs( cls, app, env, added, changed, removed): # pragma: no cover """Handler for Sphinx's env-get-outdated event. This handler gives a Sphinx extension a chance to indicate that some set of documents are out of date and need to be re-rendered. The implementation here is stupid, for now, and always says that anything that uses the directive needs to be re-rendered. We should make it smarter, at some point, and have it figure out which modules are used by the associated handlers, and whether they have actually been updated since the last time the given document was rendered. """ state = getattr(env, cls.directive_name, None) if state and state.doc_names: # This is stupid for now, and always says everything that uses # this autodoc generation needs to be updated. We should make this # smarter at some point and actually figure out what modules are # touched, and whether they have been changed. return sorted(state.doc_names) else: return []
python
def get_outdated_docs( cls, app, env, added, changed, removed): # pragma: no cover """Handler for Sphinx's env-get-outdated event. This handler gives a Sphinx extension a chance to indicate that some set of documents are out of date and need to be re-rendered. The implementation here is stupid, for now, and always says that anything that uses the directive needs to be re-rendered. We should make it smarter, at some point, and have it figure out which modules are used by the associated handlers, and whether they have actually been updated since the last time the given document was rendered. """ state = getattr(env, cls.directive_name, None) if state and state.doc_names: # This is stupid for now, and always says everything that uses # this autodoc generation needs to be updated. We should make this # smarter at some point and actually figure out what modules are # touched, and whether they have been changed. return sorted(state.doc_names) else: return []
[ "def", "get_outdated_docs", "(", "cls", ",", "app", ",", "env", ",", "added", ",", "changed", ",", "removed", ")", ":", "# pragma: no cover", "state", "=", "getattr", "(", "env", ",", "cls", ".", "directive_name", ",", "None", ")", "if", "state", "and", ...
Handler for Sphinx's env-get-outdated event. This handler gives a Sphinx extension a chance to indicate that some set of documents are out of date and need to be re-rendered. The implementation here is stupid, for now, and always says that anything that uses the directive needs to be re-rendered. We should make it smarter, at some point, and have it figure out which modules are used by the associated handlers, and whether they have actually been updated since the last time the given document was rendered.
[ "Handler", "for", "Sphinx", "s", "env", "-", "get", "-", "outdated", "event", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L667-L689
train
50,055
upsight/doctor
doctor/docs/base.py
BaseDirective.purge_docs
def purge_docs(cls, app, env, docname): # pragma: no cover """Handler for Sphinx's env-purge-doc event. This event is emitted when all traces of a source file should be cleaned from the environment (that is, if the source file is removed, or before it is freshly read). This is for extensions that keep their own caches in attributes of the environment. For example, there is a cache of all modules on the environment. When a source file has been changed, the cache's entries for the file are cleared, since the module declarations could have been removed from the file. """ state = getattr(env, cls.directive_name, None) if state and docname in state.doc_names: state.doc_names.remove(docname)
python
def purge_docs(cls, app, env, docname): # pragma: no cover """Handler for Sphinx's env-purge-doc event. This event is emitted when all traces of a source file should be cleaned from the environment (that is, if the source file is removed, or before it is freshly read). This is for extensions that keep their own caches in attributes of the environment. For example, there is a cache of all modules on the environment. When a source file has been changed, the cache's entries for the file are cleared, since the module declarations could have been removed from the file. """ state = getattr(env, cls.directive_name, None) if state and docname in state.doc_names: state.doc_names.remove(docname)
[ "def", "purge_docs", "(", "cls", ",", "app", ",", "env", ",", "docname", ")", ":", "# pragma: no cover", "state", "=", "getattr", "(", "env", ",", "cls", ".", "directive_name", ",", "None", ")", "if", "state", "and", "docname", "in", "state", ".", "doc...
Handler for Sphinx's env-purge-doc event. This event is emitted when all traces of a source file should be cleaned from the environment (that is, if the source file is removed, or before it is freshly read). This is for extensions that keep their own caches in attributes of the environment. For example, there is a cache of all modules on the environment. When a source file has been changed, the cache's entries for the file are cleared, since the module declarations could have been removed from the file.
[ "Handler", "for", "Sphinx", "s", "env", "-", "purge", "-", "doc", "event", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L692-L707
train
50,056
upsight/doctor
doctor/docs/base.py
BaseDirective.setup
def setup(cls, app): # pragma: no cover """Called by Sphinx to setup an extension.""" if cls.directive_name is None: raise NotImplementedError('directive_name must be set by ' 'subclasses of BaseDirective') if not app.registry.has_domain('http'): setup_httpdomain(app) app.add_config_value('{}_harness'.format(cls.directive_name), None, 'env') app.add_directive(cls.directive_name, cls) app.connect('builder-inited', cls.run_setup) app.connect('build-finished', cls.run_teardown) app.connect('env-get-outdated', cls.get_outdated_docs) app.connect('env-purge-doc', cls.purge_docs)
python
def setup(cls, app): # pragma: no cover """Called by Sphinx to setup an extension.""" if cls.directive_name is None: raise NotImplementedError('directive_name must be set by ' 'subclasses of BaseDirective') if not app.registry.has_domain('http'): setup_httpdomain(app) app.add_config_value('{}_harness'.format(cls.directive_name), None, 'env') app.add_directive(cls.directive_name, cls) app.connect('builder-inited', cls.run_setup) app.connect('build-finished', cls.run_teardown) app.connect('env-get-outdated', cls.get_outdated_docs) app.connect('env-purge-doc', cls.purge_docs)
[ "def", "setup", "(", "cls", ",", "app", ")", ":", "# pragma: no cover", "if", "cls", ".", "directive_name", "is", "None", ":", "raise", "NotImplementedError", "(", "'directive_name must be set by '", "'subclasses of BaseDirective'", ")", "if", "not", "app", ".", "...
Called by Sphinx to setup an extension.
[ "Called", "by", "Sphinx", "to", "setup", "an", "extension", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L723-L736
train
50,057
upsight/doctor
doctor/docs/base.py
BaseHarness.define_header_values
def define_header_values(self, http_method, route, values, update=False): """Define header values for a given request. By default, header values are determined from the class attribute `headers`. But if you want to change the headers used in the documentation for a specific route, this method lets you do that. :param str http_method: An HTTP method, like "get". :param str route: The route to match. :param dict values: A dictionary of headers for the example request. :param bool update: If True, the values will be merged into the default headers for the request. If False, the values will replace the default headers. """ self.defined_header_values[(http_method.lower(), route)] = { 'update': update, 'values': values }
python
def define_header_values(self, http_method, route, values, update=False): """Define header values for a given request. By default, header values are determined from the class attribute `headers`. But if you want to change the headers used in the documentation for a specific route, this method lets you do that. :param str http_method: An HTTP method, like "get". :param str route: The route to match. :param dict values: A dictionary of headers for the example request. :param bool update: If True, the values will be merged into the default headers for the request. If False, the values will replace the default headers. """ self.defined_header_values[(http_method.lower(), route)] = { 'update': update, 'values': values }
[ "def", "define_header_values", "(", "self", ",", "http_method", ",", "route", ",", "values", ",", "update", "=", "False", ")", ":", "self", ".", "defined_header_values", "[", "(", "http_method", ".", "lower", "(", ")", ",", "route", ")", "]", "=", "{", ...
Define header values for a given request. By default, header values are determined from the class attribute `headers`. But if you want to change the headers used in the documentation for a specific route, this method lets you do that. :param str http_method: An HTTP method, like "get". :param str route: The route to match. :param dict values: A dictionary of headers for the example request. :param bool update: If True, the values will be merged into the default headers for the request. If False, the values will replace the default headers.
[ "Define", "header", "values", "for", "a", "given", "request", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L768-L785
train
50,058
upsight/doctor
doctor/docs/base.py
BaseHarness.define_example_values
def define_example_values(self, http_method, route, values, update=False): """Define example values for a given request. By default, example values are determined from the example properties in the schema. But if you want to change the example used in the documentation for a specific route, and this method lets you do that. :param str http_method: An HTTP method, like "get". :param str route: The route to match. :param dict values: A dictionary of parameters for the example request. :param bool update: If True, the values will be merged into the default example values for the request. If False, the values will replace the default example values. """ self.defined_example_values[(http_method.lower(), route)] = { 'update': update, 'values': values }
python
def define_example_values(self, http_method, route, values, update=False): """Define example values for a given request. By default, example values are determined from the example properties in the schema. But if you want to change the example used in the documentation for a specific route, and this method lets you do that. :param str http_method: An HTTP method, like "get". :param str route: The route to match. :param dict values: A dictionary of parameters for the example request. :param bool update: If True, the values will be merged into the default example values for the request. If False, the values will replace the default example values. """ self.defined_example_values[(http_method.lower(), route)] = { 'update': update, 'values': values }
[ "def", "define_example_values", "(", "self", ",", "http_method", ",", "route", ",", "values", ",", "update", "=", "False", ")", ":", "self", ".", "defined_example_values", "[", "(", "http_method", ".", "lower", "(", ")", ",", "route", ")", "]", "=", "{",...
Define example values for a given request. By default, example values are determined from the example properties in the schema. But if you want to change the example used in the documentation for a specific route, and this method lets you do that. :param str http_method: An HTTP method, like "get". :param str route: The route to match. :param dict values: A dictionary of parameters for the example request. :param bool update: If True, the values will be merged into the default example values for the request. If False, the values will replace the default example values.
[ "Define", "example", "values", "for", "a", "given", "request", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L787-L804
train
50,059
upsight/doctor
doctor/docs/base.py
BaseHarness._get_annotation_heading
def _get_annotation_heading(self, handler, route, heading=None): """Returns the heading text for an annotation. Attempts to get the name of the heading from the handler attribute `schematic_title` first. If `schematic_title` it is not present, it attempts to generate the title from the class path. This path: advertiser_api.handlers.foo_bar.FooListHandler would translate to 'Foo Bar' If the file name with the resource is generically named handlers.py or it doesn't have a full path then we attempt to get the resource name from the class name. So FooListHandler and FooHandler would translate to 'Foo'. If the handler class name starts with 'Internal', then that will be appended to the heading. So InternalFooListHandler would translate to 'Foo (Internal)' :param mixed handler: The handler class. Will be a flask resource class :param str route: The route to the handler. :returns: The text for the heading as a string. """ if hasattr(handler, '_doctor_heading'): return handler._doctor_heading heading = '' handler_path = str(handler) try: handler_file_name = handler_path.split('.')[-2] except IndexError: # In the event there is no path and we just have the class name, # get heading from the class name by setting us up to enter the # first if statement. handler_file_name = 'handler' # Get heading from class name if handler_file_name.startswith('handler'): class_name = handler_path.split('.')[-1] internal = False for word in CAMEL_CASE_RE.findall(class_name): if word == 'Internal': internal = True continue elif word.startswith(('List', 'Handler', 'Resource')): # We've hit the end of the class name that contains # words we are interested in. break heading += '%s ' % (word,) if internal: heading = heading.strip() heading += ' (Internal)' # Get heading from handler file name else: heading = ' '.join(handler_file_name.split('_')).title() if 'internal' in route: heading += ' (Internal)' return heading.strip()
python
def _get_annotation_heading(self, handler, route, heading=None): """Returns the heading text for an annotation. Attempts to get the name of the heading from the handler attribute `schematic_title` first. If `schematic_title` it is not present, it attempts to generate the title from the class path. This path: advertiser_api.handlers.foo_bar.FooListHandler would translate to 'Foo Bar' If the file name with the resource is generically named handlers.py or it doesn't have a full path then we attempt to get the resource name from the class name. So FooListHandler and FooHandler would translate to 'Foo'. If the handler class name starts with 'Internal', then that will be appended to the heading. So InternalFooListHandler would translate to 'Foo (Internal)' :param mixed handler: The handler class. Will be a flask resource class :param str route: The route to the handler. :returns: The text for the heading as a string. """ if hasattr(handler, '_doctor_heading'): return handler._doctor_heading heading = '' handler_path = str(handler) try: handler_file_name = handler_path.split('.')[-2] except IndexError: # In the event there is no path and we just have the class name, # get heading from the class name by setting us up to enter the # first if statement. handler_file_name = 'handler' # Get heading from class name if handler_file_name.startswith('handler'): class_name = handler_path.split('.')[-1] internal = False for word in CAMEL_CASE_RE.findall(class_name): if word == 'Internal': internal = True continue elif word.startswith(('List', 'Handler', 'Resource')): # We've hit the end of the class name that contains # words we are interested in. break heading += '%s ' % (word,) if internal: heading = heading.strip() heading += ' (Internal)' # Get heading from handler file name else: heading = ' '.join(handler_file_name.split('_')).title() if 'internal' in route: heading += ' (Internal)' return heading.strip()
[ "def", "_get_annotation_heading", "(", "self", ",", "handler", ",", "route", ",", "heading", "=", "None", ")", ":", "if", "hasattr", "(", "handler", ",", "'_doctor_heading'", ")", ":", "return", "handler", ".", "_doctor_heading", "heading", "=", "''", "handl...
Returns the heading text for an annotation. Attempts to get the name of the heading from the handler attribute `schematic_title` first. If `schematic_title` it is not present, it attempts to generate the title from the class path. This path: advertiser_api.handlers.foo_bar.FooListHandler would translate to 'Foo Bar' If the file name with the resource is generically named handlers.py or it doesn't have a full path then we attempt to get the resource name from the class name. So FooListHandler and FooHandler would translate to 'Foo'. If the handler class name starts with 'Internal', then that will be appended to the heading. So InternalFooListHandler would translate to 'Foo (Internal)' :param mixed handler: The handler class. Will be a flask resource class :param str route: The route to the handler. :returns: The text for the heading as a string.
[ "Returns", "the", "heading", "text", "for", "an", "annotation", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L856-L913
train
50,060
upsight/doctor
doctor/docs/base.py
BaseHarness._get_headers
def _get_headers(self, route: str, annotation: ResourceAnnotation) -> Dict: """Gets headers for the provided route. :param route: The route to get example values for. :type route: werkzeug.routing.Rule for a flask api. :param annotation: Schema annotation for the method to be requested. :type annotation: doctor.resource.ResourceAnnotation :retruns: A dict containing headers. """ headers = self.headers.copy() defined_header_values = self.defined_header_values.get( (annotation.http_method.lower(), str(route))) if defined_header_values is not None: if defined_header_values['update']: headers.update(defined_header_values['values']) else: headers = defined_header_values['values'] return headers
python
def _get_headers(self, route: str, annotation: ResourceAnnotation) -> Dict: """Gets headers for the provided route. :param route: The route to get example values for. :type route: werkzeug.routing.Rule for a flask api. :param annotation: Schema annotation for the method to be requested. :type annotation: doctor.resource.ResourceAnnotation :retruns: A dict containing headers. """ headers = self.headers.copy() defined_header_values = self.defined_header_values.get( (annotation.http_method.lower(), str(route))) if defined_header_values is not None: if defined_header_values['update']: headers.update(defined_header_values['values']) else: headers = defined_header_values['values'] return headers
[ "def", "_get_headers", "(", "self", ",", "route", ":", "str", ",", "annotation", ":", "ResourceAnnotation", ")", "->", "Dict", ":", "headers", "=", "self", ".", "headers", ".", "copy", "(", ")", "defined_header_values", "=", "self", ".", "defined_header_valu...
Gets headers for the provided route. :param route: The route to get example values for. :type route: werkzeug.routing.Rule for a flask api. :param annotation: Schema annotation for the method to be requested. :type annotation: doctor.resource.ResourceAnnotation :retruns: A dict containing headers.
[ "Gets", "headers", "for", "the", "provided", "route", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L915-L932
train
50,061
upsight/doctor
doctor/docs/base.py
BaseHarness._get_example_values
def _get_example_values(self, route: str, annotation: ResourceAnnotation) -> Dict[str, Any]: """Gets example values for all properties in the annotation's schema. :param route: The route to get example values for. :type route: werkzeug.routing.Rule for a flask api. :param annotation: Schema annotation for the method to be requested. :type annotation: doctor.resource.ResourceAnnotation :retruns: A dict containing property names as keys and example values as values. """ defined_values = self.defined_example_values.get( (annotation.http_method.lower(), str(route))) if defined_values and not defined_values['update']: return defined_values['values'] # If we defined a req_obj_type for the logic, use that type's # example values instead of the annotated parameters. if annotation.logic._doctor_req_obj_type: values = annotation.logic._doctor_req_obj_type.get_example() else: values = { k: v.annotation.get_example() for k, v in annotation.annotated_parameters.items() } if defined_values: values.update(defined_values['values']) # If this is a GET route, we need to json dumps any parameters that # are lists or dicts. Otherwise we'll get a 400 error for those params if annotation.http_method == 'GET': for k, v in values.items(): if isinstance(v, (list, dict)): values[k] = json.dumps(v) return values
python
def _get_example_values(self, route: str, annotation: ResourceAnnotation) -> Dict[str, Any]: """Gets example values for all properties in the annotation's schema. :param route: The route to get example values for. :type route: werkzeug.routing.Rule for a flask api. :param annotation: Schema annotation for the method to be requested. :type annotation: doctor.resource.ResourceAnnotation :retruns: A dict containing property names as keys and example values as values. """ defined_values = self.defined_example_values.get( (annotation.http_method.lower(), str(route))) if defined_values and not defined_values['update']: return defined_values['values'] # If we defined a req_obj_type for the logic, use that type's # example values instead of the annotated parameters. if annotation.logic._doctor_req_obj_type: values = annotation.logic._doctor_req_obj_type.get_example() else: values = { k: v.annotation.get_example() for k, v in annotation.annotated_parameters.items() } if defined_values: values.update(defined_values['values']) # If this is a GET route, we need to json dumps any parameters that # are lists or dicts. Otherwise we'll get a 400 error for those params if annotation.http_method == 'GET': for k, v in values.items(): if isinstance(v, (list, dict)): values[k] = json.dumps(v) return values
[ "def", "_get_example_values", "(", "self", ",", "route", ":", "str", ",", "annotation", ":", "ResourceAnnotation", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "defined_values", "=", "self", ".", "defined_example_values", ".", "get", "(", "(", "ann...
Gets example values for all properties in the annotation's schema. :param route: The route to get example values for. :type route: werkzeug.routing.Rule for a flask api. :param annotation: Schema annotation for the method to be requested. :type annotation: doctor.resource.ResourceAnnotation :retruns: A dict containing property names as keys and example values as values.
[ "Gets", "example", "values", "for", "all", "properties", "in", "the", "annotation", "s", "schema", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/docs/base.py#L934-L968
train
50,062
QualiSystems/cloudshell-networking-devices
cloudshell/devices/autoload/device_names.py
get_device_name
def get_device_name(file_name, sys_obj_id, delimiter=":"): """Get device name by its SNMP sysObjectID property from the file map :param str file_name: :param str sys_obj_id: :param str delimiter: :rtype: str """ try: with open(file_name, "rb") as csv_file: csv_reader = csv.reader(csv_file, delimiter=delimiter) for row in csv_reader: if len(row) >= 2 and row[0] == sys_obj_id: return row[1] except IOError: pass # file does not exists return sys_obj_id
python
def get_device_name(file_name, sys_obj_id, delimiter=":"): """Get device name by its SNMP sysObjectID property from the file map :param str file_name: :param str sys_obj_id: :param str delimiter: :rtype: str """ try: with open(file_name, "rb") as csv_file: csv_reader = csv.reader(csv_file, delimiter=delimiter) for row in csv_reader: if len(row) >= 2 and row[0] == sys_obj_id: return row[1] except IOError: pass # file does not exists return sys_obj_id
[ "def", "get_device_name", "(", "file_name", ",", "sys_obj_id", ",", "delimiter", "=", "\":\"", ")", ":", "try", ":", "with", "open", "(", "file_name", ",", "\"rb\"", ")", "as", "csv_file", ":", "csv_reader", "=", "csv", ".", "reader", "(", "csv_file", ",...
Get device name by its SNMP sysObjectID property from the file map :param str file_name: :param str sys_obj_id: :param str delimiter: :rtype: str
[ "Get", "device", "name", "by", "its", "SNMP", "sysObjectID", "property", "from", "the", "file", "map" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/autoload/device_names.py#L4-L22
train
50,063
Workiva/furious
example/context_completion_with_results.py
context_complete
def context_complete(context_id): """Log out that the context is complete.""" logging.info('Context %s is.......... DONE.', context_id) from furious.context import get_current_async_with_context _, context = get_current_async_with_context() if not context: logging.error("Could not load context") return for task_id, result in context.result.items(): logging.info("#########################") logging.info("Task Id: %s and Result: %s", task_id, result) return context_id
python
def context_complete(context_id): """Log out that the context is complete.""" logging.info('Context %s is.......... DONE.', context_id) from furious.context import get_current_async_with_context _, context = get_current_async_with_context() if not context: logging.error("Could not load context") return for task_id, result in context.result.items(): logging.info("#########################") logging.info("Task Id: %s and Result: %s", task_id, result) return context_id
[ "def", "context_complete", "(", "context_id", ")", ":", "logging", ".", "info", "(", "'Context %s is.......... DONE.'", ",", "context_id", ")", "from", "furious", ".", "context", "import", "get_current_async_with_context", "_", ",", "context", "=", "get_current_async_...
Log out that the context is complete.
[ "Log", "out", "that", "the", "context", "is", "complete", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/context_completion_with_results.py#L63-L79
train
50,064
Workiva/furious
example/batcher/__init__.py
process_messages
def process_messages(tag, retries=0): """Processes the messages pulled fromm a queue based off the tag passed in. Will insert another processor if any work was processed or the retry count is under the max retry count. Will update a aggregated stats object with the data in the payload of the messages processed. :param tag: :class: `str` Tag to query the queue on :param retry: :class: `int` Number of retries the job has processed """ from furious.batcher import bump_batch from furious.batcher import MESSAGE_DEFAULT_QUEUE from furious.batcher import MessageIterator from furious.batcher import MessageProcessor from google.appengine.api import memcache # since we don't have a flag for checking complete we'll re-insert a # processor task with a retry count to catch any work that may still be # filtering in. If we've hit our max retry count we just bail out and # consider the job complete. if retries > 5: logging.info("Process messages hit max retry and is exiting") return # create a message iteragor for the tag in batches of 500 message_iterator = MessageIterator(tag, MESSAGE_DEFAULT_QUEUE, 500) client = memcache.Client() # get the stats object from cache stats = client.gets(tag) # json decode it if it exists otherwise get the default state. stats = json.loads(stats) if stats else get_default_stats() work_processed = False # loop through the messages pulled from the queue. for message in message_iterator: work_processed = True value = int(message.get("value", 0)) color = message.get("color").lower() # update the total stats with the value pulled set_stats(stats["totals"], value) # update the specific color status via the value pulled set_stats(stats["colors"][color], value) # insert the stats back into cache json_stats = json.dumps(stats) # try and do an add first to see if it's new. We can't trush get due to # a race condition. 
if not client.add(tag, json_stats): # if we couldn't add than lets do a compare and set to safely # update the stats if not client.cas(tag, json_stats): raise Exception("Transaction Collision.") # bump the process batch id bump_batch(tag) if work_processed: # reset the retries as we've processed work retries = 0 else: # no work was processed so increment the retries retries += 1 # insert another processor processor = MessageProcessor( target=process_messages, args=("colors",), kwargs={'retries': retries}, tag="colors") processor.start()
python
def process_messages(tag, retries=0): """Processes the messages pulled fromm a queue based off the tag passed in. Will insert another processor if any work was processed or the retry count is under the max retry count. Will update a aggregated stats object with the data in the payload of the messages processed. :param tag: :class: `str` Tag to query the queue on :param retry: :class: `int` Number of retries the job has processed """ from furious.batcher import bump_batch from furious.batcher import MESSAGE_DEFAULT_QUEUE from furious.batcher import MessageIterator from furious.batcher import MessageProcessor from google.appengine.api import memcache # since we don't have a flag for checking complete we'll re-insert a # processor task with a retry count to catch any work that may still be # filtering in. If we've hit our max retry count we just bail out and # consider the job complete. if retries > 5: logging.info("Process messages hit max retry and is exiting") return # create a message iteragor for the tag in batches of 500 message_iterator = MessageIterator(tag, MESSAGE_DEFAULT_QUEUE, 500) client = memcache.Client() # get the stats object from cache stats = client.gets(tag) # json decode it if it exists otherwise get the default state. stats = json.loads(stats) if stats else get_default_stats() work_processed = False # loop through the messages pulled from the queue. for message in message_iterator: work_processed = True value = int(message.get("value", 0)) color = message.get("color").lower() # update the total stats with the value pulled set_stats(stats["totals"], value) # update the specific color status via the value pulled set_stats(stats["colors"][color], value) # insert the stats back into cache json_stats = json.dumps(stats) # try and do an add first to see if it's new. We can't trush get due to # a race condition. 
if not client.add(tag, json_stats): # if we couldn't add than lets do a compare and set to safely # update the stats if not client.cas(tag, json_stats): raise Exception("Transaction Collision.") # bump the process batch id bump_batch(tag) if work_processed: # reset the retries as we've processed work retries = 0 else: # no work was processed so increment the retries retries += 1 # insert another processor processor = MessageProcessor( target=process_messages, args=("colors",), kwargs={'retries': retries}, tag="colors") processor.start()
[ "def", "process_messages", "(", "tag", ",", "retries", "=", "0", ")", ":", "from", "furious", ".", "batcher", "import", "bump_batch", "from", "furious", ".", "batcher", "import", "MESSAGE_DEFAULT_QUEUE", "from", "furious", ".", "batcher", "import", "MessageItera...
Processes the messages pulled fromm a queue based off the tag passed in. Will insert another processor if any work was processed or the retry count is under the max retry count. Will update a aggregated stats object with the data in the payload of the messages processed. :param tag: :class: `str` Tag to query the queue on :param retry: :class: `int` Number of retries the job has processed
[ "Processes", "the", "messages", "pulled", "fromm", "a", "queue", "based", "off", "the", "tag", "passed", "in", ".", "Will", "insert", "another", "processor", "if", "any", "work", "was", "processed", "or", "the", "retry", "count", "is", "under", "the", "max...
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/batcher/__init__.py#L125-L201
train
50,065
Workiva/furious
example/batcher/__init__.py
set_stats
def set_stats(stats, value): """Updates the stats with the value passed in. :param stats: :class: `dict` :param value: :class: `int` """ stats["total_count"] += 1 stats["value"] += value stats["average"] = stats["value"] / stats["total_count"] # this is just a basic example and not the best way to track aggregation. # for max and min old there are cases where this will not work correctly. if value > stats["max"]: stats["max"] = value if value < stats["min"] or stats["min"] == 0: stats["min"] = value
python
def set_stats(stats, value): """Updates the stats with the value passed in. :param stats: :class: `dict` :param value: :class: `int` """ stats["total_count"] += 1 stats["value"] += value stats["average"] = stats["value"] / stats["total_count"] # this is just a basic example and not the best way to track aggregation. # for max and min old there are cases where this will not work correctly. if value > stats["max"]: stats["max"] = value if value < stats["min"] or stats["min"] == 0: stats["min"] = value
[ "def", "set_stats", "(", "stats", ",", "value", ")", ":", "stats", "[", "\"total_count\"", "]", "+=", "1", "stats", "[", "\"value\"", "]", "+=", "value", "stats", "[", "\"average\"", "]", "=", "stats", "[", "\"value\"", "]", "/", "stats", "[", "\"total...
Updates the stats with the value passed in. :param stats: :class: `dict` :param value: :class: `int`
[ "Updates", "the", "stats", "with", "the", "value", "passed", "in", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/batcher/__init__.py#L204-L220
train
50,066
QualiSystems/cloudshell-networking-devices
cloudshell/devices/networking_utils.py
serialize_to_json
def serialize_to_json(result, unpicklable=False): """Serializes output as JSON and writes it to console output wrapped with special prefix and suffix :param result: Result to return :param unpicklable: If True adds JSON can be deserialized as real object. When False will be deserialized as dictionary """ json = jsonpickle.encode(result, unpicklable=unpicklable) result_for_output = str(json) return result_for_output
python
def serialize_to_json(result, unpicklable=False): """Serializes output as JSON and writes it to console output wrapped with special prefix and suffix :param result: Result to return :param unpicklable: If True adds JSON can be deserialized as real object. When False will be deserialized as dictionary """ json = jsonpickle.encode(result, unpicklable=unpicklable) result_for_output = str(json) return result_for_output
[ "def", "serialize_to_json", "(", "result", ",", "unpicklable", "=", "False", ")", ":", "json", "=", "jsonpickle", ".", "encode", "(", "result", ",", "unpicklable", "=", "unpicklable", ")", "result_for_output", "=", "str", "(", "json", ")", "return", "result_...
Serializes output as JSON and writes it to console output wrapped with special prefix and suffix :param result: Result to return :param unpicklable: If True adds JSON can be deserialized as real object. When False will be deserialized as dictionary
[ "Serializes", "output", "as", "JSON", "and", "writes", "it", "to", "console", "output", "wrapped", "with", "special", "prefix", "and", "suffix" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/networking_utils.py#L30-L40
train
50,067
QualiSystems/cloudshell-networking-devices
cloudshell/devices/runners/configuration_runner.py
ConfigurationRunner.orchestration_save
def orchestration_save(self, mode="shallow", custom_params=None): """Orchestration Save command :param mode: :param custom_params: json with all required action to configure or remove vlans from certain port :return Serialized OrchestrationSavedArtifact to json :rtype json """ save_params = {'folder_path': '', 'configuration_type': 'running', 'return_artifact': True} params = dict() if custom_params: params = jsonpickle.decode(custom_params) save_params.update(params.get('custom_params', {})) save_params['folder_path'] = self.get_path(save_params['folder_path']) saved_artifact = self.save(**save_params) saved_artifact_info = OrchestrationSavedArtifactInfo(resource_name=self.resource_config.name, created_date=datetime.datetime.now(), restore_rules=self.get_restore_rules(), saved_artifact=saved_artifact) save_response = OrchestrationSaveResult(saved_artifacts_info=saved_artifact_info) self._validate_artifact_info(saved_artifact_info) return serialize_to_json(save_response)
python
def orchestration_save(self, mode="shallow", custom_params=None): """Orchestration Save command :param mode: :param custom_params: json with all required action to configure or remove vlans from certain port :return Serialized OrchestrationSavedArtifact to json :rtype json """ save_params = {'folder_path': '', 'configuration_type': 'running', 'return_artifact': True} params = dict() if custom_params: params = jsonpickle.decode(custom_params) save_params.update(params.get('custom_params', {})) save_params['folder_path'] = self.get_path(save_params['folder_path']) saved_artifact = self.save(**save_params) saved_artifact_info = OrchestrationSavedArtifactInfo(resource_name=self.resource_config.name, created_date=datetime.datetime.now(), restore_rules=self.get_restore_rules(), saved_artifact=saved_artifact) save_response = OrchestrationSaveResult(saved_artifacts_info=saved_artifact_info) self._validate_artifact_info(saved_artifact_info) return serialize_to_json(save_response)
[ "def", "orchestration_save", "(", "self", ",", "mode", "=", "\"shallow\"", ",", "custom_params", "=", "None", ")", ":", "save_params", "=", "{", "'folder_path'", ":", "''", ",", "'configuration_type'", ":", "'running'", ",", "'return_artifact'", ":", "True", "...
Orchestration Save command :param mode: :param custom_params: json with all required action to configure or remove vlans from certain port :return Serialized OrchestrationSavedArtifact to json :rtype json
[ "Orchestration", "Save", "command" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/configuration_runner.py#L123-L149
train
50,068
QualiSystems/cloudshell-networking-devices
cloudshell/devices/runners/configuration_runner.py
ConfigurationRunner.get_path
def get_path(self, path=''): """ Validate incoming path, if path is empty, build it from resource attributes, If path is invalid - raise exception :param path: path to remote file storage :return: valid path or :raise Exception: """ if not path: host = self.resource_config.backup_location if ':' not in host: scheme = self.resource_config.backup_type if not scheme or scheme.lower() == self.DEFAULT_FILE_SYSTEM.lower(): scheme = self.file_system scheme = re.sub('(:|/+).*$', '', scheme, re.DOTALL) host = re.sub('^/+', '', host) host = '{}://{}'.format(scheme, host) path = host url = UrlParser.parse_url(path) if url[UrlParser.SCHEME].lower() in AUTHORIZATION_REQUIRED_STORAGE: if UrlParser.USERNAME not in url or not url[UrlParser.USERNAME]: url[UrlParser.USERNAME] = self.resource_config.backup_user if UrlParser.PASSWORD not in url or not url[UrlParser.PASSWORD]: url[UrlParser.PASSWORD] = self._api.DecryptPassword(self.resource_config.backup_password).Value try: result = UrlParser.build_url(url) except Exception as e: self._logger.error('Failed to build url: {}'.format(e)) raise Exception('ConfigurationOperations', 'Failed to build path url to remote host') return result
python
def get_path(self, path=''): """ Validate incoming path, if path is empty, build it from resource attributes, If path is invalid - raise exception :param path: path to remote file storage :return: valid path or :raise Exception: """ if not path: host = self.resource_config.backup_location if ':' not in host: scheme = self.resource_config.backup_type if not scheme or scheme.lower() == self.DEFAULT_FILE_SYSTEM.lower(): scheme = self.file_system scheme = re.sub('(:|/+).*$', '', scheme, re.DOTALL) host = re.sub('^/+', '', host) host = '{}://{}'.format(scheme, host) path = host url = UrlParser.parse_url(path) if url[UrlParser.SCHEME].lower() in AUTHORIZATION_REQUIRED_STORAGE: if UrlParser.USERNAME not in url or not url[UrlParser.USERNAME]: url[UrlParser.USERNAME] = self.resource_config.backup_user if UrlParser.PASSWORD not in url or not url[UrlParser.PASSWORD]: url[UrlParser.PASSWORD] = self._api.DecryptPassword(self.resource_config.backup_password).Value try: result = UrlParser.build_url(url) except Exception as e: self._logger.error('Failed to build url: {}'.format(e)) raise Exception('ConfigurationOperations', 'Failed to build path url to remote host') return result
[ "def", "get_path", "(", "self", ",", "path", "=", "''", ")", ":", "if", "not", "path", ":", "host", "=", "self", ".", "resource_config", ".", "backup_location", "if", "':'", "not", "in", "host", ":", "scheme", "=", "self", ".", "resource_config", ".", ...
Validate incoming path, if path is empty, build it from resource attributes, If path is invalid - raise exception :param path: path to remote file storage :return: valid path or :raise Exception:
[ "Validate", "incoming", "path", "if", "path", "is", "empty", "build", "it", "from", "resource", "attributes", "If", "path", "is", "invalid", "-", "raise", "exception" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/configuration_runner.py#L198-L230
train
50,069
QualiSystems/cloudshell-networking-devices
cloudshell/devices/runners/configuration_runner.py
ConfigurationRunner._validate_configuration_type
def _validate_configuration_type(self, configuration_type): """Validate configuration type :param configuration_type: configuration_type, should be Startup or Running :raise Exception: """ if configuration_type.lower() != 'running' and configuration_type.lower() != 'startup': raise Exception(self.__class__.__name__, 'Configuration Type is invalid. Should be startup or running')
python
def _validate_configuration_type(self, configuration_type): """Validate configuration type :param configuration_type: configuration_type, should be Startup or Running :raise Exception: """ if configuration_type.lower() != 'running' and configuration_type.lower() != 'startup': raise Exception(self.__class__.__name__, 'Configuration Type is invalid. Should be startup or running')
[ "def", "_validate_configuration_type", "(", "self", ",", "configuration_type", ")", ":", "if", "configuration_type", ".", "lower", "(", ")", "!=", "'running'", "and", "configuration_type", ".", "lower", "(", ")", "!=", "'startup'", ":", "raise", "Exception", "("...
Validate configuration type :param configuration_type: configuration_type, should be Startup or Running :raise Exception:
[ "Validate", "configuration", "type" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/configuration_runner.py#L232-L240
train
50,070
QualiSystems/cloudshell-networking-devices
cloudshell/devices/runners/configuration_runner.py
ConfigurationRunner._validate_artifact_info
def _validate_artifact_info(self, saved_config): """Validate OrchestrationSavedArtifactInfo object for key components :param OrchestrationSavedArtifactInfo saved_config: object to validate """ is_fail = False fail_attribute = '' for class_attribute in self.REQUIRED_SAVE_ATTRIBUTES_LIST: if type(class_attribute) is tuple: if not hasattr(saved_config, class_attribute[0]): is_fail = True fail_attribute = class_attribute[0] elif not hasattr(getattr(saved_config, class_attribute[0]), class_attribute[1]): is_fail = True fail_attribute = class_attribute[1] else: if not hasattr(saved_config, class_attribute): is_fail = True fail_attribute = class_attribute if is_fail: raise Exception('ConfigurationOperations', 'Mandatory field {0} is missing in Saved Artifact Info request json'.format( fail_attribute))
python
def _validate_artifact_info(self, saved_config): """Validate OrchestrationSavedArtifactInfo object for key components :param OrchestrationSavedArtifactInfo saved_config: object to validate """ is_fail = False fail_attribute = '' for class_attribute in self.REQUIRED_SAVE_ATTRIBUTES_LIST: if type(class_attribute) is tuple: if not hasattr(saved_config, class_attribute[0]): is_fail = True fail_attribute = class_attribute[0] elif not hasattr(getattr(saved_config, class_attribute[0]), class_attribute[1]): is_fail = True fail_attribute = class_attribute[1] else: if not hasattr(saved_config, class_attribute): is_fail = True fail_attribute = class_attribute if is_fail: raise Exception('ConfigurationOperations', 'Mandatory field {0} is missing in Saved Artifact Info request json'.format( fail_attribute))
[ "def", "_validate_artifact_info", "(", "self", ",", "saved_config", ")", ":", "is_fail", "=", "False", "fail_attribute", "=", "''", "for", "class_attribute", "in", "self", ".", "REQUIRED_SAVE_ATTRIBUTES_LIST", ":", "if", "type", "(", "class_attribute", ")", "is", ...
Validate OrchestrationSavedArtifactInfo object for key components :param OrchestrationSavedArtifactInfo saved_config: object to validate
[ "Validate", "OrchestrationSavedArtifactInfo", "object", "for", "key", "components" ]
009aab33edb30035b52fe10dbb91db61c95ba4d9
https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/configuration_runner.py#L242-L265
train
50,071
eumis/pyviews
pyviews/core/ioc.py
register
def register(key, initializer: callable, param=None): '''Adds resolver to global container''' get_current_scope().container.register(key, initializer, param)
python
def register(key, initializer: callable, param=None): '''Adds resolver to global container''' get_current_scope().container.register(key, initializer, param)
[ "def", "register", "(", "key", ",", "initializer", ":", "callable", ",", "param", "=", "None", ")", ":", "get_current_scope", "(", ")", ".", "container", ".", "register", "(", "key", ",", "initializer", ",", "param", ")" ]
Adds resolver to global container
[ "Adds", "resolver", "to", "global", "container" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/ioc.py#L81-L83
train
50,072
eumis/pyviews
pyviews/core/ioc.py
register_single
def register_single(key, value, param=None): '''Generates resolver to return singleton value and adds it to global container''' get_current_scope().container.register(key, lambda: value, param)
python
def register_single(key, value, param=None): '''Generates resolver to return singleton value and adds it to global container''' get_current_scope().container.register(key, lambda: value, param)
[ "def", "register_single", "(", "key", ",", "value", ",", "param", "=", "None", ")", ":", "get_current_scope", "(", ")", ".", "container", ".", "register", "(", "key", ",", "lambda", ":", "value", ",", "param", ")" ]
Generates resolver to return singleton value and adds it to global container
[ "Generates", "resolver", "to", "return", "singleton", "value", "and", "adds", "it", "to", "global", "container" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/ioc.py#L85-L87
train
50,073
eumis/pyviews
pyviews/core/ioc.py
wrap_with_scope
def wrap_with_scope(func, scope_name=None): '''Wraps function with scope. If scope_name is None current scope is used''' if scope_name is None: scope_name = get_current_scope().name return lambda *args, scope=scope_name, **kwargs: \ _call_with_scope(func, scope, args, kwargs)
python
def wrap_with_scope(func, scope_name=None): '''Wraps function with scope. If scope_name is None current scope is used''' if scope_name is None: scope_name = get_current_scope().name return lambda *args, scope=scope_name, **kwargs: \ _call_with_scope(func, scope, args, kwargs)
[ "def", "wrap_with_scope", "(", "func", ",", "scope_name", "=", "None", ")", ":", "if", "scope_name", "is", "None", ":", "scope_name", "=", "get_current_scope", "(", ")", ".", "name", "return", "lambda", "*", "args", ",", "scope", "=", "scope_name", ",", ...
Wraps function with scope. If scope_name is None current scope is used
[ "Wraps", "function", "with", "scope", ".", "If", "scope_name", "is", "None", "current", "scope", "is", "used" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/ioc.py#L99-L104
train
50,074
eumis/pyviews
pyviews/core/ioc.py
inject
def inject(*injections): '''Resolves dependencies using global container and passed it with optional parameters''' def _decorate(func): def _decorated(*args, **kwargs): args = list(args) keys_to_inject = [name for name in injections if name not in kwargs] for key in keys_to_inject: kwargs[key] = get_current_scope().container.get(key) return func(*args, **kwargs) return _decorated return _decorate
python
def inject(*injections): '''Resolves dependencies using global container and passed it with optional parameters''' def _decorate(func): def _decorated(*args, **kwargs): args = list(args) keys_to_inject = [name for name in injections if name not in kwargs] for key in keys_to_inject: kwargs[key] = get_current_scope().container.get(key) return func(*args, **kwargs) return _decorated return _decorate
[ "def", "inject", "(", "*", "injections", ")", ":", "def", "_decorate", "(", "func", ")", ":", "def", "_decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "args", "=", "list", "(", "args", ")", "keys_to_inject", "=", "[", "name", "for"...
Resolves dependencies using global container and passed it with optional parameters
[ "Resolves", "dependencies", "using", "global", "container", "and", "passed", "it", "with", "optional", "parameters" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/ioc.py#L126-L136
train
50,075
eumis/pyviews
pyviews/core/ioc.py
Container.register
def register(self, key, initializer: callable, param=None): '''Add resolver to container''' if not callable(initializer): raise DependencyError('Initializer {0} is not callable'.format(initializer)) if key not in self._initializers: self._initializers[key] = {} self._initializers[key][param] = initializer
python
def register(self, key, initializer: callable, param=None): '''Add resolver to container''' if not callable(initializer): raise DependencyError('Initializer {0} is not callable'.format(initializer)) if key not in self._initializers: self._initializers[key] = {} self._initializers[key][param] = initializer
[ "def", "register", "(", "self", ",", "key", ",", "initializer", ":", "callable", ",", "param", "=", "None", ")", ":", "if", "not", "callable", "(", "initializer", ")", ":", "raise", "DependencyError", "(", "'Initializer {0} is not callable'", ".", "format", ...
Add resolver to container
[ "Add", "resolver", "to", "container" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/ioc.py#L16-L22
train
50,076
upsight/doctor
doctor/flask.py
create_routes
def create_routes(routes: Tuple[Route]) -> List[Tuple[str, Resource]]: """A thin wrapper around create_routes that passes in flask specific values. :param routes: A tuple containing the route and another tuple with all http methods allowed for the route. :returns: A list of tuples containing the route and generated handler. """ return doctor_create_routes( routes, handle_http, default_base_handler_class=Resource)
python
def create_routes(routes: Tuple[Route]) -> List[Tuple[str, Resource]]: """A thin wrapper around create_routes that passes in flask specific values. :param routes: A tuple containing the route and another tuple with all http methods allowed for the route. :returns: A list of tuples containing the route and generated handler. """ return doctor_create_routes( routes, handle_http, default_base_handler_class=Resource)
[ "def", "create_routes", "(", "routes", ":", "Tuple", "[", "Route", "]", ")", "->", "List", "[", "Tuple", "[", "str", ",", "Resource", "]", "]", ":", "return", "doctor_create_routes", "(", "routes", ",", "handle_http", ",", "default_base_handler_class", "=", ...
A thin wrapper around create_routes that passes in flask specific values. :param routes: A tuple containing the route and another tuple with all http methods allowed for the route. :returns: A list of tuples containing the route and generated handler.
[ "A", "thin", "wrapper", "around", "create_routes", "that", "passes", "in", "flask", "specific", "values", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/flask.py#L249-L257
train
50,077
kmedian/korr
korr/mcc.py
confusion_to_mcc
def confusion_to_mcc(*args): """Convert the confusion matrix to the Matthews correlation coefficient Parameters: ----------- cm : ndarray 2x2 confusion matrix with np.array([[tn, fp], [fn, tp]]) tn, fp, fn, tp : float four scalar variables - tn : number of true negatives - fp : number of false positives - fn : number of false negatives - tp : number of true positives Return: ------- r : float Matthews correlation coefficient """ if len(args) is 1: tn, fp, fn, tp = args[0].ravel().astype(float) elif len(args) is 4: tn, fp, fn, tp = [float(a) for a in args] else: raise Exception(( "Input argument is not an 2x2 matrix, " "nor 4 elements tn, fp, fn, tp.")) return (tp * tn - fp * fn) / np.sqrt( (tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
python
def confusion_to_mcc(*args): """Convert the confusion matrix to the Matthews correlation coefficient Parameters: ----------- cm : ndarray 2x2 confusion matrix with np.array([[tn, fp], [fn, tp]]) tn, fp, fn, tp : float four scalar variables - tn : number of true negatives - fp : number of false positives - fn : number of false negatives - tp : number of true positives Return: ------- r : float Matthews correlation coefficient """ if len(args) is 1: tn, fp, fn, tp = args[0].ravel().astype(float) elif len(args) is 4: tn, fp, fn, tp = [float(a) for a in args] else: raise Exception(( "Input argument is not an 2x2 matrix, " "nor 4 elements tn, fp, fn, tp.")) return (tp * tn - fp * fn) / np.sqrt( (tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
[ "def", "confusion_to_mcc", "(", "*", "args", ")", ":", "if", "len", "(", "args", ")", "is", "1", ":", "tn", ",", "fp", ",", "fn", ",", "tp", "=", "args", "[", "0", "]", ".", "ravel", "(", ")", ".", "astype", "(", "float", ")", "elif", "len", ...
Convert the confusion matrix to the Matthews correlation coefficient Parameters: ----------- cm : ndarray 2x2 confusion matrix with np.array([[tn, fp], [fn, tp]]) tn, fp, fn, tp : float four scalar variables - tn : number of true negatives - fp : number of false positives - fn : number of false negatives - tp : number of true positives Return: ------- r : float Matthews correlation coefficient
[ "Convert", "the", "confusion", "matrix", "to", "the", "Matthews", "correlation", "coefficient" ]
4eb86fc14b1fc1b69204069b7753d115b327c937
https://github.com/kmedian/korr/blob/4eb86fc14b1fc1b69204069b7753d115b327c937/korr/mcc.py#L6-L36
train
50,078
eumis/pyviews
pyviews/rendering/node.py
create_node
def create_node(xml_node: XmlNode, **init_args): '''Creates node from xml node using namespace as module and tag name as class name''' inst_type = get_inst_type(xml_node) init_args['xml_node'] = xml_node inst = create_inst(inst_type, **init_args) if not isinstance(inst, Node): inst = convert_to_node(inst, **init_args) return inst
python
def create_node(xml_node: XmlNode, **init_args): '''Creates node from xml node using namespace as module and tag name as class name''' inst_type = get_inst_type(xml_node) init_args['xml_node'] = xml_node inst = create_inst(inst_type, **init_args) if not isinstance(inst, Node): inst = convert_to_node(inst, **init_args) return inst
[ "def", "create_node", "(", "xml_node", ":", "XmlNode", ",", "*", "*", "init_args", ")", ":", "inst_type", "=", "get_inst_type", "(", "xml_node", ")", "init_args", "[", "'xml_node'", "]", "=", "xml_node", "inst", "=", "create_inst", "(", "inst_type", ",", "...
Creates node from xml node using namespace as module and tag name as class name
[ "Creates", "node", "from", "xml", "node", "using", "namespace", "as", "module", "and", "tag", "name", "as", "class", "name" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/node.py#L11-L18
train
50,079
eumis/pyviews
pyviews/rendering/node.py
get_inst_type
def get_inst_type(xml_node: XmlNode): '''Returns type by xml node''' (module_path, class_name) = (xml_node.namespace, xml_node.name) try: return import_module(module_path).__dict__[class_name] except (KeyError, ImportError, ModuleNotFoundError): message = 'Import "{0}.{1}" is failed.'.format(module_path, class_name) raise RenderingError(message, xml_node.view_info)
python
def get_inst_type(xml_node: XmlNode): '''Returns type by xml node''' (module_path, class_name) = (xml_node.namespace, xml_node.name) try: return import_module(module_path).__dict__[class_name] except (KeyError, ImportError, ModuleNotFoundError): message = 'Import "{0}.{1}" is failed.'.format(module_path, class_name) raise RenderingError(message, xml_node.view_info)
[ "def", "get_inst_type", "(", "xml_node", ":", "XmlNode", ")", ":", "(", "module_path", ",", "class_name", ")", "=", "(", "xml_node", ".", "namespace", ",", "xml_node", ".", "name", ")", "try", ":", "return", "import_module", "(", "module_path", ")", ".", ...
Returns type by xml node
[ "Returns", "type", "by", "xml", "node" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/node.py#L20-L27
train
50,080
eumis/pyviews
pyviews/rendering/node.py
create_inst
def create_inst(inst_type, **init_args): '''Creates class instance with args''' args, kwargs = get_init_args(inst_type, init_args) return inst_type(*args, **kwargs)
python
def create_inst(inst_type, **init_args): '''Creates class instance with args''' args, kwargs = get_init_args(inst_type, init_args) return inst_type(*args, **kwargs)
[ "def", "create_inst", "(", "inst_type", ",", "*", "*", "init_args", ")", ":", "args", ",", "kwargs", "=", "get_init_args", "(", "inst_type", ",", "init_args", ")", "return", "inst_type", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Creates class instance with args
[ "Creates", "class", "instance", "with", "args" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/node.py#L29-L32
train
50,081
eumis/pyviews
pyviews/rendering/node.py
get_init_args
def get_init_args(inst_type, init_args: dict, add_kwargs=False) -> Tuple[List, Dict]: '''Returns tuple with args and kwargs to pass it to inst_type constructor''' try: parameters = signature(inst_type).parameters.values() args_keys = [p.name for p in parameters \ if p.kind in [Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD] \ and p.default == Parameter.empty] args = [init_args[key] for key in args_keys] kwargs = _get_var_kwargs(parameters, args_keys, init_args)\ if add_kwargs else\ _get_kwargs(parameters, init_args) except KeyError as key_error: msg_format = 'parameter with key "{0}" is not found in node args' raise RenderingError(msg_format.format(key_error.args[0])) return (args, kwargs)
python
def get_init_args(inst_type, init_args: dict, add_kwargs=False) -> Tuple[List, Dict]: '''Returns tuple with args and kwargs to pass it to inst_type constructor''' try: parameters = signature(inst_type).parameters.values() args_keys = [p.name for p in parameters \ if p.kind in [Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD] \ and p.default == Parameter.empty] args = [init_args[key] for key in args_keys] kwargs = _get_var_kwargs(parameters, args_keys, init_args)\ if add_kwargs else\ _get_kwargs(parameters, init_args) except KeyError as key_error: msg_format = 'parameter with key "{0}" is not found in node args' raise RenderingError(msg_format.format(key_error.args[0])) return (args, kwargs)
[ "def", "get_init_args", "(", "inst_type", ",", "init_args", ":", "dict", ",", "add_kwargs", "=", "False", ")", "->", "Tuple", "[", "List", ",", "Dict", "]", ":", "try", ":", "parameters", "=", "signature", "(", "inst_type", ")", ".", "parameters", ".", ...
Returns tuple with args and kwargs to pass it to inst_type constructor
[ "Returns", "tuple", "with", "args", "and", "kwargs", "to", "pass", "it", "to", "inst_type", "constructor" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/node.py#L34-L48
train
50,082
eumis/pyviews
pyviews/rendering/node.py
convert_to_node
def convert_to_node(instance, xml_node: XmlNode, node_globals: InheritedDict = None)\ -> InstanceNode: '''Wraps passed instance with InstanceNode''' return InstanceNode(instance, xml_node, node_globals)
python
def convert_to_node(instance, xml_node: XmlNode, node_globals: InheritedDict = None)\ -> InstanceNode: '''Wraps passed instance with InstanceNode''' return InstanceNode(instance, xml_node, node_globals)
[ "def", "convert_to_node", "(", "instance", ",", "xml_node", ":", "XmlNode", ",", "node_globals", ":", "InheritedDict", "=", "None", ")", "->", "InstanceNode", ":", "return", "InstanceNode", "(", "instance", ",", "xml_node", ",", "node_globals", ")" ]
Wraps passed instance with InstanceNode
[ "Wraps", "passed", "instance", "with", "InstanceNode" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/node.py#L67-L70
train
50,083
Workiva/furious
furious/context/auto_context.py
AutoContext._auto_insert_check
def _auto_insert_check(self): """Automatically insert tasks asynchronously. Depending on batch_size, insert or wait until next call. """ if not self.batch_size: return if len(self._tasks) >= self.batch_size: self._handle_tasks()
python
def _auto_insert_check(self): """Automatically insert tasks asynchronously. Depending on batch_size, insert or wait until next call. """ if not self.batch_size: return if len(self._tasks) >= self.batch_size: self._handle_tasks()
[ "def", "_auto_insert_check", "(", "self", ")", ":", "if", "not", "self", ".", "batch_size", ":", "return", "if", "len", "(", "self", ".", "_tasks", ")", ">=", "self", ".", "batch_size", ":", "self", ".", "_handle_tasks", "(", ")" ]
Automatically insert tasks asynchronously. Depending on batch_size, insert or wait until next call.
[ "Automatically", "insert", "tasks", "asynchronously", ".", "Depending", "on", "batch_size", "insert", "or", "wait", "until", "next", "call", "." ]
c29823ec8b98549e7439d7273aa064d1e5830632
https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/context/auto_context.py#L56-L65
train
50,084
wallento/riscv-python-model
riscvmodel/insn.py
isa
def isa(mnemonic: str, opcode: int, funct3: int=None, funct7: int=None, *, variant=RV32I, extension=None): """ Decorator for the instructions. The decorator contains the static information for the instructions, in particular the encoding parameters and the assembler mnemonic. :param mnemonic: Assembler mnemonic :param opcode: Opcode of this instruction :param funct3: 3 bit function code on bits 14 to 12 (R-, I-, S- and B-type) :param funct7: 7 bit function code on bits 31 to 25 (R-type) :return: Wrapper class that overwrites the actual definition and contains static data """ def wrapper(wrapped): """Get wrapper""" class WrappedClass(wrapped): """Generic wrapper class""" _mnemonic = mnemonic _opcode = opcode _funct3 = funct3 _funct7 = funct7 _variant = variant _extension = extension @staticmethod def _match(machinecode: int): """Try to match a machine code to this instruction""" f3 = (machinecode >> 12) & 0x7 f7 = (machinecode >> 25) & 0x7f if funct3 is not None and f3 != funct3: return False if funct7 is not None and f7 != funct7: return False return True WrappedClass.__name__ = wrapped.__name__ WrappedClass.__module__ = wrapped.__module__ WrappedClass.__qualname__ = wrapped.__qualname__ return WrappedClass return wrapper
python
def isa(mnemonic: str, opcode: int, funct3: int=None, funct7: int=None, *, variant=RV32I, extension=None): """ Decorator for the instructions. The decorator contains the static information for the instructions, in particular the encoding parameters and the assembler mnemonic. :param mnemonic: Assembler mnemonic :param opcode: Opcode of this instruction :param funct3: 3 bit function code on bits 14 to 12 (R-, I-, S- and B-type) :param funct7: 7 bit function code on bits 31 to 25 (R-type) :return: Wrapper class that overwrites the actual definition and contains static data """ def wrapper(wrapped): """Get wrapper""" class WrappedClass(wrapped): """Generic wrapper class""" _mnemonic = mnemonic _opcode = opcode _funct3 = funct3 _funct7 = funct7 _variant = variant _extension = extension @staticmethod def _match(machinecode: int): """Try to match a machine code to this instruction""" f3 = (machinecode >> 12) & 0x7 f7 = (machinecode >> 25) & 0x7f if funct3 is not None and f3 != funct3: return False if funct7 is not None and f7 != funct7: return False return True WrappedClass.__name__ = wrapped.__name__ WrappedClass.__module__ = wrapped.__module__ WrappedClass.__qualname__ = wrapped.__qualname__ return WrappedClass return wrapper
[ "def", "isa", "(", "mnemonic", ":", "str", ",", "opcode", ":", "int", ",", "funct3", ":", "int", "=", "None", ",", "funct7", ":", "int", "=", "None", ",", "*", ",", "variant", "=", "RV32I", ",", "extension", "=", "None", ")", ":", "def", "wrapper...
Decorator for the instructions. The decorator contains the static information for the instructions, in particular the encoding parameters and the assembler mnemonic. :param mnemonic: Assembler mnemonic :param opcode: Opcode of this instruction :param funct3: 3 bit function code on bits 14 to 12 (R-, I-, S- and B-type) :param funct7: 7 bit function code on bits 31 to 25 (R-type) :return: Wrapper class that overwrites the actual definition and contains static data
[ "Decorator", "for", "the", "instructions", ".", "The", "decorator", "contains", "the", "static", "information", "for", "the", "instructions", "in", "particular", "the", "encoding", "parameters", "and", "the", "assembler", "mnemonic", "." ]
51df07d16b79b143eb3d3c1e95bf26030c64a39b
https://github.com/wallento/riscv-python-model/blob/51df07d16b79b143eb3d3c1e95bf26030c64a39b/riscvmodel/insn.py#L394-L431
train
50,085
wallento/riscv-python-model
riscvmodel/insn.py
get_insns
def get_insns(cls = None): """ Get all Instructions. This is based on all known subclasses of `cls`. If non is given, all Instructions are returned. Only such instructions are returned that can be generated, i.e., that have a mnemonic, opcode, etc. So other classes in the hierarchy are not matched. :param cls: Base class to get list :type cls: Instruction :return: List of instructions """ insns = [] if cls is None: cls = Instruction if "_mnemonic" in cls.__dict__.keys(): insns = [cls] for subcls in cls.__subclasses__(): insns += get_insns(subcls) return insns
python
def get_insns(cls = None): """ Get all Instructions. This is based on all known subclasses of `cls`. If non is given, all Instructions are returned. Only such instructions are returned that can be generated, i.e., that have a mnemonic, opcode, etc. So other classes in the hierarchy are not matched. :param cls: Base class to get list :type cls: Instruction :return: List of instructions """ insns = [] if cls is None: cls = Instruction if "_mnemonic" in cls.__dict__.keys(): insns = [cls] for subcls in cls.__subclasses__(): insns += get_insns(subcls) return insns
[ "def", "get_insns", "(", "cls", "=", "None", ")", ":", "insns", "=", "[", "]", "if", "cls", "is", "None", ":", "cls", "=", "Instruction", "if", "\"_mnemonic\"", "in", "cls", ".", "__dict__", ".", "keys", "(", ")", ":", "insns", "=", "[", "cls", "...
Get all Instructions. This is based on all known subclasses of `cls`. If non is given, all Instructions are returned. Only such instructions are returned that can be generated, i.e., that have a mnemonic, opcode, etc. So other classes in the hierarchy are not matched. :param cls: Base class to get list :type cls: Instruction :return: List of instructions
[ "Get", "all", "Instructions", ".", "This", "is", "based", "on", "all", "known", "subclasses", "of", "cls", ".", "If", "non", "is", "given", "all", "Instructions", "are", "returned", ".", "Only", "such", "instructions", "are", "returned", "that", "can", "be...
51df07d16b79b143eb3d3c1e95bf26030c64a39b
https://github.com/wallento/riscv-python-model/blob/51df07d16b79b143eb3d3c1e95bf26030c64a39b/riscvmodel/insn.py#L446-L467
train
50,086
wallento/riscv-python-model
riscvmodel/insn.py
reverse_lookup
def reverse_lookup(mnemonic: str): """ Find instruction that matches the mnemonic. :param mnemonic: Mnemonic to match :return: :class:`Instruction` that matches or None """ for i in get_insns(): if "_mnemonic" in i.__dict__ and i._mnemonic == mnemonic: return i return None
python
def reverse_lookup(mnemonic: str): """ Find instruction that matches the mnemonic. :param mnemonic: Mnemonic to match :return: :class:`Instruction` that matches or None """ for i in get_insns(): if "_mnemonic" in i.__dict__ and i._mnemonic == mnemonic: return i return None
[ "def", "reverse_lookup", "(", "mnemonic", ":", "str", ")", ":", "for", "i", "in", "get_insns", "(", ")", ":", "if", "\"_mnemonic\"", "in", "i", ".", "__dict__", "and", "i", ".", "_mnemonic", "==", "mnemonic", ":", "return", "i", "return", "None" ]
Find instruction that matches the mnemonic. :param mnemonic: Mnemonic to match :return: :class:`Instruction` that matches or None
[ "Find", "instruction", "that", "matches", "the", "mnemonic", "." ]
51df07d16b79b143eb3d3c1e95bf26030c64a39b
https://github.com/wallento/riscv-python-model/blob/51df07d16b79b143eb3d3c1e95bf26030c64a39b/riscvmodel/insn.py#L470-L481
train
50,087
upsight/doctor
doctor/types.py
get_value_from_schema
def get_value_from_schema(schema, definition: dict, key: str, definition_key: str): """Gets a value from a schema and definition. If the value has references it will recursively attempt to resolve them. :param ResourceSchema schema: The resource schema. :param dict definition: The definition dict from the schema. :param str key: The key to use to get the value from the schema. :param str definition_key: The name of the definition. :returns: The value. :raises TypeSystemError: If the key can't be found in the schema/definition or we can't resolve the definition. """ resolved_definition = definition.copy() if '$ref' in resolved_definition: try: # NOTE: The resolve method recursively resolves references, so # we don't need to worry about that in this function. resolved_definition = schema.resolve(definition['$ref']) except SchemaError as e: raise TypeSystemError(str(e)) try: value = resolved_definition[key] except KeyError: # Before raising an error, the resolved definition may have an array # or object inside it that needs to be resolved in order to get # values. Attempt that here and then fail if we still can't find # the key we are looking for. # If the key was missing and this is an array, try to resolve it # from the items key. if resolved_definition['type'] == 'array': return [ get_value_from_schema(schema, resolved_definition['items'], key, definition_key) ] # If the key was missing and this is an object, resolve it from it's # properties. elif resolved_definition['type'] == 'object': value = {} for prop, definition in resolved_definition['properties'].items(): value[prop] = get_value_from_schema( schema, definition, key, definition_key) return value raise TypeSystemError( 'Definition `{}` is missing a {}.'.format( definition_key, key)) return value
python
def get_value_from_schema(schema, definition: dict, key: str, definition_key: str): """Gets a value from a schema and definition. If the value has references it will recursively attempt to resolve them. :param ResourceSchema schema: The resource schema. :param dict definition: The definition dict from the schema. :param str key: The key to use to get the value from the schema. :param str definition_key: The name of the definition. :returns: The value. :raises TypeSystemError: If the key can't be found in the schema/definition or we can't resolve the definition. """ resolved_definition = definition.copy() if '$ref' in resolved_definition: try: # NOTE: The resolve method recursively resolves references, so # we don't need to worry about that in this function. resolved_definition = schema.resolve(definition['$ref']) except SchemaError as e: raise TypeSystemError(str(e)) try: value = resolved_definition[key] except KeyError: # Before raising an error, the resolved definition may have an array # or object inside it that needs to be resolved in order to get # values. Attempt that here and then fail if we still can't find # the key we are looking for. # If the key was missing and this is an array, try to resolve it # from the items key. if resolved_definition['type'] == 'array': return [ get_value_from_schema(schema, resolved_definition['items'], key, definition_key) ] # If the key was missing and this is an object, resolve it from it's # properties. elif resolved_definition['type'] == 'object': value = {} for prop, definition in resolved_definition['properties'].items(): value[prop] = get_value_from_schema( schema, definition, key, definition_key) return value raise TypeSystemError( 'Definition `{}` is missing a {}.'.format( definition_key, key)) return value
[ "def", "get_value_from_schema", "(", "schema", ",", "definition", ":", "dict", ",", "key", ":", "str", ",", "definition_key", ":", "str", ")", ":", "resolved_definition", "=", "definition", ".", "copy", "(", ")", "if", "'$ref'", "in", "resolved_definition", ...
Gets a value from a schema and definition. If the value has references it will recursively attempt to resolve them. :param ResourceSchema schema: The resource schema. :param dict definition: The definition dict from the schema. :param str key: The key to use to get the value from the schema. :param str definition_key: The name of the definition. :returns: The value. :raises TypeSystemError: If the key can't be found in the schema/definition or we can't resolve the definition.
[ "Gets", "a", "value", "from", "a", "schema", "and", "definition", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/types.py#L726-L774
train
50,088
upsight/doctor
doctor/types.py
get_types
def get_types(json_type: StrOrList) -> typing.Tuple[str, str]: """Returns the json and native python type based on the json_type input. If json_type is a list of types it will return the first non 'null' value. :param json_type: A json type or a list of json types. :returns: A tuple containing the json type and native python type. """ # If the type is a list, use the first non 'null' value as the type. if isinstance(json_type, list): for j_type in json_type: if j_type != 'null': json_type = j_type break return (json_type, JSON_TYPES_TO_NATIVE[json_type])
python
def get_types(json_type: StrOrList) -> typing.Tuple[str, str]: """Returns the json and native python type based on the json_type input. If json_type is a list of types it will return the first non 'null' value. :param json_type: A json type or a list of json types. :returns: A tuple containing the json type and native python type. """ # If the type is a list, use the first non 'null' value as the type. if isinstance(json_type, list): for j_type in json_type: if j_type != 'null': json_type = j_type break return (json_type, JSON_TYPES_TO_NATIVE[json_type])
[ "def", "get_types", "(", "json_type", ":", "StrOrList", ")", "->", "typing", ".", "Tuple", "[", "str", ",", "str", "]", ":", "# If the type is a list, use the first non 'null' value as the type.", "if", "isinstance", "(", "json_type", ",", "list", ")", ":", "for",...
Returns the json and native python type based on the json_type input. If json_type is a list of types it will return the first non 'null' value. :param json_type: A json type or a list of json types. :returns: A tuple containing the json type and native python type.
[ "Returns", "the", "json", "and", "native", "python", "type", "based", "on", "the", "json_type", "input", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/types.py#L777-L791
train
50,089
upsight/doctor
doctor/types.py
new_type
def new_type(cls, **kwargs) -> typing.Type: """Create a user defined type. The new type will contain all attributes of the `cls` type passed in. Any attribute's value can be overwritten using kwargs. :param kwargs: Can include any attribute defined in the provided user defined type. """ props = dict(cls.__dict__) props.update(kwargs) return type(cls.__name__, (cls,), props)
python
def new_type(cls, **kwargs) -> typing.Type: """Create a user defined type. The new type will contain all attributes of the `cls` type passed in. Any attribute's value can be overwritten using kwargs. :param kwargs: Can include any attribute defined in the provided user defined type. """ props = dict(cls.__dict__) props.update(kwargs) return type(cls.__name__, (cls,), props)
[ "def", "new_type", "(", "cls", ",", "*", "*", "kwargs", ")", "->", "typing", ".", "Type", ":", "props", "=", "dict", "(", "cls", ".", "__dict__", ")", "props", ".", "update", "(", "kwargs", ")", "return", "type", "(", "cls", ".", "__name__", ",", ...
Create a user defined type. The new type will contain all attributes of the `cls` type passed in. Any attribute's value can be overwritten using kwargs. :param kwargs: Can include any attribute defined in the provided user defined type.
[ "Create", "a", "user", "defined", "type", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/types.py#L925-L936
train
50,090
upsight/doctor
doctor/types.py
Object.get_example
def get_example(cls) -> dict: """Returns an example value for the Dict type. If an example isn't a defined attribute on the class we return a dict of example values based on each property's annotation. """ if cls.example is not None: return cls.example return {k: v.get_example() for k, v in cls.properties.items()}
python
def get_example(cls) -> dict: """Returns an example value for the Dict type. If an example isn't a defined attribute on the class we return a dict of example values based on each property's annotation. """ if cls.example is not None: return cls.example return {k: v.get_example() for k, v in cls.properties.items()}
[ "def", "get_example", "(", "cls", ")", "->", "dict", ":", "if", "cls", ".", "example", "is", "not", "None", ":", "return", "cls", ".", "example", "return", "{", "k", ":", "v", ".", "get_example", "(", ")", "for", "k", ",", "v", "in", "cls", ".", ...
Returns an example value for the Dict type. If an example isn't a defined attribute on the class we return a dict of example values based on each property's annotation.
[ "Returns", "an", "example", "value", "for", "the", "Dict", "type", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/types.py#L563-L571
train
50,091
upsight/doctor
doctor/types.py
Array.get_example
def get_example(cls) -> list: """Returns an example value for the Array type. If an example isn't a defined attribute on the class we return a list of 1 item containing the example value of the `items` attribute. If `items` is None we simply return a `[1]`. """ if cls.example is not None: return cls.example if cls.items is not None: if isinstance(cls.items, list): return [item.get_example() for item in cls.items] else: return [cls.items.get_example()] return [1]
python
def get_example(cls) -> list: """Returns an example value for the Array type. If an example isn't a defined attribute on the class we return a list of 1 item containing the example value of the `items` attribute. If `items` is None we simply return a `[1]`. """ if cls.example is not None: return cls.example if cls.items is not None: if isinstance(cls.items, list): return [item.get_example() for item in cls.items] else: return [cls.items.get_example()] return [1]
[ "def", "get_example", "(", "cls", ")", "->", "list", ":", "if", "cls", ".", "example", "is", "not", "None", ":", "return", "cls", ".", "example", "if", "cls", ".", "items", "is", "not", "None", ":", "if", "isinstance", "(", "cls", ".", "items", ","...
Returns an example value for the Array type. If an example isn't a defined attribute on the class we return a list of 1 item containing the example value of the `items` attribute. If `items` is None we simply return a `[1]`.
[ "Returns", "an", "example", "value", "for", "the", "Array", "type", "." ]
2cf1d433f6f1aa1355644b449a757c0660793cdd
https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/types.py#L650-L664
train
50,092
eumis/pyviews
pyviews/rendering/pipeline.py
render_node
def render_node(xml_node: XmlNode, **args) -> Node: """Renders node from xml node""" try: node = create_node(xml_node, **args) pipeline = get_pipeline(node) run_steps(node, pipeline, **args) return node except CoreError as error: error.add_view_info(xml_node.view_info) raise except: info = exc_info() msg = 'Unknown error occurred during rendering' error = RenderingError(msg, xml_node.view_info) error.add_cause(info[1]) raise error from info[1]
python
def render_node(xml_node: XmlNode, **args) -> Node: """Renders node from xml node""" try: node = create_node(xml_node, **args) pipeline = get_pipeline(node) run_steps(node, pipeline, **args) return node except CoreError as error: error.add_view_info(xml_node.view_info) raise except: info = exc_info() msg = 'Unknown error occurred during rendering' error = RenderingError(msg, xml_node.view_info) error.add_cause(info[1]) raise error from info[1]
[ "def", "render_node", "(", "xml_node", ":", "XmlNode", ",", "*", "*", "args", ")", "->", "Node", ":", "try", ":", "node", "=", "create_node", "(", "xml_node", ",", "*", "*", "args", ")", "pipeline", "=", "get_pipeline", "(", "node", ")", "run_steps", ...
Renders node from xml node
[ "Renders", "node", "from", "xml", "node" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L19-L34
train
50,093
eumis/pyviews
pyviews/rendering/pipeline.py
get_pipeline
def get_pipeline(node: Node) -> RenderingPipeline: """Gets rendering pipeline for passed node""" pipeline = _get_registered_pipeline(node) if pipeline is None: msg = _get_pipeline_registration_error_message(node) raise RenderingError(msg) return pipeline
python
def get_pipeline(node: Node) -> RenderingPipeline: """Gets rendering pipeline for passed node""" pipeline = _get_registered_pipeline(node) if pipeline is None: msg = _get_pipeline_registration_error_message(node) raise RenderingError(msg) return pipeline
[ "def", "get_pipeline", "(", "node", ":", "Node", ")", "->", "RenderingPipeline", ":", "pipeline", "=", "_get_registered_pipeline", "(", "node", ")", "if", "pipeline", "is", "None", ":", "msg", "=", "_get_pipeline_registration_error_message", "(", "node", ")", "r...
Gets rendering pipeline for passed node
[ "Gets", "rendering", "pipeline", "for", "passed", "node" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L37-L43
train
50,094
eumis/pyviews
pyviews/rendering/pipeline.py
run_steps
def run_steps(node: Node, pipeline: RenderingPipeline, **args): """Runs instance node rendering steps""" for step in pipeline.steps: result = step(node, pipeline=pipeline, **args) if isinstance(result, dict): args = {**args, **result}
python
def run_steps(node: Node, pipeline: RenderingPipeline, **args): """Runs instance node rendering steps""" for step in pipeline.steps: result = step(node, pipeline=pipeline, **args) if isinstance(result, dict): args = {**args, **result}
[ "def", "run_steps", "(", "node", ":", "Node", ",", "pipeline", ":", "RenderingPipeline", ",", "*", "*", "args", ")", ":", "for", "step", "in", "pipeline", ".", "steps", ":", "result", "=", "step", "(", "node", ",", "pipeline", "=", "pipeline", ",", "...
Runs instance node rendering steps
[ "Runs", "instance", "node", "rendering", "steps" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L66-L71
train
50,095
eumis/pyviews
pyviews/rendering/pipeline.py
apply_attributes
def apply_attributes(node: Node, **_): """Applies xml attributes to instance node and setups bindings""" for attr in node.xml_node.attrs: apply_attribute(node, attr)
python
def apply_attributes(node: Node, **_): """Applies xml attributes to instance node and setups bindings""" for attr in node.xml_node.attrs: apply_attribute(node, attr)
[ "def", "apply_attributes", "(", "node", ":", "Node", ",", "*", "*", "_", ")", ":", "for", "attr", "in", "node", ".", "xml_node", ".", "attrs", ":", "apply_attribute", "(", "node", ",", "attr", ")" ]
Applies xml attributes to instance node and setups bindings
[ "Applies", "xml", "attributes", "to", "instance", "node", "and", "setups", "bindings" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L74-L77
train
50,096
eumis/pyviews
pyviews/rendering/pipeline.py
apply_attribute
def apply_attribute(node: Node, attr: XmlAttr): """Maps xml attribute to instance node property and setups bindings""" setter = get_setter(attr) stripped_value = attr.value.strip() if attr.value else '' if is_expression(stripped_value): (binding_type, expr_body) = parse_expression(stripped_value) binder().apply(binding_type, node=node, attr=attr, modifier=setter, expr_body=expr_body) else: setter(node, attr.name, attr.value)
python
def apply_attribute(node: Node, attr: XmlAttr): """Maps xml attribute to instance node property and setups bindings""" setter = get_setter(attr) stripped_value = attr.value.strip() if attr.value else '' if is_expression(stripped_value): (binding_type, expr_body) = parse_expression(stripped_value) binder().apply(binding_type, node=node, attr=attr, modifier=setter, expr_body=expr_body) else: setter(node, attr.name, attr.value)
[ "def", "apply_attribute", "(", "node", ":", "Node", ",", "attr", ":", "XmlAttr", ")", ":", "setter", "=", "get_setter", "(", "attr", ")", "stripped_value", "=", "attr", ".", "value", ".", "strip", "(", ")", "if", "attr", ".", "value", "else", "''", "...
Maps xml attribute to instance node property and setups bindings
[ "Maps", "xml", "attribute", "to", "instance", "node", "property", "and", "setups", "bindings" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L80-L88
train
50,097
eumis/pyviews
pyviews/rendering/pipeline.py
call_set_attr
def call_set_attr(node: Node, key: str, value): """Calls node setter""" node.set_attr(key, value)
python
def call_set_attr(node: Node, key: str, value): """Calls node setter""" node.set_attr(key, value)
[ "def", "call_set_attr", "(", "node", ":", "Node", ",", "key", ":", "str", ",", "value", ")", ":", "node", ".", "set_attr", "(", "key", ",", "value", ")" ]
Calls node setter
[ "Calls", "node", "setter" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L98-L100
train
50,098
eumis/pyviews
pyviews/rendering/pipeline.py
render_children
def render_children(node: Node, **child_args): """Render node children""" for xml_node in node.xml_node.children: child = render(xml_node, **child_args) node.add_child(child)
python
def render_children(node: Node, **child_args): """Render node children""" for xml_node in node.xml_node.children: child = render(xml_node, **child_args) node.add_child(child)
[ "def", "render_children", "(", "node", ":", "Node", ",", "*", "*", "child_args", ")", ":", "for", "xml_node", "in", "node", ".", "xml_node", ".", "children", ":", "child", "=", "render", "(", "xml_node", ",", "*", "*", "child_args", ")", "node", ".", ...
Render node children
[ "Render", "node", "children" ]
80a868242ee9cdc6f4ded594b3e0544cc238ed55
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/pipeline.py#L103-L107
train
50,099