repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
swimlane/swimlane-python
swimlane/core/client.py
Swimlane.product_version
def product_version(self): """Swimlane product version""" version_separator = '+' if version_separator in self.version: # Post product/build version separation return self.version.split(version_separator)[0] # Pre product/build version separation return self.version.split('-')[0]
python
def product_version(self):
    """Swimlane product version"""
    # Newer servers separate the product and build segments with '+',
    # older ones with '-'; the product version is always the first segment
    separator = '+' if '+' in self.version else '-'
    return self.version.split(separator)[0]
[ "def", "product_version", "(", "self", ")", ":", "version_separator", "=", "'+'", "if", "version_separator", "in", "self", ".", "version", ":", "# Post product/build version separation", "return", "self", ".", "version", ".", "split", "(", "version_separator", ")", "[", "0", "]", "# Pre product/build version separation", "return", "self", ".", "version", ".", "split", "(", "'-'", ")", "[", "0", "]" ]
Swimlane product version
[ "Swimlane", "product", "version" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/client.py#L241-L248
train
swimlane/swimlane-python
swimlane/core/client.py
Swimlane.build_number
def build_number(self): """Swimlane build number""" version_separator = '+' if version_separator in self.version: # Post product/build version separation return self.version.split(version_separator)[2] # Pre product/build version separation return self.version.split('-')[1]
python
def build_number(self):
    """Swimlane build number"""
    # Newer servers carry the build number as the third '+'-separated segment,
    # older ones as the second '-'-separated segment
    if '+' in self.version:
        return self.version.split('+')[2]
    return self.version.split('-')[1]
[ "def", "build_number", "(", "self", ")", ":", "version_separator", "=", "'+'", "if", "version_separator", "in", "self", ".", "version", ":", "# Post product/build version separation", "return", "self", ".", "version", ".", "split", "(", "version_separator", ")", "[", "2", "]", "# Pre product/build version separation", "return", "self", ".", "version", ".", "split", "(", "'-'", ")", "[", "1", "]" ]
Swimlane build number
[ "Swimlane", "build", "number" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/client.py#L264-L271
train
swimlane/swimlane-python
swimlane/core/client.py
SwimlaneJwtAuth.authenticate
def authenticate(self): """Send login request and update User instance, login headers, and token expiration""" # Temporarily remove auth from Swimlane session for auth request to avoid recursive loop during login request self._swimlane._session.auth = None resp = self._swimlane.request( 'post', 'user/login', json={ 'userName': self._username, 'password': self._password }, ) self._swimlane._session.auth = self # Get JWT from response content json_content = resp.json() token = json_content.pop('token', None) # Grab token expiration token_data = jwt.decode(token, verify=False) token_expiration = pendulum.from_timestamp(token_data['exp']) headers = { 'Authorization': 'Bearer {}'.format(token) } # Create User instance for authenticating user from login response data user = User(self._swimlane, _user_raw_from_login_content(json_content)) self._login_headers = headers self.user = user self._token_expiration = token_expiration
python
def authenticate(self):
    """Send login request and update User instance, login headers, and token expiration

    Raises:
        Whatever ``self._swimlane.request`` raises on a failed login request.
    """
    # Temporarily remove auth from Swimlane session for auth request to avoid
    # recursive loop during login request. Restore it in a finally block so a
    # failed login does not permanently strip auth from the shared session.
    self._swimlane._session.auth = None
    try:
        resp = self._swimlane.request(
            'post',
            'user/login',
            json={
                'userName': self._username,
                'password': self._password
            },
        )
    finally:
        self._swimlane._session.auth = self

    # Get JWT from response content
    json_content = resp.json()
    token = json_content.pop('token', None)

    # Grab token expiration from the JWT payload.
    # NOTE(review): verify=False skips signature verification; the token comes
    # directly from the server and is only read here for its 'exp' claim
    token_data = jwt.decode(token, verify=False)
    token_expiration = pendulum.from_timestamp(token_data['exp'])

    headers = {
        'Authorization': 'Bearer {}'.format(token)
    }

    # Create User instance for authenticating user from login response data
    user = User(self._swimlane, _user_raw_from_login_content(json_content))

    self._login_headers = headers
    self.user = user
    self._token_expiration = token_expiration
[ "def", "authenticate", "(", "self", ")", ":", "# Temporarily remove auth from Swimlane session for auth request to avoid recursive loop during login request", "self", ".", "_swimlane", ".", "_session", ".", "auth", "=", "None", "resp", "=", "self", ".", "_swimlane", ".", "request", "(", "'post'", ",", "'user/login'", ",", "json", "=", "{", "'userName'", ":", "self", ".", "_username", ",", "'password'", ":", "self", ".", "_password", "}", ",", ")", "self", ".", "_swimlane", ".", "_session", ".", "auth", "=", "self", "# Get JWT from response content", "json_content", "=", "resp", ".", "json", "(", ")", "token", "=", "json_content", ".", "pop", "(", "'token'", ",", "None", ")", "# Grab token expiration", "token_data", "=", "jwt", ".", "decode", "(", "token", ",", "verify", "=", "False", ")", "token_expiration", "=", "pendulum", ".", "from_timestamp", "(", "token_data", "[", "'exp'", "]", ")", "headers", "=", "{", "'Authorization'", ":", "'Bearer {}'", ".", "format", "(", "token", ")", "}", "# Create User instance for authenticating user from login response data", "user", "=", "User", "(", "self", ".", "_swimlane", ",", "_user_raw_from_login_content", "(", "json_content", ")", ")", "self", ".", "_login_headers", "=", "headers", "self", ".", "user", "=", "user", "self", ".", "_token_expiration", "=", "token_expiration" ]
Send login request and update User instance, login headers, and token expiration
[ "Send", "login", "request", "and", "update", "User", "instance", "login", "headers", "and", "token", "expiration" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/client.py#L349-L381
train
swimlane/swimlane-python
swimlane/core/cursor.py
PaginatedCursor._evaluate
def _evaluate(self): """Lazily retrieve and paginate report results and build Record instances from returned data""" if self._elements: for element in self._elements: yield element else: for page in itertools.count(): raw_elements = self._retrieve_raw_elements(page) for raw_element in raw_elements: element = self._parse_raw_element(raw_element) self._elements.append(element) yield element if self.__limit and len(self._elements) >= self.__limit: break if any([ len(raw_elements) < self.page_size, (self.__limit and len(self._elements) >= self.__limit) ]): break
python
def _evaluate(self):
    """Lazily retrieve and paginate report results and build Record instances from returned data

    Yields cached elements when the cursor has already been evaluated;
    otherwise pages through raw results, caching each parsed element as it
    is yielded so subsequent iterations are served from memory.
    """
    if self._elements:
        # Already evaluated once; replay cached elements
        for element in self._elements:
            yield element
    else:
        # itertools.count() supplies an unbounded page index; loop exits via breaks below
        for page in itertools.count():
            raw_elements = self._retrieve_raw_elements(page)

            for raw_element in raw_elements:
                element = self._parse_raw_element(raw_element)
                self._elements.append(element)
                yield element
                # Stop mid-page as soon as the optional element limit is reached
                if self.__limit and len(self._elements) >= self.__limit:
                    break

            # Stop paging on a short (final) page, or once the limit is reached
            if any([
                len(raw_elements) < self.page_size,
                (self.__limit and len(self._elements) >= self.__limit)
            ]):
                break
[ "def", "_evaluate", "(", "self", ")", ":", "if", "self", ".", "_elements", ":", "for", "element", "in", "self", ".", "_elements", ":", "yield", "element", "else", ":", "for", "page", "in", "itertools", ".", "count", "(", ")", ":", "raw_elements", "=", "self", ".", "_retrieve_raw_elements", "(", "page", ")", "for", "raw_element", "in", "raw_elements", ":", "element", "=", "self", ".", "_parse_raw_element", "(", "raw_element", ")", "self", ".", "_elements", ".", "append", "(", "element", ")", "yield", "element", "if", "self", ".", "__limit", "and", "len", "(", "self", ".", "_elements", ")", ">=", "self", ".", "__limit", ":", "break", "if", "any", "(", "[", "len", "(", "raw_elements", ")", "<", "self", ".", "page_size", ",", "(", "self", ".", "__limit", "and", "len", "(", "self", ".", "_elements", ")", ">=", "self", ".", "__limit", ")", "]", ")", ":", "break" ]
Lazily retrieve and paginate report results and build Record instances from returned data
[ "Lazily", "retrieve", "and", "paginate", "report", "results", "and", "build", "Record", "instances", "from", "returned", "data" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/cursor.py#L44-L64
train
swimlane/swimlane-python
swimlane/core/fields/usergroup.py
UserGroupField._validate_user
def _validate_user(self, user): """Validate a User instance against allowed user IDs or membership in a group""" # All users allowed if self._show_all_users: return # User specifically allowed if user.id in self._allowed_user_ids: return # User allowed by group membership user_member_group_ids = set([g['id'] for g in user._raw['groups']]) if user_member_group_ids & self._allowed_member_ids: return raise ValidationError( self.record, 'User `{}` is not a valid selection for field `{}`'.format( user, self.name ) )
python
def _validate_user(self, user): """Validate a User instance against allowed user IDs or membership in a group""" # All users allowed if self._show_all_users: return # User specifically allowed if user.id in self._allowed_user_ids: return # User allowed by group membership user_member_group_ids = set([g['id'] for g in user._raw['groups']]) if user_member_group_ids & self._allowed_member_ids: return raise ValidationError( self.record, 'User `{}` is not a valid selection for field `{}`'.format( user, self.name ) )
[ "def", "_validate_user", "(", "self", ",", "user", ")", ":", "# All users allowed", "if", "self", ".", "_show_all_users", ":", "return", "# User specifically allowed", "if", "user", ".", "id", "in", "self", ".", "_allowed_user_ids", ":", "return", "# User allowed by group membership", "user_member_group_ids", "=", "set", "(", "[", "g", "[", "'id'", "]", "for", "g", "in", "user", ".", "_raw", "[", "'groups'", "]", "]", ")", "if", "user_member_group_ids", "&", "self", ".", "_allowed_member_ids", ":", "return", "raise", "ValidationError", "(", "self", ".", "record", ",", "'User `{}` is not a valid selection for field `{}`'", ".", "format", "(", "user", ",", "self", ".", "name", ")", ")" ]
Validate a User instance against allowed user IDs or membership in a group
[ "Validate", "a", "User", "instance", "against", "allowed", "user", "IDs", "or", "membership", "in", "a", "group" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/usergroup.py#L56-L77
train
swimlane/swimlane-python
swimlane/core/fields/usergroup.py
UserGroupField._validate_group
def _validate_group(self, group): """Validate a Group instance against allowed group IDs or subgroup of a parent group""" # All groups allowed if self._show_all_groups: return # Group specifically allowed if group.id in self._allowed_group_ids: return # Group allowed by subgroup membership for parent_group_id in self._allowed_subgroup_ids: # Get each group, and check subgroup ids parent_group = self._swimlane.groups.get(id=parent_group_id) parent_group_child_ids = set([g['id'] for g in parent_group._raw['groups']]) if group.id in parent_group_child_ids: return raise ValidationError( self.record, 'Group `{}` is not a valid selection for field `{}`'.format( group, self.name ) )
python
def _validate_group(self, group): """Validate a Group instance against allowed group IDs or subgroup of a parent group""" # All groups allowed if self._show_all_groups: return # Group specifically allowed if group.id in self._allowed_group_ids: return # Group allowed by subgroup membership for parent_group_id in self._allowed_subgroup_ids: # Get each group, and check subgroup ids parent_group = self._swimlane.groups.get(id=parent_group_id) parent_group_child_ids = set([g['id'] for g in parent_group._raw['groups']]) if group.id in parent_group_child_ids: return raise ValidationError( self.record, 'Group `{}` is not a valid selection for field `{}`'.format( group, self.name ) )
[ "def", "_validate_group", "(", "self", ",", "group", ")", ":", "# All groups allowed", "if", "self", ".", "_show_all_groups", ":", "return", "# Group specifically allowed", "if", "group", ".", "id", "in", "self", ".", "_allowed_group_ids", ":", "return", "# Group allowed by subgroup membership", "for", "parent_group_id", "in", "self", ".", "_allowed_subgroup_ids", ":", "# Get each group, and check subgroup ids", "parent_group", "=", "self", ".", "_swimlane", ".", "groups", ".", "get", "(", "id", "=", "parent_group_id", ")", "parent_group_child_ids", "=", "set", "(", "[", "g", "[", "'id'", "]", "for", "g", "in", "parent_group", ".", "_raw", "[", "'groups'", "]", "]", ")", "if", "group", ".", "id", "in", "parent_group_child_ids", ":", "return", "raise", "ValidationError", "(", "self", ".", "record", ",", "'Group `{}` is not a valid selection for field `{}`'", ".", "format", "(", "group", ",", "self", ".", "name", ")", ")" ]
Validate a Group instance against allowed group IDs or subgroup of a parent group
[ "Validate", "a", "Group", "instance", "against", "allowed", "group", "IDs", "or", "subgroup", "of", "a", "parent", "group" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/usergroup.py#L79-L103
train
swimlane/swimlane-python
swimlane/core/fields/usergroup.py
UserGroupField.cast_to_python
def cast_to_python(self, value): """Convert JSON definition to UserGroup object""" # v2.x does not provide a distinction between users and groups at the field selection level, can only return # UserGroup instances instead of specific User or Group instances if value is not None: value = UserGroup(self._swimlane, value) return value
python
def cast_to_python(self, value):
    """Convert JSON definition to UserGroup object"""
    # v2.x does not provide a distinction between users and groups at the field
    # selection level, so only generic UserGroup instances can be returned here
    # instead of specific User or Group instances
    if value is None:
        return None
    return UserGroup(self._swimlane, value)
[ "def", "cast_to_python", "(", "self", ",", "value", ")", ":", "# v2.x does not provide a distinction between users and groups at the field selection level, can only return", "# UserGroup instances instead of specific User or Group instances", "if", "value", "is", "not", "None", ":", "value", "=", "UserGroup", "(", "self", ".", "_swimlane", ",", "value", ")", "return", "value" ]
Convert JSON definition to UserGroup object
[ "Convert", "JSON", "definition", "to", "UserGroup", "object" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/usergroup.py#L112-L119
train
swimlane/swimlane-python
swimlane/core/fields/base/cursor.py
CursorField.cursor
def cursor(self): """Cache and return cursor_class instance""" if self._cursor is None: # pylint: disable=not-callable self._cursor = self.cursor_class(self, self.get_initial_elements()) return self._cursor
python
def cursor(self):
    """Cache and return cursor_class instance"""
    cached = self._cursor
    if cached is None:
        # Build the cursor lazily on first access and memoize it
        # pylint: disable=not-callable
        cached = self.cursor_class(self, self.get_initial_elements())
        self._cursor = cached
    return cached
[ "def", "cursor", "(", "self", ")", ":", "if", "self", ".", "_cursor", "is", "None", ":", "# pylint: disable=not-callable", "self", ".", "_cursor", "=", "self", ".", "cursor_class", "(", "self", ",", "self", ".", "get_initial_elements", "(", ")", ")", "return", "self", ".", "_cursor" ]
Cache and return cursor_class instance
[ "Cache", "and", "return", "cursor_class", "instance" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/cursor.py#L72-L78
train
swimlane/swimlane-python
swimlane/core/fields/comment.py
CommentCursor.comment
def comment(self, message): """Add new comment to record comment field""" message = str(message) sw_repr = { '$type': 'Core.Models.Record.Comments, Core', 'createdByUser': self._record._swimlane.user.as_usergroup_selection(), 'createdDate': pendulum.now().to_rfc3339_string(), 'message': message } comment = Comment(self._swimlane, sw_repr) self._elements.append(comment) self._record._raw['comments'].setdefault(self._field.id, []) self._record._raw['comments'][self._field.id].append(comment._raw) return comment
python
def comment(self, message):
    """Add new comment to record comment field"""
    message = str(message)

    # Raw API representation of a comment authored by the current user "now"
    raw_comment = {
        '$type': 'Core.Models.Record.Comments, Core',
        'createdByUser': self._record._swimlane.user.as_usergroup_selection(),
        'createdDate': pendulum.now().to_rfc3339_string(),
        'message': message
    }

    new_comment = Comment(self._swimlane, raw_comment)

    # Track on the cursor and mirror into the record's raw comment data
    self._elements.append(new_comment)
    self._record._raw['comments'].setdefault(self._field.id, [])
    self._record._raw['comments'][self._field.id].append(new_comment._raw)

    return new_comment
[ "def", "comment", "(", "self", ",", "message", ")", ":", "message", "=", "str", "(", "message", ")", "sw_repr", "=", "{", "'$type'", ":", "'Core.Models.Record.Comments, Core'", ",", "'createdByUser'", ":", "self", ".", "_record", ".", "_swimlane", ".", "user", ".", "as_usergroup_selection", "(", ")", ",", "'createdDate'", ":", "pendulum", ".", "now", "(", ")", ".", "to_rfc3339_string", "(", ")", ",", "'message'", ":", "message", "}", "comment", "=", "Comment", "(", "self", ".", "_swimlane", ",", "sw_repr", ")", "self", ".", "_elements", ".", "append", "(", "comment", ")", "self", ".", "_record", ".", "_raw", "[", "'comments'", "]", ".", "setdefault", "(", "self", ".", "_field", ".", "id", ",", "[", "]", ")", "self", ".", "_record", ".", "_raw", "[", "'comments'", "]", "[", "self", ".", "_field", ".", "id", "]", ".", "append", "(", "comment", ".", "_raw", ")", "return", "comment" ]
Add new comment to record comment field
[ "Add", "new", "comment", "to", "record", "comment", "field" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/comment.py#L10-L27
train
swimlane/swimlane-python
swimlane/utils/__init__.py
get_recursive_subclasses
def get_recursive_subclasses(cls): """Return list of all subclasses for a class, including subclasses of direct subclasses""" return cls.__subclasses__() + [g for s in cls.__subclasses__() for g in get_recursive_subclasses(s)]
python
def get_recursive_subclasses(cls):
    """Return list of all subclasses for a class, including subclasses of direct subclasses"""
    direct = cls.__subclasses__()
    nested = []
    # Depth-first: each direct subclass contributes its own full subtree
    for subclass in direct:
        nested.extend(get_recursive_subclasses(subclass))
    return direct + nested
[ "def", "get_recursive_subclasses", "(", "cls", ")", ":", "return", "cls", ".", "__subclasses__", "(", ")", "+", "[", "g", "for", "s", "in", "cls", ".", "__subclasses__", "(", ")", "for", "g", "in", "get_recursive_subclasses", "(", "s", ")", "]" ]
Return list of all subclasses for a class, including subclasses of direct subclasses
[ "Return", "list", "of", "all", "subclasses", "for", "a", "class", "including", "subclasses", "of", "direct", "subclasses" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/utils/__init__.py#L24-L26
train
swimlane/swimlane-python
swimlane/utils/__init__.py
import_submodules
def import_submodules(package): """Return list of imported module instances from beneath root_package""" if isinstance(package, str): package = importlib.import_module(package) results = {} for _, full_name, is_pkg in pkgutil.walk_packages(package.__path__, package.__name__ + '.'): results[full_name] = importlib.import_module(full_name) if is_pkg: results.update(import_submodules(full_name)) return results
python
def import_submodules(package):
    """Return list of imported module instances from beneath root_package"""
    # Accept either a module object or a dotted module path
    if isinstance(package, str):
        package = importlib.import_module(package)

    modules = {}
    prefix = package.__name__ + '.'

    for _, full_name, is_pkg in pkgutil.walk_packages(package.__path__, prefix):
        modules[full_name] = importlib.import_module(full_name)
        if is_pkg:
            # Recurse into nested packages to pick up their submodules too
            modules.update(import_submodules(full_name))

    return modules
[ "def", "import_submodules", "(", "package", ")", ":", "if", "isinstance", "(", "package", ",", "str", ")", ":", "package", "=", "importlib", ".", "import_module", "(", "package", ")", "results", "=", "{", "}", "for", "_", ",", "full_name", ",", "is_pkg", "in", "pkgutil", ".", "walk_packages", "(", "package", ".", "__path__", ",", "package", ".", "__name__", "+", "'.'", ")", ":", "results", "[", "full_name", "]", "=", "importlib", ".", "import_module", "(", "full_name", ")", "if", "is_pkg", ":", "results", ".", "update", "(", "import_submodules", "(", "full_name", ")", ")", "return", "results" ]
Return list of imported module instances from beneath root_package
[ "Return", "list", "of", "imported", "module", "instances", "from", "beneath", "root_package" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/utils/__init__.py#L29-L43
train
swimlane/swimlane-python
swimlane/utils/__init__.py
one_of_keyword_only
def one_of_keyword_only(*valid_keywords): """Decorator to help make one-and-only-one keyword-only argument functions more reusable Notes: Decorated function should take 2 arguments, the first for the key, the second the value Examples: :: @one_of_keyword_only('a', 'b', 'c') def func(key, value): if key == 'a': ... elif key == 'b': ... else: # key = 'c' ... ... func(a=1) func(b=2) func(c=3) try: func(d=4) except TypeError: ... try: func(a=1, b=2) except TypeError: ... Args: *valid_keywords (str): All allowed keyword argument names Raises: TypeError: On decorated call, if 0 or 2+ arguments are provided or kwargs contains a key not in valid_keywords """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): sentinel = object() values = {} for key in valid_keywords: kwarg_value = kwargs.pop(key, sentinel) if kwarg_value is not sentinel: values[key] = kwarg_value if kwargs: raise TypeError('Unexpected arguments: {}'.format(kwargs)) if not values: raise TypeError('Must provide one of {} as keyword argument'.format(', '.join(valid_keywords))) if len(values) > 1: raise TypeError('Must provide only one of {} as keyword argument. Received {}'.format( ', '.join(valid_keywords), values )) return func(*(args + values.popitem())) return wrapper return decorator
python
def one_of_keyword_only(*valid_keywords):
    """Decorator requiring exactly one of the listed keyword-only arguments per call

    Notes:
        Decorated function should take 2 arguments, the first for the key, the second the value

    Args:
        *valid_keywords (str): All allowed keyword argument names

    Raises:
        TypeError: On decorated call, if 0 or 2+ arguments are provided or kwargs contains a key not in
            valid_keywords
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Collect the recognized keywords, removing them from kwargs so any
            # leftovers are unexpected arguments
            values = {}
            for keyword in valid_keywords:
                if keyword in kwargs:
                    values[keyword] = kwargs.pop(keyword)

            if kwargs:
                raise TypeError('Unexpected arguments: {}'.format(kwargs))

            if not values:
                raise TypeError('Must provide one of {} as keyword argument'.format(', '.join(valid_keywords)))

            if len(values) > 1:
                raise TypeError('Must provide only one of {} as keyword argument. Received {}'.format(
                    ', '.join(valid_keywords),
                    values
                ))

            # popitem() yields the single (key, value) pair to append to args
            return func(*(args + values.popitem()))

        return wrapper

    return decorator
[ "def", "one_of_keyword_only", "(", "*", "valid_keywords", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "sentinel", "=", "object", "(", ")", "values", "=", "{", "}", "for", "key", "in", "valid_keywords", ":", "kwarg_value", "=", "kwargs", ".", "pop", "(", "key", ",", "sentinel", ")", "if", "kwarg_value", "is", "not", "sentinel", ":", "values", "[", "key", "]", "=", "kwarg_value", "if", "kwargs", ":", "raise", "TypeError", "(", "'Unexpected arguments: {}'", ".", "format", "(", "kwargs", ")", ")", "if", "not", "values", ":", "raise", "TypeError", "(", "'Must provide one of {} as keyword argument'", ".", "format", "(", "', '", ".", "join", "(", "valid_keywords", ")", ")", ")", "if", "len", "(", "values", ")", ">", "1", ":", "raise", "TypeError", "(", "'Must provide only one of {} as keyword argument. Received {}'", ".", "format", "(", "', '", ".", "join", "(", "valid_keywords", ")", ",", "values", ")", ")", "return", "func", "(", "*", "(", "args", "+", "values", ".", "popitem", "(", ")", ")", ")", "return", "wrapper", "return", "decorator" ]
Decorator to help make one-and-only-one keyword-only argument functions more reusable Notes: Decorated function should take 2 arguments, the first for the key, the second the value Examples: :: @one_of_keyword_only('a', 'b', 'c') def func(key, value): if key == 'a': ... elif key == 'b': ... else: # key = 'c' ... ... func(a=1) func(b=2) func(c=3) try: func(d=4) except TypeError: ... try: func(a=1, b=2) except TypeError: ... Args: *valid_keywords (str): All allowed keyword argument names Raises: TypeError: On decorated call, if 0 or 2+ arguments are provided or kwargs contains a key not in valid_keywords
[ "Decorator", "to", "help", "make", "one", "-", "and", "-", "only", "-", "one", "keyword", "-", "only", "argument", "functions", "more", "reusable" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/utils/__init__.py#L46-L117
train
swimlane/swimlane-python
swimlane/core/fields/datetime.py
DatetimeField.get_python
def get_python(self): """Coerce to best date type representation for the field subtype""" value = super(DatetimeField, self).get_python() if value is not None: # Handle subtypes with matching Pendulum types if self.input_type == self._type_time: value = value.time() if self.input_type == self._type_date: value = value.date() return value
python
def get_python(self):
    """Coerce to best date type representation for the field subtype"""
    value = super(DatetimeField, self).get_python()

    if value is None:
        return None

    # Narrow full datetimes down to the matching Pendulum type for the subtype
    if self.input_type == self._type_time:
        value = value.time()
    if self.input_type == self._type_date:
        value = value.date()

    return value
[ "def", "get_python", "(", "self", ")", ":", "value", "=", "super", "(", "DatetimeField", ",", "self", ")", ".", "get_python", "(", ")", "if", "value", "is", "not", "None", ":", "# Handle subtypes with matching Pendulum types", "if", "self", ".", "input_type", "==", "self", ".", "_type_time", ":", "value", "=", "value", ".", "time", "(", ")", "if", "self", ".", "input_type", "==", "self", ".", "_type_date", ":", "value", "=", "value", ".", "date", "(", ")", "return", "value" ]
Coerce to best date type representation for the field subtype
[ "Coerce", "to", "best", "date", "type", "representation", "for", "the", "field", "subtype" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/datetime.py#L68-L79
train
swimlane/swimlane-python
swimlane/core/fields/datetime.py
DatetimeField.cast_to_swimlane
def cast_to_swimlane(self, value): """Return datetimes formatted as expected by API and timespans as millisecond epochs""" if value is None: return value if self.input_type == self._type_interval: return value.in_seconds() * 1000 return self.format_datetime(value)
python
def cast_to_swimlane(self, value):
    """Return datetimes formatted as expected by API and timespans as millisecond epochs"""
    if value is None:
        return None

    # Interval subtype is serialized as a millisecond count
    if self.input_type == self._type_interval:
        return 1000 * value.in_seconds()

    return self.format_datetime(value)
[ "def", "cast_to_swimlane", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "return", "value", "if", "self", ".", "input_type", "==", "self", ".", "_type_interval", ":", "return", "value", ".", "in_seconds", "(", ")", "*", "1000", "return", "self", ".", "format_datetime", "(", "value", ")" ]
Return datetimes formatted as expected by API and timespans as millisecond epochs
[ "Return", "datetimes", "formatted", "as", "expected", "by", "API", "and", "timespans", "as", "millisecond", "epochs" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/datetime.py#L86-L94
train
swimlane/swimlane-python
swimlane/core/fields/datetime.py
DatetimeField.for_json
def for_json(self): """Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals""" value = super(DatetimeField, self).for_json() # Order of instance checks matters for proper inheritance checks if isinstance(value, pendulum.Interval): return value.in_seconds() * 1000 if isinstance(value, datetime): return self.format_datetime(value) if isinstance(value, pendulum.Time): return str(value) if isinstance(value, pendulum.Date): return value.to_date_string()
python
def for_json(self):
    """Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals"""
    value = super(DatetimeField, self).for_json()

    # Most-specific types are checked first so inheritance doesn't
    # short-circuit the dispatch (e.g. pendulum datetimes are also dates);
    # unmatched values fall through and implicitly return None
    serializers = (
        (pendulum.Interval, lambda v: v.in_seconds() * 1000),
        (datetime, self.format_datetime),
        (pendulum.Time, str),
        (pendulum.Date, lambda v: v.to_date_string()),
    )
    for value_type, serialize in serializers:
        if isinstance(value, value_type):
            return serialize(value)
[ "def", "for_json", "(", "self", ")", ":", "value", "=", "super", "(", "DatetimeField", ",", "self", ")", ".", "for_json", "(", ")", "# Order of instance checks matters for proper inheritance checks", "if", "isinstance", "(", "value", ",", "pendulum", ".", "Interval", ")", ":", "return", "value", ".", "in_seconds", "(", ")", "*", "1000", "if", "isinstance", "(", "value", ",", "datetime", ")", ":", "return", "self", ".", "format_datetime", "(", "value", ")", "if", "isinstance", "(", "value", ",", "pendulum", ".", "Time", ")", ":", "return", "str", "(", "value", ")", "if", "isinstance", "(", "value", ",", "pendulum", ".", "Date", ")", ":", "return", "value", ".", "to_date_string", "(", ")" ]
Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals
[ "Return", "date", "ISO8601", "string", "formats", "for", "datetime", "date", "and", "time", "values", "milliseconds", "for", "intervals" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/datetime.py#L96-L108
train
swimlane/swimlane-python
swimlane/core/resources/report.py
report_factory
def report_factory(app, report_name, **kwargs): """Report instance factory populating boilerplate raw data Args: app (App): Swimlane App instance report_name (str): Generated Report name Keyword Args **kwargs: Kwargs to pass to the Report class """ # pylint: disable=protected-access created = pendulum.now().to_rfc3339_string() user_model = app._swimlane.user.as_usergroup_selection() return Report( app, { "$type": Report._type, "groupBys": [], "aggregates": [], "applicationIds": [app.id], "columns": [], "sorts": { "$type": "System.Collections.Generic.Dictionary`2" "[[System.String, mscorlib]," "[Core.Models.Search.SortTypes, Core]], mscorlib", }, "filters": [], "defaultSearchReport": False, "allowed": [], "permissions": { "$type": "Core.Models.Security.PermissionMatrix, Core" }, "createdDate": created, "modifiedDate": created, "createdByUser": user_model, "modifiedByUser": user_model, "id": None, "name": report_name, "disabled": False, "keywords": "" }, **kwargs )
python
def report_factory(app, report_name, **kwargs): """Report instance factory populating boilerplate raw data Args: app (App): Swimlane App instance report_name (str): Generated Report name Keyword Args **kwargs: Kwargs to pass to the Report class """ # pylint: disable=protected-access created = pendulum.now().to_rfc3339_string() user_model = app._swimlane.user.as_usergroup_selection() return Report( app, { "$type": Report._type, "groupBys": [], "aggregates": [], "applicationIds": [app.id], "columns": [], "sorts": { "$type": "System.Collections.Generic.Dictionary`2" "[[System.String, mscorlib]," "[Core.Models.Search.SortTypes, Core]], mscorlib", }, "filters": [], "defaultSearchReport": False, "allowed": [], "permissions": { "$type": "Core.Models.Security.PermissionMatrix, Core" }, "createdDate": created, "modifiedDate": created, "createdByUser": user_model, "modifiedByUser": user_model, "id": None, "name": report_name, "disabled": False, "keywords": "" }, **kwargs )
[ "def", "report_factory", "(", "app", ",", "report_name", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=protected-access", "created", "=", "pendulum", ".", "now", "(", ")", ".", "to_rfc3339_string", "(", ")", "user_model", "=", "app", ".", "_swimlane", ".", "user", ".", "as_usergroup_selection", "(", ")", "return", "Report", "(", "app", ",", "{", "\"$type\"", ":", "Report", ".", "_type", ",", "\"groupBys\"", ":", "[", "]", ",", "\"aggregates\"", ":", "[", "]", ",", "\"applicationIds\"", ":", "[", "app", ".", "id", "]", ",", "\"columns\"", ":", "[", "]", ",", "\"sorts\"", ":", "{", "\"$type\"", ":", "\"System.Collections.Generic.Dictionary`2\"", "\"[[System.String, mscorlib],\"", "\"[Core.Models.Search.SortTypes, Core]], mscorlib\"", ",", "}", ",", "\"filters\"", ":", "[", "]", ",", "\"defaultSearchReport\"", ":", "False", ",", "\"allowed\"", ":", "[", "]", ",", "\"permissions\"", ":", "{", "\"$type\"", ":", "\"Core.Models.Security.PermissionMatrix, Core\"", "}", ",", "\"createdDate\"", ":", "created", ",", "\"modifiedDate\"", ":", "created", ",", "\"createdByUser\"", ":", "user_model", ",", "\"modifiedByUser\"", ":", "user_model", ",", "\"id\"", ":", "None", ",", "\"name\"", ":", "report_name", ",", "\"disabled\"", ":", "False", ",", "\"keywords\"", ":", "\"\"", "}", ",", "*", "*", "kwargs", ")" ]
Report instance factory populating boilerplate raw data Args: app (App): Swimlane App instance report_name (str): Generated Report name Keyword Args **kwargs: Kwargs to pass to the Report class
[ "Report", "instance", "factory", "populating", "boilerplate", "raw", "data" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/resources/report.py#L113-L156
train
swimlane/swimlane-python
swimlane/core/resources/report.py
Report.filter
def filter(self, field_name, operand, value): """Adds a filter to report Notes: All filters are currently AND'ed together Args: field_name (str): Target field name to filter on operand (str): Operand used in comparison. See `swimlane.core.search` for options value: Target value used in comparision """ if operand not in self._FILTER_OPERANDS: raise ValueError('Operand must be one of {}'.format(', '.join(self._FILTER_OPERANDS))) # Use temp Record instance for target app to translate values into expected API format record_stub = record_factory(self._app) field = record_stub.get_field(field_name) self._raw['filters'].append({ "fieldId": field.id, "filterType": operand, "value": field.get_report(value) })
python
def filter(self, field_name, operand, value): """Adds a filter to report Notes: All filters are currently AND'ed together Args: field_name (str): Target field name to filter on operand (str): Operand used in comparison. See `swimlane.core.search` for options value: Target value used in comparision """ if operand not in self._FILTER_OPERANDS: raise ValueError('Operand must be one of {}'.format(', '.join(self._FILTER_OPERANDS))) # Use temp Record instance for target app to translate values into expected API format record_stub = record_factory(self._app) field = record_stub.get_field(field_name) self._raw['filters'].append({ "fieldId": field.id, "filterType": operand, "value": field.get_report(value) })
[ "def", "filter", "(", "self", ",", "field_name", ",", "operand", ",", "value", ")", ":", "if", "operand", "not", "in", "self", ".", "_FILTER_OPERANDS", ":", "raise", "ValueError", "(", "'Operand must be one of {}'", ".", "format", "(", "', '", ".", "join", "(", "self", ".", "_FILTER_OPERANDS", ")", ")", ")", "# Use temp Record instance for target app to translate values into expected API format", "record_stub", "=", "record_factory", "(", "self", ".", "_app", ")", "field", "=", "record_stub", ".", "get_field", "(", "field_name", ")", "self", ".", "_raw", "[", "'filters'", "]", ".", "append", "(", "{", "\"fieldId\"", ":", "field", ".", "id", ",", "\"filterType\"", ":", "operand", ",", "\"value\"", ":", "field", ".", "get_report", "(", "value", ")", "}", ")" ]
Adds a filter to report Notes: All filters are currently AND'ed together Args: field_name (str): Target field name to filter on operand (str): Operand used in comparison. See `swimlane.core.search` for options value: Target value used in comparision
[ "Adds", "a", "filter", "to", "report" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/resources/report.py#L88-L110
train
swimlane/swimlane-python
swimlane/core/adapters/report.py
ReportAdapter.list
def list(self): """Retrieve all reports for parent app Returns: :class:`list` of :class:`~swimlane.core.resources.report.Report`: List of all returned reports """ raw_reports = self._swimlane.request('get', "reports?appId={}".format(self._app.id)).json() # Ignore StatsReports for now return [Report(self._app, raw_report) for raw_report in raw_reports if raw_report['$type'] == Report._type]
python
def list(self): """Retrieve all reports for parent app Returns: :class:`list` of :class:`~swimlane.core.resources.report.Report`: List of all returned reports """ raw_reports = self._swimlane.request('get', "reports?appId={}".format(self._app.id)).json() # Ignore StatsReports for now return [Report(self._app, raw_report) for raw_report in raw_reports if raw_report['$type'] == Report._type]
[ "def", "list", "(", "self", ")", ":", "raw_reports", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "\"reports?appId={}\"", ".", "format", "(", "self", ".", "_app", ".", "id", ")", ")", ".", "json", "(", ")", "# Ignore StatsReports for now", "return", "[", "Report", "(", "self", ".", "_app", ",", "raw_report", ")", "for", "raw_report", "in", "raw_reports", "if", "raw_report", "[", "'$type'", "]", "==", "Report", ".", "_type", "]" ]
Retrieve all reports for parent app Returns: :class:`list` of :class:`~swimlane.core.resources.report.Report`: List of all returned reports
[ "Retrieve", "all", "reports", "for", "parent", "app" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/report.py#L20-L28
train
swimlane/swimlane-python
swimlane/core/adapters/report.py
ReportAdapter.get
def get(self, report_id): """Retrieve report by ID Args: report_id (str): Full report ID Returns: Report: Corresponding Report instance """ return Report( self._app, self._swimlane.request('get', "reports/{0}".format(report_id)).json() )
python
def get(self, report_id): """Retrieve report by ID Args: report_id (str): Full report ID Returns: Report: Corresponding Report instance """ return Report( self._app, self._swimlane.request('get', "reports/{0}".format(report_id)).json() )
[ "def", "get", "(", "self", ",", "report_id", ")", ":", "return", "Report", "(", "self", ".", "_app", ",", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "\"reports/{0}\"", ".", "format", "(", "report_id", ")", ")", ".", "json", "(", ")", ")" ]
Retrieve report by ID Args: report_id (str): Full report ID Returns: Report: Corresponding Report instance
[ "Retrieve", "report", "by", "ID" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/report.py#L30-L42
train
swimlane/swimlane-python
swimlane/core/adapters/usergroup.py
GroupAdapter.get
def get(self, key, value): """Retrieve single group record by id or name Supports resource cache Keyword Args: id (str): Full Group ID name (str): Group name Raises: TypeError: Unexpected or more than one keyword argument provided ValueError: No matching group found based on provided inputs Returns: Group: Group instance matching provided inputs """ if key == 'id': response = self._swimlane.request('get', 'groups/{}'.format(value)) return Group(self._swimlane, response.json()) else: response = self._swimlane.request('get', 'groups/lookup?name={}'.format(value)) matched_groups = response.json() for group_data in matched_groups: if group_data.get('name') == value: return Group(self._swimlane, group_data) raise ValueError('Unable to find group with name "{}"'.format(value))
python
def get(self, key, value): """Retrieve single group record by id or name Supports resource cache Keyword Args: id (str): Full Group ID name (str): Group name Raises: TypeError: Unexpected or more than one keyword argument provided ValueError: No matching group found based on provided inputs Returns: Group: Group instance matching provided inputs """ if key == 'id': response = self._swimlane.request('get', 'groups/{}'.format(value)) return Group(self._swimlane, response.json()) else: response = self._swimlane.request('get', 'groups/lookup?name={}'.format(value)) matched_groups = response.json() for group_data in matched_groups: if group_data.get('name') == value: return Group(self._swimlane, group_data) raise ValueError('Unable to find group with name "{}"'.format(value))
[ "def", "get", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "==", "'id'", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "'groups/{}'", ".", "format", "(", "value", ")", ")", "return", "Group", "(", "self", ".", "_swimlane", ",", "response", ".", "json", "(", ")", ")", "else", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "'groups/lookup?name={}'", ".", "format", "(", "value", ")", ")", "matched_groups", "=", "response", ".", "json", "(", ")", "for", "group_data", "in", "matched_groups", ":", "if", "group_data", ".", "get", "(", "'name'", ")", "==", "value", ":", "return", "Group", "(", "self", ".", "_swimlane", ",", "group_data", ")", "raise", "ValueError", "(", "'Unable to find group with name \"{}\"'", ".", "format", "(", "value", ")", ")" ]
Retrieve single group record by id or name Supports resource cache Keyword Args: id (str): Full Group ID name (str): Group name Raises: TypeError: Unexpected or more than one keyword argument provided ValueError: No matching group found based on provided inputs Returns: Group: Group instance matching provided inputs
[ "Retrieve", "single", "group", "record", "by", "id", "or", "name" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/usergroup.py#L45-L73
train
swimlane/swimlane-python
swimlane/core/adapters/usergroup.py
UserAdapter.get
def get(self, arg, value): """Retrieve single user record by id or username Warnings: User display names are not unique. If using `display_name`, method will fail if multiple Users are returned with the same display name Keyword Args: id (str): Full User ID display_name (str): User display name Returns: User: User instance matching provided inputs Raises: TypeError: Unexpected or more than one keyword argument provided ValueError: No matching user found based on provided inputs, or multiple Users with same display name """ if arg == 'id': response = self._swimlane.request('get', 'user/{}'.format(value)) try: user_data = response.json() except ValueError: raise ValueError('Unable to find user with ID "{}"'.format(value)) return User(self._swimlane, user_data) else: response = self._swimlane.request('get', 'user/search?query={}'.format(quote_plus(value))) matched_users = response.json() # Display name not unique, fail if multiple users share the same target display name target_matches = [] for user_data in matched_users: user_display_name = user_data.get('displayName') if user_display_name == value: target_matches.append(user_data) # No matches if not target_matches: raise ValueError('Unable to find user with display name "{}"'.format(value)) # Multiple matches if len(target_matches) > 1: raise ValueError('Multiple users returned with display name "{}". Matching user IDs: {}'.format( value, ', '.join(['"{}"'.format(r['id']) for r in target_matches]) )) return User(self._swimlane, target_matches[0])
python
def get(self, arg, value): """Retrieve single user record by id or username Warnings: User display names are not unique. If using `display_name`, method will fail if multiple Users are returned with the same display name Keyword Args: id (str): Full User ID display_name (str): User display name Returns: User: User instance matching provided inputs Raises: TypeError: Unexpected or more than one keyword argument provided ValueError: No matching user found based on provided inputs, or multiple Users with same display name """ if arg == 'id': response = self._swimlane.request('get', 'user/{}'.format(value)) try: user_data = response.json() except ValueError: raise ValueError('Unable to find user with ID "{}"'.format(value)) return User(self._swimlane, user_data) else: response = self._swimlane.request('get', 'user/search?query={}'.format(quote_plus(value))) matched_users = response.json() # Display name not unique, fail if multiple users share the same target display name target_matches = [] for user_data in matched_users: user_display_name = user_data.get('displayName') if user_display_name == value: target_matches.append(user_data) # No matches if not target_matches: raise ValueError('Unable to find user with display name "{}"'.format(value)) # Multiple matches if len(target_matches) > 1: raise ValueError('Multiple users returned with display name "{}". Matching user IDs: {}'.format( value, ', '.join(['"{}"'.format(r['id']) for r in target_matches]) )) return User(self._swimlane, target_matches[0])
[ "def", "get", "(", "self", ",", "arg", ",", "value", ")", ":", "if", "arg", "==", "'id'", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "'user/{}'", ".", "format", "(", "value", ")", ")", "try", ":", "user_data", "=", "response", ".", "json", "(", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Unable to find user with ID \"{}\"'", ".", "format", "(", "value", ")", ")", "return", "User", "(", "self", ".", "_swimlane", ",", "user_data", ")", "else", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "'user/search?query={}'", ".", "format", "(", "quote_plus", "(", "value", ")", ")", ")", "matched_users", "=", "response", ".", "json", "(", ")", "# Display name not unique, fail if multiple users share the same target display name", "target_matches", "=", "[", "]", "for", "user_data", "in", "matched_users", ":", "user_display_name", "=", "user_data", ".", "get", "(", "'displayName'", ")", "if", "user_display_name", "==", "value", ":", "target_matches", ".", "append", "(", "user_data", ")", "# No matches", "if", "not", "target_matches", ":", "raise", "ValueError", "(", "'Unable to find user with display name \"{}\"'", ".", "format", "(", "value", ")", ")", "# Multiple matches", "if", "len", "(", "target_matches", ")", ">", "1", ":", "raise", "ValueError", "(", "'Multiple users returned with display name \"{}\". Matching user IDs: {}'", ".", "format", "(", "value", ",", "', '", ".", "join", "(", "[", "'\"{}\"'", ".", "format", "(", "r", "[", "'id'", "]", ")", "for", "r", "in", "target_matches", "]", ")", ")", ")", "return", "User", "(", "self", ".", "_swimlane", ",", "target_matches", "[", "0", "]", ")" ]
Retrieve single user record by id or username Warnings: User display names are not unique. If using `display_name`, method will fail if multiple Users are returned with the same display name Keyword Args: id (str): Full User ID display_name (str): User display name Returns: User: User instance matching provided inputs Raises: TypeError: Unexpected or more than one keyword argument provided ValueError: No matching user found based on provided inputs, or multiple Users with same display name
[ "Retrieve", "single", "user", "record", "by", "id", "or", "username" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/usergroup.py#L111-L162
train
swimlane/swimlane-python
swimlane/core/fields/history.py
RevisionCursor._evaluate
def _evaluate(self): """Lazily retrieves, caches, and returns the list of record _revisions""" if not self.__retrieved: self._elements = self._retrieve_revisions() self.__retrieved = True return super(RevisionCursor, self)._evaluate()
python
def _evaluate(self): """Lazily retrieves, caches, and returns the list of record _revisions""" if not self.__retrieved: self._elements = self._retrieve_revisions() self.__retrieved = True return super(RevisionCursor, self)._evaluate()
[ "def", "_evaluate", "(", "self", ")", ":", "if", "not", "self", ".", "__retrieved", ":", "self", ".", "_elements", "=", "self", ".", "_retrieve_revisions", "(", ")", "self", ".", "__retrieved", "=", "True", "return", "super", "(", "RevisionCursor", ",", "self", ")", ".", "_evaluate", "(", ")" ]
Lazily retrieves, caches, and returns the list of record _revisions
[ "Lazily", "retrieves", "caches", "and", "returns", "the", "list", "of", "record", "_revisions" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/history.py#L15-L21
train
swimlane/swimlane-python
swimlane/core/fields/history.py
RevisionCursor._retrieve_revisions
def _retrieve_revisions(self): """Retrieve and populate Revision instances from history API endpoint""" response = self._swimlane.request( 'get', 'history', params={ 'type': 'Records', 'id': self._record.id } ) raw_revisions = response.json() return [Revision(self._record, raw) for raw in raw_revisions]
python
def _retrieve_revisions(self): """Retrieve and populate Revision instances from history API endpoint""" response = self._swimlane.request( 'get', 'history', params={ 'type': 'Records', 'id': self._record.id } ) raw_revisions = response.json() return [Revision(self._record, raw) for raw in raw_revisions]
[ "def", "_retrieve_revisions", "(", "self", ")", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "'history'", ",", "params", "=", "{", "'type'", ":", "'Records'", ",", "'id'", ":", "self", ".", "_record", ".", "id", "}", ")", "raw_revisions", "=", "response", ".", "json", "(", ")", "return", "[", "Revision", "(", "self", ".", "_record", ",", "raw", ")", "for", "raw", "in", "raw_revisions", "]" ]
Retrieve and populate Revision instances from history API endpoint
[ "Retrieve", "and", "populate", "Revision", "instances", "from", "history", "API", "endpoint" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/history.py#L23-L36
train
swimlane/swimlane-python
swimlane/core/fields/valueslist.py
ValuesListField.validate_value
def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) )
python
def validate_value(self, value): """Validate provided value is one of the valid options""" super(ValuesListField, self).validate_value(value) if value is not None: if value not in self.selection_to_id_map: raise ValidationError( self.record, 'Field "{}" invalid value "{}". Valid options: {}'.format( self.name, value, ', '.join(self.selection_to_id_map.keys()) ) )
[ "def", "validate_value", "(", "self", ",", "value", ")", ":", "super", "(", "ValuesListField", ",", "self", ")", ".", "validate_value", "(", "value", ")", "if", "value", "is", "not", "None", ":", "if", "value", "not", "in", "self", ".", "selection_to_id_map", ":", "raise", "ValidationError", "(", "self", ".", "record", ",", "'Field \"{}\" invalid value \"{}\". Valid options: {}'", ".", "format", "(", "self", ".", "name", ",", "value", ",", "', '", ".", "join", "(", "self", ".", "selection_to_id_map", ".", "keys", "(", ")", ")", ")", ")" ]
Validate provided value is one of the valid options
[ "Validate", "provided", "value", "is", "one", "of", "the", "valid", "options" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/valueslist.py#L20-L33
train
swimlane/swimlane-python
swimlane/core/fields/valueslist.py
ValuesListField.cast_to_report
def cast_to_report(self, value): """Report format uses only the value's id""" value = super(ValuesListField, self).cast_to_report(value) if value: return value['id']
python
def cast_to_report(self, value): """Report format uses only the value's id""" value = super(ValuesListField, self).cast_to_report(value) if value: return value['id']
[ "def", "cast_to_report", "(", "self", ",", "value", ")", ":", "value", "=", "super", "(", "ValuesListField", ",", "self", ")", ".", "cast_to_report", "(", "value", ")", "if", "value", ":", "return", "value", "[", "'id'", "]" ]
Report format uses only the value's id
[ "Report", "format", "uses", "only", "the", "value", "s", "id" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/valueslist.py#L53-L58
train
swimlane/swimlane-python
swimlane/core/adapters/record.py
validate_filters_or_records
def validate_filters_or_records(filters_or_records): """Validation for filters_or_records variable from bulk_modify and bulk_delete""" # If filters_or_records is empty, fail if not filters_or_records: raise ValueError('Must provide at least one filter tuples or Records') # If filters_or_records is not list of Record or tuple, fail if not isinstance(filters_or_records[0], (Record, tuple)): raise ValueError('Cannot provide both filter tuples and Records') # If filters_or_records is not list of either Record or only tuple, fail _type = type(filters_or_records[0]) for item in filters_or_records: if not isinstance(item, _type): raise ValueError("Expected filter tuple or Record, received {0}".format(item)) return _type
python
def validate_filters_or_records(filters_or_records): """Validation for filters_or_records variable from bulk_modify and bulk_delete""" # If filters_or_records is empty, fail if not filters_or_records: raise ValueError('Must provide at least one filter tuples or Records') # If filters_or_records is not list of Record or tuple, fail if not isinstance(filters_or_records[0], (Record, tuple)): raise ValueError('Cannot provide both filter tuples and Records') # If filters_or_records is not list of either Record or only tuple, fail _type = type(filters_or_records[0]) for item in filters_or_records: if not isinstance(item, _type): raise ValueError("Expected filter tuple or Record, received {0}".format(item)) return _type
[ "def", "validate_filters_or_records", "(", "filters_or_records", ")", ":", "# If filters_or_records is empty, fail", "if", "not", "filters_or_records", ":", "raise", "ValueError", "(", "'Must provide at least one filter tuples or Records'", ")", "# If filters_or_records is not list of Record or tuple, fail", "if", "not", "isinstance", "(", "filters_or_records", "[", "0", "]", ",", "(", "Record", ",", "tuple", ")", ")", ":", "raise", "ValueError", "(", "'Cannot provide both filter tuples and Records'", ")", "# If filters_or_records is not list of either Record or only tuple, fail", "_type", "=", "type", "(", "filters_or_records", "[", "0", "]", ")", "for", "item", "in", "filters_or_records", ":", "if", "not", "isinstance", "(", "item", ",", "_type", ")", ":", "raise", "ValueError", "(", "\"Expected filter tuple or Record, received {0}\"", ".", "format", "(", "item", ")", ")", "return", "_type" ]
Validation for filters_or_records variable from bulk_modify and bulk_delete
[ "Validation", "for", "filters_or_records", "variable", "from", "bulk_modify", "and", "bulk_delete" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/record.py#L373-L387
train
swimlane/swimlane-python
swimlane/core/adapters/record.py
RecordAdapter.get
def get(self, key, value): """Get a single record by id Supports resource cache .. versionchanged:: 2.17.0 Added option to retrieve record by tracking_id Keyword Args: id (str): Full record ID tracking_id (str): Record Tracking ID Returns: Record: Matching Record instance returned from API Raises: TypeError: No id argument provided """ if key == 'id': response = self._swimlane.request('get', "app/{0}/record/{1}".format(self._app.id, value)) return Record(self._app, response.json()) if key == 'tracking_id': response = self._swimlane.request('get', "app/{0}/record/tracking/{1}".format(self._app.id, value)) return Record(self._app, response.json())
python
def get(self, key, value): """Get a single record by id Supports resource cache .. versionchanged:: 2.17.0 Added option to retrieve record by tracking_id Keyword Args: id (str): Full record ID tracking_id (str): Record Tracking ID Returns: Record: Matching Record instance returned from API Raises: TypeError: No id argument provided """ if key == 'id': response = self._swimlane.request('get', "app/{0}/record/{1}".format(self._app.id, value)) return Record(self._app, response.json()) if key == 'tracking_id': response = self._swimlane.request('get', "app/{0}/record/tracking/{1}".format(self._app.id, value)) return Record(self._app, response.json())
[ "def", "get", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "==", "'id'", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "\"app/{0}/record/{1}\"", ".", "format", "(", "self", ".", "_app", ".", "id", ",", "value", ")", ")", "return", "Record", "(", "self", ".", "_app", ",", "response", ".", "json", "(", ")", ")", "if", "key", "==", "'tracking_id'", ":", "response", "=", "self", ".", "_swimlane", ".", "request", "(", "'get'", ",", "\"app/{0}/record/tracking/{1}\"", ".", "format", "(", "self", ".", "_app", ".", "id", ",", "value", ")", ")", "return", "Record", "(", "self", ".", "_app", ",", "response", ".", "json", "(", ")", ")" ]
Get a single record by id Supports resource cache .. versionchanged:: 2.17.0 Added option to retrieve record by tracking_id Keyword Args: id (str): Full record ID tracking_id (str): Record Tracking ID Returns: Record: Matching Record instance returned from API Raises: TypeError: No id argument provided
[ "Get", "a", "single", "record", "by", "id" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/record.py#L17-L40
train
swimlane/swimlane-python
swimlane/core/adapters/record.py
RecordAdapter.search
def search(self, *filters, **kwargs): """Shortcut to generate a new temporary search report using provided filters and return the resulting records Args: *filters (tuple): Zero or more filter tuples of (field_name, operator, field_value) Keyword Args: keywords (list(str)): List of strings of keywords to use in report search limit (int): Set maximum number of returned Records, defaults to `Report.default_limit`. Set to 0 to return all records Notes: Uses a temporary Report instance with a random name to facilitate search. Records are normally paginated, but are returned as a single list here, potentially causing performance issues with large searches. All provided filters are AND'ed together Filter operators are available as constants in `swimlane.core.search` Examples: :: # Return records matching all filters with default limit from swimlane.core import search records = app.records.search( ('field_name', 'equals', 'field_value'), ('other_field', search.NOT_EQ, 'value') ) :: # Run keyword search with multiple keywords records = app.records.search(keywords=['example', 'test']) :: # Return all records from app records = app.records.search(limit=0) Returns: :class:`list` of :class:`~swimlane.core.resources.record.Record`: List of Record instances returned from the search results """ report = self._app.reports.build( 'search-' + random_string(8), keywords=kwargs.pop('keywords', []), limit=kwargs.pop('limit', Report.default_limit) ) for filter_tuples in filters: report.filter(*filter_tuples) return list(report)
python
def search(self, *filters, **kwargs): """Shortcut to generate a new temporary search report using provided filters and return the resulting records Args: *filters (tuple): Zero or more filter tuples of (field_name, operator, field_value) Keyword Args: keywords (list(str)): List of strings of keywords to use in report search limit (int): Set maximum number of returned Records, defaults to `Report.default_limit`. Set to 0 to return all records Notes: Uses a temporary Report instance with a random name to facilitate search. Records are normally paginated, but are returned as a single list here, potentially causing performance issues with large searches. All provided filters are AND'ed together Filter operators are available as constants in `swimlane.core.search` Examples: :: # Return records matching all filters with default limit from swimlane.core import search records = app.records.search( ('field_name', 'equals', 'field_value'), ('other_field', search.NOT_EQ, 'value') ) :: # Run keyword search with multiple keywords records = app.records.search(keywords=['example', 'test']) :: # Return all records from app records = app.records.search(limit=0) Returns: :class:`list` of :class:`~swimlane.core.resources.record.Record`: List of Record instances returned from the search results """ report = self._app.reports.build( 'search-' + random_string(8), keywords=kwargs.pop('keywords', []), limit=kwargs.pop('limit', Report.default_limit) ) for filter_tuples in filters: report.filter(*filter_tuples) return list(report)
[ "def", "search", "(", "self", ",", "*", "filters", ",", "*", "*", "kwargs", ")", ":", "report", "=", "self", ".", "_app", ".", "reports", ".", "build", "(", "'search-'", "+", "random_string", "(", "8", ")", ",", "keywords", "=", "kwargs", ".", "pop", "(", "'keywords'", ",", "[", "]", ")", ",", "limit", "=", "kwargs", ".", "pop", "(", "'limit'", ",", "Report", ".", "default_limit", ")", ")", "for", "filter_tuples", "in", "filters", ":", "report", ".", "filter", "(", "*", "filter_tuples", ")", "return", "list", "(", "report", ")" ]
Shortcut to generate a new temporary search report using provided filters and return the resulting records Args: *filters (tuple): Zero or more filter tuples of (field_name, operator, field_value) Keyword Args: keywords (list(str)): List of strings of keywords to use in report search limit (int): Set maximum number of returned Records, defaults to `Report.default_limit`. Set to 0 to return all records Notes: Uses a temporary Report instance with a random name to facilitate search. Records are normally paginated, but are returned as a single list here, potentially causing performance issues with large searches. All provided filters are AND'ed together Filter operators are available as constants in `swimlane.core.search` Examples: :: # Return records matching all filters with default limit from swimlane.core import search records = app.records.search( ('field_name', 'equals', 'field_value'), ('other_field', search.NOT_EQ, 'value') ) :: # Run keyword search with multiple keywords records = app.records.search(keywords=['example', 'test']) :: # Return all records from app records = app.records.search(limit=0) Returns: :class:`list` of :class:`~swimlane.core.resources.record.Record`: List of Record instances returned from the search results
[ "Shortcut", "to", "generate", "a", "new", "temporary", "search", "report", "using", "provided", "filters", "and", "return", "the", "resulting", "records" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/record.py#L42-L98
train
swimlane/swimlane-python
swimlane/core/adapters/record.py
RecordAdapter.create
def create(self, **fields): """Create and return a new record in associated app and return the newly created Record instance Args: **fields: Field names and values to be validated and sent to server with create request Notes: Keyword arguments should be field names with their respective python values Field values are validated before sending create request to server Examples: Create a new record on an app with simple field names :: record = app.records.create( field_a='Some Value', someOtherField=100, ... ) Create a new record on an app with complex field names :: record = app.records.create(**{ 'Field 1': 'Field 1 Value', 'Field 2': 100, ... }) Returns: Record: Newly created Record instance with data as returned from API response Raises: swimlane.exceptions.UnknownField: If any fields are provided that are not available on target app swimlane.exceptions.ValidationError: If any field fails validation before creation """ new_record = record_factory(self._app, fields) new_record.save() return new_record
python
def create(self, **fields): """Create and return a new record in associated app and return the newly created Record instance Args: **fields: Field names and values to be validated and sent to server with create request Notes: Keyword arguments should be field names with their respective python values Field values are validated before sending create request to server Examples: Create a new record on an app with simple field names :: record = app.records.create( field_a='Some Value', someOtherField=100, ... ) Create a new record on an app with complex field names :: record = app.records.create(**{ 'Field 1': 'Field 1 Value', 'Field 2': 100, ... }) Returns: Record: Newly created Record instance with data as returned from API response Raises: swimlane.exceptions.UnknownField: If any fields are provided that are not available on target app swimlane.exceptions.ValidationError: If any field fails validation before creation """ new_record = record_factory(self._app, fields) new_record.save() return new_record
[ "def", "create", "(", "self", ",", "*", "*", "fields", ")", ":", "new_record", "=", "record_factory", "(", "self", ".", "_app", ",", "fields", ")", "new_record", ".", "save", "(", ")", "return", "new_record" ]
Create and return a new record in associated app and return the newly created Record instance Args: **fields: Field names and values to be validated and sent to server with create request Notes: Keyword arguments should be field names with their respective python values Field values are validated before sending create request to server Examples: Create a new record on an app with simple field names :: record = app.records.create( field_a='Some Value', someOtherField=100, ... ) Create a new record on an app with complex field names :: record = app.records.create(**{ 'Field 1': 'Field 1 Value', 'Field 2': 100, ... }) Returns: Record: Newly created Record instance with data as returned from API response Raises: swimlane.exceptions.UnknownField: If any fields are provided that are not available on target app swimlane.exceptions.ValidationError: If any field fails validation before creation
[ "Create", "and", "return", "a", "new", "record", "in", "associated", "app", "and", "return", "the", "newly", "created", "Record", "instance" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/record.py#L100-L143
train
swimlane/swimlane-python
swimlane/core/adapters/record.py
RecordAdapter.bulk_create
def bulk_create(self, *records): """Create and validate multiple records in associated app Args: *records (dict): One or more dicts of new record field names and values Notes: Requires Swimlane 2.15+ Validates like :meth:`create`, but only sends a single request to create all provided fields, and does not return the newly created records Any validation failures on any of the records will abort the batch creation, not creating any new records Does not return the newly created records Examples: Create 3 new records with single request :: app.records.bulk_create( {'Field 1': 'value 1', ...}, {'Field 1': 'value 2', ...}, {'Field 1': 'value 3', ...} ) Raises: swimlane.exceptions.UnknownField: If any field in any new record cannot be found swimlane.exceptions.ValidationError: If any field in any new record fails validation TypeError: If no dict of fields was provided, or any provided argument is not a dict """ if not records: raise TypeError('Must provide at least one record') if any(not isinstance(r, dict) for r in records): raise TypeError('New records must be provided as dicts') # Create local records from factory for initial full validation new_records = [] for record_data in records: record = record_factory(self._app, record_data) record.validate() new_records.append(record) self._swimlane.request( 'post', 'app/{}/record/batch'.format(self._app.id), json=[r._raw for r in new_records] )
python
def bulk_create(self, *records): """Create and validate multiple records in associated app Args: *records (dict): One or more dicts of new record field names and values Notes: Requires Swimlane 2.15+ Validates like :meth:`create`, but only sends a single request to create all provided fields, and does not return the newly created records Any validation failures on any of the records will abort the batch creation, not creating any new records Does not return the newly created records Examples: Create 3 new records with single request :: app.records.bulk_create( {'Field 1': 'value 1', ...}, {'Field 1': 'value 2', ...}, {'Field 1': 'value 3', ...} ) Raises: swimlane.exceptions.UnknownField: If any field in any new record cannot be found swimlane.exceptions.ValidationError: If any field in any new record fails validation TypeError: If no dict of fields was provided, or any provided argument is not a dict """ if not records: raise TypeError('Must provide at least one record') if any(not isinstance(r, dict) for r in records): raise TypeError('New records must be provided as dicts') # Create local records from factory for initial full validation new_records = [] for record_data in records: record = record_factory(self._app, record_data) record.validate() new_records.append(record) self._swimlane.request( 'post', 'app/{}/record/batch'.format(self._app.id), json=[r._raw for r in new_records] )
[ "def", "bulk_create", "(", "self", ",", "*", "records", ")", ":", "if", "not", "records", ":", "raise", "TypeError", "(", "'Must provide at least one record'", ")", "if", "any", "(", "not", "isinstance", "(", "r", ",", "dict", ")", "for", "r", "in", "records", ")", ":", "raise", "TypeError", "(", "'New records must be provided as dicts'", ")", "# Create local records from factory for initial full validation", "new_records", "=", "[", "]", "for", "record_data", "in", "records", ":", "record", "=", "record_factory", "(", "self", ".", "_app", ",", "record_data", ")", "record", ".", "validate", "(", ")", "new_records", ".", "append", "(", "record", ")", "self", ".", "_swimlane", ".", "request", "(", "'post'", ",", "'app/{}/record/batch'", ".", "format", "(", "self", ".", "_app", ".", "id", ")", ",", "json", "=", "[", "r", ".", "_raw", "for", "r", "in", "new_records", "]", ")" ]
Create and validate multiple records in associated app Args: *records (dict): One or more dicts of new record field names and values Notes: Requires Swimlane 2.15+ Validates like :meth:`create`, but only sends a single request to create all provided fields, and does not return the newly created records Any validation failures on any of the records will abort the batch creation, not creating any new records Does not return the newly created records Examples: Create 3 new records with single request :: app.records.bulk_create( {'Field 1': 'value 1', ...}, {'Field 1': 'value 2', ...}, {'Field 1': 'value 3', ...} ) Raises: swimlane.exceptions.UnknownField: If any field in any new record cannot be found swimlane.exceptions.ValidationError: If any field in any new record fails validation TypeError: If no dict of fields was provided, or any provided argument is not a dict
[ "Create", "and", "validate", "multiple", "records", "in", "associated", "app" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/adapters/record.py#L146-L198
train
swimlane/swimlane-python
swimlane/core/fields/list.py
_ListFieldCursor._validate_list
def _validate_list(self, target): """Validate a list against field validation rules""" # Check list length restrictions min_items = self._field.field_definition.get('minItems') max_items = self._field.field_definition.get('maxItems') if min_items is not None: if len(target) < min_items: raise ValidationError( self._record, "Field '{}' must have a minimum of {} item(s)".format(self._field.name, min_items) ) if max_items is not None: if len(target) > max_items: raise ValidationError( self._record, "Field '{}' can only have a maximum of {} item(s)".format(self._field.name, max_items) ) # Individual item validation for item in target: self._validate_item(item)
python
def _validate_list(self, target): """Validate a list against field validation rules""" # Check list length restrictions min_items = self._field.field_definition.get('minItems') max_items = self._field.field_definition.get('maxItems') if min_items is not None: if len(target) < min_items: raise ValidationError( self._record, "Field '{}' must have a minimum of {} item(s)".format(self._field.name, min_items) ) if max_items is not None: if len(target) > max_items: raise ValidationError( self._record, "Field '{}' can only have a maximum of {} item(s)".format(self._field.name, max_items) ) # Individual item validation for item in target: self._validate_item(item)
[ "def", "_validate_list", "(", "self", ",", "target", ")", ":", "# Check list length restrictions", "min_items", "=", "self", ".", "_field", ".", "field_definition", ".", "get", "(", "'minItems'", ")", "max_items", "=", "self", ".", "_field", ".", "field_definition", ".", "get", "(", "'maxItems'", ")", "if", "min_items", "is", "not", "None", ":", "if", "len", "(", "target", ")", "<", "min_items", ":", "raise", "ValidationError", "(", "self", ".", "_record", ",", "\"Field '{}' must have a minimum of {} item(s)\"", ".", "format", "(", "self", ".", "_field", ".", "name", ",", "min_items", ")", ")", "if", "max_items", "is", "not", "None", ":", "if", "len", "(", "target", ")", ">", "max_items", ":", "raise", "ValidationError", "(", "self", ".", "_record", ",", "\"Field '{}' can only have a maximum of {} item(s)\"", ".", "format", "(", "self", ".", "_field", ".", "name", ",", "max_items", ")", ")", "# Individual item validation", "for", "item", "in", "target", ":", "self", ".", "_validate_item", "(", "item", ")" ]
Validate a list against field validation rules
[ "Validate", "a", "list", "against", "field", "validation", "rules" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/list.py#L18-L40
train
swimlane/swimlane-python
swimlane/core/fields/list.py
ListField.set_swimlane
def set_swimlane(self, value): """Convert from list of dicts with values to list of values Cache list items with their ID pairs to restore existing IDs to unmodified values to prevent workflow evaluating on each save for any already existing values """ value = value or [] self._initial_value_to_ids_map = defaultdict(list) for item in value: self._initial_value_to_ids_map[item['value']].append(item['id']) return super(ListField, self).set_swimlane([d['value'] for d in value])
python
def set_swimlane(self, value): """Convert from list of dicts with values to list of values Cache list items with their ID pairs to restore existing IDs to unmodified values to prevent workflow evaluating on each save for any already existing values """ value = value or [] self._initial_value_to_ids_map = defaultdict(list) for item in value: self._initial_value_to_ids_map[item['value']].append(item['id']) return super(ListField, self).set_swimlane([d['value'] for d in value])
[ "def", "set_swimlane", "(", "self", ",", "value", ")", ":", "value", "=", "value", "or", "[", "]", "self", ".", "_initial_value_to_ids_map", "=", "defaultdict", "(", "list", ")", "for", "item", "in", "value", ":", "self", ".", "_initial_value_to_ids_map", "[", "item", "[", "'value'", "]", "]", ".", "append", "(", "item", "[", "'id'", "]", ")", "return", "super", "(", "ListField", ",", "self", ")", ".", "set_swimlane", "(", "[", "d", "[", "'value'", "]", "for", "d", "in", "value", "]", ")" ]
Convert from list of dicts with values to list of values Cache list items with their ID pairs to restore existing IDs to unmodified values to prevent workflow evaluating on each save for any already existing values
[ "Convert", "from", "list", "of", "dicts", "with", "values", "to", "list", "of", "values" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/list.py#L207-L219
train
swimlane/swimlane-python
swimlane/core/fields/list.py
ListField.set_python
def set_python(self, value): """Validate using cursor for consistency between direct set of values vs modification of cursor values""" if not isinstance(value, (list, type(None))): raise ValidationError( self.record, "Field '{}' must be set to a list, not '{}'".format( self.name, value.__class__ ) ) value = value or [] self.cursor._validate_list(value) return super(ListField, self).set_python(value)
python
def set_python(self, value): """Validate using cursor for consistency between direct set of values vs modification of cursor values""" if not isinstance(value, (list, type(None))): raise ValidationError( self.record, "Field '{}' must be set to a list, not '{}'".format( self.name, value.__class__ ) ) value = value or [] self.cursor._validate_list(value) return super(ListField, self).set_python(value)
[ "def", "set_python", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "(", "list", ",", "type", "(", "None", ")", ")", ")", ":", "raise", "ValidationError", "(", "self", ".", "record", ",", "\"Field '{}' must be set to a list, not '{}'\"", ".", "format", "(", "self", ".", "name", ",", "value", ".", "__class__", ")", ")", "value", "=", "value", "or", "[", "]", "self", ".", "cursor", ".", "_validate_list", "(", "value", ")", "return", "super", "(", "ListField", ",", "self", ")", ".", "set_python", "(", "value", ")" ]
Validate using cursor for consistency between direct set of values vs modification of cursor values
[ "Validate", "using", "cursor", "for", "consistency", "between", "direct", "set", "of", "values", "vs", "modification", "of", "cursor", "values" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/list.py#L221-L233
train
swimlane/swimlane-python
swimlane/core/fields/list.py
ListField.cast_to_swimlane
def cast_to_swimlane(self, value): """Restore swimlane format, attempting to keep initial IDs for any previously existing values""" value = super(ListField, self).cast_to_swimlane(value) if not value: return None # Copy initial values to pop IDs out as each value is hydrated back to server format, without modifying initial # cache of value -> list(ids) map value_ids = deepcopy(self._initial_value_to_ids_map) return [self._build_list_item(item, value_ids[item].pop(0) if value_ids[item] else None) for item in value]
python
def cast_to_swimlane(self, value): """Restore swimlane format, attempting to keep initial IDs for any previously existing values""" value = super(ListField, self).cast_to_swimlane(value) if not value: return None # Copy initial values to pop IDs out as each value is hydrated back to server format, without modifying initial # cache of value -> list(ids) map value_ids = deepcopy(self._initial_value_to_ids_map) return [self._build_list_item(item, value_ids[item].pop(0) if value_ids[item] else None) for item in value]
[ "def", "cast_to_swimlane", "(", "self", ",", "value", ")", ":", "value", "=", "super", "(", "ListField", ",", "self", ")", ".", "cast_to_swimlane", "(", "value", ")", "if", "not", "value", ":", "return", "None", "# Copy initial values to pop IDs out as each value is hydrated back to server format, without modifying initial", "# cache of value -> list(ids) map", "value_ids", "=", "deepcopy", "(", "self", ".", "_initial_value_to_ids_map", ")", "return", "[", "self", ".", "_build_list_item", "(", "item", ",", "value_ids", "[", "item", "]", ".", "pop", "(", "0", ")", "if", "value_ids", "[", "item", "]", "else", "None", ")", "for", "item", "in", "value", "]" ]
Restore swimlane format, attempting to keep initial IDs for any previously existing values
[ "Restore", "swimlane", "format", "attempting", "to", "keep", "initial", "IDs", "for", "any", "previously", "existing", "values" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/list.py#L235-L246
train
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
MultiSelectCursor.select
def select(self, element): """Add an element to the set of selected elements Proxy to internal set.add and sync field """ self._field.validate_value(element) self._elements.add(element) self._sync_field()
python
def select(self, element): """Add an element to the set of selected elements Proxy to internal set.add and sync field """ self._field.validate_value(element) self._elements.add(element) self._sync_field()
[ "def", "select", "(", "self", ",", "element", ")", ":", "self", ".", "_field", ".", "validate_value", "(", "element", ")", "self", ".", "_elements", ".", "add", "(", "element", ")", "self", ".", "_sync_field", "(", ")" ]
Add an element to the set of selected elements Proxy to internal set.add and sync field
[ "Add", "an", "element", "to", "the", "set", "of", "selected", "elements", "Proxy", "to", "internal", "set", ".", "add", "and", "sync", "field" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L17-L24
train
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
MultiSelectField.get_python
def get_python(self): """Only return cursor instance if configured for multiselect""" if self.multiselect: return super(MultiSelectField, self).get_python() return self._get()
python
def get_python(self): """Only return cursor instance if configured for multiselect""" if self.multiselect: return super(MultiSelectField, self).get_python() return self._get()
[ "def", "get_python", "(", "self", ")", ":", "if", "self", ".", "multiselect", ":", "return", "super", "(", "MultiSelectField", ",", "self", ")", ".", "get_python", "(", ")", "return", "self", ".", "_get", "(", ")" ]
Only return cursor instance if configured for multiselect
[ "Only", "return", "cursor", "instance", "if", "configured", "for", "multiselect" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L40-L45
train
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
MultiSelectField.get_swimlane
def get_swimlane(self): """Handle multi-select and single-select modes""" if self.multiselect: value = self._get() children = [] if value: for child in value: children.append(self.cast_to_swimlane(child)) return children return None return super(MultiSelectField, self).get_swimlane()
python
def get_swimlane(self): """Handle multi-select and single-select modes""" if self.multiselect: value = self._get() children = [] if value: for child in value: children.append(self.cast_to_swimlane(child)) return children return None return super(MultiSelectField, self).get_swimlane()
[ "def", "get_swimlane", "(", "self", ")", ":", "if", "self", ".", "multiselect", ":", "value", "=", "self", ".", "_get", "(", ")", "children", "=", "[", "]", "if", "value", ":", "for", "child", "in", "value", ":", "children", ".", "append", "(", "self", ".", "cast_to_swimlane", "(", "child", ")", ")", "return", "children", "return", "None", "return", "super", "(", "MultiSelectField", ",", "self", ")", ".", "get_swimlane", "(", ")" ]
Handle multi-select and single-select modes
[ "Handle", "multi", "-", "select", "and", "single", "-", "select", "modes" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L47-L58
train
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
MultiSelectField.set_python
def set_python(self, value): """Override to remove key from raw data when empty to work with server 2.16+ validation""" if self.multiselect: value = value or [] elements = [] for element in value: self.validate_value(element) elements.append(element) value = elements else: self.validate_value(value) self._set(value)
python
def set_python(self, value): """Override to remove key from raw data when empty to work with server 2.16+ validation""" if self.multiselect: value = value or [] elements = [] for element in value: self.validate_value(element) elements.append(element) value = elements else: self.validate_value(value) self._set(value)
[ "def", "set_python", "(", "self", ",", "value", ")", ":", "if", "self", ".", "multiselect", ":", "value", "=", "value", "or", "[", "]", "elements", "=", "[", "]", "for", "element", "in", "value", ":", "self", ".", "validate_value", "(", "element", ")", "elements", ".", "append", "(", "element", ")", "value", "=", "elements", "else", ":", "self", ".", "validate_value", "(", "value", ")", "self", ".", "_set", "(", "value", ")" ]
Override to remove key from raw data when empty to work with server 2.16+ validation
[ "Override", "to", "remove", "key", "from", "raw", "data", "when", "empty", "to", "work", "with", "server", "2", ".", "16", "+", "validation" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L64-L78
train
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
MultiSelectField.set_swimlane
def set_swimlane(self, value): """Cast all multi-select elements to correct internal type like single-select mode""" if self.multiselect: value = value or [] children = [] for child in value: children.append(self.cast_to_python(child)) return self._set(children) return super(MultiSelectField, self).set_swimlane(value)
python
def set_swimlane(self, value): """Cast all multi-select elements to correct internal type like single-select mode""" if self.multiselect: value = value or [] children = [] for child in value: children.append(self.cast_to_python(child)) return self._set(children) return super(MultiSelectField, self).set_swimlane(value)
[ "def", "set_swimlane", "(", "self", ",", "value", ")", ":", "if", "self", ".", "multiselect", ":", "value", "=", "value", "or", "[", "]", "children", "=", "[", "]", "for", "child", "in", "value", ":", "children", ".", "append", "(", "self", ".", "cast_to_python", "(", "child", ")", ")", "return", "self", ".", "_set", "(", "children", ")", "return", "super", "(", "MultiSelectField", ",", "self", ")", ".", "set_swimlane", "(", "value", ")" ]
Cast all multi-select elements to correct internal type like single-select mode
[ "Cast", "all", "multi", "-", "select", "elements", "to", "correct", "internal", "type", "like", "single", "-", "select", "mode" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L80-L91
train
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
MultiSelectField.for_json
def for_json(self): """Handle multi-select vs single-select""" if self.multiselect: return super(MultiSelectField, self).for_json() value = self.get_python() if hasattr(value, 'for_json'): return value.for_json() return value
python
def for_json(self): """Handle multi-select vs single-select""" if self.multiselect: return super(MultiSelectField, self).for_json() value = self.get_python() if hasattr(value, 'for_json'): return value.for_json() return value
[ "def", "for_json", "(", "self", ")", ":", "if", "self", ".", "multiselect", ":", "return", "super", "(", "MultiSelectField", ",", "self", ")", ".", "for_json", "(", ")", "value", "=", "self", ".", "get_python", "(", ")", "if", "hasattr", "(", "value", ",", "'for_json'", ")", ":", "return", "value", ".", "for_json", "(", ")", "return", "value" ]
Handle multi-select vs single-select
[ "Handle", "multi", "-", "select", "vs", "single", "-", "select" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L93-L103
train
swimlane/swimlane-python
swimlane/core/resources/record.py
record_factory
def record_factory(app, fields=None): """Return a temporary Record instance to be used for field validation and value parsing Args: app (App): Target App to create a transient Record instance for fields (dict): Optional dict of fields and values to set on new Record instance before returning Returns: Record: Unsaved Record instance to be used for validation, creation, etc. """ # pylint: disable=line-too-long record = Record(app, { '$type': Record._type, 'isNew': True, 'applicationId': app.id, 'comments': { '$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib' }, 'values': { '$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib' } }) fields = fields or {} for name, value in six.iteritems(fields): record[name] = value # Pop off fields with None value to allow for saving empty fields copy_raw = copy.copy(record._raw) values_dict = {} for key, value in six.iteritems(copy_raw['values']): if value is not None: values_dict[key] = value record._raw['values'] = values_dict return record
python
def record_factory(app, fields=None): """Return a temporary Record instance to be used for field validation and value parsing Args: app (App): Target App to create a transient Record instance for fields (dict): Optional dict of fields and values to set on new Record instance before returning Returns: Record: Unsaved Record instance to be used for validation, creation, etc. """ # pylint: disable=line-too-long record = Record(app, { '$type': Record._type, 'isNew': True, 'applicationId': app.id, 'comments': { '$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib' }, 'values': { '$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib' } }) fields = fields or {} for name, value in six.iteritems(fields): record[name] = value # Pop off fields with None value to allow for saving empty fields copy_raw = copy.copy(record._raw) values_dict = {} for key, value in six.iteritems(copy_raw['values']): if value is not None: values_dict[key] = value record._raw['values'] = values_dict return record
[ "def", "record_factory", "(", "app", ",", "fields", "=", "None", ")", ":", "# pylint: disable=line-too-long", "record", "=", "Record", "(", "app", ",", "{", "'$type'", ":", "Record", ".", "_type", ",", "'isNew'", ":", "True", ",", "'applicationId'", ":", "app", ".", "id", ",", "'comments'", ":", "{", "'$type'", ":", "'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib'", "}", ",", "'values'", ":", "{", "'$type'", ":", "'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib'", "}", "}", ")", "fields", "=", "fields", "or", "{", "}", "for", "name", ",", "value", "in", "six", ".", "iteritems", "(", "fields", ")", ":", "record", "[", "name", "]", "=", "value", "# Pop off fields with None value to allow for saving empty fields", "copy_raw", "=", "copy", ".", "copy", "(", "record", ".", "_raw", ")", "values_dict", "=", "{", "}", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "copy_raw", "[", "'values'", "]", ")", ":", "if", "value", "is", "not", "None", ":", "values_dict", "[", "key", "]", "=", "value", "record", ".", "_raw", "[", "'values'", "]", "=", "values_dict", "return", "record" ]
Return a temporary Record instance to be used for field validation and value parsing Args: app (App): Target App to create a transient Record instance for fields (dict): Optional dict of fields and values to set on new Record instance before returning Returns: Record: Unsaved Record instance to be used for validation, creation, etc.
[ "Return", "a", "temporary", "Record", "instance", "to", "be", "used", "for", "field", "validation", "and", "value", "parsing" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/resources/record.py#L311-L347
train
swimlane/swimlane-python
swimlane/core/fields/text.py
TextField.set_python
def set_python(self, value): """Set field internal value from the python representation of field value""" # hook exists to stringify before validation # set to string if not string or unicode if value is not None and not isinstance(value, self.supported_types) or isinstance(value, int): value = str(value) return super(TextField, self).set_python(value)
python
def set_python(self, value): """Set field internal value from the python representation of field value""" # hook exists to stringify before validation # set to string if not string or unicode if value is not None and not isinstance(value, self.supported_types) or isinstance(value, int): value = str(value) return super(TextField, self).set_python(value)
[ "def", "set_python", "(", "self", ",", "value", ")", ":", "# hook exists to stringify before validation", "# set to string if not string or unicode", "if", "value", "is", "not", "None", "and", "not", "isinstance", "(", "value", ",", "self", ".", "supported_types", ")", "or", "isinstance", "(", "value", ",", "int", ")", ":", "value", "=", "str", "(", "value", ")", "return", "super", "(", "TextField", ",", "self", ")", ".", "set_python", "(", "value", ")" ]
Set field internal value from the python representation of field value
[ "Set", "field", "internal", "value", "from", "the", "python", "representation", "of", "field", "value" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/text.py#L15-L23
train
swimlane/swimlane-python
swimlane/utils/version.py
compare_versions
def compare_versions(version_a, version_b, zerofill=False): """Return direction of version relative to provided version sections Args: version_a (str): First version to compare version_b (str): Second version to compare zerofill (bool): If True, treat any missing version sections as 0, otherwise ignore section. Defaults to False Returns: int: 0 if equal, -1 if a > b, 1 if a < b Examples: If a is equal to b, return 0 If a is greater than b, return -1 If a is less than b, return 1 >>> compare_versions('2', '2') == 0 >>> compare_versions('2', '1') == -1 >>> compare_versions('2', '3') == 1 If zerofill is False (default), sections not included in both versions are ignored during comparison >>> compare_versions('2.13.2', '2.13') == 0 >>> compare_versions('2.13.2-1234', '3') == 1 If zerofill is True, any sections in one version not included in other version are set to 0 >>> compare_versions('2.13.2', '2.13', True) == -1 >>> compare_versions('2.13.2-1234', '2.13.2', True) == -1 >>> compare_versions('2.13.2', '2.13.2', True) == 0 """ a_sections = list((int(match) for match in re.findall(r'\d+', version_a))) b_sections = list((int(match) for match in re.findall(r'\d+', version_b))) if zerofill: max_sections = max([len(a_sections), len(b_sections)]) a_sections += [0 for _ in range(max(max_sections - len(a_sections), 0))] b_sections += [0 for _ in range(max(max_sections - len(b_sections), 0))] else: min_sections = min([len(a_sections), len(b_sections)]) a_sections = a_sections[:min_sections] b_sections = b_sections[:min_sections] return (b_sections > a_sections) - (b_sections < a_sections)
python
def compare_versions(version_a, version_b, zerofill=False): """Return direction of version relative to provided version sections Args: version_a (str): First version to compare version_b (str): Second version to compare zerofill (bool): If True, treat any missing version sections as 0, otherwise ignore section. Defaults to False Returns: int: 0 if equal, -1 if a > b, 1 if a < b Examples: If a is equal to b, return 0 If a is greater than b, return -1 If a is less than b, return 1 >>> compare_versions('2', '2') == 0 >>> compare_versions('2', '1') == -1 >>> compare_versions('2', '3') == 1 If zerofill is False (default), sections not included in both versions are ignored during comparison >>> compare_versions('2.13.2', '2.13') == 0 >>> compare_versions('2.13.2-1234', '3') == 1 If zerofill is True, any sections in one version not included in other version are set to 0 >>> compare_versions('2.13.2', '2.13', True) == -1 >>> compare_versions('2.13.2-1234', '2.13.2', True) == -1 >>> compare_versions('2.13.2', '2.13.2', True) == 0 """ a_sections = list((int(match) for match in re.findall(r'\d+', version_a))) b_sections = list((int(match) for match in re.findall(r'\d+', version_b))) if zerofill: max_sections = max([len(a_sections), len(b_sections)]) a_sections += [0 for _ in range(max(max_sections - len(a_sections), 0))] b_sections += [0 for _ in range(max(max_sections - len(b_sections), 0))] else: min_sections = min([len(a_sections), len(b_sections)]) a_sections = a_sections[:min_sections] b_sections = b_sections[:min_sections] return (b_sections > a_sections) - (b_sections < a_sections)
[ "def", "compare_versions", "(", "version_a", ",", "version_b", ",", "zerofill", "=", "False", ")", ":", "a_sections", "=", "list", "(", "(", "int", "(", "match", ")", "for", "match", "in", "re", ".", "findall", "(", "r'\\d+'", ",", "version_a", ")", ")", ")", "b_sections", "=", "list", "(", "(", "int", "(", "match", ")", "for", "match", "in", "re", ".", "findall", "(", "r'\\d+'", ",", "version_b", ")", ")", ")", "if", "zerofill", ":", "max_sections", "=", "max", "(", "[", "len", "(", "a_sections", ")", ",", "len", "(", "b_sections", ")", "]", ")", "a_sections", "+=", "[", "0", "for", "_", "in", "range", "(", "max", "(", "max_sections", "-", "len", "(", "a_sections", ")", ",", "0", ")", ")", "]", "b_sections", "+=", "[", "0", "for", "_", "in", "range", "(", "max", "(", "max_sections", "-", "len", "(", "b_sections", ")", ",", "0", ")", ")", "]", "else", ":", "min_sections", "=", "min", "(", "[", "len", "(", "a_sections", ")", ",", "len", "(", "b_sections", ")", "]", ")", "a_sections", "=", "a_sections", "[", ":", "min_sections", "]", "b_sections", "=", "b_sections", "[", ":", "min_sections", "]", "return", "(", "b_sections", ">", "a_sections", ")", "-", "(", "b_sections", "<", "a_sections", ")" ]
Return direction of version relative to provided version sections Args: version_a (str): First version to compare version_b (str): Second version to compare zerofill (bool): If True, treat any missing version sections as 0, otherwise ignore section. Defaults to False Returns: int: 0 if equal, -1 if a > b, 1 if a < b Examples: If a is equal to b, return 0 If a is greater than b, return -1 If a is less than b, return 1 >>> compare_versions('2', '2') == 0 >>> compare_versions('2', '1') == -1 >>> compare_versions('2', '3') == 1 If zerofill is False (default), sections not included in both versions are ignored during comparison >>> compare_versions('2.13.2', '2.13') == 0 >>> compare_versions('2.13.2-1234', '3') == 1 If zerofill is True, any sections in one version not included in other version are set to 0 >>> compare_versions('2.13.2', '2.13', True) == -1 >>> compare_versions('2.13.2-1234', '2.13.2', True) == -1 >>> compare_versions('2.13.2', '2.13.2', True) == 0
[ "Return", "direction", "of", "version", "relative", "to", "provided", "version", "sections" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/utils/version.py#L9-L56
train
swimlane/swimlane-python
swimlane/utils/version.py
requires_swimlane_version
def requires_swimlane_version(min_version=None, max_version=None): """Decorator for SwimlaneResolver methods verifying Swimlane server build version is within a given inclusive range Raises: InvalidVersion: Raised before decorated method call if Swimlane server version is out of provided range ValueError: If neither min_version or max_version were provided, or if those values conflict (2.15 < 2.14) """ if min_version is None and max_version is None: raise ValueError('Must provide either min_version, max_version, or both') if min_version and max_version and compare_versions(min_version, max_version) < 0: raise ValueError('min_version must be <= max_version ({}, {})'.format(min_version, max_version)) def decorator(func): @functools.wraps(func) def wrapper(self, *args, **kwargs): swimlane = self._swimlane if min_version and compare_versions(min_version, swimlane.build_version, True) < 0: raise InvalidSwimlaneBuildVersion(swimlane, min_version, max_version) if max_version and compare_versions(swimlane.build_version, max_version, True) < 0: raise InvalidSwimlaneBuildVersion(swimlane, min_version, max_version) return func(self, *args, **kwargs) return wrapper return decorator
python
def requires_swimlane_version(min_version=None, max_version=None): """Decorator for SwimlaneResolver methods verifying Swimlane server build version is within a given inclusive range Raises: InvalidVersion: Raised before decorated method call if Swimlane server version is out of provided range ValueError: If neither min_version or max_version were provided, or if those values conflict (2.15 < 2.14) """ if min_version is None and max_version is None: raise ValueError('Must provide either min_version, max_version, or both') if min_version and max_version and compare_versions(min_version, max_version) < 0: raise ValueError('min_version must be <= max_version ({}, {})'.format(min_version, max_version)) def decorator(func): @functools.wraps(func) def wrapper(self, *args, **kwargs): swimlane = self._swimlane if min_version and compare_versions(min_version, swimlane.build_version, True) < 0: raise InvalidSwimlaneBuildVersion(swimlane, min_version, max_version) if max_version and compare_versions(swimlane.build_version, max_version, True) < 0: raise InvalidSwimlaneBuildVersion(swimlane, min_version, max_version) return func(self, *args, **kwargs) return wrapper return decorator
[ "def", "requires_swimlane_version", "(", "min_version", "=", "None", ",", "max_version", "=", "None", ")", ":", "if", "min_version", "is", "None", "and", "max_version", "is", "None", ":", "raise", "ValueError", "(", "'Must provide either min_version, max_version, or both'", ")", "if", "min_version", "and", "max_version", "and", "compare_versions", "(", "min_version", ",", "max_version", ")", "<", "0", ":", "raise", "ValueError", "(", "'min_version must be <= max_version ({}, {})'", ".", "format", "(", "min_version", ",", "max_version", ")", ")", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "swimlane", "=", "self", ".", "_swimlane", "if", "min_version", "and", "compare_versions", "(", "min_version", ",", "swimlane", ".", "build_version", ",", "True", ")", "<", "0", ":", "raise", "InvalidSwimlaneBuildVersion", "(", "swimlane", ",", "min_version", ",", "max_version", ")", "if", "max_version", "and", "compare_versions", "(", "swimlane", ".", "build_version", ",", "max_version", ",", "True", ")", "<", "0", ":", "raise", "InvalidSwimlaneBuildVersion", "(", "swimlane", ",", "min_version", ",", "max_version", ")", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
Decorator for SwimlaneResolver methods verifying Swimlane server build version is within a given inclusive range Raises: InvalidVersion: Raised before decorated method call if Swimlane server version is out of provided range ValueError: If neither min_version or max_version were provided, or if those values conflict (2.15 < 2.14)
[ "Decorator", "for", "SwimlaneResolver", "methods", "verifying", "Swimlane", "server", "build", "version", "is", "within", "a", "given", "inclusive", "range" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/utils/version.py#L59-L89
train
swimlane/swimlane-python
swimlane/core/fields/base/field.py
Field.get_report
def get_report(self, value): """Return provided field Python value formatted for use in report filter""" if self.multiselect: value = value or [] children = [] for child in value: children.append(self.cast_to_report(child)) return children return self.cast_to_report(value)
python
def get_report(self, value): """Return provided field Python value formatted for use in report filter""" if self.multiselect: value = value or [] children = [] for child in value: children.append(self.cast_to_report(child)) return children return self.cast_to_report(value)
[ "def", "get_report", "(", "self", ",", "value", ")", ":", "if", "self", ".", "multiselect", ":", "value", "=", "value", "or", "[", "]", "children", "=", "[", "]", "for", "child", "in", "value", ":", "children", ".", "append", "(", "self", ".", "cast_to_report", "(", "child", ")", ")", "return", "children", "return", "self", ".", "cast_to_report", "(", "value", ")" ]
Return provided field Python value formatted for use in report filter
[ "Return", "provided", "field", "Python", "value", "formatted", "for", "use", "in", "report", "filter" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/field.py#L57-L68
train
swimlane/swimlane-python
swimlane/core/fields/base/field.py
Field.get_bulk_modify
def get_bulk_modify(self, value): """Return value in format for bulk modify""" if self.multiselect: value = value or [] return [self.cast_to_bulk_modify(child) for child in value] return self.cast_to_bulk_modify(value)
python
def get_bulk_modify(self, value): """Return value in format for bulk modify""" if self.multiselect: value = value or [] return [self.cast_to_bulk_modify(child) for child in value] return self.cast_to_bulk_modify(value)
[ "def", "get_bulk_modify", "(", "self", ",", "value", ")", ":", "if", "self", ".", "multiselect", ":", "value", "=", "value", "or", "[", "]", "return", "[", "self", ".", "cast_to_bulk_modify", "(", "child", ")", "for", "child", "in", "value", "]", "return", "self", ".", "cast_to_bulk_modify", "(", "value", ")" ]
Return value in format for bulk modify
[ "Return", "value", "in", "format", "for", "bulk", "modify" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/field.py#L70-L76
train
swimlane/swimlane-python
swimlane/core/fields/base/field.py
Field.validate_value
def validate_value(self, value): """Validate value is an acceptable type during set_python operation""" if self.readonly: raise ValidationError(self.record, "Cannot set readonly field '{}'".format(self.name)) if value not in (None, self._unset): if self.supported_types and not isinstance(value, tuple(self.supported_types)): raise ValidationError(self.record, "Field '{}' expects one of {}, got '{}' instead".format( self.name, ', '.join([repr(t.__name__) for t in self.supported_types]), type(value).__name__) )
python
def validate_value(self, value): """Validate value is an acceptable type during set_python operation""" if self.readonly: raise ValidationError(self.record, "Cannot set readonly field '{}'".format(self.name)) if value not in (None, self._unset): if self.supported_types and not isinstance(value, tuple(self.supported_types)): raise ValidationError(self.record, "Field '{}' expects one of {}, got '{}' instead".format( self.name, ', '.join([repr(t.__name__) for t in self.supported_types]), type(value).__name__) )
[ "def", "validate_value", "(", "self", ",", "value", ")", ":", "if", "self", ".", "readonly", ":", "raise", "ValidationError", "(", "self", ".", "record", ",", "\"Cannot set readonly field '{}'\"", ".", "format", "(", "self", ".", "name", ")", ")", "if", "value", "not", "in", "(", "None", ",", "self", ".", "_unset", ")", ":", "if", "self", ".", "supported_types", "and", "not", "isinstance", "(", "value", ",", "tuple", "(", "self", ".", "supported_types", ")", ")", ":", "raise", "ValidationError", "(", "self", ".", "record", ",", "\"Field '{}' expects one of {}, got '{}' instead\"", ".", "format", "(", "self", ".", "name", ",", "', '", ".", "join", "(", "[", "repr", "(", "t", ".", "__name__", ")", "for", "t", "in", "self", ".", "supported_types", "]", ")", ",", "type", "(", "value", ")", ".", "__name__", ")", ")" ]
Validate value is an acceptable type during set_python operation
[ "Validate", "value", "is", "an", "acceptable", "type", "during", "set_python", "operation" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/field.py#L101-L111
train
swimlane/swimlane-python
swimlane/core/fields/base/field.py
Field._set
def _set(self, value): """Default setter used for both representations unless overridden""" self._value = value self.record._raw['values'][self.id] = self.get_swimlane()
python
def _set(self, value): """Default setter used for both representations unless overridden""" self._value = value self.record._raw['values'][self.id] = self.get_swimlane()
[ "def", "_set", "(", "self", ",", "value", ")", ":", "self", ".", "_value", "=", "value", "self", ".", "record", ".", "_raw", "[", "'values'", "]", "[", "self", ".", "id", "]", "=", "self", ".", "get_swimlane", "(", ")" ]
Default setter used for both representations unless overridden
[ "Default", "setter", "used", "for", "both", "representations", "unless", "overridden" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/field.py#L113-L116
train
swimlane/swimlane-python
swimlane/core/fields/__init__.py
resolve_field_class
def resolve_field_class(field_definition): """Return field class most fitting of provided Swimlane field definition""" try: return _FIELD_TYPE_MAP[field_definition['$type']] except KeyError as error: error.message = 'No field available to handle Swimlane $type "{}"'.format(field_definition) raise
python
def resolve_field_class(field_definition): """Return field class most fitting of provided Swimlane field definition""" try: return _FIELD_TYPE_MAP[field_definition['$type']] except KeyError as error: error.message = 'No field available to handle Swimlane $type "{}"'.format(field_definition) raise
[ "def", "resolve_field_class", "(", "field_definition", ")", ":", "try", ":", "return", "_FIELD_TYPE_MAP", "[", "field_definition", "[", "'$type'", "]", "]", "except", "KeyError", "as", "error", ":", "error", ".", "message", "=", "'No field available to handle Swimlane $type \"{}\"'", ".", "format", "(", "field_definition", ")", "raise" ]
Return field class most fitting of provided Swimlane field definition
[ "Return", "field", "class", "most", "fitting", "of", "provided", "Swimlane", "field", "definition" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/__init__.py#L37-L43
train
swimlane/swimlane-python
swimlane/core/cache.py
get_cache_index_key
def get_cache_index_key(resource): """Return a usable cache lookup key for an already initialized resource Args: resource (APIResource|tuple): APIResource instance or 3-length tuple key returned from this function Raises: TypeError: If resource is not an APIResource instance or acceptable 3-length tuple cache key """ if isinstance(resource, APIResource): attr, attr_value = list(resource.get_cache_index_keys().items())[0] key = (type(resource), attr, attr_value) else: key = tuple(resource) if len(key) != 3: raise TypeError('Cache key must be tuple of (class, key, value), got `{!r}` instead'.format(key)) if not issubclass(key[0], APIResource): raise TypeError('First value of cache key must be a subclass of APIResource, got `{!r}` instead'.format(key[0])) return key
python
def get_cache_index_key(resource): """Return a usable cache lookup key for an already initialized resource Args: resource (APIResource|tuple): APIResource instance or 3-length tuple key returned from this function Raises: TypeError: If resource is not an APIResource instance or acceptable 3-length tuple cache key """ if isinstance(resource, APIResource): attr, attr_value = list(resource.get_cache_index_keys().items())[0] key = (type(resource), attr, attr_value) else: key = tuple(resource) if len(key) != 3: raise TypeError('Cache key must be tuple of (class, key, value), got `{!r}` instead'.format(key)) if not issubclass(key[0], APIResource): raise TypeError('First value of cache key must be a subclass of APIResource, got `{!r}` instead'.format(key[0])) return key
[ "def", "get_cache_index_key", "(", "resource", ")", ":", "if", "isinstance", "(", "resource", ",", "APIResource", ")", ":", "attr", ",", "attr_value", "=", "list", "(", "resource", ".", "get_cache_index_keys", "(", ")", ".", "items", "(", ")", ")", "[", "0", "]", "key", "=", "(", "type", "(", "resource", ")", ",", "attr", ",", "attr_value", ")", "else", ":", "key", "=", "tuple", "(", "resource", ")", "if", "len", "(", "key", ")", "!=", "3", ":", "raise", "TypeError", "(", "'Cache key must be tuple of (class, key, value), got `{!r}` instead'", ".", "format", "(", "key", ")", ")", "if", "not", "issubclass", "(", "key", "[", "0", "]", ",", "APIResource", ")", ":", "raise", "TypeError", "(", "'First value of cache key must be a subclass of APIResource, got `{!r}` instead'", ".", "format", "(", "key", "[", "0", "]", ")", ")", "return", "key" ]
Return a usable cache lookup key for an already initialized resource Args: resource (APIResource|tuple): APIResource instance or 3-length tuple key returned from this function Raises: TypeError: If resource is not an APIResource instance or acceptable 3-length tuple cache key
[ "Return", "a", "usable", "cache", "lookup", "key", "for", "an", "already", "initialized", "resource" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/cache.py#L106-L127
train
swimlane/swimlane-python
swimlane/core/cache.py
check_cache
def check_cache(resource_type): """Decorator for adapter methods to check cache for resource before normally sending requests to retrieve data Only works with single kwargs, almost always used with @one_of_keyword_only decorator Args: resource_type (type(APIResource)): Subclass of APIResource of cache to be checked when called """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): try: adapter = args[0] key, val = list(kwargs.items())[0] except IndexError: logger.warning("Couldn't generate full index key, skipping cache") else: index_key = (resource_type, key, val) try: cached_record = adapter._swimlane.resources_cache[index_key] except KeyError: logger.debug('Cache miss: `{!r}`'.format(index_key)) else: logger.debug('Cache hit: `{!r}`'.format(cached_record)) return cached_record # Fallback to default function call return func(*args, **kwargs) return wrapper return decorator
python
def check_cache(resource_type): """Decorator for adapter methods to check cache for resource before normally sending requests to retrieve data Only works with single kwargs, almost always used with @one_of_keyword_only decorator Args: resource_type (type(APIResource)): Subclass of APIResource of cache to be checked when called """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): try: adapter = args[0] key, val = list(kwargs.items())[0] except IndexError: logger.warning("Couldn't generate full index key, skipping cache") else: index_key = (resource_type, key, val) try: cached_record = adapter._swimlane.resources_cache[index_key] except KeyError: logger.debug('Cache miss: `{!r}`'.format(index_key)) else: logger.debug('Cache hit: `{!r}`'.format(cached_record)) return cached_record # Fallback to default function call return func(*args, **kwargs) return wrapper return decorator
[ "def", "check_cache", "(", "resource_type", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "adapter", "=", "args", "[", "0", "]", "key", ",", "val", "=", "list", "(", "kwargs", ".", "items", "(", ")", ")", "[", "0", "]", "except", "IndexError", ":", "logger", ".", "warning", "(", "\"Couldn't generate full index key, skipping cache\"", ")", "else", ":", "index_key", "=", "(", "resource_type", ",", "key", ",", "val", ")", "try", ":", "cached_record", "=", "adapter", ".", "_swimlane", ".", "resources_cache", "[", "index_key", "]", "except", "KeyError", ":", "logger", ".", "debug", "(", "'Cache miss: `{!r}`'", ".", "format", "(", "index_key", ")", ")", "else", ":", "logger", ".", "debug", "(", "'Cache hit: `{!r}`'", ".", "format", "(", "cached_record", ")", ")", "return", "cached_record", "# Fallback to default function call", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
Decorator for adapter methods to check cache for resource before normally sending requests to retrieve data Only works with single kwargs, almost always used with @one_of_keyword_only decorator Args: resource_type (type(APIResource)): Subclass of APIResource of cache to be checked when called
[ "Decorator", "for", "adapter", "methods", "to", "check", "cache", "for", "resource", "before", "normally", "sending", "requests", "to", "retrieve", "data" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/cache.py#L130-L162
train
swimlane/swimlane-python
swimlane/core/cache.py
ResourcesCache.cache
def cache(self, resource): """Insert a resource instance into appropriate resource cache""" if not isinstance(resource, APIResource): raise TypeError('Cannot cache `{!r}`, can only cache APIResource instances'.format(resource)) # Disable inserts to cache when disabled if self.__cache_max_size == 0: return try: cache_internal_key = resource.get_cache_internal_key() cache_index_keys = resource.get_cache_index_keys().items() except NotImplementedError: logger.warning( 'Not caching `{!r}`, resource did not provide all necessary cache details'.format(resource) ) else: resource_type = type(resource) for key, value in cache_index_keys: self.__cache_index_key_map[(resource_type, key, value)] = cache_internal_key self.__caches[resource_type][cache_internal_key] = resource logger.debug('Cached `{!r}`'.format(resource))
python
def cache(self, resource): """Insert a resource instance into appropriate resource cache""" if not isinstance(resource, APIResource): raise TypeError('Cannot cache `{!r}`, can only cache APIResource instances'.format(resource)) # Disable inserts to cache when disabled if self.__cache_max_size == 0: return try: cache_internal_key = resource.get_cache_internal_key() cache_index_keys = resource.get_cache_index_keys().items() except NotImplementedError: logger.warning( 'Not caching `{!r}`, resource did not provide all necessary cache details'.format(resource) ) else: resource_type = type(resource) for key, value in cache_index_keys: self.__cache_index_key_map[(resource_type, key, value)] = cache_internal_key self.__caches[resource_type][cache_internal_key] = resource logger.debug('Cached `{!r}`'.format(resource))
[ "def", "cache", "(", "self", ",", "resource", ")", ":", "if", "not", "isinstance", "(", "resource", ",", "APIResource", ")", ":", "raise", "TypeError", "(", "'Cannot cache `{!r}`, can only cache APIResource instances'", ".", "format", "(", "resource", ")", ")", "# Disable inserts to cache when disabled", "if", "self", ".", "__cache_max_size", "==", "0", ":", "return", "try", ":", "cache_internal_key", "=", "resource", ".", "get_cache_internal_key", "(", ")", "cache_index_keys", "=", "resource", ".", "get_cache_index_keys", "(", ")", ".", "items", "(", ")", "except", "NotImplementedError", ":", "logger", ".", "warning", "(", "'Not caching `{!r}`, resource did not provide all necessary cache details'", ".", "format", "(", "resource", ")", ")", "else", ":", "resource_type", "=", "type", "(", "resource", ")", "for", "key", ",", "value", "in", "cache_index_keys", ":", "self", ".", "__cache_index_key_map", "[", "(", "resource_type", ",", "key", ",", "value", ")", "]", "=", "cache_internal_key", "self", ".", "__caches", "[", "resource_type", "]", "[", "cache_internal_key", "]", "=", "resource", "logger", ".", "debug", "(", "'Cached `{!r}`'", ".", "format", "(", "resource", ")", ")" ]
Insert a resource instance into appropriate resource cache
[ "Insert", "a", "resource", "instance", "into", "appropriate", "resource", "cache" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/cache.py#L70-L94
train
swimlane/swimlane-python
swimlane/core/cache.py
ResourcesCache.clear
def clear(self, *resource_types): """Clear cache for each provided APIResource class, or all resources if no classes are provided""" resource_types = resource_types or tuple(self.__caches.keys()) for cls in resource_types: # Clear and delete cache instances to guarantee no lingering references self.__caches[cls].clear() del self.__caches[cls]
python
def clear(self, *resource_types): """Clear cache for each provided APIResource class, or all resources if no classes are provided""" resource_types = resource_types or tuple(self.__caches.keys()) for cls in resource_types: # Clear and delete cache instances to guarantee no lingering references self.__caches[cls].clear() del self.__caches[cls]
[ "def", "clear", "(", "self", ",", "*", "resource_types", ")", ":", "resource_types", "=", "resource_types", "or", "tuple", "(", "self", ".", "__caches", ".", "keys", "(", ")", ")", "for", "cls", "in", "resource_types", ":", "# Clear and delete cache instances to guarantee no lingering references", "self", ".", "__caches", "[", "cls", "]", ".", "clear", "(", ")", "del", "self", ".", "__caches", "[", "cls", "]" ]
Clear cache for each provided APIResource class, or all resources if no classes are provided
[ "Clear", "cache", "for", "each", "provided", "APIResource", "class", "or", "all", "resources", "if", "no", "classes", "are", "provided" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/cache.py#L96-L103
train
swimlane/swimlane-python
swimlane/core/fields/attachment.py
AttachmentsField._set
def _set(self, value): """Override setter, allow clearing cursor""" super(AttachmentsField, self)._set(value) self._cursor = None
python
def _set(self, value): """Override setter, allow clearing cursor""" super(AttachmentsField, self)._set(value) self._cursor = None
[ "def", "_set", "(", "self", ",", "value", ")", ":", "super", "(", "AttachmentsField", ",", "self", ")", ".", "_set", "(", "value", ")", "self", ".", "_cursor", "=", "None" ]
Override setter, allow clearing cursor
[ "Override", "setter", "allow", "clearing", "cursor" ]
588fc503a76799bcdb5aecdf2f64a6ee05e3922d
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/attachment.py#L58-L61
train
MicroPyramid/django-mfa
django_mfa/views.py
verify_otp
def verify_otp(request): """ Verify a OTP request """ ctx = {} if request.method == "POST": verification_code = request.POST.get('verification_code') if verification_code is None: ctx['error_message'] = "Missing verification code." else: otp_ = UserOTP.objects.get(user=request.user) totp_ = totp.TOTP(otp_.secret_key) is_verified = totp_.verify(verification_code) if is_verified: request.session['verfied_otp'] = True response = redirect(request.POST.get("next", settings.LOGIN_REDIRECT_URL)) return update_rmb_cookie(request, response) ctx['error_message'] = "Your code is expired or invalid." ctx['next'] = request.GET.get('next', settings.LOGIN_REDIRECT_URL) return render(request, 'django_mfa/login_verify.html', ctx, status=400)
python
def verify_otp(request): """ Verify a OTP request """ ctx = {} if request.method == "POST": verification_code = request.POST.get('verification_code') if verification_code is None: ctx['error_message'] = "Missing verification code." else: otp_ = UserOTP.objects.get(user=request.user) totp_ = totp.TOTP(otp_.secret_key) is_verified = totp_.verify(verification_code) if is_verified: request.session['verfied_otp'] = True response = redirect(request.POST.get("next", settings.LOGIN_REDIRECT_URL)) return update_rmb_cookie(request, response) ctx['error_message'] = "Your code is expired or invalid." ctx['next'] = request.GET.get('next', settings.LOGIN_REDIRECT_URL) return render(request, 'django_mfa/login_verify.html', ctx, status=400)
[ "def", "verify_otp", "(", "request", ")", ":", "ctx", "=", "{", "}", "if", "request", ".", "method", "==", "\"POST\"", ":", "verification_code", "=", "request", ".", "POST", ".", "get", "(", "'verification_code'", ")", "if", "verification_code", "is", "None", ":", "ctx", "[", "'error_message'", "]", "=", "\"Missing verification code.\"", "else", ":", "otp_", "=", "UserOTP", ".", "objects", ".", "get", "(", "user", "=", "request", ".", "user", ")", "totp_", "=", "totp", ".", "TOTP", "(", "otp_", ".", "secret_key", ")", "is_verified", "=", "totp_", ".", "verify", "(", "verification_code", ")", "if", "is_verified", ":", "request", ".", "session", "[", "'verfied_otp'", "]", "=", "True", "response", "=", "redirect", "(", "request", ".", "POST", ".", "get", "(", "\"next\"", ",", "settings", ".", "LOGIN_REDIRECT_URL", ")", ")", "return", "update_rmb_cookie", "(", "request", ",", "response", ")", "ctx", "[", "'error_message'", "]", "=", "\"Your code is expired or invalid.\"", "ctx", "[", "'next'", "]", "=", "request", ".", "GET", ".", "get", "(", "'next'", ",", "settings", ".", "LOGIN_REDIRECT_URL", ")", "return", "render", "(", "request", ",", "'django_mfa/login_verify.html'", ",", "ctx", ",", "status", "=", "400", ")" ]
Verify a OTP request
[ "Verify", "a", "OTP", "request" ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/views.py#L142-L166
train
MicroPyramid/django-mfa
django_mfa/totp.py
TOTP.at
def at(self, for_time, counter_offset=0): """ Accepts either a Unix timestamp integer or a Time object. Time objects will be adjusted to UTC automatically @param [Time/Integer] time the time to generate an OTP for @param [Integer] counter_offset an amount of ticks to add to the time counter """ if not isinstance(for_time, datetime.datetime): for_time = datetime.datetime.fromtimestamp(int(for_time)) return self.generate_otp(self.timecode(for_time) + counter_offset)
python
def at(self, for_time, counter_offset=0): """ Accepts either a Unix timestamp integer or a Time object. Time objects will be adjusted to UTC automatically @param [Time/Integer] time the time to generate an OTP for @param [Integer] counter_offset an amount of ticks to add to the time counter """ if not isinstance(for_time, datetime.datetime): for_time = datetime.datetime.fromtimestamp(int(for_time)) return self.generate_otp(self.timecode(for_time) + counter_offset)
[ "def", "at", "(", "self", ",", "for_time", ",", "counter_offset", "=", "0", ")", ":", "if", "not", "isinstance", "(", "for_time", ",", "datetime", ".", "datetime", ")", ":", "for_time", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "int", "(", "for_time", ")", ")", "return", "self", ".", "generate_otp", "(", "self", ".", "timecode", "(", "for_time", ")", "+", "counter_offset", ")" ]
Accepts either a Unix timestamp integer or a Time object. Time objects will be adjusted to UTC automatically @param [Time/Integer] time the time to generate an OTP for @param [Integer] counter_offset an amount of ticks to add to the time counter
[ "Accepts", "either", "a", "Unix", "timestamp", "integer", "or", "a", "Time", "object", ".", "Time", "objects", "will", "be", "adjusted", "to", "UTC", "automatically" ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/totp.py#L19-L28
train
MicroPyramid/django-mfa
django_mfa/totp.py
TOTP.verify
def verify(self, otp, for_time=None, valid_window=0): """ Verifies the OTP passed in against the current time OTP @param [String/Integer] otp the OTP to check against @param [Integer] valid_window extends the validity to this many counter ticks before and after the current one """ if for_time is None: for_time = datetime.datetime.now() if valid_window: for i in range(-valid_window, valid_window + 1): if utils.strings_equal(str(otp), str(self.at(for_time, i))): return True return False return utils.strings_equal(str(otp), str(self.at(for_time)))
python
def verify(self, otp, for_time=None, valid_window=0): """ Verifies the OTP passed in against the current time OTP @param [String/Integer] otp the OTP to check against @param [Integer] valid_window extends the validity to this many counter ticks before and after the current one """ if for_time is None: for_time = datetime.datetime.now() if valid_window: for i in range(-valid_window, valid_window + 1): if utils.strings_equal(str(otp), str(self.at(for_time, i))): return True return False return utils.strings_equal(str(otp), str(self.at(for_time)))
[ "def", "verify", "(", "self", ",", "otp", ",", "for_time", "=", "None", ",", "valid_window", "=", "0", ")", ":", "if", "for_time", "is", "None", ":", "for_time", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "if", "valid_window", ":", "for", "i", "in", "range", "(", "-", "valid_window", ",", "valid_window", "+", "1", ")", ":", "if", "utils", ".", "strings_equal", "(", "str", "(", "otp", ")", ",", "str", "(", "self", ".", "at", "(", "for_time", ",", "i", ")", ")", ")", ":", "return", "True", "return", "False", "return", "utils", ".", "strings_equal", "(", "str", "(", "otp", ")", ",", "str", "(", "self", ".", "at", "(", "for_time", ")", ")", ")" ]
Verifies the OTP passed in against the current time OTP @param [String/Integer] otp the OTP to check against @param [Integer] valid_window extends the validity to this many counter ticks before and after the current one
[ "Verifies", "the", "OTP", "passed", "in", "against", "the", "current", "time", "OTP" ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/totp.py#L37-L52
train
MicroPyramid/django-mfa
django_mfa/totp.py
TOTP.provisioning_uri
def provisioning_uri(self, name, issuer_name=None): """ Returns the provisioning URI for the OTP This can then be encoded in a QR Code and used to provision the Google Authenticator app @param [String] name of the account @return [String] provisioning uri """ return utils.build_uri(self.secret, name, issuer_name=issuer_name)
python
def provisioning_uri(self, name, issuer_name=None): """ Returns the provisioning URI for the OTP This can then be encoded in a QR Code and used to provision the Google Authenticator app @param [String] name of the account @return [String] provisioning uri """ return utils.build_uri(self.secret, name, issuer_name=issuer_name)
[ "def", "provisioning_uri", "(", "self", ",", "name", ",", "issuer_name", "=", "None", ")", ":", "return", "utils", ".", "build_uri", "(", "self", ".", "secret", ",", "name", ",", "issuer_name", "=", "issuer_name", ")" ]
Returns the provisioning URI for the OTP This can then be encoded in a QR Code and used to provision the Google Authenticator app @param [String] name of the account @return [String] provisioning uri
[ "Returns", "the", "provisioning", "URI", "for", "the", "OTP", "This", "can", "then", "be", "encoded", "in", "a", "QR", "Code", "and", "used", "to", "provision", "the", "Google", "Authenticator", "app" ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/totp.py#L54-L62
train
MicroPyramid/django-mfa
django_mfa/utils.py
build_uri
def build_uri(secret, name, initial_count=None, issuer_name=None): """ Returns the provisioning URI for the OTP; works for either TOTP or HOTP. This can then be encoded in a QR Code and used to provision the Google Authenticator app. For module-internal use. See also: http://code.google.com/p/google-authenticator/wiki/KeyUriFormat @param [String] the hotp/totp secret used to generate the URI @param [String] name of the account @param [Integer] initial_count starting counter value, defaults to None. If none, the OTP type will be assumed as TOTP. @param [String] the name of the OTP issuer; this will be the organization title of the OTP entry in Authenticator @return [String] provisioning uri """ # initial_count may be 0 as a valid param is_initial_count_present = (initial_count is not None) otp_type = 'hotp' if is_initial_count_present else 'totp' base = 'otpauth://%s/' % otp_type if issuer_name: issuer_name = quote(issuer_name) base += '%s:' % issuer_name uri = '%(base)s%(name)s?secret=%(secret)s' % { 'name': quote(name, safe='@'), 'secret': secret, 'base': base, } if is_initial_count_present: uri += '&counter=%s' % initial_count if issuer_name: uri += '&issuer=%s' % issuer_name return uri
python
def build_uri(secret, name, initial_count=None, issuer_name=None): """ Returns the provisioning URI for the OTP; works for either TOTP or HOTP. This can then be encoded in a QR Code and used to provision the Google Authenticator app. For module-internal use. See also: http://code.google.com/p/google-authenticator/wiki/KeyUriFormat @param [String] the hotp/totp secret used to generate the URI @param [String] name of the account @param [Integer] initial_count starting counter value, defaults to None. If none, the OTP type will be assumed as TOTP. @param [String] the name of the OTP issuer; this will be the organization title of the OTP entry in Authenticator @return [String] provisioning uri """ # initial_count may be 0 as a valid param is_initial_count_present = (initial_count is not None) otp_type = 'hotp' if is_initial_count_present else 'totp' base = 'otpauth://%s/' % otp_type if issuer_name: issuer_name = quote(issuer_name) base += '%s:' % issuer_name uri = '%(base)s%(name)s?secret=%(secret)s' % { 'name': quote(name, safe='@'), 'secret': secret, 'base': base, } if is_initial_count_present: uri += '&counter=%s' % initial_count if issuer_name: uri += '&issuer=%s' % issuer_name return uri
[ "def", "build_uri", "(", "secret", ",", "name", ",", "initial_count", "=", "None", ",", "issuer_name", "=", "None", ")", ":", "# initial_count may be 0 as a valid param", "is_initial_count_present", "=", "(", "initial_count", "is", "not", "None", ")", "otp_type", "=", "'hotp'", "if", "is_initial_count_present", "else", "'totp'", "base", "=", "'otpauth://%s/'", "%", "otp_type", "if", "issuer_name", ":", "issuer_name", "=", "quote", "(", "issuer_name", ")", "base", "+=", "'%s:'", "%", "issuer_name", "uri", "=", "'%(base)s%(name)s?secret=%(secret)s'", "%", "{", "'name'", ":", "quote", "(", "name", ",", "safe", "=", "'@'", ")", ",", "'secret'", ":", "secret", ",", "'base'", ":", "base", ",", "}", "if", "is_initial_count_present", ":", "uri", "+=", "'&counter=%s'", "%", "initial_count", "if", "issuer_name", ":", "uri", "+=", "'&issuer=%s'", "%", "issuer_name", "return", "uri" ]
Returns the provisioning URI for the OTP; works for either TOTP or HOTP. This can then be encoded in a QR Code and used to provision the Google Authenticator app. For module-internal use. See also: http://code.google.com/p/google-authenticator/wiki/KeyUriFormat @param [String] the hotp/totp secret used to generate the URI @param [String] name of the account @param [Integer] initial_count starting counter value, defaults to None. If none, the OTP type will be assumed as TOTP. @param [String] the name of the OTP issuer; this will be the organization title of the OTP entry in Authenticator @return [String] provisioning uri
[ "Returns", "the", "provisioning", "URI", "for", "the", "OTP", ";", "works", "for", "either", "TOTP", "or", "HOTP", "." ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/utils.py#L15-L57
train
MicroPyramid/django-mfa
django_mfa/utils.py
strings_equal
def strings_equal(s1, s2): """ Timing-attack resistant string comparison. Normal comparison using == will short-circuit on the first mismatching character. This avoids that by scanning the whole string, though we still reveal to a timing attack whether the strings are the same length. """ try: s1 = unicodedata.normalize('NFKC', str(s1)) s2 = unicodedata.normalize('NFKC', str(s2)) except: s1 = unicodedata.normalize('NFKC', unicode(s1)) s2 = unicodedata.normalize('NFKC', unicode(s2)) return compare_digest(s1, s2)
python
def strings_equal(s1, s2): """ Timing-attack resistant string comparison. Normal comparison using == will short-circuit on the first mismatching character. This avoids that by scanning the whole string, though we still reveal to a timing attack whether the strings are the same length. """ try: s1 = unicodedata.normalize('NFKC', str(s1)) s2 = unicodedata.normalize('NFKC', str(s2)) except: s1 = unicodedata.normalize('NFKC', unicode(s1)) s2 = unicodedata.normalize('NFKC', unicode(s2)) return compare_digest(s1, s2)
[ "def", "strings_equal", "(", "s1", ",", "s2", ")", ":", "try", ":", "s1", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "str", "(", "s1", ")", ")", "s2", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "str", "(", "s2", ")", ")", "except", ":", "s1", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "unicode", "(", "s1", ")", ")", "s2", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "unicode", "(", "s2", ")", ")", "return", "compare_digest", "(", "s1", ",", "s2", ")" ]
Timing-attack resistant string comparison. Normal comparison using == will short-circuit on the first mismatching character. This avoids that by scanning the whole string, though we still reveal to a timing attack whether the strings are the same length.
[ "Timing", "-", "attack", "resistant", "string", "comparison", "." ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/utils.py#L79-L94
train
libyal/libbde
setup.py
GetPythonLibraryDirectoryPath
def GetPythonLibraryDirectoryPath(): """Retrieves the Python library directory path.""" path = sysconfig.get_python_lib(True) _, _, path = path.rpartition(sysconfig.PREFIX) if path.startswith(os.sep): path = path[1:] return path
python
def GetPythonLibraryDirectoryPath(): """Retrieves the Python library directory path.""" path = sysconfig.get_python_lib(True) _, _, path = path.rpartition(sysconfig.PREFIX) if path.startswith(os.sep): path = path[1:] return path
[ "def", "GetPythonLibraryDirectoryPath", "(", ")", ":", "path", "=", "sysconfig", ".", "get_python_lib", "(", "True", ")", "_", ",", "_", ",", "path", "=", "path", ".", "rpartition", "(", "sysconfig", ".", "PREFIX", ")", "if", "path", ".", "startswith", "(", "os", ".", "sep", ")", ":", "path", "=", "path", "[", "1", ":", "]", "return", "path" ]
Retrieves the Python library directory path.
[ "Retrieves", "the", "Python", "library", "directory", "path", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L267-L275
train
libyal/libbde
setup.py
custom_build_ext.run
def run(self): """Runs the build extension.""" compiler = new_compiler(compiler=self.compiler) if compiler.compiler_type == "msvc": self.define = [ ("UNICODE", ""), ] else: command = "sh configure --disable-shared-libs" output = self._RunCommand(command) print_line = False for line in output.split("\n"): line = line.rstrip() if line == "configure:": print_line = True if print_line: print(line) self.define = [ ("HAVE_CONFIG_H", ""), ("LOCALEDIR", "\"/usr/share/locale\""), ] build_ext.run(self)
python
def run(self): """Runs the build extension.""" compiler = new_compiler(compiler=self.compiler) if compiler.compiler_type == "msvc": self.define = [ ("UNICODE", ""), ] else: command = "sh configure --disable-shared-libs" output = self._RunCommand(command) print_line = False for line in output.split("\n"): line = line.rstrip() if line == "configure:": print_line = True if print_line: print(line) self.define = [ ("HAVE_CONFIG_H", ""), ("LOCALEDIR", "\"/usr/share/locale\""), ] build_ext.run(self)
[ "def", "run", "(", "self", ")", ":", "compiler", "=", "new_compiler", "(", "compiler", "=", "self", ".", "compiler", ")", "if", "compiler", ".", "compiler_type", "==", "\"msvc\"", ":", "self", ".", "define", "=", "[", "(", "\"UNICODE\"", ",", "\"\"", ")", ",", "]", "else", ":", "command", "=", "\"sh configure --disable-shared-libs\"", "output", "=", "self", ".", "_RunCommand", "(", "command", ")", "print_line", "=", "False", "for", "line", "in", "output", ".", "split", "(", "\"\\n\"", ")", ":", "line", "=", "line", ".", "rstrip", "(", ")", "if", "line", "==", "\"configure:\"", ":", "print_line", "=", "True", "if", "print_line", ":", "print", "(", "line", ")", "self", ".", "define", "=", "[", "(", "\"HAVE_CONFIG_H\"", ",", "\"\"", ")", ",", "(", "\"LOCALEDIR\"", ",", "\"\\\"/usr/share/locale\\\"\"", ")", ",", "]", "build_ext", ".", "run", "(", "self", ")" ]
Runs the build extension.
[ "Runs", "the", "build", "extension", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L82-L108
train
libyal/libbde
setup.py
ProjectInformation._ReadConfigureAc
def _ReadConfigureAc(self): """Reads configure.ac to initialize the project information.""" file_object = open("configure.ac", "rb") if not file_object: raise IOError("Unable to open: configure.ac") found_ac_init = False found_library_name = False for line in file_object.readlines(): line = line.strip() if found_library_name: library_version = line[1:-2] if sys.version_info[0] >= 3: library_version = library_version.decode("ascii") self.library_version = library_version break elif found_ac_init: library_name = line[1:-2] if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.library_name = library_name found_library_name = True elif line.startswith(b"AC_INIT"): found_ac_init = True file_object.close() if not self.library_name or not self.library_version: raise RuntimeError( "Unable to find library name and version in: configure.ac")
python
def _ReadConfigureAc(self): """Reads configure.ac to initialize the project information.""" file_object = open("configure.ac", "rb") if not file_object: raise IOError("Unable to open: configure.ac") found_ac_init = False found_library_name = False for line in file_object.readlines(): line = line.strip() if found_library_name: library_version = line[1:-2] if sys.version_info[0] >= 3: library_version = library_version.decode("ascii") self.library_version = library_version break elif found_ac_init: library_name = line[1:-2] if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.library_name = library_name found_library_name = True elif line.startswith(b"AC_INIT"): found_ac_init = True file_object.close() if not self.library_name or not self.library_version: raise RuntimeError( "Unable to find library name and version in: configure.ac")
[ "def", "_ReadConfigureAc", "(", "self", ")", ":", "file_object", "=", "open", "(", "\"configure.ac\"", ",", "\"rb\"", ")", "if", "not", "file_object", ":", "raise", "IOError", "(", "\"Unable to open: configure.ac\"", ")", "found_ac_init", "=", "False", "found_library_name", "=", "False", "for", "line", "in", "file_object", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "found_library_name", ":", "library_version", "=", "line", "[", "1", ":", "-", "2", "]", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "library_version", "=", "library_version", ".", "decode", "(", "\"ascii\"", ")", "self", ".", "library_version", "=", "library_version", "break", "elif", "found_ac_init", ":", "library_name", "=", "line", "[", "1", ":", "-", "2", "]", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "library_name", "=", "library_name", ".", "decode", "(", "\"ascii\"", ")", "self", ".", "library_name", "=", "library_name", "found_library_name", "=", "True", "elif", "line", ".", "startswith", "(", "b\"AC_INIT\"", ")", ":", "found_ac_init", "=", "True", "file_object", ".", "close", "(", ")", "if", "not", "self", ".", "library_name", "or", "not", "self", ".", "library_version", ":", "raise", "RuntimeError", "(", "\"Unable to find library name and version in: configure.ac\"", ")" ]
Reads configure.ac to initialize the project information.
[ "Reads", "configure", ".", "ac", "to", "initialize", "the", "project", "information", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L198-L229
train
libyal/libbde
setup.py
ProjectInformation._ReadMakefileAm
def _ReadMakefileAm(self): """Reads Makefile.am to initialize the project information.""" if not self.library_name: raise RuntimeError("Missing library name") file_object = open("Makefile.am", "rb") if not file_object: raise IOError("Unable to open: Makefile.am") found_subdirs = False for line in file_object.readlines(): line = line.strip() if found_subdirs: library_name, _, _ = line.partition(b" ") if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.include_directories.append(library_name) if library_name.startswith("lib"): self.library_names.append(library_name) if library_name == self.library_name: break elif line.startswith(b"SUBDIRS"): found_subdirs = True file_object.close() if not self.include_directories or not self.library_names: raise RuntimeError( "Unable to find include directories and library names in: " "Makefile.am")
python
def _ReadMakefileAm(self): """Reads Makefile.am to initialize the project information.""" if not self.library_name: raise RuntimeError("Missing library name") file_object = open("Makefile.am", "rb") if not file_object: raise IOError("Unable to open: Makefile.am") found_subdirs = False for line in file_object.readlines(): line = line.strip() if found_subdirs: library_name, _, _ = line.partition(b" ") if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.include_directories.append(library_name) if library_name.startswith("lib"): self.library_names.append(library_name) if library_name == self.library_name: break elif line.startswith(b"SUBDIRS"): found_subdirs = True file_object.close() if not self.include_directories or not self.library_names: raise RuntimeError( "Unable to find include directories and library names in: " "Makefile.am")
[ "def", "_ReadMakefileAm", "(", "self", ")", ":", "if", "not", "self", ".", "library_name", ":", "raise", "RuntimeError", "(", "\"Missing library name\"", ")", "file_object", "=", "open", "(", "\"Makefile.am\"", ",", "\"rb\"", ")", "if", "not", "file_object", ":", "raise", "IOError", "(", "\"Unable to open: Makefile.am\"", ")", "found_subdirs", "=", "False", "for", "line", "in", "file_object", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "found_subdirs", ":", "library_name", ",", "_", ",", "_", "=", "line", ".", "partition", "(", "b\" \"", ")", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "library_name", "=", "library_name", ".", "decode", "(", "\"ascii\"", ")", "self", ".", "include_directories", ".", "append", "(", "library_name", ")", "if", "library_name", ".", "startswith", "(", "\"lib\"", ")", ":", "self", ".", "library_names", ".", "append", "(", "library_name", ")", "if", "library_name", "==", "self", ".", "library_name", ":", "break", "elif", "line", ".", "startswith", "(", "b\"SUBDIRS\"", ")", ":", "found_subdirs", "=", "True", "file_object", ".", "close", "(", ")", "if", "not", "self", ".", "include_directories", "or", "not", "self", ".", "library_names", ":", "raise", "RuntimeError", "(", "\"Unable to find include directories and library names in: \"", "\"Makefile.am\"", ")" ]
Reads Makefile.am to initialize the project information.
[ "Reads", "Makefile", ".", "am", "to", "initialize", "the", "project", "information", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L231-L264
train
amol-/dukpy
dukpy/babel.py
babel_compile
def babel_compile(source, **kwargs): """Compiles the given ``source`` from ES6 to ES5 using Babeljs""" presets = kwargs.get('presets') if not presets: kwargs['presets'] = ["es2015"] with open(BABEL_COMPILER, 'rb') as babel_js: return evaljs( (babel_js.read().decode('utf-8'), 'var bres, res;' 'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);', 'res = {map: bres.map, code: bres.code};'), es6code=source, babel_options=kwargs )
python
def babel_compile(source, **kwargs): """Compiles the given ``source`` from ES6 to ES5 using Babeljs""" presets = kwargs.get('presets') if not presets: kwargs['presets'] = ["es2015"] with open(BABEL_COMPILER, 'rb') as babel_js: return evaljs( (babel_js.read().decode('utf-8'), 'var bres, res;' 'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);', 'res = {map: bres.map, code: bres.code};'), es6code=source, babel_options=kwargs )
[ "def", "babel_compile", "(", "source", ",", "*", "*", "kwargs", ")", ":", "presets", "=", "kwargs", ".", "get", "(", "'presets'", ")", "if", "not", "presets", ":", "kwargs", "[", "'presets'", "]", "=", "[", "\"es2015\"", "]", "with", "open", "(", "BABEL_COMPILER", ",", "'rb'", ")", "as", "babel_js", ":", "return", "evaljs", "(", "(", "babel_js", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ",", "'var bres, res;'", "'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);'", ",", "'res = {map: bres.map, code: bres.code};'", ")", ",", "es6code", "=", "source", ",", "babel_options", "=", "kwargs", ")" ]
Compiles the given ``source`` from ES6 to ES5 using Babeljs
[ "Compiles", "the", "given", "source", "from", "ES6", "to", "ES5", "using", "Babeljs" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/babel.py#L7-L20
train
amol-/dukpy
dukpy/coffee.py
coffee_compile
def coffee_compile(source): """Compiles the given ``source`` from CoffeeScript to JavaScript""" with open(COFFEE_COMPILER, 'rb') as coffeescript_js: return evaljs( (coffeescript_js.read().decode('utf-8'), 'CoffeeScript.compile(dukpy.coffeecode)'), coffeecode=source )
python
def coffee_compile(source): """Compiles the given ``source`` from CoffeeScript to JavaScript""" with open(COFFEE_COMPILER, 'rb') as coffeescript_js: return evaljs( (coffeescript_js.read().decode('utf-8'), 'CoffeeScript.compile(dukpy.coffeecode)'), coffeecode=source )
[ "def", "coffee_compile", "(", "source", ")", ":", "with", "open", "(", "COFFEE_COMPILER", ",", "'rb'", ")", "as", "coffeescript_js", ":", "return", "evaljs", "(", "(", "coffeescript_js", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ",", "'CoffeeScript.compile(dukpy.coffeecode)'", ")", ",", "coffeecode", "=", "source", ")" ]
Compiles the given ``source`` from CoffeeScript to JavaScript
[ "Compiles", "the", "given", "source", "from", "CoffeeScript", "to", "JavaScript" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/coffee.py#L7-L14
train
amol-/dukpy
dukpy/module_loader.py
JSModuleLoader.register_path
def register_path(self, path): """Registers a directory where to look for modules. By default only modules relative to current path are found. """ self._paths.insert(0, os.path.abspath(path))
python
def register_path(self, path): """Registers a directory where to look for modules. By default only modules relative to current path are found. """ self._paths.insert(0, os.path.abspath(path))
[ "def", "register_path", "(", "self", ",", "path", ")", ":", "self", ".", "_paths", ".", "insert", "(", "0", ",", "os", ".", "path", ".", "abspath", "(", "path", ")", ")" ]
Registers a directory where to look for modules. By default only modules relative to current path are found.
[ "Registers", "a", "directory", "where", "to", "look", "for", "modules", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/module_loader.py#L20-L25
train
amol-/dukpy
dukpy/module_loader.py
JSModuleLoader.lookup
def lookup(self, module_name): """Searches for a file providing given module. Returns the normalized module id and path of the file. """ for search_path in self._paths: module_path = os.path.join(search_path, module_name) new_module_name, module_file = self._lookup(module_path, module_name) if module_file: return new_module_name, module_file return None, None
python
def lookup(self, module_name): """Searches for a file providing given module. Returns the normalized module id and path of the file. """ for search_path in self._paths: module_path = os.path.join(search_path, module_name) new_module_name, module_file = self._lookup(module_path, module_name) if module_file: return new_module_name, module_file return None, None
[ "def", "lookup", "(", "self", ",", "module_name", ")", ":", "for", "search_path", "in", "self", ".", "_paths", ":", "module_path", "=", "os", ".", "path", ".", "join", "(", "search_path", ",", "module_name", ")", "new_module_name", ",", "module_file", "=", "self", ".", "_lookup", "(", "module_path", ",", "module_name", ")", "if", "module_file", ":", "return", "new_module_name", ",", "module_file", "return", "None", ",", "None" ]
Searches for a file providing given module. Returns the normalized module id and path of the file.
[ "Searches", "for", "a", "file", "providing", "given", "module", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/module_loader.py#L27-L37
train
amol-/dukpy
dukpy/module_loader.py
JSModuleLoader.load
def load(self, module_name): """Returns source code and normalized module id of the given module. Only supports source code files encoded as UTF-8 """ module_name, path = self.lookup(module_name) if path: with open(path, 'rb') as f: return module_name, f.read().decode('utf-8') return None, None
python
def load(self, module_name): """Returns source code and normalized module id of the given module. Only supports source code files encoded as UTF-8 """ module_name, path = self.lookup(module_name) if path: with open(path, 'rb') as f: return module_name, f.read().decode('utf-8') return None, None
[ "def", "load", "(", "self", ",", "module_name", ")", ":", "module_name", ",", "path", "=", "self", ".", "lookup", "(", "module_name", ")", "if", "path", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "return", "module_name", ",", "f", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "return", "None", ",", "None" ]
Returns source code and normalized module id of the given module. Only supports source code files encoded as UTF-8
[ "Returns", "source", "code", "and", "normalized", "module", "id", "of", "the", "given", "module", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/module_loader.py#L39-L48
train
amol-/dukpy
dukpy/lessc.py
less_compile
def less_compile(source, options=None): """Compiles the given ``source`` from LESS to CSS""" options = options or {} res = NodeLikeInterpreter().evaljs( ('var result = null;' 'var less = require("less/less-node");', 'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {' ' result = {"error": error, "output": output};' '});' 'result;'), lesscode=source, lessoptions=options ) if not res: raise RuntimeError('Results or errors unavailable') if res.get('error'): raise LessCompilerError(res['error']['message']) return res['output']['css']
python
def less_compile(source, options=None): """Compiles the given ``source`` from LESS to CSS""" options = options or {} res = NodeLikeInterpreter().evaljs( ('var result = null;' 'var less = require("less/less-node");', 'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {' ' result = {"error": error, "output": output};' '});' 'result;'), lesscode=source, lessoptions=options ) if not res: raise RuntimeError('Results or errors unavailable') if res.get('error'): raise LessCompilerError(res['error']['message']) return res['output']['css']
[ "def", "less_compile", "(", "source", ",", "options", "=", "None", ")", ":", "options", "=", "options", "or", "{", "}", "res", "=", "NodeLikeInterpreter", "(", ")", ".", "evaljs", "(", "(", "'var result = null;'", "'var less = require(\"less/less-node\");'", ",", "'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {'", "' result = {\"error\": error, \"output\": output};'", "'});'", "'result;'", ")", ",", "lesscode", "=", "source", ",", "lessoptions", "=", "options", ")", "if", "not", "res", ":", "raise", "RuntimeError", "(", "'Results or errors unavailable'", ")", "if", "res", ".", "get", "(", "'error'", ")", ":", "raise", "LessCompilerError", "(", "res", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "res", "[", "'output'", "]", "[", "'css'", "]" ]
Compiles the given ``source`` from LESS to CSS
[ "Compiles", "the", "given", "source", "from", "LESS", "to", "CSS" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/lessc.py#L4-L23
train
amol-/dukpy
dukpy/install.py
install_jspackage
def install_jspackage(package_name, version, modulesdir): """Installs a JavaScript package downloaded from npmjs.org. For example to install React:: install_jspackage('react', '0.14.8', './node_modules') To install last version provide `None` as the version. """ if not version: version = '' requirements = _resolve_dependencies(package_name, version) print('Packages going to be installed: {0}'.format(', '.join( '{0}->{1}'.format(*i) for i in requirements ))) downloads = {} for dependency_name, _, version_info in requirements: try: downloads[dependency_name] = version_info['dist']['tarball'] except KeyError: raise JSPackageInstallError('Unable to detect a supported download url for package', error_code=3) for dependency_name, download_url in downloads.items(): tarball = BytesIO() print('Fetching {0}'.format(download_url), end='') with closing(urlopen(download_url)) as data: chunk = data.read(1024) while chunk: print('.', end='') tarball.write(chunk) chunk = data.read(1024) print('') tarball.seek(0) with closing(tarfile.open(fileobj=tarball)) as tb: dest = os.path.join(modulesdir, dependency_name) tmpdir = tempfile.mkdtemp() try: tb.extractall(tmpdir) shutil.rmtree(os.path.abspath(dest), ignore_errors=True) shutil.move(os.path.join(tmpdir, 'package'), os.path.abspath(dest)) finally: shutil.rmtree(tmpdir) print('Installing {0} in {1} Done!'.format(package_name, modulesdir))
python
def install_jspackage(package_name, version, modulesdir): """Installs a JavaScript package downloaded from npmjs.org. For example to install React:: install_jspackage('react', '0.14.8', './node_modules') To install last version provide `None` as the version. """ if not version: version = '' requirements = _resolve_dependencies(package_name, version) print('Packages going to be installed: {0}'.format(', '.join( '{0}->{1}'.format(*i) for i in requirements ))) downloads = {} for dependency_name, _, version_info in requirements: try: downloads[dependency_name] = version_info['dist']['tarball'] except KeyError: raise JSPackageInstallError('Unable to detect a supported download url for package', error_code=3) for dependency_name, download_url in downloads.items(): tarball = BytesIO() print('Fetching {0}'.format(download_url), end='') with closing(urlopen(download_url)) as data: chunk = data.read(1024) while chunk: print('.', end='') tarball.write(chunk) chunk = data.read(1024) print('') tarball.seek(0) with closing(tarfile.open(fileobj=tarball)) as tb: dest = os.path.join(modulesdir, dependency_name) tmpdir = tempfile.mkdtemp() try: tb.extractall(tmpdir) shutil.rmtree(os.path.abspath(dest), ignore_errors=True) shutil.move(os.path.join(tmpdir, 'package'), os.path.abspath(dest)) finally: shutil.rmtree(tmpdir) print('Installing {0} in {1} Done!'.format(package_name, modulesdir))
[ "def", "install_jspackage", "(", "package_name", ",", "version", ",", "modulesdir", ")", ":", "if", "not", "version", ":", "version", "=", "''", "requirements", "=", "_resolve_dependencies", "(", "package_name", ",", "version", ")", "print", "(", "'Packages going to be installed: {0}'", ".", "format", "(", "', '", ".", "join", "(", "'{0}->{1}'", ".", "format", "(", "*", "i", ")", "for", "i", "in", "requirements", ")", ")", ")", "downloads", "=", "{", "}", "for", "dependency_name", ",", "_", ",", "version_info", "in", "requirements", ":", "try", ":", "downloads", "[", "dependency_name", "]", "=", "version_info", "[", "'dist'", "]", "[", "'tarball'", "]", "except", "KeyError", ":", "raise", "JSPackageInstallError", "(", "'Unable to detect a supported download url for package'", ",", "error_code", "=", "3", ")", "for", "dependency_name", ",", "download_url", "in", "downloads", ".", "items", "(", ")", ":", "tarball", "=", "BytesIO", "(", ")", "print", "(", "'Fetching {0}'", ".", "format", "(", "download_url", ")", ",", "end", "=", "''", ")", "with", "closing", "(", "urlopen", "(", "download_url", ")", ")", "as", "data", ":", "chunk", "=", "data", ".", "read", "(", "1024", ")", "while", "chunk", ":", "print", "(", "'.'", ",", "end", "=", "''", ")", "tarball", ".", "write", "(", "chunk", ")", "chunk", "=", "data", ".", "read", "(", "1024", ")", "print", "(", "''", ")", "tarball", ".", "seek", "(", "0", ")", "with", "closing", "(", "tarfile", ".", "open", "(", "fileobj", "=", "tarball", ")", ")", "as", "tb", ":", "dest", "=", "os", ".", "path", ".", "join", "(", "modulesdir", ",", "dependency_name", ")", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "try", ":", "tb", ".", "extractall", "(", "tmpdir", ")", "shutil", ".", "rmtree", "(", "os", ".", "path", ".", "abspath", "(", "dest", ")", ",", "ignore_errors", "=", "True", ")", "shutil", ".", "move", "(", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'package'", ")", ",", "os", ".", "path", ".", "abspath", 
"(", "dest", ")", ")", "finally", ":", "shutil", ".", "rmtree", "(", "tmpdir", ")", "print", "(", "'Installing {0} in {1} Done!'", ".", "format", "(", "package_name", ",", "modulesdir", ")", ")" ]
Installs a JavaScript package downloaded from npmjs.org. For example to install React:: install_jspackage('react', '0.14.8', './node_modules') To install last version provide `None` as the version.
[ "Installs", "a", "JavaScript", "package", "downloaded", "from", "npmjs", ".", "org", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/install.py#L39-L87
train
amol-/dukpy
dukpy/evaljs.py
JSInterpreter.evaljs
def evaljs(self, code, **kwargs): """Runs JavaScript code in the context of the interpreter. All arguments will be converted to plain javascript objects through the JSON encoder and will be available in `dukpy` global object. Returns the last object on javascript stack. """ jsvars = json.dumps(kwargs) jscode = self._adapt_code(code) if not isinstance(jscode, bytes): jscode = jscode.encode('utf-8') if not isinstance(jsvars, bytes): jsvars = jsvars.encode('utf-8') res = _dukpy.eval_string(self, jscode, jsvars) if res is None: return None return json.loads(res.decode('utf-8'))
python
def evaljs(self, code, **kwargs): """Runs JavaScript code in the context of the interpreter. All arguments will be converted to plain javascript objects through the JSON encoder and will be available in `dukpy` global object. Returns the last object on javascript stack. """ jsvars = json.dumps(kwargs) jscode = self._adapt_code(code) if not isinstance(jscode, bytes): jscode = jscode.encode('utf-8') if not isinstance(jsvars, bytes): jsvars = jsvars.encode('utf-8') res = _dukpy.eval_string(self, jscode, jsvars) if res is None: return None return json.loads(res.decode('utf-8'))
[ "def", "evaljs", "(", "self", ",", "code", ",", "*", "*", "kwargs", ")", ":", "jsvars", "=", "json", ".", "dumps", "(", "kwargs", ")", "jscode", "=", "self", ".", "_adapt_code", "(", "code", ")", "if", "not", "isinstance", "(", "jscode", ",", "bytes", ")", ":", "jscode", "=", "jscode", ".", "encode", "(", "'utf-8'", ")", "if", "not", "isinstance", "(", "jsvars", ",", "bytes", ")", ":", "jsvars", "=", "jsvars", ".", "encode", "(", "'utf-8'", ")", "res", "=", "_dukpy", ".", "eval_string", "(", "self", ",", "jscode", ",", "jsvars", ")", "if", "res", "is", "None", ":", "return", "None", "return", "json", ".", "loads", "(", "res", ".", "decode", "(", "'utf-8'", ")", ")" ]
Runs JavaScript code in the context of the interpreter. All arguments will be converted to plain javascript objects through the JSON encoder and will be available in `dukpy` global object. Returns the last object on javascript stack.
[ "Runs", "JavaScript", "code", "in", "the", "context", "of", "the", "interpreter", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/evaljs.py#L39-L61
train
amol-/dukpy
dukpy/tsc.py
typescript_compile
def typescript_compile(source): """Compiles the given ``source`` from TypeScript to ES5 using TypescriptServices.js""" with open(TS_COMPILER, 'r') as tsservices_js: return evaljs( (tsservices_js.read(), 'ts.transpile(dukpy.tscode, {options});'.format(options=TSC_OPTIONS)), tscode=source )
python
def typescript_compile(source): """Compiles the given ``source`` from TypeScript to ES5 using TypescriptServices.js""" with open(TS_COMPILER, 'r') as tsservices_js: return evaljs( (tsservices_js.read(), 'ts.transpile(dukpy.tscode, {options});'.format(options=TSC_OPTIONS)), tscode=source )
[ "def", "typescript_compile", "(", "source", ")", ":", "with", "open", "(", "TS_COMPILER", ",", "'r'", ")", "as", "tsservices_js", ":", "return", "evaljs", "(", "(", "tsservices_js", ".", "read", "(", ")", ",", "'ts.transpile(dukpy.tscode, {options});'", ".", "format", "(", "options", "=", "TSC_OPTIONS", ")", ")", ",", "tscode", "=", "source", ")" ]
Compiles the given ``source`` from TypeScript to ES5 using TypescriptServices.js
[ "Compiles", "the", "given", "source", "from", "TypeScript", "to", "ES5", "using", "TypescriptServices", ".", "js" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/tsc.py#L8-L15
train
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.get_private_file
def get_private_file(self): """ Return all relevant data in a single object, so this is easy to extend and server implementations can pick what they need. """ return PrivateFile( request=self.request, storage=self.get_storage(), relative_name=self.get_path() )
python
def get_private_file(self): """ Return all relevant data in a single object, so this is easy to extend and server implementations can pick what they need. """ return PrivateFile( request=self.request, storage=self.get_storage(), relative_name=self.get_path() )
[ "def", "get_private_file", "(", "self", ")", ":", "return", "PrivateFile", "(", "request", "=", "self", ".", "request", ",", "storage", "=", "self", ".", "get_storage", "(", ")", ",", "relative_name", "=", "self", ".", "get_path", "(", ")", ")" ]
Return all relevant data in a single object, so this is easy to extend and server implementations can pick what they need.
[ "Return", "all", "relevant", "data", "in", "a", "single", "object", "so", "this", "is", "easy", "to", "extend", "and", "server", "implementations", "can", "pick", "what", "they", "need", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L55-L64
train
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.get
def get(self, request, *args, **kwargs): """ Handle incoming GET requests """ private_file = self.get_private_file() if not self.can_access_file(private_file): return HttpResponseForbidden('Private storage access denied') if not private_file.exists(): return self.serve_file_not_found(private_file) else: return self.serve_file(private_file)
python
def get(self, request, *args, **kwargs): """ Handle incoming GET requests """ private_file = self.get_private_file() if not self.can_access_file(private_file): return HttpResponseForbidden('Private storage access denied') if not private_file.exists(): return self.serve_file_not_found(private_file) else: return self.serve_file(private_file)
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "private_file", "=", "self", ".", "get_private_file", "(", ")", "if", "not", "self", ".", "can_access_file", "(", "private_file", ")", ":", "return", "HttpResponseForbidden", "(", "'Private storage access denied'", ")", "if", "not", "private_file", ".", "exists", "(", ")", ":", "return", "self", ".", "serve_file_not_found", "(", "private_file", ")", "else", ":", "return", "self", ".", "serve_file", "(", "private_file", ")" ]
Handle incoming GET requests
[ "Handle", "incoming", "GET", "requests" ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L66-L78
train
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.serve_file
def serve_file(self, private_file): """ Serve the file that was retrieved from the storage. The relative path can be found with ``private_file.relative_name``. :type private_file: :class:`private_storage.models.PrivateFile` :rtype: django.http.HttpResponse """ response = self.server_class().serve(private_file) if self.content_disposition: # Join syntax works in all Python versions. Python 3 doesn't support b'..'.format(), # and % formatting was added for bytes in 3.5: https://bugs.python.org/issue3982 filename = self.get_content_disposition_filename(private_file) response['Content-Disposition'] = b'; '.join([ self.content_disposition.encode(), self._encode_filename_header(filename) ]) return response
python
def serve_file(self, private_file): """ Serve the file that was retrieved from the storage. The relative path can be found with ``private_file.relative_name``. :type private_file: :class:`private_storage.models.PrivateFile` :rtype: django.http.HttpResponse """ response = self.server_class().serve(private_file) if self.content_disposition: # Join syntax works in all Python versions. Python 3 doesn't support b'..'.format(), # and % formatting was added for bytes in 3.5: https://bugs.python.org/issue3982 filename = self.get_content_disposition_filename(private_file) response['Content-Disposition'] = b'; '.join([ self.content_disposition.encode(), self._encode_filename_header(filename) ]) return response
[ "def", "serve_file", "(", "self", ",", "private_file", ")", ":", "response", "=", "self", ".", "server_class", "(", ")", ".", "serve", "(", "private_file", ")", "if", "self", ".", "content_disposition", ":", "# Join syntax works in all Python versions. Python 3 doesn't support b'..'.format(),", "# and % formatting was added for bytes in 3.5: https://bugs.python.org/issue3982", "filename", "=", "self", ".", "get_content_disposition_filename", "(", "private_file", ")", "response", "[", "'Content-Disposition'", "]", "=", "b'; '", ".", "join", "(", "[", "self", ".", "content_disposition", ".", "encode", "(", ")", ",", "self", ".", "_encode_filename_header", "(", "filename", ")", "]", ")", "return", "response" ]
Serve the file that was retrieved from the storage. The relative path can be found with ``private_file.relative_name``. :type private_file: :class:`private_storage.models.PrivateFile` :rtype: django.http.HttpResponse
[ "Serve", "the", "file", "that", "was", "retrieved", "from", "the", "storage", ".", "The", "relative", "path", "can", "be", "found", "with", "private_file", ".", "relative_name", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L94-L112
train
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.get_content_disposition_filename
def get_content_disposition_filename(self, private_file): """ Return the filename in the download header. """ return self.content_disposition_filename or os.path.basename(private_file.relative_name)
python
def get_content_disposition_filename(self, private_file): """ Return the filename in the download header. """ return self.content_disposition_filename or os.path.basename(private_file.relative_name)
[ "def", "get_content_disposition_filename", "(", "self", ",", "private_file", ")", ":", "return", "self", ".", "content_disposition_filename", "or", "os", ".", "path", ".", "basename", "(", "private_file", ".", "relative_name", ")" ]
Return the filename in the download header.
[ "Return", "the", "filename", "in", "the", "download", "header", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L114-L118
train
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView._encode_filename_header
def _encode_filename_header(self, filename): """ The filename, encoded to use in a ``Content-Disposition`` header. """ # Based on https://www.djangosnippets.org/snippets/1710/ user_agent = self.request.META.get('HTTP_USER_AGENT', None) if 'WebKit' in user_agent: # Support available for UTF-8 encoded strings. # This also matches Edgee. return u'filename={}'.format(filename).encode("utf-8") elif 'MSIE' in user_agent: # IE does not support RFC2231 for internationalized headers, but somehow # percent-decodes it so this can be used instead. Note that using the word # "attachment" anywhere in the filename overrides an inline content-disposition. url_encoded = quote(filename.encode("utf-8")).replace('attachment', "a%74tachment") return "filename={}".format(url_encoded).encode("utf-8") else: # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers). rfc2231_filename = quote(filename.encode("utf-8")) return "filename*=UTF-8''{}".format(rfc2231_filename).encode("utf-8")
python
def _encode_filename_header(self, filename): """ The filename, encoded to use in a ``Content-Disposition`` header. """ # Based on https://www.djangosnippets.org/snippets/1710/ user_agent = self.request.META.get('HTTP_USER_AGENT', None) if 'WebKit' in user_agent: # Support available for UTF-8 encoded strings. # This also matches Edgee. return u'filename={}'.format(filename).encode("utf-8") elif 'MSIE' in user_agent: # IE does not support RFC2231 for internationalized headers, but somehow # percent-decodes it so this can be used instead. Note that using the word # "attachment" anywhere in the filename overrides an inline content-disposition. url_encoded = quote(filename.encode("utf-8")).replace('attachment', "a%74tachment") return "filename={}".format(url_encoded).encode("utf-8") else: # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers). rfc2231_filename = quote(filename.encode("utf-8")) return "filename*=UTF-8''{}".format(rfc2231_filename).encode("utf-8")
[ "def", "_encode_filename_header", "(", "self", ",", "filename", ")", ":", "# Based on https://www.djangosnippets.org/snippets/1710/", "user_agent", "=", "self", ".", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "None", ")", "if", "'WebKit'", "in", "user_agent", ":", "# Support available for UTF-8 encoded strings.", "# This also matches Edgee.", "return", "u'filename={}'", ".", "format", "(", "filename", ")", ".", "encode", "(", "\"utf-8\"", ")", "elif", "'MSIE'", "in", "user_agent", ":", "# IE does not support RFC2231 for internationalized headers, but somehow", "# percent-decodes it so this can be used instead. Note that using the word", "# \"attachment\" anywhere in the filename overrides an inline content-disposition.", "url_encoded", "=", "quote", "(", "filename", ".", "encode", "(", "\"utf-8\"", ")", ")", ".", "replace", "(", "'attachment'", ",", "\"a%74tachment\"", ")", "return", "\"filename={}\"", ".", "format", "(", "url_encoded", ")", ".", "encode", "(", "\"utf-8\"", ")", "else", ":", "# For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers).", "rfc2231_filename", "=", "quote", "(", "filename", ".", "encode", "(", "\"utf-8\"", ")", ")", "return", "\"filename*=UTF-8''{}\"", ".", "format", "(", "rfc2231_filename", ")", ".", "encode", "(", "\"utf-8\"", ")" ]
The filename, encoded to use in a ``Content-Disposition`` header.
[ "The", "filename", "encoded", "to", "use", "in", "a", "Content", "-", "Disposition", "header", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L120-L139
train
edoburu/django-private-storage
private_storage/servers.py
add_no_cache_headers
def add_no_cache_headers(func): """ Makes sure the retrieved file is not cached on disk, or cached by proxy servers in between. This would circumvent any checking whether the user may even access the file. """ @wraps(func) def _dec(*args, **kwargs): response = func(*args, **kwargs) response['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT' # HTTP 1.0 proxies response['Cache-Control'] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate' # HTTP 1.1 return response return _dec
python
def add_no_cache_headers(func): """ Makes sure the retrieved file is not cached on disk, or cached by proxy servers in between. This would circumvent any checking whether the user may even access the file. """ @wraps(func) def _dec(*args, **kwargs): response = func(*args, **kwargs) response['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT' # HTTP 1.0 proxies response['Cache-Control'] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate' # HTTP 1.1 return response return _dec
[ "def", "add_no_cache_headers", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_dec", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "response", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "response", "[", "'Expires'", "]", "=", "'Thu, 01 Jan 1970 00:00:00 GMT'", "# HTTP 1.0 proxies", "response", "[", "'Cache-Control'", "]", "=", "'max-age=0, no-cache, must-revalidate, proxy-revalidate'", "# HTTP 1.1", "return", "response", "return", "_dec" ]
Makes sure the retrieved file is not cached on disk, or cached by proxy servers in between. This would circumvent any checking whether the user may even access the file.
[ "Makes", "sure", "the", "retrieved", "file", "is", "not", "cached", "on", "disk", "or", "cached", "by", "proxy", "servers", "in", "between", ".", "This", "would", "circumvent", "any", "checking", "whether", "the", "user", "may", "even", "access", "the", "file", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/servers.py#L43-L56
train
datamade/parserator
parserator/training.py
readTrainingData
def readTrainingData(file_locations, GROUP_LABEL): ''' Used in downstream tests ''' class Mock(object): pass mock_module = Mock() mock_module.PARENT_LABEL = GROUP_LABEL for location in file_locations: with open(location) as f: tree = etree.parse(f) xml = tree.getroot() for each in data_prep_utils.TrainingData(xml, mock_module): yield each
python
def readTrainingData(file_locations, GROUP_LABEL): ''' Used in downstream tests ''' class Mock(object): pass mock_module = Mock() mock_module.PARENT_LABEL = GROUP_LABEL for location in file_locations: with open(location) as f: tree = etree.parse(f) xml = tree.getroot() for each in data_prep_utils.TrainingData(xml, mock_module): yield each
[ "def", "readTrainingData", "(", "file_locations", ",", "GROUP_LABEL", ")", ":", "class", "Mock", "(", "object", ")", ":", "pass", "mock_module", "=", "Mock", "(", ")", "mock_module", ".", "PARENT_LABEL", "=", "GROUP_LABEL", "for", "location", "in", "file_locations", ":", "with", "open", "(", "location", ")", "as", "f", ":", "tree", "=", "etree", ".", "parse", "(", "f", ")", "xml", "=", "tree", ".", "getroot", "(", ")", "for", "each", "in", "data_prep_utils", ".", "TrainingData", "(", "xml", ",", "mock_module", ")", ":", "yield", "each" ]
Used in downstream tests
[ "Used", "in", "downstream", "tests" ]
4dc69b0d115bf33e2d169ff40b05143257a5f481
https://github.com/datamade/parserator/blob/4dc69b0d115bf33e2d169ff40b05143257a5f481/parserator/training.py#L58-L72
train
Bouke/django-user-sessions
user_sessions/templatetags/user_sessions.py
device
def device(value): """ Transform a User Agent into human readable text. Example output: * Safari on iPhone * Chrome on Windows 8.1 * Safari on OS X * Firefox * Linux * None """ browser = None for regex, name in BROWSERS: if regex.search(value): browser = name break device = None for regex, name in DEVICES: if regex.search(value): device = name break if browser and device: return _('%(browser)s on %(device)s') % { 'browser': browser, 'device': device } if browser: return browser if device: return device return None
python
def device(value): """ Transform a User Agent into human readable text. Example output: * Safari on iPhone * Chrome on Windows 8.1 * Safari on OS X * Firefox * Linux * None """ browser = None for regex, name in BROWSERS: if regex.search(value): browser = name break device = None for regex, name in DEVICES: if regex.search(value): device = name break if browser and device: return _('%(browser)s on %(device)s') % { 'browser': browser, 'device': device } if browser: return browser if device: return device return None
[ "def", "device", "(", "value", ")", ":", "browser", "=", "None", "for", "regex", ",", "name", "in", "BROWSERS", ":", "if", "regex", ".", "search", "(", "value", ")", ":", "browser", "=", "name", "break", "device", "=", "None", "for", "regex", ",", "name", "in", "DEVICES", ":", "if", "regex", ".", "search", "(", "value", ")", ":", "device", "=", "name", "break", "if", "browser", "and", "device", ":", "return", "_", "(", "'%(browser)s on %(device)s'", ")", "%", "{", "'browser'", ":", "browser", ",", "'device'", ":", "device", "}", "if", "browser", ":", "return", "browser", "if", "device", ":", "return", "device", "return", "None" ]
Transform a User Agent into human readable text. Example output: * Safari on iPhone * Chrome on Windows 8.1 * Safari on OS X * Firefox * Linux * None
[ "Transform", "a", "User", "Agent", "into", "human", "readable", "text", "." ]
9362ad60d61b68faccac674e9aae030537ff821a
https://github.com/Bouke/django-user-sessions/blob/9362ad60d61b68faccac674e9aae030537ff821a/user_sessions/templatetags/user_sessions.py#L39-L77
train
Bouke/django-user-sessions
user_sessions/templatetags/user_sessions.py
location
def location(value): """ Transform an IP address into an approximate location. Example output: * Zwolle, The Netherlands * The Netherlands * None """ try: location = geoip() and geoip().city(value) except Exception: try: location = geoip() and geoip().country(value) except Exception as e: warnings.warn(str(e)) location = None if location and location['country_name']: if 'city' in location and location['city']: return '{}, {}'.format(location['city'], location['country_name']) return location['country_name'] return None
python
def location(value): """ Transform an IP address into an approximate location. Example output: * Zwolle, The Netherlands * The Netherlands * None """ try: location = geoip() and geoip().city(value) except Exception: try: location = geoip() and geoip().country(value) except Exception as e: warnings.warn(str(e)) location = None if location and location['country_name']: if 'city' in location and location['city']: return '{}, {}'.format(location['city'], location['country_name']) return location['country_name'] return None
[ "def", "location", "(", "value", ")", ":", "try", ":", "location", "=", "geoip", "(", ")", "and", "geoip", "(", ")", ".", "city", "(", "value", ")", "except", "Exception", ":", "try", ":", "location", "=", "geoip", "(", ")", "and", "geoip", "(", ")", ".", "country", "(", "value", ")", "except", "Exception", "as", "e", ":", "warnings", ".", "warn", "(", "str", "(", "e", ")", ")", "location", "=", "None", "if", "location", "and", "location", "[", "'country_name'", "]", ":", "if", "'city'", "in", "location", "and", "location", "[", "'city'", "]", ":", "return", "'{}, {}'", ".", "format", "(", "location", "[", "'city'", "]", ",", "location", "[", "'country_name'", "]", ")", "return", "location", "[", "'country_name'", "]", "return", "None" ]
Transform an IP address into an approximate location. Example output: * Zwolle, The Netherlands * The Netherlands * None
[ "Transform", "an", "IP", "address", "into", "an", "approximate", "location", "." ]
9362ad60d61b68faccac674e9aae030537ff821a
https://github.com/Bouke/django-user-sessions/blob/9362ad60d61b68faccac674e9aae030537ff821a/user_sessions/templatetags/user_sessions.py#L81-L103
train
dschep/lambda-decorators
lambda_decorators.py
before
def before(func): """ Run a function before the handler is invoked, is passed the event & context and must return an event & context too. Usage:: >>> # to create a reusable decorator >>> @before ... def print_request_id(event, context): ... print(context.aws_request_id) ... return event, context >>> @print_request_id ... def handler(event, context): ... pass >>> class Context: ... aws_request_id = 'ID!' >>> handler({}, Context()) ID! >>> # or a one off >>> @before(lambda e, c: (e['body'], c)) ... def handler(body, context): ... return body >>> handler({'body': 'BOOODYY'}, object()) 'BOOODYY' """ class BeforeDecorator(LambdaDecorator): def before(self, event, context): return func(event, context) return BeforeDecorator
python
def before(func): """ Run a function before the handler is invoked, is passed the event & context and must return an event & context too. Usage:: >>> # to create a reusable decorator >>> @before ... def print_request_id(event, context): ... print(context.aws_request_id) ... return event, context >>> @print_request_id ... def handler(event, context): ... pass >>> class Context: ... aws_request_id = 'ID!' >>> handler({}, Context()) ID! >>> # or a one off >>> @before(lambda e, c: (e['body'], c)) ... def handler(body, context): ... return body >>> handler({'body': 'BOOODYY'}, object()) 'BOOODYY' """ class BeforeDecorator(LambdaDecorator): def before(self, event, context): return func(event, context) return BeforeDecorator
[ "def", "before", "(", "func", ")", ":", "class", "BeforeDecorator", "(", "LambdaDecorator", ")", ":", "def", "before", "(", "self", ",", "event", ",", "context", ")", ":", "return", "func", "(", "event", ",", "context", ")", "return", "BeforeDecorator" ]
Run a function before the handler is invoked, is passed the event & context and must return an event & context too. Usage:: >>> # to create a reusable decorator >>> @before ... def print_request_id(event, context): ... print(context.aws_request_id) ... return event, context >>> @print_request_id ... def handler(event, context): ... pass >>> class Context: ... aws_request_id = 'ID!' >>> handler({}, Context()) ID! >>> # or a one off >>> @before(lambda e, c: (e['body'], c)) ... def handler(body, context): ... return body >>> handler({'body': 'BOOODYY'}, object()) 'BOOODYY'
[ "Run", "a", "function", "before", "the", "handler", "is", "invoked", "is", "passed", "the", "event", "&", "context", "and", "must", "return", "an", "event", "&", "context", "too", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L234-L264
train
dschep/lambda-decorators
lambda_decorators.py
after
def after(func): """ Run a function after the handler is invoked, is passed the response and must return an response too. Usage:: >>> # to create a reusable decorator >>> @after ... def gnu_terry_pratchett(retval): ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval >>> @gnu_terry_pratchett ... def handler(event, context): ... return {'body': ''} >>> handler({}, object()) {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}} """ class AfterDecorator(LambdaDecorator): def after(self, retval): return func(retval) return AfterDecorator
python
def after(func): """ Run a function after the handler is invoked, is passed the response and must return an response too. Usage:: >>> # to create a reusable decorator >>> @after ... def gnu_terry_pratchett(retval): ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval >>> @gnu_terry_pratchett ... def handler(event, context): ... return {'body': ''} >>> handler({}, object()) {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}} """ class AfterDecorator(LambdaDecorator): def after(self, retval): return func(retval) return AfterDecorator
[ "def", "after", "(", "func", ")", ":", "class", "AfterDecorator", "(", "LambdaDecorator", ")", ":", "def", "after", "(", "self", ",", "retval", ")", ":", "return", "func", "(", "retval", ")", "return", "AfterDecorator" ]
Run a function after the handler is invoked, is passed the response and must return an response too. Usage:: >>> # to create a reusable decorator >>> @after ... def gnu_terry_pratchett(retval): ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval >>> @gnu_terry_pratchett ... def handler(event, context): ... return {'body': ''} >>> handler({}, object()) {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}}
[ "Run", "a", "function", "after", "the", "handler", "is", "invoked", "is", "passed", "the", "response", "and", "must", "return", "an", "response", "too", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L267-L289
train
dschep/lambda-decorators
lambda_decorators.py
on_exception
def on_exception(func): """ Run a function when a handler thows an exception. It's return value is returned to AWS. Usage:: >>> # to create a reusable decorator >>> @on_exception ... def handle_errors(exception): ... print(exception) ... return {'statusCode': 500, 'body': 'uh oh'} >>> @handle_errors ... def handler(event, context): ... raise Exception('it broke!') >>> handler({}, object()) it broke! {'statusCode': 500, 'body': 'uh oh'} >>> # or a one off >>> @on_exception(lambda e: {'statusCode': 500}) ... def handler(body, context): ... raise Exception >>> handler({}, object()) {'statusCode': 500} """ class OnExceptionDecorator(LambdaDecorator): def on_exception(self, exception): return func(exception) return OnExceptionDecorator
python
def on_exception(func): """ Run a function when a handler thows an exception. It's return value is returned to AWS. Usage:: >>> # to create a reusable decorator >>> @on_exception ... def handle_errors(exception): ... print(exception) ... return {'statusCode': 500, 'body': 'uh oh'} >>> @handle_errors ... def handler(event, context): ... raise Exception('it broke!') >>> handler({}, object()) it broke! {'statusCode': 500, 'body': 'uh oh'} >>> # or a one off >>> @on_exception(lambda e: {'statusCode': 500}) ... def handler(body, context): ... raise Exception >>> handler({}, object()) {'statusCode': 500} """ class OnExceptionDecorator(LambdaDecorator): def on_exception(self, exception): return func(exception) return OnExceptionDecorator
[ "def", "on_exception", "(", "func", ")", ":", "class", "OnExceptionDecorator", "(", "LambdaDecorator", ")", ":", "def", "on_exception", "(", "self", ",", "exception", ")", ":", "return", "func", "(", "exception", ")", "return", "OnExceptionDecorator" ]
Run a function when a handler thows an exception. It's return value is returned to AWS. Usage:: >>> # to create a reusable decorator >>> @on_exception ... def handle_errors(exception): ... print(exception) ... return {'statusCode': 500, 'body': 'uh oh'} >>> @handle_errors ... def handler(event, context): ... raise Exception('it broke!') >>> handler({}, object()) it broke! {'statusCode': 500, 'body': 'uh oh'} >>> # or a one off >>> @on_exception(lambda e: {'statusCode': 500}) ... def handler(body, context): ... raise Exception >>> handler({}, object()) {'statusCode': 500}
[ "Run", "a", "function", "when", "a", "handler", "thows", "an", "exception", ".", "It", "s", "return", "value", "is", "returned", "to", "AWS", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L292-L321
train
dschep/lambda-decorators
lambda_decorators.py
async_handler
def async_handler(handler): """ This decorator allows for use of async handlers by automatically running them in an event loop. The loop is added to the context object for if the handler needs it. Usage:: >>> from lambda_decorators import async_handler >>> async def foobar(): ... return 'foobar' >>> @async_handler ... async def handler(event, context): ... return await foobar() >>> class Context: ... pass >>> handler({}, Context()) 'foobar' *NOTE: Python 3 only* """ @wraps(handler) def wrapper(event, context): context.loop = asyncio.get_event_loop() return context.loop.run_until_complete(handler(event, context)) return wrapper
python
def async_handler(handler): """ This decorator allows for use of async handlers by automatically running them in an event loop. The loop is added to the context object for if the handler needs it. Usage:: >>> from lambda_decorators import async_handler >>> async def foobar(): ... return 'foobar' >>> @async_handler ... async def handler(event, context): ... return await foobar() >>> class Context: ... pass >>> handler({}, Context()) 'foobar' *NOTE: Python 3 only* """ @wraps(handler) def wrapper(event, context): context.loop = asyncio.get_event_loop() return context.loop.run_until_complete(handler(event, context)) return wrapper
[ "def", "async_handler", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "context", ".", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "return", "context", ".", "loop", ".", "run_until_complete", "(", "handler", "(", "event", ",", "context", ")", ")", "return", "wrapper" ]
This decorator allows for use of async handlers by automatically running them in an event loop. The loop is added to the context object for if the handler needs it. Usage:: >>> from lambda_decorators import async_handler >>> async def foobar(): ... return 'foobar' >>> @async_handler ... async def handler(event, context): ... return await foobar() >>> class Context: ... pass >>> handler({}, Context()) 'foobar' *NOTE: Python 3 only*
[ "This", "decorator", "allows", "for", "use", "of", "async", "handlers", "by", "automatically", "running", "them", "in", "an", "event", "loop", ".", "The", "loop", "is", "added", "to", "the", "context", "object", "for", "if", "the", "handler", "needs", "it", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L324-L351
train
dschep/lambda-decorators
lambda_decorators.py
dump_json_body
def dump_json_body(handler): """ Automatically serialize response bodies with json.dumps. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import dump_json_body >>> @dump_json_body ... def handler(event, context): ... return {'statusCode': 200, 'body': {'hello': 'world'}} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) if 'body' in response: try: response['body'] = json.dumps(response['body']) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return response return wrapper
python
def dump_json_body(handler): """ Automatically serialize response bodies with json.dumps. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import dump_json_body >>> @dump_json_body ... def handler(event, context): ... return {'statusCode': 200, 'body': {'hello': 'world'}} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) if 'body' in response: try: response['body'] = json.dumps(response['body']) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return response return wrapper
[ "def", "dump_json_body", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "response", "=", "handler", "(", "event", ",", "context", ")", "if", "'body'", "in", "response", ":", "try", ":", "response", "[", "'body'", "]", "=", "json", ".", "dumps", "(", "response", "[", "'body'", "]", ")", "except", "Exception", "as", "exception", ":", "return", "{", "'statusCode'", ":", "500", ",", "'body'", ":", "str", "(", "exception", ")", "}", "return", "response", "return", "wrapper" ]
Automatically serialize response bodies with json.dumps. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import dump_json_body >>> @dump_json_body ... def handler(event, context): ... return {'statusCode': 200, 'body': {'hello': 'world'}} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'}
[ "Automatically", "serialize", "response", "bodies", "with", "json", ".", "dumps", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L401-L425
train
dschep/lambda-decorators
lambda_decorators.py
json_http_resp
def json_http_resp(handler): """ Automatically serialize return value to the body of a successfull HTTP response. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import json_http_resp >>> @json_http_resp ... def handler(event, context): ... return {'hello': 'world'} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} in this example, the decorated handler returns: .. code:: python {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) try: body = json.dumps(response) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return {'statusCode': 200, 'body': body} return wrapper
python
def json_http_resp(handler): """ Automatically serialize return value to the body of a successfull HTTP response. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import json_http_resp >>> @json_http_resp ... def handler(event, context): ... return {'hello': 'world'} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} in this example, the decorated handler returns: .. code:: python {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) try: body = json.dumps(response) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return {'statusCode': 200, 'body': body} return wrapper
[ "def", "json_http_resp", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "response", "=", "handler", "(", "event", ",", "context", ")", "try", ":", "body", "=", "json", ".", "dumps", "(", "response", ")", "except", "Exception", "as", "exception", ":", "return", "{", "'statusCode'", ":", "500", ",", "'body'", ":", "str", "(", "exception", ")", "}", "return", "{", "'statusCode'", ":", "200", ",", "'body'", ":", "body", "}", "return", "wrapper" ]
Automatically serialize return value to the body of a successfull HTTP response. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import json_http_resp >>> @json_http_resp ... def handler(event, context): ... return {'hello': 'world'} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} in this example, the decorated handler returns: .. code:: python {'statusCode': 200, 'body': '{"hello": "world"}'}
[ "Automatically", "serialize", "return", "value", "to", "the", "body", "of", "a", "successfull", "HTTP", "response", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L428-L459
train
dschep/lambda-decorators
lambda_decorators.py
load_json_body
def load_json_body(handler): """ Automatically deserialize event bodies with json.loads. Automatically returns a 400 BAD REQUEST if there is an error while parsing. Usage:: >>> from lambda_decorators import load_json_body >>> @load_json_body ... def handler(event, context): ... return event['body']['foo'] >>> handler({'body': '{"foo": "bar"}'}, object()) 'bar' note that ``event['body']`` is already a dictionary and didn't have to explicitly be parsed. """ @wraps(handler) def wrapper(event, context): if isinstance(event.get('body'), str): try: event['body'] = json.loads(event['body']) except: return {'statusCode': 400, 'body': 'BAD REQUEST'} return handler(event, context) return wrapper
python
def load_json_body(handler): """ Automatically deserialize event bodies with json.loads. Automatically returns a 400 BAD REQUEST if there is an error while parsing. Usage:: >>> from lambda_decorators import load_json_body >>> @load_json_body ... def handler(event, context): ... return event['body']['foo'] >>> handler({'body': '{"foo": "bar"}'}, object()) 'bar' note that ``event['body']`` is already a dictionary and didn't have to explicitly be parsed. """ @wraps(handler) def wrapper(event, context): if isinstance(event.get('body'), str): try: event['body'] = json.loads(event['body']) except: return {'statusCode': 400, 'body': 'BAD REQUEST'} return handler(event, context) return wrapper
[ "def", "load_json_body", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "if", "isinstance", "(", "event", ".", "get", "(", "'body'", ")", ",", "str", ")", ":", "try", ":", "event", "[", "'body'", "]", "=", "json", ".", "loads", "(", "event", "[", "'body'", "]", ")", "except", ":", "return", "{", "'statusCode'", ":", "400", ",", "'body'", ":", "'BAD REQUEST'", "}", "return", "handler", "(", "event", ",", "context", ")", "return", "wrapper" ]
Automatically deserialize event bodies with json.loads. Automatically returns a 400 BAD REQUEST if there is an error while parsing. Usage:: >>> from lambda_decorators import load_json_body >>> @load_json_body ... def handler(event, context): ... return event['body']['foo'] >>> handler({'body': '{"foo": "bar"}'}, object()) 'bar' note that ``event['body']`` is already a dictionary and didn't have to explicitly be parsed.
[ "Automatically", "deserialize", "event", "bodies", "with", "json", ".", "loads", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L462-L489
train
dschep/lambda-decorators
lambda_decorators.py
json_schema_validator
def json_schema_validator(request_schema=None, response_schema=None): """ Validate your request & response payloads against a JSONSchema. *NOTE: depends on the* `jsonschema <https://github.com/Julian/jsonschema>`_ *package. If you're using* `serverless-python-requirements <https://github.com/UnitedIncome/serverless-python-requirements>`_ *you're all set. If you cURLed* ``lambda_decorators.py`` *you'll have to install it manually in your service's root directory.* Usage:: >>> from jsonschema import ValidationError >>> from lambda_decorators import json_schema_validator >>> @json_schema_validator(request_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return event['price'] >>> handler({'price': 'bar'}, object()) {'statusCode': 400, 'body': "RequestValidationError: 'bar' is not of type 'number'"} >>> @json_schema_validator(response_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return {'price': 'bar'} >>> handler({}, object()) {'statusCode': 500, 'body': "ResponseValidationError: 'bar' is not of type 'number'"} """ def wrapper_wrapper(handler): @wraps(handler) def wrapper(event, context): if request_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping request validation') else: try: jsonschema.validate(event, request_schema) except jsonschema.ValidationError as exception: return {'statusCode': 400, 'body': 'RequestValidationError: {}'.format( exception.message)} response = handler(event, context) if response_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping response validation') else: try: jsonschema.validate(response, response_schema) except jsonschema.ValidationError as exception: return {'statusCode': 500, 'body': 'ResponseValidationError: {}'.format( exception.message)} return response return wrapper return wrapper_wrapper
python
def json_schema_validator(request_schema=None, response_schema=None): """ Validate your request & response payloads against a JSONSchema. *NOTE: depends on the* `jsonschema <https://github.com/Julian/jsonschema>`_ *package. If you're using* `serverless-python-requirements <https://github.com/UnitedIncome/serverless-python-requirements>`_ *you're all set. If you cURLed* ``lambda_decorators.py`` *you'll have to install it manually in your service's root directory.* Usage:: >>> from jsonschema import ValidationError >>> from lambda_decorators import json_schema_validator >>> @json_schema_validator(request_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return event['price'] >>> handler({'price': 'bar'}, object()) {'statusCode': 400, 'body': "RequestValidationError: 'bar' is not of type 'number'"} >>> @json_schema_validator(response_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return {'price': 'bar'} >>> handler({}, object()) {'statusCode': 500, 'body': "ResponseValidationError: 'bar' is not of type 'number'"} """ def wrapper_wrapper(handler): @wraps(handler) def wrapper(event, context): if request_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping request validation') else: try: jsonschema.validate(event, request_schema) except jsonschema.ValidationError as exception: return {'statusCode': 400, 'body': 'RequestValidationError: {}'.format( exception.message)} response = handler(event, context) if response_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping response validation') else: try: jsonschema.validate(response, response_schema) except jsonschema.ValidationError as exception: return {'statusCode': 500, 'body': 'ResponseValidationError: {}'.format( exception.message)} return response return wrapper return wrapper_wrapper
[ "def", "json_schema_validator", "(", "request_schema", "=", "None", ",", "response_schema", "=", "None", ")", ":", "def", "wrapper_wrapper", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "if", "request_schema", "is", "not", "None", ":", "if", "jsonschema", "is", "None", ":", "logger", ".", "error", "(", "'jsonschema is not installed, skipping request validation'", ")", "else", ":", "try", ":", "jsonschema", ".", "validate", "(", "event", ",", "request_schema", ")", "except", "jsonschema", ".", "ValidationError", "as", "exception", ":", "return", "{", "'statusCode'", ":", "400", ",", "'body'", ":", "'RequestValidationError: {}'", ".", "format", "(", "exception", ".", "message", ")", "}", "response", "=", "handler", "(", "event", ",", "context", ")", "if", "response_schema", "is", "not", "None", ":", "if", "jsonschema", "is", "None", ":", "logger", ".", "error", "(", "'jsonschema is not installed, skipping response validation'", ")", "else", ":", "try", ":", "jsonschema", ".", "validate", "(", "response", ",", "response_schema", ")", "except", "jsonschema", ".", "ValidationError", "as", "exception", ":", "return", "{", "'statusCode'", ":", "500", ",", "'body'", ":", "'ResponseValidationError: {}'", ".", "format", "(", "exception", ".", "message", ")", "}", "return", "response", "return", "wrapper", "return", "wrapper_wrapper" ]
Validate your request & response payloads against a JSONSchema. *NOTE: depends on the* `jsonschema <https://github.com/Julian/jsonschema>`_ *package. If you're using* `serverless-python-requirements <https://github.com/UnitedIncome/serverless-python-requirements>`_ *you're all set. If you cURLed* ``lambda_decorators.py`` *you'll have to install it manually in your service's root directory.* Usage:: >>> from jsonschema import ValidationError >>> from lambda_decorators import json_schema_validator >>> @json_schema_validator(request_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return event['price'] >>> handler({'price': 'bar'}, object()) {'statusCode': 400, 'body': "RequestValidationError: 'bar' is not of type 'number'"} >>> @json_schema_validator(response_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return {'price': 'bar'} >>> handler({}, object()) {'statusCode': 500, 'body': "ResponseValidationError: 'bar' is not of type 'number'"}
[ "Validate", "your", "request", "&", "response", "payloads", "against", "a", "JSONSchema", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L492-L546
train
dschep/lambda-decorators
lambda_decorators.py
no_retry_on_failure
def no_retry_on_failure(handler): """ AWS Lambda retries scheduled lambdas that don't execute succesfully. This detects this by storing requests IDs in memory and exiting early on duplicates. Since this is in memory, don't use it on very frequently scheduled lambdas. It logs a critical message then exits with a statusCode of 200 to avoid further retries. Usage:: >>> import logging, sys >>> from lambda_decorators import no_retry_on_failure, logger >>> logger.addHandler(logging.StreamHandler(stream=sys.stdout)) >>> @no_retry_on_failure ... def scheduled_handler(event, context): ... return {'statusCode': 500} >>> class Context: ... aws_request_id = 1 >>> scheduled_handler({}, Context()) {'statusCode': 500} >>> scheduled_handler({}, Context()) Retry attempt on request id 1 detected. {'statusCode': 200} """ seen_request_ids = set() @wraps(handler) def wrapper(event, context): if context.aws_request_id in seen_request_ids: logger.critical('Retry attempt on request id %s detected.', context.aws_request_id) return {'statusCode': 200} seen_request_ids.add(context.aws_request_id) return handler(event, context) return wrapper
python
def no_retry_on_failure(handler): """ AWS Lambda retries scheduled lambdas that don't execute succesfully. This detects this by storing requests IDs in memory and exiting early on duplicates. Since this is in memory, don't use it on very frequently scheduled lambdas. It logs a critical message then exits with a statusCode of 200 to avoid further retries. Usage:: >>> import logging, sys >>> from lambda_decorators import no_retry_on_failure, logger >>> logger.addHandler(logging.StreamHandler(stream=sys.stdout)) >>> @no_retry_on_failure ... def scheduled_handler(event, context): ... return {'statusCode': 500} >>> class Context: ... aws_request_id = 1 >>> scheduled_handler({}, Context()) {'statusCode': 500} >>> scheduled_handler({}, Context()) Retry attempt on request id 1 detected. {'statusCode': 200} """ seen_request_ids = set() @wraps(handler) def wrapper(event, context): if context.aws_request_id in seen_request_ids: logger.critical('Retry attempt on request id %s detected.', context.aws_request_id) return {'statusCode': 200} seen_request_ids.add(context.aws_request_id) return handler(event, context) return wrapper
[ "def", "no_retry_on_failure", "(", "handler", ")", ":", "seen_request_ids", "=", "set", "(", ")", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "if", "context", ".", "aws_request_id", "in", "seen_request_ids", ":", "logger", ".", "critical", "(", "'Retry attempt on request id %s detected.'", ",", "context", ".", "aws_request_id", ")", "return", "{", "'statusCode'", ":", "200", "}", "seen_request_ids", ".", "add", "(", "context", ".", "aws_request_id", ")", "return", "handler", "(", "event", ",", "context", ")", "return", "wrapper" ]
AWS Lambda retries scheduled lambdas that don't execute succesfully. This detects this by storing requests IDs in memory and exiting early on duplicates. Since this is in memory, don't use it on very frequently scheduled lambdas. It logs a critical message then exits with a statusCode of 200 to avoid further retries. Usage:: >>> import logging, sys >>> from lambda_decorators import no_retry_on_failure, logger >>> logger.addHandler(logging.StreamHandler(stream=sys.stdout)) >>> @no_retry_on_failure ... def scheduled_handler(event, context): ... return {'statusCode': 500} >>> class Context: ... aws_request_id = 1 >>> scheduled_handler({}, Context()) {'statusCode': 500} >>> scheduled_handler({}, Context()) Retry attempt on request id 1 detected. {'statusCode': 200}
[ "AWS", "Lambda", "retries", "scheduled", "lambdas", "that", "don", "t", "execute", "succesfully", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L579-L617
train
dmarx/psaw
psaw/PushshiftAPI.py
PushshiftAPIMinimal._wrap_thing
def _wrap_thing(self, thing, kind): """Mimic praw.Submission and praw.Comment API""" thing['created'] = self._epoch_utc_to_local(thing['created_utc']) thing['d_'] = copy.deepcopy(thing) ThingType = namedtuple(kind, thing.keys()) thing = ThingType(**thing) return thing
python
def _wrap_thing(self, thing, kind): """Mimic praw.Submission and praw.Comment API""" thing['created'] = self._epoch_utc_to_local(thing['created_utc']) thing['d_'] = copy.deepcopy(thing) ThingType = namedtuple(kind, thing.keys()) thing = ThingType(**thing) return thing
[ "def", "_wrap_thing", "(", "self", ",", "thing", ",", "kind", ")", ":", "thing", "[", "'created'", "]", "=", "self", ".", "_epoch_utc_to_local", "(", "thing", "[", "'created_utc'", "]", ")", "thing", "[", "'d_'", "]", "=", "copy", ".", "deepcopy", "(", "thing", ")", "ThingType", "=", "namedtuple", "(", "kind", ",", "thing", ".", "keys", "(", ")", ")", "thing", "=", "ThingType", "(", "*", "*", "thing", ")", "return", "thing" ]
Mimic praw.Submission and praw.Comment API
[ "Mimic", "praw", ".", "Submission", "and", "praw", ".", "Comment", "API" ]
5702abdd1a0ccd60b115fc4b545eb2c087c56194
https://github.com/dmarx/psaw/blob/5702abdd1a0ccd60b115fc4b545eb2c087c56194/psaw/PushshiftAPI.py#L112-L118
train
dmarx/psaw
psaw/PushshiftAPI.py
PushshiftAPIMinimal._add_nec_args
def _add_nec_args(self, payload): """Adds 'limit' and 'created_utc' arguments to the payload as necessary.""" if self._limited(payload): # Do nothing I guess? Not sure how paging works on this endpoint... return if 'limit' not in payload: payload['limit'] = self.max_results_per_request if 'sort' not in payload: # Getting weird results if this is not made explicit. Unclear why. payload['sort'] = 'desc' if 'filter' in payload: #and payload.get('created_utc', None) is None: if not isinstance(payload['filter'], list): if isinstance(payload['filter'], str): payload['filter'] = [payload['filter']] else: payload['filter'] = list(payload['filter']) if 'created_utc' not in payload['filter']: payload['filter'].append('created_utc')
python
def _add_nec_args(self, payload): """Adds 'limit' and 'created_utc' arguments to the payload as necessary.""" if self._limited(payload): # Do nothing I guess? Not sure how paging works on this endpoint... return if 'limit' not in payload: payload['limit'] = self.max_results_per_request if 'sort' not in payload: # Getting weird results if this is not made explicit. Unclear why. payload['sort'] = 'desc' if 'filter' in payload: #and payload.get('created_utc', None) is None: if not isinstance(payload['filter'], list): if isinstance(payload['filter'], str): payload['filter'] = [payload['filter']] else: payload['filter'] = list(payload['filter']) if 'created_utc' not in payload['filter']: payload['filter'].append('created_utc')
[ "def", "_add_nec_args", "(", "self", ",", "payload", ")", ":", "if", "self", ".", "_limited", "(", "payload", ")", ":", "# Do nothing I guess? Not sure how paging works on this endpoint...", "return", "if", "'limit'", "not", "in", "payload", ":", "payload", "[", "'limit'", "]", "=", "self", ".", "max_results_per_request", "if", "'sort'", "not", "in", "payload", ":", "# Getting weird results if this is not made explicit. Unclear why.", "payload", "[", "'sort'", "]", "=", "'desc'", "if", "'filter'", "in", "payload", ":", "#and payload.get('created_utc', None) is None:", "if", "not", "isinstance", "(", "payload", "[", "'filter'", "]", ",", "list", ")", ":", "if", "isinstance", "(", "payload", "[", "'filter'", "]", ",", "str", ")", ":", "payload", "[", "'filter'", "]", "=", "[", "payload", "[", "'filter'", "]", "]", "else", ":", "payload", "[", "'filter'", "]", "=", "list", "(", "payload", "[", "'filter'", "]", ")", "if", "'created_utc'", "not", "in", "payload", "[", "'filter'", "]", ":", "payload", "[", "'filter'", "]", ".", "append", "(", "'created_utc'", ")" ]
Adds 'limit' and 'created_utc' arguments to the payload as necessary.
[ "Adds", "limit", "and", "created_utc", "arguments", "to", "the", "payload", "as", "necessary", "." ]
5702abdd1a0ccd60b115fc4b545eb2c087c56194
https://github.com/dmarx/psaw/blob/5702abdd1a0ccd60b115fc4b545eb2c087c56194/psaw/PushshiftAPI.py#L130-L147
train
pyroscope/pyrocore
src/pyrocore/scripts/rtmv.py
pretty_path
def pretty_path(path): """ Prettify path for logging. """ path = fmt.to_utf8(path) home_dir = os.path.expanduser("~") if path.startswith(home_dir): path = "~" + path[len(home_dir):] return '"%s"' % (path,)
python
def pretty_path(path): """ Prettify path for logging. """ path = fmt.to_utf8(path) home_dir = os.path.expanduser("~") if path.startswith(home_dir): path = "~" + path[len(home_dir):] return '"%s"' % (path,)
[ "def", "pretty_path", "(", "path", ")", ":", "path", "=", "fmt", ".", "to_utf8", "(", "path", ")", "home_dir", "=", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", "if", "path", ".", "startswith", "(", "home_dir", ")", ":", "path", "=", "\"~\"", "+", "path", "[", "len", "(", "home_dir", ")", ":", "]", "return", "'\"%s\"'", "%", "(", "path", ",", ")" ]
Prettify path for logging.
[ "Prettify", "path", "for", "logging", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtmv.py#L31-L38
train
pyroscope/pyrocore
src/pyrocore/scripts/rtmv.py
RtorrentMove.guarded
def guarded(self, call, *args): """ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode. """ self.LOG.debug('%s(%s)' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), )) if not self.options.dry_run: try: call(*args) except (EnvironmentError, UnicodeError) as exc: self.fatal('%s(%s) failed [%s]' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), exc, ))
python
def guarded(self, call, *args): """ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode. """ self.LOG.debug('%s(%s)' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), )) if not self.options.dry_run: try: call(*args) except (EnvironmentError, UnicodeError) as exc: self.fatal('%s(%s) failed [%s]' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), exc, ))
[ "def", "guarded", "(", "self", ",", "call", ",", "*", "args", ")", ":", "self", ".", "LOG", ".", "debug", "(", "'%s(%s)'", "%", "(", "call", ".", "__name__", ",", "', '", ".", "join", "(", "[", "pretty_path", "(", "i", ")", "for", "i", "in", "args", "]", ")", ",", ")", ")", "if", "not", "self", ".", "options", ".", "dry_run", ":", "try", ":", "call", "(", "*", "args", ")", "except", "(", "EnvironmentError", ",", "UnicodeError", ")", "as", "exc", ":", "self", ".", "fatal", "(", "'%s(%s) failed [%s]'", "%", "(", "call", ".", "__name__", ",", "', '", ".", "join", "(", "[", "pretty_path", "(", "i", ")", "for", "i", "in", "args", "]", ")", ",", "exc", ",", ")", ")" ]
Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode.
[ "Catch", "exceptions", "thrown", "by", "filesystem", "calls", "and", "don", "t", "really", "execute", "them", "in", "dry", "-", "run", "mode", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtmv.py#L82-L95
train
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
run
def run(): """ Module level test. """ logging.basicConfig(level=logging.DEBUG) load_config.ConfigLoader().load() config.debug = True print(repr(config.engine.item(sys.argv[1])))
python
def run(): """ Module level test. """ logging.basicConfig(level=logging.DEBUG) load_config.ConfigLoader().load() config.debug = True print(repr(config.engine.item(sys.argv[1])))
[ "def", "run", "(", ")", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "DEBUG", ")", "load_config", ".", "ConfigLoader", "(", ")", ".", "load", "(", ")", "config", ".", "debug", "=", "True", "print", "(", "repr", "(", "config", ".", "engine", ".", "item", "(", "sys", ".", "argv", "[", "1", "]", ")", ")", ")" ]
Module level test.
[ "Module", "level", "test", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L848-L854
train
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem._make_it_so
def _make_it_so(self, command, calls, *args, **kwargs): """ Perform some error-checked XMLRPC calls. """ observer = kwargs.pop('observer', False) args = (self._fields["hash"],) + args try: for call in calls: self._engine.LOG.debug("%s%s torrent #%s (%s)" % ( command[0].upper(), command[1:], self._fields["hash"], call)) if call.startswith(':') or call[:2].endswith('.'): namespace = self._engine._rpc else: namespace = self._engine._rpc.d result = getattr(namespace, call.lstrip(':'))(*args) if observer: observer(result) except xmlrpc.ERRORS as exc: raise error.EngineError("While %s torrent #%s: %s" % (command, self._fields["hash"], exc))
python
def _make_it_so(self, command, calls, *args, **kwargs): """ Perform some error-checked XMLRPC calls. """ observer = kwargs.pop('observer', False) args = (self._fields["hash"],) + args try: for call in calls: self._engine.LOG.debug("%s%s torrent #%s (%s)" % ( command[0].upper(), command[1:], self._fields["hash"], call)) if call.startswith(':') or call[:2].endswith('.'): namespace = self._engine._rpc else: namespace = self._engine._rpc.d result = getattr(namespace, call.lstrip(':'))(*args) if observer: observer(result) except xmlrpc.ERRORS as exc: raise error.EngineError("While %s torrent #%s: %s" % (command, self._fields["hash"], exc))
[ "def", "_make_it_so", "(", "self", ",", "command", ",", "calls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "observer", "=", "kwargs", ".", "pop", "(", "'observer'", ",", "False", ")", "args", "=", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", ")", "+", "args", "try", ":", "for", "call", "in", "calls", ":", "self", ".", "_engine", ".", "LOG", ".", "debug", "(", "\"%s%s torrent #%s (%s)\"", "%", "(", "command", "[", "0", "]", ".", "upper", "(", ")", ",", "command", "[", "1", ":", "]", ",", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "call", ")", ")", "if", "call", ".", "startswith", "(", "':'", ")", "or", "call", "[", ":", "2", "]", ".", "endswith", "(", "'.'", ")", ":", "namespace", "=", "self", ".", "_engine", ".", "_rpc", "else", ":", "namespace", "=", "self", ".", "_engine", ".", "_rpc", ".", "d", "result", "=", "getattr", "(", "namespace", ",", "call", ".", "lstrip", "(", "':'", ")", ")", "(", "*", "args", ")", "if", "observer", ":", "observer", "(", "result", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While %s torrent #%s: %s\"", "%", "(", "command", ",", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "exc", ")", ")" ]
Perform some error-checked XMLRPC calls.
[ "Perform", "some", "error", "-", "checked", "XMLRPC", "calls", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L60-L77
train
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.fetch
def fetch(self, name, engine_name=None): """ Get a field on demand. """ # TODO: Get each on-demand field in a multicall for all other items, since # we likely need it anyway; another (more easy) way would be to pre-fetch dynamically # with the list of fields from filters and output formats try: return self._fields[name] except KeyError: if isinstance(name, (int, long)): name = "custom_%d" % name if name == "done": val = float(self.fetch("completed_chunks")) / self.fetch("size_chunks") elif name == "files": val = self._get_files() elif name.startswith("kind_") and name[5:].isdigit(): val = self._get_kind(int(name[5:], 10)) elif name.startswith("custom_"): key = name[7:] try: if len(key) == 1 and key in "12345": val = getattr(self._engine._rpc.d, "custom"+key)(self._fields["hash"]) else: val = self._engine._rpc.d.custom(self._fields["hash"], key) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) else: getter_name = engine_name if engine_name else RtorrentEngine.PYRO2RT_MAPPING.get(name, name) if getter_name[0] == '=': getter_name = getter_name[1:] else: getter_name = "get_" + getter_name getter = getattr(self._engine._rpc.d, getter_name) try: val = getter(self._fields["hash"]) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) # TODO: Currently, NOT caching makes no sense; in a demon, it does! #if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField): self._fields[name] = val return val
python
def fetch(self, name, engine_name=None): """ Get a field on demand. """ # TODO: Get each on-demand field in a multicall for all other items, since # we likely need it anyway; another (more easy) way would be to pre-fetch dynamically # with the list of fields from filters and output formats try: return self._fields[name] except KeyError: if isinstance(name, (int, long)): name = "custom_%d" % name if name == "done": val = float(self.fetch("completed_chunks")) / self.fetch("size_chunks") elif name == "files": val = self._get_files() elif name.startswith("kind_") and name[5:].isdigit(): val = self._get_kind(int(name[5:], 10)) elif name.startswith("custom_"): key = name[7:] try: if len(key) == 1 and key in "12345": val = getattr(self._engine._rpc.d, "custom"+key)(self._fields["hash"]) else: val = self._engine._rpc.d.custom(self._fields["hash"], key) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) else: getter_name = engine_name if engine_name else RtorrentEngine.PYRO2RT_MAPPING.get(name, name) if getter_name[0] == '=': getter_name = getter_name[1:] else: getter_name = "get_" + getter_name getter = getattr(self._engine._rpc.d, getter_name) try: val = getter(self._fields["hash"]) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) # TODO: Currently, NOT caching makes no sense; in a demon, it does! #if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField): self._fields[name] = val return val
[ "def", "fetch", "(", "self", ",", "name", ",", "engine_name", "=", "None", ")", ":", "# TODO: Get each on-demand field in a multicall for all other items, since", "# we likely need it anyway; another (more easy) way would be to pre-fetch dynamically", "# with the list of fields from filters and output formats", "try", ":", "return", "self", ".", "_fields", "[", "name", "]", "except", "KeyError", ":", "if", "isinstance", "(", "name", ",", "(", "int", ",", "long", ")", ")", ":", "name", "=", "\"custom_%d\"", "%", "name", "if", "name", "==", "\"done\"", ":", "val", "=", "float", "(", "self", ".", "fetch", "(", "\"completed_chunks\"", ")", ")", "/", "self", ".", "fetch", "(", "\"size_chunks\"", ")", "elif", "name", "==", "\"files\"", ":", "val", "=", "self", ".", "_get_files", "(", ")", "elif", "name", ".", "startswith", "(", "\"kind_\"", ")", "and", "name", "[", "5", ":", "]", ".", "isdigit", "(", ")", ":", "val", "=", "self", ".", "_get_kind", "(", "int", "(", "name", "[", "5", ":", "]", ",", "10", ")", ")", "elif", "name", ".", "startswith", "(", "\"custom_\"", ")", ":", "key", "=", "name", "[", "7", ":", "]", "try", ":", "if", "len", "(", "key", ")", "==", "1", "and", "key", "in", "\"12345\"", ":", "val", "=", "getattr", "(", "self", ".", "_engine", ".", "_rpc", ".", "d", ",", "\"custom\"", "+", "key", ")", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ")", "else", ":", "val", "=", "self", ".", "_engine", ".", "_rpc", ".", "d", ".", "custom", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "key", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While accessing field %r: %s\"", "%", "(", "name", ",", "exc", ")", ")", "else", ":", "getter_name", "=", "engine_name", "if", "engine_name", "else", "RtorrentEngine", ".", "PYRO2RT_MAPPING", ".", "get", "(", "name", ",", "name", ")", "if", "getter_name", "[", "0", "]", "==", "'='", ":", "getter_name", "=", "getter_name", "[", "1", ":", "]", "else", ":", 
"getter_name", "=", "\"get_\"", "+", "getter_name", "getter", "=", "getattr", "(", "self", ".", "_engine", ".", "_rpc", ".", "d", ",", "getter_name", ")", "try", ":", "val", "=", "getter", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While accessing field %r: %s\"", "%", "(", "name", ",", "exc", ")", ")", "# TODO: Currently, NOT caching makes no sense; in a demon, it does!", "#if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField):", "self", ".", "_fields", "[", "name", "]", "=", "val", "return", "val" ]
Get a field on demand.
[ "Get", "a", "field", "on", "demand", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L167-L211
train
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.datapath
def datapath(self): """ Get an item's data path. """ path = self._fields['path'] if not path: # stopped item with no base_dir? path = self.fetch('directory') if path and not self._fields['is_multi_file']: path = os.path.join(path, self._fields['name']) return os.path.expanduser(fmt.to_unicode(path))
python
def datapath(self): """ Get an item's data path. """ path = self._fields['path'] if not path: # stopped item with no base_dir? path = self.fetch('directory') if path and not self._fields['is_multi_file']: path = os.path.join(path, self._fields['name']) return os.path.expanduser(fmt.to_unicode(path))
[ "def", "datapath", "(", "self", ")", ":", "path", "=", "self", ".", "_fields", "[", "'path'", "]", "if", "not", "path", ":", "# stopped item with no base_dir?", "path", "=", "self", ".", "fetch", "(", "'directory'", ")", "if", "path", "and", "not", "self", ".", "_fields", "[", "'is_multi_file'", "]", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "self", ".", "_fields", "[", "'name'", "]", ")", "return", "os", ".", "path", ".", "expanduser", "(", "fmt", ".", "to_unicode", "(", "path", ")", ")" ]
Get an item's data path.
[ "Get", "an", "item", "s", "data", "path", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L214-L222
train
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.announce_urls
def announce_urls(self, default=[]): # pylint: disable=dangerous-default-value """ Get a list of all announce URLs. Returns `default` if no trackers are found at all. """ try: response = self._engine._rpc.t.multicall(self._fields["hash"], 0, "t.url=", "t.is_enabled=") except xmlrpc.ERRORS as exc: raise error.EngineError("While getting announce URLs for #%s: %s" % (self._fields["hash"], exc)) if response: return [i[0] for i in response if i[1]] else: return default
python
def announce_urls(self, default=[]): # pylint: disable=dangerous-default-value """ Get a list of all announce URLs. Returns `default` if no trackers are found at all. """ try: response = self._engine._rpc.t.multicall(self._fields["hash"], 0, "t.url=", "t.is_enabled=") except xmlrpc.ERRORS as exc: raise error.EngineError("While getting announce URLs for #%s: %s" % (self._fields["hash"], exc)) if response: return [i[0] for i in response if i[1]] else: return default
[ "def", "announce_urls", "(", "self", ",", "default", "=", "[", "]", ")", ":", "# pylint: disable=dangerous-default-value", "try", ":", "response", "=", "self", ".", "_engine", ".", "_rpc", ".", "t", ".", "multicall", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "0", ",", "\"t.url=\"", ",", "\"t.is_enabled=\"", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While getting announce URLs for #%s: %s\"", "%", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "exc", ")", ")", "if", "response", ":", "return", "[", "i", "[", "0", "]", "for", "i", "in", "response", "if", "i", "[", "1", "]", "]", "else", ":", "return", "default" ]
Get a list of all announce URLs. Returns `default` if no trackers are found at all.
[ "Get", "a", "list", "of", "all", "announce", "URLs", ".", "Returns", "default", "if", "no", "trackers", "are", "found", "at", "all", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L225-L237
train