repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
tkem/uritools | uritools/split.py | SplitResult.getuserinfo | python | def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors) | Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L133-L143 | [
"def uridecode(uristring, encoding='utf-8', errors='strict'):\n \"\"\"Decode a URI string or string component.\"\"\"\n if not isinstance(uristring, bytes):\n uristring = uristring.encode(encoding or 'ascii', errors)\n parts = uristring.split(b'%')\n result = [parts[0]]\n append = result.append... | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.gethost | python | def gethost(self, default=None, errors='strict'):
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower() | Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L145-L159 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.getport | python | def getport(self, default=None):
port = self.port
if port:
return int(port)
else:
return default | Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L161-L171 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.getpath | python | def getpath(self, encoding='utf-8', errors='strict'):
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors) | Return the normalized decoded URI path. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L173-L176 | [
"def uridecode(uristring, encoding='utf-8', errors='strict'):\n \"\"\"Decode a URI string or string component.\"\"\"\n if not isinstance(uristring, bytes):\n uristring = uristring.encode(encoding or 'ascii', errors)\n parts = uristring.split(b'%')\n result = [parts[0]]\n append = result.append... | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
    """Transform a URI reference relative to `self` into a
    :class:`SplitResult` representing its target URI.

    Implements RFC 3986 section 5.2 relative resolution.  With
    `strict` false, a reference whose scheme equals the base scheme
    is handled as if it had no scheme (non-strict parsers,
    RFC 3986 5.2.2).
    """
    scheme, authority, path, query, fragment = self.RE.match(ref).groups()
    # RFC 3986 5.2.2. Transform References
    if scheme is not None and (strict or scheme != self.scheme):
        # Reference is itself absolute: keep its components as-is,
        # only normalizing the path.
        path = self.__remove_dot_segments(path)
    elif authority is not None:
        # Network-path reference: inherit only the base scheme.
        scheme = self.scheme
        path = self.__remove_dot_segments(path)
    elif not path:
        # Empty path: inherit base path, and base query unless the
        # reference supplies its own.
        scheme = self.scheme
        authority = self.authority
        path = self.path
        query = self.query if query is None else query
    elif path.startswith(self.SLASH):
        # Absolute-path reference: inherit scheme and authority.
        scheme = self.scheme
        authority = self.authority
        path = self.__remove_dot_segments(path)
    else:
        # Relative-path reference: merge with the base path before
        # removing dot segments (RFC 3986 5.2.3).
        scheme = self.scheme
        authority = self.authority
        path = self.__remove_dot_segments(self.__merge(path))
    return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
    # RFC 3986 5.2.3. Merge Paths: a base URI with an authority but
    # an empty path is treated as if its path were a single slash.
    if self.authority is not None and not self.path:
        return self.SLASH + path
    # Otherwise replace everything after the last slash of the base
    # path with the reference path.
    base, slash, _ = self.path.rpartition(self.SLASH)
    return base + slash + path
@classmethod
def __remove_dot_segments(cls, path):
    # RFC 3986 5.2.4. Remove Dot Segments
    # cls.SLASH/DOT/DOTDOT/EMPTY supply the literals matching the
    # path's type (str or bytes subclasses define their own).
    pseg = []
    for s in path.split(cls.SLASH):
        if s == cls.DOT:
            # single-dot segments are dropped outright
            continue
        elif s != cls.DOTDOT:
            # ordinary segment: keep it
            pseg.append(s)
        elif len(pseg) == 1 and not pseg[0]:
            # '..' directly after the root slash cannot climb above it
            continue
        elif pseg and pseg[-1] != cls.DOTDOT:
            # '..' cancels the preceding real segment
            pseg.pop()
        else:
            # leading '..' in a relative path has nothing to cancel;
            # keep it
            pseg.append(s)
    # adjust for trailing '/.' or '/..'
    if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
        pseg.append(cls.EMPTY)
    # a non-empty input must not normalize to the empty string;
    # prefix with '.' so the result becomes './'
    if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
        pseg.insert(0, cls.DOT)
    return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.getquery | python | def getquery(self, default=None, encoding='utf-8', errors='strict'):
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors) | Return the decoded query string, or `default` if the original URI
reference did not contain a query component. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L178-L187 | [
"def uridecode(uristring, encoding='utf-8', errors='strict'):\n \"\"\"Decode a URI string or string component.\"\"\"\n if not isinstance(uristring, bytes):\n uristring = uristring.encode(encoding or 'ascii', errors)\n parts = uristring.split(b'%')\n result = [parts[0]]\n append = result.append... | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.getquerydict | python | def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict | Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L189-L199 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.getquerylist | python | def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items | Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L201-L223 | [
"def uridecode(uristring, encoding='utf-8', errors='strict'):\n \"\"\"Decode a URI string or string component.\"\"\"\n if not isinstance(uristring, bytes):\n uristring = uristring.encode(encoding or 'ascii', errors)\n parts = uristring.split(b'%')\n result = [parts[0]]\n append = result.append... | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.isabspath | python | def isabspath(self):
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH)) | Return :const:`True` if this is an absolute-path reference. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L248-L251 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.isrelpath | python | def isrelpath(self):
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH)) | Return :const:`True` if this is a relative-path reference. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L253-L256 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.issamedoc | python | def issamedoc(self):
return (self.scheme is None and self.authority is None and
not self.path and self.query is None) | Return :const:`True` if this is a same-document reference. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L258-L261 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def transform(self, ref, strict=False):
"""Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI.
"""
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/split.py | SplitResult.transform | python | def transform(self, ref, strict=False):
scheme, authority, path, query, fragment = self.RE.match(ref).groups()
# RFC 3986 5.2.2. Transform References
if scheme is not None and (strict or scheme != self.scheme):
path = self.__remove_dot_segments(path)
elif authority is not None:
scheme = self.scheme
path = self.__remove_dot_segments(path)
elif not path:
scheme = self.scheme
authority = self.authority
path = self.path
query = self.query if query is None else query
elif path.startswith(self.SLASH):
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(path)
else:
scheme = self.scheme
authority = self.authority
path = self.__remove_dot_segments(self.__merge(path))
return type(self)(scheme, authority, path, query, fragment) | Transform a URI reference relative to `self` into a
:class:`SplitResult` representing its target URI. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/split.py#L263-L289 | null | class SplitResult(collections.namedtuple('SplitResult', _URI_COMPONENTS)):
"""Base class to hold :func:`urisplit` results."""
__slots__ = () # prevent creation of instance dictionary
@property
def userinfo(self):
authority = self.authority
if authority is None:
return None
userinfo, present, _ = authority.rpartition(self.AT)
if present:
return userinfo
else:
return None
@property
def host(self):
authority = self.authority
if authority is None:
return None
_, _, hostinfo = authority.rpartition(self.AT)
host, _, port = hostinfo.rpartition(self.COLON)
if port.lstrip(self.DIGITS):
return hostinfo
else:
return host
@property
def port(self):
authority = self.authority
if authority is None:
return None
_, present, port = authority.rpartition(self.COLON)
if present and not port.lstrip(self.DIGITS):
return port
else:
return None
def geturi(self):
"""Return the re-combined version of the original URI reference as a
string.
"""
scheme, authority, path, query, fragment = self
# RFC 3986 5.3. Component Recomposition
result = []
if scheme is not None:
result.extend([scheme, self.COLON])
if authority is not None:
result.extend([self.SLASH, self.SLASH, authority])
result.append(path)
if query is not None:
result.extend([self.QUEST, query])
if fragment is not None:
result.extend([self.HASH, fragment])
return self.EMPTY.join(result)
def getscheme(self, default=None):
"""Return the URI scheme in canonical (lowercase) form, or `default`
if the original URI reference did not contain a scheme component.
"""
scheme = self.scheme
if scheme is None:
return default
elif isinstance(scheme, bytes):
return scheme.decode('ascii').lower()
else:
return scheme.lower()
def getauthority(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo, host and port subcomponents of the URI
authority as a three-item tuple.
"""
# TBD: (userinfo, host, port) kwargs, default string?
if default is None:
default = (None, None, None)
elif not isinstance(default, collections.Iterable):
raise TypeError('Invalid default type')
elif len(default) != 3:
raise ValueError('Invalid default length')
# TODO: this could be much more efficient by using a dedicated regex
return (
self.getuserinfo(default[0], encoding, errors),
self.gethost(default[1], errors),
self.getport(default[2])
)
def getuserinfo(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded userinfo subcomponent of the URI authority, or
`default` if the original URI reference did not contain a
userinfo field.
"""
userinfo = self.userinfo
if userinfo is None:
return default
else:
return uridecode(userinfo, encoding, errors)
def gethost(self, default=None, errors='strict'):
"""Return the decoded host subcomponent of the URI authority as a
string or an :mod:`ipaddress` address object, or `default` if
the original URI reference did not contain a host.
"""
host = self.host
if host is None or (not host and default is not None):
return default
elif host.startswith(self.LBRACKET) and host.endswith(self.RBRACKET):
return _ip_literal(host[1:-1])
elif host.startswith(self.LBRACKET) or host.endswith(self.RBRACKET):
raise ValueError('Invalid host %r' % host)
# TODO: faster check for IPv4 address?
return _ipv4_address(host) or uridecode(host, 'utf-8', errors).lower()
def getport(self, default=None):
"""Return the port subcomponent of the URI authority as an
:class:`int`, or `default` if the original URI reference did
not contain a port or if the port was empty.
"""
port = self.port
if port:
return int(port)
else:
return default
def getpath(self, encoding='utf-8', errors='strict'):
"""Return the normalized decoded URI path."""
path = self.__remove_dot_segments(self.path)
return uridecode(path, encoding, errors)
def getquery(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded query string, or `default` if the original URI
reference did not contain a query component.
"""
query = self.query
if query is None:
return default
else:
return uridecode(query, encoding, errors)
def getquerydict(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep` and return a dictionary of query variables.
The dictionary keys are the unique query variable names and
the values are lists of values for each name.
"""
dict = collections.defaultdict(list)
for name, value in self.getquerylist(sep, encoding, errors):
dict[name].append(value)
return dict
def getquerylist(self, sep='&', encoding='utf-8', errors='strict'):
"""Split the query component into individual `name=value` pairs
separated by `sep`, and return a list of `(name, value)`
tuples.
"""
if not self.query:
return []
elif isinstance(sep, type(self.query)):
qsl = self.query.split(sep)
elif isinstance(sep, bytes):
qsl = self.query.split(sep.decode('ascii'))
else:
qsl = self.query.split(sep.encode('ascii'))
items = []
for parts in [qs.partition(self.EQ) for qs in qsl if qs]:
name = uridecode(parts[0], encoding, errors)
if parts[1]:
value = uridecode(parts[2], encoding, errors)
else:
value = None
items.append((name, value))
return items
def getfragment(self, default=None, encoding='utf-8', errors='strict'):
"""Return the decoded fragment identifier, or `default` if the
original URI reference did not contain a fragment component.
"""
fragment = self.fragment
if fragment is None:
return default
else:
return uridecode(fragment, encoding, errors)
def isuri(self):
"""Return :const:`True` if this is a URI."""
return self.scheme is not None
def isabsuri(self):
"""Return :const:`True` if this is an absolute URI."""
return self.scheme is not None and self.fragment is None
def isnetpath(self):
"""Return :const:`True` if this is a network-path reference."""
return self.scheme is None and self.authority is not None
def isabspath(self):
"""Return :const:`True` if this is an absolute-path reference."""
return (self.scheme is None and self.authority is None and
self.path.startswith(self.SLASH))
def isrelpath(self):
"""Return :const:`True` if this is a relative-path reference."""
return (self.scheme is None and self.authority is None and
not self.path.startswith(self.SLASH))
def issamedoc(self):
"""Return :const:`True` if this is a same-document reference."""
return (self.scheme is None and self.authority is None and
not self.path and self.query is None)
def __merge(self, path):
# RFC 3986 5.2.3. Merge Paths
if self.authority is not None and not self.path:
return self.SLASH + path
else:
parts = self.path.rpartition(self.SLASH)
return parts[1].join((parts[0], path))
@classmethod
def __remove_dot_segments(cls, path):
# RFC 3986 5.2.4. Remove Dot Segments
pseg = []
for s in path.split(cls.SLASH):
if s == cls.DOT:
continue
elif s != cls.DOTDOT:
pseg.append(s)
elif len(pseg) == 1 and not pseg[0]:
continue
elif pseg and pseg[-1] != cls.DOTDOT:
pseg.pop()
else:
pseg.append(s)
# adjust for trailing '/.' or '/..'
if path.rpartition(cls.SLASH)[2] in (cls.DOT, cls.DOTDOT):
pseg.append(cls.EMPTY)
if path and len(pseg) == 1 and pseg[0] == cls.EMPTY:
pseg.insert(0, cls.DOT)
return cls.SLASH.join(pseg)
|
tkem/uritools | uritools/compose.py | uricompose | python | def uricompose(scheme=None, authority=None, path='', query=None,
fragment=None, userinfo=None, host=None, port=None,
querysep='&', encoding='utf-8'):
# RFC 3986 3.1: Scheme names consist of a sequence of characters
# beginning with a letter and followed by any combination of
# letters, digits, plus ("+"), period ("."), or hyphen ("-").
# Although schemes are case-insensitive, the canonical form is
# lowercase and documents that specify schemes must do so with
# lowercase letters. An implementation should accept uppercase
# letters as equivalent to lowercase in scheme names (e.g., allow
# "HTTP" as well as "http") for the sake of robustness but should
# only produce lowercase scheme names for consistency.
if isinstance(scheme, bytes):
scheme = _scheme(scheme)
elif scheme is not None:
scheme = _scheme(scheme.encode())
# authority must be string type or three-item iterable
if authority is None:
authority = (None, None, None)
elif isinstance(authority, bytes):
authority = _AUTHORITY_RE_BYTES.match(authority).groups()
elif isinstance(authority, _unicode):
authority = _AUTHORITY_RE_UNICODE.match(authority).groups()
elif not isinstance(authority, collections.Iterable):
raise TypeError('Invalid authority type')
elif len(authority) != 3:
raise ValueError('Invalid authority length')
authority = _authority(
userinfo if userinfo is not None else authority[0],
host if host is not None else authority[1],
port if port is not None else authority[2],
encoding
)
# RFC 3986 3.3: If a URI contains an authority component, then the
# path component must either be empty or begin with a slash ("/")
# character. If a URI does not contain an authority component,
# then the path cannot begin with two slash characters ("//").
path = uriencode(path, _SAFE_PATH, encoding)
if authority is not None and path and not path.startswith(b'/'):
raise ValueError('Invalid path with authority component')
if authority is None and path.startswith(b'//'):
raise ValueError('Invalid path without authority component')
# RFC 3986 4.2: A path segment that contains a colon character
# (e.g., "this:that") cannot be used as the first segment of a
# relative-path reference, as it would be mistaken for a scheme
# name. Such a segment must be preceded by a dot-segment (e.g.,
# "./this:that") to make a relative-path reference.
if scheme is None and authority is None and not path.startswith(b'/'):
if b':' in path.partition(b'/')[0]:
path = b'./' + path
# RFC 3986 3.4: The characters slash ("/") and question mark ("?")
# may represent data within the query component. Beware that some
# older, erroneous implementations may not handle such data
# correctly when it is used as the base URI for relative
# references (Section 5.1), apparently because they fail to
# distinguish query data from path data when looking for
# hierarchical separators. However, as query components are often
# used to carry identifying information in the form of "key=value"
# pairs and one frequently used value is a reference to another
# URI, it is sometimes better for usability to avoid percent-
# encoding those characters.
if isinstance(query, _strtypes):
query = uriencode(query, _SAFE_QUERY, encoding)
elif isinstance(query, collections.Mapping):
query = _querydict(query, querysep, encoding)
elif isinstance(query, collections.Iterable):
query = _querylist(query, querysep, encoding)
elif query is not None:
raise TypeError('Invalid query type')
# RFC 3986 3.5: The characters slash ("/") and question mark ("?")
# are allowed to represent data within the fragment identifier.
# Beware that some older, erroneous implementations may not handle
# this data correctly when it is used as the base URI for relative
# references.
if fragment is not None:
fragment = uriencode(fragment, _SAFE_FRAGMENT, encoding)
result = uriunsplit((scheme, authority, path, query, fragment))
# always return platform `str` type
return result if isinstance(result, str) else result.decode() | Compose a URI reference string from its individual components. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/compose.py#L124-L210 | [
"def uriencode(uristring, safe='', encoding='utf-8', errors='strict'):\n \"\"\"Encode a URI string or string component.\"\"\"\n if not isinstance(uristring, bytes):\n uristring = uristring.encode(encoding, errors)\n if not isinstance(safe, bytes):\n safe = safe.encode('ascii')\n try:\n ... | import collections
import ipaddress
import numbers
import re
from .chars import SUB_DELIMS
from .encoding import uriencode
from .split import uriunsplit
_unicode = type(u'')
_strtypes = (bytes, _unicode)
# RFC 3986 3.1: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
_SCHEME_RE = re.compile(b'^[A-Za-z][A-Za-z0-9+.-]*$')
# RFC 3986 3.2: authority = [ userinfo "@" ] host [ ":" port ]
_AUTHORITY_RE_BYTES = re.compile(b'^(?:(.*)@)?(.*?)(?::([0-9]*))?$')
_AUTHORITY_RE_UNICODE = re.compile(u'^(?:(.*)@)?(.*?)(?::([0-9]*))?$')
# safe component characters
_SAFE_USERINFO = SUB_DELIMS + ':'
_SAFE_HOST = SUB_DELIMS
_SAFE_PATH = SUB_DELIMS + ':@/'
_SAFE_QUERY = SUB_DELIMS + ':@/?'
_SAFE_FRAGMENT = SUB_DELIMS + ':@/?'
def _scheme(scheme):
    """Validate and normalize a URI scheme component.

    Returns the scheme lowercased (RFC 3986 3.1: schemes are
    case-insensitive and lowercase is the canonical form); raises
    ValueError when it does not match the scheme grammar.
    """
    if not _SCHEME_RE.match(scheme):
        raise ValueError('Invalid scheme component')
    return scheme.lower()
def _authority(userinfo, host, port, encoding):
    """Assemble the URI authority component from its parts.

    Returns the authority as bytes, or None when no part was given
    (empty parts list means "no authority component at all").
    """
    authority = []
    if userinfo is not None:
        authority.append(uriencode(userinfo, _SAFE_USERINFO, encoding))
        authority.append(b'@')
    # host may be an ipaddress object, bytes, text, or None
    if isinstance(host, ipaddress.IPv6Address):
        # RFC 3986 3.2.2: IPv6 addresses appear bracketed as IP-literals
        authority.append(b'[' + host.compressed.encode() + b']')
    elif isinstance(host, ipaddress.IPv4Address):
        authority.append(host.compressed.encode())
    elif isinstance(host, bytes):
        authority.append(_host(host))
    elif host is not None:
        authority.append(_host(host.encode('utf-8')))
    # port may be a number, bytes, text, or None; _port validates digits
    if isinstance(port, numbers.Number):
        authority.append(_port(str(port).encode()))
    elif isinstance(port, bytes):
        authority.append(_port(port))
    elif port is not None:
        authority.append(_port(port.encode()))
    return b''.join(authority) if authority else None
def _ip_literal(address):
if address.startswith('v'):
raise ValueError('Address mechanism not supported')
else:
return b'[' + ipaddress.IPv6Address(address).compressed.encode() + b']'
def _host(host):
    """Normalize and percent-encode a host given as bytes.

    Bracketed input is treated as an IPv6 IP-literal; bare IPv6 text
    (as returned by SplitResult.gethost()) is re-bracketed; anything
    else is percent-encoded and lowercased as a registered name.
    """
    # RFC 3986 3.2.3: Although host is case-insensitive, producers and
    # normalizers should use lowercase for registered names and
    # hexadecimal addresses for the sake of uniformity, while only
    # using uppercase letters for percent-encodings.
    if host.startswith(b'[') and host.endswith(b']'):
        return _ip_literal(host[1:-1].decode())
    # check for IPv6 addresses as returned by SplitResult.gethost()
    try:
        return _ip_literal(host.decode('utf-8'))
    except ValueError:
        return uriencode(host, _SAFE_HOST, 'utf-8').lower()
def _port(port):
# RFC 3986 3.2.3: URI producers and normalizers should omit the
# port component and its ":" delimiter if port is empty or if its
# value would be the same as that of the scheme's default.
if port.lstrip(b'0123456789'):
raise ValueError('Invalid port subcomponent')
elif port:
return b':' + port
else:
return b''
def _querylist(items, sep, encoding):
    """Encode a sequence of (key, value) pairs as a query string (bytes).

    A value of None produces a bare key; string values are encoded
    as-is; any other value is converted with str() first.  *sep* joins
    the terms and is removed from the safe character set so it cannot
    appear unescaped inside a term.
    """
    terms = []
    append = terms.append
    # NOTE(review): assumes *sep* is text here -- a bytes sep would fail
    # this str.replace even though the join below handles bytes; confirm
    # against uricompose's querysep callers.
    safe = _SAFE_QUERY.replace(sep, '')
    for key, value in items:
        name = uriencode(key, safe, encoding)
        if value is None:
            append(name)
        elif isinstance(value, _strtypes):
            append(name + b'=' + uriencode(value, safe, encoding))
        else:
            append(name + b'=' + uriencode(str(value), safe, encoding))
    if isinstance(sep, bytes):
        return sep.join(terms)
    else:
        return sep.encode('ascii').join(terms)
def _querydict(mapping, sep, encoding):
    """Flatten *mapping* into (key, value) pairs and encode them as a
    query string via _querylist().

    String values map to a single pair; other iterable values produce
    one pair per element; any other value maps to a single pair.
    """
    # Bug fix: the bare ``collections.Iterable`` alias was deprecated in
    # Python 3.3 and removed in 3.10; import from collections.abc with a
    # fallback so older interpreters keep working.
    try:
        from collections.abc import Iterable
    except ImportError:  # pragma: no cover - legacy interpreters
        from collections import Iterable
    items = []
    for key, value in mapping.items():
        if isinstance(value, _strtypes):
            # strings are iterable but must be treated as scalar values
            items.append((key, value))
        elif isinstance(value, Iterable):
            items.extend((key, v) for v in value)
        else:
            items.append((key, value))
    return _querylist(items, sep, encoding)
|
tkem/uritools | uritools/encoding.py | uriencode | python | def uriencode(uristring, safe='', encoding='utf-8', errors='strict'):
if not isinstance(uristring, bytes):
uristring = uristring.encode(encoding, errors)
if not isinstance(safe, bytes):
safe = safe.encode('ascii')
try:
encoded = _encoded[safe]
except KeyError:
encoded = _encoded[b''][:]
for i in _tointseq(safe):
encoded[i] = _fromint(i)
_encoded[safe] = encoded
return b''.join(map(encoded.__getitem__, _tointseq(uristring))) | Encode a URI string or string component. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/encoding.py#L40-L53 | [
"def _fromint(i): return bytes([i])\n",
"def _tointseq(b): return memoryview(b).tolist()\n",
"def _tointseq(b): return b\n"
] | from string import hexdigits
from .chars import UNRESERVED
if isinstance(chr(0), bytes):
_fromint = chr
else:
def _fromint(i): return bytes([i])
try:
_fromhex = bytes.fromhex
except AttributeError:
def _fromhex(x): return chr(int(x, 16))
try:
0 in b''
except TypeError:
def _tointseq(b): return memoryview(b).tolist()
else:
def _tointseq(b): return b
# RFC 3986 2.1: For consistency, URI producers and normalizers should
# use uppercase hexadecimal digits for all percent-encodings.
def _pctenc(byte):
return ('%%%02X' % byte).encode()
_unreserved = frozenset(_tointseq(UNRESERVED.encode()))
_encoded = {
b'': [_fromint(i) if i in _unreserved else _pctenc(i) for i in range(256)]
}
_decoded = {
(a + b).encode(): _fromhex(a + b) for a in hexdigits for b in hexdigits
}
def uridecode(uristring, encoding='utf-8', errors='strict'):
    """Decode a URI string or string component.

    Percent-escapes are resolved to their raw byte values; the result
    is decoded with *encoding*, or returned as bytes when *encoding*
    is None.  Malformed or truncated escapes pass through verbatim.
    """
    if not isinstance(uristring, bytes):
        uristring = uristring.encode(encoding or 'ascii', errors)
    segments = uristring.split(b'%')
    pieces = [segments[0]]
    for segment in segments[1:]:
        escape = segment[:2]
        # unknown two-char sequences are kept as literal "%XY"
        pieces.append(_decoded.get(escape, b'%' + escape))
        pieces.append(segment[2:])
    decoded = b''.join(pieces)
    return decoded if encoding is None else decoded.decode(encoding, errors)
|
tkem/uritools | uritools/encoding.py | uridecode | python | def uridecode(uristring, encoding='utf-8', errors='strict'):
if not isinstance(uristring, bytes):
uristring = uristring.encode(encoding or 'ascii', errors)
parts = uristring.split(b'%')
result = [parts[0]]
append = result.append
decode = _decoded.get
for s in parts[1:]:
append(decode(s[:2], b'%' + s[:2]))
append(s[2:])
if encoding is not None:
return b''.join(result).decode(encoding, errors)
else:
return b''.join(result) | Decode a URI string or string component. | train | https://github.com/tkem/uritools/blob/e77ba4acd937b68da9850138563debd4c925ef9f/uritools/encoding.py#L56-L70 | null | from string import hexdigits
from .chars import UNRESERVED
if isinstance(chr(0), bytes):
_fromint = chr
else:
def _fromint(i): return bytes([i])
try:
_fromhex = bytes.fromhex
except AttributeError:
def _fromhex(x): return chr(int(x, 16))
try:
0 in b''
except TypeError:
def _tointseq(b): return memoryview(b).tolist()
else:
def _tointseq(b): return b
# RFC 3986 2.1: For consistency, URI producers and normalizers should
# use uppercase hexadecimal digits for all percent-encodings.
def _pctenc(byte):
return ('%%%02X' % byte).encode()
_unreserved = frozenset(_tointseq(UNRESERVED.encode()))
_encoded = {
b'': [_fromint(i) if i in _unreserved else _pctenc(i) for i in range(256)]
}
_decoded = {
(a + b).encode(): _fromhex(a + b) for a in hexdigits for b in hexdigits
}
def uriencode(uristring, safe='', encoding='utf-8', errors='strict'):
    """Encode a URI string or string component.

    Every byte that is not unreserved (RFC 3986 2.3) and not listed in
    *safe* is percent-encoded; the result is returned as bytes.
    """
    if not isinstance(uristring, bytes):
        uristring = uristring.encode(encoding, errors)
    if not isinstance(safe, bytes):
        safe = safe.encode('ascii')
    try:
        # per-safe-set translation tables are memoized in _encoded
        encoded = _encoded[safe]
    except KeyError:
        # copy the table for the empty safe set, mark each byte in
        # *safe* as pass-through, then cache the new table
        encoded = _encoded[b''][:]
        for i in _tointseq(safe):
            encoded[i] = _fromint(i)
        _encoded[safe] = encoded
    return b''.join(map(encoded.__getitem__, _tointseq(uristring)))
|
MatiasSM/fcb | fcb/checker/cleanup/helper.py | delete_unverified_uploads | python | def delete_unverified_uploads(destinations):
    # For each destination: remove FilesDestinations rows whose upload was
    # never verified, then garbage-collect containers, per-container file
    # entries, fragments and uploaded files that are no longer referenced
    # by any surviving destination.  All work happens in one session and
    # is committed once at the end (outside this visible span).
    with get_session() as session:
        # TODO use triggers or cascades to delete relations
        for destination in destinations:
            _log.info("Deleting unverified uploads for destination %s", destination)
            # get unverified FilesDestinations for the configured mail_conf
            files_destinations_q = session.query(FilesDestinations)\
                .filter(
                    FilesDestinations.verification_info.is_(None),
                    FilesDestinations.destinations_id == (
                        select([Destination.id]).
                        where(Destination.destination == destination).
                        as_scalar()))
            files_destinations = files_destinations_q.all()
            if not files_destinations:
                continue
            # get FilesContainer.id for containers which are not associated to another destination
            fd1 = aliased(FilesDestinations)
            fd2 = aliased(FilesDestinations)
            files_container_ids_to_delete = [
                f.file_containers_id for f in
                session.query(fd1.file_containers_id)
                .filter(fd1.file_containers_id.in_([fd.file_containers_id for fd in files_destinations]))
                .filter(~exists().where(
                    and_(fd1.file_containers_id == fd2.file_containers_id,
                         fd1.destinations_id != fd2.destinations_id)))
                .all()
            ]
            # will delete all FilesInContainers for containers to be deleted. FIXME could be done in cascade
            files_in_container_q = session.query(FilesInContainers)\
                .filter(FilesInContainers.file_containers_id.in_(files_container_ids_to_delete))
            # get files (and fragments) only present in containers to delete (can be deleted also)
            fic1 = aliased(FilesInContainers)
            fic2 = aliased(FilesInContainers)
            files_to_delete = session.query(fic1)\
                .filter(fic1.file_containers_id.in_(files_container_ids_to_delete))\
                .filter(~exists().where(
                    and_(
                        # same file/fragment
                        fic1.uploaded_files_id == fic2.uploaded_files_id,
                        fic1.uploaded_file_fragment_number == fic2.uploaded_file_fragment_number,
                        # in other container
                        ~fic2.file_containers_id.in_(files_container_ids_to_delete)
                    )))\
                .all()
            # delete fragments
            # FIXME needs to be optimized (using placeholders or something)
            # fragment_number > 0 marks fragment rows; 0 means a whole file
            for file_id, fragment_number in \
                    [(f.uploaded_files_id, f.uploaded_file_fragment_number)
                     for f in files_to_delete if f.uploaded_file_fragment_number > 0]:
                session.query(FileFragment)\
                    .filter(FileFragment.file_id == file_id, FileFragment.fragment_number == fragment_number)\
                    .delete(synchronize_session='fetch')
            # delete uploaded files without fragments
            whole_file_ids = [f.uploaded_files_id for f in files_to_delete if f.uploaded_file_fragment_number == 0]
            if whole_file_ids:
                session.query(UploadedFile)\
                    .filter(UploadedFile.id.in_(whole_file_ids))\
                    .delete(synchronize_session='fetch')
            # delete uploaded files with all their fragments deleted. FIXME optimize
            fragmented_file_ids = [f.uploaded_files_id for f in files_to_delete if f.uploaded_file_fragment_number > 0]
            if fragmented_file_ids:
                session.query(UploadedFile)\
                    .filter(UploadedFile.id.in_(fragmented_file_ids),
                            ~exists().where(FileFragment.file_id == UploadedFile.id))\
                    .delete(synchronize_session='fetch')
            # finally drop the containers themselves and the rows queued above
            session.query(FilesContainer)\
                .filter(FilesContainer.id.in_(files_container_ids_to_delete))\
                .delete(synchronize_session='fetch')
            files_in_container_q.delete(synchronize_session='fetch')
            files_destinations_q.delete(synchronize_session='fetch')
session.commit() | :param destinations: list of Destination.destination
For each Destination.destination:
Deletes all FilesDestinations where the destination is not verified
Deletes each FilesContainer in the deleted FilesDestinations if not present in a non deleted FilesDestinations
Deletes each FileFragment if corresponds to a FilesInContainers for a FilesContainer deleted and not in
a non deleted FilesContainer
Deletes each UploadedFile if corresponds to a FilesInContainers for a FilesContainer deleted and not in
a non deleted FilesContainer and/or has no more FileFragment in non deleted FilesContainer | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/checker/cleanup/helper.py#L14-L106 | [
"def get_session():\n return _shared_session\n"
] | from sqlalchemy import select
from sqlalchemy.orm.util import aliased
from sqlalchemy.sql.expression import exists, and_
from fcb.database.helpers import get_session
from fcb.database.schema import FilesDestinations, Destination, FileFragment, FilesContainer, FilesInContainers, \
UploadedFile
from fcb.utils.log_helper import get_logger_module
_log = get_logger_module('cleanup_helper')
|
MatiasSM/fcb | fcb/utils/trickle.py | TrickleBwShaper.wrap_call | python | def wrap_call(self, call_cmd):
if isinstance(call_cmd, basestring): # FIXME python 3 unsafe
call_cmd = [call_cmd]
return [self._trickle_cmd, "-s"] + self._settings.to_argument_list() + list(call_cmd) | "wraps" the call_cmd so it can be executed by subprocess.call (and related flavors) as "args" argument
:param call_cmd: original args like argument (string or sequence)
:return: a sequence with the original command "executed" under trickle | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/utils/trickle.py#L40-L52 | null | class TrickleBwShaper(object):
_trickle_cmd = "trickle"
"""
Helper class to handle trickle (http://linux.die.net/man/1/trickle) usage
"""
def __init__(self, settings):
self._settings = deepcopy(settings)
self._trickle_cmd = find_executable("trickle")
if self._trickle_cmd is None:
raise RuntimeError("Couldn't find 'trickle' program")
|
MatiasSM/fcb | fcb/framework/workflow/SenderTask.py | SenderTask.do_heavy_work | python | def do_heavy_work(self, block):
    # Send *block* through this sender when the block targets this
    # sender's destinations; the block is returned either way (see the
    # FIXME string below) so chained senders can still try it.
    destinations = self.destinations()
    ''' FIXME currently we return block whether it was correctly processed or not because MailSenders are chained
    and not doing that would mean other wouldn't be able to try.'''
    if not set(destinations).issubset(block.destinations):
        self.log.debug("Block not for any of the associated destinations: %s", destinations)
    else:
        try:
            self.do_send(block)
            # mark the block as sent by this sender
            block.send_destinations.extend(destinations)
            # attach per-destination verification data, if this sender has any
            verif_data = self.verification_data()
            if verif_data is not None:
                for destination in destinations:
                    block.destinations_verif_data[destination] = verif_data
        except SendingError:
            # failure is logged, not re-raised: the block keeps flowing
            self.log.exception("Failed to send block (%s) to destination (%s)", block, destinations)
return block | Note: Expects Compressor Block like objects | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/framework/workflow/SenderTask.py#L10-L32 | null | class SenderTask(HeavyPipelineTask):
# override from HeavyPipelineTask
def destinations(self):
raise NotImplementedError()
def do_send(self, block):
"""
Does the actual sending
:param block: to send
:raise: SendingError if sending fails
"""
raise NotImplementedError()
def verification_data(self):
"""
:return: verification data (if any) or None if no-one (yet)
"""
return None
|
MatiasSM/fcb | fcb/checker/mail.py | CheckHistoryVerifier.set_verified | python | def set_verified(self, msg_info):
    # Record msg_info.msg_id as the verification token on every
    # FilesDestinations row of this file container.
    # Precondition: msg_info must carry 'files_containers_id'.
    assert hasattr(msg_info, 'files_containers_id')
    with self._session_resource as session:
        session.execute(
            update(FilesDestinations)
            .where(FilesDestinations.file_containers_id == msg_info.files_containers_id)
            .values(verification_info=msg_info.msg_id)
        )
self.update_last_checked_time(msg_info) | expects "msg_info" to have the field 'files_containers_id'
This call already executes "update_last_checked_time" so it doesn't need to be called separately | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/checker/mail.py#L73-L87 | [
"def update_last_checked_time(self, msg_info):\n mail_dst = self._mail_dst\n with self._session_resource as session:\n session.execute(\n update(CheckerState)\n .where(and_(CheckerState.last_checked_time < msg_info.msg_date,\n CheckerState.destinatio... | class CheckHistoryVerifier(object):
def __init__(self, mail_dst):
self._session_resource = get_session()
self._log = get_logger_for(self)
self._mail_dst = mail_dst
self._last_checked_date = None
def close(self):
if self._session_resource:
with self._session_resource as session:
session.commit()
session.close()
def _get_last_checked_date(self):
mail_dst = self._mail_dst
if not self._last_checked_date:
value = 0.0
with self._session_resource as session:
try:
result = session \
.query(CheckerState.last_checked_time) \
.join(Destination) \
.filter(Destination.destination == mail_dst) \
.one()
value = float(result.last_checked_time)
except NoResultFound: # no last checked entry
destination = Destination.get_or_add(session, mail_dst)
session.flush() # gen id if necessary
session.add(CheckerState(destinations_id=destination.id, last_checked_time=0))
self._last_checked_date = value
return self._last_checked_date
def is_already_checked(self, msg_info):
last_verif_date = self._get_last_checked_date()
self._log.debug("last_verif_date '%0.4f', msg_info.msg_date '%0.4f'", last_verif_date, msg_info.msg_date)
if last_verif_date == msg_info.msg_date:
self._log.debug("Need to check for message id, last checked time matches the one of the message")
# because the resolution of the date may not be enough, we need to check by msg id
try:
with self._session_resource as session:
session \
.query(FilesDestinations) \
.join(Destination) \
.filter(FilesDestinations.verification_info == msg_info.msg_id) \
.one()
return True
except NoResultFound:
return False
else:
return last_verif_date > msg_info.msg_date
def update_last_checked_time(self, msg_info):
mail_dst = self._mail_dst
with self._session_resource as session:
session.execute(
update(CheckerState)
.where(and_(CheckerState.last_checked_time < msg_info.msg_date,
CheckerState.destinations_id == select([Destination.id]).
where(Destination.destination == mail_dst).
as_scalar()))
.values(last_checked_time=msg_info.msg_date)
)
|
MatiasSM/fcb | fcb/checker/mail.py | Checker.is_uploaded_container | python | def is_uploaded_container(self, msg_info):
    # Walk the MIME parts of msg_info.msg_body looking for an attachment
    # whose name matches a FilesContainer in the DB, then verify its
    # sha1 against the stored value.  Result codes are defined below.
    results = {
        'BAD': -1,
        'NOT_FCB': 0,
        'OK': 1
    }
    for part in msg_info.msg_body.walk():
        if part.is_multipart():
            continue
        """
        if part.get('Content-Disposition') is None:
            print("no content dispo")
            continue
        """
        # text/plain parts are only checked against the FCB body format
        if part.get_content_type() == 'text/plain':
            if self._is_content_from_fcb(part.get_payload()):
                self._log.debug("Body detected as FCB: %s", part.get_payload())
            else:
                self._log.debug("Body doesn't match FCB: %s", part.get_payload())
            continue
        attachment_name = self._get_attachment_name(part)
        if not attachment_name:
            self._log.debug("Couldn't get attachment name. Will ignore the part.")
            continue
        files_container = self._get_files_container_by_name(attachment_name)
        if files_container:
            sha1_in_db = files_container.sha1
            # side effect: record the matched container id on msg_info
            msg_info.files_containers_id = files_container.id
            # NOTE(review): fixed temp filename -- concurrent checkers
            # would clobber each other; confirm single-instance use
            tmp_file = FileInfo(os.path.join(tempfile.gettempdir(), "downloaded.tmp"))
            fp = open(tmp_file.path, 'wb')
            fp.write(part.get_payload(decode=1))
            fp.flush()
            fp.close()
            if tmp_file.sha1 == sha1_in_db:
                self._log.info("File container '%s' verified!", attachment_name)
                result = results['OK']
            else:
                self._log.error("File container '%s' doesn't match the sha1 sum. Expected '%s' but got '%s'",
                                attachment_name, sha1_in_db, tmp_file.sha1)
                result = results['BAD']
            os.remove(tmp_file.path)
            return result
        else:
            self._log.debug("Attached file '%s' not found in DB. Will ignore this mail.", attachment_name)
return results['NOT_FCB'] | returns 0 if it doesn't correspond to an uploaded container
-1 if it corresponds to an uploaded container but it is corrupted
1 if it corresponds to an uploaded container and is OK | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/checker/mail.py#L170-L224 | null | class Checker(object):
def __init__(self):
self._session_resource = None
self._log = get_logger_for(self)
file_lines = "(^File: .+\\(sha1: .*\\).*\n)+"
empty_lines = "^(?:\n|\r\n?)*"
self.is_content_from_fcb_regex = re.compile(''.join((
'^\* Content:.*\n',
file_lines,
empty_lines,
'^\* Tar:.*\n',
file_lines,
empty_lines,
'^\* Attached:.*\n',
file_lines
)), re.MULTILINE)
def close(self):
if self._session_resource:
with self._session_resource as session:
session.commit()
session.close()
def _is_content_from_fcb(self, content):
return self.is_content_from_fcb_regex.match(content) is not None
@staticmethod
def _get_attachment_name(part):
dtypes = part.get_params(None, 'Content-Disposition')
if not dtypes:
if part.get_content_type() == 'text/plain':
return None
ctypes = part.get_params()
if not ctypes:
return None
for key, val in ctypes:
if key.lower() == 'name':
print "Attachment (NO DTYPE): '%s'" % val
return val
else:
return None
else:
attachment, filename = None, None
for key, val in dtypes:
key = key.lower()
if key == 'filename':
filename = val
if key == 'attachment':
attachment = 1
if not attachment:
return None
print "Attachment: '%s'" % filename
return filename
def _get_files_container_by_name(self, file_name):
if not self._session_resource:
self._session_resource = get_session()
try:
with self._session_resource as session:
return session \
.query(FilesContainer) \
.filter(FilesContainer.file_name == file_name) \
.one()
except NoResultFound:
return None
# otherwise it would have already returned
|
MatiasSM/fcb | fcb/processing/models/FileInfo.py | FileInfo.size | python | def size(self):
if not self._size:
self._size = os.path.getsize(self._path)
return self._size | size in bytes | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/models/FileInfo.py#L27-L31 | null | class FileInfo:
def __init__(self, path):
self._path = path
self._sha1 = None
self._size = None
@property
def path(self):
return self._path
@property
def upath(self):
return self._path.decode("utf-8")
@property
def sha1(self):
if not self._sha1:
self._sha1 = digest.gen_sha1(self._path)
return self._sha1
@property
@property
def basename(self):
return os.path.basename(self._path)
def __str__(self):
return "{} (sha1 '{}')".format(self._path, str(self._sha1))
|
MatiasSM/fcb | fcb/processing/transformations/Cipher.py | Cipher.do_heavy_work | python | def do_heavy_work(self, block):
cipher_key = self.gen_key(32)
in_file_path = block.latest_file_info.path
dst_file_path = block.processed_data_file_info.path + self.get_extension()
self.log.debug("Encrypting file '%s' with key '%s' to file '%s'",
in_file_path, cipher_key, dst_file_path)
self.encrypt_file(key=cipher_key,
in_filename=in_file_path,
out_filename=dst_file_path)
block.cipher_key = cipher_key
block.ciphered_file_info = FileInfo(dst_file_path)
block.latest_file_info = block.ciphered_file_info
return block | Expects Compressor Block like objects | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/Cipher.py#L27-L42 | null | class Cipher(HeavyPipelineTask):
@classmethod
def get_extension(cls):
return ".enc"
@classmethod
def is_transformed(cls, path):
return path.endswith(cls.get_extension())
# override from HeavyPipelineTask
# override from HeavyPipelineTask
def get_worker_channel(self):
return _worker_pool.get_worker()
@classmethod
def gen_key(cls, size):
return ''.join(random.choice("".join((string.letters, string.digits, string.punctuation))) for _ in range(size))
@classmethod
def encrypt_file(cls, key, in_filename, out_filename=None, chunksize=64 * 1024):
""" Encrypts a file using AES (CBC mode) with the
given key.
key:
The encryption key - a string that must be
either 16, 24 or 32 bytes long. Longer keys
are more secure.
in_filename:
Name of the input file
out_filename:
If None, '<in_filename>.enc' will be used.
chunksize:
Sets the size of the chunk which the function
uses to read and encrypt the file. Larger chunk
sizes can be faster for some files and machines.
chunksize must be divisible by 16.
"""
if not out_filename:
out_filename = in_filename + '.enc'
iv = ''.join(chr(random.randint(0, 0xFF)) for _ in range(16))
encryptor = AES.new(key, AES.MODE_CBC, iv)
filesize = os.path.getsize(in_filename)
with open(in_filename, 'rb') as infile:
with open(out_filename, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += ' ' * (16 - len(chunk) % 16)
outfile.write(encryptor.encrypt(chunk))
@classmethod
def decrypt_file(cls, key, in_filename, out_filename=None, chunksize=24 * 1024):
""" Decrypts a file using AES (CBC mode) with the
given key. Parameters are similar to encrypt_file,
with one difference: out_filename, if not supplied
will be in_filename without its last extension
(i.e. if in_filename is 'aaa.zip.enc' then
out_filename will be 'aaa.zip')
"""
if not out_filename:
out_filename = os.path.splitext(in_filename)[0]
with open(in_filename, 'rb') as infile:
origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
iv = infile.read(16)
decryptor = AES.new(key, AES.MODE_CBC, iv)
with open(out_filename, 'wb') as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(origsize)
|
MatiasSM/fcb | fcb/processing/transformations/Cipher.py | Cipher.encrypt_file | python | def encrypt_file(cls, key, in_filename, out_filename=None, chunksize=64 * 1024):
if not out_filename:
out_filename = in_filename + '.enc'
iv = ''.join(chr(random.randint(0, 0xFF)) for _ in range(16))
encryptor = AES.new(key, AES.MODE_CBC, iv)
filesize = os.path.getsize(in_filename)
with open(in_filename, 'rb') as infile:
with open(out_filename, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += ' ' * (16 - len(chunk) % 16)
outfile.write(encryptor.encrypt(chunk)) | Encrypts a file using AES (CBC mode) with the
given key.
key:
The encryption key - a string that must be
either 16, 24 or 32 bytes long. Longer keys
are more secure.
in_filename:
Name of the input file
out_filename:
If None, '<in_filename>.enc' will be used.
chunksize:
Sets the size of the chunk which the function
uses to read and encrypt the file. Larger chunk
sizes can be faster for some files and machines.
chunksize must be divisible by 16. | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/Cipher.py#L53-L93 | null | class Cipher(HeavyPipelineTask):
@classmethod
def get_extension(cls):
return ".enc"
@classmethod
def is_transformed(cls, path):
return path.endswith(cls.get_extension())
# override from HeavyPipelineTask
def do_heavy_work(self, block):
"""
Expects Compressor Block like objects
"""
cipher_key = self.gen_key(32)
in_file_path = block.latest_file_info.path
dst_file_path = block.processed_data_file_info.path + self.get_extension()
self.log.debug("Encrypting file '%s' with key '%s' to file '%s'",
in_file_path, cipher_key, dst_file_path)
self.encrypt_file(key=cipher_key,
in_filename=in_file_path,
out_filename=dst_file_path)
block.cipher_key = cipher_key
block.ciphered_file_info = FileInfo(dst_file_path)
block.latest_file_info = block.ciphered_file_info
return block
# override from HeavyPipelineTask
def get_worker_channel(self):
return _worker_pool.get_worker()
@classmethod
def gen_key(cls, size):
return ''.join(random.choice("".join((string.letters, string.digits, string.punctuation))) for _ in range(size))
@classmethod
def encrypt_file(cls, key, in_filename, out_filename=None, chunksize=64 * 1024):
""" Encrypts a file using AES (CBC mode) with the
given key.
key:
The encryption key - a string that must be
either 16, 24 or 32 bytes long. Longer keys
are more secure.
in_filename:
Name of the input file
out_filename:
If None, '<in_filename>.enc' will be used.
chunksize:
Sets the size of the chunk which the function
uses to read and encrypt the file. Larger chunk
sizes can be faster for some files and machines.
chunksize must be divisible by 16.
"""
if not out_filename:
out_filename = in_filename + '.enc'
iv = ''.join(chr(random.randint(0, 0xFF)) for _ in range(16))
encryptor = AES.new(key, AES.MODE_CBC, iv)
filesize = os.path.getsize(in_filename)
with open(in_filename, 'rb') as infile:
with open(out_filename, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += ' ' * (16 - len(chunk) % 16)
outfile.write(encryptor.encrypt(chunk))
@classmethod
def decrypt_file(cls, key, in_filename, out_filename=None, chunksize=24 * 1024):
""" Decrypts a file using AES (CBC mode) with the
given key. Parameters are similar to encrypt_file,
with one difference: out_filename, if not supplied
will be in_filename without its last extension
(i.e. if in_filename is 'aaa.zip.enc' then
out_filename will be 'aaa.zip')
"""
if not out_filename:
out_filename = os.path.splitext(in_filename)[0]
with open(in_filename, 'rb') as infile:
origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
iv = infile.read(16)
decryptor = AES.new(key, AES.MODE_CBC, iv)
with open(out_filename, 'wb') as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(origsize)
|
MatiasSM/fcb | fcb/processing/transformations/Cipher.py | Cipher.decrypt_file | python | def decrypt_file(cls, key, in_filename, out_filename=None, chunksize=24 * 1024):
if not out_filename:
out_filename = os.path.splitext(in_filename)[0]
with open(in_filename, 'rb') as infile:
origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
iv = infile.read(16)
decryptor = AES.new(key, AES.MODE_CBC, iv)
with open(out_filename, 'wb') as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(origsize) | Decrypts a file using AES (CBC mode) with the
given key. Parameters are similar to encrypt_file,
with one difference: out_filename, if not supplied
will be in_filename without its last extension
(i.e. if in_filename is 'aaa.zip.enc' then
out_filename will be 'aaa.zip') | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/Cipher.py#L96-L119 | null | class Cipher(HeavyPipelineTask):
@classmethod
def get_extension(cls):
return ".enc"
@classmethod
def is_transformed(cls, path):
return path.endswith(cls.get_extension())
# override from HeavyPipelineTask
def do_heavy_work(self, block):
"""
Expects Compressor Block like objects
"""
cipher_key = self.gen_key(32)
in_file_path = block.latest_file_info.path
dst_file_path = block.processed_data_file_info.path + self.get_extension()
self.log.debug("Encrypting file '%s' with key '%s' to file '%s'",
in_file_path, cipher_key, dst_file_path)
self.encrypt_file(key=cipher_key,
in_filename=in_file_path,
out_filename=dst_file_path)
block.cipher_key = cipher_key
block.ciphered_file_info = FileInfo(dst_file_path)
block.latest_file_info = block.ciphered_file_info
return block
# override from HeavyPipelineTask
def get_worker_channel(self):
return _worker_pool.get_worker()
@classmethod
def gen_key(cls, size):
return ''.join(random.choice("".join((string.letters, string.digits, string.punctuation))) for _ in range(size))
@classmethod
def encrypt_file(cls, key, in_filename, out_filename=None, chunksize=64 * 1024):
""" Encrypts a file using AES (CBC mode) with the
given key.
key:
The encryption key - a string that must be
either 16, 24 or 32 bytes long. Longer keys
are more secure.
in_filename:
Name of the input file
out_filename:
If None, '<in_filename>.enc' will be used.
chunksize:
Sets the size of the chunk which the function
uses to read and encrypt the file. Larger chunk
sizes can be faster for some files and machines.
chunksize must be divisible by 16.
"""
if not out_filename:
out_filename = in_filename + '.enc'
iv = ''.join(chr(random.randint(0, 0xFF)) for _ in range(16))
encryptor = AES.new(key, AES.MODE_CBC, iv)
filesize = os.path.getsize(in_filename)
with open(in_filename, 'rb') as infile:
with open(out_filename, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += ' ' * (16 - len(chunk) % 16)
outfile.write(encryptor.encrypt(chunk))
@classmethod
|
MatiasSM/fcb | fcb/processing/filters/QuotaFilter.py | QuotaFilter.process_data | python | def process_data(self, file_info):
if self._has_reached_stop_limit():
self.log.info("Remaining bytes in quota (%d) has reached minimum to request stop (%d)",
self._quota.remaining, self._stop_on_remaining)
self.fire(events.TransmissionQuotaReached())
elif not self._fits_in_quota(file_info):
self.log.debug("File would exceed quota. Won't process '%s'", str(file_info))
else:
return file_info
self.fire(events.FilteredFile(file_info)) | expects FileInfo | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/filters/QuotaFilter.py#L16-L26 | null | class QuotaFilter(PipelineTask):
_quota = None
_stop_on_remaining = None
    def do_init(self, global_quota, stop_on_remaining):
        """Configure the filter.

        :param global_quota: quota object shared across senders; files that do not fit are filtered.
        :param stop_on_remaining: remaining-bytes threshold at which a TransmissionQuotaReached event is fired.
        """
        self._quota = global_quota
        self._stop_on_remaining = stop_on_remaining
# override from PipelineTask
    def _fits_in_quota(self, file_info):
        """Return True when file_info still fits in the shared quota."""
        return self._quota.fits(file_info)
def _has_reached_stop_limit(self):
return not self._quota.is_infinite() and self._quota.remaining <= self._stop_on_remaining
|
MatiasSM/fcb | fcb/processing/filesystem/Compressor.py | _BlockFragmenter.can_add_new_content | python | def can_add_new_content(self, block, file_info):
return ((self._max_files_per_container == 0 or self._max_files_per_container > len(block.content_file_infos))
and (self.does_content_fit(file_info, block)
or
# check if we can fit some content by splitting the file
# Note: if max size was unlimited, does_content_fit would have been True
(block.content_size < self._max_container_content_size_in_bytes
and (self._should_split_small_files or not self._is_small_file(file_info))))) | new content from file_info can be added into block iff
- file count limit hasn't been reached for the block
- there is enough space to completely fit the info into the block
- OR the info can be split and some info can fit into the block | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/filesystem/Compressor.py#L110-L123 | [
"def does_content_fit(self, file_info, block):\n return (self._max_container_content_size_in_bytes == 0\n or file_info.size + block.content_size <= self._max_container_content_size_in_bytes)\n"
] | class _BlockFragmenter(object):
"""
Handles the logic to check if/how new content can be fit into a block
"""
    def __init__(self, sender_spec, should_split_small_files, global_quota):
        """
        :param sender_spec: provides restrictions (container content size cap, files-per-container cap,
                            daily upload cap) and the bytes already uploaded today.
        :param should_split_small_files: whether files that fit whole in a block may still be split.
        :param global_quota: quota shared among senders; a per-sender daily quota is kept alongside it.
        """
        self.log = get_logger_for(self)
        self._max_container_content_size_in_bytes = sender_spec.restrictions.max_container_content_size_in_bytes
        self._max_files_per_container = sender_spec.restrictions.max_files_per_container
        self._should_split_small_files = should_split_small_files
        self._global_quota = global_quota
        self._specific_quota = Quota(quota_limit=sender_spec.restrictions.max_upload_per_day_in_bytes,
                                     be_thread_safe=False,  # will only be accessed by this instance
                                     used_quota=sender_spec.bytes_uploaded_today)
    def does_fit_in_todays_share(self, file_info):
        """True only when file_info fits in both the global and the per-sender daily quota."""
        return self._global_quota.fits(file_info) and self._specific_quota.fits(file_info)
def get_fragments_spec(self, block):
class Spec(object):
def __init__(self, block_cur_size, max_container_content_size):
self.first = max_container_content_size - block_cur_size
self.remaining = max_container_content_size
return Spec(block.content_size, self._max_container_content_size_in_bytes)
    def account_block(self, block):
        """Charge the block's processed file size against both the global and per-sender quotas."""
        self._global_quota.account_used(block.processed_data_file_info)
        self._specific_quota.account_used(block.processed_data_file_info)
        self.log.debug("Total (pending to be) uploaded today (global: %s, specific: %s)",
                       self.sizeof_fmt(self._global_quota.used), self.sizeof_fmt(self._specific_quota.used))
@staticmethod
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(num) < 1000.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1000.0
return "%.1f%s%s" % (num, 'Y', suffix)
def has_space_left(self, block):
return self._max_container_content_size_in_bytes == 0 \
or self._max_container_content_size_in_bytes > block.content_size
def does_content_fit(self, file_info, block):
return (self._max_container_content_size_in_bytes == 0
or file_info.size + block.content_size <= self._max_container_content_size_in_bytes)
    @property
    def bytes_uploaded_today(self):
        """Bytes accounted so far against the per-sender (daily) quota."""
        return self._specific_quota.used
    @property
    def max_upload_per_day_in_bytes(self):
        """Effective daily upload cap: the smaller of the global and per-sender
        limits, ignoring whichever quota is infinite."""
        # the limit will be the min of non zero global and specific quotas
        if self._global_quota.is_infinite() \
                or (not self._specific_quota.is_infinite() and self._global_quota.limit > self._specific_quota.limit):
            return self._specific_quota.limit
        else:
            return self._global_quota.limit
def _is_small_file(self, file_info):
"""
A file is considered as "small" if its content can fit into a (empty) block
"""
return self._max_container_content_size_in_bytes != 0 \
and self._max_container_content_size_in_bytes >= file_info.size
|
MatiasSM/fcb | fcb/processing/filesystem/Compressor.py | Compressor.do_init | python | def do_init(self, fs_settings, global_quota):
fs_settings = deepcopy(fs_settings) # because we store some of the info, we need a deep copy
'''
If the same restrictions are applied for many destinations, we use the same job to avoid processing
files twice
'''
for sender_spec in fs_settings.sender_specs:
restrictions = sender_spec.restrictions
if restrictions in self.restriction_to_job:
self.restriction_to_job[restrictions].add_destinations(sender_spec.destinations)
else:
compressor = _CompressorJob(
next_task=self.get_next_task(),
sender_spec=sender_spec,
tmp_file_parts_basepath=fs_settings.tmp_file_parts_basepath,
should_split_small_files=fs_settings.should_split_small_files,
global_quota=global_quota)
self.restriction_to_job[restrictions] = compressor
compressor.register(self) | If the same restrictions are applied for many destinations, we use the same job to avoid processing
files twice | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/filesystem/Compressor.py#L324-L342 | null | class Compressor(PipelineTask):
restriction_to_job = {} # keeps a map sender_spec.restrictions -> _CompressorJob
    def next_task(self, next_task):
        """Propagate the next pipeline task to every compressor job, then to this task itself."""
        for job in self.restriction_to_job.values():
            job.next_task(next_task)
        PipelineTask.next_task(self, next_task)
# override from PipelineTask
    def process_data(self, file_info):
        """Hand file_info to every compressor job (one job per distinct restriction set)."""
        for job in self.restriction_to_job.values():
            self.log.debug("Processing file by: {}".format(job.name))
            job.process_data(file_info)
    @handler(events.FlushPendings.__name__)
    def on_flush(self):
        """On a FlushPendings event, flush every job's pending content (no-op when disabled)."""
        if not self.is_disabled:
            for job in self.restriction_to_job.values():
                job.flush()
|
MatiasSM/fcb | fcb/sending/SentLog.py | SentLog.process_data | python | def process_data(self, block):
if hasattr(block, 'send_destinations') and block.send_destinations:
self.fire(events.FileProcessed(block))
self._log_in_db(block)
if self._sent_log_file:
self._log_in_sent_log(block)
self.log.info("Sent to '%s' file '%s' containing files: %s",
str(block.send_destinations),
block.processed_data_file_info.basename,
str([file_info.path for file_info in block.content_file_infos]))
else:
self.log.info("File %s wasn't sent", block.processed_data_file_info.basename)
return block | expects Block from Compressor | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/sending/SentLog.py#L19-L32 | null | class SentLog(PipelineTask):
_session_resource = None
_sent_log_file = None
    def do_init(self, sent_log):
        """Open the optional plain-text sent log in append mode; the DB session is created lazily."""
        self._session_resource = None
        self._sent_log_file = open(sent_log, 'a') if sent_log else None
# override from PipelineTask
# override from PipelineTask
    def on_stopped(self):
        """On pipeline stop: close the plain-text log and commit/close the DB session (if created)."""
        if self._sent_log_file:
            self._sent_log_file.close()
        if self._session_resource:
            with self._session_resource as session:
                session.commit()
                session.close()
# --- low visibility methods ------------------------------
    @staticmethod
    def _get_uploaded_file(session, file_info, fragment_count=0):
        """
        :param session: locked session (with self._session_resource as >> session <<)
        :param file_info: contains file information to save or query
        :param fragment_count: amount of fragments associated to the file
        :return: an UploadedFile associated to the file_info
        """
        try:
            # look up by content hash; .one() raises NoResultFound for unseen files
            return session.query(UploadedFile).filter(UploadedFile.sha1 == file_info.sha1).one()
        except NoResultFound:
            # not seen before: add a pending row (the caller flushes/commits the session)
            new_instance = UploadedFile(
                sha1=file_info.sha1,
                file_name=file_info.upath,
                fragment_count=fragment_count
            )
            session.add(new_instance)
            return new_instance
    def _log_in_db(self, block):
        """Persist the sent block: its container row, one row per destination, and
        each contained file (or file fragment) linked to the container."""
        if not self._session_resource:
            self._session_resource = get_session()
        with self._session_resource as session:
            session.autoflush = False  # to avoid IntegrityError raised during testing
            sent_file_info = block.latest_file_info
            # a new container has been saved
            file_container = FilesContainer(
                sha1=sent_file_info.sha1,
                file_name=sent_file_info.basename,
                encryption_key=block.cipher_key if hasattr(block, 'cipher_key') else '',
                container_size=sent_file_info.size
            )
            session.add(file_container)
            ''' FIXME we need the container id because file_destination is not getting it
            (not working example of SQLAlchemy) '''
            session.flush()  # get container id
            # associate destinations to the container
            for destination in block.send_destinations if hasattr(block, 'send_destinations') else []:
                file_destination = FilesDestinations()
                file_destination.destination = Destination.get_or_add(session, destination)
                # FIXME according to the example in SQLAlchemy, this shouldn't be needed
                file_destination.file_containers_id = file_container.id
                if hasattr(block, 'destinations_verif_data') and destination in block.destinations_verif_data:
                    file_destination.verification_info = block.destinations_verif_data[destination]
                file_container.files_destinations.append(file_destination)
            # save/update each file in the container
            for file_info in block.content_file_infos:
                uploaded_file_fragment_number = 0
                if hasattr(file_info, 'fragment_info'):  # check if it is a fragment
                    uploaded_file_fragment_number = file_info.fragment_info.fragment_num
                    uploaded_file = \
                        self._get_uploaded_file(
                            session=session,
                            file_info=file_info.fragment_info.file_info,
                            fragment_count=file_info.fragment_info.fragments_count)
                    # save a new fragment for the file
                    file_fragment = FileFragment(
                        fragment_sha1=file_info.sha1,
                        fragment_name=file_info.upath,
                        fragment_number=file_info.fragment_info.fragment_num
                    )
                    uploaded_file.fragments.append(file_fragment)
                else:  # not fragmented file
                    uploaded_file = self._get_uploaded_file(session=session, file_info=file_info)
                session.flush()  # if uploaded_file has no id, we need one
                file_in_container_assoc = FilesInContainers(
                    uploaded_file_fragment_number=uploaded_file_fragment_number,
                    uploaded_files_id=uploaded_file.id
                )
                file_in_container_assoc.container_file = file_container
                file_container.fragments.append(file_in_container_assoc)
            session.commit()
    def _log_in_sent_log(self, block):
        """ Logs:
        block file | bfile (not encrypted) sha1 [| bfile encryption key | encrypted bfile sha1]
        <tab>content file | cfile sha1 | part of parts [| whole file sha1]
        """
        # FIXME all these should be done by the block itself
        self._sent_log_file.write("\n")
        sent_file_info = block.latest_file_info
        # container line: name | sha1 [| key | ciphered sha1]
        self._sent_log_file.write("|".join((sent_file_info.basename, sent_file_info.sha1)))
        if hasattr(block, 'cipher_key'):
            self._sent_log_file.write("|")
            self._sent_log_file.write("|".join((block.cipher_key, block.ciphered_file_info.sha1)))
        # one tab-indented line per contained file (with fragment info when applicable)
        for file_info in block.content_file_infos:
            self._sent_log_file.write("\n\t")
            self._sent_log_file.write("|".join((file_info.path, file_info.sha1)))
            if hasattr(file_info, 'fragment_info'):  # check if it is a fragment
                self._sent_log_file.write("|")
                self._sent_log_file.write("|".join((
                    "%d of %d" % (file_info.fragment_info.fragment_num, file_info.fragment_info.fragments_count),
                    file_info.fragment_info.file_info.sha1)))
|
MatiasSM/fcb | fcb/sending/SentLog.py | SentLog._get_uploaded_file | python | def _get_uploaded_file(session, file_info, fragment_count=0):
try:
return session.query(UploadedFile).filter(UploadedFile.sha1 == file_info.sha1).one()
except NoResultFound:
new_instance = UploadedFile(
sha1=file_info.sha1,
file_name=file_info.upath,
fragment_count=fragment_count
)
session.add(new_instance)
return new_instance | :param session: locked session (with self._session_resource as >> session <<)
:param file_info: contains file information to save or query
:param fragment_count: amount of fragments associated to the file
:return: an UploadedFile associated to the file_info | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/sending/SentLog.py#L45-L61 | null | class SentLog(PipelineTask):
_session_resource = None
_sent_log_file = None
def do_init(self, sent_log):
self._session_resource = None
self._sent_log_file = open(sent_log, 'a') if sent_log else None
# override from PipelineTask
def process_data(self, block):
"""expects Block from Compressor"""
if hasattr(block, 'send_destinations') and block.send_destinations:
self.fire(events.FileProcessed(block))
self._log_in_db(block)
if self._sent_log_file:
self._log_in_sent_log(block)
self.log.info("Sent to '%s' file '%s' containing files: %s",
str(block.send_destinations),
block.processed_data_file_info.basename,
str([file_info.path for file_info in block.content_file_infos]))
else:
self.log.info("File %s wasn't sent", block.processed_data_file_info.basename)
return block
# override from PipelineTask
def on_stopped(self):
if self._sent_log_file:
self._sent_log_file.close()
if self._session_resource:
with self._session_resource as session:
session.commit()
session.close()
# --- low visibility methods ------------------------------
@staticmethod
def _log_in_db(self, block):
if not self._session_resource:
self._session_resource = get_session()
with self._session_resource as session:
session.autoflush = False # to avoid IntegrityError raised during testing
sent_file_info = block.latest_file_info
# a new container has been saved
file_container = FilesContainer(
sha1=sent_file_info.sha1,
file_name=sent_file_info.basename,
encryption_key=block.cipher_key if hasattr(block, 'cipher_key') else '',
container_size=sent_file_info.size
)
session.add(file_container)
''' FIXME we need the container id because file_destination is not getting it
(not working example of SQLAlchemy) '''
session.flush() # get container id
# associate destinations to the container
for destination in block.send_destinations if hasattr(block, 'send_destinations') else []:
file_destination = FilesDestinations()
file_destination.destination = Destination.get_or_add(session, destination)
# FIXME according to the example in SQLAlchemy, this shouldn't be needed
file_destination.file_containers_id = file_container.id
if hasattr(block, 'destinations_verif_data') and destination in block.destinations_verif_data:
file_destination.verification_info = block.destinations_verif_data[destination]
file_container.files_destinations.append(file_destination)
# save/update each file in the container
for file_info in block.content_file_infos:
uploaded_file_fragment_number = 0
if hasattr(file_info, 'fragment_info'): # check if it is a fragment
uploaded_file_fragment_number = file_info.fragment_info.fragment_num
uploaded_file = \
self._get_uploaded_file(
session=session,
file_info=file_info.fragment_info.file_info,
fragment_count=file_info.fragment_info.fragments_count)
# save a new fragment for the file
file_fragment = FileFragment(
fragment_sha1=file_info.sha1,
fragment_name=file_info.upath,
fragment_number=file_info.fragment_info.fragment_num
)
uploaded_file.fragments.append(file_fragment)
else: # not fragmented file
uploaded_file = self._get_uploaded_file(session=session, file_info=file_info)
session.flush() # if uploaded_file has no id, we need one
file_in_container_assoc = FilesInContainers(
uploaded_file_fragment_number=uploaded_file_fragment_number,
uploaded_files_id=uploaded_file.id
)
file_in_container_assoc.container_file = file_container
file_container.fragments.append(file_in_container_assoc)
session.commit()
def _log_in_sent_log(self, block):
""" Logs:
block file | bfile (not encrypted) sha1 [| bfile encryption key | encrypted bfile sha1]
<tab>content file | cfile sha1 | part of parts [| whole file sha1]
"""
# FIXME all these should be done by the block itself
self._sent_log_file.write("\n")
sent_file_info = block.latest_file_info
self._sent_log_file.write("|".join((sent_file_info.basename, sent_file_info.sha1)))
if hasattr(block, 'cipher_key'):
self._sent_log_file.write("|")
self._sent_log_file.write("|".join((block.cipher_key, block.ciphered_file_info.sha1)))
for file_info in block.content_file_infos:
self._sent_log_file.write("\n\t")
self._sent_log_file.write("|".join((file_info.path, file_info.sha1)))
if hasattr(file_info, 'fragment_info'): # check if it is a fragment
self._sent_log_file.write("|")
self._sent_log_file.write("|".join((
"%d of %d" % (file_info.fragment_info.fragment_num, file_info.fragment_info.fragments_count),
file_info.fragment_info.file_info.sha1)))
|
MatiasSM/fcb | fcb/sending/SentLog.py | SentLog._log_in_sent_log | python | def _log_in_sent_log(self, block):
# FIXME all these should be done by the block itself
self._sent_log_file.write("\n")
sent_file_info = block.latest_file_info
self._sent_log_file.write("|".join((sent_file_info.basename, sent_file_info.sha1)))
if hasattr(block, 'cipher_key'):
self._sent_log_file.write("|")
self._sent_log_file.write("|".join((block.cipher_key, block.ciphered_file_info.sha1)))
for file_info in block.content_file_infos:
self._sent_log_file.write("\n\t")
self._sent_log_file.write("|".join((file_info.path, file_info.sha1)))
if hasattr(file_info, 'fragment_info'): # check if it is a fragment
self._sent_log_file.write("|")
self._sent_log_file.write("|".join((
"%d of %d" % (file_info.fragment_info.fragment_num, file_info.fragment_info.fragments_count),
file_info.fragment_info.file_info.sha1))) | Logs:
block file | bfile (not encrypted) sha1 [| bfile encryption key | encrypted bfile sha1]
<tab>content file | cfile sha1 | part of parts [| whole file sha1] | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/sending/SentLog.py#L125-L144 | null | class SentLog(PipelineTask):
_session_resource = None
_sent_log_file = None
def do_init(self, sent_log):
self._session_resource = None
self._sent_log_file = open(sent_log, 'a') if sent_log else None
# override from PipelineTask
def process_data(self, block):
"""expects Block from Compressor"""
if hasattr(block, 'send_destinations') and block.send_destinations:
self.fire(events.FileProcessed(block))
self._log_in_db(block)
if self._sent_log_file:
self._log_in_sent_log(block)
self.log.info("Sent to '%s' file '%s' containing files: %s",
str(block.send_destinations),
block.processed_data_file_info.basename,
str([file_info.path for file_info in block.content_file_infos]))
else:
self.log.info("File %s wasn't sent", block.processed_data_file_info.basename)
return block
# override from PipelineTask
def on_stopped(self):
if self._sent_log_file:
self._sent_log_file.close()
if self._session_resource:
with self._session_resource as session:
session.commit()
session.close()
# --- low visibility methods ------------------------------
@staticmethod
def _get_uploaded_file(session, file_info, fragment_count=0):
"""
:param session: locked session (with self._session_resource as >> session <<)
:param file_info: contains file information to save or query
:param fragment_count: amount of fragments associated to the file
:return: an UploadedFile associated to the file_info
"""
try:
return session.query(UploadedFile).filter(UploadedFile.sha1 == file_info.sha1).one()
except NoResultFound:
new_instance = UploadedFile(
sha1=file_info.sha1,
file_name=file_info.upath,
fragment_count=fragment_count
)
session.add(new_instance)
return new_instance
def _log_in_db(self, block):
if not self._session_resource:
self._session_resource = get_session()
with self._session_resource as session:
session.autoflush = False # to avoid IntegrityError raised during testing
sent_file_info = block.latest_file_info
# a new container has been saved
file_container = FilesContainer(
sha1=sent_file_info.sha1,
file_name=sent_file_info.basename,
encryption_key=block.cipher_key if hasattr(block, 'cipher_key') else '',
container_size=sent_file_info.size
)
session.add(file_container)
''' FIXME we need the container id because file_destination is not getting it
(not working example of SQLAlchemy) '''
session.flush() # get container id
# associate destinations to the container
for destination in block.send_destinations if hasattr(block, 'send_destinations') else []:
file_destination = FilesDestinations()
file_destination.destination = Destination.get_or_add(session, destination)
# FIXME according to the example in SQLAlchemy, this shouldn't be needed
file_destination.file_containers_id = file_container.id
if hasattr(block, 'destinations_verif_data') and destination in block.destinations_verif_data:
file_destination.verification_info = block.destinations_verif_data[destination]
file_container.files_destinations.append(file_destination)
# save/update each file in the container
for file_info in block.content_file_infos:
uploaded_file_fragment_number = 0
if hasattr(file_info, 'fragment_info'): # check if it is a fragment
uploaded_file_fragment_number = file_info.fragment_info.fragment_num
uploaded_file = \
self._get_uploaded_file(
session=session,
file_info=file_info.fragment_info.file_info,
fragment_count=file_info.fragment_info.fragments_count)
# save a new fragment for the file
file_fragment = FileFragment(
fragment_sha1=file_info.sha1,
fragment_name=file_info.upath,
fragment_number=file_info.fragment_info.fragment_num
)
uploaded_file.fragments.append(file_fragment)
else: # not fragmented file
uploaded_file = self._get_uploaded_file(session=session, file_info=file_info)
session.flush() # if uploaded_file has no id, we need one
file_in_container_assoc = FilesInContainers(
uploaded_file_fragment_number=uploaded_file_fragment_number,
uploaded_files_id=uploaded_file.id
)
file_in_container_assoc.container_file = file_container
file_container.fragments.append(file_in_container_assoc)
session.commit()
|
MatiasSM/fcb | fcb/sending/mega/helpers.py | MegaAccountHandler.execute_command | python | def execute_command(cls, command):
return Popen(
command,
stdout=PIPE,
stderr=STDOUT,
start_new_session=True
) | :return: Popen object with stderr redirected to stdout | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/sending/mega/helpers.py#L73-L82 | null | class MegaAccountHandler(object):
dir_delimiter = "/"
dev_null = open(os.devnull, 'wb')
    @classmethod
    def to_absoulte_dst_path(cls, settings):
        """Normalize settings.dst_path: strip a trailing '/' and prefix '/Root' when missing.

        An empty/None dst_path is returned unchanged.
        Note: the misspelled method name is kept as-is for caller compatibility.
        """
        dst_path = settings.dst_path
        if dst_path:
            if dst_path[-1] == MegaAccountHandler.dir_delimiter:
                dst_path = dst_path[:-1]
            root_path = "/Root"
            if not dst_path.startswith(root_path):
                dst_path = MegaAccountHandler.dir_delimiter.join((root_path, dst_path))
        return dst_path
    @classmethod
    def create_dest_directories(cls, settings):
        """Create (via a single megamkdir invocation) every directory along the destination path."""
        dst_path = cls.to_absoulte_dst_path(settings)
        log = get_logger_module(cls.__name__)
        # we generate each directory from root so we create all that are missing
        splited = dst_path.split(cls.dir_delimiter)
        subdirs = []
        # note: first 2 will be "" and root directory (since absolute path starts with "/<root>")
        for included in xrange(3, len(splited) + 1):
            subdirs.append(cls.dir_delimiter.join(splited[:included]))
        if subdirs:
            command = cls.build_command_argumetns(command_str="megamkdir", settings=settings, extra_args=subdirs)
            log.debug("Executing command: %s", command)
            call(command, start_new_session=True)
    @classmethod
    def verify_access(cls, settings):
        """Probe the account with megadf; raise DestinationInaccessible when it fails."""
        log = get_logger_module(cls.__name__)
        # try megadf to check if we can access
        command = cls.build_command_argumetns(command_str="megadf", settings=settings)
        log.debug("Executing command: %s", command)
        try:
            cls.check_call(command)
        except CalledProcessError as e:
            raise DestinationInaccessible("Failed access. Running '%s' result was '%s'", command, e.output)
        log.debug("Access verified to destination mega (command: %s)", command)
    @staticmethod
    def build_command_argumetns(command_str, settings, extra_args=None):
        """Build the argv list for a megatools command.

        Credentials are included only when both user and password are set.
        Note: the misspelled method name is kept as-is for caller compatibility.
        """
        general_args = ["--no-ask-password", "--disable-previews"]
        return ([command_str] if settings.user is None or settings.password is None else
                [command_str, "--username", settings.user, "--password", settings.password]) \
            + general_args \
            + ([] if extra_args is None else extra_args)
    @staticmethod
    def is_output_error(output_str):
        """True when a megatools output line reports an error (starts with "ERROR:")."""
        return output_str.startswith("ERROR:")
    @classmethod
    def check_call(cls, command):
        """Run *command* and raise CalledProcessError if any output line is an error.

        Errors are detected by scanning the combined output (execute_command
        redirects stderr to stdout); presumably the tools' exit codes are not
        reliable — confirm before relying on the return code instead.
        """
        process = cls.execute_command(command)
        process_output, _ = process.communicate()
        for line in process_output.split("\n"):
            if MegaAccountHandler.is_output_error(line):
                raise CalledProcessError(1, command, process_output)
@classmethod
|
MatiasSM/fcb | fcb/database/helpers.py | get_db_version | python | def get_db_version(session):
value = session.query(ProgramInformation.value).filter(ProgramInformation.name == "db_version").scalar()
return int(value) | :param session: actually it is a sqlalchemy session
:return: version number | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/database/helpers.py#L40-L46 | null | import threading
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from fcb.database import settings
from fcb.database.schema import ProgramInformation
_engine = create_engine(settings.Definitions.connect_string)
_Session = sessionmaker(bind=_engine)
class _LockedSession(object):
    """
    This class is used to access database one thread at the time to avoid "database is locked" errors when using SQLite
    Because SQLAlchemy sessions are not thread safe (nor multi-thread shareable seems), we use different sessions
    but share the locks (to ensure exclusive access to DB)
    When/If a different engine is to be used, this class may be changed to avoid locking in that case
    """
    def __init__(self):
        # RLock: the same thread may re-enter without deadlocking itself
        self._lock = threading.RLock()
        self._cur_session = None
    def __enter__(self):
        # hold the lock for the whole `with` body; a fresh session per entry
        self._lock.acquire()
        self._cur_session = _Session()
        return self._cur_session
    def __exit__(self, *_):
        # NOTE(review): commits even when the body raised an exception —
        # presumably intentional best-effort persistence; confirm before changing
        self._cur_session.commit()
        self._cur_session.close()
        self._lock.release()
_shared_session = _LockedSession()
def get_session():
    """Return the process-wide locked session wrapper (use it as a context manager)."""
    return _shared_session
|
MatiasSM/fcb | fcb/processing/filters/AlreadyProcessedFilter.py | AlreadyProcessedFilter.process_data | python | def process_data(self, file_info):
if self._is_already_processed(file_info):
self.log.debug("Content file already processed '%s'", str(file_info))
self.fire(events.FilteredFile(file_info))
self.fire(events.FileInfoAlreadyProcessed(file_info))
else:
return file_info
return None | expects FileInfo | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/filters/AlreadyProcessedFilter.py#L18-L26 | null | class AlreadyProcessedFilter(PipelineTask):
_session_resource = None
    def do_init(self):
        """Acquire the shared (locked) DB session wrapper used for lookups."""
        self._session_resource = get_session()
# override from PipelineTask
@handler("Stopped")
def on_stopped(self):
if self._session_resource:
with self._session_resource as session:
session.commit()
session.close()
# -------- low visibility methods
    def _is_already_processed(self, file_info):
        """Tell whether file_info (matched by sha1) was already fully uploaded.

        A fragmented file counts as processed only when every fragment arrived;
        otherwise it is reported as pending so the file gets re-uploaded.
        """
        try:
            with self._session_resource as session:
                # newest upload record for this content hash; .one() raises
                # NoResultFound when the file was never uploaded
                uploaded_file = session \
                    .query(UploadedFile) \
                    .filter(UploadedFile.sha1 == file_info.sha1) \
                    .order_by(UploadedFile.upload_date.desc()).one()
                session.expunge_all()
                self.log.debug("Found uploaded file by hash: {}".format(uploaded_file))
                # get the uploaded date in local time (FIXME really ugly code)
                date_string = uploaded_file.upload_date.replace(tzinfo=tz.gettz('GMT')).astimezone(tz.tzlocal()).isoformat()
                if uploaded_file.fragment_count > 0:
                    # check if all fragments have been uploaded
                    if len(uploaded_file.fragments) < uploaded_file.fragment_count:
                        self.log.info(
                            "File '%s' was already started to be uploaded on '%s' but only %d of %d fragments arrived"
                            " to its end, the file will need to be re-uploaded",
                            file_info.path, date_string, len(uploaded_file.fragments), uploaded_file.fragment_count)
                        return False
                self.log.info("File '%s' was already uploaded on '%s' with the name '%s' (sha1 '%s')",
                              file_info.path, date_string, uploaded_file.file_name.encode("utf-8"), str(file_info.sha1))
                return True
        except NoResultFound:
            self.log.debug("No file found for file info: {}".format(file_info))
            return False
|
MatiasSM/fcb | fcb/processing/filters/FileSizeFilter.py | FileSizeFilter.process_data | python | def process_data(self, file_info):
if self._exceeds_max_file_size(file_info):
self.log.info("File '%s' has a size in bytes (%d) greater than the configured limit. Will be ignored.",
file_info.path, file_info.size)
self.fire(events.FilteredFile(file_info))
return None
else:
return file_info | expects FileInfo | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/filters/FileSizeFilter.py#L12-L20 | null | class FileSizeFilter(PipelineTask):
_file_size_limit_bytes = None
    def do_init(self, file_size_limit_bytes):
        """Configure the maximum allowed file size in bytes; 0 disables the limit."""
        self._file_size_limit_bytes = file_size_limit_bytes
# override from PipelineTask
def _exceeds_max_file_size(self, file_info):
return self._file_size_limit_bytes != 0 and file_info.size > self._file_size_limit_bytes
|
MatiasSM/fcb | fcb/processing/transformations/ToImage.py | _determine_dimensions | python | def _determine_dimensions(num_of_pixels):
for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1):
if num_of_pixels % x == 0:
return num_of_pixels // x, x
return 1, num_of_pixels | Given a number of pixels, determines the largest width and height that define a
rectangle with such an area | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/ToImage.py#L15-L23 | null | from PIL import Image
import numpy
import math
from fcb.framework.workers import hd_worker_pool
from fcb.framework.workflow.HeavyPipelineTask import HeavyPipelineTask
from fcb.processing.models.FileInfo import FileInfo
from fcb.utils.log_helper import get_logger_module
_log = get_logger_module("ToImage")
# TODO image and array manipulation functions need to be optimized (a lot)
# if no better dimensions could be found, use a "line"
def _to_image_array(file_path):
    """
    Converts the file in file_path to a numpy uint8 array of shape (w, h, 3)
    representing an RGB image.

    The file's bytes are laid out in C order, zero-padded so the length is a
    multiple of 3 (between 1 and 3 pad bytes are always added), and the very
    last byte stores the pad amount so decoding can strip it.
    """
    _log.debug("File '%s' to image", file_path)
    data = numpy.fromfile(file_path, numpy.uint8)
    orig_len = len(data)
    # pad_req is always in [1, 3]: at least one pad byte is needed to hold the
    # pad count (the old `+= 3 if pad_req == 0` branch was dead code)
    pad_req = 3 - (orig_len % 3)
    final_len = orig_len + pad_req
    w, h = _determine_dimensions(final_len // 3)
    # vectorized equivalent of the previous per-byte loop: its (x, y, s) index
    # mapping is exactly a C-order reshape of the flat byte stream
    padded = numpy.zeros(final_len, dtype=numpy.uint8)
    padded[:orig_len] = data
    padded[-1] = pad_req  # last byte (== reshaped[-1, -1, 2]) records the padding
    return padded.reshape((w, h, 3))
def from_file_to_image(file_path, img_path):
    """Encode the file at file_path as an RGB image saved to img_path (reversible
    via from_image_to_file)."""
    data = _to_image_array(file_path)
    img = Image.fromarray(data, 'RGB')
    img.save(img_path)
def from_image_to_file(img_path, file_path):
    """
    Decode an image created by from_file_to_image back into the original file
    at file_path.

    The pad amount stored in the image's last byte is used to strip the padding
    added during encoding.
    """
    img = Image.open(img_path)
    data = numpy.array(img)
    data = numpy.reshape(data, len(img.getdata()) * 3)
    to_remove = data[len(data) - 1]
    # a slice (view) replaces numpy.delete + py2-only xrange: same bytes kept,
    # no copy built from an index list
    data = data[:len(data) - to_remove]
    data.tofile(file_path)
_worker_pool = hd_worker_pool
class ToImage(HeavyPipelineTask):
    """Pipeline task that re-encodes a block's latest file as an RGB image."""

    @classmethod
    def get_extension(cls):
        """Extension appended to files produced by this task."""
        return ".png"

    @classmethod
    def is_transformed(cls, path):
        """Tell whether *path* already looks like an output of this task."""
        return path.endswith(cls.get_extension())

    # override from HeavyPipelineTask
    def do_heavy_work(self, block):
        """Convert the block's latest file to an image and register it on the block.

        Note: Expects Compressor Block like objects
        """
        source_path = block.latest_file_info.path
        target_path = source_path + self.get_extension()
        self.log.debug("Converting file '%s' to image '%s'", source_path, target_path)
        from_file_to_image(source_path, target_path)
        converted = FileInfo(target_path)
        block.image_converted_file_info = converted
        block.latest_file_info = converted
        return block

    # override from HeavyPipelineTask
    def get_worker_channel(self):
        """Channel of the shared heavy-duty worker pool used to run this task."""
        return _worker_pool.get_worker()
|
MatiasSM/fcb | fcb/processing/transformations/ToImage.py | _to_image_array | python | def _to_image_array(file_path):
_log.debug("File '%s' to image", file_path)
data = numpy.fromfile(file_path, numpy.uint8)
orig_len = len(data)
pad_req = (3 - (orig_len % 3))
pad_req += 3 if pad_req == 0 else 0
final_len = orig_len + pad_req
num_of_pixels = final_len // 3
w, h = _determine_dimensions(num_of_pixels)
reshaped = numpy.zeros((w, h, 3), dtype=numpy.uint8)
for i in xrange(final_len):
sidx = i // 3
y = sidx % h
x = sidx // h
s = i % 3
reshaped[x, y, s] = data[i] if i < orig_len else 0
reshaped[-1, -1, 2] = pad_req
return reshaped | Converts the file in file_path to a numpy array (matrix) representing an RGB image
The dimensions of the image are calculated using __determine_dimensions.
Padding is added provide enough bytes to generate the image (between 1 and 3 bytes can be added). | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/ToImage.py#L26-L49 | [
"def _determine_dimensions(num_of_pixels):\n \"\"\"\n Given a number of pixels, determines the largest width and height that define a\n rectangle with such an area\n \"\"\"\n for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1):\n if num_of_pixels % x == 0:\n return num_of_pi... | from PIL import Image
import numpy
import math
from fcb.framework.workers import hd_worker_pool
from fcb.framework.workflow.HeavyPipelineTask import HeavyPipelineTask
from fcb.processing.models.FileInfo import FileInfo
from fcb.utils.log_helper import get_logger_module
_log = get_logger_module("ToImage")
# TODO image and array manipulation functions need to be optimized (a lot)
def _determine_dimensions(num_of_pixels):
"""
Given a number of pixels, determines the largest width and height that define a
rectangle with such an area
"""
for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1):
if num_of_pixels % x == 0:
return num_of_pixels // x, x
return 1, num_of_pixels # if no better dimensions could be found, use a "line"
def from_file_to_image(file_path, img_path):
data = _to_image_array(file_path)
img = Image.fromarray(data, 'RGB')
img.save(img_path)
def from_image_to_file(img_path, file_path):
"""
Expects images created by from_from_file_to_image
"""
img = Image.open(img_path)
data = numpy.array(img)
data = numpy.reshape(data, len(img.getdata()) * 3)
to_remove = data[len(data) - 1]
data = numpy.delete(data, xrange(len(data) - to_remove, len(data)))
data.tofile(file_path)
_worker_pool = hd_worker_pool
class ToImage(HeavyPipelineTask):
@classmethod
def get_extension(cls):
return ".png"
@classmethod
def is_transformed(cls, path):
return path.endswith(cls.get_extension())
# override from HeavyPipelineTask
def do_heavy_work(self, block):
"""
Note: Expects Compressor Block like objects
"""
src_file_path = block.latest_file_info.path
img_path = src_file_path + self.get_extension()
self.log.debug("Converting file '%s' to image '%s'", src_file_path, img_path)
from_file_to_image(src_file_path, img_path)
block.image_converted_file_info = FileInfo(img_path)
block.latest_file_info = block.image_converted_file_info
return block
# override from HeavyPipelineTask
def get_worker_channel(self):
return _worker_pool.get_worker()
|
MatiasSM/fcb | fcb/processing/transformations/ToImage.py | from_image_to_file | python | def from_image_to_file(img_path, file_path):
img = Image.open(img_path)
data = numpy.array(img)
data = numpy.reshape(data, len(img.getdata()) * 3)
to_remove = data[len(data) - 1]
data = numpy.delete(data, xrange(len(data) - to_remove, len(data)))
data.tofile(file_path) | Expects images created by from_from_file_to_image | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/ToImage.py#L58-L67 | null | from PIL import Image
import numpy
import math
from fcb.framework.workers import hd_worker_pool
from fcb.framework.workflow.HeavyPipelineTask import HeavyPipelineTask
from fcb.processing.models.FileInfo import FileInfo
from fcb.utils.log_helper import get_logger_module
_log = get_logger_module("ToImage")
# TODO image and array manipulation functions need to be optimized (a lot)
def _determine_dimensions(num_of_pixels):
"""
Given a number of pixels, determines the largest width and height that define a
rectangle with such an area
"""
for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1):
if num_of_pixels % x == 0:
return num_of_pixels // x, x
return 1, num_of_pixels # if no better dimensions could be found, use a "line"
def _to_image_array(file_path):
"""
Converts the file in file_path to a numpy array (matrix) representing an RGB image
The dimensions of the image are calculated using __determine_dimensions.
Padding is added provide enough bytes to generate the image (between 1 and 3 bytes can be added).
"""
_log.debug("File '%s' to image", file_path)
data = numpy.fromfile(file_path, numpy.uint8)
orig_len = len(data)
pad_req = (3 - (orig_len % 3))
pad_req += 3 if pad_req == 0 else 0
final_len = orig_len + pad_req
num_of_pixels = final_len // 3
w, h = _determine_dimensions(num_of_pixels)
reshaped = numpy.zeros((w, h, 3), dtype=numpy.uint8)
for i in xrange(final_len):
sidx = i // 3
y = sidx % h
x = sidx // h
s = i % 3
reshaped[x, y, s] = data[i] if i < orig_len else 0
reshaped[-1, -1, 2] = pad_req
return reshaped
def from_file_to_image(file_path, img_path):
data = _to_image_array(file_path)
img = Image.fromarray(data, 'RGB')
img.save(img_path)
_worker_pool = hd_worker_pool
class ToImage(HeavyPipelineTask):
@classmethod
def get_extension(cls):
return ".png"
@classmethod
def is_transformed(cls, path):
return path.endswith(cls.get_extension())
# override from HeavyPipelineTask
def do_heavy_work(self, block):
"""
Note: Expects Compressor Block like objects
"""
src_file_path = block.latest_file_info.path
img_path = src_file_path + self.get_extension()
self.log.debug("Converting file '%s' to image '%s'", src_file_path, img_path)
from_file_to_image(src_file_path, img_path)
block.image_converted_file_info = FileInfo(img_path)
block.latest_file_info = block.image_converted_file_info
return block
# override from HeavyPipelineTask
def get_worker_channel(self):
return _worker_pool.get_worker()
|
MatiasSM/fcb | fcb/processing/transformations/ToImage.py | ToImage.do_heavy_work | python | def do_heavy_work(self, block):
src_file_path = block.latest_file_info.path
img_path = src_file_path + self.get_extension()
self.log.debug("Converting file '%s' to image '%s'", src_file_path, img_path)
from_file_to_image(src_file_path, img_path)
block.image_converted_file_info = FileInfo(img_path)
block.latest_file_info = block.image_converted_file_info
return block | Note: Expects Compressor Block like objects | train | https://github.com/MatiasSM/fcb/blob/92a6c535287ea1c1ef986954a5d66e7905fb6221/fcb/processing/transformations/ToImage.py#L83-L93 | [
"def from_file_to_image(file_path, img_path):\n data = _to_image_array(file_path)\n img = Image.fromarray(data, 'RGB')\n img.save(img_path)\n"
] | class ToImage(HeavyPipelineTask):
@classmethod
def get_extension(cls):
return ".png"
@classmethod
def is_transformed(cls, path):
return path.endswith(cls.get_extension())
# override from HeavyPipelineTask
# override from HeavyPipelineTask
def get_worker_channel(self):
return _worker_pool.get_worker()
|
wdm0006/sklearn-extensions | sklearn_extensions/kernel_regression/kr.py | KernelRegression.fit | python | def fit(self, X, y):
self.X = X
self.y = y
if hasattr(self.gamma, "__iter__"):
self.gamma = self._optimize_gamma(self.gamma)
return self | Fit the model
Parameters
----------
X : array-like of shape = [n_samples, n_features]
The training input samples.
y : array-like, shape = [n_samples]
The target values
Returns
-------
self : object
Returns self. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/kernel_regression/kr.py#L46-L68 | null | class KernelRegression(BaseEstimator, RegressorMixin):
"""Nadaraya-Watson kernel regression with automatic bandwidth selection.
This implements Nadaraya-Watson kernel regression with (optional) automatic
bandwith selection of the kernel via leave-one-out cross-validation. Kernel
regression is a simple non-parametric kernelized technique for learning
a non-linear relationship between input variable(s) and a target variable.
Parameters
----------
kernel : string or callable, default="rbf"
Kernel map to be approximated. A callable should accept two arguments
and the keyword arguments passed to this object as kernel_params, and
should return a floating point number.
gamma : float, default=None
Gamma parameter for the RBF ("bandwidth"), polynomial,
exponential chi2 and sigmoid kernels. Interpretation of the default
value is left to the kernel; see the documentation for
sklearn.metrics.pairwise. Ignored by other kernels. If a sequence of
values is given, one of these values is selected which minimizes
the mean-squared-error of leave-one-out cross-validation.
See also
--------
sklearn.metrics.pairwise.kernel_metrics : List of built-in kernels.
"""
def __init__(self, kernel="rbf", gamma=None):
self.kernel = kernel
self.gamma = gamma
def predict(self, X):
"""Predict target values for X.
Parameters
----------
X : array-like of shape = [n_samples, n_features]
The input samples.
Returns
-------
y : array of shape = [n_samples]
The predicted target value.
"""
K = pairwise_kernels(self.X, X, metric=self.kernel, gamma=self.gamma)
return (K * self.y[:, None]).sum(axis=0) / K.sum(axis=0)
def _optimize_gamma(self, gamma_values):
# Select specific value of gamma from the range of given gamma_values
# by minimizing mean-squared error in leave-one-out cross validation
mse = np.empty_like(gamma_values, dtype=np.float)
for i, gamma in enumerate(gamma_values):
K = pairwise_kernels(self.X, self.X, metric=self.kernel, gamma=gamma)
np.fill_diagonal(K, 0) # leave-one-out
Ky = K * self.y[:, np.newaxis]
y_pred = Ky.sum(axis=0) / K.sum(axis=0)
mse[i] = ((y_pred - self.y) ** 2).mean()
return gamma_values[np.nanargmin(mse)] |
wdm0006/sklearn-extensions | sklearn_extensions/kernel_regression/kr.py | KernelRegression.predict | python | def predict(self, X):
K = pairwise_kernels(self.X, X, metric=self.kernel, gamma=self.gamma)
return (K * self.y[:, None]).sum(axis=0) / K.sum(axis=0) | Predict target values for X.
Parameters
----------
X : array-like of shape = [n_samples, n_features]
The input samples.
Returns
-------
y : array of shape = [n_samples]
The predicted target value. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/kernel_regression/kr.py#L70-L84 | null | class KernelRegression(BaseEstimator, RegressorMixin):
"""Nadaraya-Watson kernel regression with automatic bandwidth selection.
This implements Nadaraya-Watson kernel regression with (optional) automatic
bandwith selection of the kernel via leave-one-out cross-validation. Kernel
regression is a simple non-parametric kernelized technique for learning
a non-linear relationship between input variable(s) and a target variable.
Parameters
----------
kernel : string or callable, default="rbf"
Kernel map to be approximated. A callable should accept two arguments
and the keyword arguments passed to this object as kernel_params, and
should return a floating point number.
gamma : float, default=None
Gamma parameter for the RBF ("bandwidth"), polynomial,
exponential chi2 and sigmoid kernels. Interpretation of the default
value is left to the kernel; see the documentation for
sklearn.metrics.pairwise. Ignored by other kernels. If a sequence of
values is given, one of these values is selected which minimizes
the mean-squared-error of leave-one-out cross-validation.
See also
--------
sklearn.metrics.pairwise.kernel_metrics : List of built-in kernels.
"""
def __init__(self, kernel="rbf", gamma=None):
self.kernel = kernel
self.gamma = gamma
def fit(self, X, y):
"""Fit the model
Parameters
----------
X : array-like of shape = [n_samples, n_features]
The training input samples.
y : array-like, shape = [n_samples]
The target values
Returns
-------
self : object
Returns self.
"""
self.X = X
self.y = y
if hasattr(self.gamma, "__iter__"):
self.gamma = self._optimize_gamma(self.gamma)
return self
def _optimize_gamma(self, gamma_values):
# Select specific value of gamma from the range of given gamma_values
# by minimizing mean-squared error in leave-one-out cross validation
mse = np.empty_like(gamma_values, dtype=np.float)
for i, gamma in enumerate(gamma_values):
K = pairwise_kernels(self.X, self.X, metric=self.kernel, gamma=gamma)
np.fill_diagonal(K, 0) # leave-one-out
Ky = K * self.y[:, np.newaxis]
y_pred = Ky.sum(axis=0) / K.sum(axis=0)
mse[i] = ((y_pred - self.y) ** 2).mean()
return gamma_values[np.nanargmin(mse)] |
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | BaseRandomLayer._compute_hidden_activations | python | def _compute_hidden_activations(self, X):
self._compute_input_activations(X)
acts = self.input_activations_
if (callable(self.activation_func)):
args_dict = self.activation_args if (self.activation_args) else {}
X_new = self.activation_func(acts, **args_dict)
else:
func_name = self.activation_func
func = self._internal_activation_funcs[func_name]
X_new = func(acts, **self._extra_args)
return X_new | Compute hidden activations given X | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L72-L88 | null | class BaseRandomLayer(BaseEstimator, TransformerMixin):
"""Abstract Base Class for random layers"""
__metaclass__ = ABCMeta
_internal_activation_funcs = dict()
@classmethod
def activation_func_names(cls):
"""Get list of internal activation function names"""
return cls._internal_activation_funcs.keys()
# take n_hidden and random_state, init components_ and
# input_activations_
def __init__(self, n_hidden=20, random_state=0, activation_func=None,
activation_args=None):
self.n_hidden = n_hidden
self.random_state = random_state
self.activation_func = activation_func
self.activation_args = activation_args
self.components_ = dict()
self.input_activations_ = None
# keyword args for internally defined funcs
self._extra_args = dict()
@abstractmethod
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
@abstractmethod
def _compute_input_activations(self, X):
"""Compute input activations given X"""
# compute input activations and pass them
# through the hidden layer transfer functions
# to compute the transform
# perform fit by generating random components based
# on the input array
def fit(self, X, y=None):
"""Generate a random hidden layer.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training set: only the shape is used to generate random component
values for hidden units
y : is not used: placeholder to allow for usage in a Pipeline.
Returns
-------
self
"""
self._generate_components(X)
return self
# perform transformation by calling compute_hidden_activations
# (which will normally call compute_input_activations first)
def transform(self, X, y=None):
"""Generate the random hidden layer's activations given X as input.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
Data to transform
y : is not used: placeholder to allow for usage in a Pipeline.
Returns
-------
X_new : numpy array of shape [n_samples, n_components]
"""
if (self.components_ is None):
raise ValueError('No components initialized')
return self._compute_hidden_activations(X)
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | BaseRandomLayer.transform | python | def transform(self, X, y=None):
if (self.components_ is None):
raise ValueError('No components initialized')
return self._compute_hidden_activations(X) | Generate the random hidden layer's activations given X as input.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
Data to transform
y : is not used: placeholder to allow for usage in a Pipeline.
Returns
-------
X_new : numpy array of shape [n_samples, n_components] | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L114-L132 | null | class BaseRandomLayer(BaseEstimator, TransformerMixin):
"""Abstract Base Class for random layers"""
__metaclass__ = ABCMeta
_internal_activation_funcs = dict()
@classmethod
def activation_func_names(cls):
"""Get list of internal activation function names"""
return cls._internal_activation_funcs.keys()
# take n_hidden and random_state, init components_ and
# input_activations_
def __init__(self, n_hidden=20, random_state=0, activation_func=None,
activation_args=None):
self.n_hidden = n_hidden
self.random_state = random_state
self.activation_func = activation_func
self.activation_args = activation_args
self.components_ = dict()
self.input_activations_ = None
# keyword args for internally defined funcs
self._extra_args = dict()
@abstractmethod
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
@abstractmethod
def _compute_input_activations(self, X):
"""Compute input activations given X"""
# compute input activations and pass them
# through the hidden layer transfer functions
# to compute the transform
def _compute_hidden_activations(self, X):
"""Compute hidden activations given X"""
self._compute_input_activations(X)
acts = self.input_activations_
if (callable(self.activation_func)):
args_dict = self.activation_args if (self.activation_args) else {}
X_new = self.activation_func(acts, **args_dict)
else:
func_name = self.activation_func
func = self._internal_activation_funcs[func_name]
X_new = func(acts, **self._extra_args)
return X_new
# perform fit by generating random components based
# on the input array
def fit(self, X, y=None):
"""Generate a random hidden layer.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training set: only the shape is used to generate random component
values for hidden units
y : is not used: placeholder to allow for usage in a Pipeline.
Returns
-------
self
"""
self._generate_components(X)
return self
# perform transformation by calling compute_hidden_activations
# (which will normally call compute_input_activations first)
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | RandomLayer._compute_radii | python | def _compute_radii(self):
# use supplied radii if present
radii = self._get_user_components('radii')
# compute radii
if (radii is None):
centers = self.components_['centers']
n_centers = centers.shape[0]
max_dist = np.max(pairwise_distances(centers))
radii = np.ones(n_centers) * max_dist/sqrt(2.0 * n_centers)
self.components_['radii'] = radii | Generate RBF radii | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L280-L294 | null | class RandomLayer(BaseRandomLayer):
"""RandomLayer is a transformer that creates a feature mapping of the
inputs that corresponds to a layer of hidden units with randomly
generated components.
The transformed values are a specified function of input activations
that are a weighted combination of dot product (multilayer perceptron)
and distance (rbf) activations:
input_activation = alpha * mlp_activation + (1-alpha) * rbf_activation
mlp_activation(x) = dot(x, weights) + bias
rbf_activation(x) = rbf_width * ||x - center||/radius
alpha and rbf_width are specified by the user
weights and biases are taken from normal distribution of
mean 0 and sd of 1
centers are taken uniformly from the bounding hyperrectangle
of the inputs, and radii are max(||x-c||)/sqrt(n_centers*2)
The input activation is transformed by a transfer function that defaults
to numpy.tanh if not specified, but can be any callable that returns an
array of the same shape as its argument (the input activation array, of
shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
'multiquadric', or 'inv_multiquadric'.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_features, n_hidden]
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
'inv_multiquadric' or a callable. If None is given, 'tanh'
will be used.
If a callable is given, it will be used to compute the activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing dot(x, hidden_weights) + bias for all samples
`components_` : dictionary containing two keys:
`bias_weights_` : numpy array of shape [n_hidden]
`hidden_weights_` : numpy array of shape [n_features, n_hidden]
See Also
--------
"""
# triangular activation function
_tribas = (lambda x: np.clip(1.0 - np.fabs(x), 0.0, 1.0))
# inverse triangular activation function
_inv_tribas = (lambda x: np.clip(np.fabs(x), 0.0, 1.0))
# sigmoid activation function
_sigmoid = (lambda x: 1.0/(1.0 + np.exp(-x)))
# hard limit activation function
_hardlim = (lambda x: np.array(x > 0.0, dtype=float))
_softlim = (lambda x: np.clip(x, 0.0, 1.0))
# gaussian RBF
_gaussian = (lambda x: np.exp(-pow(x, 2.0)))
# multiquadric RBF
_multiquadric = (lambda x:
np.sqrt(1.0 + pow(x, 2.0)))
# inverse multiquadric RBF
_inv_multiquadric = (lambda x:
1.0/(np.sqrt(1.0 + pow(x, 2.0))))
# internal activation function table
_internal_activation_funcs = {'sine': np.sin,
'tanh': np.tanh,
'tribas': _tribas,
'inv_tribas': _inv_tribas,
'sigmoid': _sigmoid,
'softlim': _softlim,
'hardlim': _hardlim,
'gaussian': _gaussian,
'multiquadric': _multiquadric,
'inv_multiquadric': _inv_multiquadric,
}
def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
activation_func='tanh', activation_args=None,
user_components=None, rbf_width=1.0):
super(RandomLayer, self).__init__(n_hidden=n_hidden,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args)
if (isinstance(self.activation_func, str)):
func_names = self._internal_activation_funcs.keys()
if (self.activation_func not in func_names):
msg = "unknown activation function '%s'" % self.activation_func
raise ValueError(msg)
self.alpha = alpha
self.rbf_width = rbf_width
self.user_components = user_components
self._use_mlp_input = (self.alpha != 0.0)
self._use_rbf_input = (self.alpha != 1.0)
def _get_user_components(self, key):
"""Look for given user component"""
try:
return self.user_components[key]
except (TypeError, KeyError):
return None
def _compute_centers(self, X, sparse, rs):
"""Generate RBF centers"""
# use supplied centers if present
centers = self._get_user_components('centers')
# use points taken uniformly from the bounding
# hyperrectangle
if (centers is None):
n_features = X.shape[1]
if (sparse):
fxr = range(n_features)
cols = [X.getcol(i) for i in fxr]
min_dtype = X.dtype.type(1.0e10)
sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
min_Xs = np.array(map(sp_min, cols))
max_dtype = X.dtype.type(-1.0e10)
sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
max_Xs = np.array(map(sp_max, cols))
else:
min_Xs = X.min(axis=0)
max_Xs = X.max(axis=0)
spans = max_Xs - min_Xs
ctrs_size = (self.n_hidden, n_features)
centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
self.components_['centers'] = centers
def _compute_biases(self, rs):
"""Generate MLP biases"""
# use supplied biases if present
biases = self._get_user_components('biases')
if (biases is None):
b_size = self.n_hidden
biases = rs.normal(size=b_size)
self.components_['biases'] = biases
def _compute_weights(self, X, rs):
"""Generate MLP weights"""
# use supplied weights if present
weights = self._get_user_components('weights')
if (weights is None):
n_features = X.shape[1]
hw_size = (n_features, self.n_hidden)
weights = rs.normal(size=hw_size)
self.components_['weights'] = weights
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
rs = check_random_state(self.random_state)
if (self._use_mlp_input):
self._compute_biases(rs)
self._compute_weights(X, rs)
if (self._use_rbf_input):
self._compute_centers(X, sp.issparse(X), rs)
self._compute_radii()
def _compute_input_activations(self, X):
"""Compute input activations given X"""
n_samples = X.shape[0]
mlp_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_mlp_input):
b = self.components_['biases']
w = self.components_['weights']
mlp_acts = self.alpha * (safe_sparse_dot(X, w) + b)
rbf_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_rbf_input):
radii = self.components_['radii']
centers = self.components_['centers']
scale = self.rbf_width * (1.0 - self.alpha)
rbf_acts = scale * cdist(X, centers)/radii
self.input_activations_ = mlp_acts + rbf_acts
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | RandomLayer._compute_centers | python | def _compute_centers(self, X, sparse, rs):
# use supplied centers if present
centers = self._get_user_components('centers')
# use points taken uniformly from the bounding
# hyperrectangle
if (centers is None):
n_features = X.shape[1]
if (sparse):
fxr = range(n_features)
cols = [X.getcol(i) for i in fxr]
min_dtype = X.dtype.type(1.0e10)
sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
min_Xs = np.array(map(sp_min, cols))
max_dtype = X.dtype.type(-1.0e10)
sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
max_Xs = np.array(map(sp_max, cols))
else:
min_Xs = X.min(axis=0)
max_Xs = X.max(axis=0)
spans = max_Xs - min_Xs
ctrs_size = (self.n_hidden, n_features)
centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
self.components_['centers'] = centers | Generate RBF centers | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L296-L326 | null | class RandomLayer(BaseRandomLayer):
"""RandomLayer is a transformer that creates a feature mapping of the
inputs that corresponds to a layer of hidden units with randomly
generated components.
The transformed values are a specified function of input activations
that are a weighted combination of dot product (multilayer perceptron)
and distance (rbf) activations:
input_activation = alpha * mlp_activation + (1-alpha) * rbf_activation
mlp_activation(x) = dot(x, weights) + bias
rbf_activation(x) = rbf_width * ||x - center||/radius
alpha and rbf_width are specified by the user
weights and biases are taken from normal distribution of
mean 0 and sd of 1
centers are taken uniformly from the bounding hyperrectangle
of the inputs, and radii are max(||x-c||)/sqrt(n_centers*2)
The input activation is transformed by a transfer function that defaults
to numpy.tanh if not specified, but can be any callable that returns an
array of the same shape as its argument (the input activation array, of
shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
'multiquadric', or 'inv_multiquadric'.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_features, n_hidden]
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
'inv_multiquadric' or a callable. If None is given, 'tanh'
will be used.
If a callable is given, it will be used to compute the activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing dot(x, hidden_weights) + bias for all samples
`components_` : dictionary containing two keys:
`bias_weights_` : numpy array of shape [n_hidden]
`hidden_weights_` : numpy array of shape [n_features, n_hidden]
See Also
--------
"""
# triangular activation function
_tribas = (lambda x: np.clip(1.0 - np.fabs(x), 0.0, 1.0))
# inverse triangular activation function
_inv_tribas = (lambda x: np.clip(np.fabs(x), 0.0, 1.0))
# sigmoid activation function
_sigmoid = (lambda x: 1.0/(1.0 + np.exp(-x)))
# hard limit activation function
_hardlim = (lambda x: np.array(x > 0.0, dtype=float))
_softlim = (lambda x: np.clip(x, 0.0, 1.0))
# gaussian RBF
_gaussian = (lambda x: np.exp(-pow(x, 2.0)))
# multiquadric RBF
_multiquadric = (lambda x:
np.sqrt(1.0 + pow(x, 2.0)))
# inverse multiquadric RBF
_inv_multiquadric = (lambda x:
1.0/(np.sqrt(1.0 + pow(x, 2.0))))
# internal activation function table
_internal_activation_funcs = {'sine': np.sin,
'tanh': np.tanh,
'tribas': _tribas,
'inv_tribas': _inv_tribas,
'sigmoid': _sigmoid,
'softlim': _softlim,
'hardlim': _hardlim,
'gaussian': _gaussian,
'multiquadric': _multiquadric,
'inv_multiquadric': _inv_multiquadric,
}
def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
activation_func='tanh', activation_args=None,
user_components=None, rbf_width=1.0):
super(RandomLayer, self).__init__(n_hidden=n_hidden,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args)
if (isinstance(self.activation_func, str)):
func_names = self._internal_activation_funcs.keys()
if (self.activation_func not in func_names):
msg = "unknown activation function '%s'" % self.activation_func
raise ValueError(msg)
self.alpha = alpha
self.rbf_width = rbf_width
self.user_components = user_components
self._use_mlp_input = (self.alpha != 0.0)
self._use_rbf_input = (self.alpha != 1.0)
def _get_user_components(self, key):
"""Look for given user component"""
try:
return self.user_components[key]
except (TypeError, KeyError):
return None
def _compute_radii(self):
"""Generate RBF radii"""
# use supplied radii if present
radii = self._get_user_components('radii')
# compute radii
if (radii is None):
centers = self.components_['centers']
n_centers = centers.shape[0]
max_dist = np.max(pairwise_distances(centers))
radii = np.ones(n_centers) * max_dist/sqrt(2.0 * n_centers)
self.components_['radii'] = radii
def _compute_biases(self, rs):
"""Generate MLP biases"""
# use supplied biases if present
biases = self._get_user_components('biases')
if (biases is None):
b_size = self.n_hidden
biases = rs.normal(size=b_size)
self.components_['biases'] = biases
def _compute_weights(self, X, rs):
"""Generate MLP weights"""
# use supplied weights if present
weights = self._get_user_components('weights')
if (weights is None):
n_features = X.shape[1]
hw_size = (n_features, self.n_hidden)
weights = rs.normal(size=hw_size)
self.components_['weights'] = weights
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
rs = check_random_state(self.random_state)
if (self._use_mlp_input):
self._compute_biases(rs)
self._compute_weights(X, rs)
if (self._use_rbf_input):
self._compute_centers(X, sp.issparse(X), rs)
self._compute_radii()
def _compute_input_activations(self, X):
"""Compute input activations given X"""
n_samples = X.shape[0]
mlp_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_mlp_input):
b = self.components_['biases']
w = self.components_['weights']
mlp_acts = self.alpha * (safe_sparse_dot(X, w) + b)
rbf_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_rbf_input):
radii = self.components_['radii']
centers = self.components_['centers']
scale = self.rbf_width * (1.0 - self.alpha)
rbf_acts = scale * cdist(X, centers)/radii
self.input_activations_ = mlp_acts + rbf_acts
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | RandomLayer._compute_biases | python | def _compute_biases(self, rs):
# use supplied biases if present
biases = self._get_user_components('biases')
if (biases is None):
b_size = self.n_hidden
biases = rs.normal(size=b_size)
self.components_['biases'] = biases | Generate MLP biases | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L328-L337 | null | class RandomLayer(BaseRandomLayer):
"""RandomLayer is a transformer that creates a feature mapping of the
inputs that corresponds to a layer of hidden units with randomly
generated components.
The transformed values are a specified function of input activations
that are a weighted combination of dot product (multilayer perceptron)
and distance (rbf) activations:
input_activation = alpha * mlp_activation + (1-alpha) * rbf_activation
mlp_activation(x) = dot(x, weights) + bias
rbf_activation(x) = rbf_width * ||x - center||/radius
alpha and rbf_width are specified by the user
weights and biases are taken from normal distribution of
mean 0 and sd of 1
centers are taken uniformly from the bounding hyperrectangle
of the inputs, and radii are max(||x-c||)/sqrt(n_centers*2)
The input activation is transformed by a transfer function that defaults
to numpy.tanh if not specified, but can be any callable that returns an
array of the same shape as its argument (the input activation array, of
shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
'multiquadric', or 'inv_multiquadric'.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_features, n_hidden]
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
'inv_multiquadric' or a callable. If None is given, 'tanh'
will be used.
If a callable is given, it will be used to compute the activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing dot(x, hidden_weights) + bias for all samples
`components_` : dictionary containing two keys:
`bias_weights_` : numpy array of shape [n_hidden]
`hidden_weights_` : numpy array of shape [n_features, n_hidden]
See Also
--------
"""
# triangular activation function
_tribas = (lambda x: np.clip(1.0 - np.fabs(x), 0.0, 1.0))
# inverse triangular activation function
_inv_tribas = (lambda x: np.clip(np.fabs(x), 0.0, 1.0))
# sigmoid activation function
_sigmoid = (lambda x: 1.0/(1.0 + np.exp(-x)))
# hard limit activation function
_hardlim = (lambda x: np.array(x > 0.0, dtype=float))
_softlim = (lambda x: np.clip(x, 0.0, 1.0))
# gaussian RBF
_gaussian = (lambda x: np.exp(-pow(x, 2.0)))
# multiquadric RBF
_multiquadric = (lambda x:
np.sqrt(1.0 + pow(x, 2.0)))
# inverse multiquadric RBF
_inv_multiquadric = (lambda x:
1.0/(np.sqrt(1.0 + pow(x, 2.0))))
# internal activation function table
_internal_activation_funcs = {'sine': np.sin,
'tanh': np.tanh,
'tribas': _tribas,
'inv_tribas': _inv_tribas,
'sigmoid': _sigmoid,
'softlim': _softlim,
'hardlim': _hardlim,
'gaussian': _gaussian,
'multiquadric': _multiquadric,
'inv_multiquadric': _inv_multiquadric,
}
def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
             activation_func='tanh', activation_args=None,
             user_components=None, rbf_width=1.0):
    """Store layer hyperparameters and validate a named activation."""
    super(RandomLayer, self).__init__(n_hidden=n_hidden,
                                      random_state=random_state,
                                      activation_func=activation_func,
                                      activation_args=activation_args)
    # a string activation must name one of the built-in transfer functions
    if (isinstance(self.activation_func, str)):
        func_names = self._internal_activation_funcs.keys()
        if (self.activation_func not in func_names):
            msg = "unknown activation function '%s'" % self.activation_func
            raise ValueError(msg)
    self.alpha = alpha
    self.rbf_width = rbf_width
    self.user_components = user_components
    # alpha == 0.0 disables the MLP term; alpha == 1.0 disables the RBF term
    self._use_mlp_input = (self.alpha != 0.0)
    self._use_rbf_input = (self.alpha != 1.0)
def _get_user_components(self, key):
    """Look for given user component"""
    # user_components may be None (TypeError on indexing) or lack the
    # key (KeyError); either way None tells the caller to generate it
    try:
        return self.user_components[key]
    except (TypeError, KeyError):
        return None
def _compute_radii(self):
    """Generate RBF radii"""
    # use supplied radii if present
    radii = self._get_user_components('radii')
    # compute radii
    if (radii is None):
        centers = self.components_['centers']
        n_centers = centers.shape[0]
        # shared radius for every center: widest pairwise center
        # separation, shrunk by sqrt(2 * n_centers)
        max_dist = np.max(pairwise_distances(centers))
        radii = np.ones(n_centers) * max_dist/sqrt(2.0 * n_centers)
    self.components_['radii'] = radii
def _compute_centers(self, X, sparse, rs):
"""Generate RBF centers"""
# use supplied centers if present
centers = self._get_user_components('centers')
# use points taken uniformly from the bounding
# hyperrectangle
if (centers is None):
n_features = X.shape[1]
if (sparse):
fxr = range(n_features)
cols = [X.getcol(i) for i in fxr]
min_dtype = X.dtype.type(1.0e10)
sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
min_Xs = np.array(map(sp_min, cols))
max_dtype = X.dtype.type(-1.0e10)
sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
max_Xs = np.array(map(sp_max, cols))
else:
min_Xs = X.min(axis=0)
max_Xs = X.max(axis=0)
spans = max_Xs - min_Xs
ctrs_size = (self.n_hidden, n_features)
centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
self.components_['centers'] = centers
def _compute_weights(self, X, rs):
    """Generate MLP weights"""
    # use supplied weights if present
    weights = self._get_user_components('weights')
    if (weights is None):
        n_features = X.shape[1]
        # one N(0, 1) weight column per hidden unit
        hw_size = (n_features, self.n_hidden)
        weights = rs.normal(size=hw_size)
    self.components_['weights'] = weights
def _generate_components(self, X):
    """Generate components of hidden layer given X"""
    rs = check_random_state(self.random_state)
    # MLP (dot-product) pathway parameters
    if (self._use_mlp_input):
        self._compute_biases(rs)
        self._compute_weights(X, rs)
    # RBF (distance) pathway parameters
    if (self._use_rbf_input):
        self._compute_centers(X, sp.issparse(X), rs)
        self._compute_radii()
def _compute_input_activations(self, X):
    """Compute input activations given X"""
    n_samples = X.shape[0]
    # alpha-weighted MLP term: alpha * (X.w + b)
    mlp_acts = np.zeros((n_samples, self.n_hidden))
    if (self._use_mlp_input):
        b = self.components_['biases']
        w = self.components_['weights']
        mlp_acts = self.alpha * (safe_sparse_dot(X, w) + b)
    # (1 - alpha)-weighted RBF term: rbf_width * ||x - c|| / r
    rbf_acts = np.zeros((n_samples, self.n_hidden))
    if (self._use_rbf_input):
        radii = self.components_['radii']
        centers = self.components_['centers']
        scale = self.rbf_width * (1.0 - self.alpha)
        rbf_acts = scale * cdist(X, centers)/radii
    self.input_activations_ = mlp_acts + rbf_acts
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | RandomLayer._compute_weights | python | def _compute_weights(self, X, rs):
# use supplied weights if present
weights = self._get_user_components('weights')
if (weights is None):
n_features = X.shape[1]
hw_size = (n_features, self.n_hidden)
weights = rs.normal(size=hw_size)
self.components_['weights'] = weights | Generate MLP weights | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L339-L349 | null | class RandomLayer(BaseRandomLayer):
"""RandomLayer is a transformer that creates a feature mapping of the
inputs that corresponds to a layer of hidden units with randomly
generated components.
The transformed values are a specified function of input activations
that are a weighted combination of dot product (multilayer perceptron)
and distance (rbf) activations:
input_activation = alpha * mlp_activation + (1-alpha) * rbf_activation
mlp_activation(x) = dot(x, weights) + bias
rbf_activation(x) = rbf_width * ||x - center||/radius
alpha and rbf_width are specified by the user
weights and biases are taken from normal distribution of
mean 0 and sd of 1
centers are taken uniformly from the bounding hyperrectangle
of the inputs, and radii are max(||x-c||)/sqrt(n_centers*2)
The input activation is transformed by a transfer function that defaults
to numpy.tanh if not specified, but can be any callable that returns an
array of the same shape as its argument (the input activation array, of
shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
'multiquadric', or 'inv_multiquadric'.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_features, n_hidden]
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
'inv_multiquadric' or a callable. If None is given, 'tanh'
will be used.
If a callable is given, it will be used to compute the activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing dot(x, hidden_weights) + bias for all samples
`components_` : dictionary containing two keys:
`bias_weights_` : numpy array of shape [n_hidden]
`hidden_weights_` : numpy array of shape [n_features, n_hidden]
See Also
--------
"""
# triangular activation function
_tribas = (lambda x: np.clip(1.0 - np.fabs(x), 0.0, 1.0))
# inverse triangular activation function
_inv_tribas = (lambda x: np.clip(np.fabs(x), 0.0, 1.0))
# sigmoid activation function
_sigmoid = (lambda x: 1.0/(1.0 + np.exp(-x)))
# hard limit activation function
_hardlim = (lambda x: np.array(x > 0.0, dtype=float))
_softlim = (lambda x: np.clip(x, 0.0, 1.0))
# gaussian RBF
_gaussian = (lambda x: np.exp(-pow(x, 2.0)))
# multiquadric RBF
_multiquadric = (lambda x:
np.sqrt(1.0 + pow(x, 2.0)))
# inverse multiquadric RBF
_inv_multiquadric = (lambda x:
1.0/(np.sqrt(1.0 + pow(x, 2.0))))
# internal activation function table
_internal_activation_funcs = {'sine': np.sin,
'tanh': np.tanh,
'tribas': _tribas,
'inv_tribas': _inv_tribas,
'sigmoid': _sigmoid,
'softlim': _softlim,
'hardlim': _hardlim,
'gaussian': _gaussian,
'multiquadric': _multiquadric,
'inv_multiquadric': _inv_multiquadric,
}
def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
activation_func='tanh', activation_args=None,
user_components=None, rbf_width=1.0):
super(RandomLayer, self).__init__(n_hidden=n_hidden,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args)
if (isinstance(self.activation_func, str)):
func_names = self._internal_activation_funcs.keys()
if (self.activation_func not in func_names):
msg = "unknown activation function '%s'" % self.activation_func
raise ValueError(msg)
self.alpha = alpha
self.rbf_width = rbf_width
self.user_components = user_components
self._use_mlp_input = (self.alpha != 0.0)
self._use_rbf_input = (self.alpha != 1.0)
def _get_user_components(self, key):
"""Look for given user component"""
try:
return self.user_components[key]
except (TypeError, KeyError):
return None
def _compute_radii(self):
"""Generate RBF radii"""
# use supplied radii if present
radii = self._get_user_components('radii')
# compute radii
if (radii is None):
centers = self.components_['centers']
n_centers = centers.shape[0]
max_dist = np.max(pairwise_distances(centers))
radii = np.ones(n_centers) * max_dist/sqrt(2.0 * n_centers)
self.components_['radii'] = radii
def _compute_centers(self, X, sparse, rs):
"""Generate RBF centers"""
# use supplied centers if present
centers = self._get_user_components('centers')
# use points taken uniformly from the bounding
# hyperrectangle
if (centers is None):
n_features = X.shape[1]
if (sparse):
fxr = range(n_features)
cols = [X.getcol(i) for i in fxr]
min_dtype = X.dtype.type(1.0e10)
sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
min_Xs = np.array(map(sp_min, cols))
max_dtype = X.dtype.type(-1.0e10)
sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
max_Xs = np.array(map(sp_max, cols))
else:
min_Xs = X.min(axis=0)
max_Xs = X.max(axis=0)
spans = max_Xs - min_Xs
ctrs_size = (self.n_hidden, n_features)
centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
self.components_['centers'] = centers
def _compute_biases(self, rs):
"""Generate MLP biases"""
# use supplied biases if present
biases = self._get_user_components('biases')
if (biases is None):
b_size = self.n_hidden
biases = rs.normal(size=b_size)
self.components_['biases'] = biases
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
rs = check_random_state(self.random_state)
if (self._use_mlp_input):
self._compute_biases(rs)
self._compute_weights(X, rs)
if (self._use_rbf_input):
self._compute_centers(X, sp.issparse(X), rs)
self._compute_radii()
def _compute_input_activations(self, X):
"""Compute input activations given X"""
n_samples = X.shape[0]
mlp_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_mlp_input):
b = self.components_['biases']
w = self.components_['weights']
mlp_acts = self.alpha * (safe_sparse_dot(X, w) + b)
rbf_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_rbf_input):
radii = self.components_['radii']
centers = self.components_['centers']
scale = self.rbf_width * (1.0 - self.alpha)
rbf_acts = scale * cdist(X, centers)/radii
self.input_activations_ = mlp_acts + rbf_acts
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | RandomLayer._generate_components | python | def _generate_components(self, X):
rs = check_random_state(self.random_state)
if (self._use_mlp_input):
self._compute_biases(rs)
self._compute_weights(X, rs)
if (self._use_rbf_input):
self._compute_centers(X, sp.issparse(X), rs)
self._compute_radii() | Generate components of hidden layer given X | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L351-L361 | null | class RandomLayer(BaseRandomLayer):
"""RandomLayer is a transformer that creates a feature mapping of the
inputs that corresponds to a layer of hidden units with randomly
generated components.
The transformed values are a specified function of input activations
that are a weighted combination of dot product (multilayer perceptron)
and distance (rbf) activations:
input_activation = alpha * mlp_activation + (1-alpha) * rbf_activation
mlp_activation(x) = dot(x, weights) + bias
rbf_activation(x) = rbf_width * ||x - center||/radius
alpha and rbf_width are specified by the user
weights and biases are taken from normal distribution of
mean 0 and sd of 1
centers are taken uniformly from the bounding hyperrectangle
of the inputs, and radii are max(||x-c||)/sqrt(n_centers*2)
The input activation is transformed by a transfer function that defaults
to numpy.tanh if not specified, but can be any callable that returns an
array of the same shape as its argument (the input activation array, of
shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
'multiquadric', or 'inv_multiquadric'.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_features, n_hidden]
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
'inv_multiquadric' or a callable. If None is given, 'tanh'
will be used.
If a callable is given, it will be used to compute the activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing dot(x, hidden_weights) + bias for all samples
`components_` : dictionary containing two keys:
`bias_weights_` : numpy array of shape [n_hidden]
`hidden_weights_` : numpy array of shape [n_features, n_hidden]
See Also
--------
"""
# triangular activation function
_tribas = (lambda x: np.clip(1.0 - np.fabs(x), 0.0, 1.0))
# inverse triangular activation function
_inv_tribas = (lambda x: np.clip(np.fabs(x), 0.0, 1.0))
# sigmoid activation function
_sigmoid = (lambda x: 1.0/(1.0 + np.exp(-x)))
# hard limit activation function
_hardlim = (lambda x: np.array(x > 0.0, dtype=float))
_softlim = (lambda x: np.clip(x, 0.0, 1.0))
# gaussian RBF
_gaussian = (lambda x: np.exp(-pow(x, 2.0)))
# multiquadric RBF
_multiquadric = (lambda x:
np.sqrt(1.0 + pow(x, 2.0)))
# inverse multiquadric RBF
_inv_multiquadric = (lambda x:
1.0/(np.sqrt(1.0 + pow(x, 2.0))))
# internal activation function table
_internal_activation_funcs = {'sine': np.sin,
'tanh': np.tanh,
'tribas': _tribas,
'inv_tribas': _inv_tribas,
'sigmoid': _sigmoid,
'softlim': _softlim,
'hardlim': _hardlim,
'gaussian': _gaussian,
'multiquadric': _multiquadric,
'inv_multiquadric': _inv_multiquadric,
}
def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
activation_func='tanh', activation_args=None,
user_components=None, rbf_width=1.0):
super(RandomLayer, self).__init__(n_hidden=n_hidden,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args)
if (isinstance(self.activation_func, str)):
func_names = self._internal_activation_funcs.keys()
if (self.activation_func not in func_names):
msg = "unknown activation function '%s'" % self.activation_func
raise ValueError(msg)
self.alpha = alpha
self.rbf_width = rbf_width
self.user_components = user_components
self._use_mlp_input = (self.alpha != 0.0)
self._use_rbf_input = (self.alpha != 1.0)
def _get_user_components(self, key):
"""Look for given user component"""
try:
return self.user_components[key]
except (TypeError, KeyError):
return None
def _compute_radii(self):
"""Generate RBF radii"""
# use supplied radii if present
radii = self._get_user_components('radii')
# compute radii
if (radii is None):
centers = self.components_['centers']
n_centers = centers.shape[0]
max_dist = np.max(pairwise_distances(centers))
radii = np.ones(n_centers) * max_dist/sqrt(2.0 * n_centers)
self.components_['radii'] = radii
def _compute_centers(self, X, sparse, rs):
"""Generate RBF centers"""
# use supplied centers if present
centers = self._get_user_components('centers')
# use points taken uniformly from the bounding
# hyperrectangle
if (centers is None):
n_features = X.shape[1]
if (sparse):
fxr = range(n_features)
cols = [X.getcol(i) for i in fxr]
min_dtype = X.dtype.type(1.0e10)
sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
min_Xs = np.array(map(sp_min, cols))
max_dtype = X.dtype.type(-1.0e10)
sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
max_Xs = np.array(map(sp_max, cols))
else:
min_Xs = X.min(axis=0)
max_Xs = X.max(axis=0)
spans = max_Xs - min_Xs
ctrs_size = (self.n_hidden, n_features)
centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
self.components_['centers'] = centers
def _compute_biases(self, rs):
"""Generate MLP biases"""
# use supplied biases if present
biases = self._get_user_components('biases')
if (biases is None):
b_size = self.n_hidden
biases = rs.normal(size=b_size)
self.components_['biases'] = biases
def _compute_weights(self, X, rs):
"""Generate MLP weights"""
# use supplied weights if present
weights = self._get_user_components('weights')
if (weights is None):
n_features = X.shape[1]
hw_size = (n_features, self.n_hidden)
weights = rs.normal(size=hw_size)
self.components_['weights'] = weights
def _compute_input_activations(self, X):
"""Compute input activations given X"""
n_samples = X.shape[0]
mlp_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_mlp_input):
b = self.components_['biases']
w = self.components_['weights']
mlp_acts = self.alpha * (safe_sparse_dot(X, w) + b)
rbf_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_rbf_input):
radii = self.components_['radii']
centers = self.components_['centers']
scale = self.rbf_width * (1.0 - self.alpha)
rbf_acts = scale * cdist(X, centers)/radii
self.input_activations_ = mlp_acts + rbf_acts
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | RandomLayer._compute_input_activations | python | def _compute_input_activations(self, X):
n_samples = X.shape[0]
mlp_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_mlp_input):
b = self.components_['biases']
w = self.components_['weights']
mlp_acts = self.alpha * (safe_sparse_dot(X, w) + b)
rbf_acts = np.zeros((n_samples, self.n_hidden))
if (self._use_rbf_input):
radii = self.components_['radii']
centers = self.components_['centers']
scale = self.rbf_width * (1.0 - self.alpha)
rbf_acts = scale * cdist(X, centers)/radii
self.input_activations_ = mlp_acts + rbf_acts | Compute input activations given X | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L363-L381 | null | class RandomLayer(BaseRandomLayer):
"""RandomLayer is a transformer that creates a feature mapping of the
inputs that corresponds to a layer of hidden units with randomly
generated components.
The transformed values are a specified function of input activations
that are a weighted combination of dot product (multilayer perceptron)
and distance (rbf) activations:
input_activation = alpha * mlp_activation + (1-alpha) * rbf_activation
mlp_activation(x) = dot(x, weights) + bias
rbf_activation(x) = rbf_width * ||x - center||/radius
alpha and rbf_width are specified by the user
weights and biases are taken from normal distribution of
mean 0 and sd of 1
centers are taken uniformly from the bounding hyperrectangle
of the inputs, and radii are max(||x-c||)/sqrt(n_centers*2)
The input activation is transformed by a transfer function that defaults
to numpy.tanh if not specified, but can be any callable that returns an
array of the same shape as its argument (the input activation array, of
shape [n_samples, n_hidden]). Functions provided are 'sine', 'tanh',
'tribas', 'inv_tribas', 'sigmoid', 'hardlim', 'softlim', 'gaussian',
'multiquadric', or 'inv_multiquadric'.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_features, n_hidden]
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribas',
'sigmoid', 'hardlim', 'softlim', 'gaussian', 'multiquadric',
'inv_multiquadric' or a callable. If None is given, 'tanh'
will be used.
If a callable is given, it will be used to compute the activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing dot(x, hidden_weights) + bias for all samples
`components_` : dictionary containing two keys:
`bias_weights_` : numpy array of shape [n_hidden]
`hidden_weights_` : numpy array of shape [n_features, n_hidden]
See Also
--------
"""
# triangular activation function
_tribas = (lambda x: np.clip(1.0 - np.fabs(x), 0.0, 1.0))
# inverse triangular activation function
_inv_tribas = (lambda x: np.clip(np.fabs(x), 0.0, 1.0))
# sigmoid activation function
_sigmoid = (lambda x: 1.0/(1.0 + np.exp(-x)))
# hard limit activation function
_hardlim = (lambda x: np.array(x > 0.0, dtype=float))
_softlim = (lambda x: np.clip(x, 0.0, 1.0))
# gaussian RBF
_gaussian = (lambda x: np.exp(-pow(x, 2.0)))
# multiquadric RBF
_multiquadric = (lambda x:
np.sqrt(1.0 + pow(x, 2.0)))
# inverse multiquadric RBF
_inv_multiquadric = (lambda x:
1.0/(np.sqrt(1.0 + pow(x, 2.0))))
# internal activation function table
_internal_activation_funcs = {'sine': np.sin,
'tanh': np.tanh,
'tribas': _tribas,
'inv_tribas': _inv_tribas,
'sigmoid': _sigmoid,
'softlim': _softlim,
'hardlim': _hardlim,
'gaussian': _gaussian,
'multiquadric': _multiquadric,
'inv_multiquadric': _inv_multiquadric,
}
def __init__(self, n_hidden=20, alpha=0.5, random_state=None,
activation_func='tanh', activation_args=None,
user_components=None, rbf_width=1.0):
super(RandomLayer, self).__init__(n_hidden=n_hidden,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args)
if (isinstance(self.activation_func, str)):
func_names = self._internal_activation_funcs.keys()
if (self.activation_func not in func_names):
msg = "unknown activation function '%s'" % self.activation_func
raise ValueError(msg)
self.alpha = alpha
self.rbf_width = rbf_width
self.user_components = user_components
self._use_mlp_input = (self.alpha != 0.0)
self._use_rbf_input = (self.alpha != 1.0)
def _get_user_components(self, key):
"""Look for given user component"""
try:
return self.user_components[key]
except (TypeError, KeyError):
return None
def _compute_radii(self):
"""Generate RBF radii"""
# use supplied radii if present
radii = self._get_user_components('radii')
# compute radii
if (radii is None):
centers = self.components_['centers']
n_centers = centers.shape[0]
max_dist = np.max(pairwise_distances(centers))
radii = np.ones(n_centers) * max_dist/sqrt(2.0 * n_centers)
self.components_['radii'] = radii
def _compute_centers(self, X, sparse, rs):
"""Generate RBF centers"""
# use supplied centers if present
centers = self._get_user_components('centers')
# use points taken uniformly from the bounding
# hyperrectangle
if (centers is None):
n_features = X.shape[1]
if (sparse):
fxr = range(n_features)
cols = [X.getcol(i) for i in fxr]
min_dtype = X.dtype.type(1.0e10)
sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
min_Xs = np.array(map(sp_min, cols))
max_dtype = X.dtype.type(-1.0e10)
sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
max_Xs = np.array(map(sp_max, cols))
else:
min_Xs = X.min(axis=0)
max_Xs = X.max(axis=0)
spans = max_Xs - min_Xs
ctrs_size = (self.n_hidden, n_features)
centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
self.components_['centers'] = centers
def _compute_biases(self, rs):
"""Generate MLP biases"""
# use supplied biases if present
biases = self._get_user_components('biases')
if (biases is None):
b_size = self.n_hidden
biases = rs.normal(size=b_size)
self.components_['biases'] = biases
def _compute_weights(self, X, rs):
"""Generate MLP weights"""
# use supplied weights if present
weights = self._get_user_components('weights')
if (weights is None):
n_features = X.shape[1]
hw_size = (n_features, self.n_hidden)
weights = rs.normal(size=hw_size)
self.components_['weights'] = weights
def _generate_components(self, X):
"""Generate components of hidden layer given X"""
rs = check_random_state(self.random_state)
if (self._use_mlp_input):
self._compute_biases(rs)
self._compute_weights(X, rs)
if (self._use_rbf_input):
self._compute_centers(X, sp.issparse(X), rs)
self._compute_radii()
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | GRBFRandomLayer._compute_centers | python | def _compute_centers(self, X, sparse, rs):
super(GRBFRandomLayer, self)._compute_centers(X, sparse, rs)
centers = self.components_['centers']
sorted_distances = np.sort(squareform(pdist(centers)))
self.dF_vals = sorted_distances[:, -1]
self.dN_vals = sorted_distances[:, 1]/100.0
#self.dN_vals = 0.0002 * np.ones(self.dF_vals.shape)
tauNum = np.log(np.log(self.grbf_lambda) /
np.log(1.0 - self.grbf_lambda))
tauDenom = np.log(self.dF_vals/self.dN_vals)
self.tau_vals = tauNum/tauDenom
self._extra_args['taus'] = self.tau_vals | Generate centers, then compute tau, dF and dN vals | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L500-L518 | null | class GRBFRandomLayer(RBFRandomLayer):
"""Random Generalized RBF Hidden Layer transformer
Creates a layer of radial basis function units where:
f(a), s.t. a = ||x-c||/r
with c the unit center
and f() is exp(-gamma * a^tau) where tau and r are computed
based on [1]
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate, ignored if centers are provided
`grbf_lambda` : float, optional (default=0.05)
GRBF shape parameter
`gamma` : {int, float} optional (default=1.0)
Width multiplier for GRBF distance argument
`centers` : array of shape (n_hidden, n_features), optional (default=None)
If provided, overrides internal computation of the centers
`radii` : array of shape (n_hidden), optional (default=None)
If provided, overrides internal computation of the radii
`use_exemplars` : bool, optional (default=False)
If True, uses random examples from the input to determine the RBF
centers, ignored if centers are provided
`random_state` : int or RandomState instance, optional (default=None)
Control the pseudo random number generator used to generate the
centers at fit time, ignored if centers are provided
Attributes
----------
`components_` : dictionary containing two keys:
`radii_` : numpy array of shape [n_hidden]
`centers_` : numpy array of shape [n_hidden, n_features]
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing ||x-c||/r for all samples
See Also
--------
ELMRegressor, ELMClassifier, SimpleELMRegressor, SimpleELMClassifier,
SimpleRandomLayer
References
----------
.. [1] Fernandez-Navarro, et al, "MELM-GRBF: a modified version of the
extreme learning machine for generalized radial basis function
neural networks", Neurocomputing 74 (2011), 2502-2510
"""
# def _grbf(acts, taus):
# """GRBF activation function"""
# return np.exp(np.exp(-pow(acts, taus)))
_grbf = (lambda acts, taus: np.exp(np.exp(-pow(acts, taus))))
_internal_activation_funcs = {'grbf': _grbf}
def __init__(self, n_hidden=20, grbf_lambda=0.001,
centers=None, radii=None, random_state=None):
super(GRBFRandomLayer, self).__init__(n_hidden=n_hidden,
activation_func='grbf',
centers=centers, radii=radii,
random_state=random_state)
self.grbf_lambda = grbf_lambda
self.dN_vals = None
self.dF_vals = None
self.tau_vals = None
# get centers from superclass, then calculate tau_vals
# according to ref [1]
# get radii according to ref [1]
def _compute_radii(self):
"""Generate radii"""
denom = pow(-np.log(self.grbf_lambda), 1.0/self.tau_vals)
self.components_['radii'] = self.dF_vals/denom
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/random_layer.py | GRBFRandomLayer._compute_radii | python | def _compute_radii(self):
denom = pow(-np.log(self.grbf_lambda), 1.0/self.tau_vals)
self.components_['radii'] = self.dF_vals/denom | Generate radii | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/random_layer.py#L521-L525 | null | class GRBFRandomLayer(RBFRandomLayer):
"""Random Generalized RBF Hidden Layer transformer
Creates a layer of radial basis function units where:
f(a), s.t. a = ||x-c||/r
with c the unit center
and f() is exp(-gamma * a^tau) where tau and r are computed
based on [1]
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate, ignored if centers are provided
`grbf_lambda` : float, optional (default=0.05)
GRBF shape parameter
`gamma` : {int, float} optional (default=1.0)
Width multiplier for GRBF distance argument
`centers` : array of shape (n_hidden, n_features), optional (default=None)
If provided, overrides internal computation of the centers
`radii` : array of shape (n_hidden), optional (default=None)
If provided, overrides internal computation of the radii
`use_exemplars` : bool, optional (default=False)
If True, uses random examples from the input to determine the RBF
centers, ignored if centers are provided
`random_state` : int or RandomState instance, optional (default=None)
Control the pseudo random number generator used to generate the
centers at fit time, ignored if centers are provided
Attributes
----------
`components_` : dictionary containing two keys:
`radii_` : numpy array of shape [n_hidden]
`centers_` : numpy array of shape [n_hidden, n_features]
`input_activations_` : numpy array of shape [n_samples, n_hidden]
Array containing ||x-c||/r for all samples
See Also
--------
ELMRegressor, ELMClassifier, SimpleELMRegressor, SimpleELMClassifier,
SimpleRandomLayer
References
----------
.. [1] Fernandez-Navarro, et al, "MELM-GRBF: a modified version of the
extreme learning machine for generalized radial basis function
neural networks", Neurocomputing 74 (2011), 2502-2510
"""
# def _grbf(acts, taus):
# """GRBF activation function"""
# return np.exp(np.exp(-pow(acts, taus)))
_grbf = (lambda acts, taus: np.exp(np.exp(-pow(acts, taus))))
_internal_activation_funcs = {'grbf': _grbf}
def __init__(self, n_hidden=20, grbf_lambda=0.001,
centers=None, radii=None, random_state=None):
super(GRBFRandomLayer, self).__init__(n_hidden=n_hidden,
activation_func='grbf',
centers=centers, radii=radii,
random_state=random_state)
self.grbf_lambda = grbf_lambda
self.dN_vals = None
self.dF_vals = None
self.tau_vals = None
# get centers from superclass, then calculate tau_vals
# according to ref [1]
def _compute_centers(self, X, sparse, rs):
"""Generate centers, then compute tau, dF and dN vals"""
super(GRBFRandomLayer, self)._compute_centers(X, sparse, rs)
centers = self.components_['centers']
sorted_distances = np.sort(squareform(pdist(centers)))
self.dF_vals = sorted_distances[:, -1]
self.dN_vals = sorted_distances[:, 1]/100.0
#self.dN_vals = 0.0002 * np.ones(self.dF_vals.shape)
tauNum = np.log(np.log(self.grbf_lambda) /
np.log(1.0 - self.grbf_lambda))
tauDenom = np.log(self.dF_vals/self.dN_vals)
self.tau_vals = tauNum/tauDenom
self._extra_args['taus'] = self.tau_vals
# get radii according to ref [1]
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | GenELMRegressor._fit_regression | python | def _fit_regression(self, y):
if self.regressor is None:
self.coefs_ = safe_sparse_dot(pinv2(self.hidden_activations_), y)
else:
self.regressor.fit(self.hidden_activations_, y)
self.fitted_ = True | fit regression using pseudo-inverse
or supplied regressor | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L149-L160 | null | class GenELMRegressor(BaseELM, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
Parameters
----------
`hidden_layer` : random_layer instance, optional
(default=MLPRandomLayer(random_state=0))
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
Attributes
----------
`coefs_` : numpy array
Fitted regression coefficients if no regressor supplied.
`fitted_` : bool
Flag set when fit has been called already.
`hidden_activations_` : numpy array of shape [n_samples, n_hidden]
Hidden layer activations for last input.
See Also
--------
RBFRandomLayer, MLPRandomLayer, ELMRegressor, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, hidden_layer=MLPRandomLayer(random_state=0), regressor=None):
"""
:param hidden_layer:
:param regressor:
:return:
"""
super(GenELMRegressor, self).__init__(hidden_layer, regressor)
self.coefs_ = None
self.fitted_ = False
self.hidden_activations_ = None
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
# fit random hidden layer and compute the hidden layer activations
self.hidden_activations_ = self.hidden_layer.fit_transform(X)
# solve the regression from hidden activations to outputs
self._fit_regression(as_float_array(y, copy=True))
return self
def _get_predictions(self):
"""
get predictions using internal least squares/supplied regressor
"""
if self.regressor is None:
preds = safe_sparse_dot(self.hidden_activations_, self.coefs_)
else:
preds = self.regressor.predict(self.hidden_activations_)
return preds
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
if not self.fitted_:
raise ValueError("ELMRegressor not fitted")
# compute hidden layer activations
self.hidden_activations_ = self.hidden_layer.transform(X)
# compute output predictions for new hidden activations
predictions = self._get_predictions()
return predictions
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | GenELMRegressor.fit | python | def fit(self, X, y):
# fit random hidden layer and compute the hidden layer activations
self.hidden_activations_ = self.hidden_layer.fit_transform(X)
# solve the regression from hidden activations to outputs
self._fit_regression(as_float_array(y, copy=True))
return self | Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L162-L188 | null | class GenELMRegressor(BaseELM, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
Parameters
----------
`hidden_layer` : random_layer instance, optional
(default=MLPRandomLayer(random_state=0))
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
Attributes
----------
`coefs_` : numpy array
Fitted regression coefficients if no regressor supplied.
`fitted_` : bool
Flag set when fit has been called already.
`hidden_activations_` : numpy array of shape [n_samples, n_hidden]
Hidden layer activations for last input.
See Also
--------
RBFRandomLayer, MLPRandomLayer, ELMRegressor, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, hidden_layer=MLPRandomLayer(random_state=0), regressor=None):
"""
:param hidden_layer:
:param regressor:
:return:
"""
super(GenELMRegressor, self).__init__(hidden_layer, regressor)
self.coefs_ = None
self.fitted_ = False
self.hidden_activations_ = None
def _fit_regression(self, y):
"""
fit regression using pseudo-inverse
or supplied regressor
"""
if self.regressor is None:
self.coefs_ = safe_sparse_dot(pinv2(self.hidden_activations_), y)
else:
self.regressor.fit(self.hidden_activations_, y)
self.fitted_ = True
def _get_predictions(self):
"""
get predictions using internal least squares/supplied regressor
"""
if self.regressor is None:
preds = safe_sparse_dot(self.hidden_activations_, self.coefs_)
else:
preds = self.regressor.predict(self.hidden_activations_)
return preds
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
if not self.fitted_:
raise ValueError("ELMRegressor not fitted")
# compute hidden layer activations
self.hidden_activations_ = self.hidden_layer.transform(X)
# compute output predictions for new hidden activations
predictions = self._get_predictions()
return predictions
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | GenELMRegressor._get_predictions | python | def _get_predictions(self):
if self.regressor is None:
preds = safe_sparse_dot(self.hidden_activations_, self.coefs_)
else:
preds = self.regressor.predict(self.hidden_activations_)
return preds | get predictions using internal least squares/supplied regressor | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L190-L199 | null | class GenELMRegressor(BaseELM, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
Parameters
----------
`hidden_layer` : random_layer instance, optional
(default=MLPRandomLayer(random_state=0))
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
Attributes
----------
`coefs_` : numpy array
Fitted regression coefficients if no regressor supplied.
`fitted_` : bool
Flag set when fit has been called already.
`hidden_activations_` : numpy array of shape [n_samples, n_hidden]
Hidden layer activations for last input.
See Also
--------
RBFRandomLayer, MLPRandomLayer, ELMRegressor, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, hidden_layer=MLPRandomLayer(random_state=0), regressor=None):
"""
:param hidden_layer:
:param regressor:
:return:
"""
super(GenELMRegressor, self).__init__(hidden_layer, regressor)
self.coefs_ = None
self.fitted_ = False
self.hidden_activations_ = None
def _fit_regression(self, y):
"""
fit regression using pseudo-inverse
or supplied regressor
"""
if self.regressor is None:
self.coefs_ = safe_sparse_dot(pinv2(self.hidden_activations_), y)
else:
self.regressor.fit(self.hidden_activations_, y)
self.fitted_ = True
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
# fit random hidden layer and compute the hidden layer activations
self.hidden_activations_ = self.hidden_layer.fit_transform(X)
# solve the regression from hidden activations to outputs
self._fit_regression(as_float_array(y, copy=True))
return self
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
if not self.fitted_:
raise ValueError("ELMRegressor not fitted")
# compute hidden layer activations
self.hidden_activations_ = self.hidden_layer.transform(X)
# compute output predictions for new hidden activations
predictions = self._get_predictions()
return predictions
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | GenELMRegressor.predict | python | def predict(self, X):
if not self.fitted_:
raise ValueError("ELMRegressor not fitted")
# compute hidden layer activations
self.hidden_activations_ = self.hidden_layer.transform(X)
# compute output predictions for new hidden activations
predictions = self._get_predictions()
return predictions | Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L201-L223 | null | class GenELMRegressor(BaseELM, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
Parameters
----------
`hidden_layer` : random_layer instance, optional
(default=MLPRandomLayer(random_state=0))
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
Attributes
----------
`coefs_` : numpy array
Fitted regression coefficients if no regressor supplied.
`fitted_` : bool
Flag set when fit has been called already.
`hidden_activations_` : numpy array of shape [n_samples, n_hidden]
Hidden layer activations for last input.
See Also
--------
RBFRandomLayer, MLPRandomLayer, ELMRegressor, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, hidden_layer=MLPRandomLayer(random_state=0), regressor=None):
"""
:param hidden_layer:
:param regressor:
:return:
"""
super(GenELMRegressor, self).__init__(hidden_layer, regressor)
self.coefs_ = None
self.fitted_ = False
self.hidden_activations_ = None
def _fit_regression(self, y):
"""
fit regression using pseudo-inverse
or supplied regressor
"""
if self.regressor is None:
self.coefs_ = safe_sparse_dot(pinv2(self.hidden_activations_), y)
else:
self.regressor.fit(self.hidden_activations_, y)
self.fitted_ = True
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
# fit random hidden layer and compute the hidden layer activations
self.hidden_activations_ = self.hidden_layer.fit_transform(X)
# solve the regression from hidden activations to outputs
self._fit_regression(as_float_array(y, copy=True))
return self
def _get_predictions(self):
"""
get predictions using internal least squares/supplied regressor
"""
if self.regressor is None:
preds = safe_sparse_dot(self.hidden_activations_, self.coefs_)
else:
preds = self.regressor.predict(self.hidden_activations_)
return preds
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | GenELMClassifier.fit | python | def fit(self, X, y):
self.classes_ = np.unique(y)
y_bin = self.binarizer.fit_transform(y)
self.genelm_regressor_.fit(X, y_bin)
return self | Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L299-L324 | null | class GenELMClassifier(BaseELM, ClassifierMixin):
"""
GenELMClassifier is a classifier based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
Parameters
----------
`hidden_layer` : random_layer instance, optional
(default=MLPRandomLayer(random_state=0))
`binarizer` : LabelBinarizer, optional
(default=LabelBinarizer(-1, 1))
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
Attributes
----------
`classes_` : numpy array of shape [n_classes]
Array of class labels
`genelm_regressor_` : ELMRegressor instance
Performs actual fit of binarized values
See Also
--------
RBFRandomLayer, MLPRandomLayer, ELMRegressor, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, hidden_layer=MLPRandomLayer(random_state=0), binarizer=LabelBinarizer(-1, 1), regressor=None):
"""
:param hidden_layer:
:param binarizer:
:param regressor:
:return:
"""
super(GenELMClassifier, self).__init__(hidden_layer, regressor)
self.binarizer = binarizer
self.classes_ = None
self.genelm_regressor_ = GenELMRegressor(hidden_layer, regressor)
def decision_function(self, X):
"""
This function return the decision function values related to each
class on an array of test vectors X.
Parameters
----------
X : array-like of shape [n_samples, n_features]
Returns
-------
C : array of shape [n_samples, n_classes] or [n_samples,]
Decision function values related to each class, per sample.
In the two-class case, the shape is [n_samples,]
"""
return self.genelm_regressor_.predict(X)
def predict(self, X):
"""Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
raw_predictions = self.decision_function(X)
class_predictions = self.binarizer.inverse_transform(raw_predictions)
return class_predictions
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | GenELMClassifier.predict | python | def predict(self, X):
raw_predictions = self.decision_function(X)
class_predictions = self.binarizer.inverse_transform(raw_predictions)
return class_predictions | Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L326-L341 | null | class GenELMClassifier(BaseELM, ClassifierMixin):
"""
GenELMClassifier is a classifier based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
Parameters
----------
`hidden_layer` : random_layer instance, optional
(default=MLPRandomLayer(random_state=0))
`binarizer` : LabelBinarizer, optional
(default=LabelBinarizer(-1, 1))
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
Attributes
----------
`classes_` : numpy array of shape [n_classes]
Array of class labels
`genelm_regressor_` : ELMRegressor instance
Performs actual fit of binarized values
See Also
--------
RBFRandomLayer, MLPRandomLayer, ELMRegressor, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, hidden_layer=MLPRandomLayer(random_state=0), binarizer=LabelBinarizer(-1, 1), regressor=None):
"""
:param hidden_layer:
:param binarizer:
:param regressor:
:return:
"""
super(GenELMClassifier, self).__init__(hidden_layer, regressor)
self.binarizer = binarizer
self.classes_ = None
self.genelm_regressor_ = GenELMRegressor(hidden_layer, regressor)
def decision_function(self, X):
"""
This function return the decision function values related to each
class on an array of test vectors X.
Parameters
----------
X : array-like of shape [n_samples, n_features]
Returns
-------
C : array of shape [n_samples, n_classes] or [n_samples,]
Decision function values related to each class, per sample.
In the two-class case, the shape is [n_samples,]
"""
return self.genelm_regressor_.predict(X)
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
self.classes_ = np.unique(y)
y_bin = self.binarizer.fit_transform(y)
self.genelm_regressor_.fit(X, y_bin)
return self
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | ELMRegressor._create_random_layer | python | def _create_random_layer(self):
return RandomLayer(n_hidden=self.n_hidden,
alpha=self.alpha,
random_state=self.random_state,
activation_func=self.activation_func,
activation_args=self.activation_args,
user_components=self.user_components,
rbf_width=self.rbf_width) | Pass init params to RandomLayer | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L430-L439 | null | class ELMRegressor(BaseEstimator, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
ELMRegressor is a wrapper for an GenELMRegressor that uses a
RandomLayer and passes the __init__ parameters through
to the hidden layer generated by the fit() method.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate in the SimpleRandomLayer
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribase', 'sigmoid',
'hardlim', 'softlim', 'gaussian', 'multiquadric', 'inv_multiquadric' or
a callable. If none is given, 'tanh' will be used. If a callable
is given, it will be used to compute the hidden unit activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_hidden, n_features]
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`genelm_regressor_` : GenELMRegressor object
Wrapped object that actually performs the fit.
See Also
--------
RandomLayer, RBFRandomLayer, MLPRandomLayer,
GenELMRegressor, GenELMClassifier, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, n_hidden=20, alpha=0.5, rbf_width=1.0, activation_func='tanh', activation_args=None,
user_components=None, regressor=None, random_state=None):
self.n_hidden = n_hidden
self.alpha = alpha
self.random_state = random_state
self.activation_func = activation_func
self.activation_args = activation_args
self.user_components = user_components
self.rbf_width = rbf_width
self.regressor = regressor
self._genelm_regressor = None
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
rhl = self._create_random_layer()
self._genelm_regressor = GenELMRegressor(hidden_layer=rhl,
regressor=self.regressor)
self._genelm_regressor.fit(X, y)
return self
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
if self._genelm_regressor is None:
raise ValueError("SimpleELMRegressor not fitted")
return self._genelm_regressor.predict(X)
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | ELMRegressor.fit | python | def fit(self, X, y):
rhl = self._create_random_layer()
self._genelm_regressor = GenELMRegressor(hidden_layer=rhl,
regressor=self.regressor)
self._genelm_regressor.fit(X, y)
return self | Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L441-L465 | null | class ELMRegressor(BaseEstimator, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
ELMRegressor is a wrapper for an GenELMRegressor that uses a
RandomLayer and passes the __init__ parameters through
to the hidden layer generated by the fit() method.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate in the SimpleRandomLayer
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribase', 'sigmoid',
'hardlim', 'softlim', 'gaussian', 'multiquadric', 'inv_multiquadric' or
a callable. If none is given, 'tanh' will be used. If a callable
is given, it will be used to compute the hidden unit activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_hidden, n_features]
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`genelm_regressor_` : GenELMRegressor object
Wrapped object that actually performs the fit.
See Also
--------
RandomLayer, RBFRandomLayer, MLPRandomLayer,
GenELMRegressor, GenELMClassifier, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, n_hidden=20, alpha=0.5, rbf_width=1.0, activation_func='tanh', activation_args=None,
user_components=None, regressor=None, random_state=None):
self.n_hidden = n_hidden
self.alpha = alpha
self.random_state = random_state
self.activation_func = activation_func
self.activation_args = activation_args
self.user_components = user_components
self.rbf_width = rbf_width
self.regressor = regressor
self._genelm_regressor = None
def _create_random_layer(self):
"""Pass init params to RandomLayer"""
return RandomLayer(n_hidden=self.n_hidden,
alpha=self.alpha,
random_state=self.random_state,
activation_func=self.activation_func,
activation_args=self.activation_args,
user_components=self.user_components,
rbf_width=self.rbf_width)
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
if self._genelm_regressor is None:
raise ValueError("SimpleELMRegressor not fitted")
return self._genelm_regressor.predict(X)
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | ELMRegressor.predict | python | def predict(self, X):
if self._genelm_regressor is None:
raise ValueError("SimpleELMRegressor not fitted")
return self._genelm_regressor.predict(X) | Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L467-L483 | null | class ELMRegressor(BaseEstimator, RegressorMixin):
"""
ELMRegressor is a regressor based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
ELMRegressor is a wrapper for an GenELMRegressor that uses a
RandomLayer and passes the __init__ parameters through
to the hidden layer generated by the fit() method.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate in the SimpleRandomLayer
`alpha` : float, optional (default=0.5)
Mixing coefficient for distance and dot product input activations:
activation = alpha*mlp_activation + (1-alpha)*rbf_width*rbf_activation
`rbf_width` : float, optional (default=1.0)
multiplier on rbf_activation
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribase', 'sigmoid',
'hardlim', 'softlim', 'gaussian', 'multiquadric', 'inv_multiquadric' or
a callable. If none is given, 'tanh' will be used. If a callable
is given, it will be used to compute the hidden unit activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`user_components`: dictionary, optional (default=None)
dictionary containing values for components that woud otherwise be
randomly generated. Valid key/value pairs are as follows:
'radii' : array-like of shape [n_hidden]
'centers': array-like of shape [n_hidden, n_features]
'biases' : array-like of shape [n_hidden]
'weights': array-like of shape [n_hidden, n_features]
`regressor` : regressor instance, optional (default=None)
If provided, this object is used to perform the regression from hidden
unit activations to the outputs and subsequent predictions. If not
present, an ordinary linear least squares fit is performed
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`genelm_regressor_` : GenELMRegressor object
Wrapped object that actually performs the fit.
See Also
--------
RandomLayer, RBFRandomLayer, MLPRandomLayer,
GenELMRegressor, GenELMClassifier, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, n_hidden=20, alpha=0.5, rbf_width=1.0, activation_func='tanh', activation_args=None,
user_components=None, regressor=None, random_state=None):
self.n_hidden = n_hidden
self.alpha = alpha
self.random_state = random_state
self.activation_func = activation_func
self.activation_args = activation_args
self.user_components = user_components
self.rbf_width = rbf_width
self.regressor = regressor
self._genelm_regressor = None
def _create_random_layer(self):
"""Pass init params to RandomLayer"""
return RandomLayer(n_hidden=self.n_hidden,
alpha=self.alpha,
random_state=self.random_state,
activation_func=self.activation_func,
activation_args=self.activation_args,
user_components=self.user_components,
rbf_width=self.rbf_width)
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
rhl = self._create_random_layer()
self._genelm_regressor = GenELMRegressor(hidden_layer=rhl,
regressor=self.regressor)
self._genelm_regressor.fit(X, y)
return self
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | ELMClassifier.fit | python | def fit(self, X, y):
self.classes_ = np.unique(y)
y_bin = self.binarizer.fit_transform(y)
super(ELMClassifier, self).fit(X, y_bin)
return self | Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self. | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L572-L598 | null | class ELMClassifier(ELMRegressor):
"""
ELMClassifier is a classifier based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
ELMClassifier is an ELMRegressor subclass that first binarizes the
data, then uses the superclass to compute the decision function that
is then unbinarized to yield the prediction.
The params for the RandomLayer used in the input transform are
exposed in the ELMClassifier constructor.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate in the SimpleRandomLayer
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribase', 'sigmoid',
'hardlim', 'softlim', 'gaussian', 'multiquadric', 'inv_multiquadric' or
a callable. If none is given, 'tanh' will be used. If a callable
is given, it will be used to compute the hidden unit activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`classes_` : numpy array of shape [n_classes]
Array of class labels
See Also
--------
RandomLayer, RBFRandomLayer, MLPRandomLayer,
GenELMRegressor, GenELMClassifier, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, n_hidden=20, alpha=0.5, rbf_width=1.0, activation_func='tanh', activation_args=None,
user_components=None, regressor=None, binarizer=LabelBinarizer(-1, 1), random_state=None):
super(ELMClassifier, self).__init__(n_hidden=n_hidden,
alpha=alpha,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args,
user_components=user_components,
rbf_width=rbf_width,
regressor=regressor)
self.classes_ = None
self.binarizer = binarizer
def decision_function(self, X):
"""
This function return the decision function values related to each
class on an array of test vectors X.
Parameters
----------
X : array-like of shape [n_samples, n_features]
Returns
-------
C : array of shape [n_samples, n_classes] or [n_samples,]
Decision function values related to each class, per sample.
In the two-class case, the shape is [n_samples,]
"""
return super(ELMClassifier, self).predict(X)
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
raw_predictions = self.decision_function(X)
class_predictions = self.binarizer.inverse_transform(raw_predictions)
return class_predictions
def score(self, X, y):
"""Force use of accuracy score since we don't inherit
from ClassifierMixin"""
from sklearn.metrics import accuracy_score
return accuracy_score(y, self.predict(X))
|
wdm0006/sklearn-extensions | sklearn_extensions/extreme_learning_machines/elm.py | ELMClassifier.score | python | def score(self, X, y):
from sklearn.metrics import accuracy_score
return accuracy_score(y, self.predict(X)) | Force use of accuracy score since we don't inherit
from ClassifierMixin | train | https://github.com/wdm0006/sklearn-extensions/blob/329f3efdb8c3c3a367b264f7c76c76411a784530/sklearn_extensions/extreme_learning_machines/elm.py#L618-L623 | null | class ELMClassifier(ELMRegressor):
"""
ELMClassifier is a classifier based on the Extreme Learning Machine.
An Extreme Learning Machine (ELM) is a single layer feedforward
network with a random hidden layer components and ordinary linear
least squares fitting of the hidden->output weights by default.
[1][2]
ELMClassifier is an ELMRegressor subclass that first binarizes the
data, then uses the superclass to compute the decision function that
is then unbinarized to yield the prediction.
The params for the RandomLayer used in the input transform are
exposed in the ELMClassifier constructor.
Parameters
----------
`n_hidden` : int, optional (default=20)
Number of units to generate in the SimpleRandomLayer
`activation_func` : {callable, string} optional (default='tanh')
Function used to transform input activation
It must be one of 'tanh', 'sine', 'tribas', 'inv_tribase', 'sigmoid',
'hardlim', 'softlim', 'gaussian', 'multiquadric', 'inv_multiquadric' or
a callable. If none is given, 'tanh' will be used. If a callable
is given, it will be used to compute the hidden unit activations.
`activation_args` : dictionary, optional (default=None)
Supplies keyword arguments for a callable activation_func
`random_state` : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
hidden unit weights at fit time.
Attributes
----------
`classes_` : numpy array of shape [n_classes]
Array of class labels
See Also
--------
RandomLayer, RBFRandomLayer, MLPRandomLayer,
GenELMRegressor, GenELMClassifier, ELMClassifier
References
----------
.. [1] http://www.extreme-learning-machines.org
.. [2] G.-B. Huang, Q.-Y. Zhu and C.-K. Siew, "Extreme Learning Machine:
Theory and Applications", Neurocomputing, vol. 70, pp. 489-501,
2006.
"""
def __init__(self, n_hidden=20, alpha=0.5, rbf_width=1.0, activation_func='tanh', activation_args=None,
user_components=None, regressor=None, binarizer=LabelBinarizer(-1, 1), random_state=None):
super(ELMClassifier, self).__init__(n_hidden=n_hidden,
alpha=alpha,
random_state=random_state,
activation_func=activation_func,
activation_args=activation_args,
user_components=user_components,
rbf_width=rbf_width,
regressor=regressor)
self.classes_ = None
self.binarizer = binarizer
def decision_function(self, X):
"""
This function return the decision function values related to each
class on an array of test vectors X.
Parameters
----------
X : array-like of shape [n_samples, n_features]
Returns
-------
C : array of shape [n_samples, n_classes] or [n_samples,]
Decision function values related to each class, per sample.
In the two-class case, the shape is [n_samples,]
"""
return super(ELMClassifier, self).predict(X)
def fit(self, X, y):
"""
Fit the model using X, y as training data.
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like of shape [n_samples, n_outputs]
Target values (class labels in classification, real numbers in
regression)
Returns
-------
self : object
Returns an instance of self.
"""
self.classes_ = np.unique(y)
y_bin = self.binarizer.fit_transform(y)
super(ELMClassifier, self).fit(X, y_bin)
return self
def predict(self, X):
"""
Predict values using the model
Parameters
----------
X : {array-like, sparse matrix} of shape [n_samples, n_features]
Returns
-------
C : numpy array of shape [n_samples, n_outputs]
Predicted values.
"""
raw_predictions = self.decision_function(X)
class_predictions = self.binarizer.inverse_transform(raw_predictions)
return class_predictions
|
gamechanger/schemer | schemer/extension_types.py | Mixed | python | def Mixed(*types):
if len(types) < 2:
raise ValueError("Mixed type requires at least 2 specific types")
types = set(types) # dedupe
class MixedType(type):
def __instancecheck__(cls, instance):
"""Returns true if the given value is an instance of
one of the types enclosed by this mixed type."""
for mtype in types:
if isinstance(instance, mtype):
return True
return False
class Mixed(object):
__metaclass__ = MixedType
return Mixed | Mixed type, used to indicate a field in a schema can be
one of many types. Use as a last resort only.
The Mixed type can be used directly as a class to indicate
any type is permitted for a given field:
`"my_field": {"type": Mixed}`
It can also be instantiated with list of specific types the
field may is allowed to be for more control:
`"my_field": {"type": Mixed(ObjectId, int)}` | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/extension_types.py#L1-L28 | null | |
gamechanger/schemer | schemer/validators.py | one_of | python | def one_of(*args):
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate | Validates that a field value matches one of the values
given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L8-L21 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | gte | python | def gte(min_value):
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate | Validates that a field value is greater than or equal to the
value given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L24-L32 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | lte | python | def lte(max_value):
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate | Validates that a field value is less than or equal to the
value given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L35-L43 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | gt | python | def gt(gt_value):
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate | Validates that a field value is greater than the
value given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L46-L54 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | lt | python | def lt(lt_value):
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate | Validates that a field value is less than the
value given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L57-L65 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | between | python | def between(min_value, max_value):
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate | Validates that a field value is between the two values
given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L68-L80 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | length | python | def length(min=None, max=None):
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate | Validates that a field value's length is between the bounds given to this
validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L83-L94 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | match | python | def match(pattern):
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate | Validates that a field value matches the regex given to this validator. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L97-L106 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | is_email | python | def is_email():
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate | Validates that a fields value is a valid email address. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L108-L129 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | is_url | python | def is_url():
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate | Validates that a fields value is a valid URL. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L131-L147 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | each_item | python | def each_item(*validators):
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate | A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))} | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L150-L166 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def distinct():
"""
Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates.
"""
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate
|
gamechanger/schemer | schemer/validators.py | distinct | python | def distinct():
def validate(value):
for i, item in enumerate(value):
if item in value[i+1:]:
return e("{} is not a distinct set of values", value)
return validate | Validates that all items in the given field list value are distinct,
i.e. that the list contains no duplicates. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L169-L178 | null | import re
from pprint import pformat
def e(string, *args):
"""Function which formats error messages."""
return string.format(*[pformat(arg) for arg in args])
def one_of(*args):
"""
Validates that a field value matches one of the values
given to this validator.
"""
if len(args) == 1 and isinstance(args[0], list):
items = args[0]
else:
items = list(args)
def validate(value):
if not value in items:
return e("{} is not in the list {}", value, items)
return validate
def gte(min_value):
"""
Validates that a field value is greater than or equal to the
value given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}", value, min_value)
return validate
def lte(max_value):
"""
Validates that a field value is less than or equal to the
value given to this validator.
"""
def validate(value):
if value > max_value:
return e("{} is not less than or equal to {}", value, max_value)
return validate
def gt(gt_value):
"""
Validates that a field value is greater than the
value given to this validator.
"""
def validate(value):
if value <= gt_value:
return e("{} is not greater than {}", value, gt_value)
return validate
def lt(lt_value):
"""
Validates that a field value is less than the
value given to this validator.
"""
def validate(value):
if value >= lt_value:
return e("{} is not less than {}", value, lt_value)
return validate
def between(min_value, max_value):
"""
Validates that a field value is between the two values
given to this validator.
"""
def validate(value):
if value < min_value:
return e("{} is not greater than or equal to {}",
value, min_value)
if value > max_value:
return e("{} is not less than or equal to {}",
value, max_value)
return validate
def length(min=None, max=None):
"""
Validates that a field value's length is between the bounds given to this
validator.
"""
def validate(value):
if min and len(value) < min:
return e("{} does not have a length of at least {}", value, min)
if max and len(value) > max:
return e("{} does not have a length of at most {}", value, max)
return validate
def match(pattern):
"""
Validates that a field value matches the regex given to this validator.
"""
regex = re.compile(pattern)
def validate(value):
if not regex.match(value):
return e("{} does not match the pattern {}", value, pattern)
return validate
def is_email():
"""
Validates that a fields value is a valid email address.
"""
email = (
ur'(?!^\.)' # No dot at start
ur'(?!.*\.@)' # No dot before at sign
ur'(?!.*@\.)' # No dot after at sign
ur'(?!.*\.$)' # No dot at the end
ur'(?!.*\.\.)' # No double dots anywhere
ur'^\S+' # Starts with one or more non-whitespace characters
ur'@' # Contains an at sign
ur'\S+$' # Ends with one or more non-whitespace characters
)
regex = re.compile(email, re.IGNORECASE | re.UNICODE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid email address", value)
return validate
def is_url():
"""
Validates that a fields value is a valid URL.
"""
# Stolen from Django
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def validate(value):
if not regex.match(value):
return e("{} is not a valid URL", value)
return validate
def each_item(*validators):
"""
A wrapper which applies the given validators to each item in a field
value of type `list`.
Example usage in a Schema:
"my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
"""
def validate(value):
for item in value:
for validator in validators:
error = validator(item)
if error:
return error
return None
return validate
|
gamechanger/schemer | schemer/__init__.py | Schema.apply_defaults | python | def apply_defaults(self, instance):
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item) | Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L26-L49 | null | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema.validate | python | def validate(self, instance):
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors) | Validates the given document against this schema. Raises a
ValidationException if there are any failures. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L51-L58 | [
"def _validate_instance(self, instance, errors, path_prefix=''):\n \"\"\"Validates that the given instance of a document conforms to the given schema's\n structure and validations. Any validation errors are added to the given errors\n collection. The caller should assume the instance is considered valid if... | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._verify | python | def _verify(self, path_prefix=None):
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path) | Verifies that this schema's doc spec is valid and makes sense. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L67-L76 | [
"def _append_path(self, prefix, field):\n \"\"\"Appends the given field to the given path prefix.\"\"\"\n if prefix:\n return \"{}.{}\".format(prefix, field)\n else:\n return field\n",
"def _verify_field_spec(self, spec, path):\n \"\"\"Verifies a given field specification is valid, recur... | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._verify_field_spec | python | def _verify_field_spec(self, spec, path):
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path) | Verifies a given field specification is valid, recursing into nested schemas if required. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L79-L106 | [
"def _verify_type(self, spec, path):\n \"\"\"Verify that the 'type' in the spec is valid\"\"\"\n field_type = spec['type']\n\n if isinstance(field_type, Schema):\n # Nested documents cannot have validation\n if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):\n... | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._verify_type | python | def _verify_type(self, spec, path):
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path) | Verify that the 'type' in the spec is valid | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L108-L123 | null | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._verify_default | python | def _verify_default(self, spec, path):
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path) | Verifies that the default specified in the given spec is valid. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L128-L156 | null | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._verify_validates | python | def _verify_validates(self, spec, path):
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path) | Verify thats the 'validates' argument is valid. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L159-L167 | null | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._verify_validator | python | def _verify_validator(self, validator, path):
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path) | Verifies that a given validator associated with the field at the given path is legitimate. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L170-L180 | null | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._validate_instance | python | def _validate_instance(self, instance, errors, path_prefix=''):
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema" | Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L183-L215 | [
"def _append_path(self, prefix, field):\n \"\"\"Appends the given field to the given path prefix.\"\"\"\n if prefix:\n return \"{}.{}\".format(prefix, field)\n else:\n return field\n",
"def _validate_value(self, value, field_spec, path, errors):\n \"\"\"Validates that the given field val... | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_value(self, value, field_spec, path, errors):
"""Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection."""
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value)
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
gamechanger/schemer | schemer/__init__.py | Schema._validate_value | python | def _validate_value(self, value, field_spec, path, errors):
# Check if the value is None and add an error if the field is not nullable.
# Note that for backward compatibility reasons, the default value of 'nullable'
# is the inverse of 'required' (which use to mean both that the key be present
# and not set to None).
if value is None:
if not field_spec.get('nullable', not field_spec.get('required', False)):
errors[path] = "{} is not nullable.".format(path)
return
# All fields should have a type
field_type = field_spec['type']
if isinstance(field_type, types.FunctionType):
try:
field_type = field_type(value)
except Exception as e:
raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
if not isinstance(field_type, (type, Schema, Array)):
raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)
# If our field is an embedded document, recurse into it
if isinstance(field_type, Schema):
if isinstance(value, dict):
field_type._validate_instance(value, errors, path)
else:
errors[path] = "{} should be an embedded document".format(path)
return
elif isinstance(field_type, Array):
if isinstance(value, list):
is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
for i, item in enumerate(value):
contained_type = field_type.contained_type
if is_dynamic:
contained_type = contained_type(item)
instance_path = self._append_path(path, i)
if isinstance(contained_type, Schema):
contained_type._validate_instance(item, errors, instance_path)
elif not isinstance(item, contained_type):
errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
continue
else:
errors[path] = "{} should be an embedded array".format(path)
return
elif not isinstance(value, field_type):
errors[path] = "Field should be of type {}".format(field_type)
return
validations = field_spec.get('validates', None)
if validations is None:
return
self._apply_validations(errors, path, validations, value) | Validates that the given field value is valid given the associated
field spec and path. Any validation failures are added to the given errors
collection. | train | https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L217-L274 | null | class Schema(object):
"""A Schema encapsulates the structure and constraints of a dict."""
def __init__(self, doc_spec, strict=True, validates=[]):
self._doc_spec = doc_spec
self._virtuals = {}
self._strict = strict
self._verify()
self._validates = validates
@property
def doc_spec(self):
return self._doc_spec
def apply_defaults(self, instance):
"""Applies the defaults described by the this schema to the given
document instance as appropriate. Defaults are only applied to
fields which are currently unset."""
for field, spec in self.doc_spec.iteritems():
field_type = spec['type']
if field not in instance:
if 'default' in spec:
default = spec['default']
if callable(default):
instance[field] = default()
else:
instance[field] = copy.deepcopy(default)
# Determine if a value already exists for the field
if field in instance:
value = instance[field]
# recurse into nested docs
if isinstance(field_type, Schema) and isinstance(value, dict):
field_type.apply_defaults(value)
elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
for item in value:
field_type.contained_type.apply_defaults(item)
def validate(self, instance):
"""Validates the given document against this schema. Raises a
ValidationException if there are any failures."""
errors = {}
self._validate_instance(instance, errors)
if len(errors) > 0:
raise ValidationException(errors)
def _append_path(self, prefix, field):
"""Appends the given field to the given path prefix."""
if prefix:
return "{}.{}".format(prefix, field)
else:
return field
def _verify(self, path_prefix=None):
"""Verifies that this schema's doc spec is valid and makes sense."""
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# Standard dict-based spec
if isinstance(spec, dict):
self._verify_field_spec(spec, path)
else:
raise SchemaFormatException("Invalid field definition for {}", path)
def _verify_field_spec(self, spec, path):
"""Verifies a given field specification is valid, recursing into nested schemas if required."""
# Required should be a boolean
if 'required' in spec and not isinstance(spec['required'], bool):
raise SchemaFormatException("{} required declaration should be True or False", path)
# Required should be a boolean
if 'nullable' in spec and not isinstance(spec['nullable'], bool):
raise SchemaFormatException("{} nullable declaration should be True or False", path)
# Must have a type specified
if 'type' not in spec:
raise SchemaFormatException("{} has no type declared.", path)
self._verify_type(spec, path)
# Validations should be either a single function or array of functions
if 'validates' in spec:
self._verify_validates(spec, path)
# Defaults must be of the correct type or a function
if 'default' in spec:
self._verify_default(spec, path)
# Only expected spec keys are supported
if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
def _verify_type(self, spec, path):
"""Verify that the 'type' in the spec is valid"""
field_type = spec['type']
if isinstance(field_type, Schema):
# Nested documents cannot have validation
if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
return
elif isinstance(field_type, Array):
if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)
elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
def _valid_schema_default(self, value):
return isinstance(value, dict)
def _verify_default(self, spec, path):
"""Verifies that the default specified in the given spec is valid."""
field_type = spec['type']
default = spec['default']
# If it's a function there's nothing we can really do except assume its valid
if callable(default):
return
if isinstance(field_type, Array):
# Verify we'd got a list as our default
if not isinstance(default, list):
raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)
# Ensure the contents are of the correct type
for i, item in enumerate(default):
if isinstance(field_type.contained_type, Schema):
if not self._valid_schema_default(item):
raise SchemaFormatException("Default value for Schema is not valid.", path)
elif not isinstance(item, field_type.contained_type):
raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)
elif isinstance(field_type, Schema):
if not self._valid_schema_default(default):
raise SchemaFormatException("Default value for Schema is not valid.", path)
else:
if not isinstance(default, field_type):
raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
def _verify_validates(self, spec, path):
"""Verify thats the 'validates' argument is valid."""
validates = spec['validates']
if isinstance(validates, list):
for validator in validates:
self._verify_validator(validator, path)
else:
self._verify_validator(validates, path)
def _verify_validator(self, validator, path):
"""Verifies that a given validator associated with the field at the given path is legitimate."""
# Validator should be a function
if not callable(validator):
raise SchemaFormatException("Invalid validations for {}", path)
# Validator should accept a single argument
(args, varargs, keywords, defaults) = getargspec(validator)
if len(args) != 1:
raise SchemaFormatException("Invalid validations for {}", path)
def _validate_instance(self, instance, errors, path_prefix=''):
"""Validates that the given instance of a document conforms to the given schema's
structure and validations. Any validation errors are added to the given errors
collection. The caller should assume the instance is considered valid if the
errors collection is empty when this method returns."""
if not isinstance(instance, dict):
errors[path_prefix] = "Expected instance of dict to validate against schema."
return
# validate against the schema level validators
self._apply_validations(errors, path_prefix, self._validates, instance)
# Loop over each field in the schema and check the instance value conforms
# to its spec
for field, spec in self.doc_spec.iteritems():
path = self._append_path(path_prefix, field)
# If the field is present, validate it's value.
if field in instance:
self._validate_value(instance[field], spec, path, errors)
else:
# If not, add an error if it was a required key.
if spec.get('required', False):
errors[path] = "{} is required.".format(path)
# Now loop over each field in the given instance and make sure we don't
# have any fields not declared in the schema, unless strict mode has been
# explicitly disabled.
if self._strict:
for field in instance:
if field not in self.doc_spec:
errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
def _apply_validations(self, errors, path, validations, value):
def apply(fn):
error = fn(value)
if error:
errors[path] = error
if isinstance(validations, list):
for validation in validations:
apply(validation)
else:
apply(validations)
|
rbit/pydtls | dtls/x509.py | decode_cert | python | def decode_cert(cert):
ret_dict = {}
subject_xname = X509_get_subject_name(cert.value)
ret_dict["subject"] = _create_tuple_for_X509_NAME(subject_xname)
notAfter = X509_get_notAfter(cert.value)
ret_dict["notAfter"] = ASN1_TIME_print(notAfter)
peer_alt_names = _get_peer_alt_names(cert)
if peer_alt_names is not None:
ret_dict["subjectAltName"] = peer_alt_names
return ret_dict | Convert an X509 certificate into a Python dictionary
This function converts the given X509 certificate into a Python dictionary
in the manner established by the Python standard library's ssl module. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/x509.py#L61-L79 | [
"def ASN1_TIME_print(asn1_time):\n bio = _BIO(BIO_new(BIO_s_mem()))\n _ASN1_TIME_print(bio.value, asn1_time)\n return BIO_gets(bio.value)\n",
"def X509_get_notAfter(x509):\n x509_raw = X509.from_param(x509)\n x509_ptr = cast(x509_raw, POINTER(X509_st))\n notAfter = x509_ptr.contents.cert_info.co... | # X509: certificate support.
# Copyright 2012 Ray Brown
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# The License is also distributed with this work in the file named "LICENSE."
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""X509 Certificate
This module provides support for X509 certificates through the OpenSSL library.
This support includes mapping certificate data to Python dictionaries in the
manner established by the Python standard library's ssl module. This module is
required because the standard library's ssl module does not provide its support
for certificates from arbitrary sources, but instead only for certificates
retrieved from servers during handshaking or get_server_certificate by its
CPython _ssl implementation module. This author is aware of the latter module's
_test_decode_certificate function, but has decided not to use this function
because it is undocumented, and because its use would tie PyDTLS to the CPython
interpreter.
"""
from logging import getLogger
from openssl import *
from util import _Rsrc, _BIO
_logger = getLogger(__name__)
class _X509(_Rsrc):
"""Wrapper for the cryptographic library's X509 resource"""
def __init__(self, value):
super(_X509, self).__init__(value)
def __del__(self):
_logger.debug("Freeing X509: %d", self.raw)
X509_free(self._value)
self._value = None
class _STACK(_Rsrc):
"""Wrapper for the cryptographic library's stacks"""
def __init__(self, value):
super(_STACK, self).__init__(value)
def __del__(self):
_logger.debug("Freeing stack: %d", self.raw)
sk_pop_free(self._value)
self._value = None
def _test_decode_cert(cert_filename):
"""format_cert testing
Test the certificate conversion functionality with a PEM-encoded X509
certificate.
"""
cert_file = _BIO(BIO_new_file(cert_filename, "rb"))
cert = _X509(PEM_read_bio_X509_AUX(cert_file.value))
return decode_cert(cert)
def _create_tuple_for_attribute(name, value):
name_str = OBJ_obj2txt(name, False)
value_str = decode_ASN1_STRING(value)
return name_str, value_str
def _create_tuple_for_X509_NAME(xname):
distinguished_name = []
relative_distinguished_name = []
level = -1
for ind in range(X509_NAME_entry_count(xname)):
name_entry_ptr = X509_NAME_get_entry(xname, ind)
name_entry = name_entry_ptr.contents
if level >= 0 and level != name_entry.set:
distinguished_name.append(tuple(relative_distinguished_name))
relative_distinguished_name = []
level = name_entry.set
asn1_object = X509_NAME_ENTRY_get_object(name_entry_ptr)
asn1_string = X509_NAME_ENTRY_get_data(name_entry_ptr)
attribute_tuple = _create_tuple_for_attribute(asn1_object, asn1_string)
relative_distinguished_name.append(attribute_tuple)
if relative_distinguished_name:
distinguished_name.append(tuple(relative_distinguished_name))
return tuple(distinguished_name)
def _get_peer_alt_names(cert):
ret_list = None
ext_index = -1
while True:
ext_index = X509_get_ext_by_NID(cert.value, NID_subject_alt_name,
ext_index)
if ext_index < 0:
break
if ret_list is None:
ret_list = []
ext_ptr = X509_get_ext(cert.value, ext_index)
method_ptr = X509V3_EXT_get(ext_ptr)
general_names = _STACK(ASN1_item_d2i(method_ptr.contents,
ext_ptr.contents.value.contents))
for name_index in range(sk_num(general_names.value)):
name_ptr = sk_value(general_names.value, name_index)
if name_ptr.contents.type == GEN_DIRNAME:
name_tuple = "DirName", \
_create_tuple_for_X509_NAME(name_ptr.contents.d.directoryName)
else:
name_str = GENERAL_NAME_print(name_ptr)
name_tuple = tuple(name_str.split(':', 1))
ret_list.append(name_tuple)
return tuple(ret_list) if ret_list is not None else None
|
rbit/pydtls | dtls/patch.py | _get_server_certificate | python | def _get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None):
if ssl_version not in (PROTOCOL_DTLS, PROTOCOL_DTLSv1, PROTOCOL_DTLSv1_2):
return _orig_get_server_certificate(addr, ssl_version, ca_certs)
if ca_certs is not None:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
af = getaddrinfo(addr[0], addr[1])[0][0]
s = ssl.wrap_socket(socket(af, SOCK_DGRAM),
ssl_version=ssl_version,
cert_reqs=cert_reqs, ca_certs=ca_certs)
s.connect(addr)
dercert = s.getpeercert(True)
s.close()
return ssl.DER_cert_to_PEM_cert(dercert) | Retrieve a server certificate
Retrieve the certificate from the server at the specified address,
and return it as a PEM-encoded string.
If 'ca_certs' is specified, validate the server cert against it.
If 'ssl_version' is specified, use it in the connection attempt. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/patch.py#L100-L123 | null | # Patch: patching of the Python stadard library's ssl module for transparent
# use of datagram sockets.
# Copyright 2012 Ray Brown
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# The License is also distributed with this work in the file named "LICENSE."
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Patch
This module is used to patch the Python standard library's ssl module. Patching
has the following effects:
* The constant PROTOCOL_DTLSv1 is added at ssl module level
* DTLSv1's protocol name is added to the ssl module's id-to-name dictionary
* The constants DTLS_OPENSSL_VERSION* are added at the ssl module level
* Instantiation of ssl.SSLSocket with sock.type == socket.SOCK_DGRAM is
supported and leads to substitution of this module's DTLS code paths for
that SSLSocket instance
* Direct instantiation of SSLSocket as well as instantiation through
ssl.wrap_socket are supported
* Invocation of the function get_server_certificate with a value of
PROTOCOL_DTLSv1 for the parameter ssl_version is supported
"""
from socket import socket, getaddrinfo, _delegate_methods, error as socket_error
from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
from ssl import PROTOCOL_SSLv23, CERT_NONE
from types import MethodType
from weakref import proxy
import errno
from sslconnection import SSLConnection, PROTOCOL_DTLS, PROTOCOL_DTLSv1, PROTOCOL_DTLSv1_2
from sslconnection import DTLS_OPENSSL_VERSION_NUMBER, DTLS_OPENSSL_VERSION, DTLS_OPENSSL_VERSION_INFO
from sslconnection import SSL_BUILD_CHAIN_FLAG_NONE, SSL_BUILD_CHAIN_FLAG_UNTRUSTED, \
SSL_BUILD_CHAIN_FLAG_NO_ROOT, SSL_BUILD_CHAIN_FLAG_CHECK, SSL_BUILD_CHAIN_FLAG_IGNORE_ERROR, SSL_BUILD_CHAIN_FLAG_CLEAR_ERROR
from err import raise_as_ssl_module_error, patch_ssl_errors
def do_patch():
import ssl as _ssl # import to be avoided if ssl module is never patched
global _orig_SSLSocket_init, _orig_get_server_certificate
global ssl
ssl = _ssl
if hasattr(ssl, "PROTOCOL_DTLSv1"):
return
_orig_wrap_socket = ssl.wrap_socket
ssl.wrap_socket = _wrap_socket
ssl.PROTOCOL_DTLS = PROTOCOL_DTLS
ssl.PROTOCOL_DTLSv1 = PROTOCOL_DTLSv1
ssl.PROTOCOL_DTLSv1_2 = PROTOCOL_DTLSv1_2
ssl._PROTOCOL_NAMES[PROTOCOL_DTLS] = "DTLS"
ssl._PROTOCOL_NAMES[PROTOCOL_DTLSv1] = "DTLSv1"
ssl._PROTOCOL_NAMES[PROTOCOL_DTLSv1_2] = "DTLSv1.2"
ssl.DTLS_OPENSSL_VERSION_NUMBER = DTLS_OPENSSL_VERSION_NUMBER
ssl.DTLS_OPENSSL_VERSION = DTLS_OPENSSL_VERSION
ssl.DTLS_OPENSSL_VERSION_INFO = DTLS_OPENSSL_VERSION_INFO
ssl.SSL_BUILD_CHAIN_FLAG_NONE = SSL_BUILD_CHAIN_FLAG_NONE
ssl.SSL_BUILD_CHAIN_FLAG_UNTRUSTED = SSL_BUILD_CHAIN_FLAG_UNTRUSTED
ssl.SSL_BUILD_CHAIN_FLAG_NO_ROOT = SSL_BUILD_CHAIN_FLAG_NO_ROOT
ssl.SSL_BUILD_CHAIN_FLAG_CHECK = SSL_BUILD_CHAIN_FLAG_CHECK
ssl.SSL_BUILD_CHAIN_FLAG_IGNORE_ERROR = SSL_BUILD_CHAIN_FLAG_IGNORE_ERROR
ssl.SSL_BUILD_CHAIN_FLAG_CLEAR_ERROR = SSL_BUILD_CHAIN_FLAG_CLEAR_ERROR
_orig_SSLSocket_init = ssl.SSLSocket.__init__
_orig_get_server_certificate = ssl.get_server_certificate
ssl.SSLSocket.__init__ = _SSLSocket_init
ssl.get_server_certificate = _get_server_certificate
patch_ssl_errors()
raise_as_ssl_module_error()
def _wrap_socket(sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
return ssl.SSLSocket(sock, keyfile=keyfile, certfile=certfile,
server_side=server_side, cert_reqs=cert_reqs,
ssl_version=ssl_version, ca_certs=ca_certs,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs,
ciphers=ciphers,
cb_user_config_ssl_ctx=cb_user_config_ssl_ctx,
cb_user_config_ssl=cb_user_config_ssl)
def _SSLSocket_init(self, sock=None, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None,
suppress_ragged_eofs=True, npn_protocols=None, ciphers=None,
server_hostname=None,
_context=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
is_connection = is_datagram = False
if isinstance(sock, SSLConnection):
is_connection = True
elif hasattr(sock, "type") and sock.type == SOCK_DGRAM:
is_datagram = True
if not is_connection and not is_datagram:
# Non-DTLS code path
return _orig_SSLSocket_init(self, sock=sock, keyfile=keyfile,
certfile=certfile, server_side=server_side,
cert_reqs=cert_reqs,
ssl_version=ssl_version, ca_certs=ca_certs,
do_handshake_on_connect=
do_handshake_on_connect,
family=family, type=type, proto=proto,
fileno=fileno,
suppress_ragged_eofs=suppress_ragged_eofs,
npn_protocols=npn_protocols,
ciphers=ciphers,
server_hostname=server_hostname,
_context=_context)
# DTLS code paths: datagram socket and newly accepted DTLS connection
if is_datagram:
socket.__init__(self, _sock=sock._sock)
else:
socket.__init__(self, _sock=sock.get_socket(True)._sock)
# Copy instance initialization from SSLSocket class
for attr in _delegate_methods:
try:
delattr(self, attr)
except AttributeError:
pass
if certfile and not keyfile:
keyfile = certfile
if is_datagram:
# see if it's connected
try:
socket.getpeername(self)
except socket_error, e:
if e.errno != errno.ENOTCONN:
raise
# no, no connection yet
self._connected = False
self._sslobj = None
else:
# yes, create the SSL object
self._connected = True
self._sslobj = SSLConnection(sock, keyfile, certfile,
server_side, cert_reqs,
ssl_version, ca_certs,
do_handshake_on_connect,
suppress_ragged_eofs, ciphers,
cb_user_config_ssl_ctx=cb_user_config_ssl_ctx,
cb_user_config_ssl=cb_user_config_ssl)
else:
self._connected = True
self._sslobj = sock
class FakeContext(object):
check_hostname = False
self._context = FakeContext()
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.ca_certs = ca_certs
self.ciphers = ciphers
self.do_handshake_on_connect = do_handshake_on_connect
self.suppress_ragged_eofs = suppress_ragged_eofs
self._makefile_refs = 0
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._user_config_ssl = cb_user_config_ssl
# Perform method substitution and addition (without reference cycle)
self._real_connect = MethodType(_SSLSocket_real_connect, proxy(self))
self.listen = MethodType(_SSLSocket_listen, proxy(self))
self.accept = MethodType(_SSLSocket_accept, proxy(self))
self.get_timeout = MethodType(_SSLSocket_get_timeout, proxy(self))
self.handle_timeout = MethodType(_SSLSocket_handle_timeout, proxy(self))
# Extra
self.getpeercertchain = MethodType(_getpeercertchain, proxy(self))
def _getpeercertchain(self, binary_form=False):
return self._sslobj.getpeercertchain(binary_form)
def _SSLSocket_listen(self, ignored):
if self._connected:
raise ValueError("attempt to listen on connected SSLSocket!")
if self._sslobj:
return
self._sslobj = SSLConnection(socket(_sock=self._sock),
self.keyfile, self.certfile, True,
self.cert_reqs, self.ssl_version,
self.ca_certs,
self.do_handshake_on_connect,
self.suppress_ragged_eofs, self.ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
def _SSLSocket_accept(self):
if self._connected:
raise ValueError("attempt to accept on connected SSLSocket!")
if not self._sslobj:
raise ValueError("attempt to accept on SSLSocket prior to listen!")
acc_ret = self._sslobj.accept()
if not acc_ret:
return
new_conn, addr = acc_ret
new_ssl_sock = ssl.SSLSocket(new_conn, self.keyfile, self.certfile, True,
self.cert_reqs, self.ssl_version,
self.ca_certs,
self.do_handshake_on_connect,
self.suppress_ragged_eofs, self.ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
return new_ssl_sock, addr
def _SSLSocket_real_connect(self, addr, return_errno):
if self._connected:
raise ValueError("attempt to connect already-connected SSLSocket!")
self._sslobj = SSLConnection(socket(_sock=self._sock),
self.keyfile, self.certfile, False,
self.cert_reqs, self.ssl_version,
self.ca_certs,
self.do_handshake_on_connect,
self.suppress_ragged_eofs, self.ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
try:
self._sslobj.connect(addr)
except socket_error as e:
if return_errno:
return e.errno
else:
self._sslobj = None
raise e
self._connected = True
return 0
if __name__ == "__main__":
do_patch()
def _SSLSocket_get_timeout(self):
return self._sslobj.get_timeout()
def _SSLSocket_handle_timeout(self):
return self._sslobj.handle_timeout()
|
rbit/pydtls | dtls/sslconnection.py | SSLContext.set_curves | python | def set_curves(self, curves):
u''' Set supported curves by name, nid or nist.
:param str | tuple(int) curves: Example "secp384r1:secp256k1", (715, 714), "P-384", "K-409:B-409:K-571", ...
:return: 1 for success and 0 for failure
'''
retVal = None
if isinstance(curves, str):
retVal = SSL_CTX_set1_curves_list(self._ctx, curves)
elif isinstance(curves, tuple):
retVal = SSL_CTX_set1_curves(self._ctx, curves, len(curves))
return retVal | u''' Set supported curves by name, nid or nist.
:param str | tuple(int) curves: Example "secp384r1:secp256k1", (715, 714), "P-384", "K-409:B-409:K-571", ...
:return: 1 for success and 0 for failure | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L206-L217 | null | class SSLContext(object):
def __init__(self, ctx):
self._ctx = ctx
def set_ciphers(self, ciphers):
u'''
s.a. https://www.openssl.org/docs/man1.1.0/apps/ciphers.html
:param str ciphers: Example "AES256-SHA:ECDHE-ECDSA-AES256-SHA", ...
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_set_cipher_list(self._ctx, ciphers)
return retVal
def set_sigalgs(self, sigalgs):
u'''
s.a. https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set1_sigalgs_list.html
:param str sigalgs: Example "RSA+SHA256", "ECDSA+SHA256", ...
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_set1_sigalgs_list(self._ctx, sigalgs)
return retVal
@staticmethod
def get_ec_nist2nid(nist):
if not isinstance(nist, tuple):
nist = nist.split(":")
nid = tuple(EC_curve_nist2nid(x) for x in nist)
return nid
@staticmethod
def get_ec_nid2nist(nid):
if not isinstance(nid, tuple):
nid = (nid, )
nist = ":".join([EC_curve_nid2nist(x) for x in nid])
return nist
@staticmethod
def get_ec_available(bAsName=True):
curves = get_elliptic_curves()
return sorted([x.name for x in curves] if bAsName else [x.nid for x in curves])
def set_ecdh_curve(self, curve_name=None):
u''' Select a curve to use for ECDH(E) key exchange or set it to auto mode
Used for server only!
s.a. openssl.exe ecparam -list_curves
:param None | str curve_name: None = Auto-mode, "secp256k1", "secp384r1", ...
:return: 1 for success and 0 for failure
'''
if curve_name:
retVal = SSL_CTX_set_ecdh_auto(self._ctx, 0)
avail_curves = get_elliptic_curves()
key = [curve for curve in avail_curves if curve.name == curve_name][0].to_EC_KEY()
retVal &= SSL_CTX_set_tmp_ecdh(self._ctx, key)
else:
retVal = SSL_CTX_set_ecdh_auto(self._ctx, 1)
return retVal
def build_cert_chain(self, flags=SSL_BUILD_CHAIN_FLAG_NONE):
u'''
Used for server side only!
:param flags:
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_build_cert_chain(self._ctx, flags)
return retVal
def set_ssl_logging(self, enable=False, func=_ssl_logging_cb):
u''' Enable or disable SSL logging
:param True | False enable: Enable or disable SSL logging
:param func: Callback function for logging
'''
if enable:
SSL_CTX_set_info_callback(self._ctx, func)
else:
SSL_CTX_set_info_callback(self._ctx, 0)
|
rbit/pydtls | dtls/sslconnection.py | SSLContext.set_ecdh_curve | python | def set_ecdh_curve(self, curve_name=None):
u''' Select a curve to use for ECDH(E) key exchange or set it to auto mode
Used for server only!
s.a. openssl.exe ecparam -list_curves
:param None | str curve_name: None = Auto-mode, "secp256k1", "secp384r1", ...
:return: 1 for success and 0 for failure
'''
if curve_name:
retVal = SSL_CTX_set_ecdh_auto(self._ctx, 0)
avail_curves = get_elliptic_curves()
key = [curve for curve in avail_curves if curve.name == curve_name][0].to_EC_KEY()
retVal &= SSL_CTX_set_tmp_ecdh(self._ctx, key)
else:
retVal = SSL_CTX_set_ecdh_auto(self._ctx, 1)
return retVal | u''' Select a curve to use for ECDH(E) key exchange or set it to auto mode
Used for server only!
s.a. openssl.exe ecparam -list_curves
:param None | str curve_name: None = Auto-mode, "secp256k1", "secp384r1", ...
:return: 1 for success and 0 for failure | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L238-L255 | [
"def get_elliptic_curves():\n u''' Return the available curves. If not yet loaded, then load them once.\n\n :rtype: list\n '''\n return _EllipticCurve._get_elliptic_curves()\n",
"def SSL_CTX_set_ecdh_auto(ctx, onoff):\n return _SSL_CTX_ctrl(ctx, SSL_CTRL_SET_ECDH_AUTO, onoff, None)\n",
"def SSL_C... | class SSLContext(object):
def __init__(self, ctx):
self._ctx = ctx
def set_ciphers(self, ciphers):
u'''
s.a. https://www.openssl.org/docs/man1.1.0/apps/ciphers.html
:param str ciphers: Example "AES256-SHA:ECDHE-ECDSA-AES256-SHA", ...
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_set_cipher_list(self._ctx, ciphers)
return retVal
def set_sigalgs(self, sigalgs):
u'''
s.a. https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set1_sigalgs_list.html
:param str sigalgs: Example "RSA+SHA256", "ECDSA+SHA256", ...
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_set1_sigalgs_list(self._ctx, sigalgs)
return retVal
def set_curves(self, curves):
u''' Set supported curves by name, nid or nist.
:param str | tuple(int) curves: Example "secp384r1:secp256k1", (715, 714), "P-384", "K-409:B-409:K-571", ...
:return: 1 for success and 0 for failure
'''
retVal = None
if isinstance(curves, str):
retVal = SSL_CTX_set1_curves_list(self._ctx, curves)
elif isinstance(curves, tuple):
retVal = SSL_CTX_set1_curves(self._ctx, curves, len(curves))
return retVal
@staticmethod
def get_ec_nist2nid(nist):
if not isinstance(nist, tuple):
nist = nist.split(":")
nid = tuple(EC_curve_nist2nid(x) for x in nist)
return nid
@staticmethod
def get_ec_nid2nist(nid):
if not isinstance(nid, tuple):
nid = (nid, )
nist = ":".join([EC_curve_nid2nist(x) for x in nid])
return nist
@staticmethod
def get_ec_available(bAsName=True):
curves = get_elliptic_curves()
return sorted([x.name for x in curves] if bAsName else [x.nid for x in curves])
def build_cert_chain(self, flags=SSL_BUILD_CHAIN_FLAG_NONE):
u'''
Used for server side only!
:param flags:
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_build_cert_chain(self._ctx, flags)
return retVal
def set_ssl_logging(self, enable=False, func=_ssl_logging_cb):
u''' Enable or disable SSL logging
:param True | False enable: Enable or disable SSL logging
:param func: Callback function for logging
'''
if enable:
SSL_CTX_set_info_callback(self._ctx, func)
else:
SSL_CTX_set_info_callback(self._ctx, 0)
|
rbit/pydtls | dtls/sslconnection.py | SSLContext.build_cert_chain | python | def build_cert_chain(self, flags=SSL_BUILD_CHAIN_FLAG_NONE):
u'''
Used for server side only!
:param flags:
:return: 1 for success and 0 for failure
'''
retVal = SSL_CTX_build_cert_chain(self._ctx, flags)
return retVal | u'''
Used for server side only!
:param flags:
:return: 1 for success and 0 for failure | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L257-L265 | [
"def SSL_CTX_build_cert_chain(ctx, flags):\n return _SSL_CTX_ctrl(ctx, SSL_CTRL_BUILD_CERT_CHAIN, flags, None)\n"
] | class SSLContext(object):
class SSLContext(object):
    """Thin configuration wrapper around a raw OpenSSL SSL_CTX handle."""

    def __init__(self, ctx):
        # ctx: raw SSL_CTX pointer; ownership remains with the caller
        self._ctx = ctx

    def set_ciphers(self, ciphers):
        """Select the cipher list (OpenSSL cipher-string syntax).

        s.a. https://www.openssl.org/docs/man1.1.0/apps/ciphers.html
        :param str ciphers: Example "AES256-SHA:ECDHE-ECDSA-AES256-SHA", ...
        :return: 1 for success and 0 for failure
        """
        return SSL_CTX_set_cipher_list(self._ctx, ciphers)

    def set_sigalgs(self, sigalgs):
        """Restrict the supported signature algorithms.

        s.a. https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set1_sigalgs_list.html
        :param str sigalgs: Example "RSA+SHA256", "ECDSA+SHA256", ...
        :return: 1 for success and 0 for failure
        """
        return SSL_CTX_set1_sigalgs_list(self._ctx, sigalgs)

    def set_curves(self, curves):
        """Set supported curves by name, nid or nist.

        :param str | tuple(int) curves: Example "secp384r1:secp256k1",
            (715, 714), "P-384", "K-409:B-409:K-571", ...
        :return: 1 for success, 0 for failure; None for unsupported input type
        """
        if isinstance(curves, str):
            return SSL_CTX_set1_curves_list(self._ctx, curves)
        if isinstance(curves, tuple):
            return SSL_CTX_set1_curves(self._ctx, curves, len(curves))
        return None

    @staticmethod
    def get_ec_nist2nid(nist):
        """Translate NIST curve name(s) (tuple or ':'-joined str) to nid tuple."""
        names = nist if isinstance(nist, tuple) else nist.split(":")
        return tuple(EC_curve_nist2nid(name) for name in names)

    @staticmethod
    def get_ec_nid2nist(nid):
        """Translate nid(s) (tuple or single nid) to a ':'-joined NIST string."""
        nids = nid if isinstance(nid, tuple) else (nid,)
        return ":".join([EC_curve_nid2nist(one_nid) for one_nid in nids])

    @staticmethod
    def get_ec_available(bAsName=True):
        """Return sorted curve names (default) or nids supported by OpenSSL."""
        curves = get_elliptic_curves()
        if bAsName:
            return sorted(curve.name for curve in curves)
        return sorted(curve.nid for curve in curves)

    def set_ecdh_curve(self, curve_name=None):
        """Select a curve for ECDH(E) key exchange, or enable auto mode.

        Used for server only!
        s.a. openssl.exe ecparam -list_curves
        :param None | str curve_name: None = Auto-mode, "secp256k1",
            "secp384r1", ...
        :return: 1 for success and 0 for failure
        """
        if not curve_name:
            return SSL_CTX_set_ecdh_auto(self._ctx, 1)
        ok = SSL_CTX_set_ecdh_auto(self._ctx, 0)
        avail_curves = get_elliptic_curves()
        # Raises IndexError if curve_name is not among the available curves
        key = [c for c in avail_curves if c.name == curve_name][0].to_EC_KEY()
        ok &= SSL_CTX_set_tmp_ecdh(self._ctx, key)
        return ok

    def set_ssl_logging(self, enable=False, func=_ssl_logging_cb):
        """Enable or disable SSL logging.

        :param True | False enable: Enable or disable SSL logging
        :param func: Callback function for logging
        """
        # Passing 0 clears any previously installed info callback.
        SSL_CTX_set_info_callback(self._ctx, func if enable else 0)
|
rbit/pydtls | dtls/sslconnection.py | SSLContext.set_ssl_logging | python | def set_ssl_logging(self, enable=False, func=_ssl_logging_cb):
u''' Enable or disable SSL logging
:param True | False enable: Enable or disable SSL logging
:param func: Callback function for logging
'''
if enable:
SSL_CTX_set_info_callback(self._ctx, func)
else:
SSL_CTX_set_info_callback(self._ctx, 0) | u''' Enable or disable SSL logging
:param True | False enable: Enable or disable SSL logging
:param func: Callback function for logging | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L267-L276 | [
"def SSL_CTX_set_info_callback(ctx, app_info_cb):\n \"\"\"\n Set the info callback\n\n :param callback: The Python callback to use\n :return: None\n \"\"\"\n def py_info_callback(ssl, where, ret):\n try:\n app_info_cb(SSL(ssl), where, ret)\n except:\n pass\n ... | class SSLContext(object):
class SSLContext(object):
    """Thin configuration wrapper around a raw OpenSSL SSL_CTX handle."""

    def __init__(self, ctx):
        # ctx: raw SSL_CTX pointer; ownership remains with the caller
        self._ctx = ctx

    def set_ciphers(self, ciphers):
        """Select the cipher list (OpenSSL cipher-string syntax).

        s.a. https://www.openssl.org/docs/man1.1.0/apps/ciphers.html
        :param str ciphers: Example "AES256-SHA:ECDHE-ECDSA-AES256-SHA", ...
        :return: 1 for success and 0 for failure
        """
        return SSL_CTX_set_cipher_list(self._ctx, ciphers)

    def set_sigalgs(self, sigalgs):
        """Restrict the supported signature algorithms.

        s.a. https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set1_sigalgs_list.html
        :param str sigalgs: Example "RSA+SHA256", "ECDSA+SHA256", ...
        :return: 1 for success and 0 for failure
        """
        return SSL_CTX_set1_sigalgs_list(self._ctx, sigalgs)

    def set_curves(self, curves):
        """Set supported curves by name, nid or nist.

        :param str | tuple(int) curves: Example "secp384r1:secp256k1",
            (715, 714), "P-384", "K-409:B-409:K-571", ...
        :return: 1 for success, 0 for failure; None for unsupported input type
        """
        if isinstance(curves, str):
            return SSL_CTX_set1_curves_list(self._ctx, curves)
        if isinstance(curves, tuple):
            return SSL_CTX_set1_curves(self._ctx, curves, len(curves))
        return None

    @staticmethod
    def get_ec_nist2nid(nist):
        """Translate NIST curve name(s) (tuple or ':'-joined str) to nid tuple."""
        names = nist if isinstance(nist, tuple) else nist.split(":")
        return tuple(EC_curve_nist2nid(name) for name in names)

    @staticmethod
    def get_ec_nid2nist(nid):
        """Translate nid(s) (tuple or single nid) to a ':'-joined NIST string."""
        nids = nid if isinstance(nid, tuple) else (nid,)
        return ":".join([EC_curve_nid2nist(one_nid) for one_nid in nids])

    @staticmethod
    def get_ec_available(bAsName=True):
        """Return sorted curve names (default) or nids supported by OpenSSL."""
        curves = get_elliptic_curves()
        if bAsName:
            return sorted(curve.name for curve in curves)
        return sorted(curve.nid for curve in curves)

    def set_ecdh_curve(self, curve_name=None):
        """Select a curve for ECDH(E) key exchange, or enable auto mode.

        Used for server only!
        s.a. openssl.exe ecparam -list_curves
        :param None | str curve_name: None = Auto-mode, "secp256k1",
            "secp384r1", ...
        :return: 1 for success and 0 for failure
        """
        if not curve_name:
            return SSL_CTX_set_ecdh_auto(self._ctx, 1)
        ok = SSL_CTX_set_ecdh_auto(self._ctx, 0)
        avail_curves = get_elliptic_curves()
        # Raises IndexError if curve_name is not among the available curves
        key = [c for c in avail_curves if c.name == curve_name][0].to_EC_KEY()
        ok &= SSL_CTX_set_tmp_ecdh(self._ctx, key)
        return ok

    def build_cert_chain(self, flags=SSL_BUILD_CHAIN_FLAG_NONE):
        """Build/complete the context's certificate chain.

        Used for server side only!
        :param flags: SSL_BUILD_CHAIN_FLAG_* bitmask
        :return: 1 for success and 0 for failure
        """
        return SSL_CTX_build_cert_chain(self._ctx, flags)
rbit/pydtls | dtls/sslconnection.py | SSLConnection.get_socket | python | def get_socket(self, inbound):
def get_socket(self, inbound):
    """Return the socket used by this connection.

    inbound=True yields the socket this connection reads from; otherwise
    the socket it writes to.  A distinct read socket (``_rsock``) exists
    only when a routing demux is in use; in all other cases both
    directions share ``_sock``.
    """
    if inbound and hasattr(self, "_rsock"):
        return self._rsock
    return self._sock
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L575-L588 | null | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def listen(self):
"""Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommened
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded
"""
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address
def accept(self):
"""Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred.
"""
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.listen | python | def listen(self):
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address | Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommended
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L590-L664 | null | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
def accept(self):
"""Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred.
"""
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.accept | python | def accept(self):
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer | Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L666-L697 | null | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
def listen(self):
"""Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommened
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded
"""
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.connect | python | def connect(self, peer_address):
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake() | Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L699-L715 | [
"def BIO_dgram_set_connected(bio, peer_address):\n su = sockaddr_u_from_addr_tuple(peer_address)\n return _BIO_ctrl(bio, BIO_CTRL_DGRAM_SET_CONNECTED, 0, byref(su))\n"
] | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
def listen(self):
"""Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommened
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded
"""
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address
def accept(self):
"""Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred.
"""
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.do_handshake | python | def do_handshake(self):
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake") | Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L717-L734 | [
"def openssl_error():\n \"\"\"Return the OpenSSL error type for use in exception clauses\"\"\"\n return _OpenSSLError\n"
] | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
def listen(self):
"""Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommened
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded
"""
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address
def accept(self):
"""Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred.
"""
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.read | python | def read(self, len=1024, buffer=None):
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise | Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L736-L753 | [
"def openssl_error():\n \"\"\"Return the OpenSSL error type for use in exception clauses\"\"\"\n return _OpenSSLError\n"
] | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
def listen(self):
"""Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommened
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded
"""
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address
def accept(self):
"""Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred.
"""
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.write | python | def write(self, data):
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret | Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L755-L776 | [
"def openssl_error():\n \"\"\"Return the OpenSSL error type for use in exception clauses\"\"\"\n return _OpenSSLError\n"
] | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
    def _init_client(self, peer_address):
        """Initialize OpenSSL client-side state for this association.

        peer_address -- address tuple of the server peer, or a falsy value
        when the socket is not yet connected.

        Returns a deferred-connect callable when a peer address is known;
        otherwise returns None.
        """
        if self._sock.type != socket.SOCK_DGRAM:
            raise InvalidSocketError("sock must be of type SOCK_DGRAM")
        self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
        # Clients always read and write through the same BIO
        self._rbio = self._wbio
        client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
        if self._ssl_version == PROTOCOL_DTLSv1_2:
            client_method = DTLSv1_2_client_method
        elif self._ssl_version == PROTOCOL_DTLSv1:
            client_method = DTLSv1_client_method
        self._ctx = _CTX(SSL_CTX_new(client_method()))
        self._intf_ssl_ctx = SSLContext(self._ctx.value)
        if self._cert_reqs == CERT_NONE:
            verify_mode = SSL_VERIFY_NONE
        else:
            verify_mode = SSL_VERIFY_PEER
        self._config_ssl_ctx(verify_mode)
        self._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(self._ssl.value)
        SSL_set_connect_state(self._ssl.value)
        if peer_address:
            return lambda: self.connect(peer_address)
    def _config_ssl_ctx(self, verify_mode):
        """Apply common SSL_CTX configuration shared by client and server.

        verify_mode -- an SSL_VERIFY_* bitmask controlling peer certificate
        verification.

        Loads certificate/key/CA files and the cipher list if configured,
        then hands the wrapped context to the user callback, if any.
        """
        SSL_CTX_set_verify(self._ctx.value, verify_mode)
        SSL_CTX_set_read_ahead(self._ctx.value, 1)
        # Compression occurs at the stream layer now, leading to datagram
        # corruption when packet loss occurs
        SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
        if self._certfile:
            SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
        if self._keyfile:
            SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
                                        SSL_FILE_TYPE_PEM)
        if self._ca_certs:
            SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
        if self._ciphers:
            try:
                SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
            except openssl_error() as err:
                raise_ssl_error(ERR_NO_CIPHER, err)
        if self._user_config_ssl_ctx:
            self._user_config_ssl_ctx(self._intf_ssl_ctx)
    def _copy_server(self):
        """Take over the handshaking SSL state of a listening server socket.

        Called from __init__ when `sock` is itself an SSLConnection (the
        accept path): this connection adopts the source's demux, context and
        in-progress SSL object, while the source is given a fresh SSL so it
        can keep listening.  The exact order of BIO creation, SSL swap and
        disown calls below is significant for OpenSSL resource ownership.
        """
        source = self._sock
        self._udp_demux = source._udp_demux
        rsock = self._udp_demux.get_connection(source._pending_peer_address)
        self._ctx = source._ctx
        self._ssl = source._ssl
        new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
                                             BIO_NOCLOSE))
        if hasattr(source, "_rsock"):
            # Routing demux: separate read socket per peer
            self._sock = source._sock
            self._rsock = rsock
            self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
            self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
            new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
                                                 BIO_NOCLOSE))
            BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
        else:
            # Single-socket demux: one connected socket per peer
            self._sock = rsock
            self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
            self._rbio = self._wbio
            new_source_rbio = new_source_wbio
            BIO_dgram_set_connected(self._wbio.value,
                                    source._pending_peer_address)
        # Give the listening source a fresh SSL so it can continue accepting
        source._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(source._ssl.value)
        SSL_set_accept_state(source._ssl.value)
        if self._user_config_ssl:
            self._user_config_ssl(self._intf_ssl)
        source._rbio = new_source_rbio
        source._wbio = new_source_wbio
        SSL_set_bio(source._ssl.value,
                    new_source_rbio.value,
                    new_source_wbio.value)
        # SSL_set_bio transferred BIO ownership to the SSL object
        new_source_rbio.disown()
        new_source_wbio.disown()
    def _reconnect_unwrapped(self):
        """Re-wrap a previously unwrapped server-side socket.

        Called from __init__ when `sock` is an _UnwrappedSocket returned by
        a prior shutdown(): reuses its demux, sockets, peer address and SSL
        context to build a fresh server-side SSL association.

        Returns a deferred-handshake callable when do_handshake_on_connect
        is set; otherwise returns None.
        """
        source = self._sock
        self._sock = source._wsock
        self._udp_demux = source._demux
        self._rsock = source._rsock
        self._ctx = source._ctx
        self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
        self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
        BIO_dgram_set_peer(self._wbio.value, source._peer_address)
        self._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(self._ssl.value)
        SSL_set_accept_state(self._ssl.value)
        if self._user_config_ssl:
            self._user_config_ssl(self._intf_ssl)
        if self._do_handshake_on_connect:
            return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
    def _wrap_socket_library_call(self, call, timeout_error):
        """Invoke an OpenSSL operation, honoring the socket's timeout.

        call -- zero-argument callable wrapping the OpenSSL library call
        timeout_error -- error code passed to raise_ssl_error on timeout

        For blocking or non-blocking sockets the call is passed straight
        through.  For sockets with a positive timeout, the call is retried
        after select() whenever OpenSSL reports SSL_ERROR_WANT_READ, until
        the overall timeout budget is exhausted.
        """
        timeout_sec_start = timeout_sec = self._check_nbio()
        # Pass the call if the socket is blocking or non-blocking
        if not timeout_sec:  # None (blocking) or zero (non-blocking)
            return call()
        start_time = datetime.datetime.now()
        read_sock = self.get_socket(True)
        need_select = False
        while timeout_sec > 0:
            if need_select:
                if not select([read_sock], [], [], timeout_sec)[0]:
                    break
                # Shrink the remaining budget by the elapsed wall time
                timeout_sec = timeout_sec_start - \
                    (datetime.datetime.now() - start_time).total_seconds()
            try:
                return call()
            except openssl_error() as err:
                if err.ssl_error == SSL_ERROR_WANT_READ:
                    need_select = True
                    continue
                raise
        raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
    def _generate_cookie_cb(self, ssl):
        """OpenSSL cookie-generation callback: return the cookie for ssl."""
        return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
    def __init__(self, sock, keyfile=None, certfile=None,
                 server_side=False, cert_reqs=CERT_NONE,
                 ssl_version=PROTOCOL_DTLS, ca_certs=None,
                 do_handshake_on_connect=True,
                 suppress_ragged_eofs=True, ciphers=None,
                 cb_user_config_ssl_ctx=None,
                 cb_user_config_ssl=None):
        """Constructor

        Arguments:
        these arguments match the ones of the SSLSocket class in the
        standard library's ssl module, plus two optional callbacks that
        receive the wrapped SSL_CTX and SSL objects for extra configuration.

        `sock` may be a plain datagram socket, an SSLConnection (server
        accept path), or an _UnwrappedSocket (re-wrap after shutdown).
        """
        if keyfile and not certfile or certfile and not keyfile:
            raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
        if server_side and not keyfile:
            raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
        if cert_reqs != CERT_NONE and not ca_certs:
            raise_ssl_error(ERR_NO_CERTS)
        if not ciphers:
            ciphers = "DEFAULT"
        self._sock = sock
        self._keyfile = keyfile
        self._certfile = certfile
        self._cert_reqs = cert_reqs
        self._ssl_version = ssl_version
        self._ca_certs = ca_certs
        self._do_handshake_on_connect = do_handshake_on_connect
        self._suppress_ragged_eofs = suppress_ragged_eofs
        self._ciphers = ciphers
        self._handshake_done = False
        self._wbio_nb = self._rbio_nb = False
        self._user_config_ssl_ctx = cb_user_config_ssl_ctx
        self._intf_ssl_ctx = None
        self._user_config_ssl = cb_user_config_ssl
        self._intf_ssl = None
        # Dispatch on the kind of socket we were handed; each path may
        # return a deferred action to run once the BIOs are wired up.
        if isinstance(sock, SSLConnection):
            post_init = self._copy_server()
        elif isinstance(sock, _UnwrappedSocket):
            post_init = self._reconnect_unwrapped()
        else:
            try:
                peer_address = sock.getpeername()
            except socket.error:
                peer_address = None
            if server_side:
                post_init = self._init_server(peer_address)
            else:
                post_init = self._init_client(peer_address)
            if self._user_config_ssl:
                self._user_config_ssl(self._intf_ssl)
        # Windows: path MTU discovery is unreliable, pin a conservative MTU
        if sys.platform.startswith('win') and \
                not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
            SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
            DTLS_set_link_mtu(self._ssl.value, 576)
        SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
        # SSL_set_bio transferred BIO ownership to the SSL object
        self._rbio.disown()
        self._wbio.disown()
        if post_init:
            post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
    def listen(self):
        """Server-side cookie exchange

        This method reads datagrams from the socket and initiates cookie
        exchange, upon whose successful conclusion one can then proceed to
        the accept method. Alternatively, accept can be called directly, in
        which case it will call this method. In order to prevent denial-of-
        service attacks, only a small, constant set of computing resources
        are used during the listen phase.

        On some platforms, listen must be called so that packets will be
        forwarded to accepted connections. Doing so is therefore recommened
        in all cases for portable code.

        Return value: a peer address if a datagram from a new peer was
        encountered, None if a datagram for a known peer was forwarded
        """
        if not hasattr(self, "_listening"):
            raise InvalidSocketError("listen called on non-listening socket")
        self._pending_peer_address = None
        try:
            peer_address = self._udp_demux.service()
        except socket.timeout:
            peer_address = None
        except socket.error as sock_err:
            if sock_err.errno != errno.EWOULDBLOCK:
                _logger.exception("Unexpected socket error in listen")
                raise
            peer_address = None
        if not peer_address:
            _logger.debug("Listen returning without peer")
            return
        # The demux advises that a datagram from a new peer may have arrived
        if type(peer_address) is tuple:
            # For this type of demux, the write BIO must be pointed at the peer
            BIO_dgram_set_peer(self._wbio.value, peer_address)
            self._udp_demux.forward()
            self._listening_peer_address = peer_address
        self._check_nbio()
        self._listening = True
        try:
            _logger.debug("Invoking DTLSv1_listen for ssl: %d",
                          self._ssl.raw)
            dtls_peer_address = DTLSv1_listen(self._ssl.value)
        except openssl_error() as err:
            if err.ssl_error == SSL_ERROR_WANT_READ:
                # This method must be called again to forward the next datagram
                _logger.debug("DTLSv1_listen must be resumed")
                return
            elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
                _logger.debug("Wrong version number; aborting handshake")
                raise
            elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
                _logger.debug("Mismatching cookie received; aborting handshake")
                raise
            elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
                _logger.debug("No shared cipher; aborting handshake")
                raise
            _logger.exception("Unexpected error in DTLSv1_listen")
            raise
        finally:
            # Always leave the listening flags in a clean state
            self._listening = False
            self._listening_peer_address = None
        if type(peer_address) is tuple:
            _logger.debug("New local peer: %s", dtls_peer_address)
            self._pending_peer_address = peer_address
        else:
            self._pending_peer_address = dtls_peer_address
        _logger.debug("New peer: %s", self._pending_peer_address)
        return self._pending_peer_address
    def accept(self):
        """Server-side UDP connection establishment

        This method returns a server-side SSLConnection object, connected to
        that peer most recently returned from the listen method and not yet
        connected. If there is no such peer, then the listen method is invoked.

        Return value: tuple (SSLConnection, peer address) for a new peer,
        None if packet forwarding only to an existing peer occurred.
        """
        if not self._pending_peer_address:
            if not self.listen():
                _logger.debug("Accept returning without connection")
                return
        # Passing self as `sock` triggers the _copy_server path in __init__
        new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
                                 self._cert_reqs, self._ssl_version,
                                 self._ca_certs, self._do_handshake_on_connect,
                                 self._suppress_ragged_eofs, self._ciphers,
                                 cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
                                 cb_user_config_ssl=self._user_config_ssl)
        new_peer = self._pending_peer_address
        self._pending_peer_address = None
        if self._do_handshake_on_connect:
            # Note that since that connection's socket was just created in its
            # constructor, the following operation must be blocking; hence
            # handshake-on-connect can only be used with a routing demux if
            # listen is serviced by a separate application thread, or else we
            # will hang in this call
            new_conn.do_handshake()
        _logger.debug("Accept returning new connection for new peer")
        return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
    def shutdown(self):
        """Shut down the DTLS connection

        This method attemps to complete a bidirectional shutdown between
        peers. For non-blocking sockets, it should be called repeatedly until
        it no longer raises continuation request exceptions.

        Return value: for a connected server-side socket, an
        _UnwrappedSocket that can later be re-wrapped; otherwise the
        underlying plain socket.
        """
        if hasattr(self, "_listening"):
            # Listening server-side sockets cannot be shut down
            return
        try:
            self._wrap_socket_library_call(
                lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
        except openssl_error() as err:
            if err.result == 0:
                # close-notify alert was just sent; wait for same from peer
                # Note: while it might seem wise to suppress further read-aheads
                # with SSL_set_read_ahead here, doing so causes a shutdown
                # failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
                # initiator side. And test_starttls does pass.
                self._wrap_socket_library_call(
                    lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
            else:
                raise
        if hasattr(self, "_rsock"):
            # Return wrapped connected server socket (non-listening)
            return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
                                    self._ctx,
                                    BIO_dgram_get_peer(self._wbio.value))
        # Return unwrapped client-side socket or unwrapped server-side socket
        # for single-socket servers
        return self._sock
    def getpeercert(self, binary_form=False):
        """Retrieve the peer's certificate

        When binary form is requested, the peer's DER-encoded certficate is
        returned if it was transmitted during the handshake.

        When binary form is not requested, and the peer's certificate has been
        validated, then a certificate dictionary is returned. If the certificate
        was not validated, an empty dictionary is returned.

        In all cases, None is returned if no certificate was received from the
        peer.
        """
        try:
            peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
        except openssl_error():
            # No certificate was presented by the peer
            return
        if binary_form:
            return i2d_X509(peer_cert.value)
        if self._cert_reqs == CERT_NONE:
            # Certificate was not validated; do not expose its contents
            return {}
        return decode_cert(peer_cert)
    peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
    def pending(self):
        """Retrieve number of buffered bytes

        Return the number of bytes that have been read from the socket and
        buffered by this connection. Return 0 if no bytes have been buffered.
        """
        return SSL_pending(self._ssl.value)
    def get_timeout(self):
        """Retrieve the retransmission timedelta

        Since datagrams are subject to packet loss, DTLS will perform
        packet retransmission if a response is not received after a certain
        time interval during the handshaking phase. When using non-blocking
        sockets, the application must call back after that time interval to
        allow for the retransmission to occur. This method returns the
        timedelta after which to perform the call to handle_timeout, or None
        if no such callback is needed given the current handshake state.
        """
        return DTLSv1_get_timeout(self._ssl.value)
    def handle_timeout(self):
        """Perform datagram retransmission, if required

        This method should be called after the timedelta retrieved from
        get_timeout has expired, and no datagrams were received in the
        meantime. If datagrams were received, a new timeout needs to be
        requested.

        Return value:
        True -- retransmissions were performed successfully
        False -- a timeout was not in effect or had not yet expired

        Exceptions:
        Raised when retransmissions fail or too many timeouts occur.
        """
        return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.shutdown | python | def shutdown(self):
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock | Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L778-L811 | null | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
    def _init_server(self, peer_address):
        """Initialize OpenSSL server-side state for this association.

        peer_address -- address tuple of an already-connected client peer,
        or a falsy value to configure a listening socket behind a UDP demux.

        Returns a deferred-handshake callable when connected to a peer and
        do_handshake_on_connect is set; otherwise returns None.
        """
        if self._sock.type != socket.SOCK_DGRAM:
            raise InvalidSocketError("sock must be of type SOCK_DGRAM")
        self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
        if peer_address:
            # Connect directly to this client peer, bypassing the demux
            rsock = self._sock
            BIO_dgram_set_connected(self._wbio.value, peer_address)
        else:
            from demux import UDPDemux
            self._udp_demux = UDPDemux(self._sock)
            rsock = self._udp_demux.get_connection(None)
        if rsock is self._sock:
            # Single-socket demux: read and write share one BIO
            self._rbio = self._wbio
        else:
            self._rsock = rsock
            self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
        server_method = DTLS_server_method
        if self._ssl_version == PROTOCOL_DTLSv1_2:
            server_method = DTLSv1_2_server_method
        elif self._ssl_version == PROTOCOL_DTLSv1:
            server_method = DTLSv1_server_method
        self._ctx = _CTX(SSL_CTX_new(server_method()))
        self._intf_ssl_ctx = SSLContext(self._ctx.value)
        # Session resumption is not used by this wrapper
        SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
        if self._cert_reqs == CERT_NONE:
            verify_mode = SSL_VERIFY_NONE
        elif self._cert_reqs == CERT_OPTIONAL:
            verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
        else:
            verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
                          SSL_VERIFY_FAIL_IF_NO_PEER_CERT
        self._config_ssl_ctx(verify_mode)
        if not peer_address:
            # Configure UDP listening socket
            self._listening = False
            self._listening_peer_address = None
            self._pending_peer_address = None
            # Keep a reference so the ctypes cookie callbacks stay alive
            self._cb_keepalive = SSL_CTX_set_cookie_cb(
                self._ctx.value,
                _CallbackProxy(self._generate_cookie_cb),
                _CallbackProxy(self._verify_cookie_cb))
        self._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(self._ssl.value)
        SSL_set_accept_state(self._ssl.value)
        if peer_address and self._do_handshake_on_connect:
            return lambda: self.do_handshake()
    def _init_client(self, peer_address):
        """Initialize OpenSSL client-side state for this association.

        peer_address -- address tuple of the server peer, or a falsy value
        when the socket is not yet connected.

        Returns a deferred-connect callable when a peer address is known;
        otherwise returns None.
        """
        if self._sock.type != socket.SOCK_DGRAM:
            raise InvalidSocketError("sock must be of type SOCK_DGRAM")
        self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
        # Clients always read and write through the same BIO
        self._rbio = self._wbio
        client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
        if self._ssl_version == PROTOCOL_DTLSv1_2:
            client_method = DTLSv1_2_client_method
        elif self._ssl_version == PROTOCOL_DTLSv1:
            client_method = DTLSv1_client_method
        self._ctx = _CTX(SSL_CTX_new(client_method()))
        self._intf_ssl_ctx = SSLContext(self._ctx.value)
        if self._cert_reqs == CERT_NONE:
            verify_mode = SSL_VERIFY_NONE
        else:
            verify_mode = SSL_VERIFY_PEER
        self._config_ssl_ctx(verify_mode)
        self._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(self._ssl.value)
        SSL_set_connect_state(self._ssl.value)
        if peer_address:
            return lambda: self.connect(peer_address)
    def _config_ssl_ctx(self, verify_mode):
        """Apply common SSL_CTX configuration shared by client and server.

        verify_mode -- an SSL_VERIFY_* bitmask controlling peer certificate
        verification.

        Loads certificate/key/CA files and the cipher list if configured,
        then hands the wrapped context to the user callback, if any.
        """
        SSL_CTX_set_verify(self._ctx.value, verify_mode)
        SSL_CTX_set_read_ahead(self._ctx.value, 1)
        # Compression occurs at the stream layer now, leading to datagram
        # corruption when packet loss occurs
        SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
        if self._certfile:
            SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
        if self._keyfile:
            SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
                                        SSL_FILE_TYPE_PEM)
        if self._ca_certs:
            SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
        if self._ciphers:
            try:
                SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
            except openssl_error() as err:
                raise_ssl_error(ERR_NO_CIPHER, err)
        if self._user_config_ssl_ctx:
            self._user_config_ssl_ctx(self._intf_ssl_ctx)
    def _copy_server(self):
        """Take over the handshaking SSL state of a listening server socket.

        Called from __init__ when `sock` is itself an SSLConnection (the
        accept path): this connection adopts the source's demux, context and
        in-progress SSL object, while the source is given a fresh SSL so it
        can keep listening.  The exact order of BIO creation, SSL swap and
        disown calls below is significant for OpenSSL resource ownership.
        """
        source = self._sock
        self._udp_demux = source._udp_demux
        rsock = self._udp_demux.get_connection(source._pending_peer_address)
        self._ctx = source._ctx
        self._ssl = source._ssl
        new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
                                             BIO_NOCLOSE))
        if hasattr(source, "_rsock"):
            # Routing demux: separate read socket per peer
            self._sock = source._sock
            self._rsock = rsock
            self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
            self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
            new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
                                                 BIO_NOCLOSE))
            BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
        else:
            # Single-socket demux: one connected socket per peer
            self._sock = rsock
            self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
            self._rbio = self._wbio
            new_source_rbio = new_source_wbio
            BIO_dgram_set_connected(self._wbio.value,
                                    source._pending_peer_address)
        # Give the listening source a fresh SSL so it can continue accepting
        source._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(source._ssl.value)
        SSL_set_accept_state(source._ssl.value)
        if self._user_config_ssl:
            self._user_config_ssl(self._intf_ssl)
        source._rbio = new_source_rbio
        source._wbio = new_source_wbio
        SSL_set_bio(source._ssl.value,
                    new_source_rbio.value,
                    new_source_wbio.value)
        # SSL_set_bio transferred BIO ownership to the SSL object
        new_source_rbio.disown()
        new_source_wbio.disown()
    def _reconnect_unwrapped(self):
        """Re-wrap a previously unwrapped server-side socket.

        Called from __init__ when `sock` is an _UnwrappedSocket returned by
        a prior shutdown(): reuses its demux, sockets, peer address and SSL
        context to build a fresh server-side SSL association.

        Returns a deferred-handshake callable when do_handshake_on_connect
        is set; otherwise returns None.
        """
        source = self._sock
        self._sock = source._wsock
        self._udp_demux = source._demux
        self._rsock = source._rsock
        self._ctx = source._ctx
        self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
        self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
        BIO_dgram_set_peer(self._wbio.value, source._peer_address)
        self._ssl = _SSL(SSL_new(self._ctx.value))
        self._intf_ssl = SSL(self._ssl.value)
        SSL_set_accept_state(self._ssl.value)
        if self._user_config_ssl:
            self._user_config_ssl(self._intf_ssl)
        if self._do_handshake_on_connect:
            return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
    def _wrap_socket_library_call(self, call, timeout_error):
        """Invoke an OpenSSL operation, honoring the socket's timeout.

        call -- zero-argument callable wrapping the OpenSSL library call
        timeout_error -- error code passed to raise_ssl_error on timeout

        For blocking or non-blocking sockets the call is passed straight
        through.  For sockets with a positive timeout, the call is retried
        after select() whenever OpenSSL reports SSL_ERROR_WANT_READ, until
        the overall timeout budget is exhausted.
        """
        timeout_sec_start = timeout_sec = self._check_nbio()
        # Pass the call if the socket is blocking or non-blocking
        if not timeout_sec:  # None (blocking) or zero (non-blocking)
            return call()
        start_time = datetime.datetime.now()
        read_sock = self.get_socket(True)
        need_select = False
        while timeout_sec > 0:
            if need_select:
                if not select([read_sock], [], [], timeout_sec)[0]:
                    break
                # Shrink the remaining budget by the elapsed wall time
                timeout_sec = timeout_sec_start - \
                    (datetime.datetime.now() - start_time).total_seconds()
            try:
                return call()
            except openssl_error() as err:
                if err.ssl_error == SSL_ERROR_WANT_READ:
                    need_select = True
                    continue
                raise
        raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
    def _generate_cookie_cb(self, ssl):
        """OpenSSL cookie-generation callback: return the cookie for ssl."""
        return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
    def __init__(self, sock, keyfile=None, certfile=None,
                 server_side=False, cert_reqs=CERT_NONE,
                 ssl_version=PROTOCOL_DTLS, ca_certs=None,
                 do_handshake_on_connect=True,
                 suppress_ragged_eofs=True, ciphers=None,
                 cb_user_config_ssl_ctx=None,
                 cb_user_config_ssl=None):
        """Constructor

        Arguments:
        these arguments match the ones of the SSLSocket class in the
        standard library's ssl module, plus two optional callbacks that
        receive the wrapped SSL_CTX and SSL objects for extra configuration.

        `sock` may be a plain datagram socket, an SSLConnection (server
        accept path), or an _UnwrappedSocket (re-wrap after shutdown).
        """
        if keyfile and not certfile or certfile and not keyfile:
            raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
        if server_side and not keyfile:
            raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
        if cert_reqs != CERT_NONE and not ca_certs:
            raise_ssl_error(ERR_NO_CERTS)
        if not ciphers:
            ciphers = "DEFAULT"
        self._sock = sock
        self._keyfile = keyfile
        self._certfile = certfile
        self._cert_reqs = cert_reqs
        self._ssl_version = ssl_version
        self._ca_certs = ca_certs
        self._do_handshake_on_connect = do_handshake_on_connect
        self._suppress_ragged_eofs = suppress_ragged_eofs
        self._ciphers = ciphers
        self._handshake_done = False
        self._wbio_nb = self._rbio_nb = False
        self._user_config_ssl_ctx = cb_user_config_ssl_ctx
        self._intf_ssl_ctx = None
        self._user_config_ssl = cb_user_config_ssl
        self._intf_ssl = None
        # Dispatch on the kind of socket we were handed; each path may
        # return a deferred action to run once the BIOs are wired up.
        if isinstance(sock, SSLConnection):
            post_init = self._copy_server()
        elif isinstance(sock, _UnwrappedSocket):
            post_init = self._reconnect_unwrapped()
        else:
            try:
                peer_address = sock.getpeername()
            except socket.error:
                peer_address = None
            if server_side:
                post_init = self._init_server(peer_address)
            else:
                post_init = self._init_client(peer_address)
            if self._user_config_ssl:
                self._user_config_ssl(self._intf_ssl)
        # Windows: path MTU discovery is unreliable, pin a conservative MTU
        if sys.platform.startswith('win') and \
                not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
            SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
            DTLS_set_link_mtu(self._ssl.value, 576)
        SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
        # SSL_set_bio transferred BIO ownership to the SSL object
        self._rbio.disown()
        self._wbio.disown()
        if post_init:
            post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
    def listen(self):
        """Server-side cookie exchange

        This method reads datagrams from the socket and initiates cookie
        exchange, upon whose successful conclusion one can then proceed to
        the accept method. Alternatively, accept can be called directly, in
        which case it will call this method. In order to prevent denial-of-
        service attacks, only a small, constant set of computing resources
        are used during the listen phase.

        On some platforms, listen must be called so that packets will be
        forwarded to accepted connections. Doing so is therefore recommened
        in all cases for portable code.

        Return value: a peer address if a datagram from a new peer was
        encountered, None if a datagram for a known peer was forwarded
        """
        if not hasattr(self, "_listening"):
            raise InvalidSocketError("listen called on non-listening socket")
        self._pending_peer_address = None
        try:
            peer_address = self._udp_demux.service()
        except socket.timeout:
            peer_address = None
        except socket.error as sock_err:
            if sock_err.errno != errno.EWOULDBLOCK:
                _logger.exception("Unexpected socket error in listen")
                raise
            peer_address = None
        if not peer_address:
            _logger.debug("Listen returning without peer")
            return
        # The demux advises that a datagram from a new peer may have arrived
        if type(peer_address) is tuple:
            # For this type of demux, the write BIO must be pointed at the peer
            BIO_dgram_set_peer(self._wbio.value, peer_address)
            self._udp_demux.forward()
            self._listening_peer_address = peer_address
        self._check_nbio()
        self._listening = True
        try:
            _logger.debug("Invoking DTLSv1_listen for ssl: %d",
                          self._ssl.raw)
            dtls_peer_address = DTLSv1_listen(self._ssl.value)
        except openssl_error() as err:
            if err.ssl_error == SSL_ERROR_WANT_READ:
                # This method must be called again to forward the next datagram
                _logger.debug("DTLSv1_listen must be resumed")
                return
            elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
                _logger.debug("Wrong version number; aborting handshake")
                raise
            elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
                _logger.debug("Mismatching cookie received; aborting handshake")
                raise
            elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
                _logger.debug("No shared cipher; aborting handshake")
                raise
            _logger.exception("Unexpected error in DTLSv1_listen")
            raise
        finally:
            # Always leave the listening flags in a clean state
            self._listening = False
            self._listening_peer_address = None
        if type(peer_address) is tuple:
            _logger.debug("New local peer: %s", dtls_peer_address)
            self._pending_peer_address = peer_address
        else:
            self._pending_peer_address = dtls_peer_address
        _logger.debug("New peer: %s", self._pending_peer_address)
        return self._pending_peer_address
    def accept(self):
        """Server-side UDP connection establishment

        This method returns a server-side SSLConnection object, connected to
        that peer most recently returned from the listen method and not yet
        connected. If there is no such peer, then the listen method is invoked.

        Return value: tuple (SSLConnection, peer address) for a new peer,
        None if packet forwarding only to an existing peer occurred.
        """
        if not self._pending_peer_address:
            if not self.listen():
                _logger.debug("Accept returning without connection")
                return
        # Passing self as `sock` triggers the _copy_server path in __init__
        new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
                                 self._cert_reqs, self._ssl_version,
                                 self._ca_certs, self._do_handshake_on_connect,
                                 self._suppress_ragged_eofs, self._ciphers,
                                 cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
                                 cb_user_config_ssl=self._user_config_ssl)
        new_peer = self._pending_peer_address
        self._pending_peer_address = None
        if self._do_handshake_on_connect:
            # Note that since that connection's socket was just created in its
            # constructor, the following operation must be blocking; hence
            # handshake-on-connect can only be used with a routing demux if
            # listen is serviced by a separate application thread, or else we
            # will hang in this call
            new_conn.do_handshake()
        _logger.debug("Accept returning new connection for new peer")
        return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def getpeercert(self, binary_form=False):
"""Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer.
"""
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert)
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.getpeercert | python | def getpeercert(self, binary_form=False):
try:
peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
except openssl_error():
return
if binary_form:
return i2d_X509(peer_cert.value)
if self._cert_reqs == CERT_NONE:
return {}
return decode_cert(peer_cert) | Retrieve the peer's certificate
When binary form is requested, the peer's DER-encoded certficate is
returned if it was transmitted during the handshake.
When binary form is not requested, and the peer's certificate has been
validated, then a certificate dictionary is returned. If the certificate
was not validated, an empty dictionary is returned.
In all cases, None is returned if no certificate was received from the
peer. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L813-L836 | [
"def openssl_error():\n \"\"\"Return the OpenSSL error type for use in exception clauses\"\"\"\n return _OpenSSLError\n",
"def i2d_X509(x509):\n bio = _BIO(BIO_new(BIO_s_mem()))\n _i2d_X509_bio(bio.value, x509)\n return BIO_get_mem_data(bio.value)\n"
] | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
assert self._listening
assert self._ssl.raw == ssl.raw
if self._listening_peer_address:
peer_address = self._listening_peer_address
else:
peer_address = BIO_dgram_get_peer(self._rbio.value)
cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
if self._get_cookie(ssl) != cookie:
raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_DTLS, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None,
cb_user_config_ssl_ctx=None,
cb_user_config_ssl=None):
"""Constructor
Arguments:
these arguments match the ones of the SSLSocket class in the
standard library's ssl module
"""
if keyfile and not certfile or certfile and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
if server_side and not keyfile:
raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
if cert_reqs != CERT_NONE and not ca_certs:
raise_ssl_error(ERR_NO_CERTS)
if not ciphers:
ciphers = "DEFAULT"
self._sock = sock
self._keyfile = keyfile
self._certfile = certfile
self._cert_reqs = cert_reqs
self._ssl_version = ssl_version
self._ca_certs = ca_certs
self._do_handshake_on_connect = do_handshake_on_connect
self._suppress_ragged_eofs = suppress_ragged_eofs
self._ciphers = ciphers
self._handshake_done = False
self._wbio_nb = self._rbio_nb = False
self._user_config_ssl_ctx = cb_user_config_ssl_ctx
self._intf_ssl_ctx = None
self._user_config_ssl = cb_user_config_ssl
self._intf_ssl = None
if isinstance(sock, SSLConnection):
post_init = self._copy_server()
elif isinstance(sock, _UnwrappedSocket):
post_init = self._reconnect_unwrapped()
else:
try:
peer_address = sock.getpeername()
except socket.error:
peer_address = None
if server_side:
post_init = self._init_server(peer_address)
else:
post_init = self._init_client(peer_address)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if sys.platform.startswith('win') and \
not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
DTLS_set_link_mtu(self._ssl.value, 576)
SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
self._rbio.disown()
self._wbio.disown()
if post_init:
post_init()
def get_socket(self, inbound):
"""Retrieve a socket used by this connection
When inbound is True, then the socket from which this connection reads
data is retrieved. Otherwise the socket to which this connection writes
data is retrieved.
Read and write sockets differ depending on whether this is a server- or
a client-side connection, and on whether a routing demux is in use.
"""
if inbound and hasattr(self, "_rsock"):
return self._rsock
return self._sock
def listen(self):
"""Server-side cookie exchange
This method reads datagrams from the socket and initiates cookie
exchange, upon whose successful conclusion one can then proceed to
the accept method. Alternatively, accept can be called directly, in
which case it will call this method. In order to prevent denial-of-
service attacks, only a small, constant set of computing resources
are used during the listen phase.
On some platforms, listen must be called so that packets will be
forwarded to accepted connections. Doing so is therefore recommened
in all cases for portable code.
Return value: a peer address if a datagram from a new peer was
encountered, None if a datagram for a known peer was forwarded
"""
if not hasattr(self, "_listening"):
raise InvalidSocketError("listen called on non-listening socket")
self._pending_peer_address = None
try:
peer_address = self._udp_demux.service()
except socket.timeout:
peer_address = None
except socket.error as sock_err:
if sock_err.errno != errno.EWOULDBLOCK:
_logger.exception("Unexpected socket error in listen")
raise
peer_address = None
if not peer_address:
_logger.debug("Listen returning without peer")
return
# The demux advises that a datagram from a new peer may have arrived
if type(peer_address) is tuple:
# For this type of demux, the write BIO must be pointed at the peer
BIO_dgram_set_peer(self._wbio.value, peer_address)
self._udp_demux.forward()
self._listening_peer_address = peer_address
self._check_nbio()
self._listening = True
try:
_logger.debug("Invoking DTLSv1_listen for ssl: %d",
self._ssl.raw)
dtls_peer_address = DTLSv1_listen(self._ssl.value)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
# This method must be called again to forward the next datagram
_logger.debug("DTLSv1_listen must be resumed")
return
elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
_logger.debug("Wrong version number; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
_logger.debug("Mismatching cookie received; aborting handshake")
raise
elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
_logger.debug("No shared cipher; aborting handshake")
raise
_logger.exception("Unexpected error in DTLSv1_listen")
raise
finally:
self._listening = False
self._listening_peer_address = None
if type(peer_address) is tuple:
_logger.debug("New local peer: %s", dtls_peer_address)
self._pending_peer_address = peer_address
else:
self._pending_peer_address = dtls_peer_address
_logger.debug("New peer: %s", self._pending_peer_address)
return self._pending_peer_address
def accept(self):
"""Server-side UDP connection establishment
This method returns a server-side SSLConnection object, connected to
that peer most recently returned from the listen method and not yet
connected. If there is no such peer, then the listen method is invoked.
Return value: SSLConnection connected to a new peer, None if packet
forwarding only to an existing peer occurred.
"""
if not self._pending_peer_address:
if not self.listen():
_logger.debug("Accept returning without connection")
return
new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
self._cert_reqs, self._ssl_version,
self._ca_certs, self._do_handshake_on_connect,
self._suppress_ragged_eofs, self._ciphers,
cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
cb_user_config_ssl=self._user_config_ssl)
new_peer = self._pending_peer_address
self._pending_peer_address = None
if self._do_handshake_on_connect:
# Note that since that connection's socket was just created in its
# constructor, the following operation must be blocking; hence
# handshake-on-connect can only be used with a routing demux if
# listen is serviced by a separate application thread, or else we
# will hang in this call
new_conn.do_handshake()
_logger.debug("Accept returning new connection for new peer")
return new_conn, new_peer
def connect(self, peer_address):
"""Client-side UDP connection establishment
This method connects this object's underlying socket. It subsequently
performs a handshake if do_handshake_on_connect was set during
initialization.
Arguments:
peer_address - address tuple of server peer
"""
self._sock.connect(peer_address)
peer_address = self._sock.getpeername() # substituted host addrinfo
BIO_dgram_set_connected(self._wbio.value, peer_address)
assert self._wbio is self._rbio
if self._do_handshake_on_connect:
self.do_handshake()
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lambda: SSL_do_handshake(self._ssl.value),
ERR_HANDSHAKE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
self._handshake_done = True
_logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
"""Read data from connection
Read up to len bytes and return them.
Arguments:
len -- maximum number of bytes to read
Return value:
string containing read bytes
"""
try:
return self._wrap_socket_library_call(
lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
def write(self, data):
"""Write data to connection
Write data as string of bytes.
Arguments:
data -- buffer containing data to be written
Return value:
number of bytes actually transmitted
"""
try:
ret = self._wrap_socket_library_call(
lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
raise_ssl_error(ERR_PORT_UNREACHABLE, err)
raise
if ret:
self._handshake_done = True
return ret
def shutdown(self):
"""Shut down the DTLS connection
This method attemps to complete a bidirectional shutdown between
peers. For non-blocking sockets, it should be called repeatedly until
it no longer raises continuation request exceptions.
"""
if hasattr(self, "_listening"):
# Listening server-side sockets cannot be shut down
return
try:
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
except openssl_error() as err:
if err.result == 0:
# close-notify alert was just sent; wait for same from peer
# Note: while it might seem wise to suppress further read-aheads
# with SSL_set_read_ahead here, doing so causes a shutdown
# failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
# initiator side. And test_starttls does pass.
self._wrap_socket_library_call(
lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
else:
raise
if hasattr(self, "_rsock"):
# Return wrapped connected server socket (non-listening)
return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
self._ctx,
BIO_dgram_get_peer(self._wbio.value))
# Return unwrapped client-side socket or unwrapped server-side socket
# for single-socket servers
return self._sock
peer_certificate = getpeercert # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
try:
stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
except openssl_error():
return
peer_cert_chain = [_Rsrc(cert) for cert in certs]
ret = []
if binary_form:
ret = [i2d_X509(x.value) for x in peer_cert_chain]
elif len(peer_cert_chain):
ret = [decode_cert(x) for x in peer_cert_chain]
return ret
def cipher(self):
"""Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed.
"""
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits
def pending(self):
"""Retrieve number of buffered bytes
Return the number of bytes that have been read from the socket and
buffered by this connection. Return 0 if no bytes have been buffered.
"""
return SSL_pending(self._ssl.value)
def get_timeout(self):
"""Retrieve the retransmission timedelta
Since datagrams are subject to packet loss, DTLS will perform
packet retransmission if a response is not received after a certain
time interval during the handshaking phase. When using non-blocking
sockets, the application must call back after that time interval to
allow for the retransmission to occur. This method returns the
timedelta after which to perform the call to handle_timeout, or None
if no such callback is needed given the current handshake state.
"""
return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
"""Perform datagram retransmission, if required
This method should be called after the timedelta retrieved from
get_timeout has expired, and no datagrams were received in the
meantime. If datagrams were received, a new timeout needs to be
requested.
Return value:
True -- retransmissions were performed successfully
False -- a timeout was not in effect or had not yet expired
Exceptions:
Raised when retransmissions fail or too many timeouts occur.
"""
return DTLSv1_handle_timeout(self._ssl.value)
|
rbit/pydtls | dtls/sslconnection.py | SSLConnection.cipher | python | def cipher(self):
if not self._handshake_done:
return
current_cipher = SSL_get_current_cipher(self._ssl.value)
cipher_name = SSL_CIPHER_get_name(current_cipher)
cipher_version = SSL_CIPHER_get_version(current_cipher)
cipher_bits = SSL_CIPHER_get_bits(current_cipher)
return cipher_name, cipher_version, cipher_bits | Retrieve information about the current cipher
Return a triple consisting of cipher name, SSL protocol version defining
its use, and the number of secret bits. Return None if handshaking
has not been completed. | train | https://github.com/rbit/pydtls/blob/41a71fccd990347d0de5f42418fea1e4e733359c/dtls/sslconnection.py#L855-L870 | null | class SSLConnection(object):
"""DTLS peer association
This class associates two DTLS peer instances, wrapping OpenSSL library
state including SSL (struct ssl_st), SSL_CTX, and BIO instances.
"""
_rnd_key = urandom(16)
def _init_server(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
if peer_address:
# Connect directly to this client peer, bypassing the demux
rsock = self._sock
BIO_dgram_set_connected(self._wbio.value, peer_address)
else:
from demux import UDPDemux
self._udp_demux = UDPDemux(self._sock)
rsock = self._udp_demux.get_connection(None)
if rsock is self._sock:
self._rbio = self._wbio
else:
self._rsock = rsock
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
server_method = DTLS_server_method
if self._ssl_version == PROTOCOL_DTLSv1_2:
server_method = DTLSv1_2_server_method
elif self._ssl_version == PROTOCOL_DTLSv1:
server_method = DTLSv1_server_method
self._ctx = _CTX(SSL_CTX_new(server_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
SSL_CTX_set_session_cache_mode(self._ctx.value, SSL_SESS_CACHE_OFF)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
elif self._cert_reqs == CERT_OPTIONAL:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE
else:
verify_mode = SSL_VERIFY_PEER | SSL_VERIFY_CLIENT_ONCE | \
SSL_VERIFY_FAIL_IF_NO_PEER_CERT
self._config_ssl_ctx(verify_mode)
if not peer_address:
# Configure UDP listening socket
self._listening = False
self._listening_peer_address = None
self._pending_peer_address = None
self._cb_keepalive = SSL_CTX_set_cookie_cb(
self._ctx.value,
_CallbackProxy(self._generate_cookie_cb),
_CallbackProxy(self._verify_cookie_cb))
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if peer_address and self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _init_client(self, peer_address):
if self._sock.type != socket.SOCK_DGRAM:
raise InvalidSocketError("sock must be of type SOCK_DGRAM")
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
client_method = DTLSv1_2_client_method # no "any" exists, therefore use v1_2 (highest possible)
if self._ssl_version == PROTOCOL_DTLSv1_2:
client_method = DTLSv1_2_client_method
elif self._ssl_version == PROTOCOL_DTLSv1:
client_method = DTLSv1_client_method
self._ctx = _CTX(SSL_CTX_new(client_method()))
self._intf_ssl_ctx = SSLContext(self._ctx.value)
if self._cert_reqs == CERT_NONE:
verify_mode = SSL_VERIFY_NONE
else:
verify_mode = SSL_VERIFY_PEER
self._config_ssl_ctx(verify_mode)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_connect_state(self._ssl.value)
if peer_address:
return lambda: self.connect(peer_address)
def _config_ssl_ctx(self, verify_mode):
SSL_CTX_set_verify(self._ctx.value, verify_mode)
SSL_CTX_set_read_ahead(self._ctx.value, 1)
# Compression occurs at the stream layer now, leading to datagram
# corruption when packet loss occurs
SSL_CTX_set_options(self._ctx.value, SSL_OP_NO_COMPRESSION)
if self._certfile:
SSL_CTX_use_certificate_chain_file(self._ctx.value, self._certfile)
if self._keyfile:
SSL_CTX_use_PrivateKey_file(self._ctx.value, self._keyfile,
SSL_FILE_TYPE_PEM)
if self._ca_certs:
SSL_CTX_load_verify_locations(self._ctx.value, self._ca_certs, None)
if self._ciphers:
try:
SSL_CTX_set_cipher_list(self._ctx.value, self._ciphers)
except openssl_error() as err:
raise_ssl_error(ERR_NO_CIPHER, err)
if self._user_config_ssl_ctx:
self._user_config_ssl_ctx(self._intf_ssl_ctx)
def _copy_server(self):
source = self._sock
self._udp_demux = source._udp_demux
rsock = self._udp_demux.get_connection(source._pending_peer_address)
self._ctx = source._ctx
self._ssl = source._ssl
new_source_wbio = _BIO(BIO_new_dgram(source._sock.fileno(),
BIO_NOCLOSE))
if hasattr(source, "_rsock"):
self._sock = source._sock
self._rsock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(rsock.fileno(), BIO_NOCLOSE))
new_source_rbio = _BIO(BIO_new_dgram(source._rsock.fileno(),
BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._pending_peer_address)
else:
self._sock = rsock
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = self._wbio
new_source_rbio = new_source_wbio
BIO_dgram_set_connected(self._wbio.value,
source._pending_peer_address)
source._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(source._ssl.value)
SSL_set_accept_state(source._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
source._rbio = new_source_rbio
source._wbio = new_source_wbio
SSL_set_bio(source._ssl.value,
new_source_rbio.value,
new_source_wbio.value)
new_source_rbio.disown()
new_source_wbio.disown()
def _reconnect_unwrapped(self):
source = self._sock
self._sock = source._wsock
self._udp_demux = source._demux
self._rsock = source._rsock
self._ctx = source._ctx
self._wbio = _BIO(BIO_new_dgram(self._sock.fileno(), BIO_NOCLOSE))
self._rbio = _BIO(BIO_new_dgram(self._rsock.fileno(), BIO_NOCLOSE))
BIO_dgram_set_peer(self._wbio.value, source._peer_address)
self._ssl = _SSL(SSL_new(self._ctx.value))
self._intf_ssl = SSL(self._ssl.value)
SSL_set_accept_state(self._ssl.value)
if self._user_config_ssl:
self._user_config_ssl(self._intf_ssl)
if self._do_handshake_on_connect:
return lambda: self.do_handshake()
def _check_nbio(self):
timeout = self._sock.gettimeout()
if self._wbio_nb != timeout is not None:
BIO_set_nbio(self._wbio.value, timeout is not None)
self._wbio_nb = timeout is not None
if self._wbio is not self._rbio:
timeout = self._rsock.gettimeout()
if self._rbio_nb != timeout is not None:
BIO_set_nbio(self._rbio.value, timeout is not None)
self._rbio_nb = timeout is not None
return timeout # read channel timeout
def _wrap_socket_library_call(self, call, timeout_error):
timeout_sec_start = timeout_sec = self._check_nbio()
# Pass the call if the socket is blocking or non-blocking
if not timeout_sec: # None (blocking) or zero (non-blocking)
return call()
start_time = datetime.datetime.now()
read_sock = self.get_socket(True)
need_select = False
while timeout_sec > 0:
if need_select:
if not select([read_sock], [], [], timeout_sec)[0]:
break
timeout_sec = timeout_sec_start - \
(datetime.datetime.now() - start_time).total_seconds()
try:
return call()
except openssl_error() as err:
if err.ssl_error == SSL_ERROR_WANT_READ:
need_select = True
continue
raise
raise_ssl_error(timeout_error)
def _get_cookie(self, ssl):
    """Compute the DTLS HelloVerifyRequest cookie for the current peer.

    The cookie is an HMAC over the peer's address, keyed with this
    socket's random key (self._rnd_key), so it can be verified without
    keeping per-peer state.
    """
    # Only valid while a DTLSv1_listen cookie exchange is in progress,
    # and only for the SSL object driving that exchange.
    assert self._listening
    assert self._ssl.raw == ssl.raw
    if self._listening_peer_address:
        # Routing demux: the peer address was captured when listen
        # forwarded the datagram.
        peer_address = self._listening_peer_address
    else:
        # Otherwise ask the read BIO for the datagram's source address.
        peer_address = BIO_dgram_get_peer(self._rbio.value)
    cookie_hmac = hmac.new(self._rnd_key, str(peer_address))
    return cookie_hmac.digest()
def _generate_cookie_cb(self, ssl):
    # OpenSSL cookie-generation callback: delegate to the HMAC helper.
    return self._get_cookie(ssl)
def _verify_cookie_cb(self, ssl, cookie):
    # OpenSSL cookie-verification callback: recompute the expected
    # cookie for this peer and reject the handshake on mismatch.
    if self._get_cookie(ssl) != cookie:
        raise Exception("DTLS cookie mismatch")
def __init__(self, sock, keyfile=None, certfile=None,
             server_side=False, cert_reqs=CERT_NONE,
             ssl_version=PROTOCOL_DTLS, ca_certs=None,
             do_handshake_on_connect=True,
             suppress_ragged_eofs=True, ciphers=None,
             cb_user_config_ssl_ctx=None,
             cb_user_config_ssl=None):
    """Constructor

    Arguments:
    these arguments match the ones of the SSLSocket class in the
    standard library's ssl module; in addition,
    cb_user_config_ssl_ctx and cb_user_config_ssl are optional
    callbacks invoked with the wrapping SSLContext/SSL interface
    objects for custom configuration
    """
    # Argument sanity checks: key and certificate files must be given
    # together, a server must supply them, and requesting certificate
    # verification requires a CA file.
    if keyfile and not certfile or certfile and not keyfile:
        raise_ssl_error(ERR_BOTH_KEY_CERT_FILES)
    if server_side and not keyfile:
        raise_ssl_error(ERR_BOTH_KEY_CERT_FILES_SVR)
    if cert_reqs != CERT_NONE and not ca_certs:
        raise_ssl_error(ERR_NO_CERTS)

    if not ciphers:
        ciphers = "DEFAULT"

    # Remember the configuration; server sockets re-use it when
    # constructing per-peer child connections in accept().
    self._sock = sock
    self._keyfile = keyfile
    self._certfile = certfile
    self._cert_reqs = cert_reqs
    self._ssl_version = ssl_version
    self._ca_certs = ca_certs
    self._do_handshake_on_connect = do_handshake_on_connect
    self._suppress_ragged_eofs = suppress_ragged_eofs
    self._ciphers = ciphers
    self._handshake_done = False
    self._wbio_nb = self._rbio_nb = False  # BIOs start in blocking mode
    self._user_config_ssl_ctx = cb_user_config_ssl_ctx
    self._intf_ssl_ctx = None
    self._user_config_ssl = cb_user_config_ssl
    self._intf_ssl = None

    # Pick the initialization path: copying a listening SSLConnection
    # (server-side child), re-wrapping an unwrapped socket, or wrapping
    # a plain datagram socket as client or server.
    if isinstance(sock, SSLConnection):
        post_init = self._copy_server()
    elif isinstance(sock, _UnwrappedSocket):
        post_init = self._reconnect_unwrapped()
    else:
        try:
            peer_address = sock.getpeername()
        except socket.error:
            peer_address = None  # socket not connected yet
        if server_side:
            post_init = self._init_server(peer_address)
        else:
            post_init = self._init_client(peer_address)
        # NOTE: _copy_server/_reconnect_unwrapped invoke this callback
        # themselves, so it runs here only for the plain-socket paths.
        if self._user_config_ssl:
            self._user_config_ssl(self._intf_ssl)

    # On Windows, disable path MTU querying (unless the user already set
    # the option) and pin a conservative link MTU instead.
    if sys.platform.startswith('win') and \
            not (SSL_get_options(self._ssl.value) & SSL_OP_NO_QUERY_MTU):
        SSL_set_options(self._ssl.value, SSL_OP_NO_QUERY_MTU)
        DTLS_set_link_mtu(self._ssl.value, 576)

    # Attach the BIOs to the SSL object, which takes ownership of them.
    SSL_set_bio(self._ssl.value, self._rbio.value, self._wbio.value)
    self._rbio.disown()
    self._wbio.disown()
    # Run any deferred step (e.g. handshake-on-connect) now that the
    # SSL object is fully wired up.
    if post_init:
        post_init()
def get_socket(self, inbound):
    """Retrieve a socket used by this connection

    When inbound is True, then the socket from which this connection
    reads data is retrieved. Otherwise the socket to which this
    connection writes data is retrieved.

    Read and write sockets differ depending on whether this is a
    server- or a client-side connection, and on whether a routing
    demux is in use.
    """
    if not inbound:
        return self._sock
    # A dedicated read socket exists only for demuxed server-side
    # connections; fall back to the write socket otherwise.
    return getattr(self, "_rsock", self._sock)
def listen(self):
    """Server-side cookie exchange

    This method reads datagrams from the socket and initiates cookie
    exchange, upon whose successful conclusion one can then proceed to
    the accept method. Alternatively, accept can be called directly, in
    which case it will call this method. In order to prevent denial-of-
    service attacks, only a small, constant set of computing resources
    are used during the listen phase.

    On some platforms, listen must be called so that packets will be
    forwarded to accepted connections. Doing so is therefore recommended
    in all cases for portable code.

    Return value: a peer address if a datagram from a new peer was
    encountered, None if a datagram for a known peer was forwarded
    """
    if not hasattr(self, "_listening"):
        raise InvalidSocketError("listen called on non-listening socket")

    self._pending_peer_address = None
    # Let the demux read and route the next datagram; a timeout or
    # EWOULDBLOCK simply means there is nothing to service right now.
    try:
        peer_address = self._udp_demux.service()
    except socket.timeout:
        peer_address = None
    except socket.error as sock_err:
        if sock_err.errno != errno.EWOULDBLOCK:
            _logger.exception("Unexpected socket error in listen")
            raise
        peer_address = None

    if not peer_address:
        _logger.debug("Listen returning without peer")
        return

    # The demux advises that a datagram from a new peer may have arrived
    if type(peer_address) is tuple:
        # For this type of demux, the write BIO must be pointed at the peer
        BIO_dgram_set_peer(self._wbio.value, peer_address)
        self._udp_demux.forward()
        # Remember the address so _get_cookie can key the HMAC on it.
        self._listening_peer_address = peer_address

    self._check_nbio()
    self._listening = True
    try:
        _logger.debug("Invoking DTLSv1_listen for ssl: %d",
                      self._ssl.raw)
        dtls_peer_address = DTLSv1_listen(self._ssl.value)
    except openssl_error() as err:
        if err.ssl_error == SSL_ERROR_WANT_READ:
            # This method must be called again to forward the next datagram
            _logger.debug("DTLSv1_listen must be resumed")
            return
        elif err.errqueue and err.errqueue[0][0] == ERR_WRONG_VERSION_NUMBER:
            _logger.debug("Wrong version number; aborting handshake")
            raise
        elif err.errqueue and err.errqueue[0][0] == ERR_COOKIE_MISMATCH:
            _logger.debug("Mismatching cookie received; aborting handshake")
            raise
        elif err.errqueue and err.errqueue[0][0] == ERR_NO_SHARED_CIPHER:
            _logger.debug("No shared cipher; aborting handshake")
            raise
        _logger.exception("Unexpected error in DTLSv1_listen")
        raise
    finally:
        # Cookie exchange attempt is over; clear the listening state
        # whether it succeeded or not.
        self._listening = False
        self._listening_peer_address = None

    # Cookie exchange succeeded: record the new peer for accept().
    if type(peer_address) is tuple:
        _logger.debug("New local peer: %s", dtls_peer_address)
        self._pending_peer_address = peer_address
    else:
        self._pending_peer_address = dtls_peer_address
    _logger.debug("New peer: %s", self._pending_peer_address)
    return self._pending_peer_address
def accept(self):
    """Server-side UDP connection establishment

    This method returns a server-side SSLConnection object, connected to
    that peer most recently returned from the listen method and not yet
    connected. If there is no such peer, then the listen method is
    invoked.

    Return value: SSLConnection connected to a new peer, None if packet
    forwarding only to an existing peer occurred.
    """
    # Perform cookie exchange first if no peer is pending.
    if not self._pending_peer_address:
        if not self.listen():
            _logger.debug("Accept returning without connection")
            return
    # Build a child connection that copies this listening socket's
    # configuration (see _copy_server, selected because sock is an
    # SSLConnection).
    new_conn = SSLConnection(self, self._keyfile, self._certfile, True,
                             self._cert_reqs, self._ssl_version,
                             self._ca_certs, self._do_handshake_on_connect,
                             self._suppress_ragged_eofs, self._ciphers,
                             cb_user_config_ssl_ctx=self._user_config_ssl_ctx,
                             cb_user_config_ssl=self._user_config_ssl)
    new_peer = self._pending_peer_address
    self._pending_peer_address = None
    if self._do_handshake_on_connect:
        # Note that since that connection's socket was just created in its
        # constructor, the following operation must be blocking; hence
        # handshake-on-connect can only be used with a routing demux if
        # listen is serviced by a separate application thread, or else we
        # will hang in this call
        new_conn.do_handshake()
    _logger.debug("Accept returning new connection for new peer")
    return new_conn, new_peer
def connect(self, peer_address):
    """Client-side UDP connection establishment

    This method connects this object's underlying socket. It
    subsequently performs a handshake if do_handshake_on_connect was
    set during initialization.

    Arguments:
    peer_address -- address tuple of server peer
    """
    self._sock.connect(peer_address)
    peer_address = self._sock.getpeername()  # substituted host addrinfo
    BIO_dgram_set_connected(self._wbio.value, peer_address)
    # Client-side connections use a single BIO for both directions.
    assert self._wbio is self._rbio
    if self._do_handshake_on_connect:
        self.do_handshake()
def do_handshake(self):
    """Perform a handshake with the peer

    This method forces an explicit handshake to be performed with either
    the client or server peer. Honors the socket's timeout; raises an
    SSL error with ERR_HANDSHAKE_TIMEOUT on expiry, or with
    ERR_PORT_UNREACHABLE when the peer is unreachable.
    """
    _logger.debug("Initiating handshake...")
    try:
        self._wrap_socket_library_call(
            lambda: SSL_do_handshake(self._ssl.value),
            ERR_HANDSHAKE_TIMEOUT)
    except openssl_error() as err:
        # A -1 syscall result maps to an ICMP port-unreachable condition.
        if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
            raise_ssl_error(ERR_PORT_UNREACHABLE, err)
        raise
    self._handshake_done = True
    _logger.debug("...completed handshake")
def read(self, len=1024, buffer=None):
    """Read data from connection

    Read up to len bytes and return them.

    Arguments:
    len -- maximum number of bytes to read (name shadows the builtin,
    kept for compatibility with the ssl module's call interface)
    buffer -- optional pre-allocated buffer passed through to SSL_read;
    presumably data is read into it instead of returned — confirm
    against SSL_read's wrapper

    Return value:
    string containing read bytes
    """
    try:
        return self._wrap_socket_library_call(
            lambda: SSL_read(self._ssl.value, len, buffer), ERR_READ_TIMEOUT)
    except openssl_error() as err:
        # A -1 syscall result maps to an ICMP port-unreachable condition.
        if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
            raise_ssl_error(ERR_PORT_UNREACHABLE, err)
        raise
def write(self, data):
    """Write data to connection

    Write data as string of bytes.

    Arguments:
    data -- buffer containing data to be written

    Return value:
    number of bytes actually transmitted
    """
    try:
        ret = self._wrap_socket_library_call(
            lambda: SSL_write(self._ssl.value, data), ERR_WRITE_TIMEOUT)
    except openssl_error() as err:
        # A -1 syscall result maps to an ICMP port-unreachable condition.
        if err.ssl_error == SSL_ERROR_SYSCALL and err.result == -1:
            raise_ssl_error(ERR_PORT_UNREACHABLE, err)
        raise
    # A successful write implies the handshake completed along the way.
    if ret:
        self._handshake_done = True
    return ret
def shutdown(self):
    """Shut down the DTLS connection

    This method attempts to complete a bidirectional shutdown between
    peers. For non-blocking sockets, it should be called repeatedly
    until it no longer raises continuation request exceptions.

    Return value: the underlying socket (or an _UnwrappedSocket wrapper
    for demuxed server-side connections), suitable for re-wrapping;
    None for listening server-side sockets.
    """
    if hasattr(self, "_listening"):
        # Listening server-side sockets cannot be shut down
        return

    try:
        self._wrap_socket_library_call(
            lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
    except openssl_error() as err:
        if err.result == 0:
            # close-notify alert was just sent; wait for same from peer
            # Note: while it might seem wise to suppress further read-aheads
            # with SSL_set_read_ahead here, doing so causes a shutdown
            # failure (ret: -1, SSL_ERROR_SYSCALL) on the DTLS shutdown
            # initiator side. And test_starttls does pass.
            self._wrap_socket_library_call(
                lambda: SSL_shutdown(self._ssl.value), ERR_READ_TIMEOUT)
        else:
            raise

    if hasattr(self, "_rsock"):
        # Return wrapped connected server socket (non-listening)
        return _UnwrappedSocket(self._sock, self._rsock, self._udp_demux,
                                self._ctx,
                                BIO_dgram_get_peer(self._wbio.value))
    # Return unwrapped client-side socket or unwrapped server-side socket
    # for single-socket servers
    return self._sock
def getpeercert(self, binary_form=False):
    """Retrieve the peer's certificate

    When binary form is requested, the peer's DER-encoded certificate is
    returned if it was transmitted during the handshake.

    When binary form is not requested, and the peer's certificate has
    been validated, then a certificate dictionary is returned. If the
    certificate was not validated, an empty dictionary is returned.

    In all cases, None is returned if no certificate was received from
    the peer.
    """
    try:
        peer_cert = _X509(SSL_get_peer_certificate(self._ssl.value))
    except openssl_error():
        # No certificate was presented by the peer.
        return
    if binary_form:
        return i2d_X509(peer_cert.value)
    if self._cert_reqs == CERT_NONE:
        # Certificate was not validated; don't expose unverified fields.
        return {}
    return decode_cert(peer_cert)

peer_certificate = getpeercert  # compatibility with _ssl call interface
def getpeercertchain(self, binary_form=False):
    """Retrieve the peer's certificate chain

    Return a list of DER-encoded certificates when binary_form is true,
    otherwise a list of decoded certificate dictionaries. Return None
    if the chain could not be retrieved, and an empty list when the
    peer presented no certificates.
    """
    try:
        stack, num, certs = SSL_get_peer_cert_chain(self._ssl.value)
    except openssl_error():
        return
    # Wrap each raw certificate pointer in a resource holder —
    # presumably so the underlying objects are freed; confirm against
    # _Rsrc's semantics.
    peer_cert_chain = [_Rsrc(cert) for cert in certs]
    ret = []
    if binary_form:
        ret = [i2d_X509(x.value) for x in peer_cert_chain]
    elif len(peer_cert_chain):
        ret = [decode_cert(x) for x in peer_cert_chain]
    return ret
def pending(self):
    """Retrieve number of buffered bytes

    Return the number of bytes that have been read from the socket and
    buffered by this connection. Return 0 if no bytes have been
    buffered.
    """
    return SSL_pending(self._ssl.value)
def get_timeout(self):
    """Retrieve the retransmission timedelta

    Since datagrams are subject to packet loss, DTLS will perform
    packet retransmission if a response is not received after a certain
    time interval during the handshaking phase. When using non-blocking
    sockets, the application must call back after that time interval to
    allow for the retransmission to occur. This method returns the
    timedelta after which to perform the call to handle_timeout, or
    None if no such callback is needed given the current handshake
    state.
    """
    return DTLSv1_get_timeout(self._ssl.value)
def handle_timeout(self):
    """Perform datagram retransmission, if required

    This method should be called after the timedelta retrieved from
    get_timeout has expired, and no datagrams were received in the
    meantime. If datagrams were received, a new timeout needs to be
    requested.

    Return value:
    True -- retransmissions were performed successfully
    False -- a timeout was not in effect or had not yet expired

    Exceptions:
    Raised when retransmissions fail or too many timeouts occur.
    """
    return DTLSv1_handle_timeout(self._ssl.value)
|
def _prep_bins():
    """Support for running straight out of a cloned source directory
    instead of an installed distribution"""
    # Local imports keep module load light when no prebuilts exist.
    from os import path
    from sys import platform, maxsize
    from shutil import copy
    # Pick the prebuilt directory matching this platform and word size.
    bit_suffix = "-x86_64" if maxsize > 2**32 else "-x86"
    package_root = path.abspath(path.dirname(__file__))
    prebuilt_path = path.join(package_root, "prebuilt", platform + bit_suffix)
    config = {"MANIFEST_DIR": prebuilt_path}
    # The manifest is a Python 2 config script listing FILES to copy.
    try:
        execfile(path.join(prebuilt_path, "manifest.pycfg"), config)
    except IOError:
        return  # there are no prebuilts for this platform - nothing to do
    files = map(lambda x: path.join(prebuilt_path, x), config["FILES"])
    for prebuilt_file in files:
        # Best-effort copy into the package root; ignore failures
        # (e.g. file already present or read-only destination).
        try:
            copy(path.join(prebuilt_path, prebuilt_file), package_root)
        except IOError:
            pass
# Copyright 2012 Ray Brown
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# The License is also distributed with this work in the file named "LICENSE."
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyDTLS package
This package exports OpenSSL's DTLS support to Python. Calling its patch
function will add the constant PROTOCOL_DTLSv1 to the Python standard library's
ssl module. Subsequently passing a datagram socket to that module's
wrap_socket function (or instantiating its SSLSocket class with a datagram
socket) will activate this module's DTLS implementation for the returned
SSLSocket instance.
Instead of or in addition to invoking the patch functionality, the
SSLConnection class can be used directly for secure communication over datagram
sockets.
wrap_socket's parameters and their semantics have been maintained.
"""
# Package version as a (major, minor, patch) tuple.
VERSION = 1, 2, 3

_prep_bins()  # prepare before module imports

# Public API re-exports (Python 2 implicit-relative imports).
from patch import do_patch
from sslconnection import SSLContext, SSL, SSLConnection
from demux import force_routing_demux, reset_default_demux
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.