| Column | Type / range |
|---|---|
| hexsha | stringlengths 40–40 |
| size | int64 4–996k |
| ext | stringclasses 8 values |
| lang | stringclasses 1 value |
| max_stars_repo_path | stringlengths 4–245 |
| max_stars_repo_name | stringlengths 6–130 |
| max_stars_repo_head_hexsha | stringlengths 40–40 |
| max_stars_repo_licenses | listlengths 1–10 |
| max_stars_count | int64 1–191k, nullable |
| max_stars_repo_stars_event_min_datetime | stringlengths 24–24, nullable |
| max_stars_repo_stars_event_max_datetime | stringlengths 24–24, nullable |
| max_issues_repo_path | stringlengths 4–245 |
| max_issues_repo_name | stringlengths 6–130 |
| max_issues_repo_head_hexsha | stringlengths 40–40 |
| max_issues_repo_licenses | listlengths 1–10 |
| max_issues_count | int64 1–67k, nullable |
| max_issues_repo_issues_event_min_datetime | stringlengths 24–24, nullable |
| max_issues_repo_issues_event_max_datetime | stringlengths 24–24, nullable |
| max_forks_repo_path | stringlengths 4–245 |
| max_forks_repo_name | stringlengths 6–130 |
| max_forks_repo_head_hexsha | stringlengths 40–40 |
| max_forks_repo_licenses | listlengths 1–10 |
| max_forks_count | int64 1–105k, nullable |
| max_forks_repo_forks_event_min_datetime | stringlengths 24–24, nullable |
| max_forks_repo_forks_event_max_datetime | stringlengths 24–24, nullable |
| content | stringlengths 4–996k |
| avg_line_length | float64 1.33–58.2k |
| max_line_length | int64 2–323k |
| alphanum_fraction | float64 0–0.97 |
| content_no_comment | stringlengths 0–946k |
| is_comment_constant_removed | bool, 2 classes |
| is_sharp_comment_removed | bool, 1 class |
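Each row below pairs this metadata with the full file contents. As a minimal sketch of how the schema can be queried (assuming the rows have been exported to a Parquet file; the filename code_files.parquet and the filter thresholds are illustrative, not part of the dataset):

import pandas as pd

# Path is a placeholder; point it at your local export of these rows.
df = pd.read_parquet("code_files.parquet")

# Example: keep small, MIT-licensed Python files and inspect their line stats.
mask = (
    (df["ext"] == "py")
    & (df["size"] < 10_000)
    & df["max_stars_repo_licenses"].apply(lambda licenses: "MIT" in licenses)
)
print(df.loc[mask, ["max_stars_repo_path", "size", "avg_line_length", "max_line_length"]])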
hexsha: 790997b8b46042a12bd1329e0cad3c963a64e3e7 | size: 37,064 | ext: py | lang: Python
max_stars_repo_path: redis/connection.py | max_stars_repo_name: theatlantic/redis-py | max_stars_repo_head_hexsha: 767e260169de03f676c9f208eb8583db13c212b3 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: redis/connection.py | max_issues_repo_name: theatlantic/redis-py | max_issues_repo_head_hexsha: 767e260169de03f676c9f208eb8583db13c212b3 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: redis/connection.py | max_forks_repo_name: theatlantic/redis-py | max_forks_repo_head_hexsha: 767e260169de03f676c9f208eb8583db13c212b3 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from __future__ import with_statement
from distutils.version import StrictVersion
from itertools import chain
from select import select
import os
import socket
import sys
import threading
import warnings
try:
import ssl
ssl_available = True
except ImportError:
ssl_available = False
from redis._compat import (b, xrange, imap, byte_to_chr, unicode, bytes, long,
BytesIO, nativestr, basestring, iteritems,
LifoQueue, Empty, Full, urlparse, parse_qs,
unquote)
from redis.exceptions import (
RedisError,
ConnectionError,
TimeoutError,
BusyLoadingError,
ResponseError,
InvalidResponse,
AuthenticationError,
NoScriptError,
ExecAbortError,
ReadOnlyError
)
from redis.utils import HIREDIS_AVAILABLE
if HIREDIS_AVAILABLE:
import hiredis
hiredis_version = StrictVersion(hiredis.__version__)
HIREDIS_SUPPORTS_CALLABLE_ERRORS = \
hiredis_version >= StrictVersion('0.1.3')
HIREDIS_SUPPORTS_BYTE_BUFFER = \
hiredis_version >= StrictVersion('0.1.4')
if not HIREDIS_SUPPORTS_BYTE_BUFFER:
msg = ("redis-py works best with hiredis >= 0.1.4. You're running "
"hiredis %s. Please consider upgrading." % hiredis.__version__)
warnings.warn(msg)
HIREDIS_USE_BYTE_BUFFER = True
# only use byte buffer if hiredis supports it and the Python version
# is >= 2.7
if not HIREDIS_SUPPORTS_BYTE_BUFFER or (
sys.version_info[0] == 2 and sys.version_info[1] < 7):
HIREDIS_USE_BYTE_BUFFER = False
SYM_STAR = b('*')
SYM_DOLLAR = b('$')
SYM_CRLF = b('\r\n')
SYM_EMPTY = b('')
SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server."
class Token(object):
"""
Literal strings in Redis commands, such as the command names and any
hard-coded arguments, are wrapped in this class so we know not to apply
any encoding rules on them.
"""
def __init__(self, value):
if isinstance(value, Token):
value = value.value
self.value = value
def __repr__(self):
return self.value
def __str__(self):
return self.value
class BaseParser(object):
EXCEPTION_CLASSES = {
'ERR': ResponseError,
'EXECABORT': ExecAbortError,
'LOADING': BusyLoadingError,
'NOSCRIPT': NoScriptError,
'READONLY': ReadOnlyError,
}
def parse_error(self, response):
"Parse an error response"
error_code = response.split(' ')[0]
if error_code in self.EXCEPTION_CLASSES:
response = response[len(error_code) + 1:]
return self.EXCEPTION_CLASSES[error_code](response)
return ResponseError(response)
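# Illustrative note (not part of the original file): parse_error() maps the
# first word of an error reply to an exception class, e.g.
#   parse_error("NOSCRIPT No matching script. Please use EVAL.")
# returns NoScriptError("No matching script. Please use EVAL."), while an
# unknown prefix such as "WRONGTYPE ..." falls through to a plain ResponseError.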
class SocketBuffer(object):
def __init__(self, socket, socket_read_size):
self._sock = socket
self.socket_read_size = socket_read_size
self._buffer = BytesIO()
# number of bytes written to the buffer from the socket
self.bytes_written = 0
# number of bytes read from the buffer
self.bytes_read = 0
@property
def length(self):
return self.bytes_written - self.bytes_read
def _read_from_socket(self, length=None):
socket_read_size = self.socket_read_size
buf = self._buffer
buf.seek(self.bytes_written)
marker = 0
try:
while True:
data = self._sock.recv(socket_read_size)
# an empty string indicates the server shutdown the socket
if isinstance(data, bytes) and len(data) == 0:
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
buf.write(data)
data_length = len(data)
self.bytes_written += data_length
marker += data_length
if length is not None and length > marker:
continue
break
except socket.timeout:
raise TimeoutError("Timeout reading from socket")
except socket.error:
e = sys.exc_info()[1]
raise ConnectionError("Error while reading from socket: %s" %
(e.args,))
def read(self, length):
length = length + 2 # make sure to read the \r\n terminator
# make sure we've read enough data from the socket
if length > self.length:
self._read_from_socket(length - self.length)
self._buffer.seek(self.bytes_read)
data = self._buffer.read(length)
self.bytes_read += len(data)
# purge the buffer when we've consumed it all so it doesn't
# grow forever
if self.bytes_read == self.bytes_written:
self.purge()
return data[:-2]
def readline(self):
buf = self._buffer
buf.seek(self.bytes_read)
data = buf.readline()
while not data.endswith(SYM_CRLF):
# there's more data in the socket that we need
self._read_from_socket()
buf.seek(self.bytes_read)
data = buf.readline()
self.bytes_read += len(data)
# purge the buffer when we've consumed it all so it doesn't
# grow forever
if self.bytes_read == self.bytes_written:
self.purge()
return data[:-2]
def purge(self):
self._buffer.seek(0)
self._buffer.truncate()
self.bytes_written = 0
self.bytes_read = 0
def close(self):
self.purge()
self._buffer.close()
self._buffer = None
self._sock = None
class PythonParser(BaseParser):
"Plain Python parsing class"
encoding = None
def __init__(self, socket_read_size):
self.socket_read_size = socket_read_size
self._sock = None
self._buffer = None
def __del__(self):
try:
self.on_disconnect()
except Exception:
pass
def on_connect(self, connection):
"Called when the socket connects"
self._sock = connection._sock
self._buffer = SocketBuffer(self._sock, self.socket_read_size)
if connection.decode_responses:
self.encoding = connection.encoding
def on_disconnect(self):
"Called when the socket disconnects"
if self._sock is not None:
self._sock.close()
self._sock = None
if self._buffer is not None:
self._buffer.close()
self._buffer = None
self.encoding = None
def can_read(self):
return self._buffer and bool(self._buffer.length)
def read_response(self):
response = self._buffer.readline()
if not response:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
byte, response = byte_to_chr(response[0]), response[1:]
if byte not in ('-', '+', ':', '$', '*'):
raise InvalidResponse("Protocol Error: %s, %s" %
(str(byte), str(response)))
# server returned an error
if byte == '-':
response = nativestr(response)
error = self.parse_error(response)
# if the error is a ConnectionError, raise immediately so the user
# is notified
if isinstance(error, ConnectionError):
raise error
# otherwise, we're dealing with a ResponseError that might belong
# inside a pipeline response. the connection's read_response()
# and/or the pipeline's execute() will raise this error if
# necessary, so just return the exception instance here.
return error
# single value
elif byte == '+':
pass
# int value
elif byte == ':':
response = long(response)
# bulk response
elif byte == '$':
length = int(response)
if length == -1:
return None
response = self._buffer.read(length)
# multi-bulk response
elif byte == '*':
length = int(response)
if length == -1:
return None
response = [self.read_response() for i in xrange(length)]
if isinstance(response, bytes) and self.encoding:
response = response.decode(self.encoding)
return response
class HiredisParser(BaseParser):
"Parser class for connections using Hiredis"
def __init__(self, socket_read_size):
if not HIREDIS_AVAILABLE:
raise RedisError("Hiredis is not installed")
self.socket_read_size = socket_read_size
if HIREDIS_USE_BYTE_BUFFER:
self._buffer = bytearray(socket_read_size)
def __del__(self):
try:
self.on_disconnect()
except Exception:
pass
def on_connect(self, connection):
self._sock = connection._sock
kwargs = {
'protocolError': InvalidResponse,
'replyError': self.parse_error,
}
# hiredis < 0.1.3 doesn't support functions that create exceptions
if not HIREDIS_SUPPORTS_CALLABLE_ERRORS:
kwargs['replyError'] = ResponseError
if connection.decode_responses:
kwargs['encoding'] = connection.encoding
self._reader = hiredis.Reader(**kwargs)
self._next_response = False
def on_disconnect(self):
self._sock = None
self._reader = None
self._next_response = False
def can_read(self):
if not self._reader:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
if self._next_response is False:
self._next_response = self._reader.gets()
return self._next_response is not False
def read_response(self):
if not self._reader:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
# _next_response might be cached from a can_read() call
if self._next_response is not False:
response = self._next_response
self._next_response = False
return response
response = self._reader.gets()
socket_read_size = self.socket_read_size
while response is False:
try:
if HIREDIS_USE_BYTE_BUFFER:
bufflen = self._sock.recv_into(self._buffer)
if bufflen == 0:
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
else:
buffer = self._sock.recv(socket_read_size)
# an empty string indicates the server shutdown the socket
if not isinstance(buffer, bytes) or len(buffer) == 0:
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
except socket.timeout:
raise TimeoutError("Timeout reading from socket")
except socket.error:
e = sys.exc_info()[1]
raise ConnectionError("Error while reading from socket: %s" %
(e.args,))
if HIREDIS_USE_BYTE_BUFFER:
self._reader.feed(self._buffer, 0, bufflen)
else:
self._reader.feed(buffer)
# proactively, but not conclusively, check if more data is in the
# buffer. if the data received doesn't end with \r\n, there's more.
if HIREDIS_USE_BYTE_BUFFER:
if bufflen > 2 and \
self._buffer[bufflen - 2:bufflen] != SYM_CRLF:
continue
else:
if not buffer.endswith(SYM_CRLF):
continue
response = self._reader.gets()
# if an older version of hiredis is installed, we need to attempt
# to convert ResponseErrors to their appropriate types.
if not HIREDIS_SUPPORTS_CALLABLE_ERRORS:
if isinstance(response, ResponseError):
response = self.parse_error(response.args[0])
elif isinstance(response, list) and response and \
isinstance(response[0], ResponseError):
response[0] = self.parse_error(response[0].args[0])
# if the response is a ConnectionError or the response is a list and
# the first item is a ConnectionError, raise it as something bad
# happened
if isinstance(response, ConnectionError):
raise response
elif isinstance(response, list) and response and \
isinstance(response[0], ConnectionError):
raise response[0]
return response
if HIREDIS_AVAILABLE:
DefaultParser = HiredisParser
else:
DefaultParser = PythonParser
class Connection(object):
"Manages TCP communication to and from a Redis server"
description_format = "Connection<host=%(host)s,port=%(port)s,db=%(db)s>"
def __init__(self, host='localhost', port=6379, db=0, password=None,
socket_timeout=None, socket_connect_timeout=None,
socket_keepalive=False, socket_keepalive_options=None,
retry_on_timeout=False, encoding='utf-8',
encoding_errors='strict', decode_responses=False,
parser_class=DefaultParser, socket_read_size=65536):
self.pid = os.getpid()
self.host = host
self.port = int(port)
self.db = db
self.password = password
self.socket_timeout = socket_timeout
self.socket_connect_timeout = socket_connect_timeout or socket_timeout
self.socket_keepalive = socket_keepalive
self.socket_keepalive_options = socket_keepalive_options or {}
self.retry_on_timeout = retry_on_timeout
self.encoding = encoding
self.encoding_errors = encoding_errors
self.decode_responses = decode_responses
self._sock = None
self._parser = parser_class(socket_read_size=socket_read_size)
self._description_args = {
'host': self.host,
'port': self.port,
'db': self.db,
}
self._connect_callbacks = []
def __repr__(self):
return self.description_format % self._description_args
def __del__(self):
try:
self.disconnect()
except Exception:
pass
def register_connect_callback(self, callback):
self._connect_callbacks.append(callback)
def clear_connect_callbacks(self):
self._connect_callbacks = []
def connect(self):
"Connects to the Redis server if not already connected"
if self._sock:
return
try:
sock = self._connect()
except socket.error:
e = sys.exc_info()[1]
raise ConnectionError(self._error_message(e))
self._sock = sock
try:
self.on_connect()
except RedisError:
# clean up after any error in on_connect
self.disconnect()
raise
# run any user callbacks. right now the only internal callback
# is for pubsub channel/pattern resubscription
for callback in self._connect_callbacks:
callback(self)
def _connect(self):
"Create a TCP socket connection"
# we want to mimic what socket.create_connection does to support
# ipv4/ipv6, but we want to set options prior to calling
# socket.connect()
err = None
for res in socket.getaddrinfo(self.host, self.port, 0,
socket.SOCK_STREAM):
family, socktype, proto, canonname, socket_address = res
sock = None
try:
sock = socket.socket(family, socktype, proto)
# TCP_NODELAY
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# TCP_KEEPALIVE
if self.socket_keepalive:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
for k, v in iteritems(self.socket_keepalive_options):
sock.setsockopt(socket.SOL_TCP, k, v)
# set the socket_connect_timeout before we connect
sock.settimeout(self.socket_connect_timeout)
# connect
sock.connect(socket_address)
# set the socket_timeout now that we're connected
sock.settimeout(self.socket_timeout)
return sock
except socket.error as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
raise socket.error("socket.getaddrinfo returned an empty list")
def _error_message(self, exception):
# args for socket.error can either be (errno, "message")
# or just "message"
if len(exception.args) == 1:
return "Error connecting to %s:%s. %s." % \
(self.host, self.port, exception.args[0])
else:
return "Error %s connecting to %s:%s. %s." % \
(exception.args[0], self.host, self.port, exception.args[1])
def on_connect(self):
"Initialize the connection, authenticate and select a database"
self._parser.on_connect(self)
# if a password is specified, authenticate
if self.password:
self.send_command('AUTH', self.password)
if nativestr(self.read_response()) != 'OK':
raise AuthenticationError('Invalid Password')
# if a database is specified, switch to it
if self.db:
self.send_command('SELECT', self.db)
if nativestr(self.read_response()) != 'OK':
raise ConnectionError('Invalid Database')
def disconnect(self):
"Disconnects from the Redis server"
self._parser.on_disconnect()
if self._sock is None:
return
try:
self._sock.shutdown(socket.SHUT_RDWR)
self._sock.close()
except socket.error:
pass
self._sock = None
def send_packed_command(self, command):
"Send an already packed command to the Redis server"
if not self._sock:
self.connect()
try:
if isinstance(command, str):
command = [command]
for item in command:
self._sock.sendall(item)
except socket.timeout:
self.disconnect()
raise TimeoutError("Timeout writing to socket")
except socket.error:
e = sys.exc_info()[1]
self.disconnect()
if len(e.args) == 1:
_errno, errmsg = 'UNKNOWN', e.args[0]
else:
_errno, errmsg = e.args
raise ConnectionError("Error %s while writing to socket. %s." %
(_errno, errmsg))
except:
self.disconnect()
raise
def send_command(self, *args):
"Pack and send a command to the Redis server"
self.send_packed_command(self.pack_command(*args))
def can_read(self, timeout=0):
"Poll the socket to see if there's data that can be read."
sock = self._sock
if not sock:
self.connect()
sock = self._sock
return self._parser.can_read() or \
bool(select([sock], [], [], timeout)[0])
def read_response(self):
"Read the response from a previously sent command"
try:
response = self._parser.read_response()
except:
self.disconnect()
raise
if isinstance(response, ResponseError):
raise response
return response
def encode(self, value):
"Return a bytestring representation of the value"
if isinstance(value, Token):
return b(value.value)
elif isinstance(value, bytes):
return value
elif isinstance(value, (int, long)):
value = b(str(value))
elif isinstance(value, float):
value = b(repr(value))
elif not isinstance(value, basestring):
value = str(value)
if isinstance(value, unicode):
value = value.encode(self.encoding, self.encoding_errors)
return value
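# Illustrative note (not part of the original file): encode() normalizes every
# command argument to a bytestring, e.g. encode(123) -> b'123' and
# encode(1.5) -> b'1.5'; unicode values are encoded with self.encoding
# (utf-8 by default), so u'key' -> b'key'.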
def pack_command(self, *args):
"Pack a series of arguments into the Redis protocol"
output = []
# the client might have included 1 or more literal arguments in
# the command name, e.g., 'CONFIG GET'. The Redis server expects these
# arguments to be sent separately, so split the first argument
# manually. All of these arguments get wrapped in the Token class
# to prevent them from being encoded.
command = args[0]
if ' ' in command:
args = tuple([Token(s) for s in command.split(' ')]) + args[1:]
else:
args = (Token(command),) + args[1:]
buff = SYM_EMPTY.join(
(SYM_STAR, b(str(len(args))), SYM_CRLF))
for arg in imap(self.encode, args):
# to avoid large string mallocs, chunk the command into the
# output list if we're sending large values
if len(buff) > 6000 or len(arg) > 6000:
buff = SYM_EMPTY.join(
(buff, SYM_DOLLAR, b(str(len(arg))), SYM_CRLF))
output.append(buff)
output.append(arg)
buff = SYM_CRLF
else:
buff = SYM_EMPTY.join((buff, SYM_DOLLAR, b(str(len(arg))),
SYM_CRLF, arg, SYM_CRLF))
output.append(buff)
return output
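# Illustrative note (not part of the original file): for a small command the
# whole RESP payload ends up in a single chunk, e.g.
#   pack_command('SET', 'foo', 'bar')
# returns [b'*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n'];
# only commands or arguments larger than 6000 bytes are split across chunks.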
def pack_commands(self, commands):
"Pack multiple commands into the Redis protocol"
output = []
pieces = []
buffer_length = 0
for cmd in commands:
for chunk in self.pack_command(*cmd):
pieces.append(chunk)
buffer_length += len(chunk)
if buffer_length > 6000:
output.append(SYM_EMPTY.join(pieces))
buffer_length = 0
pieces = []
if pieces:
output.append(SYM_EMPTY.join(pieces))
return output
class SSLConnection(Connection):
description_format = "SSLConnection<host=%(host)s,port=%(port)s,db=%(db)s>"
def __init__(self, ssl_keyfile=None, ssl_certfile=None, ssl_cert_reqs=None,
ssl_ca_certs=None, **kwargs):
if not ssl_available:
raise RedisError("Python wasn't built with SSL support")
super(SSLConnection, self).__init__(**kwargs)
self.keyfile = ssl_keyfile
self.certfile = ssl_certfile
if ssl_cert_reqs is None:
ssl_cert_reqs = ssl.CERT_NONE
elif isinstance(ssl_cert_reqs, basestring):
CERT_REQS = {
'none': ssl.CERT_NONE,
'optional': ssl.CERT_OPTIONAL,
'required': ssl.CERT_REQUIRED
}
if ssl_cert_reqs not in CERT_REQS:
raise RedisError(
"Invalid SSL Certificate Requirements Flag: %s" %
ssl_cert_reqs)
ssl_cert_reqs = CERT_REQS[ssl_cert_reqs]
self.cert_reqs = ssl_cert_reqs
self.ca_certs = ssl_ca_certs
def _connect(self):
"Wrap the socket with SSL support"
sock = super(SSLConnection, self)._connect()
sock = ssl.wrap_socket(sock,
cert_reqs=self.cert_reqs,
keyfile=self.keyfile,
certfile=self.certfile,
ca_certs=self.ca_certs)
return sock
class UnixDomainSocketConnection(Connection):
description_format = "UnixDomainSocketConnection<path=%(path)s,db=%(db)s>"
def __init__(self, path='', db=0, password=None,
socket_timeout=None, encoding='utf-8',
encoding_errors='strict', decode_responses=False,
retry_on_timeout=False,
parser_class=DefaultParser, socket_read_size=65536):
self.pid = os.getpid()
self.path = path
self.db = db
self.password = password
self.socket_timeout = socket_timeout
self.retry_on_timeout = retry_on_timeout
self.encoding = encoding
self.encoding_errors = encoding_errors
self.decode_responses = decode_responses
self._sock = None
self._parser = parser_class(socket_read_size=socket_read_size)
self._description_args = {
'path': self.path,
'db': self.db,
}
self._connect_callbacks = []
def _connect(self):
"Create a Unix domain socket connection"
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.socket_timeout)
sock.connect(self.path)
return sock
def _error_message(self, exception):
# args for socket.error can either be (errno, "message")
# or just "message"
if len(exception.args) == 1:
return "Error connecting to unix socket: %s. %s." % \
(self.path, exception.args[0])
else:
return "Error %s connecting to unix socket: %s. %s." % \
(exception.args[0], self.path, exception.args[1])
class ConnectionPool(object):
"Generic connection pool"
@classmethod
def from_url(cls, url, db=None, decode_components=False, **kwargs):
"""
Return a connection pool configured from the given URL.
For example::
redis://[:password]@localhost:6379/0
rediss://[:password]@localhost:6379/0
unix://[:password]@/path/to/socket.sock?db=0
Three URL schemes are supported:
redis:// creates a normal TCP socket connection
rediss:// creates a SSL wrapped TCP socket connection
unix:// creates a Unix Domain Socket connection
There are several ways to specify a database number. The parse function
will return the first specified option:
1. A ``db`` querystring option, e.g. redis://localhost?db=0
2. If using the redis:// scheme, the path argument of the url, e.g.
redis://localhost/0
3. The ``db`` argument to this function.
If none of these options are specified, db=0 is used.
The ``decode_components`` argument allows this function to work with
percent-encoded URLs. If this argument is set to ``True`` all ``%xx``
escapes will be replaced by their single-character equivalents after
the URL has been parsed. This only applies to the ``hostname``,
``path``, and ``password`` components.
Any additional querystring arguments and keyword arguments will be
passed along to the ConnectionPool class's initializer. In the case
of conflicting arguments, querystring arguments always win.
"""
url_string = url
url = urlparse(url)
qs = ''
# in python2.6, custom URL schemes don't recognize querystring values;
# they're left as part of the url.path.
if '?' in url.path and not url.query:
# chop the querystring including the ? off the end of the url
# and reparse it.
qs = url.path.split('?', 1)[1]
url = urlparse(url_string[:-(len(qs) + 1)])
else:
qs = url.query
url_options = {}
for name, value in iteritems(parse_qs(qs)):
if value and len(value) > 0:
url_options[name] = value[0]
if decode_components:
password = unquote(url.password) if url.password else None
path = unquote(url.path) if url.path else None
hostname = unquote(url.hostname) if url.hostname else None
else:
password = url.password
path = url.path
hostname = url.hostname
# We only support redis:// and unix:// schemes.
if url.scheme == 'unix':
url_options.update({
'password': password,
'path': path,
'connection_class': UnixDomainSocketConnection,
})
else:
url_options.update({
'host': hostname,
'port': int(url.port or 6379),
'password': password,
})
# If there's a path argument, use it as the db argument if a
# querystring value wasn't specified
if 'db' not in url_options and path:
try:
url_options['db'] = int(path.replace('/', ''))
except (AttributeError, ValueError):
pass
if url.scheme == 'rediss':
url_options['connection_class'] = SSLConnection
# last shot at the db value
url_options['db'] = int(url_options.get('db', db or 0))
# update the arguments from the URL values
kwargs.update(url_options)
# backwards compatibility
if 'charset' in kwargs:
warnings.warn(DeprecationWarning(
'"charset" is deprecated. Use "encoding" instead'))
kwargs['encoding'] = kwargs.pop('charset')
if 'errors' in kwargs:
warnings.warn(DeprecationWarning(
'"errors" is deprecated. Use "encoding_errors" instead'))
kwargs['encoding_errors'] = kwargs.pop('errors')
return cls(**kwargs)
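# Illustrative note (not part of the original classmethod; the URLs are made up):
#   ConnectionPool.from_url('redis://:secret@localhost:6379/2')
#     -> TCP pool with host='localhost', port=6379, password='secret', db=2
#   ConnectionPool.from_url('rediss://localhost:6379/0')
#     -> same shape, but connection_class=SSLConnection
#   ConnectionPool.from_url('unix:///var/run/redis.sock?db=3')
#     -> connection_class=UnixDomainSocketConnection, path='/var/run/redis.sock', db=3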
def __init__(self, connection_class=Connection, max_connections=None,
**connection_kwargs):
"""
Create a connection pool. If max_connections is set, then this
object raises redis.ConnectionError when the pool's limit is reached.
By default, TCP connections are created unless connection_class is specified.
Use redis.UnixDomainSocketConnection for unix sockets.
Any additional keyword arguments are passed to the constructor of
connection_class.
"""
max_connections = max_connections or 2 ** 31
if not isinstance(max_connections, (int, long)) or max_connections < 0:
raise ValueError('"max_connections" must be a positive integer')
self.connection_class = connection_class
self.connection_kwargs = connection_kwargs
self.max_connections = max_connections
self.reset()
def __repr__(self):
return "%s<%s>" % (
type(self).__name__,
self.connection_class.description_format % self.connection_kwargs,
)
def reset(self):
self.pid = os.getpid()
self._created_connections = 0
self._available_connections = []
self._in_use_connections = set()
self._check_lock = threading.Lock()
def _checkpid(self):
if self.pid != os.getpid():
with self._check_lock:
if self.pid == os.getpid():
# another thread already did the work while we waited
# on the lock.
return
self.disconnect()
self.reset()
def get_connection(self, command_name, *keys, **options):
"Get a connection from the pool"
self._checkpid()
try:
connection = self._available_connections.pop()
except IndexError:
connection = self.make_connection()
self._in_use_connections.add(connection)
return connection
def make_connection(self):
"Create a new connection"
if self._created_connections >= self.max_connections:
raise ConnectionError("Too many connections")
self._created_connections += 1
return self.connection_class(**self.connection_kwargs)
def release(self, connection):
"Releases the connection back to the pool"
self._checkpid()
if connection.pid != self.pid:
return
self._in_use_connections.remove(connection)
self._available_connections.append(connection)
def disconnect(self):
"Disconnects all connections in the pool"
all_conns = chain(self._available_connections,
self._in_use_connections)
for connection in all_conns:
connection.disconnect()
class BlockingConnectionPool(ConnectionPool):
"""
Thread-safe blocking connection pool::
>>> from redis.client import Redis
>>> client = Redis(connection_pool=BlockingConnectionPool())
It performs the same function as the default
:py:class:`~redis.connection.ConnectionPool` implementation, in that
it maintains a pool of reusable connections that can be shared by
multiple redis clients (safely across threads if required).
The difference is that, in the event that a client tries to get a
connection from the pool when all of the connections are in use, rather than
raising a :py:class:`~redis.exceptions.ConnectionError` (as the default
:py:class:`~redis.connection.ConnectionPool` implementation does), it
makes the client wait ("blocks") for a specified number of seconds until
a connection becomes available.
Use ``max_connections`` to increase / decrease the pool size::
>>> pool = BlockingConnectionPool(max_connections=10)
Use ``timeout`` to tell it either how many seconds to wait for a connection
to become available, or to block forever:
# Block forever.
>>> pool = BlockingConnectionPool(timeout=None)
# Raise a ``ConnectionError`` after five seconds if a connection is
# not available.
>>> pool = BlockingConnectionPool(timeout=5)
"""
def __init__(self, max_connections=50, timeout=20,
connection_class=Connection, queue_class=LifoQueue,
**connection_kwargs):
self.queue_class = queue_class
self.timeout = timeout
super(BlockingConnectionPool, self).__init__(
connection_class=connection_class,
max_connections=max_connections,
**connection_kwargs)
def reset(self):
self.pid = os.getpid()
self._check_lock = threading.Lock()
# Create and fill up a thread safe queue with ``None`` values.
self.pool = self.queue_class(self.max_connections)
while True:
try:
self.pool.put_nowait(None)
except Full:
break
# Keep a list of actual connection instances so that we can
# disconnect them later.
self._connections = []
def make_connection(self):
"Make a fresh connection."
connection = self.connection_class(**self.connection_kwargs)
self._connections.append(connection)
return connection
def get_connection(self, command_name, *keys, **options):
"""
Get a connection, blocking for ``self.timeout`` until a connection
is available from the pool.
If the connection returned is ``None``, a new connection is created.
Because we use a last-in first-out queue, the existing connections
(having been returned to the pool after the initial ``None`` values
were added) will be returned before ``None`` values. This means we only
create new connections when we need to, i.e.: the actual number of
connections will only increase in response to demand.
"""
# Make sure we haven't changed process.
self._checkpid()
# Try and get a connection from the pool. If one isn't available within
# self.timeout then raise a ``ConnectionError``.
connection = None
try:
connection = self.pool.get(block=True, timeout=self.timeout)
except Empty:
# Note that this is not caught by the redis client and will be
# raised unless handled by application code. If you want never to
raise ConnectionError("No connection available.")
# If the ``connection`` is actually ``None`` then that's a cue to make
# a new connection to add to the pool.
if connection is None:
connection = self.make_connection()
return connection
def release(self, connection):
"Releases the connection back to the pool."
# Make sure we haven't changed process.
self._checkpid()
if connection.pid != self.pid:
return
# Put the connection back into the pool.
try:
self.pool.put_nowait(connection)
except Full:
# perhaps the pool has been reset() after a fork? regardless,
# we don't want this connection
pass
def disconnect(self):
"Disconnects all connections in the pool."
for connection in self._connections:
connection.disconnect()
avg_line_length: 35.776062 | max_line_length: 79 | alphanum_fraction: 0.594566
content_no_comment:
from __future__ import with_statement
from distutils.version import StrictVersion
from itertools import chain
from select import select
import os
import socket
import sys
import threading
import warnings
try:
import ssl
ssl_available = True
except ImportError:
ssl_available = False
from redis._compat import (b, xrange, imap, byte_to_chr, unicode, bytes, long,
BytesIO, nativestr, basestring, iteritems,
LifoQueue, Empty, Full, urlparse, parse_qs,
unquote)
from redis.exceptions import (
RedisError,
ConnectionError,
TimeoutError,
BusyLoadingError,
ResponseError,
InvalidResponse,
AuthenticationError,
NoScriptError,
ExecAbortError,
ReadOnlyError
)
from redis.utils import HIREDIS_AVAILABLE
if HIREDIS_AVAILABLE:
import hiredis
hiredis_version = StrictVersion(hiredis.__version__)
HIREDIS_SUPPORTS_CALLABLE_ERRORS = \
hiredis_version >= StrictVersion('0.1.3')
HIREDIS_SUPPORTS_BYTE_BUFFER = \
hiredis_version >= StrictVersion('0.1.4')
if not HIREDIS_SUPPORTS_BYTE_BUFFER:
msg = ("redis-py works best with hiredis >= 0.1.4. You're running "
"hiredis %s. Please consider upgrading." % hiredis.__version__)
warnings.warn(msg)
HIREDIS_USE_BYTE_BUFFER = True
# only use byte buffer if hiredis supports it and the Python version
# is >= 2.7
if not HIREDIS_SUPPORTS_BYTE_BUFFER or (
sys.version_info[0] == 2 and sys.version_info[1] < 7):
HIREDIS_USE_BYTE_BUFFER = False
SYM_STAR = b('*')
SYM_DOLLAR = b('$')
SYM_CRLF = b('\r\n')
SYM_EMPTY = b('')
SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server."
class Token(object):
def __init__(self, value):
if isinstance(value, Token):
value = value.value
self.value = value
def __repr__(self):
return self.value
def __str__(self):
return self.value
class BaseParser(object):
EXCEPTION_CLASSES = {
'ERR': ResponseError,
'EXECABORT': ExecAbortError,
'LOADING': BusyLoadingError,
'NOSCRIPT': NoScriptError,
'READONLY': ReadOnlyError,
}
def parse_error(self, response):
error_code = response.split(' ')[0]
if error_code in self.EXCEPTION_CLASSES:
response = response[len(error_code) + 1:]
return self.EXCEPTION_CLASSES[error_code](response)
return ResponseError(response)
class SocketBuffer(object):
def __init__(self, socket, socket_read_size):
self._sock = socket
self.socket_read_size = socket_read_size
self._buffer = BytesIO()
# number of bytes written to the buffer from the socket
self.bytes_written = 0
# number of bytes read from the buffer
self.bytes_read = 0
@property
def length(self):
return self.bytes_written - self.bytes_read
def _read_from_socket(self, length=None):
socket_read_size = self.socket_read_size
buf = self._buffer
buf.seek(self.bytes_written)
marker = 0
try:
while True:
data = self._sock.recv(socket_read_size)
# an empty string indicates the server shutdown the socket
if isinstance(data, bytes) and len(data) == 0:
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
buf.write(data)
data_length = len(data)
self.bytes_written += data_length
marker += data_length
if length is not None and length > marker:
continue
break
except socket.timeout:
raise TimeoutError("Timeout reading from socket")
except socket.error:
e = sys.exc_info()[1]
raise ConnectionError("Error while reading from socket: %s" %
(e.args,))
def read(self, length):
length = length + 2 # make sure to read the \r\n terminator
# make sure we've read enough data from the socket
if length > self.length:
self._read_from_socket(length - self.length)
self._buffer.seek(self.bytes_read)
data = self._buffer.read(length)
self.bytes_read += len(data)
if self.bytes_read == self.bytes_written:
self.purge()
return data[:-2]
def readline(self):
buf = self._buffer
buf.seek(self.bytes_read)
data = buf.readline()
while not data.endswith(SYM_CRLF):
self._read_from_socket()
buf.seek(self.bytes_read)
data = buf.readline()
self.bytes_read += len(data)
# purge the buffer when we've consumed it all so it doesn't
# grow forever
if self.bytes_read == self.bytes_written:
self.purge()
return data[:-2]
def purge(self):
self._buffer.seek(0)
self._buffer.truncate()
self.bytes_written = 0
self.bytes_read = 0
def close(self):
self.purge()
self._buffer.close()
self._buffer = None
self._sock = None
class PythonParser(BaseParser):
encoding = None
def __init__(self, socket_read_size):
self.socket_read_size = socket_read_size
self._sock = None
self._buffer = None
def __del__(self):
try:
self.on_disconnect()
except Exception:
pass
def on_connect(self, connection):
self._sock = connection._sock
self._buffer = SocketBuffer(self._sock, self.socket_read_size)
if connection.decode_responses:
self.encoding = connection.encoding
def on_disconnect(self):
if self._sock is not None:
self._sock.close()
self._sock = None
if self._buffer is not None:
self._buffer.close()
self._buffer = None
self.encoding = None
def can_read(self):
return self._buffer and bool(self._buffer.length)
def read_response(self):
response = self._buffer.readline()
if not response:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
byte, response = byte_to_chr(response[0]), response[1:]
if byte not in ('-', '+', ':', '$', '*'):
raise InvalidResponse("Protocol Error: %s, %s" %
(str(byte), str(response)))
# server returned an error
if byte == '-':
response = nativestr(response)
error = self.parse_error(response)
# if the error is a ConnectionError, raise immediately so the user
# is notified
if isinstance(error, ConnectionError):
raise error
# otherwise, we're dealing with a ResponseError that might belong
# and/or the pipeline's execute() will raise this error if
return error
elif byte == '+':
pass
elif byte == ':':
response = long(response)
elif byte == '$':
length = int(response)
if length == -1:
return None
response = self._buffer.read(length)
elif byte == '*':
length = int(response)
if length == -1:
return None
response = [self.read_response() for i in xrange(length)]
if isinstance(response, bytes) and self.encoding:
response = response.decode(self.encoding)
return response
class HiredisParser(BaseParser):
def __init__(self, socket_read_size):
if not HIREDIS_AVAILABLE:
raise RedisError("Hiredis is not installed")
self.socket_read_size = socket_read_size
if HIREDIS_USE_BYTE_BUFFER:
self._buffer = bytearray(socket_read_size)
def __del__(self):
try:
self.on_disconnect()
except Exception:
pass
def on_connect(self, connection):
self._sock = connection._sock
kwargs = {
'protocolError': InvalidResponse,
'replyError': self.parse_error,
}
if not HIREDIS_SUPPORTS_CALLABLE_ERRORS:
kwargs['replyError'] = ResponseError
if connection.decode_responses:
kwargs['encoding'] = connection.encoding
self._reader = hiredis.Reader(**kwargs)
self._next_response = False
def on_disconnect(self):
self._sock = None
self._reader = None
self._next_response = False
def can_read(self):
if not self._reader:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
if self._next_response is False:
self._next_response = self._reader.gets()
return self._next_response is not False
def read_response(self):
if not self._reader:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
# _next_response might be cached from a can_read() call
if self._next_response is not False:
response = self._next_response
self._next_response = False
return response
response = self._reader.gets()
socket_read_size = self.socket_read_size
while response is False:
try:
if HIREDIS_USE_BYTE_BUFFER:
bufflen = self._sock.recv_into(self._buffer)
if bufflen == 0:
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
else:
buffer = self._sock.recv(socket_read_size)
# an empty string indicates the server shutdown the socket
if not isinstance(buffer, bytes) or len(buffer) == 0:
raise socket.error(SERVER_CLOSED_CONNECTION_ERROR)
except socket.timeout:
raise TimeoutError("Timeout reading from socket")
except socket.error:
e = sys.exc_info()[1]
raise ConnectionError("Error while reading from socket: %s" %
(e.args,))
if HIREDIS_USE_BYTE_BUFFER:
self._reader.feed(self._buffer, 0, bufflen)
else:
self._reader.feed(buffer)
# proactively, but not conclusively, check if more data is in the
# buffer. if the data received doesn't end with \r\n, there's more.
if HIREDIS_USE_BYTE_BUFFER:
if bufflen > 2 and \
self._buffer[bufflen - 2:bufflen] != SYM_CRLF:
continue
else:
if not buffer.endswith(SYM_CRLF):
continue
response = self._reader.gets()
# if an older version of hiredis is installed, we need to attempt
# to convert ResponseErrors to their appropriate types.
if not HIREDIS_SUPPORTS_CALLABLE_ERRORS:
if isinstance(response, ResponseError):
response = self.parse_error(response.args[0])
elif isinstance(response, list) and response and \
isinstance(response[0], ResponseError):
response[0] = self.parse_error(response[0].args[0])
# if the response is a ConnectionError or the response is a list and
# the first item is a ConnectionError, raise it as something bad
# happened
if isinstance(response, ConnectionError):
raise response
elif isinstance(response, list) and response and \
isinstance(response[0], ConnectionError):
raise response[0]
return response
if HIREDIS_AVAILABLE:
DefaultParser = HiredisParser
else:
DefaultParser = PythonParser
class Connection(object):
description_format = "Connection<host=%(host)s,port=%(port)s,db=%(db)s>"
def __init__(self, host='localhost', port=6379, db=0, password=None,
socket_timeout=None, socket_connect_timeout=None,
socket_keepalive=False, socket_keepalive_options=None,
retry_on_timeout=False, encoding='utf-8',
encoding_errors='strict', decode_responses=False,
parser_class=DefaultParser, socket_read_size=65536):
self.pid = os.getpid()
self.host = host
self.port = int(port)
self.db = db
self.password = password
self.socket_timeout = socket_timeout
self.socket_connect_timeout = socket_connect_timeout or socket_timeout
self.socket_keepalive = socket_keepalive
self.socket_keepalive_options = socket_keepalive_options or {}
self.retry_on_timeout = retry_on_timeout
self.encoding = encoding
self.encoding_errors = encoding_errors
self.decode_responses = decode_responses
self._sock = None
self._parser = parser_class(socket_read_size=socket_read_size)
self._description_args = {
'host': self.host,
'port': self.port,
'db': self.db,
}
self._connect_callbacks = []
def __repr__(self):
return self.description_format % self._description_args
def __del__(self):
try:
self.disconnect()
except Exception:
pass
def register_connect_callback(self, callback):
self._connect_callbacks.append(callback)
def clear_connect_callbacks(self):
self._connect_callbacks = []
def connect(self):
if self._sock:
return
try:
sock = self._connect()
except socket.error:
e = sys.exc_info()[1]
raise ConnectionError(self._error_message(e))
self._sock = sock
try:
self.on_connect()
except RedisError:
# clean up after any error in on_connect
self.disconnect()
raise
# run any user callbacks. right now the only internal callback
# is for pubsub channel/pattern resubscription
for callback in self._connect_callbacks:
callback(self)
def _connect(self):
# we want to mimic what socket.create_connection does to support
# ipv4/ipv6, but we want to set options prior to calling
# socket.connect()
err = None
for res in socket.getaddrinfo(self.host, self.port, 0,
socket.SOCK_STREAM):
family, socktype, proto, canonname, socket_address = res
sock = None
try:
sock = socket.socket(family, socktype, proto)
# TCP_NODELAY
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# TCP_KEEPALIVE
if self.socket_keepalive:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
for k, v in iteritems(self.socket_keepalive_options):
sock.setsockopt(socket.SOL_TCP, k, v)
# set the socket_connect_timeout before we connect
sock.settimeout(self.socket_connect_timeout)
# connect
sock.connect(socket_address)
# set the socket_timeout now that we're connected
sock.settimeout(self.socket_timeout)
return sock
except socket.error as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
raise socket.error("socket.getaddrinfo returned an empty list")
def _error_message(self, exception):
if len(exception.args) == 1:
return "Error connecting to %s:%s. %s." % \
(self.host, self.port, exception.args[0])
else:
return "Error %s connecting to %s:%s. %s." % \
(exception.args[0], self.host, self.port, exception.args[1])
def on_connect(self):
self._parser.on_connect(self)
if self.password:
self.send_command('AUTH', self.password)
if nativestr(self.read_response()) != 'OK':
raise AuthenticationError('Invalid Password')
if self.db:
self.send_command('SELECT', self.db)
if nativestr(self.read_response()) != 'OK':
raise ConnectionError('Invalid Database')
def disconnect(self):
self._parser.on_disconnect()
if self._sock is None:
return
try:
self._sock.shutdown(socket.SHUT_RDWR)
self._sock.close()
except socket.error:
pass
self._sock = None
def send_packed_command(self, command):
if not self._sock:
self.connect()
try:
if isinstance(command, str):
command = [command]
for item in command:
self._sock.sendall(item)
except socket.timeout:
self.disconnect()
raise TimeoutError("Timeout writing to socket")
except socket.error:
e = sys.exc_info()[1]
self.disconnect()
if len(e.args) == 1:
_errno, errmsg = 'UNKNOWN', e.args[0]
else:
_errno, errmsg = e.args
raise ConnectionError("Error %s while writing to socket. %s." %
(_errno, errmsg))
except:
self.disconnect()
raise
def send_command(self, *args):
self.send_packed_command(self.pack_command(*args))
def can_read(self, timeout=0):
sock = self._sock
if not sock:
self.connect()
sock = self._sock
return self._parser.can_read() or \
bool(select([sock], [], [], timeout)[0])
def read_response(self):
try:
response = self._parser.read_response()
except:
self.disconnect()
raise
if isinstance(response, ResponseError):
raise response
return response
def encode(self, value):
if isinstance(value, Token):
return b(value.value)
elif isinstance(value, bytes):
return value
elif isinstance(value, (int, long)):
value = b(str(value))
elif isinstance(value, float):
value = b(repr(value))
elif not isinstance(value, basestring):
value = str(value)
if isinstance(value, unicode):
value = value.encode(self.encoding, self.encoding_errors)
return value
def pack_command(self, *args):
output = []
command = args[0]
if ' ' in command:
args = tuple([Token(s) for s in command.split(' ')]) + args[1:]
else:
args = (Token(command),) + args[1:]
buff = SYM_EMPTY.join(
(SYM_STAR, b(str(len(args))), SYM_CRLF))
for arg in imap(self.encode, args):
if len(buff) > 6000 or len(arg) > 6000:
buff = SYM_EMPTY.join(
(buff, SYM_DOLLAR, b(str(len(arg))), SYM_CRLF))
output.append(buff)
output.append(arg)
buff = SYM_CRLF
else:
buff = SYM_EMPTY.join((buff, SYM_DOLLAR, b(str(len(arg))),
SYM_CRLF, arg, SYM_CRLF))
output.append(buff)
return output
def pack_commands(self, commands):
output = []
pieces = []
buffer_length = 0
for cmd in commands:
for chunk in self.pack_command(*cmd):
pieces.append(chunk)
buffer_length += len(chunk)
if buffer_length > 6000:
output.append(SYM_EMPTY.join(pieces))
buffer_length = 0
pieces = []
if pieces:
output.append(SYM_EMPTY.join(pieces))
return output
class SSLConnection(Connection):
description_format = "SSLConnection<host=%(host)s,port=%(port)s,db=%(db)s>"
def __init__(self, ssl_keyfile=None, ssl_certfile=None, ssl_cert_reqs=None,
ssl_ca_certs=None, **kwargs):
if not ssl_available:
raise RedisError("Python wasn't built with SSL support")
super(SSLConnection, self).__init__(**kwargs)
self.keyfile = ssl_keyfile
self.certfile = ssl_certfile
if ssl_cert_reqs is None:
ssl_cert_reqs = ssl.CERT_NONE
elif isinstance(ssl_cert_reqs, basestring):
CERT_REQS = {
'none': ssl.CERT_NONE,
'optional': ssl.CERT_OPTIONAL,
'required': ssl.CERT_REQUIRED
}
if ssl_cert_reqs not in CERT_REQS:
raise RedisError(
"Invalid SSL Certificate Requirements Flag: %s" %
ssl_cert_reqs)
ssl_cert_reqs = CERT_REQS[ssl_cert_reqs]
self.cert_reqs = ssl_cert_reqs
self.ca_certs = ssl_ca_certs
def _connect(self):
sock = super(SSLConnection, self)._connect()
sock = ssl.wrap_socket(sock,
cert_reqs=self.cert_reqs,
keyfile=self.keyfile,
certfile=self.certfile,
ca_certs=self.ca_certs)
return sock
class UnixDomainSocketConnection(Connection):
description_format = "UnixDomainSocketConnection<path=%(path)s,db=%(db)s>"
def __init__(self, path='', db=0, password=None,
socket_timeout=None, encoding='utf-8',
encoding_errors='strict', decode_responses=False,
retry_on_timeout=False,
parser_class=DefaultParser, socket_read_size=65536):
self.pid = os.getpid()
self.path = path
self.db = db
self.password = password
self.socket_timeout = socket_timeout
self.retry_on_timeout = retry_on_timeout
self.encoding = encoding
self.encoding_errors = encoding_errors
self.decode_responses = decode_responses
self._sock = None
self._parser = parser_class(socket_read_size=socket_read_size)
self._description_args = {
'path': self.path,
'db': self.db,
}
self._connect_callbacks = []
def _connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.socket_timeout)
sock.connect(self.path)
return sock
def _error_message(self, exception):
if len(exception.args) == 1:
return "Error connecting to unix socket: %s. %s." % \
(self.path, exception.args[0])
else:
return "Error %s connecting to unix socket: %s. %s." % \
(exception.args[0], self.path, exception.args[1])
class ConnectionPool(object):
@classmethod
def from_url(cls, url, db=None, decode_components=False, **kwargs):
url_string = url
url = urlparse(url)
qs = ''
# they're left as part of the url.path.
if '?' in url.path and not url.query:
qs = url.path.split('?', 1)[1]
url = urlparse(url_string[:-(len(qs) + 1)])
else:
qs = url.query
url_options = {}
for name, value in iteritems(parse_qs(qs)):
if value and len(value) > 0:
url_options[name] = value[0]
if decode_components:
password = unquote(url.password) if url.password else None
path = unquote(url.path) if url.path else None
hostname = unquote(url.hostname) if url.hostname else None
else:
password = url.password
path = url.path
hostname = url.hostname
if url.scheme == 'unix':
url_options.update({
'password': password,
'path': path,
'connection_class': UnixDomainSocketConnection,
})
else:
url_options.update({
'host': hostname,
'port': int(url.port or 6379),
'password': password,
})
# querystring value wasn't specified
if 'db' not in url_options and path:
try:
url_options['db'] = int(path.replace('/', ''))
except (AttributeError, ValueError):
pass
if url.scheme == 'rediss':
url_options['connection_class'] = SSLConnection
url_options['db'] = int(url_options.get('db', db or 0))
kwargs.update(url_options)
if 'charset' in kwargs:
warnings.warn(DeprecationWarning(
'"charset" is deprecated. Use "encoding" instead'))
kwargs['encoding'] = kwargs.pop('charset')
if 'errors' in kwargs:
warnings.warn(DeprecationWarning(
'"errors" is deprecated. Use "encoding_errors" instead'))
kwargs['encoding_errors'] = kwargs.pop('errors')
return cls(**kwargs)
def __init__(self, connection_class=Connection, max_connections=None,
**connection_kwargs):
max_connections = max_connections or 2 ** 31
if not isinstance(max_connections, (int, long)) or max_connections < 0:
raise ValueError('"max_connections" must be a positive integer')
self.connection_class = connection_class
self.connection_kwargs = connection_kwargs
self.max_connections = max_connections
self.reset()
def __repr__(self):
return "%s<%s>" % (
type(self).__name__,
self.connection_class.description_format % self.connection_kwargs,
)
def reset(self):
self.pid = os.getpid()
self._created_connections = 0
self._available_connections = []
self._in_use_connections = set()
self._check_lock = threading.Lock()
def _checkpid(self):
if self.pid != os.getpid():
with self._check_lock:
if self.pid == os.getpid():
return
self.disconnect()
self.reset()
def get_connection(self, command_name, *keys, **options):
self._checkpid()
try:
connection = self._available_connections.pop()
except IndexError:
connection = self.make_connection()
self._in_use_connections.add(connection)
return connection
def make_connection(self):
if self._created_connections >= self.max_connections:
raise ConnectionError("Too many connections")
self._created_connections += 1
return self.connection_class(**self.connection_kwargs)
def release(self, connection):
self._checkpid()
if connection.pid != self.pid:
return
self._in_use_connections.remove(connection)
self._available_connections.append(connection)
def disconnect(self):
all_conns = chain(self._available_connections,
self._in_use_connections)
for connection in all_conns:
connection.disconnect()
class BlockingConnectionPool(ConnectionPool):
def __init__(self, max_connections=50, timeout=20,
connection_class=Connection, queue_class=LifoQueue,
**connection_kwargs):
self.queue_class = queue_class
self.timeout = timeout
super(BlockingConnectionPool, self).__init__(
connection_class=connection_class,
max_connections=max_connections,
**connection_kwargs)
def reset(self):
self.pid = os.getpid()
self._check_lock = threading.Lock()
self.pool = self.queue_class(self.max_connections)
while True:
try:
self.pool.put_nowait(None)
except Full:
break
self._connections = []
def make_connection(self):
connection = self.connection_class(**self.connection_kwargs)
self._connections.append(connection)
return connection
def get_connection(self, command_name, *keys, **options):
self._checkpid()
# Try and get a connection from the pool. If one isn't available within
connection = None
try:
connection = self.pool.get(block=True, timeout=self.timeout)
except Empty:
raise ConnectionError("No connection available.")
# a new connection to add to the pool.
if connection is None:
connection = self.make_connection()
return connection
def release(self, connection):
# Make sure we haven't changed process.
self._checkpid()
if connection.pid != self.pid:
return
try:
self.pool.put_nowait(connection)
except Full:
pass
def disconnect(self):
for connection in self._connections:
connection.disconnect()
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 790999a21693fa502efc7477382ab61aa05579f8 | size: 2,676 | ext: py | lang: Python
max_stars_repo_path: multi_sample_factory/runner/run.py | max_stars_repo_name: PG642/multi-sample-factory | max_stars_repo_head_hexsha: 2c2bdc698a194a9779161357be40fe9265b55661 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 4 | max_stars_repo_stars_event_min_datetime: 2021-12-09T04:16:27.000Z | max_stars_repo_stars_event_max_datetime: 2021-12-13T08:53:46.000Z
max_issues_repo_path: multi_sample_factory/runner/run.py | max_issues_repo_name: PG642/multi-sample-factory | max_issues_repo_head_hexsha: 2c2bdc698a194a9779161357be40fe9265b55661 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 19 | max_issues_repo_issues_event_min_datetime: 2021-06-09T15:38:52.000Z | max_issues_repo_issues_event_max_datetime: 2022-02-16T11:27:03.000Z
max_forks_repo_path: multi_sample_factory/runner/run.py | max_forks_repo_name: PG642/multi-sample-factory | max_forks_repo_head_hexsha: 2c2bdc698a194a9779161357be40fe9265b55661 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
import importlib
import sys
import argparse
from multi_sample_factory.algorithms.utils.algo_utils import ExperimentStatus
from multi_sample_factory.runner.run_ngc import add_ngc_args
from multi_sample_factory.runner.run_slurm import add_slurm_args
from multi_sample_factory.utils.utils import log
def runner_argparser():
parser = argparse.ArgumentParser()
parser.add_argument('--train_dir', default='./train_dir', type=str, help='Directory for sub-experiments')
parser.add_argument('--run', default=None, type=str,
help='Name of the python module that describes the run, e.g. sample_factory.runner.runs.doom_battle_hybrid')
parser.add_argument('--runner', default='processes', choices=['processes', 'slurm', 'ngc'])
parser.add_argument('--pause_between', default=10, type=int, help='Pause in seconds between processes')
parser.add_argument('--num_gpus', default=1, type=int, help='How many GPUs to use')
parser.add_argument('--experiments_per_gpu', default=-1, type=int, help='How many experiments can we squeeze on a single GPU (-1 for not altering CUDA_VISIBLE_DEVICES at all)')
parser.add_argument('--max_parallel', default=4, type=int, help='Maximum simultaneous experiments')
parser.add_argument('--experiment_suffix', default='', type=str, help='Append this to the name of the experiment dir')
parser = add_slurm_args(parser)
parser = add_ngc_args(parser)
return parser
def parse_args():
args = runner_argparser().parse_args(sys.argv[1:])
return args
def main():
args = parse_args()
try:
# assuming we're given the full name of the module
run_module = importlib.import_module(f'{args.run}')
except ImportError:
try:
run_module = importlib.import_module(f'multi_sample_factory.runner.runs.{args.run}')
except ImportError:
log.error('Could not import the run module')
return ExperimentStatus.FAILURE
run_description = run_module.RUN_DESCRIPTION
run_description.experiment_suffix = args.experiment_suffix
if args.runner == 'processes':
from multi_sample_factory.runner.run_processes import run
run(run_description, args)
elif args.runner == 'slurm':
from multi_sample_factory.runner.run_slurm import run_slurm
run_slurm(run_description, args)
elif args.runner == 'ngc':
from multi_sample_factory.runner.run_ngc import run_ngc
run_ngc(run_description, args)
return ExperimentStatus.SUCCESS
if __name__ == '__main__':
sys.exit(main())
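# Illustrative usage (not part of the original file; the run module name and
# the "python -m" invocation are assumptions):
#   python -m multi_sample_factory.runner.run --run=my_runs.grid_search --runner=slurm
# --run is imported first as a fully qualified module, then relative to
# multi_sample_factory.runner.runs; --runner selects local processes, Slurm,
# or NGC as the launch backend.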
avg_line_length: 40.545455 | max_line_length: 180 | alphanum_fraction: 0.721226
content_no_comment:
import importlib
import sys
import argparse
from multi_sample_factory.algorithms.utils.algo_utils import ExperimentStatus
from multi_sample_factory.runner.run_ngc import add_ngc_args
from multi_sample_factory.runner.run_slurm import add_slurm_args
from multi_sample_factory.utils.utils import log
def runner_argparser():
parser = argparse.ArgumentParser()
parser.add_argument('--train_dir', default='./train_dir', type=str, help='Directory for sub-experiments')
parser.add_argument('--run', default=None, type=str,
help='Name of the python module that describes the run, e.g. sample_factory.runner.runs.doom_battle_hybrid')
parser.add_argument('--runner', default='processes', choices=['processes', 'slurm', 'ngc'])
parser.add_argument('--pause_between', default=10, type=int, help='Pause in seconds between processes')
parser.add_argument('--num_gpus', default=1, type=int, help='How many GPUs to use')
parser.add_argument('--experiments_per_gpu', default=-1, type=int, help='How many experiments can we squeeze on a single GPU (-1 for not altering CUDA_VISIBLE_DEVICES at all)')
parser.add_argument('--max_parallel', default=4, type=int, help='Maximum simultaneous experiments')
parser.add_argument('--experiment_suffix', default='', type=str, help='Append this to the name of the experiment dir')
parser = add_slurm_args(parser)
parser = add_ngc_args(parser)
return parser
def parse_args():
args = runner_argparser().parse_args(sys.argv[1:])
return args
def main():
args = parse_args()
try:
run_module = importlib.import_module(f'{args.run}')
except ImportError:
try:
run_module = importlib.import_module(f'multi_sample_factory.runner.runs.{args.run}')
except ImportError:
log.error('Could not import the run module')
return ExperimentStatus.FAILURE
run_description = run_module.RUN_DESCRIPTION
run_description.experiment_suffix = args.experiment_suffix
if args.runner == 'processes':
from multi_sample_factory.runner.run_processes import run
run(run_description, args)
elif args.runner == 'slurm':
from multi_sample_factory.runner.run_slurm import run_slurm
run_slurm(run_description, args)
elif args.runner == 'ngc':
from multi_sample_factory.runner.run_ngc import run_ngc
run_ngc(run_description, args)
return ExperimentStatus.SUCCESS
if __name__ == '__main__':
sys.exit(main())
| true
| true
|
790999c5115fa406e2d012a87ddce0959de6afdd
| 764
|
py
|
Python
|
examples/sine_tracking.py
|
ericmux/termux2d
|
195571c06b6844726e34cbd5d2643a2425c5f403
|
[
"MIT"
] | null | null | null |
examples/sine_tracking.py
|
ericmux/termux2d
|
195571c06b6844726e34cbd5d2643a2425c5f403
|
[
"MIT"
] | null | null | null |
examples/sine_tracking.py
|
ericmux/termux2d
|
195571c06b6844726e34cbd5d2643a2425c5f403
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import sys
import os
sys.path.append(os.path.abspath(".."))
from termux2d import Canvas, Palette, line, animate, COLOR_RED, COLOR_WHITE
import math
def __main__():
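# frame generator: each yielded frame is a list of (x, y, color) points to draw on the canvas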
i = 0
height = 40
while True:
frame = []
frame.extend([(coords[0],coords[1],COLOR_WHITE) for coords in
line(0,
height,
180,
math.sin(math.radians(i)) * height + height)])
frame.extend([(x/2, height + math.sin(math.radians(x+i)) * height, COLOR_WHITE)
for x in range(0, 360, 2)])
yield frame
i += 2
if __name__ == '__main__':
animate(Canvas(), Palette(), __main__, 1./60)
| 23.151515
| 87
| 0.53534
|
from __future__ import print_function
import sys
import os
sys.path.append(os.path.abspath(".."))
from termux2d import Canvas, Palette, line, animate, COLOR_RED, COLOR_WHITE
import math
def __main__():
i = 0
height = 40
while True:
frame = []
frame.extend([(coords[0],coords[1],COLOR_WHITE) for coords in
line(0,
height,
180,
math.sin(math.radians(i)) * height + height)])
frame.extend([(x/2, height + math.sin(math.radians(x+i)) * height, COLOR_WHITE)
for x in range(0, 360, 2)])
yield frame
i += 2
if __name__ == '__main__':
animate(Canvas(), Palette(), __main__, 1./60)
| true
| true
|
790999d7cfe7e7ecdacf0df07fa17392853db17a
| 261
|
py
|
Python
|
terrascript/kind/r.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/kind/r.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/kind/r.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/kind/r.py
# Automatically generated by tools/makecode.py ()
import warnings
warnings.warn(
"using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2
)
import terrascript
class kind_cluster(terrascript.Resource):
pass
| 18.642857
| 79
| 0.770115
|
import warnings
warnings.warn(
"using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2
)
import terrascript
class kind_cluster(terrascript.Resource):
pass
| true
| true
|
79099ada99b8509b8a2afed26438a40e1ad092d0
| 5,304
|
py
|
Python
|
archdiffer/flask_frontend/request_parser.py
|
Kratochvilova/archdiffer
|
06f2ef0bb232b1ffe46e9d50575c4b79b1cff191
|
[
"MIT"
] | null | null | null |
archdiffer/flask_frontend/request_parser.py
|
Kratochvilova/archdiffer
|
06f2ef0bb232b1ffe46e9d50575c4b79b1cff191
|
[
"MIT"
] | null | null | null |
archdiffer/flask_frontend/request_parser.py
|
Kratochvilova/archdiffer
|
06f2ef0bb232b1ffe46e9d50575c4b79b1cff191
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of Archdiffer and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Created on Sun Mar 4 10:23:41 2018
@author: Pavla Kratochvilova <pavla.kratochvilova@gmail.com>
"""
import operator
import datetime
from flask import request
from .exceptions import BadRequest
def make_datetime(time_string, formats=None):
"""Makes datetime from string based on one of the formats.
:param string time_string: time in string
:param list formats: list of accepted formats
:return datetime.datetime: datetime or None if no format is matched
"""
if formats is None:
formats = [
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%d',
]
for fmt in formats:
try:
return datetime.datetime.strptime(time_string, fmt)
except ValueError:
pass
return None
# Transformation functions
def _dict_transform(string):
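# e.g. "key1:val1;key2:val2" -> {'key1': 'val1', 'key2': 'val2'}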
return dict([item.split(':', 1) for item in string.split(';')])
def _list_transform(string):
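# e.g. "a,b,c" -> ['a', 'b', 'c']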
return string.split(',')
# Transformations of common arguments
_TRANSFORMATIONS = {
'filter_by' : _dict_transform,
'filter' : _list_transform,
'order_by' : _list_transform,
'limit' : lambda x: int(x),
'offset' : lambda x: int(x),
}
# Filters creators
def before(column, name='before'):
"""Make filter template for filtering column values less or equal to
datetime.
:param column: database model
:param string name: name used in the filter template
:return dict: resulting template
"""
return {name: (column, operator.le, make_datetime)}
def after(column, name='after'):
"""Make filter template for filtering column values greater or equal to
datetime.
:param column: database model
:param string name: name used in the filter template
:return dict: resulting template
"""
return {name: (column, operator.ge, make_datetime)}
def time(column, name='time'):
"""Make filter template for filtering column values equal to datetime.
:param column: database model
:param string name: name used in the filter template
:return dict: resulting template
"""
return {name: (column, operator.eq, make_datetime)}
def equals(column, name='id', function=(lambda x: x)):
"""Make filter template for filtering column values equal to value
transformed by given function.
:param column: database model
:param string name: name used in the filter template
:param callable function: function for transforming the value
:return dict: resulting template
"""
return {name: (column, operator.eq, function)}
# Request parser
def parse_request(filters=None, defaults=None):
"""Parse arguments in request according to the _TRANSFORMATIONS or given
filters.
Requests containing other keys are considered invalid.
:param dict filters: dict of filter templates containing for each key
(column, operator, function transforming value from request argument)
:param dict defaults: default values of modifiers
:return dict: dict of parsed arguments
:raises werkzeug.exceptions.BadRequest: if one of the request arguments is
not recognized
"""
if filters is None:
filters = {}
if defaults is not None:
args_dict = defaults.copy()
else:
args_dict = {}
filters_list = []
for key, value in request.args.items():
if key in _TRANSFORMATIONS:
try:
args_dict[key] = _TRANSFORMATIONS[key](value)
except ValueError:
raise BadRequest('Argument has invalid value "%s".' % value)
elif key in filters.keys():
filters_list.append(
filters[key][1](filters[key][0], filters[key][2](value))
)
else:
raise BadRequest('Argument "%s" not recognized.' % key)
if 'filter' not in args_dict.keys():
args_dict['filter'] = []
args_dict['filter'] += filters_list
return args_dict
def get_request_arguments(*names, args_dict=None, invert=False):
"""Get arguments from args_dict or request if they match given names.
:param *names: names of arguments
:param dict args_dict: dict of arguments
:param bool invert: True if names should be excluded instead
:return dict: dict of arguments
"""
if args_dict is None:
args_dict = parse_request()
if invert:
return {k:v for k, v in args_dict.items() if k not in names}
return {k:v for k, v in args_dict.items() if k in names}
def update_modifiers(old_modifiers, new_modifiers):
"""Update modifiers.
:param dict old_modifiers: old modifiers
:param dict new_modifiers: new modifiers
:return dict: resulting modifiers
"""
modifiers = old_modifiers.copy()
for key, value in new_modifiers.items():
if key in old_modifiers:
if _TRANSFORMATIONS.get(key) == _list_transform:
modifiers[key] += value
elif _TRANSFORMATIONS.get(key) == _dict_transform:
modifiers[key].update(value)
else:
modifiers[key] = value
else:
modifiers[key] = value
return modifiers
| 31.760479
| 78
| 0.659879
|
import operator
import datetime
from flask import request
from .exceptions import BadRequest
def make_datetime(time_string, formats=None):
if formats is None:
formats = [
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%d',
]
for fmt in formats:
try:
return datetime.datetime.strptime(time_string, fmt)
except ValueError:
pass
return None
def _dict_transform(string):
return dict([item.split(':', 1) for item in string.split(';')])
def _list_transform(string):
return string.split(',')
_TRANSFORMATIONS = {
'filter_by' : _dict_transform,
'filter' : _list_transform,
'order_by' : _list_transform,
'limit' : lambda x: int(x),
'offset' : lambda x: int(x),
}
def before(column, name='before'):
return {name: (column, operator.le, make_datetime)}
def after(column, name='after'):
return {name: (column, operator.ge, make_datetime)}
def time(column, name='time'):
return {name: (column, operator.eq, make_datetime)}
def equals(column, name='id', function=(lambda x: x)):
return {name: (column, operator.eq, function)}
def parse_request(filters=None, defaults=None):
if filters is None:
filters = {}
if defaults is not None:
args_dict = defaults.copy()
else:
args_dict = {}
filters_list = []
for key, value in request.args.items():
if key in _TRANSFORMATIONS:
try:
args_dict[key] = _TRANSFORMATIONS[key](value)
except ValueError:
raise BadRequest('Argument has invalid value "%s".' % value)
elif key in filters.keys():
filters_list.append(
filters[key][1](filters[key][0], filters[key][2](value))
)
else:
raise BadRequest('Argument "%s" not recognized.' % key)
if 'filter' not in args_dict.keys():
args_dict['filter'] = []
args_dict['filter'] += filters_list
return args_dict
def get_request_arguments(*names, args_dict=None, invert=False):
if args_dict is None:
args_dict = parse_request()
if invert:
return {k:v for k, v in args_dict.items() if k not in names}
return {k:v for k, v in args_dict.items() if k in names}
def update_modifiers(old_modifiers, new_modifiers):
modifiers = old_modifiers.copy()
for key, value in new_modifiers.items():
if key in old_modifiers:
if _TRANSFORMATIONS.get(key) == _list_transform:
modifiers[key] += value
elif _TRANSFORMATIONS.get(key) == _dict_transform:
modifiers[key].update(value)
else:
modifiers[key] = value
else:
modifiers[key] = value
return modifiers
| true
| true
|
79099b1d59797a7de4bf026d2650ab83136c141c
| 6,683
|
py
|
Python
|
test/conftest.py
|
gabriel4649/aws-log-parser
|
d4a8441da0a8d5e6a1d57faded32b66904a7f8a7
|
[
"Apache-2.0"
] | null | null | null |
test/conftest.py
|
gabriel4649/aws-log-parser
|
d4a8441da0a8d5e6a1d57faded32b66904a7f8a7
|
[
"Apache-2.0"
] | null | null | null |
test/conftest.py
|
gabriel4649/aws-log-parser
|
d4a8441da0a8d5e6a1d57faded32b66904a7f8a7
|
[
"Apache-2.0"
] | null | null | null |
from http import cookies
from io import StringIO
import pytest
def log_entry(entry):
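# wrap a raw log line in a file-like object so tests can read it like a log stream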
return StringIO(entry)
@pytest.fixture
def cookie_zip_code():
cookie = cookies.SimpleCookie()
cookie.load(rawdata='zip=98101')
return cookie
@pytest.fixture
def cookie_empty():
cookie = cookies.SimpleCookie()
cookie.load(rawdata='')
return cookie
@pytest.fixture
def cloudfront_entry():
return log_entry('''2014-05-23 01:13:11 FRA2 182 192.0.2.10 GET d111111abcdef8.cloudfront.net /view/my/file.html 200 www.displaymyfiles.com Mozilla/4.0%20(compatible;%20MSIE%205.0b1;%20Mac_PowerPC) - zip=98101 RefreshHit MRVMF7KydIvxMWfJIglgwHQwZsbG2IhRJ07sn9AkKUFSHS9EXAMPLE== d111111abcdef8.cloudfront.net http - 0.001 - - - RefreshHit HTTP/1.1''') # noqa: E501
@pytest.fixture
def cloudfront_entry_broken_cookie():
return log_entry('''2014-05-23 01:13:11 FRA2 182 192.0.2.10 GET d111111abcdef8.cloudfront.net /view/my/file.html 200 www.displaymyfiles.com Mozilla/4.0%20(compatible;%20MSIE%205.0b1;%20Mac_PowerPC) - zip 98101 RefreshHit MRVMF7KydIvxMWfJIglgwHQwZsbG2IhRJ07sn9AkKUFSHS9EXAMPLE== d111111abcdef8.cloudfront.net http - 0.001 - - - RefreshHit HTTP/1.1''') # noqa: E501
@pytest.fixture
def cloudfront_entry2():
return log_entry('''2014-05-23 01:13:12 LAX1 2390282 192.0.2.202 GET d111111abcdef8.cloudfront.net /soundtrack/happy.mp3 304 www.unknownsingers.com Mozilla/4.0%20(compatible;%20MSIE%207.0;%20Windows%20NT%205.1) a=b&c=d zip=98101 Hit xGN7KWpVEmB9Dp7ctcVFQC4E-nrcOcEKS3QyAez--06dV7TEXAMPLE== d111111abcdef8.cloudfront.net http - 0.002 - - - Hit HTTP/1.1''') # noqa: E501
@pytest.fixture
def loadbalancer_http_entry():
return log_entry('''http 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 10.0.0.1:80 0.000 0.001 0.000 200 200 34 366 "GET http://www.example.com:80/?a=b&c=d&zip=98101 HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337262-36d228ad5d99923122bbe354" "-" "-" 0 2018-07-02T22:22:48.364000Z "forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_https_entry():
return log_entry('''https 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 10.0.0.1:80 0.086 0.048 0.037 200 200 0 57 "GET https://www.example.com:443/ HTTP/1.1" "curl/7.46.0" ECDHE-RSA-AES128-GCM-SHA256 TLSv1.2 arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337281-1d84f3d73c47ec4e58577259" "www.example.com" "arn:aws:acm:us-east-2:123456789012:certificate/12345678-1234-1234-1234-123456789012" 1 2018-07-02T22:22:48.364000Z "authenticate,forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_http2_entry():
return log_entry('''h2 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 10.0.1.252:48160 10.0.0.66:9000 0.000 0.002 0.000 200 200 5 257 "GET https://10.0.2.105:773/ HTTP/2.0" "curl/7.46.0" ECDHE-RSA-AES128-GCM-SHA256 TLSv1.2 arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337327-72bd00b0343d75b906739c42" "-" "-" 1 2018-07-02T22:22:48.364000Z "redirect" "https://example.com:80/" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_websockets_entry():
return log_entry('''ws 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 10.0.0.140:40914 10.0.1.192:8010 0.001 0.003 0.000 101 101 218 587 "GET http://10.0.0.30:80/ HTTP/1.1" "-" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 1 2018-07-02T22:22:48.364000Z "forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_secured_websockets_entry():
return log_entry('''wss 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 10.0.0.140:44244 10.0.0.171:8010 0.000 0.001 0.000 101 101 218 786 "GET https://10.0.0.30:443/ HTTP/1.1" "-" ECDHE-RSA-AES128-GCM-SHA256 TLSv1.2 arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 1 2018-07-02T22:22:48.364000Z "forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_lambda_entry():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 200 200 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 0 2018-11-30T22:22:48.364000Z "forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_lambda_failed_entry():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 0 2018-11-30T22:22:48.364000Z "forward" "-" "LambdaInvalidResponse"''') # noqa: E501
@pytest.fixture
def loadbalancer_cloudfront_forward():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 0 2018-11-30T22:22:48.364000Z "waf,forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_cloudfront_forward_refused():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "api.example.com" "session-reused" 0 2018-11-30T22:22:48.364000Z "waf,forward" "-" "-"''') # noqa: E501
@pytest.fixture
def loadbalancer_cloudfront_forward_h2():
return log_entry('''h2 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "api.example.com" "-" 0 2018-11-30T22:22:48.364000Z "waf,forward" "-" "-"''') # noqa: E501
| 75.943182
| 569
| 0.748466
|
from http import cookies
from io import StringIO
import pytest
def log_entry(entry):
return StringIO(entry)
@pytest.fixture
def cookie_zip_code():
cookie = cookies.SimpleCookie()
cookie.load(rawdata='zip=98101')
return cookie
@pytest.fixture
def cookie_empty():
cookie = cookies.SimpleCookie()
cookie.load(rawdata='')
return cookie
@pytest.fixture
def cloudfront_entry():
return log_entry('''2014-05-23 01:13:11 FRA2 182 192.0.2.10 GET d111111abcdef8.cloudfront.net /view/my/file.html 200 www.displaymyfiles.com Mozilla/4.0%20(compatible;%20MSIE%205.0b1;%20Mac_PowerPC) - zip=98101 RefreshHit MRVMF7KydIvxMWfJIglgwHQwZsbG2IhRJ07sn9AkKUFSHS9EXAMPLE== d111111abcdef8.cloudfront.net http - 0.001 - - - RefreshHit HTTP/1.1''')
@pytest.fixture
def cloudfront_entry_broken_cookie():
return log_entry('''2014-05-23 01:13:11 FRA2 182 192.0.2.10 GET d111111abcdef8.cloudfront.net /view/my/file.html 200 www.displaymyfiles.com Mozilla/4.0%20(compatible;%20MSIE%205.0b1;%20Mac_PowerPC) - zip 98101 RefreshHit MRVMF7KydIvxMWfJIglgwHQwZsbG2IhRJ07sn9AkKUFSHS9EXAMPLE== d111111abcdef8.cloudfront.net http - 0.001 - - - RefreshHit HTTP/1.1''')
@pytest.fixture
def cloudfront_entry2():
return log_entry('''2014-05-23 01:13:12 LAX1 2390282 192.0.2.202 GET d111111abcdef8.cloudfront.net /soundtrack/happy.mp3 304 www.unknownsingers.com Mozilla/4.0%20(compatible;%20MSIE%207.0;%20Windows%20NT%205.1) a=b&c=d zip=98101 Hit xGN7KWpVEmB9Dp7ctcVFQC4E-nrcOcEKS3QyAez--06dV7TEXAMPLE== d111111abcdef8.cloudfront.net http - 0.002 - - - Hit HTTP/1.1''')
@pytest.fixture
def loadbalancer_http_entry():
return log_entry('''http 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 10.0.0.1:80 0.000 0.001 0.000 200 200 34 366 "GET http://www.example.com:80/?a=b&c=d&zip=98101 HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337262-36d228ad5d99923122bbe354" "-" "-" 0 2018-07-02T22:22:48.364000Z "forward" "-" "-"''')
@pytest.fixture
def loadbalancer_https_entry():
return log_entry('''https 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 10.0.0.1:80 0.086 0.048 0.037 200 200 0 57 "GET https://www.example.com:443/ HTTP/1.1" "curl/7.46.0" ECDHE-RSA-AES128-GCM-SHA256 TLSv1.2 arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337281-1d84f3d73c47ec4e58577259" "www.example.com" "arn:aws:acm:us-east-2:123456789012:certificate/12345678-1234-1234-1234-123456789012" 1 2018-07-02T22:22:48.364000Z "authenticate,forward" "-" "-"''')
@pytest.fixture
def loadbalancer_http2_entry():
return log_entry('''h2 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 10.0.1.252:48160 10.0.0.66:9000 0.000 0.002 0.000 200 200 5 257 "GET https://10.0.2.105:773/ HTTP/2.0" "curl/7.46.0" ECDHE-RSA-AES128-GCM-SHA256 TLSv1.2 arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337327-72bd00b0343d75b906739c42" "-" "-" 1 2018-07-02T22:22:48.364000Z "redirect" "https://example.com:80/" "-"''')
@pytest.fixture
def loadbalancer_websockets_entry():
return log_entry('''ws 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 10.0.0.140:40914 10.0.1.192:8010 0.001 0.003 0.000 101 101 218 587 "GET http://10.0.0.30:80/ HTTP/1.1" "-" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 1 2018-07-02T22:22:48.364000Z "forward" "-" "-"''')
@pytest.fixture
def loadbalancer_secured_websockets_entry():
return log_entry('''wss 2018-07-02T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 10.0.0.140:44244 10.0.0.171:8010 0.000 0.001 0.000 101 101 218 786 "GET https://10.0.0.30:443/ HTTP/1.1" "-" ECDHE-RSA-AES128-GCM-SHA256 TLSv1.2 arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 1 2018-07-02T22:22:48.364000Z "forward" "-" "-"''')
@pytest.fixture
def loadbalancer_lambda_entry():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 200 200 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 0 2018-11-30T22:22:48.364000Z "forward" "-" "-"''')
@pytest.fixture
def loadbalancer_lambda_failed_entry():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 0 2018-11-30T22:22:48.364000Z "forward" "-" "LambdaInvalidResponse"''')
@pytest.fixture
def loadbalancer_cloudfront_forward():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "-" "-" 0 2018-11-30T22:22:48.364000Z "waf,forward" "-" "-"''')
@pytest.fixture
def loadbalancer_cloudfront_forward_refused():
return log_entry('''http 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "api.example.com" "session-reused" 0 2018-11-30T22:22:48.364000Z "waf,forward" "-" "-"''')
@pytest.fixture
def loadbalancer_cloudfront_forward_h2():
return log_entry('''h2 2018-11-30T22:23:00.186641Z app/my-loadbalancer/50dc6c495c0c9188 192.168.131.39:2817 - 0.000 0.001 0.000 502 - 34 366 "GET http://www.example.com:80/ HTTP/1.1" "curl/7.46.0" - - arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067 "Root=1-58337364-23a8c76965a2ef7629b185e3" "api.example.com" "-" 0 2018-11-30T22:22:48.364000Z "waf,forward" "-" "-"''')
| true
| true
|
79099b44cb43defad3753ef17bae0bfde2232fe9
| 12,957
|
py
|
Python
|
NLP/The_Bottom_up_Evolution_of_Representations_in_the_Transformer/analytics.py
|
Superhzf/PaperImplementation
|
a800a9e2fb52fb70766bf91b52871621e0e1dd55
|
[
"MIT"
] | null | null | null |
NLP/The_Bottom_up_Evolution_of_Representations_in_the_Transformer/analytics.py
|
Superhzf/PaperImplementation
|
a800a9e2fb52fb70766bf91b52871621e0e1dd55
|
[
"MIT"
] | null | null | null |
NLP/The_Bottom_up_Evolution_of_Representations_in_the_Transformer/analytics.py
|
Superhzf/PaperImplementation
|
a800a9e2fb52fb70766bf91b52871621e0e1dd55
|
[
"MIT"
] | null | null | null |
from sklearn.cluster import MiniBatchKMeans
import numpy as np
import torch
from models import TransformerModel, Seq2SeqTransformer, generate_square_subsequent_mask
from models import LM_NAME, MLM_NAME, MT_NAME, NLAYERS, NUM2WORD
import os
from data_preprocessing import DATA_DIR_DEV, DATA_DIR_FULL, SAVE_DATA_MT_TRAIN  # DATA_DIR_FULL is assumed to be defined in data_preprocessing alongside DATA_DIR_DEV
from data_preprocessing import SAVE_VOCAB_SRC, SAVE_VOCAB_TRG, PAD_WORD
import pickle
from torchtext.legacy.data import Dataset, BucketIterator
import pandas as pd
from analytics_helper import MostFreqToken, GetInter, GetMI, GetInterValues
from analytics_helper import MIN_SAMPLE_SIZE_DEV, MIN_SAMPLE_SIZE_FULL
from analytics_helper import N_FREQUENT_DEV, N_FREQUENT_FULL
from analytics_helper import N_CLUSTER_DEV, N_CLUSTER_FULL
from data_preprocessing import SAVE_MODEL_PATH, DEVELOPMENT_MODE
from MT_helpers import patch_trg, create_mask
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
if DEVELOPMENT_MODE:
min_sample_size=MIN_SAMPLE_SIZE_DEV
N_frequent=N_FREQUENT_DEV
N_cluster=N_CLUSTER_DEV
data_dir=DATA_DIR_DEV
else:
min_sample_size=MIN_SAMPLE_SIZE_FULL
N_frequent=N_FREQUENT_FULL
N_cluster=N_CLUSTER_FULL
data_dir=DATA_DIR_FULL
MI_results_INP={LM_NAME.split('.')[0]:[],
f"{MLM_NAME.split('.')[0]}_SAME":[],
f"{MLM_NAME.split('.')[0]}_DIFF":[],
MT_NAME.split('.')[0]:[]}
MI_results_OUT={LM_NAME.split('.')[0]:[],
MLM_NAME.split('.')[0]:[]}
MODELS_INP=[LM_NAME, MLM_NAME, MT_NAME]
vocab_pkl_src = os.path.join(data_dir, SAVE_VOCAB_SRC)
vocab_pkl_trg = os.path.join(data_dir, SAVE_VOCAB_TRG)
train_pkl = os.path.join(data_dir, SAVE_DATA_MT_TRAIN)
field_src = pickle.load(open(vocab_pkl_src, 'rb'))
field_trg = pickle.load(open(vocab_pkl_trg, 'rb'))
src_pad_idx = field_src.vocab.stoi[PAD_WORD]
trg_pad_idx = field_trg.vocab.stoi[PAD_WORD]
train_examples = pickle.load(open(train_pkl, 'rb'))
fields = {'src':field_src , 'trg':field_trg}
train = Dataset(examples=train_examples, fields=fields)
train_iter = BucketIterator(train, batch_size=1, device=device, train=True, shuffle=False)
frequent_vocab = MostFreqToken(field_src, N_frequent, min_sample_size)
# token_reps_list holds NLAYERS dicts; in the i-th dict, the key is a token ID and
# the value is the list of representations collected for that token in the i-th layer.
token_reps_model_INP={}
token_reps_model_OUT={}
for this_model_name in MODELS_INP:
token_reps_list=[]
for _ in range(NLAYERS):
this_token_reps={}
for this_token_id in frequent_vocab:
this_token_reps[this_token_id]=[]
token_reps_list.append(this_token_reps)
if this_model_name.startswith("MLM"):
token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_SAME"]=token_reps_list
token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]=token_reps_list
token_reps_model_OUT[this_model_name.split('.')[0]]=token_reps_list
elif this_model_name.startswith("LM"):
token_reps_model_INP[this_model_name.split('.')[0]]=token_reps_list
token_reps_model_OUT[this_model_name.split('.')[0]]=token_reps_list
elif this_model_name.startswith("MT"):
token_reps_model_INP[this_model_name.split('.')[0]]=token_reps_list
sample_size_dict_INP={}
sample_size_dict_OUT={}
for this_model_name in MODELS_INP:
if this_model_name.startswith("MLM"):
this_sample_size_dict_INP_SAME={}
this_sample_size_dict_INP_DIFF={}
this_sample_size_dict_OUT={}
for this_token_id in frequent_vocab:
this_sample_size_dict_INP_SAME[this_token_id]=0
this_sample_size_dict_INP_DIFF[this_token_id]=0
this_sample_size_dict_OUT[this_token_id]=0
sample_size_dict_INP[f"{this_model_name.split('.')[0]}_SAME"]=this_sample_size_dict_INP_SAME
sample_size_dict_INP[f"{this_model_name.split('.')[0]}_DIFF"]=this_sample_size_dict_INP_DIFF
sample_size_dict_OUT[this_model_name.split('.')[0]]=this_sample_size_dict_OUT
elif this_model_name.startswith("LM"):
this_sample_size_dict_INP={}
this_sample_size_dict_OUT={}
for this_token_id in frequent_vocab:
this_sample_size_dict_INP[this_token_id]=0
this_sample_size_dict_OUT[this_token_id]=0
sample_size_dict_INP[this_model_name.split('.')[0]]=this_sample_size_dict_INP
sample_size_dict_OUT[this_model_name.split('.')[0]]=this_sample_size_dict_OUT
elif this_model_name.startswith("MT"):
this_sample_size_dict_INP={}
for this_token_id in frequent_vocab:
this_sample_size_dict_INP[this_token_id]=0
sample_size_dict_INP[this_model_name.split('.')[0]]=this_sample_size_dict_INP
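# iterate over the training data and collect per-layer representations of the frequent tokens for each model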
for batch in train_iter:
src_seq_MT = batch.src.to(device)
target_sample_INP_MT=GetInter(src_seq_MT.detach().numpy(), frequent_vocab)
src_seq_MLM_SAME = batch.src.to(device)
target_sample_INP_MLM_SAME=GetInter(src_seq_MLM_SAME.detach().numpy(), frequent_vocab)
src_seq=batch.src.to(device)
src_seq_MLM_DIFF = src_seq.clone()
src_mask = generate_square_subsequent_mask(src_seq.size(0))
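# BERT-style masking: roughly 15% of non-padding positions are replaced with the mask token (id 103 here)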
rand_value = torch.rand(src_seq.shape)
rand_mask = (rand_value < 0.15) * (src_seq != src_pad_idx)
mask_idx=(rand_mask.flatten() == True).nonzero().view(-1)
src_seq_MLM_DIFF = src_seq_MLM_DIFF.flatten()
src_seq_MLM_DIFF[mask_idx] = 103
src_seq_MLM_DIFF = src_seq_MLM_DIFF.view(src_seq.size())
target_sample_INP_MLM_DIFF=GetInter(src_seq_MLM_DIFF.detach().numpy(), frequent_vocab)
src_seq_LM = batch.src[:-1]
target_sample_INP_LM=GetInter(src_seq_LM.detach().numpy(), frequent_vocab)
trg = batch.trg
trg_seq_MT, gold = map(lambda x: x.to(device), patch_trg(trg, trg_pad_idx))
trg_seq_MT = trg_seq_MT.to(device)
trg_seq_LM = src_seq[1:].to(device)
target_sample_OUT_LM=GetInter(trg_seq_LM.detach().numpy(), frequent_vocab)
trg_seq_MLM = src_seq
target_sample_OUT_MLM=GetInter(trg_seq_MLM.detach().numpy(), frequent_vocab)
for this_model_name in MODELS_INP:
this_model = torch.load(os.path.join(SAVE_MODEL_PATH,this_model_name))
this_model.eval()
if this_model_name.startswith("MT") and len(target_sample_INP_MT)>0:
src_mask, trg_mask, src_padding_mask, trg_padding_mask = create_mask(src_seq_MT, trg_seq_MT, src_pad_idx, trg_pad_idx)
_ = this_model(src=src_seq_MT,
src_mask=src_mask,
trg=trg_seq_MT,
tgt_mask=trg_mask,
src_padding_mask=src_padding_mask,
tgt_padding_mask=trg_padding_mask,
memory_key_padding_mask=src_padding_mask)
token_reps_list=token_reps_model_INP[MT_NAME.split('.')[0]]
this_sample_size_dict=sample_size_dict_INP[this_model_name.split('.')[0]]
GetInterValues(this_model, target_sample_INP_MT, NUM2WORD, token_reps_list, this_sample_size_dict, min_sample_size, NLAYERS)
elif this_model_name.startswith("MLM"):
if len(target_sample_INP_MLM_SAME)>0:
src_mask = generate_square_subsequent_mask(src_seq_MLM_SAME.size(0))
src_padding_mask = (src_seq_MLM_SAME == src_pad_idx).transpose(0, 1)
_ = this_model(src_seq_MLM_SAME, src_mask.to(device),src_padding_mask.to(device))
token_reps_list=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_SAME"]
this_sample_size_dict=sample_size_dict_INP[f"{this_model_name.split('.')[0]}_SAME"]
GetInterValues(this_model, target_sample_INP_MLM_SAME, NUM2WORD, token_reps_list, this_sample_size_dict, min_sample_size, NLAYERS)
if len(target_sample_INP_MLM_DIFF)>0 and len(target_sample_OUT_MLM)>0:
src_mask = generate_square_subsequent_mask(src_seq_MLM_DIFF.size(0))
src_padding_mask = (src_seq_MLM_DIFF == src_pad_idx).transpose(0, 1)
_ = this_model(src_seq_MLM_DIFF.to(device), src_mask.to(device),src_padding_mask.to(device))
token_reps_list_INP=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]
this_sample_size_dict_INP=sample_size_dict_INP[f"{this_model_name.split('.')[0]}_DIFF"]
token_reps_list_OUT=token_reps_model_OUT[MLM_NAME.split('.')[0]]
this_sample_size_dict_OUT=sample_size_dict_OUT[this_model_name.split('.')[0]]
GetInterValues(this_model, target_sample_INP_MLM_DIFF, NUM2WORD, token_reps_list_INP, this_sample_size_dict_INP, min_sample_size, NLAYERS)
GetInterValues(this_model, target_sample_OUT_MLM, NUM2WORD, token_reps_list_OUT, this_sample_size_dict_OUT, min_sample_size, NLAYERS)
elif this_model_name.startswith("LM") and len(target_sample_INP_LM)>0 and len(target_sample_OUT_LM)>0:
src_mask = generate_square_subsequent_mask(src_seq_LM.size(0))
src_padding_mask = (src_seq_LM == src_pad_idx).transpose(0, 1)
_ = this_model(src_seq_LM, src_mask.to(device),src_padding_mask.to(device))
token_reps_list_INP=token_reps_model_INP[this_model_name.split('.')[0]]
token_reps_list_OUT=token_reps_model_OUT[this_model_name.split('.')[0]]
this_sample_size_dict_INP=sample_size_dict_INP[this_model_name.split('.')[0]]
this_sample_size_dict_OUT=sample_size_dict_OUT[this_model_name.split('.')[0]]
GetInterValues(this_model, target_sample_INP_LM, NUM2WORD, token_reps_list_INP, this_sample_size_dict_INP, min_sample_size, NLAYERS)
GetInterValues(this_model, target_sample_OUT_LM, NUM2WORD, token_reps_list_OUT, this_sample_size_dict_OUT, min_sample_size, NLAYERS)
# we only need to track the minimum sample size collected so far across all tokens and models
this_min_sample_size_inp=float('inf')
this_min_sample_size_out=float('inf')
for model_name, this_sample_size_dict in sample_size_dict_INP.items():
for token_id, size in this_sample_size_dict.items():
if size<this_min_sample_size_inp:
this_min_sample_size_inp=size
for model_name, this_sample_size_dict in sample_size_dict_OUT.items():
for token_id, size in this_sample_size_dict.items():
if size<this_min_sample_size_out:
this_min_sample_size_out=size
is_enough=True
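# verify that every tracked token has accumulated at least min_sample_size representations for every model and layer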
if this_min_sample_size_inp>=min_sample_size and this_min_sample_size_out>=min_sample_size:
for model_name, reps_dict in token_reps_model_INP.items():
if is_enough is False:
break
for this_layer in reps_dict:
if is_enough is False:
break
for token_id, rep_list in this_layer.items():
if len(rep_list)<min_sample_size:
is_enough=False
break
for model_name, reps_list in token_reps_model_OUT.items():
if is_enough is False:
break
for this_layer in reps_list:
if is_enough is False:
break
for token_id, rep_list in this_layer.items():
if len(rep_list)<min_sample_size:
is_enough=False
break
else:
is_enough=False
if is_enough:
break
if is_enough is False:
raise RuntimeError("We have not collected enough data!")
for this_model_name in MODELS_INP:
if this_model_name.startswith("MLM"):
token_reps_list=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_SAME"]
result_list=MI_results_INP[f"{MLM_NAME.split('.')[0]}_SAME"]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
token_reps_list=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]
result_list=MI_results_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
token_reps_list=token_reps_model_OUT[MLM_NAME.split('.')[0]]
result_list=MI_results_OUT[MLM_NAME.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
elif this_model_name.startswith("MT"):
token_reps_list=token_reps_model_INP[this_model_name.split('.')[0]]
result_list=MI_results_INP[this_model_name.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
elif this_model_name.startswith("LM"):
token_reps_list=token_reps_model_INP[this_model_name.split('.')[0]]
result_list=MI_results_INP[this_model_name.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
token_reps_list=token_reps_model_OUT[this_model_name.split('.')[0]]
result_list=MI_results_OUT[this_model_name.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
print("result",MI_results_INP)
print("result",MI_results_OUT)
| 48.347015
| 154
| 0.713668
|
from sklearn.cluster import MiniBatchKMeans
import numpy as np
import torch
from models import TransformerModel, Seq2SeqTransformer, generate_square_subsequent_mask
from models import LM_NAME, MLM_NAME, MT_NAME, NLAYERS, NUM2WORD
import os
from data_preprocessing import DATA_DIR_DEV, DATA_DIR_FULL, SAVE_DATA_MT_TRAIN
from data_preprocessing import SAVE_VOCAB_SRC, SAVE_VOCAB_TRG, PAD_WORD
import pickle
from torchtext.legacy.data import Dataset, BucketIterator
import pandas as pd
from analytics_helper import MostFreqToken, GetInter, GetMI, GetInterValues
from analytics_helper import MIN_SAMPLE_SIZE_DEV, MIN_SAMPLE_SIZE_FULL
from analytics_helper import N_FREQUENT_DEV, N_FREQUENT_FULL
from analytics_helper import N_CLUSTER_DEV, N_CLUSTER_FULL
from data_preprocessing import SAVE_MODEL_PATH, DEVELOPMENT_MODE
from MT_helpers import patch_trg, create_mask
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
if DEVELOPMENT_MODE:
min_sample_size=MIN_SAMPLE_SIZE_DEV
N_frequent=N_FREQUENT_DEV
N_cluster=N_CLUSTER_DEV
data_dir=DATA_DIR_DEV
else:
min_sample_size=MIN_SAMPLE_SIZE_FULL
N_frequent=N_FREQUENT_FULL
N_cluster=N_CLUSTER_FULL
data_dir=DATA_DIR_FULL
MI_results_INP={LM_NAME.split('.')[0]:[],
f"{MLM_NAME.split('.')[0]}_SAME":[],
f"{MLM_NAME.split('.')[0]}_DIFF":[],
MT_NAME.split('.')[0]:[]}
MI_results_OUT={LM_NAME.split('.')[0]:[],
MLM_NAME.split('.')[0]:[]}
MODELS_INP=[LM_NAME, MLM_NAME, MT_NAME]
vocab_pkl_src = os.path.join(data_dir, SAVE_VOCAB_SRC)
vocab_pkl_trg = os.path.join(data_dir, SAVE_VOCAB_TRG)
train_pkl = os.path.join(data_dir, SAVE_DATA_MT_TRAIN)
field_src = pickle.load(open(vocab_pkl_src, 'rb'))
field_trg = pickle.load(open(vocab_pkl_trg, 'rb'))
src_pad_idx = field_src.vocab.stoi[PAD_WORD]
trg_pad_idx = field_trg.vocab.stoi[PAD_WORD]
train_examples = pickle.load(open(train_pkl, 'rb'))
fields = {'src':field_src , 'trg':field_trg}
train = Dataset(examples=train_examples, fields=fields)
train_iter = BucketIterator(train, batch_size=1, device=device, train=True, shuffle=False)
frequent_vocab = MostFreqToken(field_src, N_frequent, min_sample_size)
token_reps_model_INP={}
token_reps_model_OUT={}
for this_model_name in MODELS_INP:
token_reps_list=[]
for _ in range(NLAYERS):
this_token_reps={}
for this_token_id in frequent_vocab:
this_token_reps[this_token_id]=[]
token_reps_list.append(this_token_reps)
if this_model_name.startswith("MLM"):
token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_SAME"]=token_reps_list
token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]=token_reps_list
token_reps_model_OUT[this_model_name.split('.')[0]]=token_reps_list
elif this_model_name.startswith("LM"):
token_reps_model_INP[this_model_name.split('.')[0]]=token_reps_list
token_reps_model_OUT[this_model_name.split('.')[0]]=token_reps_list
elif this_model_name.startswith("MT"):
token_reps_model_INP[this_model_name.split('.')[0]]=token_reps_list
sample_size_dict_INP={}
sample_size_dict_OUT={}
for this_model_name in MODELS_INP:
if this_model_name.startswith("MLM"):
this_sample_size_dict_INP_SAME={}
this_sample_size_dict_INP_DIFF={}
this_sample_size_dict_OUT={}
for this_token_id in frequent_vocab:
this_sample_size_dict_INP_SAME[this_token_id]=0
this_sample_size_dict_INP_DIFF[this_token_id]=0
this_sample_size_dict_OUT[this_token_id]=0
sample_size_dict_INP[f"{this_model_name.split('.')[0]}_SAME"]=this_sample_size_dict_INP_SAME
sample_size_dict_INP[f"{this_model_name.split('.')[0]}_DIFF"]=this_sample_size_dict_INP_DIFF
sample_size_dict_OUT[this_model_name.split('.')[0]]=this_sample_size_dict_OUT
elif this_model_name.startswith("LM"):
this_sample_size_dict_INP={}
this_sample_size_dict_OUT={}
for this_token_id in frequent_vocab:
this_sample_size_dict_INP[this_token_id]=0
this_sample_size_dict_OUT[this_token_id]=0
sample_size_dict_INP[this_model_name.split('.')[0]]=this_sample_size_dict_INP
sample_size_dict_OUT[this_model_name.split('.')[0]]=this_sample_size_dict_OUT
elif this_model_name.startswith("MT"):
this_sample_size_dict_INP={}
for this_token_id in frequent_vocab:
this_sample_size_dict_INP[this_token_id]=0
sample_size_dict_INP[this_model_name.split('.')[0]]=this_sample_size_dict_INP
for batch in train_iter:
src_seq_MT = batch.src.to(device)
target_sample_INP_MT=GetInter(src_seq_MT.detach().numpy(), frequent_vocab)
src_seq_MLM_SAME = batch.src.to(device)
target_sample_INP_MLM_SAME=GetInter(src_seq_MLM_SAME.detach().numpy(), frequent_vocab)
src_seq=batch.src.to(device)
src_seq_MLM_DIFF = src_seq.clone()
src_mask = generate_square_subsequent_mask(src_seq.size(0))
rand_value = torch.rand(src_seq.shape)
rand_mask = (rand_value < 0.15) * (src_seq != src_pad_idx)
mask_idx=(rand_mask.flatten() == True).nonzero().view(-1)
src_seq_MLM_DIFF = src_seq_MLM_DIFF.flatten()
src_seq_MLM_DIFF[mask_idx] = 103
src_seq_MLM_DIFF = src_seq_MLM_DIFF.view(src_seq.size())
target_sample_INP_MLM_DIFF=GetInter(src_seq_MLM_DIFF.detach().numpy(), frequent_vocab)
src_seq_LM = batch.src[:-1]
target_sample_INP_LM=GetInter(src_seq_LM.detach().numpy(), frequent_vocab)
trg = batch.trg
trg_seq_MT, gold = map(lambda x: x.to(device), patch_trg(trg, trg_pad_idx))
trg_seq_MT = trg_seq_MT.to(device)
trg_seq_LM = src_seq[1:].to(device)
target_sample_OUT_LM=GetInter(trg_seq_LM.detach().numpy(), frequent_vocab)
trg_seq_MLM = src_seq
target_sample_OUT_MLM=GetInter(trg_seq_MLM.detach().numpy(), frequent_vocab)
for this_model_name in MODELS_INP:
this_model = torch.load(os.path.join(SAVE_MODEL_PATH,this_model_name))
this_model.eval()
if this_model_name.startswith("MT") and len(target_sample_INP_MT)>0:
src_mask, trg_mask, src_padding_mask, trg_padding_mask = create_mask(src_seq_MT, trg_seq_MT, src_pad_idx, trg_pad_idx)
_ = this_model(src=src_seq_MT,
src_mask=src_mask,
trg=trg_seq_MT,
tgt_mask=trg_mask,
src_padding_mask=src_padding_mask,
tgt_padding_mask=trg_padding_mask,
memory_key_padding_mask=src_padding_mask)
token_reps_list=token_reps_model_INP[MT_NAME.split('.')[0]]
this_sample_size_dict=sample_size_dict_INP[this_model_name.split('.')[0]]
GetInterValues(this_model, target_sample_INP_MT, NUM2WORD, token_reps_list, this_sample_size_dict, min_sample_size, NLAYERS)
elif this_model_name.startswith("MLM"):
if len(target_sample_INP_MLM_SAME)>0:
src_mask = generate_square_subsequent_mask(src_seq_MLM_SAME.size(0))
src_padding_mask = (src_seq_MLM_SAME == src_pad_idx).transpose(0, 1)
_ = this_model(src_seq_MLM_SAME, src_mask.to(device),src_padding_mask.to(device))
token_reps_list=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_SAME"]
this_sample_size_dict=sample_size_dict_INP[f"{this_model_name.split('.')[0]}_SAME"]
GetInterValues(this_model, target_sample_INP_MLM_SAME, NUM2WORD, token_reps_list, this_sample_size_dict, min_sample_size, NLAYERS)
if len(target_sample_INP_MLM_DIFF)>0 and len(target_sample_OUT_MLM)>0:
src_mask = generate_square_subsequent_mask(src_seq_MLM_DIFF.size(0))
src_padding_mask = (src_seq_MLM_DIFF == src_pad_idx).transpose(0, 1)
_ = this_model(src_seq_MLM_DIFF.to(device), src_mask.to(device),src_padding_mask.to(device))
token_reps_list_INP=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]
this_sample_size_dict_INP=sample_size_dict_INP[f"{this_model_name.split('.')[0]}_DIFF"]
token_reps_list_OUT=token_reps_model_OUT[MLM_NAME.split('.')[0]]
this_sample_size_dict_OUT=sample_size_dict_OUT[this_model_name.split('.')[0]]
GetInterValues(this_model, target_sample_INP_MLM_DIFF, NUM2WORD, token_reps_list_INP, this_sample_size_dict_INP, min_sample_size, NLAYERS)
GetInterValues(this_model, target_sample_OUT_MLM, NUM2WORD, token_reps_list_OUT, this_sample_size_dict_OUT, min_sample_size, NLAYERS)
elif this_model_name.startswith("LM") and len(target_sample_INP_LM)>0 and len(target_sample_OUT_LM)>0:
src_mask = generate_square_subsequent_mask(src_seq_LM.size(0))
src_padding_mask = (src_seq_LM == src_pad_idx).transpose(0, 1)
_ = this_model(src_seq_LM, src_mask.to(device),src_padding_mask.to(device))
token_reps_list_INP=token_reps_model_INP[this_model_name.split('.')[0]]
token_reps_list_OUT=token_reps_model_OUT[this_model_name.split('.')[0]]
this_sample_size_dict_INP=sample_size_dict_INP[this_model_name.split('.')[0]]
this_sample_size_dict_OUT=sample_size_dict_OUT[this_model_name.split('.')[0]]
GetInterValues(this_model, target_sample_INP_LM, NUM2WORD, token_reps_list_INP, this_sample_size_dict_INP, min_sample_size, NLAYERS)
GetInterValues(this_model, target_sample_OUT_LM, NUM2WORD, token_reps_list_OUT, this_sample_size_dict_OUT, min_sample_size, NLAYERS)
this_min_sample_size_inp=float('inf')
this_min_sample_size_out=float('inf')
for model_name, this_sample_size_dict in sample_size_dict_INP.items():
for token_id, size in this_sample_size_dict.items():
if size<this_min_sample_size_inp:
this_min_sample_size_inp=size
for model_name, this_sample_size_dict in sample_size_dict_OUT.items():
for token_id, size in this_sample_size_dict.items():
if size<this_min_sample_size_out:
this_min_sample_size_out=size
is_enough=True
if this_min_sample_size_inp>=min_sample_size and this_min_sample_size_out>=min_sample_size:
for model_name, reps_dict in token_reps_model_INP.items():
if is_enough is False:
break
for this_layer in reps_dict:
if is_enough is False:
break
for token_id, rep_list in this_layer.items():
if len(rep_list)<min_sample_size:
is_enough=False
break
for model_name, reps_list in token_reps_model_OUT.items():
if is_enough is False:
break
for this_layer in reps_list:
if is_enough is False:
break
for token_id, rep_list in this_layer.items():
if len(rep_list)<min_sample_size:
is_enough=False
break
else:
is_enough=False
if is_enough:
break
if is_enough is False:
raise RuntimeError("We have not collected enough data!")
for this_model_name in MODELS_INP:
if this_model_name.startswith("MLM"):
token_reps_list=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_SAME"]
result_list=MI_results_INP[f"{MLM_NAME.split('.')[0]}_SAME"]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
token_reps_list=token_reps_model_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]
result_list=MI_results_INP[f"{MLM_NAME.split('.')[0]}_DIFF"]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
token_reps_list=token_reps_model_OUT[MLM_NAME.split('.')[0]]
result_list=MI_results_OUT[MLM_NAME.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
elif this_model_name.startswith("MT"):
token_reps_list=token_reps_model_INP[this_model_name.split('.')[0]]
result_list=MI_results_INP[this_model_name.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
elif this_model_name.startswith("LM"):
token_reps_list=token_reps_model_INP[this_model_name.split('.')[0]]
result_list=MI_results_INP[this_model_name.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
token_reps_list=token_reps_model_OUT[this_model_name.split('.')[0]]
result_list=MI_results_OUT[this_model_name.split('.')[0]]
GetMI(token_reps_list, N_frequent, N_cluster, NLAYERS, result_list)
print("result",MI_results_INP)
print("result",MI_results_OUT)
| true
| true
|
79099b5d96e04a25696ac170bffa086b2a067be6
| 9,539
|
py
|
Python
|
conan/tools/system/package_manager.py
|
sthagen/conan-io-conan
|
12b4c90d911ef848d8a6d9aeccd551a5441003f3
|
[
"MIT"
] | null | null | null |
conan/tools/system/package_manager.py
|
sthagen/conan-io-conan
|
12b4c90d911ef848d8a6d9aeccd551a5441003f3
|
[
"MIT"
] | null | null | null |
conan/tools/system/package_manager.py
|
sthagen/conan-io-conan
|
12b4c90d911ef848d8a6d9aeccd551a5441003f3
|
[
"MIT"
] | null | null | null |
import platform
from conans.client.graph.graph import CONTEXT_BUILD
from conans.errors import ConanException
class _SystemPackageManagerTool(object):
mode_check = "check"
mode_install = "install"
tool_name = None
install_command = ""
update_command = ""
check_command = ""
def __init__(self, conanfile):
self._conanfile = conanfile
self._active_tool = self._conanfile.conf.get("tools.system.package_manager:tool", default=self.get_default_tool())
self._sudo = self._conanfile.conf.get("tools.system.package_manager:sudo", default=False, check_type=bool)
self._sudo_askpass = self._conanfile.conf.get("tools.system.package_manager:sudo_askpass", default=False, check_type=bool)
self._mode = self._conanfile.conf.get("tools.system.package_manager:mode", default=self.mode_check)
self._arch = self._conanfile.settings_build.get_safe('arch') \
if self._conanfile.context == CONTEXT_BUILD else self._conanfile.settings.get_safe('arch')
self._arch_names = {}
self._arch_separator = ""
def get_default_tool(self):
os_name = platform.system()
if os_name in ["Linux", "FreeBSD"]:
import distro
os_name = distro.id() or os_name
elif os_name == "Windows" and self._conanfile.conf.get("tools.microsoft.bash:subsystem") == "msys2":
os_name = "msys2"
manager_mapping = {"apt-get": ["Linux", "ubuntu", "debian"],
"yum": ["pidora", "scientific", "xenserver", "amazon", "oracle", "amzn",
"almalinux", "rocky"],
"dnf": ["fedora", "rhel", "centos", "mageia"],
"brew": ["Darwin"],
"pacman": ["arch", "manjaro", "msys2"],
"choco": ["Windows"],
"zypper": ["opensuse", "sles"],
"pkg": ["freebsd"],
"pkgutil": ["Solaris"]}
for tool, distros in manager_mapping.items():
if os_name in distros:
return tool
def get_package_name(self, package):
# TODO: should we only add the arch if cross-building?
if self._arch in self._arch_names:
return "{}{}{}".format(package, self._arch_separator,
self._arch_names.get(self._arch))
return package
@property
def sudo_str(self):
sudo = "sudo " if self._sudo else ""
askpass = "-A " if self._sudo and self._sudo_askpass else ""
return "{}{}".format(sudo, askpass)
def run(self, method, *args, **kwargs):
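# only execute the requested action when this tool matches the configured system package manager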
if self._active_tool == self.__class__.tool_name:
return method(*args, **kwargs)
def install(self, *args, **kwargs):
return self.run(self._install, *args, **kwargs)
def update(self, *args, **kwargs):
return self.run(self._update, *args, **kwargs)
def check(self, *args, **kwargs):
return self.run(self._check, *args, **kwargs)
def _install(self, packages, update=False, check=True, **kwargs):
if update:
self.update()
if check:
packages = self.check(packages)
if self._mode == self.mode_check and packages:
raise ConanException("System requirements: '{0}' are missing but can't install "
"because tools.system.package_manager:mode is '{1}'."
"Please update packages manually or set "
"'tools.system.package_manager:mode' "
"to '{2}' in the [conf] section of the profile, "
"or in the command line using "
"'-c tools.system.package_manager:mode={2}'".format(", ".join(packages),
self.mode_check,
self.mode_install))
elif packages:
packages_arch = [self.get_package_name(package) for package in packages]
if packages_arch:
command = self.install_command.format(sudo=self.sudo_str,
tool=self.tool_name,
packages=" ".join(packages_arch),
**kwargs)
return self._conanfile.run(command)
else:
self._conanfile.output.info("System requirements: {} already "
"installed".format(" ".join(packages)))
def _update(self):
if self._mode == self.mode_check:
raise ConanException("Can't update because tools.system.package_manager:mode is '{0}'."
"Please update packages manually or set "
"'tools.system.package_manager:mode' "
"to '{1}' in the [conf] section of the profile, "
"or in the command line using "
"'-c tools.system.package_manager:mode={1}'".format(self.mode_check,
self.mode_install))
command = self.update_command.format(sudo=self.sudo_str, tool=self.tool_name)
return self._conanfile.run(command)
def _check(self, packages):
missing = [pkg for pkg in packages if self.check_package(self.get_package_name(pkg)) != 0]
return missing
def check_package(self, package):
command = self.check_command.format(tool=self.tool_name,
package=package)
return self._conanfile.run(command, ignore_errors=True)
class Apt(_SystemPackageManagerTool):
# TODO: apt? apt-get?
tool_name = "apt-get"
install_command = "{sudo}{tool} install -y {recommends}{packages}"
update_command = "{sudo}{tool} update"
check_command = "dpkg-query -W -f='${{Status}}' {package} | grep -q \"ok installed\""
def __init__(self, conanfile, arch_names=None):
super(Apt, self).__init__(conanfile)
self._arch_names = {"x86_64": "amd64",
"x86": "i386",
"ppc32": "powerpc",
"ppc64le": "ppc64el",
"armv7": "arm",
"armv7hf": "armhf",
"armv8": "arm64",
"s390x": "s390x"} if arch_names is None else arch_names
self._arch_separator = ":"
def install(self, packages, update=False, check=False, recommends=False):
recommends_str = '' if recommends else '--no-install-recommends '
return super(Apt, self).install(packages, update=update, check=check,
recommends=recommends_str)
class Yum(_SystemPackageManagerTool):
tool_name = "yum"
install_command = "{sudo}{tool} install -y {packages}"
update_command = "{sudo}{tool} check-update -y"
check_command = "rpm -q {package}"
def __init__(self, conanfile, arch_names=None):
super(Yum, self).__init__(conanfile)
self._arch_names = {"x86_64": "x86_64",
"x86": "i?86",
"ppc32": "powerpc",
"ppc64le": "ppc64le",
"armv7": "armv7",
"armv7hf": "armv7hl",
"armv8": "aarch64",
"s390x": "s390x"} if arch_names is None else arch_names
self._arch_separator = "."
class Dnf(Yum):
tool_name = "dnf"
class Brew(_SystemPackageManagerTool):
tool_name = "brew"
install_command = "{sudo}{tool} install {packages}"
update_command = "{sudo}{tool} update"
check_command = 'test -n "$({tool} ls --versions {package})"'
class Pkg(_SystemPackageManagerTool):
tool_name = "pkg"
install_command = "{sudo}{tool} install -y {packages}"
update_command = "{sudo}{tool} update"
check_command = "{tool} info {package}"
class PkgUtil(_SystemPackageManagerTool):
tool_name = "pkgutil"
install_command = "{sudo}{tool} --install --yes {packages}"
update_command = "{sudo}{tool} --catalog"
check_command = 'test -n "`{tool} --list {package}`"'
class Chocolatey(_SystemPackageManagerTool):
tool_name = "choco"
install_command = "{tool} --install --yes {packages}"
update_command = "{tool} outdated"
check_command = '{tool} search --local-only --exact {package} | ' \
'findstr /c:"1 packages installed."'
class PacMan(_SystemPackageManagerTool):
tool_name = "pacman"
install_command = "{sudo}{tool} -S --noconfirm {packages}"
update_command = "{sudo}{tool} -Syyu --noconfirm"
check_command = "{tool} -Qi {package}"
def __init__(self, conanfile, arch_names=None):
super(PacMan, self).__init__(conanfile)
self._arch_names = {"x86": "lib32"} if arch_names is None else arch_names
self._arch_separator = "-"
class Zypper(_SystemPackageManagerTool):
tool_name = "zypper"
install_command = "{sudo}{tool} --non-interactive in {packages}"
update_command = "{sudo}{tool} --non-interactive ref"
check_command = "rpm -q {package}"
| 43.557078
| 130
| 0.547437
|
import platform
from conans.client.graph.graph import CONTEXT_BUILD
from conans.errors import ConanException
class _SystemPackageManagerTool(object):
mode_check = "check"
mode_install = "install"
tool_name = None
install_command = ""
update_command = ""
check_command = ""
def __init__(self, conanfile):
self._conanfile = conanfile
self._active_tool = self._conanfile.conf.get("tools.system.package_manager:tool", default=self.get_default_tool())
self._sudo = self._conanfile.conf.get("tools.system.package_manager:sudo", default=False, check_type=bool)
self._sudo_askpass = self._conanfile.conf.get("tools.system.package_manager:sudo_askpass", default=False, check_type=bool)
self._mode = self._conanfile.conf.get("tools.system.package_manager:mode", default=self.mode_check)
self._arch = self._conanfile.settings_build.get_safe('arch') \
if self._conanfile.context == CONTEXT_BUILD else self._conanfile.settings.get_safe('arch')
self._arch_names = {}
self._arch_separator = ""
def get_default_tool(self):
os_name = platform.system()
if os_name in ["Linux", "FreeBSD"]:
import distro
os_name = distro.id() or os_name
elif os_name == "Windows" and self._conanfile.conf.get("tools.microsoft.bash:subsystem") == "msys2":
os_name = "msys2"
manager_mapping = {"apt-get": ["Linux", "ubuntu", "debian"],
"yum": ["pidora", "scientific", "xenserver", "amazon", "oracle", "amzn",
"almalinux", "rocky"],
"dnf": ["fedora", "rhel", "centos", "mageia"],
"brew": ["Darwin"],
"pacman": ["arch", "manjaro", "msys2"],
"choco": ["Windows"],
"zypper": ["opensuse", "sles"],
"pkg": ["freebsd"],
"pkgutil": ["Solaris"]}
for tool, distros in manager_mapping.items():
if os_name in distros:
return tool
def get_package_name(self, package):
if self._arch in self._arch_names:
return "{}{}{}".format(package, self._arch_separator,
self._arch_names.get(self._arch))
return package
@property
def sudo_str(self):
sudo = "sudo " if self._sudo else ""
askpass = "-A " if self._sudo and self._sudo_askpass else ""
return "{}{}".format(sudo, askpass)
def run(self, method, *args, **kwargs):
if self._active_tool == self.__class__.tool_name:
return method(*args, **kwargs)
def install(self, *args, **kwargs):
return self.run(self._install, *args, **kwargs)
def update(self, *args, **kwargs):
return self.run(self._update, *args, **kwargs)
def check(self, *args, **kwargs):
return self.run(self._check, *args, **kwargs)
def _install(self, packages, update=False, check=True, **kwargs):
if update:
self.update()
if check:
packages = self.check(packages)
if self._mode == self.mode_check and packages:
raise ConanException("System requirements: '{0}' are missing but can't install "
"because tools.system.package_manager:mode is '{1}'."
"Please update packages manually or set "
"'tools.system.package_manager:mode' "
"to '{2}' in the [conf] section of the profile, "
"or in the command line using "
"'-c tools.system.package_manager:mode={2}'".format(", ".join(packages),
self.mode_check,
self.mode_install))
elif packages:
packages_arch = [self.get_package_name(package) for package in packages]
if packages_arch:
command = self.install_command.format(sudo=self.sudo_str,
tool=self.tool_name,
packages=" ".join(packages_arch),
**kwargs)
return self._conanfile.run(command)
else:
self._conanfile.output.info("System requirements: {} already "
"installed".format(" ".join(packages)))
def _update(self):
if self._mode == self.mode_check:
raise ConanException("Can't update because tools.system.package_manager:mode is '{0}'."
"Please update packages manually or set "
"'tools.system.package_manager:mode' "
"to '{1}' in the [conf] section of the profile, "
"or in the command line using "
"'-c tools.system.package_manager:mode={1}'".format(self.mode_check,
self.mode_install))
command = self.update_command.format(sudo=self.sudo_str, tool=self.tool_name)
return self._conanfile.run(command)
def _check(self, packages):
missing = [pkg for pkg in packages if self.check_package(self.get_package_name(pkg)) != 0]
return missing
def check_package(self, package):
command = self.check_command.format(tool=self.tool_name,
package=package)
return self._conanfile.run(command, ignore_errors=True)
class Apt(_SystemPackageManagerTool):
tool_name = "apt-get"
install_command = "{sudo}{tool} install -y {recommends}{packages}"
update_command = "{sudo}{tool} update"
check_command = "dpkg-query -W -f='${{Status}}' {package} | grep -q \"ok installed\""
def __init__(self, conanfile, arch_names=None):
super(Apt, self).__init__(conanfile)
self._arch_names = {"x86_64": "amd64",
"x86": "i386",
"ppc32": "powerpc",
"ppc64le": "ppc64el",
"armv7": "arm",
"armv7hf": "armhf",
"armv8": "arm64",
"s390x": "s390x"} if arch_names is None else arch_names
self._arch_separator = ":"
def install(self, packages, update=False, check=False, recommends=False):
recommends_str = '' if recommends else '--no-install-recommends '
return super(Apt, self).install(packages, update=update, check=check,
recommends=recommends_str)
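    # For illustration: with the default arch_names above and a host arch of
    # "x86", get_package_name("libssl-dev") would return "libssl-dev:i386",
    # matching Debian/Ubuntu multi-arch package naming ("libssl-dev" is just a
    # placeholder package name).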
class Yum(_SystemPackageManagerTool):
tool_name = "yum"
install_command = "{sudo}{tool} install -y {packages}"
update_command = "{sudo}{tool} check-update -y"
check_command = "rpm -q {package}"
def __init__(self, conanfile, arch_names=None):
super(Yum, self).__init__(conanfile)
self._arch_names = {"x86_64": "x86_64",
"x86": "i?86",
"ppc32": "powerpc",
"ppc64le": "ppc64le",
"armv7": "armv7",
"armv7hf": "armv7hl",
"armv8": "aarch64",
"s390x": "s390x"} if arch_names is None else arch_names
self._arch_separator = "."
class Dnf(Yum):
tool_name = "dnf"
class Brew(_SystemPackageManagerTool):
tool_name = "brew"
install_command = "{sudo}{tool} install {packages}"
update_command = "{sudo}{tool} update"
check_command = 'test -n "$({tool} ls --versions {package})"'
class Pkg(_SystemPackageManagerTool):
tool_name = "pkg"
install_command = "{sudo}{tool} install -y {packages}"
update_command = "{sudo}{tool} update"
check_command = "{tool} info {package}"
class PkgUtil(_SystemPackageManagerTool):
tool_name = "pkgutil"
install_command = "{sudo}{tool} --install --yes {packages}"
update_command = "{sudo}{tool} --catalog"
check_command = 'test -n "`{tool} --list {package}`"'
class Chocolatey(_SystemPackageManagerTool):
tool_name = "choco"
install_command = "{tool} --install --yes {packages}"
update_command = "{tool} outdated"
check_command = '{tool} search --local-only --exact {package} | ' \
'findstr /c:"1 packages installed."'
class PacMan(_SystemPackageManagerTool):
tool_name = "pacman"
install_command = "{sudo}{tool} -S --noconfirm {packages}"
update_command = "{sudo}{tool} -Syyu --noconfirm"
check_command = "{tool} -Qi {package}"
def __init__(self, conanfile, arch_names=None):
super(PacMan, self).__init__(conanfile)
self._arch_names = {"x86": "lib32"} if arch_names is None else arch_names
self._arch_separator = "-"
class Zypper(_SystemPackageManagerTool):
tool_name = "zypper"
install_command = "{sudo}{tool} --non-interactive in {packages}"
update_command = "{sudo}{tool} --non-interactive ref"
check_command = "rpm -q {package}"
| true
| true
|
79099ba4a5b6b0bc5244adcb812757a2da69ab65
| 9,247
|
py
|
Python
|
database_query_handler.py
|
jdwinkler/dbpedia_service
|
da4ffe5c6d271bc55556150b63d894a57e1d1396
|
[
"MIT"
] | null | null | null |
database_query_handler.py
|
jdwinkler/dbpedia_service
|
da4ffe5c6d271bc55556150b63d894a57e1d1396
|
[
"MIT"
] | null | null | null |
database_query_handler.py
|
jdwinkler/dbpedia_service
|
da4ffe5c6d271bc55556150b63d894a57e1d1396
|
[
"MIT"
] | null | null | null |
import psycopg2
import psycopg2.extras
class DBHandler:
"""
Handles I/O concerning the database to hide its implementation from client services.
"""
def __init__(self,
postgres_username=None,
postgres_password=None,
db_username='dbpedia_app',
db_password='dummy_password'):
# ordinarily you would get these from some secret store
# e.g. heroku has a specific url that you parse to get both
# or os.environ storage (like those used for API keys and the like)
user_name = db_username
password = db_password
# check to see if the db exists locally, create it if necessary
if postgres_password is not None and postgres_username is not None:
try:
connection = psycopg2.connect("dbname='postgres' user='%s' "
"host='localhost' password='%s'"
% (postgres_username, postgres_password))
connection.autocommit = True
cursor = connection.cursor()
# queries the postgres catalog to see if 'dbpedia' exists
# if not, creates it
cursor.execute("SELECT COUNT(*) = 0 FROM pg_catalog.pg_database WHERE datname = 'dbpedia'")
not_exists_row = cursor.fetchone()
not_exists = not_exists_row[0]
if not_exists:
cursor.execute("CREATE USER %s PASSWORD '%s'" % (user_name, password))
cursor.execute('CREATE DATABASE dbpedia OWNER %s' % (user_name,))
connection.close()
except:
# Presume if credentials are passed the user wants to perform this check/DB construction
# fail via error propagation
raise
try:
self.connection = psycopg2.connect("dbname='dbpedia' user='%s' host='localhost' password='%s'"
% (user_name, password))
except:
raise AssertionError('Failed to connect to dbpedia database. Has the local dbpedia been created?')
def __del__(self):
self.connection.close()
def commit(self):
self.connection.commit()
def schema_exists(self):
"""
Checks the estimated number of tuples in the subjects table to determine if data exists
:return:
"""
with self.connection.cursor() as cursor:
cursor.execute('select reltuples FROM pg_class where relname = %s', ('subjects',))
result = cursor.fetchone()[0]
return result > 0
def build_table_schema(self, schema_name, schema_file_path):
"""
Loads the dbpedia schema used for supporting downstream analysis. If the schema already exists, it is
dropped (deleted) and recreated.
:param schema_name:
:param schema_file_path:
:return:
"""
# do not call with user input given the manual query construction here
with self.connection.cursor() as cursor:
cursor.execute('DROP SCHEMA IF EXISTS %s CASCADE' % schema_name)
            with open(schema_file_path) as schema_file:
                cursor.execute(schema_file.read())
def build_indices(self):
"""
Builds the following indices:
Index on name for subjects
Index on predicate for predicate_object
Index on subject_id for predicate object
:return:
"""
with self.connection.cursor() as cursor:
cursor.execute('DROP INDEX IF EXISTS dbpedia.pv_subject_id_idx')
cursor.execute('DROP INDEX IF EXISTS dbpedia.subject_idx')
cursor.execute('DROP INDEX IF EXISTS dbpedia.pv_predicate_idx')
cursor.execute('create index subject_idx on dbpedia.subjects (name)')
cursor.execute('create index pv_subject_id_idx on dbpedia.predicate_object (subject_id)')
cursor.execute('create index pv_predicate_idx on dbpedia.predicate_object (predicate);')
def insert_spo_tuple(self, spo_tuple):
"""
Handles the insertion of spo tuples into the db. Workflow:
Attempt to find the subject table entry corresponding to your subject. If found, use that ID for
inserting your po values. Otherwise, insert your subject into the subject table and use that ID
instead. The resulting id, predicate, object tuple is then inserted into the predicate_object table.
:param spo_tuple:
:return:
"""
(subject, predicate, db_object) = spo_tuple
with self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cursor:
cursor.execute('select subject_id from dbpedia.subjects '
'where name = %s', (subject,))
results = cursor.fetchone()
if results is None or len(results) == 0:
cursor.execute('INSERT INTO dbpedia.subjects (name) VALUES (%s) '
'returning subject_id', (subject,))
results = cursor.fetchone()
id = results['subject_id']
# now we have the correct id in either case, insert the values into the db
cursor.execute('INSERT INTO dbpedia.predicate_object (subject_id, predicate, object) '
'VALUES (%s, %s, %s)', (id, predicate, db_object))
def get_person_metadata(self, person_name, use_exact_match=False):
"""
Returns all metadata associated with the provided person_name. However, does not actually check
to see if the identifier corresponds to a person or not; the class of the identifier will
be included in the returned metadata though. DBPedia People only contains people predicate
types as well.
Use_exact_match toggles between two behaviors: if True, then uses the exact identifier provided
to query against the subject table (WHERE = identifier). If False, uses the LIKE operator
to attempt to find similar IDs that are not exactly the same. Results will still be a superset
of the use_exact_match = True case.
:param person_name:
:param use_exact_match:
:return:
"""
        # wikipedia replaces all spaces with underscores
        # upper-case both sides so the comparison is case-insensitive
person_name = person_name.replace(' ', '_').upper()
with self.connection.cursor() as cursor:
# get id associated with this person
# get all similar IDs
if not use_exact_match:
cursor.execute('SELECT subject_id, name FROM dbpedia.subjects WHERE upper(name) '
'LIKE %s',
('%%' + person_name + '%%',))
else:
cursor.execute('SELECT subject_id, name FROM dbpedia.subjects WHERE upper(name) = %s',
(person_name,))
results = cursor.fetchall()
# no person matches the input name
# return empty list
if results is None:
return []
subject_id_list = [x[0] for x in results]
# get all metadata associated with the subject_ids
cursor.execute('select dbpedia.subjects.name, predicate, object '
'FROM dbpedia.predicate_object '
'INNER JOIN dbpedia.subjects on (dbpedia.subjects.subject_id = dbpedia.predicate_object.subject_id) '
'WHERE dbpedia.predicate_object.subject_id = ANY(%s)', (subject_id_list,))
# this should never be none
# Sort results by name and return
return sorted(cursor.fetchall(), key=lambda x: x[0])
def get_tuples_by_predicate(self, predicate_of_interest):
"""
Extracts SPO tuples based on the predicate value passed to the function. This query will be slow since
you are querying such a large fraction of the po table at once (unless your predicate does not exist).
Predicates:
Name
Type
Gender
Description
Birthdate
GivenName
Surname
BirthPlace
DeathDate
DeathPlace
:param predicate_of_interest:
:return:
"""
with self.connection.cursor() as cursor:
cursor.execute('select dbpedia.subjects.name, '
'predicate, '
'object '
'FROM dbpedia.predicate_object '
'INNER JOIN dbpedia.subjects on (dbpedia.subjects.subject_id = dbpedia.predicate_object.subject_id) '
'WHERE upper(dbpedia.predicate_object.predicate) = upper(%s)', (predicate_of_interest,))
results = cursor.fetchall()
if results is None:
return []
else:
return results
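# Minimal usage sketch (illustrative only): the credentials, schema file path
# and tuple below are placeholders, and 'schema.sql' is assumed to create the
# dbpedia.subjects and dbpedia.predicate_object tables used above.
if __name__ == '__main__':
    handler = DBHandler(postgres_username='postgres', postgres_password='postgres')
    handler.build_table_schema('dbpedia', 'schema.sql')
    handler.build_indices()
    handler.insert_spo_tuple(('Alan_Turing', 'Description', 'English mathematician'))
    handler.commit()
    for name, predicate, db_object in handler.get_person_metadata('Alan Turing'):
        print(name, predicate, db_object)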
| 36.549407
| 128
| 0.583324
|
import psycopg2
import psycopg2.extras
class DBHandler:
def __init__(self,
postgres_username=None,
postgres_password=None,
db_username='dbpedia_app',
db_password='dummy_password'):
user_name = db_username
password = db_password
if postgres_password is not None and postgres_username is not None:
try:
connection = psycopg2.connect("dbname='postgres' user='%s' "
"host='localhost' password='%s'"
% (postgres_username, postgres_password))
connection.autocommit = True
cursor = connection.cursor()
cursor.execute("SELECT COUNT(*) = 0 FROM pg_catalog.pg_database WHERE datname = 'dbpedia'")
not_exists_row = cursor.fetchone()
not_exists = not_exists_row[0]
if not_exists:
cursor.execute("CREATE USER %s PASSWORD '%s'" % (user_name, password))
cursor.execute('CREATE DATABASE dbpedia OWNER %s' % (user_name,))
connection.close()
except:
raise
try:
self.connection = psycopg2.connect("dbname='dbpedia' user='%s' host='localhost' password='%s'"
% (user_name, password))
except:
raise AssertionError('Failed to connect to dbpedia database. Has the local dbpedia been created?')
def __del__(self):
self.connection.close()
def commit(self):
self.connection.commit()
def schema_exists(self):
with self.connection.cursor() as cursor:
cursor.execute('select reltuples FROM pg_class where relname = %s', ('subjects',))
result = cursor.fetchone()[0]
return result > 0
def build_table_schema(self, schema_name, schema_file_path):
with self.connection.cursor() as cursor:
cursor.execute('DROP SCHEMA IF EXISTS %s CASCADE' % schema_name)
            with open(schema_file_path) as schema_file:
                cursor.execute(schema_file.read())
def build_indices(self):
with self.connection.cursor() as cursor:
cursor.execute('DROP INDEX IF EXISTS dbpedia.pv_subject_id_idx')
cursor.execute('DROP INDEX IF EXISTS dbpedia.subject_idx')
cursor.execute('DROP INDEX IF EXISTS dbpedia.pv_predicate_idx')
cursor.execute('create index subject_idx on dbpedia.subjects (name)')
cursor.execute('create index pv_subject_id_idx on dbpedia.predicate_object (subject_id)')
cursor.execute('create index pv_predicate_idx on dbpedia.predicate_object (predicate);')
def insert_spo_tuple(self, spo_tuple):
(subject, predicate, db_object) = spo_tuple
with self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cursor:
cursor.execute('select subject_id from dbpedia.subjects '
'where name = %s', (subject,))
results = cursor.fetchone()
if results is None or len(results) == 0:
cursor.execute('INSERT INTO dbpedia.subjects (name) VALUES (%s) '
'returning subject_id', (subject,))
results = cursor.fetchone()
id = results['subject_id']
cursor.execute('INSERT INTO dbpedia.predicate_object (subject_id, predicate, object) '
'VALUES (%s, %s, %s)', (id, predicate, db_object))
def get_person_metadata(self, person_name, use_exact_match=False):
person_name = person_name.replace(' ', '_').upper()
with self.connection.cursor() as cursor:
if not use_exact_match:
cursor.execute('SELECT subject_id, name FROM dbpedia.subjects WHERE upper(name) '
'LIKE %s',
('%%' + person_name + '%%',))
else:
cursor.execute('SELECT subject_id, name FROM dbpedia.subjects WHERE upper(name) = %s',
(person_name,))
results = cursor.fetchall()
if results is None:
return []
subject_id_list = [x[0] for x in results]
cursor.execute('select dbpedia.subjects.name, predicate, object '
'FROM dbpedia.predicate_object '
'INNER JOIN dbpedia.subjects on (dbpedia.subjects.subject_id = dbpedia.predicate_object.subject_id) '
'WHERE dbpedia.predicate_object.subject_id = ANY(%s)', (subject_id_list,))
return sorted(cursor.fetchall(), key=lambda x: x[0])
def get_tuples_by_predicate(self, predicate_of_interest):
with self.connection.cursor() as cursor:
cursor.execute('select dbpedia.subjects.name, '
'predicate, '
'object '
'FROM dbpedia.predicate_object '
'INNER JOIN dbpedia.subjects on (dbpedia.subjects.subject_id = dbpedia.predicate_object.subject_id) '
'WHERE upper(dbpedia.predicate_object.predicate) = upper(%s)', (predicate_of_interest,))
results = cursor.fetchall()
if results is None:
return []
else:
return results
| true
| true
|
79099c28c046af97d5a67408b24314231e474a1a
| 2,236
|
py
|
Python
|
nova/tests/functional/db/test_console_auth_token.py
|
panguan737/nova
|
0d177185a439baa228b42c948cab4e934d6ac7b8
|
[
"Apache-2.0"
] | 2
|
2021-10-11T04:56:25.000Z
|
2022-02-16T08:49:29.000Z
|
nova/tests/functional/db/test_console_auth_token.py
|
ljzjohnson/nova
|
87e1951a1b8c03b9ecdf8f75610d14690b61f272
|
[
"Apache-2.0"
] | 132
|
2017-03-27T11:31:52.000Z
|
2022-03-30T08:45:02.000Z
|
nova/tests/functional/db/test_console_auth_token.py
|
ljzjohnson/nova
|
87e1951a1b8c03b9ecdf8f75610d14690b61f272
|
[
"Apache-2.0"
] | 8
|
2017-03-27T07:50:38.000Z
|
2020-02-14T16:55:56.000Z
|
# Copyright 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import fixture as ovo_fixture
from nova import context
from nova import exception
from nova import objects
from nova import test
from nova.tests import uuidsentinel
class ConsoleAuthTokenTestCase(test.TestCase):
def setUp(self):
super(ConsoleAuthTokenTestCase, self).setUp()
self.context = context.RequestContext('fake-user', 'fake-project')
instance = objects.Instance(
context=self.context,
project_id=self.context.project_id,
uuid=uuidsentinel.fake_instance)
instance.create()
self.console = objects.ConsoleAuthToken(
context=self.context,
instance_uuid=uuidsentinel.fake_instance,
console_type='fake-type',
host='fake-host',
port=1000,
internal_access_path='fake-internal_access_path',
access_url_base='fake-external_access_path'
)
self.token = self.console.authorize(100)
def test_validate(self):
connection_info = objects.ConsoleAuthToken.validate(
self.context, self.token)
expected = self.console.obj_to_primitive()['nova_object.data']
del expected['created_at']
ovo_fixture.compare_obj(self, connection_info, expected,
allow_missing=['created_at'])
def test_validate_invalid(self):
unauthorized_token = uuidsentinel.token
self.assertRaises(
exception.InvalidToken,
objects.ConsoleAuthToken.validate,
self.context, unauthorized_token)
| 38.551724
| 78
| 0.68381
|
from oslo_versionedobjects import fixture as ovo_fixture
from nova import context
from nova import exception
from nova import objects
from nova import test
from nova.tests import uuidsentinel
class ConsoleAuthTokenTestCase(test.TestCase):
def setUp(self):
super(ConsoleAuthTokenTestCase, self).setUp()
self.context = context.RequestContext('fake-user', 'fake-project')
instance = objects.Instance(
context=self.context,
project_id=self.context.project_id,
uuid=uuidsentinel.fake_instance)
instance.create()
self.console = objects.ConsoleAuthToken(
context=self.context,
instance_uuid=uuidsentinel.fake_instance,
console_type='fake-type',
host='fake-host',
port=1000,
internal_access_path='fake-internal_access_path',
access_url_base='fake-external_access_path'
)
self.token = self.console.authorize(100)
def test_validate(self):
connection_info = objects.ConsoleAuthToken.validate(
self.context, self.token)
expected = self.console.obj_to_primitive()['nova_object.data']
del expected['created_at']
ovo_fixture.compare_obj(self, connection_info, expected,
allow_missing=['created_at'])
def test_validate_invalid(self):
unauthorized_token = uuidsentinel.token
self.assertRaises(
exception.InvalidToken,
objects.ConsoleAuthToken.validate,
self.context, unauthorized_token)
| true
| true
|
79099c3b14fb51637142a501bfbed09936d8b84b
| 7,228
|
py
|
Python
|
tests/unit/models/test_user.py
|
duddlf23/amundsendatabuilder
|
3e281373bfa8989c7a489dcf5b8c67a9f1ac38f1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/models/test_user.py
|
duddlf23/amundsendatabuilder
|
3e281373bfa8989c7a489dcf5b8c67a9f1ac38f1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/models/test_user.py
|
duddlf23/amundsendatabuilder
|
3e281373bfa8989c7a489dcf5b8c67a9f1ac38f1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
import unittest
from unittest.mock import ANY
from databuilder.models.graph_serializable import (
RELATION_END_KEY, RELATION_END_LABEL, RELATION_REVERSE_TYPE, RELATION_START_KEY, RELATION_START_LABEL,
RELATION_TYPE,
)
from databuilder.models.user import User
from databuilder.serializers import neo4_serializer, neptune_serializer
from databuilder.serializers.neptune_serializer import (
NEPTUNE_CREATION_TYPE_JOB, NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT, NEPTUNE_HEADER_ID,
NEPTUNE_HEADER_LABEL, NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT,
NEPTUNE_RELATIONSHIP_HEADER_FROM, NEPTUNE_RELATIONSHIP_HEADER_TO,
)
class TestUser(unittest.TestCase):
def setUp(self) -> None:
super(TestUser, self).setUp()
self.user = User(first_name='test_first',
last_name='test_last',
name='test_first test_last',
email='test@email.com',
github_username='github_test',
team_name='test_team',
employee_type='FTE',
manager_email='test_manager@email.com',
slack_id='slack',
is_active=True,
updated_at=1,
role_name='swe')
def test_get_user_model_key(self) -> None:
user_email = User.get_user_model_key(email=self.user.email)
self.assertEqual(user_email, 'test@email.com')
def test_create_nodes(self) -> None:
nodes = self.user.create_nodes()
self.assertEqual(len(nodes), 1)
def test_create_node_additional_attr(self) -> None:
test_user = User(first_name='test_first',
last_name='test_last',
name='test_first test_last',
email='test@email.com',
github_username='github_test',
team_name='test_team',
employee_type='FTE',
manager_email='test_manager@email.com',
slack_id='slack',
is_active=True,
updated_at=1,
role_name='swe',
enable_notify=True)
nodes = test_user.create_nodes()
serialized_node = neo4_serializer.serialize_node(nodes[0])
self.assertEqual(serialized_node['email'], 'test@email.com')
self.assertEqual(serialized_node['role_name'], 'swe')
self.assertTrue(serialized_node['enable_notify:UNQUOTED'])
def test_create_node_additional_attr_neptune(self) -> None:
test_user = User(first_name='test_first',
last_name='test_last',
name='test_first test_last',
email='test@email.com',
github_username='github_test',
team_name='test_team',
employee_type='FTE',
manager_email='test_manager@email.com',
slack_id='slack',
is_active=True,
updated_at=1,
role_name='swe',
enable_notify=True)
nodes = test_user.create_nodes()
serialized_node = neptune_serializer.convert_node(nodes[0])
self.assertEqual(serialized_node['email:String(single)'], 'test@email.com')
self.assertEqual(serialized_node['role_name:String(single)'], 'swe')
self.assertTrue(serialized_node['enable_notify:Bool(single)'])
def test_create_relation(self) -> None:
relations = self.user.create_relation()
self.assertEqual(len(relations), 1)
start_key = 'test@email.com'
end_key = 'test_manager@email.com'
expected_relation = {
RELATION_START_KEY: start_key,
RELATION_START_LABEL: User.USER_NODE_LABEL,
RELATION_END_KEY: end_key,
RELATION_END_LABEL: User.USER_NODE_LABEL,
RELATION_TYPE: User.USER_MANAGER_RELATION_TYPE,
RELATION_REVERSE_TYPE: User.MANAGER_USER_RELATION_TYPE
}
        self.assertEqual(expected_relation, neo4_serializer.serialize_relationship(relations[0]))
def test_create_relation_neptune(self) -> None:
relations = self.user.create_relation()
serialized = neptune_serializer.convert_relationship(relations[0])
start_key = '{email}'.format(email='test@email.com')
end_key = '{email}'.format(email='test_manager@email.com')
expected = [
{
NEPTUNE_HEADER_ID: "{from_vertex_id}_{to_vertex_id}_{label}".format(
from_vertex_id=start_key,
to_vertex_id=end_key,
label=User.USER_MANAGER_RELATION_TYPE
),
NEPTUNE_RELATIONSHIP_HEADER_FROM: start_key,
NEPTUNE_RELATIONSHIP_HEADER_TO: end_key,
NEPTUNE_HEADER_LABEL: User.USER_MANAGER_RELATION_TYPE,
NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: ANY,
NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: NEPTUNE_CREATION_TYPE_JOB
},
{
NEPTUNE_HEADER_ID: "{from_vertex_id}_{to_vertex_id}_{label}".format(
from_vertex_id=end_key,
to_vertex_id=start_key,
label=User.MANAGER_USER_RELATION_TYPE
),
NEPTUNE_RELATIONSHIP_HEADER_FROM: end_key,
NEPTUNE_RELATIONSHIP_HEADER_TO: start_key,
NEPTUNE_HEADER_LABEL: User.MANAGER_USER_RELATION_TYPE,
NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: ANY,
NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: NEPTUNE_CREATION_TYPE_JOB
}
]
self.assertListEqual(serialized, expected)
def test_not_including_empty_attribute(self) -> None:
test_user = User(email='test@email.com',
foo='bar')
self.assertDictEqual(neo4_serializer.serialize_node(test_user.create_next_node()),
{'KEY': 'test@email.com', 'LABEL': 'User', 'email': 'test@email.com',
'is_active:UNQUOTED': True, 'first_name': '', 'last_name': '', 'full_name': '',
'github_username': '', 'team_name': '', 'employee_type': '', 'slack_id': '',
'role_name': '', 'updated_at:UNQUOTED': 0, 'foo': 'bar'})
test_user2 = User(email='test@email.com',
foo='bar',
is_active=False,
do_not_update_empty_attribute=True)
self.assertDictEqual(neo4_serializer.serialize_node(test_user2.create_next_node()),
{'KEY': 'test@email.com', 'LABEL': 'User', 'email': 'test@email.com', 'foo': 'bar'})
| 46.038217
| 118
| 0.596292
|
import unittest
from unittest.mock import ANY
from databuilder.models.graph_serializable import (
RELATION_END_KEY, RELATION_END_LABEL, RELATION_REVERSE_TYPE, RELATION_START_KEY, RELATION_START_LABEL,
RELATION_TYPE,
)
from databuilder.models.user import User
from databuilder.serializers import neo4_serializer, neptune_serializer
from databuilder.serializers.neptune_serializer import (
NEPTUNE_CREATION_TYPE_JOB, NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT, NEPTUNE_HEADER_ID,
NEPTUNE_HEADER_LABEL, NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT,
NEPTUNE_RELATIONSHIP_HEADER_FROM, NEPTUNE_RELATIONSHIP_HEADER_TO,
)
class TestUser(unittest.TestCase):
def setUp(self) -> None:
super(TestUser, self).setUp()
self.user = User(first_name='test_first',
last_name='test_last',
name='test_first test_last',
email='test@email.com',
github_username='github_test',
team_name='test_team',
employee_type='FTE',
manager_email='test_manager@email.com',
slack_id='slack',
is_active=True,
updated_at=1,
role_name='swe')
def test_get_user_model_key(self) -> None:
user_email = User.get_user_model_key(email=self.user.email)
self.assertEqual(user_email, 'test@email.com')
def test_create_nodes(self) -> None:
nodes = self.user.create_nodes()
self.assertEqual(len(nodes), 1)
def test_create_node_additional_attr(self) -> None:
test_user = User(first_name='test_first',
last_name='test_last',
name='test_first test_last',
email='test@email.com',
github_username='github_test',
team_name='test_team',
employee_type='FTE',
manager_email='test_manager@email.com',
slack_id='slack',
is_active=True,
updated_at=1,
role_name='swe',
enable_notify=True)
nodes = test_user.create_nodes()
serialized_node = neo4_serializer.serialize_node(nodes[0])
self.assertEqual(serialized_node['email'], 'test@email.com')
self.assertEqual(serialized_node['role_name'], 'swe')
self.assertTrue(serialized_node['enable_notify:UNQUOTED'])
def test_create_node_additional_attr_neptune(self) -> None:
test_user = User(first_name='test_first',
last_name='test_last',
name='test_first test_last',
email='test@email.com',
github_username='github_test',
team_name='test_team',
employee_type='FTE',
manager_email='test_manager@email.com',
slack_id='slack',
is_active=True,
updated_at=1,
role_name='swe',
enable_notify=True)
nodes = test_user.create_nodes()
serialized_node = neptune_serializer.convert_node(nodes[0])
self.assertEqual(serialized_node['email:String(single)'], 'test@email.com')
self.assertEqual(serialized_node['role_name:String(single)'], 'swe')
self.assertTrue(serialized_node['enable_notify:Bool(single)'])
def test_create_relation(self) -> None:
relations = self.user.create_relation()
self.assertEqual(len(relations), 1)
start_key = 'test@email.com'
end_key = 'test_manager@email.com'
expected_relation = {
RELATION_START_KEY: start_key,
RELATION_START_LABEL: User.USER_NODE_LABEL,
RELATION_END_KEY: end_key,
RELATION_END_LABEL: User.USER_NODE_LABEL,
RELATION_TYPE: User.USER_MANAGER_RELATION_TYPE,
RELATION_REVERSE_TYPE: User.MANAGER_USER_RELATION_TYPE
}
        self.assertEqual(expected_relation, neo4_serializer.serialize_relationship(relations[0]))
def test_create_relation_neptune(self) -> None:
relations = self.user.create_relation()
serialized = neptune_serializer.convert_relationship(relations[0])
start_key = '{email}'.format(email='test@email.com')
end_key = '{email}'.format(email='test_manager@email.com')
expected = [
{
NEPTUNE_HEADER_ID: "{from_vertex_id}_{to_vertex_id}_{label}".format(
from_vertex_id=start_key,
to_vertex_id=end_key,
label=User.USER_MANAGER_RELATION_TYPE
),
NEPTUNE_RELATIONSHIP_HEADER_FROM: start_key,
NEPTUNE_RELATIONSHIP_HEADER_TO: end_key,
NEPTUNE_HEADER_LABEL: User.USER_MANAGER_RELATION_TYPE,
NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: ANY,
NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: NEPTUNE_CREATION_TYPE_JOB
},
{
NEPTUNE_HEADER_ID: "{from_vertex_id}_{to_vertex_id}_{label}".format(
from_vertex_id=end_key,
to_vertex_id=start_key,
label=User.MANAGER_USER_RELATION_TYPE
),
NEPTUNE_RELATIONSHIP_HEADER_FROM: end_key,
NEPTUNE_RELATIONSHIP_HEADER_TO: start_key,
NEPTUNE_HEADER_LABEL: User.MANAGER_USER_RELATION_TYPE,
NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: ANY,
NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: NEPTUNE_CREATION_TYPE_JOB
}
]
self.assertListEqual(serialized, expected)
def test_not_including_empty_attribute(self) -> None:
test_user = User(email='test@email.com',
foo='bar')
self.assertDictEqual(neo4_serializer.serialize_node(test_user.create_next_node()),
{'KEY': 'test@email.com', 'LABEL': 'User', 'email': 'test@email.com',
'is_active:UNQUOTED': True, 'first_name': '', 'last_name': '', 'full_name': '',
'github_username': '', 'team_name': '', 'employee_type': '', 'slack_id': '',
'role_name': '', 'updated_at:UNQUOTED': 0, 'foo': 'bar'})
test_user2 = User(email='test@email.com',
foo='bar',
is_active=False,
do_not_update_empty_attribute=True)
self.assertDictEqual(neo4_serializer.serialize_node(test_user2.create_next_node()),
{'KEY': 'test@email.com', 'LABEL': 'User', 'email': 'test@email.com', 'foo': 'bar'})
| true
| true
|
79099c4c1e886fb513bf9a81cebae0993ab8173f
| 41,077
|
py
|
Python
|
neutron/tests/unit/agent/l3/test_dvr_local_router.py
|
1pintbeer/neutron
|
f5a827c2be06f24a1f8025f120f16c12eb1b1f55
|
[
"Apache-2.0"
] | null | null | null |
neutron/tests/unit/agent/l3/test_dvr_local_router.py
|
1pintbeer/neutron
|
f5a827c2be06f24a1f8025f120f16c12eb1b1f55
|
[
"Apache-2.0"
] | null | null | null |
neutron/tests/unit/agent/l3/test_dvr_local_router.py
|
1pintbeer/neutron
|
f5a827c2be06f24a1f8025f120f16c12eb1b1f55
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as lib_constants
from oslo_config import cfg
from oslo_log import log
from oslo_utils import uuidutils
from neutron.agent.l3 import agent as l3_agent
from neutron.agent.l3 import dvr_edge_ha_router as dvr_edge_ha_rtr
from neutron.agent.l3 import dvr_edge_router as dvr_edge_rtr
from neutron.agent.l3 import dvr_local_router as dvr_router
from neutron.agent.l3 import link_local_allocator as lla
from neutron.agent.l3 import router_info
from neutron.agent.linux import interface
from neutron.agent.linux import ip_lib
from neutron.common import utils as common_utils
from neutron.conf.agent import common as agent_config
from neutron.conf.agent.l3 import config as l3_config
from neutron.conf.agent.l3 import ha as ha_conf
from neutron.conf import common as base_config
from neutron.tests import base
from neutron.tests.common import l3_test_common
_uuid = uuidutils.generate_uuid
FIP_PRI = 32768
HOSTNAME = 'myhost'
class TestDvrRouterOperations(base.BaseTestCase):
def setUp(self):
super(TestDvrRouterOperations, self).setUp()
mock.patch('eventlet.spawn').start()
self.conf = agent_config.setup_conf()
self.conf.register_opts(base_config.core_opts)
log.register_options(self.conf)
self.conf.register_opts(agent_config.AGENT_STATE_OPTS, 'AGENT')
l3_config.register_l3_agent_config_opts(l3_config.OPTS, self.conf)
ha_conf.register_l3_agent_ha_opts(self.conf)
agent_config.register_interface_driver_opts_helper(self.conf)
agent_config.register_process_monitor_opts(self.conf)
agent_config.register_interface_opts(self.conf)
agent_config.register_external_process_opts(self.conf)
self.conf.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
self.conf.set_override('state_path', cfg.CONF.state_path)
self.device_exists_p = mock.patch(
'neutron.agent.linux.ip_lib.device_exists')
self.device_exists = self.device_exists_p.start()
self.ensure_dir = mock.patch(
'oslo_utils.fileutils.ensure_tree').start()
mock.patch('neutron.agent.linux.keepalived.KeepalivedManager'
'.get_full_config_file_path').start()
self.utils_exec_p = mock.patch(
'neutron.agent.linux.utils.execute')
self.utils_exec = self.utils_exec_p.start()
self.utils_replace_file_p = mock.patch(
'neutron_lib.utils.file.replace_file')
self.utils_replace_file = self.utils_replace_file_p.start()
self.external_process_p = mock.patch(
'neutron.agent.linux.external_process.ProcessManager')
self.external_process = self.external_process_p.start()
self.process_monitor = mock.patch(
'neutron.agent.linux.external_process.ProcessMonitor').start()
self.send_adv_notif_p = mock.patch(
'neutron.agent.linux.ip_lib.send_ip_addr_adv_notif')
self.send_adv_notif = self.send_adv_notif_p.start()
self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
driver_cls = self.dvr_cls_p.start()
self.mock_driver = mock.MagicMock()
self.mock_driver.DEV_NAME_LEN = (
interface.LinuxInterfaceDriver.DEV_NAME_LEN)
driver_cls.return_value = self.mock_driver
self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
ip_cls = self.ip_cls_p.start()
self.mock_ip = mock.MagicMock()
ip_cls.return_value = self.mock_ip
self.mock_delete_ip_rule = mock.patch.object(ip_lib,
'delete_ip_rule').start()
ip_dev = mock.patch('neutron.agent.linux.ip_lib.IPDevice').start()
self.mock_ip_dev = mock.MagicMock()
ip_dev.return_value = self.mock_ip_dev
self.l3pluginApi_cls_p = mock.patch(
'neutron.agent.l3.agent.L3PluginApi')
l3pluginApi_cls = self.l3pluginApi_cls_p.start()
self.plugin_api = mock.MagicMock()
l3pluginApi_cls.return_value = self.plugin_api
self.looping_call_p = mock.patch(
'oslo_service.loopingcall.FixedIntervalLoopingCall')
self.looping_call_p.start()
subnet_id_1 = _uuid()
subnet_id_2 = _uuid()
self.snat_ports = [{'subnets': [{'cidr': '152.2.0.0/16',
'gateway_ip': '152.2.0.1',
'id': subnet_id_1}],
'network_id': _uuid(),
'device_owner':
lib_constants.DEVICE_OWNER_ROUTER_SNAT,
'mac_address': 'fa:16:3e:80:8d:80',
'fixed_ips': [{'subnet_id': subnet_id_1,
'ip_address': '152.2.0.13',
'prefixlen': 16}],
'id': _uuid(), 'device_id': _uuid()},
{'subnets': [{'cidr': '152.10.0.0/16',
'gateway_ip': '152.10.0.1',
'id': subnet_id_2}],
'network_id': _uuid(),
'device_owner':
lib_constants.DEVICE_OWNER_ROUTER_SNAT,
'mac_address': 'fa:16:3e:80:8d:80',
'fixed_ips': [{'subnet_id': subnet_id_2,
'ip_address': '152.10.0.13',
'prefixlen': 16}],
'id': _uuid(), 'device_id': _uuid()}]
self.ri_kwargs = {'agent_conf': self.conf,
'interface_driver': self.mock_driver}
def _create_router(self, router=None, **kwargs):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.router_id = _uuid()
if not router:
router = mock.MagicMock()
kwargs['agent'] = agent
kwargs['router_id'] = self.router_id
kwargs['router'] = router
kwargs['agent_conf'] = self.conf
kwargs['interface_driver'] = mock.Mock()
return dvr_router.DvrLocalRouter(HOSTNAME, **kwargs)
def _set_ri_kwargs(self, agent, router_id, router):
self.ri_kwargs['agent'] = agent
self.ri_kwargs['router_id'] = router_id
self.ri_kwargs['router'] = router
def test_gw_ns_name(self):
ri = self._create_router()
self.assertEqual(ri.ns_name, ri.get_gw_ns_name())
def test_create_dvr_fip_interfaces_update(self):
ri = self._create_router()
fip_agent_port = {'subnets': []}
ri.get_floating_agent_gw_interface = mock.Mock(
return_value=fip_agent_port)
ri.get_floating_ips = mock.Mock(return_value=True)
ri.fip_ns = mock.Mock()
ri.fip_ns.subscribe.return_value = False
ri.rtr_fip_connect = True
ex_gw_port = {'network_id': 'fake_net_id'}
ri.create_dvr_external_gateway_on_agent(ex_gw_port)
ri.fip_ns.create_or_update_gateway_port.assert_called_once_with(
fip_agent_port)
def test_create_dvr_fip_interfaces_with_matching_address_scope(self):
self._setup_create_dvr_fip_interfaces_for_setting_routing_rules(
address_scopes_match=True)
def test_create_dvr_fip_interfaces_with_address_scope_mismatch(self):
self._setup_create_dvr_fip_interfaces_for_setting_routing_rules()
def _setup_create_dvr_fip_interfaces_for_setting_routing_rules(
self, address_scopes_match=False):
ri = self._create_router()
ri.get_floating_agent_gw_interface = mock.Mock()
ri.fip_ns = mock.Mock()
ri._add_interface_routing_rule_to_router_ns = mock.Mock()
ri._add_interface_route_to_fip_ns = mock.Mock()
ri.fip_ns._create_rtr_2_fip_link = mock.Mock()
ri.internal_ports = ['moke_port_1', 'moke_port_2']
if address_scopes_match:
ri._check_if_address_scopes_match = mock.Mock(
return_value=True)
else:
ri._check_if_address_scopes_match = mock.Mock(
return_value=False)
ri.rtr_fip_connect = False
ex_gw_port = {'network_id': 'fake_net_id'}
ri.create_dvr_external_gateway_on_agent(ex_gw_port)
ri._check_rtr_2_fip_connect = mock.Mock()
ri.connect_rtr_2_fip()
self.assertTrue(ri._check_if_address_scopes_match.called)
if address_scopes_match:
self.assertTrue(
ri.fip_ns.create_rtr_2_fip_link.called)
self.assertTrue(
ri._add_interface_routing_rule_to_router_ns.called)
self.assertTrue(
ri._add_interface_route_to_fip_ns.called)
else:
self.assertFalse(
ri._add_interface_routing_rule_to_router_ns.called)
self.assertFalse(
ri._add_interface_route_to_fip_ns.called)
self.assertTrue(
ri.fip_ns.create_rtr_2_fip_link.called)
def test_get_floating_ips_dvr(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
fips = ri.get_floating_ips()
self.assertEqual(
[{'host': HOSTNAME}, {'host': mock.sentinel.otherhost}], fips)
def test_floating_forward_rules_no_fip_ns(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
fip = {'id': _uuid()}
ri = self._create_router(router)
self.assertFalse(ri.floating_forward_rules(fip))
def test_floating_forward_rules(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
floating_ip = '15.1.2.3'
rtr_2_fip_name = 'fake_router'
fixed_ip = '192.168.0.1'
fip = {'id': _uuid(),
'fixed_ip_address': '192.168.0.1',
'floating_ip_address': '15.1.2.3'}
instance = mock.Mock()
instance.get_rtr_ext_device_name = mock.Mock(
return_value=rtr_2_fip_name)
ri.fip_ns = instance
dnat_from_floatingip_to_fixedip = (
'PREROUTING', '-d %s/32 -i %s -j DNAT --to-destination %s' % (
floating_ip, rtr_2_fip_name, fixed_ip))
to_source = '-s %s/32 -j SNAT --to-source %s' % (fixed_ip, floating_ip)
if ri.iptables_manager.random_fully:
to_source += ' --random-fully'
snat_from_fixedip_to_floatingip = ('float-snat', to_source)
actual = ri.floating_forward_rules(fip)
expected = [dnat_from_floatingip_to_fixedip,
snat_from_fixedip_to_floatingip]
self.assertEqual(expected, actual)
def test_floating_mangle_rules_no_fip_ns(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
floating_ip = mock.Mock()
fixed_ip = mock.Mock()
internal_mark = mock.Mock()
self.assertFalse(ri.floating_mangle_rules(floating_ip, fixed_ip,
internal_mark))
def test_floating_mangle_rules(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
floating_ip = '15.1.2.3'
fixed_ip = '192.168.0.1'
internal_mark = 'fake_mark'
rtr_2_fip_name = 'fake_router'
instance = mock.Mock()
instance.get_rtr_ext_device_name = mock.Mock(
return_value=rtr_2_fip_name)
ri.fip_ns = instance
mark_traffic_to_floating_ip = (
'floatingip', '-d %s/32 -i %s -j MARK --set-xmark %s' % (
floating_ip, rtr_2_fip_name, internal_mark))
mark_traffic_from_fixed_ip = (
'FORWARD', '-s %s/32 -j $float-snat' % fixed_ip)
actual = ri.floating_mangle_rules(floating_ip, fixed_ip, internal_mark)
expected = [mark_traffic_to_floating_ip, mark_traffic_from_fixed_ip]
self.assertEqual(expected, actual)
@mock.patch.object(ip_lib, 'send_ip_addr_adv_notif')
@mock.patch.object(ip_lib, 'IPDevice')
@mock.patch.object(ip_lib, 'add_ip_rule')
def test_floating_ip_added_dist(self, mock_add_ip_rule, mIPDevice,
mock_adv_notif):
router = mock.MagicMock()
ri = self._create_router(router)
ri.ex_gw_port = ri.router['gw_port']
ext_net_id = _uuid()
subnet_id = _uuid()
agent_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
'prefixlen': 24,
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': ext_net_id,
'mac_address': 'ca:fe:de:ad:be:ef'}
fip = {'id': _uuid(),
'host': HOSTNAME,
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.1',
'floating_network_id': ext_net_id,
'port_id': _uuid()}
ri.fip_ns = mock.Mock()
ri.fip_ns.agent_gateway_port = agent_gw_port
ri.create_dvr_external_gateway_on_agent(ri.ex_gw_port)
ri._check_rtr_2_fip_connect = mock.Mock()
ri.connect_rtr_2_fip()
self.assertTrue(ri.rtr_fip_connect)
ri.fip_ns.allocate_rule_priority.return_value = FIP_PRI
subnet = lla.LinkLocalAddressPair('169.254.30.42/31')
ri.rtr_fip_subnet = subnet
ri.fip_ns.local_subnets = mock.Mock()
ri.fip_ns.local_subnets.allocate.return_value = subnet
ip_cidr = common_utils.ip_to_cidr(fip['floating_ip_address'])
ri.floating_ip_added_dist(fip, ip_cidr)
mock_add_ip_rule.assert_called_with(
namespace=ri.router_namespace.name, ip='192.168.0.1',
table=16, priority=FIP_PRI)
ri.fip_ns.local_subnets.allocate.assert_not_called()
# Validate that fip_ns.local_subnets is called when
# ri.rtr_fip_subnet is None
ri.rtr_fip_subnet = None
ri.floating_ip_added_dist(fip, ip_cidr)
mock_add_ip_rule.assert_called_with(
namespace=ri.router_namespace.name, ip='192.168.0.1',
table=16, priority=FIP_PRI)
ri.fip_ns.local_subnets.allocate.assert_called_once_with(ri.router_id)
# TODO(mrsmith): add more asserts
@mock.patch.object(ip_lib, 'IPWrapper')
@mock.patch.object(ip_lib, 'IPDevice')
def test_floating_ip_removed_dist(self, mIPDevice, mIPWrapper):
router = mock.MagicMock()
ri = self._create_router(router)
ri.ex_gw_port = ri.router['gw_port']
subnet_id = _uuid()
fixed_ip = '20.0.0.30'
agent_gw_port = {'fixed_ips': [{'ip_address': fixed_ip,
'prefixlen': 24,
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': _uuid(),
'mac_address': 'ca:fe:de:ad:be:ef'}
fip_cidr = '11.22.33.44/24'
ri.fip_ns = mock.Mock()
ri.fip_ns.get_name.return_value = 'fip_ns_name'
ri.floating_ips_dict['11.22.33.44'] = (fixed_ip, FIP_PRI)
ri.fip_2_rtr = '11.22.33.42'
ri.rtr_2_fip = '11.22.33.40'
ri.fip_ns.agent_gateway_port = agent_gw_port
s = lla.LinkLocalAddressPair('169.254.30.42/31')
ri.rtr_fip_subnet = s
ri.fip_ns.local_subnets = mock.Mock()
ri.floating_ip_removed_dist(fip_cidr)
self.mock_delete_ip_rule.assert_called_with(
ri.router_namespace.name, ip=fixed_ip, table=16, priority=FIP_PRI)
mIPDevice().route.delete_route.assert_called_with(fip_cidr,
via=str(s.ip))
ri.fip_ns.local_subnets.allocate.assert_not_called()
@mock.patch.object(ip_lib, 'add_ip_rule')
def test_floating_ip_moved_dist(self, mock_add_ip_rule):
router = mock.MagicMock()
ri = self._create_router(router)
floating_ip_address = '15.1.2.3'
fixed_ip = '192.168.0.1'
fip = {'floating_ip_address': floating_ip_address,
'fixed_ip_address': fixed_ip}
ri.floating_ips_dict['15.1.2.3'] = (fixed_ip, FIP_PRI)
ri.fip_ns = mock.Mock()
ri.fip_ns.allocate_rule_priority.return_value = FIP_PRI
ri.floating_ip_moved_dist(fip)
self.mock_delete_ip_rule.assert_called_once_with(
ri.router_namespace.name, ip=fixed_ip, table=16, priority=FIP_PRI)
ri.fip_ns.deallocate_rule_priority.assert_called_once_with(
floating_ip_address)
ri.fip_ns.allocate_rule_priority.assert_called_once_with(
floating_ip_address)
mock_add_ip_rule.assert_called_with(
namespace=ri.router_namespace.name, ip=fixed_ip,
table=16, priority=FIP_PRI)
def _test_add_floating_ip(self, ri, fip, is_failure=False):
if not is_failure:
ri.floating_ip_added_dist = mock.Mock(
return_value=lib_constants.FLOATINGIP_STATUS_ACTIVE)
else:
ri.floating_ip_added_dist = mock.Mock(
return_value=lib_constants.FLOATINGIP_STATUS_ERROR)
result = ri.add_floating_ip(fip,
mock.sentinel.interface_name,
mock.sentinel.device)
ri.floating_ip_added_dist.assert_called_once_with(
fip, mock.ANY)
return result
def test_add_floating_ip(self):
ri = self._create_router(mock.MagicMock())
ip = '15.1.2.3'
fip = {'floating_ip_address': ip}
result = self._test_add_floating_ip(ri, fip)
ri.floating_ip_added_dist.assert_called_once_with(fip, ip + '/32')
self.assertEqual(lib_constants.FLOATINGIP_STATUS_ACTIVE, result)
def test_add_floating_ip_failure(self):
ri = self._create_router(mock.MagicMock())
ip = '15.1.2.3'
fip = {'floating_ip_address': ip}
result = self._test_add_floating_ip(ri, fip, True)
ri.floating_ip_added_dist.assert_called_once_with(fip, ip + '/32')
self.assertEqual(lib_constants.FLOATINGIP_STATUS_ERROR, result)
@mock.patch.object(router_info.RouterInfo, 'remove_floating_ip')
def test_remove_floating_ip(self, super_remove_floating_ip):
ri = self._create_router(mock.MagicMock())
ri.floating_ip_removed_dist = mock.Mock()
ri.remove_floating_ip(mock.sentinel.device, mock.sentinel.ip_cidr)
self.assertFalse(super_remove_floating_ip.called)
ri.floating_ip_removed_dist.assert_called_once_with(
mock.sentinel.ip_cidr)
def test__get_internal_port(self):
ri = self._create_router()
port = {'fixed_ips': [{'subnet_id': mock.sentinel.subnet_id}]}
router_ports = [port]
ri.router.get.return_value = router_ports
self.assertEqual(port, ri._get_internal_port(mock.sentinel.subnet_id))
def test__get_internal_port_not_found(self):
ri = self._create_router()
port = {'fixed_ips': [{'subnet_id': mock.sentinel.subnet_id}]}
router_ports = [port]
ri.router.get.return_value = router_ports
self.assertIsNone(ri._get_internal_port(mock.sentinel.subnet_id2))
def test__get_snat_idx_ipv4(self):
ip_cidr = '101.12.13.00/24'
ri = self._create_router(mock.MagicMock())
snat_idx = ri._get_snat_idx(ip_cidr)
# 0x650C0D00 is numerical value of 101.12.13.00
self.assertEqual(0x650C0D00, snat_idx)
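        # Worked out: 101 = 0x65, 12 = 0x0C, 13 = 0x0D, 0 = 0x00, so
        # (101 << 24) | (12 << 16) | (13 << 8) | 0 == 0x650C0D00.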
def test__get_snat_idx_ipv6(self):
ip_cidr = '2620:0:a03:e100::/64'
ri = self._create_router(mock.MagicMock())
snat_idx = ri._get_snat_idx(ip_cidr)
# 0x3D345705 is 30 bit xor folded crc32 of the ip_cidr
self.assertEqual(0x3D345705, snat_idx)
def test__get_snat_idx_ipv6_below_32768(self):
ip_cidr = 'd488::/30'
# crc32 of this ip_cidr is 0x1BD7
ri = self._create_router(mock.MagicMock())
snat_idx = ri._get_snat_idx(ip_cidr)
# 0x1BD7 + 0x3FFFFFFF = 0x40001BD6
self.assertEqual(0x40001BD6, snat_idx)
def test__set_subnet_arp_info(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
ports = ri.router.get(lib_constants.INTERFACE_KEY, [])
subnet_id = l3_test_common.get_subnet_id(ports[0])
test_ports = [{'mac_address': '00:11:22:33:44:55',
'device_owner': lib_constants.DEVICE_OWNER_DHCP,
'fixed_ips': [{'ip_address': '1.2.3.4',
'prefixlen': 24,
'subnet_id': subnet_id}]},
{'mac_address': '11:22:33:44:55:66',
'device_owner': lib_constants.DEVICE_OWNER_LOADBALANCER,
'fixed_ips': [{'ip_address': '1.2.3.5',
'prefixlen': 24,
'subnet_id': subnet_id}]},
{'mac_address': '22:33:44:55:66:77',
'device_owner':
lib_constants.DEVICE_OWNER_LOADBALANCERV2,
'fixed_ips': [{'ip_address': '1.2.3.6',
'prefixlen': 24,
'subnet_id': subnet_id}]}]
self.plugin_api.get_ports_by_subnet.return_value = test_ports
# Test basic case
ports[0]['subnets'] = [{'id': subnet_id,
'cidr': '1.2.3.0/24'}]
with mock.patch.object(ri,
'_process_arp_cache_for_internal_port') as parp:
ri._set_subnet_arp_info(subnet_id)
self.assertEqual(1, parp.call_count)
self.mock_ip_dev.neigh.add.assert_called_once_with(
'1.2.3.4', '00:11:22:33:44:55')
# Test negative case
router['distributed'] = False
        self.mock_ip_dev.neigh.add.reset_mock()
        ri._set_subnet_arp_info(subnet_id)
        self.mock_ip_dev.neigh.add.assert_not_called()
def test_add_arp_entry(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
arp_table = {'ip_address': '1.7.23.11',
'mac_address': '00:11:22:33:44:55',
'subnet_id': subnet_id}
payload = {'arp_table': arp_table, 'router_id': router['id']}
agent._router_added(router['id'], router)
agent.add_arp_entry(None, payload)
agent.router_deleted(None, router['id'])
self.mock_ip_dev.neigh.add.assert_called_once_with(
'1.7.23.11', '00:11:22:33:44:55')
def test_add_arp_entry_no_routerinfo(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
arp_table = {'ip_address': '1.7.23.11',
'mac_address': '00:11:22:33:44:55',
'subnet_id': subnet_id}
payload = {'arp_table': arp_table, 'router_id': router['id']}
agent.add_arp_entry(None, payload)
def test__update_arp_entry_with_no_subnet(self):
self._set_ri_kwargs(mock.sentinel.agent,
'foo_router_id',
{'distributed': True, 'gw_port_host': HOSTNAME})
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
ri.get_internal_device_name = mock.Mock()
ri._update_arp_entry(mock.ANY, mock.ANY, 'foo_subnet_id', 'add')
self.assertFalse(ri.get_internal_device_name.call_count)
def _setup_test_for_arp_entry_cache(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
subnet_id = l3_test_common.get_subnet_id(
ri.router[lib_constants.INTERFACE_KEY][0])
return ri, subnet_id
def test__update_arp_entry_calls_arp_cache_with_no_device(self):
ri, subnet_id = self._setup_test_for_arp_entry_cache()
state = True
with mock.patch('neutron.agent.linux.ip_lib.IPDevice') as rtrdev,\
mock.patch.object(ri, '_cache_arp_entry') as arp_cache:
rtrdev.return_value.exists.return_value = False
state = ri._update_arp_entry(
mock.ANY, mock.ANY, subnet_id, 'add')
self.assertFalse(state)
self.assertTrue(arp_cache.called)
arp_cache.assert_called_once_with(mock.ANY, mock.ANY,
subnet_id, 'add')
self.assertFalse(rtrdev.neigh.add.called)
def test__process_arp_cache_for_internal_port(self):
ri, subnet_id = self._setup_test_for_arp_entry_cache()
ri._cache_arp_entry('1.7.23.11', '00:11:22:33:44:55',
subnet_id, 'add')
self.assertEqual(1, len(ri._pending_arp_set))
with mock.patch.object(ri, '_update_arp_entry') as update_arp:
update_arp.return_value = True
ri._process_arp_cache_for_internal_port(subnet_id)
self.assertEqual(0, len(ri._pending_arp_set))
def test__delete_arp_cache_for_internal_port(self):
ri, subnet_id = self._setup_test_for_arp_entry_cache()
ri._cache_arp_entry('1.7.23.11', '00:11:22:33:44:55',
subnet_id, 'add')
self.assertEqual(1, len(ri._pending_arp_set))
ri._delete_arp_cache_for_internal_port(subnet_id)
self.assertEqual(0, len(ri._pending_arp_set))
def test_del_arp_entry(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
arp_table = {'ip_address': '1.5.25.15',
'mac_address': '00:44:33:22:11:55',
'subnet_id': subnet_id}
payload = {'arp_table': arp_table, 'router_id': router['id']}
agent._router_added(router['id'], router)
# first add the entry
agent.add_arp_entry(None, payload)
# now delete it
agent.del_arp_entry(None, payload)
self.mock_ip_dev.neigh.delete.assert_called_once_with(
'1.5.25.15', '00:44:33:22:11:55')
agent.router_deleted(None, router['id'])
def test_get_floating_agent_gw_interfaces(self):
fake_network_id = _uuid()
subnet_id = _uuid()
agent_gateway_port = (
[{'fixed_ips': [{'ip_address': '20.0.0.30',
'prefixlen': 24,
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
portbindings.HOST_ID: 'myhost',
'device_owner': lib_constants.DEVICE_OWNER_AGENT_GW,
'network_id': fake_network_id,
'mac_address': 'ca:fe:de:ad:be:ef'}]
)
router = l3_test_common.prepare_router_data(enable_snat=True)
router[lib_constants.FLOATINGIP_AGENT_INTF_KEY] = agent_gateway_port
router['distributed'] = True
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
self.assertEqual(
agent_gateway_port[0],
ri.get_floating_agent_gw_interface(fake_network_id))
def test_process_router_dist_floating_ip_add(self):
fake_floatingips = {'floatingips': [
{'id': _uuid(),
'host': HOSTNAME,
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.1',
'floating_network_id': mock.sentinel.ext_net_id,
'port_id': _uuid()},
{'id': _uuid(),
'host': 'some-other-host',
'floating_ip_address': '15.1.2.4',
'fixed_ip_address': '192.168.0.10',
'floating_network_id': mock.sentinel.ext_net_id,
'port_id': _uuid()}]}
router = l3_test_common.prepare_router_data(enable_snat=True)
router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
router['distributed'] = True
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
fip_ns = agent.get_fip_ns(mock.sentinel.ext_net_id)
subnet_id = _uuid()
fip_ns.agent_gateway_port = (
{'fixed_ips': [{'ip_address': '20.0.0.30',
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': _uuid(),
'mac_address': 'ca:fe:de:ad:be:ef'}
)
def _test_ext_gw_updated_dvr_agent_mode(self, host,
agent_mode, expected_call_count):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(self,
ri)
ri._external_gateway_added = mock.Mock()
# test agent mode = dvr (compute node)
router['gw_port_host'] = host
agent.conf.agent_mode = agent_mode
ri.external_gateway_updated(ex_gw_port, interface_name)
# no gateway should be added on dvr node
self.assertEqual(expected_call_count,
ri._external_gateway_added.call_count)
def test_ext_gw_updated_dvr_agent_mode(self):
# no gateway should be added on dvr node
self._test_ext_gw_updated_dvr_agent_mode('any-foo', 'dvr', 0)
def test_ext_gw_updated_dvr_agent_mode_host(self):
# no gateway should be added on dvr node
self._test_ext_gw_updated_dvr_agent_mode(HOSTNAME,
'dvr', 0)
def test_external_gateway_removed_ext_gw_port_and_fip(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['gw_port_host'] = HOSTNAME
self.mock_driver.unplug.reset_mock()
external_net_id = router['gw_port']['network_id']
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_edge_rtr.DvrEdgeRouter(HOSTNAME, **self.ri_kwargs)
ri.remove_floating_ip = mock.Mock()
agent._fetch_external_net_id = mock.Mock(return_value=external_net_id)
ri.ex_gw_port = ri.router['gw_port']
del ri.router['gw_port']
ri.external_gateway_added(
ri.ex_gw_port,
ri.get_external_device_name(ri.ex_gw_port['id']))
ri.fip_ns = None
nat = ri.iptables_manager.ipv4['nat']
nat.clear_rules_by_tag = mock.Mock()
nat.add_rule = mock.Mock()
ri.fip_ns = agent.get_fip_ns(external_net_id)
subnet_id = _uuid()
ri.fip_ns.agent_gateway_port = {
'fixed_ips': [{
'ip_address': '20.0.0.30',
'prefixlen': 24,
'subnet_id': subnet_id
}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': external_net_id,
'mac_address': 'ca:fe:de:ad:be:ef'}
vm_floating_ip = '19.4.4.2'
ri.floating_ips_dict[vm_floating_ip] = FIP_PRI
ri.rtr_fip_subnet = ri.fip_ns.local_subnets.allocate(ri.router_id)
_, fip_to_rtr = ri.rtr_fip_subnet.get_pair()
self.mock_ip.get_devices.return_value = [
l3_test_common.FakeDev(ri.fip_ns.get_ext_device_name(_uuid()))]
ri.get_router_cidrs = mock.Mock(
return_value={vm_floating_ip + '/32', '19.4.4.1/24'})
self.device_exists.return_value = True
ri.external_gateway_removed(
ri.ex_gw_port,
ri.get_external_device_name(ri.ex_gw_port['id']))
ri.remove_floating_ip.assert_called_once_with(self.mock_ip_dev,
'19.4.4.2/32')
def test_get_router_cidrs_no_fip_ns(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
device = mock.Mock()
self.assertFalse(ri.get_router_cidrs(device))
def test_get_router_cidrs_no_device_exists(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
fake_fip_ns = mock.Mock(return_value=True)
fake_fip_ns.get_name = mock.Mock(return_value=None)
fake_fip_ns.get_int_device_name = mock.Mock(return_value=None)
ri.fip_ns = fake_fip_ns
device = mock.Mock()
device.exists = mock.Mock(return_value=False)
with mock.patch.object(ip_lib, 'IPDevice', return_value=device):
self.assertFalse(ri.get_router_cidrs(device))
@mock.patch.object(router_info.RouterInfo, '_add_snat_rules')
@mock.patch.object(dvr_router.DvrLocalRouter, '_handle_router_snat_rules')
def test_handle_snat_rule_for_centralized_fip(
self, _add_snat_rules, _handle_router_snat_rules):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
self.mock_driver.unplug.reset_mock()
router = l3_test_common.prepare_router_data(enable_floating_ip=True)
router['gw_port_host'] = HOSTNAME
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_edge_rtr.DvrEdgeRouter(HOSTNAME, **self.ri_kwargs)
ri.snat_iptables_manager = mock.MagicMock()
ipv4_nat = ri.snat_iptables_manager.ipv4['nat']
interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(self,
ri)
ri._handle_router_snat_rules(ex_gw_port, interface_name)
ipv4_nat.add_rule.assert_called_once_with('snat', '-j $float-snat')
@mock.patch.object(dvr_edge_rtr.DvrEdgeRouter,
'add_centralized_floatingip')
def test_add_centralized_floatingip_dvr_ha(
self,
super_add_centralized_floatingip):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
router = l3_test_common.prepare_router_data(
num_internal_ports=2, enable_ha=True)
router['gw_port_host'] = HOSTNAME
self.mock_driver.unplug.reset_mock()
self._set_ri_kwargs(agent, router['id'], router)
fip = {'id': _uuid()}
fip_cidr = '11.22.33.44/24'
ri = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri.is_router_master = mock.Mock(return_value=False)
ri._add_vip = mock.Mock()
interface_name = ri.get_snat_external_device_interface_name(
ri.get_ex_gw_port())
ri.add_centralized_floatingip(fip, fip_cidr)
ri._add_vip.assert_called_once_with(fip_cidr, interface_name)
super_add_centralized_floatingip.assert_not_called()
router[lib_constants.HA_INTERFACE_KEY]['status'] = 'DOWN'
self._set_ri_kwargs(agent, router['id'], router)
ri_1 = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri_1.is_router_master = mock.Mock(return_value=True)
ri_1._add_vip = mock.Mock()
interface_name = ri_1.get_snat_external_device_interface_name(
ri_1.get_ex_gw_port())
ri_1.add_centralized_floatingip(fip, fip_cidr)
ri_1._add_vip.assert_called_once_with(fip_cidr, interface_name)
super_add_centralized_floatingip.assert_not_called()
router[lib_constants.HA_INTERFACE_KEY]['status'] = 'ACTIVE'
self._set_ri_kwargs(agent, router['id'], router)
ri_2 = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri_2.is_router_master = mock.Mock(return_value=True)
ri_2._add_vip = mock.Mock()
interface_name = ri_2.get_snat_external_device_interface_name(
ri_2.get_ex_gw_port())
ri_2.add_centralized_floatingip(fip, fip_cidr)
ri_2._add_vip.assert_called_once_with(fip_cidr, interface_name)
super_add_centralized_floatingip.assert_called_once_with(fip,
fip_cidr)
@mock.patch.object(dvr_edge_rtr.DvrEdgeRouter,
'remove_centralized_floatingip')
def test_remove_centralized_floatingip(self,
super_remove_centralized_floatingip):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['gw_port_host'] = HOSTNAME
self.mock_driver.unplug.reset_mock()
self._set_ri_kwargs(agent, router['id'], router)
fip_cidr = '11.22.33.44/24'
ri = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri.is_router_master = mock.Mock(return_value=False)
ri._remove_vip = mock.Mock()
ri.remove_centralized_floatingip(fip_cidr)
ri._remove_vip.assert_called_once_with(fip_cidr)
super_remove_centralized_floatingip.assert_not_called()
ri1 = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri1.is_router_master = mock.Mock(return_value=True)
ri1._remove_vip = mock.Mock()
ri1.remove_centralized_floatingip(fip_cidr)
ri1._remove_vip.assert_called_once_with(fip_cidr)
super_remove_centralized_floatingip.assert_called_once_with(fip_cidr)
| 46.102132
| 79
| 0.616817
|
import mock
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as lib_constants
from oslo_config import cfg
from oslo_log import log
from oslo_utils import uuidutils
from neutron.agent.l3 import agent as l3_agent
from neutron.agent.l3 import dvr_edge_ha_router as dvr_edge_ha_rtr
from neutron.agent.l3 import dvr_edge_router as dvr_edge_rtr
from neutron.agent.l3 import dvr_local_router as dvr_router
from neutron.agent.l3 import link_local_allocator as lla
from neutron.agent.l3 import router_info
from neutron.agent.linux import interface
from neutron.agent.linux import ip_lib
from neutron.common import utils as common_utils
from neutron.conf.agent import common as agent_config
from neutron.conf.agent.l3 import config as l3_config
from neutron.conf.agent.l3 import ha as ha_conf
from neutron.conf import common as base_config
from neutron.tests import base
from neutron.tests.common import l3_test_common
_uuid = uuidutils.generate_uuid
FIP_PRI = 32768
HOSTNAME = 'myhost'
class TestDvrRouterOperations(base.BaseTestCase):
def setUp(self):
super(TestDvrRouterOperations, self).setUp()
mock.patch('eventlet.spawn').start()
self.conf = agent_config.setup_conf()
self.conf.register_opts(base_config.core_opts)
log.register_options(self.conf)
self.conf.register_opts(agent_config.AGENT_STATE_OPTS, 'AGENT')
l3_config.register_l3_agent_config_opts(l3_config.OPTS, self.conf)
ha_conf.register_l3_agent_ha_opts(self.conf)
agent_config.register_interface_driver_opts_helper(self.conf)
agent_config.register_process_monitor_opts(self.conf)
agent_config.register_interface_opts(self.conf)
agent_config.register_external_process_opts(self.conf)
self.conf.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
self.conf.set_override('state_path', cfg.CONF.state_path)
self.device_exists_p = mock.patch(
'neutron.agent.linux.ip_lib.device_exists')
self.device_exists = self.device_exists_p.start()
self.ensure_dir = mock.patch(
'oslo_utils.fileutils.ensure_tree').start()
mock.patch('neutron.agent.linux.keepalived.KeepalivedManager'
'.get_full_config_file_path').start()
self.utils_exec_p = mock.patch(
'neutron.agent.linux.utils.execute')
self.utils_exec = self.utils_exec_p.start()
self.utils_replace_file_p = mock.patch(
'neutron_lib.utils.file.replace_file')
self.utils_replace_file = self.utils_replace_file_p.start()
self.external_process_p = mock.patch(
'neutron.agent.linux.external_process.ProcessManager')
self.external_process = self.external_process_p.start()
self.process_monitor = mock.patch(
'neutron.agent.linux.external_process.ProcessMonitor').start()
self.send_adv_notif_p = mock.patch(
'neutron.agent.linux.ip_lib.send_ip_addr_adv_notif')
self.send_adv_notif = self.send_adv_notif_p.start()
self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
driver_cls = self.dvr_cls_p.start()
self.mock_driver = mock.MagicMock()
self.mock_driver.DEV_NAME_LEN = (
interface.LinuxInterfaceDriver.DEV_NAME_LEN)
driver_cls.return_value = self.mock_driver
self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
ip_cls = self.ip_cls_p.start()
self.mock_ip = mock.MagicMock()
ip_cls.return_value = self.mock_ip
self.mock_delete_ip_rule = mock.patch.object(ip_lib,
'delete_ip_rule').start()
ip_dev = mock.patch('neutron.agent.linux.ip_lib.IPDevice').start()
self.mock_ip_dev = mock.MagicMock()
ip_dev.return_value = self.mock_ip_dev
self.l3pluginApi_cls_p = mock.patch(
'neutron.agent.l3.agent.L3PluginApi')
l3pluginApi_cls = self.l3pluginApi_cls_p.start()
self.plugin_api = mock.MagicMock()
l3pluginApi_cls.return_value = self.plugin_api
self.looping_call_p = mock.patch(
'oslo_service.loopingcall.FixedIntervalLoopingCall')
self.looping_call_p.start()
subnet_id_1 = _uuid()
subnet_id_2 = _uuid()
self.snat_ports = [{'subnets': [{'cidr': '152.2.0.0/16',
'gateway_ip': '152.2.0.1',
'id': subnet_id_1}],
'network_id': _uuid(),
'device_owner':
lib_constants.DEVICE_OWNER_ROUTER_SNAT,
'mac_address': 'fa:16:3e:80:8d:80',
'fixed_ips': [{'subnet_id': subnet_id_1,
'ip_address': '152.2.0.13',
'prefixlen': 16}],
'id': _uuid(), 'device_id': _uuid()},
{'subnets': [{'cidr': '152.10.0.0/16',
'gateway_ip': '152.10.0.1',
'id': subnet_id_2}],
'network_id': _uuid(),
'device_owner':
lib_constants.DEVICE_OWNER_ROUTER_SNAT,
'mac_address': 'fa:16:3e:80:8d:80',
'fixed_ips': [{'subnet_id': subnet_id_2,
'ip_address': '152.10.0.13',
'prefixlen': 16}],
'id': _uuid(), 'device_id': _uuid()}]
self.ri_kwargs = {'agent_conf': self.conf,
'interface_driver': self.mock_driver}
def _create_router(self, router=None, **kwargs):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.router_id = _uuid()
if not router:
router = mock.MagicMock()
kwargs['agent'] = agent
kwargs['router_id'] = self.router_id
kwargs['router'] = router
kwargs['agent_conf'] = self.conf
kwargs['interface_driver'] = mock.Mock()
return dvr_router.DvrLocalRouter(HOSTNAME, **kwargs)
def _set_ri_kwargs(self, agent, router_id, router):
self.ri_kwargs['agent'] = agent
self.ri_kwargs['router_id'] = router_id
self.ri_kwargs['router'] = router
def test_gw_ns_name(self):
ri = self._create_router()
self.assertEqual(ri.ns_name, ri.get_gw_ns_name())
def test_create_dvr_fip_interfaces_update(self):
ri = self._create_router()
fip_agent_port = {'subnets': []}
ri.get_floating_agent_gw_interface = mock.Mock(
return_value=fip_agent_port)
ri.get_floating_ips = mock.Mock(return_value=True)
ri.fip_ns = mock.Mock()
ri.fip_ns.subscribe.return_value = False
ri.rtr_fip_connect = True
ex_gw_port = {'network_id': 'fake_net_id'}
ri.create_dvr_external_gateway_on_agent(ex_gw_port)
ri.fip_ns.create_or_update_gateway_port.assert_called_once_with(
fip_agent_port)
def test_create_dvr_fip_interfaces_with_matching_address_scope(self):
self._setup_create_dvr_fip_interfaces_for_setting_routing_rules(
address_scopes_match=True)
def test_create_dvr_fip_interfaces_with_address_scope_mismatch(self):
self._setup_create_dvr_fip_interfaces_for_setting_routing_rules()
def _setup_create_dvr_fip_interfaces_for_setting_routing_rules(
self, address_scopes_match=False):
ri = self._create_router()
ri.get_floating_agent_gw_interface = mock.Mock()
ri.fip_ns = mock.Mock()
ri._add_interface_routing_rule_to_router_ns = mock.Mock()
ri._add_interface_route_to_fip_ns = mock.Mock()
ri.fip_ns._create_rtr_2_fip_link = mock.Mock()
ri.internal_ports = ['moke_port_1', 'moke_port_2']
if address_scopes_match:
ri._check_if_address_scopes_match = mock.Mock(
return_value=True)
else:
ri._check_if_address_scopes_match = mock.Mock(
return_value=False)
ri.rtr_fip_connect = False
ex_gw_port = {'network_id': 'fake_net_id'}
ri.create_dvr_external_gateway_on_agent(ex_gw_port)
ri._check_rtr_2_fip_connect = mock.Mock()
ri.connect_rtr_2_fip()
self.assertTrue(ri._check_if_address_scopes_match.called)
if address_scopes_match:
self.assertTrue(
ri.fip_ns.create_rtr_2_fip_link.called)
self.assertTrue(
ri._add_interface_routing_rule_to_router_ns.called)
self.assertTrue(
ri._add_interface_route_to_fip_ns.called)
else:
self.assertFalse(
ri._add_interface_routing_rule_to_router_ns.called)
self.assertFalse(
ri._add_interface_route_to_fip_ns.called)
self.assertTrue(
ri.fip_ns.create_rtr_2_fip_link.called)
def test_get_floating_ips_dvr(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
fips = ri.get_floating_ips()
self.assertEqual(
[{'host': HOSTNAME}, {'host': mock.sentinel.otherhost}], fips)
def test_floating_forward_rules_no_fip_ns(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
fip = {'id': _uuid()}
ri = self._create_router(router)
self.assertFalse(ri.floating_forward_rules(fip))
def test_floating_forward_rules(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
floating_ip = '15.1.2.3'
rtr_2_fip_name = 'fake_router'
fixed_ip = '192.168.0.1'
fip = {'id': _uuid(),
'fixed_ip_address': '192.168.0.1',
'floating_ip_address': '15.1.2.3'}
instance = mock.Mock()
instance.get_rtr_ext_device_name = mock.Mock(
return_value=rtr_2_fip_name)
ri.fip_ns = instance
dnat_from_floatingip_to_fixedip = (
'PREROUTING', '-d %s/32 -i %s -j DNAT --to-destination %s' % (
floating_ip, rtr_2_fip_name, fixed_ip))
to_source = '-s %s/32 -j SNAT --to-source %s' % (fixed_ip, floating_ip)
if ri.iptables_manager.random_fully:
to_source += ' --random-fully'
snat_from_fixedip_to_floatingip = ('float-snat', to_source)
actual = ri.floating_forward_rules(fip)
expected = [dnat_from_floatingip_to_fixedip,
snat_from_fixedip_to_floatingip]
self.assertEqual(expected, actual)
def test_floating_mangle_rules_no_fip_ns(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
floating_ip = mock.Mock()
fixed_ip = mock.Mock()
internal_mark = mock.Mock()
self.assertFalse(ri.floating_mangle_rules(floating_ip, fixed_ip,
internal_mark))
def test_floating_mangle_rules(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
floating_ip = '15.1.2.3'
fixed_ip = '192.168.0.1'
internal_mark = 'fake_mark'
rtr_2_fip_name = 'fake_router'
instance = mock.Mock()
instance.get_rtr_ext_device_name = mock.Mock(
return_value=rtr_2_fip_name)
ri.fip_ns = instance
mark_traffic_to_floating_ip = (
'floatingip', '-d %s/32 -i %s -j MARK --set-xmark %s' % (
floating_ip, rtr_2_fip_name, internal_mark))
mark_traffic_from_fixed_ip = (
'FORWARD', '-s %s/32 -j $float-snat' % fixed_ip)
actual = ri.floating_mangle_rules(floating_ip, fixed_ip, internal_mark)
expected = [mark_traffic_to_floating_ip, mark_traffic_from_fixed_ip]
self.assertEqual(expected, actual)
@mock.patch.object(ip_lib, 'send_ip_addr_adv_notif')
@mock.patch.object(ip_lib, 'IPDevice')
@mock.patch.object(ip_lib, 'add_ip_rule')
def test_floating_ip_added_dist(self, mock_add_ip_rule, mIPDevice,
mock_adv_notif):
router = mock.MagicMock()
ri = self._create_router(router)
ri.ex_gw_port = ri.router['gw_port']
ext_net_id = _uuid()
subnet_id = _uuid()
agent_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
'prefixlen': 24,
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': ext_net_id,
'mac_address': 'ca:fe:de:ad:be:ef'}
fip = {'id': _uuid(),
'host': HOSTNAME,
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.1',
'floating_network_id': ext_net_id,
'port_id': _uuid()}
ri.fip_ns = mock.Mock()
ri.fip_ns.agent_gateway_port = agent_gw_port
ri.create_dvr_external_gateway_on_agent(ri.ex_gw_port)
ri._check_rtr_2_fip_connect = mock.Mock()
ri.connect_rtr_2_fip()
self.assertTrue(ri.rtr_fip_connect)
ri.fip_ns.allocate_rule_priority.return_value = FIP_PRI
subnet = lla.LinkLocalAddressPair('169.254.30.42/31')
ri.rtr_fip_subnet = subnet
ri.fip_ns.local_subnets = mock.Mock()
ri.fip_ns.local_subnets.allocate.return_value = subnet
ip_cidr = common_utils.ip_to_cidr(fip['floating_ip_address'])
ri.floating_ip_added_dist(fip, ip_cidr)
mock_add_ip_rule.assert_called_with(
namespace=ri.router_namespace.name, ip='192.168.0.1',
table=16, priority=FIP_PRI)
ri.fip_ns.local_subnets.allocate.assert_not_called()
ri.rtr_fip_subnet = None
ri.floating_ip_added_dist(fip, ip_cidr)
mock_add_ip_rule.assert_called_with(
namespace=ri.router_namespace.name, ip='192.168.0.1',
table=16, priority=FIP_PRI)
ri.fip_ns.local_subnets.allocate.assert_called_once_with(ri.router_id)
@mock.patch.object(ip_lib, 'IPWrapper')
@mock.patch.object(ip_lib, 'IPDevice')
def test_floating_ip_removed_dist(self, mIPDevice, mIPWrapper):
router = mock.MagicMock()
ri = self._create_router(router)
ri.ex_gw_port = ri.router['gw_port']
subnet_id = _uuid()
fixed_ip = '20.0.0.30'
agent_gw_port = {'fixed_ips': [{'ip_address': fixed_ip,
'prefixlen': 24,
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': _uuid(),
'mac_address': 'ca:fe:de:ad:be:ef'}
fip_cidr = '11.22.33.44/24'
ri.fip_ns = mock.Mock()
ri.fip_ns.get_name.return_value = 'fip_ns_name'
ri.floating_ips_dict['11.22.33.44'] = (fixed_ip, FIP_PRI)
ri.fip_2_rtr = '11.22.33.42'
ri.rtr_2_fip = '11.22.33.40'
ri.fip_ns.agent_gateway_port = agent_gw_port
s = lla.LinkLocalAddressPair('169.254.30.42/31')
ri.rtr_fip_subnet = s
ri.fip_ns.local_subnets = mock.Mock()
ri.floating_ip_removed_dist(fip_cidr)
self.mock_delete_ip_rule.assert_called_with(
ri.router_namespace.name, ip=fixed_ip, table=16, priority=FIP_PRI)
mIPDevice().route.delete_route.assert_called_with(fip_cidr,
via=str(s.ip))
ri.fip_ns.local_subnets.allocate.assert_not_called()
@mock.patch.object(ip_lib, 'add_ip_rule')
def test_floating_ip_moved_dist(self, mock_add_ip_rule):
router = mock.MagicMock()
ri = self._create_router(router)
floating_ip_address = '15.1.2.3'
fixed_ip = '192.168.0.1'
fip = {'floating_ip_address': floating_ip_address,
'fixed_ip_address': fixed_ip}
ri.floating_ips_dict['15.1.2.3'] = (fixed_ip, FIP_PRI)
ri.fip_ns = mock.Mock()
ri.fip_ns.allocate_rule_priority.return_value = FIP_PRI
ri.floating_ip_moved_dist(fip)
self.mock_delete_ip_rule.assert_called_once_with(
ri.router_namespace.name, ip=fixed_ip, table=16, priority=FIP_PRI)
ri.fip_ns.deallocate_rule_priority.assert_called_once_with(
floating_ip_address)
ri.fip_ns.allocate_rule_priority.assert_called_once_with(
floating_ip_address)
mock_add_ip_rule.assert_called_with(
namespace=ri.router_namespace.name, ip=fixed_ip,
table=16, priority=FIP_PRI)
def _test_add_floating_ip(self, ri, fip, is_failure=False):
if not is_failure:
ri.floating_ip_added_dist = mock.Mock(
return_value=lib_constants.FLOATINGIP_STATUS_ACTIVE)
else:
ri.floating_ip_added_dist = mock.Mock(
return_value=lib_constants.FLOATINGIP_STATUS_ERROR)
result = ri.add_floating_ip(fip,
mock.sentinel.interface_name,
mock.sentinel.device)
ri.floating_ip_added_dist.assert_called_once_with(
fip, mock.ANY)
return result
def test_add_floating_ip(self):
ri = self._create_router(mock.MagicMock())
ip = '15.1.2.3'
fip = {'floating_ip_address': ip}
result = self._test_add_floating_ip(ri, fip)
ri.floating_ip_added_dist.assert_called_once_with(fip, ip + '/32')
self.assertEqual(lib_constants.FLOATINGIP_STATUS_ACTIVE, result)
def test_add_floating_ip_failure(self):
ri = self._create_router(mock.MagicMock())
ip = '15.1.2.3'
fip = {'floating_ip_address': ip}
result = self._test_add_floating_ip(ri, fip, True)
ri.floating_ip_added_dist.assert_called_once_with(fip, ip + '/32')
self.assertEqual(lib_constants.FLOATINGIP_STATUS_ERROR, result)
@mock.patch.object(router_info.RouterInfo, 'remove_floating_ip')
def test_remove_floating_ip(self, super_remove_floating_ip):
ri = self._create_router(mock.MagicMock())
ri.floating_ip_removed_dist = mock.Mock()
ri.remove_floating_ip(mock.sentinel.device, mock.sentinel.ip_cidr)
self.assertFalse(super_remove_floating_ip.called)
ri.floating_ip_removed_dist.assert_called_once_with(
mock.sentinel.ip_cidr)
def test__get_internal_port(self):
ri = self._create_router()
port = {'fixed_ips': [{'subnet_id': mock.sentinel.subnet_id}]}
router_ports = [port]
ri.router.get.return_value = router_ports
self.assertEqual(port, ri._get_internal_port(mock.sentinel.subnet_id))
def test__get_internal_port_not_found(self):
ri = self._create_router()
port = {'fixed_ips': [{'subnet_id': mock.sentinel.subnet_id}]}
router_ports = [port]
ri.router.get.return_value = router_ports
self.assertIsNone(ri._get_internal_port(mock.sentinel.subnet_id2))
def test__get_snat_idx_ipv4(self):
ip_cidr = '101.12.13.00/24'
ri = self._create_router(mock.MagicMock())
snat_idx = ri._get_snat_idx(ip_cidr)
self.assertEqual(0x650C0D00, snat_idx)
def test__get_snat_idx_ipv6(self):
ip_cidr = '2620:0:a03:e100::/64'
ri = self._create_router(mock.MagicMock())
snat_idx = ri._get_snat_idx(ip_cidr)
self.assertEqual(0x3D345705, snat_idx)
def test__get_snat_idx_ipv6_below_32768(self):
ip_cidr = 'd488::/30'
ri = self._create_router(mock.MagicMock())
snat_idx = ri._get_snat_idx(ip_cidr)
self.assertEqual(0x40001BD6, snat_idx)
def test__set_subnet_arp_info(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
ports = ri.router.get(lib_constants.INTERFACE_KEY, [])
subnet_id = l3_test_common.get_subnet_id(ports[0])
test_ports = [{'mac_address': '00:11:22:33:44:55',
'device_owner': lib_constants.DEVICE_OWNER_DHCP,
'fixed_ips': [{'ip_address': '1.2.3.4',
'prefixlen': 24,
'subnet_id': subnet_id}]},
{'mac_address': '11:22:33:44:55:66',
'device_owner': lib_constants.DEVICE_OWNER_LOADBALANCER,
'fixed_ips': [{'ip_address': '1.2.3.5',
'prefixlen': 24,
'subnet_id': subnet_id}]},
{'mac_address': '22:33:44:55:66:77',
'device_owner':
lib_constants.DEVICE_OWNER_LOADBALANCERV2,
'fixed_ips': [{'ip_address': '1.2.3.6',
'prefixlen': 24,
'subnet_id': subnet_id}]}]
self.plugin_api.get_ports_by_subnet.return_value = test_ports
ports[0]['subnets'] = [{'id': subnet_id,
'cidr': '1.2.3.0/24'}]
with mock.patch.object(ri,
'_process_arp_cache_for_internal_port') as parp:
ri._set_subnet_arp_info(subnet_id)
self.assertEqual(1, parp.call_count)
self.mock_ip_dev.neigh.add.assert_called_once_with(
'1.2.3.4', '00:11:22:33:44:55')
router['distributed'] = False
ri._set_subnet_arp_info(subnet_id)
self.mock_ip_dev.neigh.add.never_called()
def test_add_arp_entry(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
arp_table = {'ip_address': '1.7.23.11',
'mac_address': '00:11:22:33:44:55',
'subnet_id': subnet_id}
payload = {'arp_table': arp_table, 'router_id': router['id']}
agent._router_added(router['id'], router)
agent.add_arp_entry(None, payload)
agent.router_deleted(None, router['id'])
self.mock_ip_dev.neigh.add.assert_called_once_with(
'1.7.23.11', '00:11:22:33:44:55')
def test_add_arp_entry_no_routerinfo(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
arp_table = {'ip_address': '1.7.23.11',
'mac_address': '00:11:22:33:44:55',
'subnet_id': subnet_id}
payload = {'arp_table': arp_table, 'router_id': router['id']}
agent.add_arp_entry(None, payload)
def test__update_arp_entry_with_no_subnet(self):
self._set_ri_kwargs(mock.sentinel.agent,
'foo_router_id',
{'distributed': True, 'gw_port_host': HOSTNAME})
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
ri.get_internal_device_name = mock.Mock()
ri._update_arp_entry(mock.ANY, mock.ANY, 'foo_subnet_id', 'add')
self.assertFalse(ri.get_internal_device_name.call_count)
def _setup_test_for_arp_entry_cache(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
subnet_id = l3_test_common.get_subnet_id(
ri.router[lib_constants.INTERFACE_KEY][0])
return ri, subnet_id
def test__update_arp_entry_calls_arp_cache_with_no_device(self):
ri, subnet_id = self._setup_test_for_arp_entry_cache()
state = True
with mock.patch('neutron.agent.linux.ip_lib.IPDevice') as rtrdev,\
mock.patch.object(ri, '_cache_arp_entry') as arp_cache:
rtrdev.return_value.exists.return_value = False
state = ri._update_arp_entry(
mock.ANY, mock.ANY, subnet_id, 'add')
self.assertFalse(state)
self.assertTrue(arp_cache.called)
arp_cache.assert_called_once_with(mock.ANY, mock.ANY,
subnet_id, 'add')
self.assertFalse(rtrdev.neigh.add.called)
def test__process_arp_cache_for_internal_port(self):
ri, subnet_id = self._setup_test_for_arp_entry_cache()
ri._cache_arp_entry('1.7.23.11', '00:11:22:33:44:55',
subnet_id, 'add')
self.assertEqual(1, len(ri._pending_arp_set))
with mock.patch.object(ri, '_update_arp_entry') as update_arp:
update_arp.return_value = True
ri._process_arp_cache_for_internal_port(subnet_id)
self.assertEqual(0, len(ri._pending_arp_set))
def test__delete_arp_cache_for_internal_port(self):
ri, subnet_id = self._setup_test_for_arp_entry_cache()
ri._cache_arp_entry('1.7.23.11', '00:11:22:33:44:55',
subnet_id, 'add')
self.assertEqual(1, len(ri._pending_arp_set))
ri._delete_arp_cache_for_internal_port(subnet_id)
self.assertEqual(0, len(ri._pending_arp_set))
def test_del_arp_entry(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['distributed'] = True
subnet_id = l3_test_common.get_subnet_id(
router[lib_constants.INTERFACE_KEY][0])
arp_table = {'ip_address': '1.5.25.15',
'mac_address': '00:44:33:22:11:55',
'subnet_id': subnet_id}
payload = {'arp_table': arp_table, 'router_id': router['id']}
agent._router_added(router['id'], router)
agent.add_arp_entry(None, payload)
agent.del_arp_entry(None, payload)
self.mock_ip_dev.neigh.delete.assert_called_once_with(
'1.5.25.15', '00:44:33:22:11:55')
agent.router_deleted(None, router['id'])
def test_get_floating_agent_gw_interfaces(self):
fake_network_id = _uuid()
subnet_id = _uuid()
agent_gateway_port = (
[{'fixed_ips': [{'ip_address': '20.0.0.30',
'prefixlen': 24,
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
portbindings.HOST_ID: 'myhost',
'device_owner': lib_constants.DEVICE_OWNER_AGENT_GW,
'network_id': fake_network_id,
'mac_address': 'ca:fe:de:ad:be:ef'}]
)
router = l3_test_common.prepare_router_data(enable_snat=True)
router[lib_constants.FLOATINGIP_AGENT_INTF_KEY] = agent_gateway_port
router['distributed'] = True
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
self.assertEqual(
agent_gateway_port[0],
ri.get_floating_agent_gw_interface(fake_network_id))
def test_process_router_dist_floating_ip_add(self):
fake_floatingips = {'floatingips': [
{'id': _uuid(),
'host': HOSTNAME,
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.1',
'floating_network_id': mock.sentinel.ext_net_id,
'port_id': _uuid()},
{'id': _uuid(),
'host': 'some-other-host',
'floating_ip_address': '15.1.2.4',
'fixed_ip_address': '192.168.0.10',
'floating_network_id': mock.sentinel.ext_net_id,
'port_id': _uuid()}]}
router = l3_test_common.prepare_router_data(enable_snat=True)
router[lib_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
router['distributed'] = True
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
fip_ns = agent.get_fip_ns(mock.sentinel.ext_net_id)
subnet_id = _uuid()
fip_ns.agent_gateway_port = (
{'fixed_ips': [{'ip_address': '20.0.0.30',
'subnet_id': subnet_id}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': _uuid(),
'mac_address': 'ca:fe:de:ad:be:ef'}
)
def _test_ext_gw_updated_dvr_agent_mode(self, host,
agent_mode, expected_call_count):
router = l3_test_common.prepare_router_data(num_internal_ports=2)
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_router.DvrLocalRouter(HOSTNAME, **self.ri_kwargs)
interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(self,
ri)
ri._external_gateway_added = mock.Mock()
router['gw_port_host'] = host
agent.conf.agent_mode = agent_mode
ri.external_gateway_updated(ex_gw_port, interface_name)
self.assertEqual(expected_call_count,
ri._external_gateway_added.call_count)
def test_ext_gw_updated_dvr_agent_mode(self):
self._test_ext_gw_updated_dvr_agent_mode('any-foo', 'dvr', 0)
def test_ext_gw_updated_dvr_agent_mode_host(self):
self._test_ext_gw_updated_dvr_agent_mode(HOSTNAME,
'dvr', 0)
def test_external_gateway_removed_ext_gw_port_and_fip(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['gw_port_host'] = HOSTNAME
self.mock_driver.unplug.reset_mock()
external_net_id = router['gw_port']['network_id']
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_edge_rtr.DvrEdgeRouter(HOSTNAME, **self.ri_kwargs)
ri.remove_floating_ip = mock.Mock()
agent._fetch_external_net_id = mock.Mock(return_value=external_net_id)
ri.ex_gw_port = ri.router['gw_port']
del ri.router['gw_port']
ri.external_gateway_added(
ri.ex_gw_port,
ri.get_external_device_name(ri.ex_gw_port['id']))
ri.fip_ns = None
nat = ri.iptables_manager.ipv4['nat']
nat.clear_rules_by_tag = mock.Mock()
nat.add_rule = mock.Mock()
ri.fip_ns = agent.get_fip_ns(external_net_id)
subnet_id = _uuid()
ri.fip_ns.agent_gateway_port = {
'fixed_ips': [{
'ip_address': '20.0.0.30',
'prefixlen': 24,
'subnet_id': subnet_id
}],
'subnets': [{'id': subnet_id,
'cidr': '20.0.0.0/24',
'gateway_ip': '20.0.0.1'}],
'id': _uuid(),
'network_id': external_net_id,
'mac_address': 'ca:fe:de:ad:be:ef'}
vm_floating_ip = '19.4.4.2'
ri.floating_ips_dict[vm_floating_ip] = FIP_PRI
ri.rtr_fip_subnet = ri.fip_ns.local_subnets.allocate(ri.router_id)
_, fip_to_rtr = ri.rtr_fip_subnet.get_pair()
self.mock_ip.get_devices.return_value = [
l3_test_common.FakeDev(ri.fip_ns.get_ext_device_name(_uuid()))]
ri.get_router_cidrs = mock.Mock(
return_value={vm_floating_ip + '/32', '19.4.4.1/24'})
self.device_exists.return_value = True
ri.external_gateway_removed(
ri.ex_gw_port,
ri.get_external_device_name(ri.ex_gw_port['id']))
ri.remove_floating_ip.assert_called_once_with(self.mock_ip_dev,
'19.4.4.2/32')
def test_get_router_cidrs_no_fip_ns(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
device = mock.Mock()
self.assertFalse(ri.get_router_cidrs(device))
def test_get_router_cidrs_no_device_exists(self):
router = mock.MagicMock()
router.get.return_value = [{'host': HOSTNAME},
{'host': mock.sentinel.otherhost}]
ri = self._create_router(router)
fake_fip_ns = mock.Mock(return_value=True)
fake_fip_ns.get_name = mock.Mock(return_value=None)
fake_fip_ns.get_int_device_name = mock.Mock(return_value=None)
ri.fip_ns = fake_fip_ns
device = mock.Mock()
device.exists = mock.Mock(return_value=False)
with mock.patch.object(ip_lib, 'IPDevice', return_value=device):
self.assertFalse(ri.get_router_cidrs(device))
@mock.patch.object(router_info.RouterInfo, '_add_snat_rules')
@mock.patch.object(dvr_router.DvrLocalRouter, '_handle_router_snat_rules')
def test_handle_snat_rule_for_centralized_fip(
self, _add_snat_rules, _handle_router_snat_rules):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
self.mock_driver.unplug.reset_mock()
router = l3_test_common.prepare_router_data(enable_floating_ip=True)
router['gw_port_host'] = HOSTNAME
self._set_ri_kwargs(agent, router['id'], router)
ri = dvr_edge_rtr.DvrEdgeRouter(HOSTNAME, **self.ri_kwargs)
ri.snat_iptables_manager = mock.MagicMock()
ipv4_nat = ri.snat_iptables_manager.ipv4['nat']
interface_name, ex_gw_port = l3_test_common.prepare_ext_gw_test(self,
ri)
ri._handle_router_snat_rules(ex_gw_port, interface_name)
ipv4_nat.add_rule.assert_called_once_with('snat', '-j $float-snat')
@mock.patch.object(dvr_edge_rtr.DvrEdgeRouter,
'add_centralized_floatingip')
def test_add_centralized_floatingip_dvr_ha(
self,
super_add_centralized_floatingip):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
router = l3_test_common.prepare_router_data(
num_internal_ports=2, enable_ha=True)
router['gw_port_host'] = HOSTNAME
self.mock_driver.unplug.reset_mock()
self._set_ri_kwargs(agent, router['id'], router)
fip = {'id': _uuid()}
fip_cidr = '11.22.33.44/24'
ri = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri.is_router_master = mock.Mock(return_value=False)
ri._add_vip = mock.Mock()
interface_name = ri.get_snat_external_device_interface_name(
ri.get_ex_gw_port())
ri.add_centralized_floatingip(fip, fip_cidr)
ri._add_vip.assert_called_once_with(fip_cidr, interface_name)
super_add_centralized_floatingip.assert_not_called()
router[lib_constants.HA_INTERFACE_KEY]['status'] = 'DOWN'
self._set_ri_kwargs(agent, router['id'], router)
ri_1 = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri_1.is_router_master = mock.Mock(return_value=True)
ri_1._add_vip = mock.Mock()
interface_name = ri_1.get_snat_external_device_interface_name(
ri_1.get_ex_gw_port())
ri_1.add_centralized_floatingip(fip, fip_cidr)
ri_1._add_vip.assert_called_once_with(fip_cidr, interface_name)
super_add_centralized_floatingip.assert_not_called()
router[lib_constants.HA_INTERFACE_KEY]['status'] = 'ACTIVE'
self._set_ri_kwargs(agent, router['id'], router)
ri_2 = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri_2.is_router_master = mock.Mock(return_value=True)
ri_2._add_vip = mock.Mock()
interface_name = ri_2.get_snat_external_device_interface_name(
ri_2.get_ex_gw_port())
ri_2.add_centralized_floatingip(fip, fip_cidr)
ri_2._add_vip.assert_called_once_with(fip_cidr, interface_name)
super_add_centralized_floatingip.assert_called_once_with(fip,
fip_cidr)
@mock.patch.object(dvr_edge_rtr.DvrEdgeRouter,
'remove_centralized_floatingip')
def test_remove_centralized_floatingip(self,
super_remove_centralized_floatingip):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
router = l3_test_common.prepare_router_data(num_internal_ports=2)
router['gw_port_host'] = HOSTNAME
self.mock_driver.unplug.reset_mock()
self._set_ri_kwargs(agent, router['id'], router)
fip_cidr = '11.22.33.44/24'
ri = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri.is_router_master = mock.Mock(return_value=False)
ri._remove_vip = mock.Mock()
ri.remove_centralized_floatingip(fip_cidr)
ri._remove_vip.assert_called_once_with(fip_cidr)
super_remove_centralized_floatingip.assert_not_called()
ri1 = dvr_edge_ha_rtr.DvrEdgeHaRouter(HOSTNAME, [], **self.ri_kwargs)
ri1.is_router_master = mock.Mock(return_value=True)
ri1._remove_vip = mock.Mock()
ri1.remove_centralized_floatingip(fip_cidr)
ri1._remove_vip.assert_called_once_with(fip_cidr)
super_remove_centralized_floatingip.assert_called_once_with(fip_cidr)
| true
| true
|
79099c62b310485ccef413d96604a076dad40b20
| 358
|
py
|
Python
|
teoria/clase 29-03/algoritmos.py
|
pgentil/PC
|
6ee4e79f4594f4c733fb07106ac179575370956d
|
[
"MIT"
] | null | null | null |
teoria/clase 29-03/algoritmos.py
|
pgentil/PC
|
6ee4e79f4594f4c733fb07106ac179575370956d
|
[
"MIT"
] | null | null | null |
teoria/clase 29-03/algoritmos.py
|
pgentil/PC
|
6ee4e79f4594f4c733fb07106ac179575370956d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 29 11:46:26 2022
@author: Pedro
"""
def search(lista, target) -> int:
    # Linear search: return the index of the first match, or -1 if absent.
    for i in range(len(lista)):
        if lista[i] == target:
            return i
    return -1
def search2(lista, target) -> int:
    # Same linear search, written with enumerate() instead of range(len(...)).
    for i, element in enumerate(lista):
        if element == target:
            return i
    return -1
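# Illustrative usage sketch (not part of the original file): both helpers return
# the index of the first occurrence of target, or -1 when it is absent. The
# sample list below is hypothetical.
if __name__ == '__main__':
    datos = [3, 1, 4, 1, 5, 9]  # hypothetical sample list
    print(search(datos, 4))     # prints 2
    print(search2(datos, 7))    # prints -1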
| 19.888889
| 39
| 0.553073
|
def search(lista, target) -> int:
for i in range(len(lista)):
        if lista[i] == target:
return i
return -1
def search2(lista, target) -> int:
for i, element in enumerate(lista):
if element == target:
return i
return -1
| true
| true
|
79099db8ebbdccce894b07d9478c8ca2c6d71a27
| 3,723
|
py
|
Python
|
accelbyte_py_sdk/api/iam/models/model_get_users_response_with_pagination_v3.py
|
encyphered/accelbyte-python-sdk
|
09c1e989d7251de308150fdcd3119d662ca2d205
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/iam/models/model_get_users_response_with_pagination_v3.py
|
encyphered/accelbyte-python-sdk
|
09c1e989d7251de308150fdcd3119d662ca2d205
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/iam/models/model_get_users_response_with_pagination_v3.py
|
encyphered/accelbyte-python-sdk
|
09c1e989d7251de308150fdcd3119d662ca2d205
|
[
"MIT"
] | null | null | null |
# Auto-generated at 2021-09-27T17:12:31.553030+08:00
# from: Justice Iam Service (4.1.0)
# Copyright (c) 2018 - 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
from ..models.accountcommon_pagination_v3 import AccountcommonPaginationV3
from ..models.model_user_response_v3 import ModelUserResponseV3
class ModelGetUsersResponseWithPaginationV3(Model):
"""Model get users response with pagination V3
Properties:
data: (data) REQUIRED List[ModelUserResponseV3]
paging: (paging) REQUIRED AccountcommonPaginationV3
"""
# region fields
data: List[ModelUserResponseV3] # REQUIRED
paging: AccountcommonPaginationV3 # REQUIRED
# endregion fields
# region with_x methods
def with_data(self, value: List[ModelUserResponseV3]) -> ModelGetUsersResponseWithPaginationV3:
self.data = value
return self
def with_paging(self, value: AccountcommonPaginationV3) -> ModelGetUsersResponseWithPaginationV3:
self.paging = value
return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result = {}
if hasattr(self, "data") and self.data:
result["data"] = [i0.to_dict(include_empty=include_empty) for i0 in self.data]
elif include_empty:
result["data"] = []
if hasattr(self, "paging") and self.paging:
result["paging"] = self.paging.to_dict(include_empty=include_empty)
elif include_empty:
result["paging"] = AccountcommonPaginationV3()
return result
# endregion to methods
# region static methods
@classmethod
def create(
cls,
data: List[ModelUserResponseV3],
paging: AccountcommonPaginationV3,
) -> ModelGetUsersResponseWithPaginationV3:
instance = cls()
instance.data = data
instance.paging = paging
return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> ModelGetUsersResponseWithPaginationV3:
instance = cls()
if not dict_:
return instance
if "data" in dict_ and dict_["data"] is not None:
instance.data = [ModelUserResponseV3.create_from_dict(i0, include_empty=include_empty) for i0 in dict_["data"]]
elif include_empty:
instance.data = []
if "paging" in dict_ and dict_["paging"] is not None:
instance.paging = AccountcommonPaginationV3.create_from_dict(dict_["paging"], include_empty=include_empty)
elif include_empty:
instance.paging = AccountcommonPaginationV3()
return instance
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"data": "data",
"paging": "paging",
}
# endregion static methods
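# Illustrative usage sketch (not part of the generated SDK file): exercises the
# create_from_dict / to_dict round trip defined above. The empty "paging"
# payload and the helper name are assumptions for the example only.
def _example_round_trip() -> dict:
    raw = {"data": [], "paging": {}}  # hypothetical API response payload
    page = ModelGetUsersResponseWithPaginationV3.create_from_dict(
        raw, include_empty=True)
    return page.to_dict(include_empty=True)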
| 33.540541
| 123
| 0.665861
|
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
from ..models.accountcommon_pagination_v3 import AccountcommonPaginationV3
from ..models.model_user_response_v3 import ModelUserResponseV3
class ModelGetUsersResponseWithPaginationV3(Model):
data: List[ModelUserResponseV3]
paging: AccountcommonPaginationV3
def with_data(self, value: List[ModelUserResponseV3]) -> ModelGetUsersResponseWithPaginationV3:
self.data = value
return self
def with_paging(self, value: AccountcommonPaginationV3) -> ModelGetUsersResponseWithPaginationV3:
self.paging = value
return self
def to_dict(self, include_empty: bool = False) -> dict:
result = {}
if hasattr(self, "data") and self.data:
result["data"] = [i0.to_dict(include_empty=include_empty) for i0 in self.data]
elif include_empty:
result["data"] = []
if hasattr(self, "paging") and self.paging:
result["paging"] = self.paging.to_dict(include_empty=include_empty)
elif include_empty:
result["paging"] = AccountcommonPaginationV3()
return result
@classmethod
def create(
cls,
data: List[ModelUserResponseV3],
paging: AccountcommonPaginationV3,
) -> ModelGetUsersResponseWithPaginationV3:
instance = cls()
instance.data = data
instance.paging = paging
return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> ModelGetUsersResponseWithPaginationV3:
instance = cls()
if not dict_:
return instance
if "data" in dict_ and dict_["data"] is not None:
instance.data = [ModelUserResponseV3.create_from_dict(i0, include_empty=include_empty) for i0 in dict_["data"]]
elif include_empty:
instance.data = []
if "paging" in dict_ and dict_["paging"] is not None:
instance.paging = AccountcommonPaginationV3.create_from_dict(dict_["paging"], include_empty=include_empty)
elif include_empty:
instance.paging = AccountcommonPaginationV3()
return instance
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"data": "data",
"paging": "paging",
}
| true
| true
|
79099ddfb307b8b2360a625cf4d8d5e4c7c305fe
| 10,551
|
py
|
Python
|
main.py
|
yoonseo0917/PKUAutoSubmit
|
caf5f3f665c41c64ec24cc14cb8382ee8a6c48a4
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
yoonseo0917/PKUAutoSubmit
|
caf5f3f665c41c64ec24cc14cb8382ee8a6c48a4
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
yoonseo0917/PKUAutoSubmit
|
caf5f3f665c41c64ec24cc14cb8382ee8a6c48a4
|
[
"Apache-2.0"
] | null | null | null |
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver import Firefox, Chrome, PhantomJS
from selenium import webdriver
from argparse import ArgumentParser
from urllib.parse import quote
import time
import copy
import sys
import os
import smtplib
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
TIMEOUT = 20
TIMESLP = 3
my_sender = '1692484707@qq.com'  # sender's email account
my_pass = 'fujkixpkjiyhcaji'  # sender's email SMTP authorization code
my_user = '18310451167@163.com'  # recipient's email account
def mail():
ret = True
try:
cur_time = time.strftime("%d/%m/%Y")
msgRoot = MIMEMultipart('related')
msgRoot['From'] = Header('PKU-AutoSubmit', 'utf-8')
msgRoot['To'] = Header('student', 'utf-8')
subject = cur_time + ' 报备成功!'
msgRoot['Subject'] = Header(subject, 'utf-8')
msgAlternative = MIMEMultipart('alternative')
msgRoot.attach(msgAlternative)
mail_msg = """
<p>自动报备成功!</p>
<p>截图:</p>
<p><img src="cid:image1"></p>
"""
msgAlternative.attach(MIMEText(mail_msg, 'html', 'utf-8'))
        # Read the screenshot image from the current directory
fp = open('result.png', 'rb')
msgImage = MIMEImage(fp.read())
fp.close()
        # Define the image Content-ID that the HTML body references
msgImage.add_header('Content-ID', '<image1>')
msgRoot.attach(msgImage)
        server = smtplib.SMTP_SSL("smtp.qq.com", 465)  # sender's SMTP server over SSL, port 465
        server.login(my_sender, my_pass)  # sender's email account and password/authorization code
        server.sendmail(my_sender, [my_user, ], msgRoot.as_string())  # sender, recipient list, message
        server.quit()  # close the connection
    except Exception:  # any failure above falls through to report ret = False
ret = False
return ret
def login(driver, username, password, failed=0):
if failed == 3:
raise Exception('门户登录失败')
iaaaUrl = 'https://iaaa.pku.edu.cn/iaaa/oauth.jsp'
appName = quote('北京大学校内信息门户新版')
redirectUrl = 'https://portal.pku.edu.cn/portal2017/ssoLogin.do'
driver.get('https://portal.pku.edu.cn/portal2017/')
driver.get(
f'{iaaaUrl}?appID=portal2017&appName={appName}&redirectUrl={redirectUrl}'
)
print('门户登陆中...')
driver.find_element_by_id('user_name').send_keys(username)
time.sleep(TIMESLP)
driver.find_element_by_id('password').send_keys(password)
time.sleep(TIMESLP)
driver.find_element_by_id('logon_button').click()
try:
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.LINK_TEXT, '我知道了')))
except:
pass
else:
driver.find_element_by_link_text('我知道了').click()
try:
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'all')))
except:
login(driver, username, password, failed + 1)
else:
print('门户登录成功!')
def go_to_application_out(driver):
driver.find_element_by_id('all').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'tag_s_stuCampusExEnReq')))
driver.find_element_by_id('tag_s_stuCampusExEnReq').click()
time.sleep(TIMESLP)
driver.switch_to.window(driver.window_handles[-1])
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
time.sleep(TIMESLP)
driver.find_element_by_class_name('el-card__body').click()
time.sleep(TIMESLP)
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-input__inner')))
def go_to_application_in(driver):
driver.get('https://portal.pku.edu.cn/portal2017/#/bizCenter')
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'all')))
driver.find_element_by_id('all').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'tag_s_stuCampusExEnReq')))
driver.find_element_by_id('tag_s_stuCampusExEnReq').click()
time.sleep(TIMESLP)
driver.switch_to.window(driver.window_handles[-1])
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
time.sleep(TIMESLP)
driver.find_element_by_class_name('el-card__body').click()
time.sleep(TIMESLP)
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-select')))
def select_in_out(driver, way):
driver.find_element_by_class_name('el-select').click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{way}"]').click()
def select_campus(driver, campus):
driver.find_elements_by_class_name('el-select')[1].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{campus}"]').click()
def select_destination(driver, destination):
driver.find_elements_by_class_name('el-select')[2].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{destination}"]').click()
def select_district(driver, district):
driver.find_elements_by_class_name('el-select')[3].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{district}"]').click()
def write_reason(driver, reason):
driver.find_element_by_class_name('el-textarea__inner').send_keys(
f'{reason}')
time.sleep(TIMESLP)
def write_track(driver, track):
driver.find_elements_by_class_name('el-textarea__inner')[1].send_keys(
f'{track}')
time.sleep(TIMESLP)
def write_street(driver, street):
driver.find_elements_by_class_name('el-textarea__inner')[1].send_keys(
f'{street}')
time.sleep(TIMESLP)
def click_check(driver):
driver.find_element_by_class_name('el-checkbox__label').click()
time.sleep(TIMESLP)
def click_inPeking(driver):
driver.find_element_by_class_name('el-radio__inner').click()
time.sleep(TIMESLP)
def submit(driver):
driver.find_element_by_xpath(
'//button/span[contains(text(),"保存")]').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located(
(By.XPATH, '(//button/span[contains(text(),"提交")])[3]')))
driver.find_element_by_xpath(
'(//button/span[contains(text(),"提交")])[3]').click()
time.sleep(TIMESLP)
def screen_capture(driver):
driver.back()
driver.back()
WebDriverWait(driver, 5).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
driver.find_elements_by_class_name('el-card__body')[1].click()
WebDriverWait(driver, 5).until(
EC.visibility_of_element_located(
(By.XPATH, '//button/span[contains(text(),"加载更多")]')))
driver.maximize_window()
time.sleep(0.1)
driver.save_screenshot('result.png')
print('备案历史截图已保存')
def fill_out(driver, campus, reason, destination, track):
print('开始填报出校备案')
print('选择出校/入校 ', end='')
select_in_out(driver, '出校')
print('Done')
print('选择校区 ', end='')
select_campus(driver, campus)
print('Done')
print('填写出入校事由 ', end='')
write_reason(driver, reason)
print('Done')
print('选择出校目的地 ', end='')
select_destination(driver, destination)
print('Done')
print('填写出校行动轨迹 ', end='')
write_track(driver, track)
print('Done')
click_check(driver)
submit(driver)
print('出校备案填报完毕!')
def fill_in(driver, campus, reason, habitation, district, street):
print('开始填报入校备案')
print('选择出校/入校 ', end='')
select_in_out(driver, '入校')
print('Done')
print('填写出入校事由 ', end='')
write_reason(driver, reason)
print('Done')
if habitation != '北京':
raise Exception('暂不支持京外入校备案,请手动填写')
print('选择居住地所在区 ', end='')
select_district(driver, district)
print('Done')
print('填写居住地所在街道 ', end='')
write_street(driver, street)
print('Done')
click_inPeking(driver)
click_check(driver)
submit(driver)
print('入校备案填报完毕!')
def run(driver, username, password, campus, reason, destination, track,
habitation, district, street):
login(driver, username, password)
print('=================================')
go_to_application_out(driver)
fill_out(driver, campus, reason, destination, track)
print('=================================')
go_to_application_in(driver)
fill_in(driver, campus, reason, habitation, district, street)
print('=================================')
screen_capture(driver)
print('=================================')
ret = mail()
if ret:
print("邮件发送成功")
else:
print("邮件发送失败")
print('可以愉快的玩耍啦!')
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--username', '-u', type=str, help='用户名')
parser.add_argument('--password', '-p', type=str, help='密码')
parser.add_argument('--campus', type=str, help='所在校区, 燕园、万柳、畅春园、圆明园、中关新园', default='燕园')
parser.add_argument('--reason', type=str, help='出校原因, eg. 吃饭', default='上课')
parser.add_argument('--destination', type=str, help='出校目的地, eg. 北京', default='北京')
parser.add_argument('--track', type=str, help='出校轨迹, eg. 畅春园食堂', default='东南门-理教-勺园')
parser.add_argument('--habitation', type=str, help='入校前居住地, eg. 北京', default='北京')
parser.add_argument('--district', type=str, help='入校前居住所在区, eg. 海淀区', default='海淀区')
parser.add_argument('--street', type=str, help='入校前居住所在街道, eg. 燕园街道', default='燕园街道')
args = parser.parse_args()
args_public = copy.deepcopy(args)
args_public.password = 'xxxxxxxx'
print('Arguments: {}'.format(args_public))
print('Driver Launching...')
# driver = Firefox()
# driver = Chrome()
if sys.platform == 'darwin': # macOS
phantomjs_path = os.path.join('phantomjs', 'phantomjs-darwin')
elif sys.platform == 'linux': # linux
phantomjs_path = os.path.join('phantomjs', 'phantomjs-linux-x86_64')
else: # windows
phantomjs_path = os.path.join('phantomjs', 'phantomjs-windows.exe')
driver = PhantomJS(executable_path=phantomjs_path)
run(driver, args.username, args.password, args.campus, args.reason,
args.destination, args.track, args.habitation, args.district,
args.street)
driver.close()
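# Illustrative sketch (not part of the original script): driving run() directly,
# mirroring the ArgumentParser defaults above. The credentials and the phantomjs
# binary path below are placeholders, not real values.
def demo_run():
    phantomjs_path = os.path.join('phantomjs', 'phantomjs-linux-x86_64')
    driver = PhantomJS(executable_path=phantomjs_path)
    run(driver, 'your_student_id', 'your_iaaa_password',
        campus='燕园', reason='上课', destination='北京',
        track='东南门-理教-勺园', habitation='北京',
        district='海淀区', street='燕园街道')
    driver.close()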
| 31.58982
| 99
| 0.660696
|
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver import Firefox, Chrome, PhantomJS
from selenium import webdriver
from argparse import ArgumentParser
from urllib.parse import quote
import time
import copy
import sys
import os
import smtplib
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
TIMEOUT = 20
TIMESLP = 3
my_sender = '1692484707@qq.com'
my_pass = 'fujkixpkjiyhcaji'
my_user = '18310451167@163.com'
def mail():
ret = True
try:
cur_time = time.strftime("%d/%m/%Y")
msgRoot = MIMEMultipart('related')
msgRoot['From'] = Header('PKU-AutoSubmit', 'utf-8')
msgRoot['To'] = Header('student', 'utf-8')
subject = cur_time + ' 报备成功!'
msgRoot['Subject'] = Header(subject, 'utf-8')
msgAlternative = MIMEMultipart('alternative')
msgRoot.attach(msgAlternative)
mail_msg = """
<p>自动报备成功!</p>
<p>截图:</p>
<p><img src="cid:image1"></p>
"""
msgAlternative.attach(MIMEText(mail_msg, 'html', 'utf-8'))
fp = open('result.png', 'rb')
msgImage = MIMEImage(fp.read())
fp.close()
msgImage.add_header('Content-ID', '<image1>')
msgRoot.attach(msgImage)
server = smtplib.SMTP_SSL("smtp.qq.com", 465)
server.login(my_sender, my_pass)
server.sendmail(my_sender, [my_user, ], msgRoot.as_string())
server.quit()
except Exception:
ret = False
return ret
def login(driver, username, password, failed=0):
if failed == 3:
raise Exception('门户登录失败')
iaaaUrl = 'https://iaaa.pku.edu.cn/iaaa/oauth.jsp'
appName = quote('北京大学校内信息门户新版')
redirectUrl = 'https://portal.pku.edu.cn/portal2017/ssoLogin.do'
driver.get('https://portal.pku.edu.cn/portal2017/')
driver.get(
f'{iaaaUrl}?appID=portal2017&appName={appName}&redirectUrl={redirectUrl}'
)
print('门户登陆中...')
driver.find_element_by_id('user_name').send_keys(username)
time.sleep(TIMESLP)
driver.find_element_by_id('password').send_keys(password)
time.sleep(TIMESLP)
driver.find_element_by_id('logon_button').click()
try:
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.LINK_TEXT, '我知道了')))
except:
pass
else:
driver.find_element_by_link_text('我知道了').click()
try:
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'all')))
except:
login(driver, username, password, failed + 1)
else:
print('门户登录成功!')
def go_to_application_out(driver):
driver.find_element_by_id('all').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'tag_s_stuCampusExEnReq')))
driver.find_element_by_id('tag_s_stuCampusExEnReq').click()
time.sleep(TIMESLP)
driver.switch_to.window(driver.window_handles[-1])
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
time.sleep(TIMESLP)
driver.find_element_by_class_name('el-card__body').click()
time.sleep(TIMESLP)
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-input__inner')))
def go_to_application_in(driver):
driver.get('https://portal.pku.edu.cn/portal2017/#/bizCenter')
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'all')))
driver.find_element_by_id('all').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'tag_s_stuCampusExEnReq')))
driver.find_element_by_id('tag_s_stuCampusExEnReq').click()
time.sleep(TIMESLP)
driver.switch_to.window(driver.window_handles[-1])
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
time.sleep(TIMESLP)
driver.find_element_by_class_name('el-card__body').click()
time.sleep(TIMESLP)
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-select')))
def select_in_out(driver, way):
driver.find_element_by_class_name('el-select').click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{way}"]').click()
def select_campus(driver, campus):
driver.find_elements_by_class_name('el-select')[1].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{campus}"]').click()
def select_destination(driver, destination):
driver.find_elements_by_class_name('el-select')[2].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{destination}"]').click()
def select_district(driver, district):
driver.find_elements_by_class_name('el-select')[3].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{district}"]').click()
def write_reason(driver, reason):
driver.find_element_by_class_name('el-textarea__inner').send_keys(
f'{reason}')
time.sleep(TIMESLP)
def write_track(driver, track):
driver.find_elements_by_class_name('el-textarea__inner')[1].send_keys(
f'{track}')
time.sleep(TIMESLP)
def write_street(driver, street):
driver.find_elements_by_class_name('el-textarea__inner')[1].send_keys(
f'{street}')
time.sleep(TIMESLP)
def click_check(driver):
driver.find_element_by_class_name('el-checkbox__label').click()
time.sleep(TIMESLP)
def click_inPeking(driver):
driver.find_element_by_class_name('el-radio__inner').click()
time.sleep(TIMESLP)
def submit(driver):
driver.find_element_by_xpath(
'//button/span[contains(text(),"保存")]').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located(
(By.XPATH, '(//button/span[contains(text(),"提交")])[3]')))
driver.find_element_by_xpath(
'(//button/span[contains(text(),"提交")])[3]').click()
time.sleep(TIMESLP)
def screen_capture(driver):
driver.back()
driver.back()
WebDriverWait(driver, 5).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
driver.find_elements_by_class_name('el-card__body')[1].click()
WebDriverWait(driver, 5).until(
EC.visibility_of_element_located(
(By.XPATH, '//button/span[contains(text(),"加载更多")]')))
driver.maximize_window()
time.sleep(0.1)
driver.save_screenshot('result.png')
print('备案历史截图已保存')
def fill_out(driver, campus, reason, destination, track):
print('开始填报出校备案')
print('选择出校/入校 ', end='')
select_in_out(driver, '出校')
print('Done')
print('选择校区 ', end='')
select_campus(driver, campus)
print('Done')
print('填写出入校事由 ', end='')
write_reason(driver, reason)
print('Done')
print('选择出校目的地 ', end='')
select_destination(driver, destination)
print('Done')
print('填写出校行动轨迹 ', end='')
write_track(driver, track)
print('Done')
click_check(driver)
submit(driver)
print('出校备案填报完毕!')
def fill_in(driver, campus, reason, habitation, district, street):
print('开始填报入校备案')
print('选择出校/入校 ', end='')
select_in_out(driver, '入校')
print('Done')
print('填写出入校事由 ', end='')
write_reason(driver, reason)
print('Done')
if habitation != '北京':
raise Exception('暂不支持京外入校备案,请手动填写')
print('选择居住地所在区 ', end='')
select_district(driver, district)
print('Done')
print('填写居住地所在街道 ', end='')
write_street(driver, street)
print('Done')
click_inPeking(driver)
click_check(driver)
submit(driver)
print('入校备案填报完毕!')
def run(driver, username, password, campus, reason, destination, track,
habitation, district, street):
login(driver, username, password)
print('=================================')
go_to_application_out(driver)
fill_out(driver, campus, reason, destination, track)
print('=================================')
go_to_application_in(driver)
fill_in(driver, campus, reason, habitation, district, street)
print('=================================')
screen_capture(driver)
print('=================================')
ret = mail()
if ret:
print("邮件发送成功")
else:
print("邮件发送失败")
print('可以愉快的玩耍啦!')
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--username', '-u', type=str, help='用户名')
parser.add_argument('--password', '-p', type=str, help='密码')
parser.add_argument('--campus', type=str, help='所在校区, 燕园、万柳、畅春园、圆明园、中关新园', default='燕园')
parser.add_argument('--reason', type=str, help='出校原因, eg. 吃饭', default='上课')
parser.add_argument('--destination', type=str, help='出校目的地, eg. 北京', default='北京')
parser.add_argument('--track', type=str, help='出校轨迹, eg. 畅春园食堂', default='东南门-理教-勺园')
parser.add_argument('--habitation', type=str, help='入校前居住地, eg. 北京', default='北京')
parser.add_argument('--district', type=str, help='入校前居住所在区, eg. 海淀区', default='海淀区')
parser.add_argument('--street', type=str, help='入校前居住所在街道, eg. 燕园街道', default='燕园街道')
args = parser.parse_args()
args_public = copy.deepcopy(args)
args_public.password = 'xxxxxxxx'
print('Arguments: {}'.format(args_public))
print('Driver Launching...')
if sys.platform == 'darwin':
phantomjs_path = os.path.join('phantomjs', 'phantomjs-darwin')
elif sys.platform == 'linux':
phantomjs_path = os.path.join('phantomjs', 'phantomjs-linux-x86_64')
else:
phantomjs_path = os.path.join('phantomjs', 'phantomjs-windows.exe')
driver = PhantomJS(executable_path=phantomjs_path)
run(driver, args.username, args.password, args.campus, args.reason,
args.destination, args.track, args.habitation, args.district,
args.street)
driver.close()
| true
| true
|
79099e6ddcb0bb37384c99a34d483635c5c5d7c8
| 8,625
|
py
|
Python
|
zerver/lib/webhooks/git.py
|
roberthoenig/zulip
|
5d6724345a8ba4896d21478be2e33e624f8ac8ab
|
[
"Apache-2.0"
] | null | null | null |
zerver/lib/webhooks/git.py
|
roberthoenig/zulip
|
5d6724345a8ba4896d21478be2e33e624f8ac8ab
|
[
"Apache-2.0"
] | null | null | null |
zerver/lib/webhooks/git.py
|
roberthoenig/zulip
|
5d6724345a8ba4896d21478be2e33e624f8ac8ab
|
[
"Apache-2.0"
] | null | null | null |
from typing import Optional, Any, Dict, List, Text, Tuple
from collections import defaultdict
SUBJECT_WITH_BRANCH_TEMPLATE = u'{repo} / {branch}'
SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE = u'{repo} / {type} #{id} {title}'
EMPTY_SHA = '0000000000000000000000000000000000000000'
COMMITS_LIMIT = 20
COMMIT_ROW_TEMPLATE = u'* {commit_msg} ([{commit_short_sha}]({commit_url}))\n'
COMMITS_MORE_THAN_LIMIT_TEMPLATE = u"[and {commits_number} more commit(s)]"
COMMIT_OR_COMMITS = u"commit{}"
PUSH_PUSHED_TEXT_WITH_URL = u"[pushed]({compare_url}) {number_of_commits} {commit_or_commits}"
PUSH_PUSHED_TEXT_WITHOUT_URL = u"pushed {number_of_commits} {commit_or_commits}"
PUSH_COMMITS_MESSAGE_TEMPLATE_WITH_COMMITTERS = u"""{user_name} {pushed_text} to branch {branch_name}. {committers_details}.
{commits_data}
"""
PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS = u"""{user_name} {pushed_text} to branch {branch_name}.
{commits_data}
"""
PUSH_COMMITS_MESSAGE_EXTENSION = u"Commits by {}"
PUSH_COMMITTERS_LIMIT_INFO = 3
FORCE_PUSH_COMMITS_MESSAGE_TEMPLATE = u"{user_name} [force pushed]({url}) to branch {branch_name}. Head is now {head}"
CREATE_BRANCH_MESSAGE_TEMPLATE = u"{user_name} created [{branch_name}]({url}) branch"
REMOVE_BRANCH_MESSAGE_TEMPLATE = u"{user_name} deleted branch {branch_name}"
PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE = u"{user_name} {action} [{type}{id}]({url})"
PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE = u"(assigned to {assignee})"
PULL_REQUEST_BRANCH_INFO_TEMPLATE = u"\nfrom `{target}` to `{base}`"
SETUP_MESSAGE_TEMPLATE = u"{integration} webhook has been successfully configured"
SETUP_MESSAGE_USER_PART = u" by {user_name}"
CONTENT_MESSAGE_TEMPLATE = u"\n~~~ quote\n{message}\n~~~"
COMMITS_COMMENT_MESSAGE_TEMPLATE = u"{user_name} {action} on [{sha}]({url})"
PUSH_TAGS_MESSAGE_TEMPLATE = u"""{user_name} {action} tag {tag}"""
TAG_WITH_URL_TEMPLATE = u"[{tag_name}]({tag_url})"
TAG_WITHOUT_URL_TEMPLATE = u"{tag_name}"
def get_push_commits_event_message(user_name, compare_url, branch_name, commits_data, is_truncated=False):
# type: (Text, Optional[Text], Text, List[Dict[str, Any]], Optional[bool]) -> Text
pushed_message_template = PUSH_PUSHED_TEXT_WITH_URL if compare_url else PUSH_PUSHED_TEXT_WITHOUT_URL
pushed_text_message = pushed_message_template.format(
compare_url=compare_url,
number_of_commits=len(commits_data),
commit_or_commits=COMMIT_OR_COMMITS.format(u's' if len(commits_data) > 1 else u''))
committers_items = get_all_committers(commits_data) # type: List[Tuple[str, int]]
if len(committers_items) == 1 and user_name == committers_items[0][0]:
return PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS.format(
user_name=user_name,
pushed_text=pushed_text_message,
branch_name=branch_name,
commits_data=get_commits_content(commits_data, is_truncated),
).rstrip()
else:
committers_details = "{} ({})".format(*committers_items[0])
for name, number_of_commits in committers_items[1:-1]:
committers_details = "{}, {} ({})".format(committers_details, name, number_of_commits)
if len(committers_items) > 1:
committers_details = "{} and {} ({})".format(committers_details, *committers_items[-1])
return PUSH_COMMITS_MESSAGE_TEMPLATE_WITH_COMMITTERS.format(
user_name=user_name,
pushed_text=pushed_text_message,
branch_name=branch_name,
committers_details=PUSH_COMMITS_MESSAGE_EXTENSION.format(committers_details),
commits_data=get_commits_content(commits_data, is_truncated),
).rstrip()
def get_force_push_commits_event_message(user_name, url, branch_name, head):
# type: (Text, Text, Text, Text) -> Text
return FORCE_PUSH_COMMITS_MESSAGE_TEMPLATE.format(
user_name=user_name,
url=url,
branch_name=branch_name,
head=head
)
def get_create_branch_event_message(user_name, url, branch_name):
# type: (Text, Text, Text) -> Text
return CREATE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
url=url,
branch_name=branch_name,
)
def get_remove_branch_event_message(user_name, branch_name):
# type: (Text, Text) -> Text
return REMOVE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
branch_name=branch_name,
)
def get_pull_request_event_message(
user_name, action, url, number=None,
target_branch=None, base_branch=None,
message=None, assignee=None, type='PR'
):
# type: (Text, Text, Text, Optional[int], Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text]) -> Text
main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
type=type,
url=url,
id=" #{}".format(number) if number is not None else ''
)
if assignee:
main_message += PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE.format(assignee=assignee)
if target_branch and base_branch:
main_message += PULL_REQUEST_BRANCH_INFO_TEMPLATE.format(
target=target_branch,
base=base_branch
)
if message:
main_message += '\n' + CONTENT_MESSAGE_TEMPLATE.format(message=message)
return main_message.rstrip()
def get_setup_webhook_message(integration, user_name=None):
# type: (Text, Optional[Text]) -> Text
content = SETUP_MESSAGE_TEMPLATE.format(integration=integration)
if user_name:
content += SETUP_MESSAGE_USER_PART.format(user_name=user_name)
return content
def get_issue_event_message(user_name, action, url, number=None, message=None, assignee=None):
# type: (Text, Text, Text, Optional[int], Optional[Text], Optional[Text]) -> Text
return get_pull_request_event_message(
user_name,
action,
url,
number,
message=message,
assignee=assignee,
type='Issue'
)
def get_push_tag_event_message(user_name, tag_name, tag_url=None, action='pushed'):
# type: (Text, Text, Optional[Text], Optional[Text]) -> Text
if tag_url:
tag_part = TAG_WITH_URL_TEMPLATE.format(tag_name=tag_name, tag_url=tag_url)
else:
tag_part = TAG_WITHOUT_URL_TEMPLATE.format(tag_name=tag_name)
return PUSH_TAGS_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
tag=tag_part
)
def get_commits_comment_action_message(user_name, action, commit_url, sha, message=None):
# type: (Text, Text, Text, Text, Optional[Text]) -> Text
content = COMMITS_COMMENT_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
sha=get_short_sha(sha),
url=commit_url
)
if message is not None:
content += CONTENT_MESSAGE_TEMPLATE.format(
message=message
)
return content
def get_commits_content(commits_data, is_truncated=False):
# type: (List[Dict[str, Any]], Optional[bool]) -> Text
commits_content = u''
for commit in commits_data[:COMMITS_LIMIT]:
commits_content += COMMIT_ROW_TEMPLATE.format(
commit_short_sha=get_short_sha(commit.get('sha')),
commit_url=commit.get('url'),
commit_msg=commit.get('message').partition('\n')[0]
)
if len(commits_data) > COMMITS_LIMIT:
commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
commits_number=len(commits_data) - COMMITS_LIMIT
)
elif is_truncated:
commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
commits_number=''
        ).replace('  ', ' ')
return commits_content.rstrip()
def get_short_sha(sha):
# type: (Text) -> Text
return sha[:7]
def get_all_committers(commits_data):
# type: (List[Dict[str, Any]]) -> List[Tuple[str, int]]
committers = defaultdict(int) # type: Dict[str, int]
for commit in commits_data:
committers[commit['name']] += 1
# Sort by commit count, breaking ties alphabetically.
committers_items = sorted(list(committers.items()),
key=lambda item: (-item[1], item[0])) # type: List[Tuple[str, int]]
committers_values = [c_i[1] for c_i in committers_items] # type: List[int]
if len(committers) > PUSH_COMMITTERS_LIMIT_INFO:
others_number_of_commits = sum(committers_values[PUSH_COMMITTERS_LIMIT_INFO:])
committers_items = committers_items[:PUSH_COMMITTERS_LIMIT_INFO]
committers_items.append(('others', others_number_of_commits))
return committers_items
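# Illustrative sketch (not part of the original module): how the push-message
# helpers compose for a two-commit push by a single author; the commit dicts and
# URLs below are hypothetical.
#     commits = [
#         {'name': 'alice', 'sha': 'a1b2c3d4e5f6', 'url': 'https://git.example.com/c/a1b2c3d', 'message': 'Fix bug'},
#         {'name': 'alice', 'sha': 'b2c3d4e5f6a7', 'url': 'https://git.example.com/c/b2c3d4e', 'message': 'Add test'},
#     ]
#     get_push_commits_event_message('alice', None, 'master', commits)
#     # -> "alice pushed 2 commits to branch master.\n"
#     #    "* Fix bug ([a1b2c3d](https://git.example.com/c/a1b2c3d))\n"
#     #    "* Add test ([b2c3d4e](https://git.example.com/c/b2c3d4e))"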
| 39.930556
| 133
| 0.704464
|
from typing import Optional, Any, Dict, List, Text, Tuple
from collections import defaultdict
SUBJECT_WITH_BRANCH_TEMPLATE = u'{repo} / {branch}'
SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE = u'{repo} / {type} #{id} {title}'
EMPTY_SHA = '0000000000000000000000000000000000000000'
COMMITS_LIMIT = 20
COMMIT_ROW_TEMPLATE = u'* {commit_msg} ([{commit_short_sha}]({commit_url}))\n'
COMMITS_MORE_THAN_LIMIT_TEMPLATE = u"[and {commits_number} more commit(s)]"
COMMIT_OR_COMMITS = u"commit{}"
PUSH_PUSHED_TEXT_WITH_URL = u"[pushed]({compare_url}) {number_of_commits} {commit_or_commits}"
PUSH_PUSHED_TEXT_WITHOUT_URL = u"pushed {number_of_commits} {commit_or_commits}"
PUSH_COMMITS_MESSAGE_TEMPLATE_WITH_COMMITTERS = u"""{user_name} {pushed_text} to branch {branch_name}. {committers_details}.
{commits_data}
"""
PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS = u"""{user_name} {pushed_text} to branch {branch_name}.
{commits_data}
"""
PUSH_COMMITS_MESSAGE_EXTENSION = u"Commits by {}"
PUSH_COMMITTERS_LIMIT_INFO = 3
FORCE_PUSH_COMMITS_MESSAGE_TEMPLATE = u"{user_name} [force pushed]({url}) to branch {branch_name}. Head is now {head}"
CREATE_BRANCH_MESSAGE_TEMPLATE = u"{user_name} created [{branch_name}]({url}) branch"
REMOVE_BRANCH_MESSAGE_TEMPLATE = u"{user_name} deleted branch {branch_name}"
PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE = u"{user_name} {action} [{type}{id}]({url})"
PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE = u"(assigned to {assignee})"
PULL_REQUEST_BRANCH_INFO_TEMPLATE = u"\nfrom `{target}` to `{base}`"
SETUP_MESSAGE_TEMPLATE = u"{integration} webhook has been successfully configured"
SETUP_MESSAGE_USER_PART = u" by {user_name}"
CONTENT_MESSAGE_TEMPLATE = u"\n~~~ quote\n{message}\n~~~"
COMMITS_COMMENT_MESSAGE_TEMPLATE = u"{user_name} {action} on [{sha}]({url})"
PUSH_TAGS_MESSAGE_TEMPLATE = u"""{user_name} {action} tag {tag}"""
TAG_WITH_URL_TEMPLATE = u"[{tag_name}]({tag_url})"
TAG_WITHOUT_URL_TEMPLATE = u"{tag_name}"
def get_push_commits_event_message(user_name, compare_url, branch_name, commits_data, is_truncated=False):
pushed_message_template = PUSH_PUSHED_TEXT_WITH_URL if compare_url else PUSH_PUSHED_TEXT_WITHOUT_URL
pushed_text_message = pushed_message_template.format(
compare_url=compare_url,
number_of_commits=len(commits_data),
commit_or_commits=COMMIT_OR_COMMITS.format(u's' if len(commits_data) > 1 else u''))
committers_items = get_all_committers(commits_data)
if len(committers_items) == 1 and user_name == committers_items[0][0]:
return PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS.format(
user_name=user_name,
pushed_text=pushed_text_message,
branch_name=branch_name,
commits_data=get_commits_content(commits_data, is_truncated),
).rstrip()
else:
committers_details = "{} ({})".format(*committers_items[0])
for name, number_of_commits in committers_items[1:-1]:
committers_details = "{}, {} ({})".format(committers_details, name, number_of_commits)
if len(committers_items) > 1:
committers_details = "{} and {} ({})".format(committers_details, *committers_items[-1])
return PUSH_COMMITS_MESSAGE_TEMPLATE_WITH_COMMITTERS.format(
user_name=user_name,
pushed_text=pushed_text_message,
branch_name=branch_name,
committers_details=PUSH_COMMITS_MESSAGE_EXTENSION.format(committers_details),
commits_data=get_commits_content(commits_data, is_truncated),
).rstrip()
def get_force_push_commits_event_message(user_name, url, branch_name, head):
return FORCE_PUSH_COMMITS_MESSAGE_TEMPLATE.format(
user_name=user_name,
url=url,
branch_name=branch_name,
head=head
)
def get_create_branch_event_message(user_name, url, branch_name):
return CREATE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
url=url,
branch_name=branch_name,
)
def get_remove_branch_event_message(user_name, branch_name):
return REMOVE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
branch_name=branch_name,
)
def get_pull_request_event_message(
user_name, action, url, number=None,
target_branch=None, base_branch=None,
message=None, assignee=None, type='PR'
):
main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
type=type,
url=url,
id=" #{}".format(number) if number is not None else ''
)
if assignee:
main_message += PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE.format(assignee=assignee)
if target_branch and base_branch:
main_message += PULL_REQUEST_BRANCH_INFO_TEMPLATE.format(
target=target_branch,
base=base_branch
)
if message:
main_message += '\n' + CONTENT_MESSAGE_TEMPLATE.format(message=message)
return main_message.rstrip()
def get_setup_webhook_message(integration, user_name=None):
content = SETUP_MESSAGE_TEMPLATE.format(integration=integration)
if user_name:
content += SETUP_MESSAGE_USER_PART.format(user_name=user_name)
return content
def get_issue_event_message(user_name, action, url, number=None, message=None, assignee=None):
return get_pull_request_event_message(
user_name,
action,
url,
number,
message=message,
assignee=assignee,
type='Issue'
)
def get_push_tag_event_message(user_name, tag_name, tag_url=None, action='pushed'):
if tag_url:
tag_part = TAG_WITH_URL_TEMPLATE.format(tag_name=tag_name, tag_url=tag_url)
else:
tag_part = TAG_WITHOUT_URL_TEMPLATE.format(tag_name=tag_name)
return PUSH_TAGS_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
tag=tag_part
)
def get_commits_comment_action_message(user_name, action, commit_url, sha, message=None):
content = COMMITS_COMMENT_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
sha=get_short_sha(sha),
url=commit_url
)
if message is not None:
content += CONTENT_MESSAGE_TEMPLATE.format(
message=message
)
return content
def get_commits_content(commits_data, is_truncated=False):
commits_content = u''
for commit in commits_data[:COMMITS_LIMIT]:
commits_content += COMMIT_ROW_TEMPLATE.format(
commit_short_sha=get_short_sha(commit.get('sha')),
commit_url=commit.get('url'),
commit_msg=commit.get('message').partition('\n')[0]
)
if len(commits_data) > COMMITS_LIMIT:
commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
commits_number=len(commits_data) - COMMITS_LIMIT
)
elif is_truncated:
commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
commits_number=''
        ).replace('  ', ' ')
return commits_content.rstrip()
def get_short_sha(sha):
return sha[:7]
def get_all_committers(commits_data):
committers = defaultdict(int)
for commit in commits_data:
committers[commit['name']] += 1
committers_items = sorted(list(committers.items()),
key=lambda item: (-item[1], item[0]))
committers_values = [c_i[1] for c_i in committers_items]
if len(committers) > PUSH_COMMITTERS_LIMIT_INFO:
others_number_of_commits = sum(committers_values[PUSH_COMMITTERS_LIMIT_INFO:])
committers_items = committers_items[:PUSH_COMMITTERS_LIMIT_INFO]
committers_items.append(('others', others_number_of_commits))
return committers_items
| true
| true
|
79099f8125584e5a2c15fc98ea5006841518b562
| 4,026
|
py
|
Python
|
dpipe/batch_iter/utils.py
|
samokhinv/deep_pipe
|
9461b02f5f32c3e9f24490619ebccf417979cffc
|
[
"MIT"
] | 38
|
2017-09-08T04:51:17.000Z
|
2022-03-29T17:34:22.000Z
|
dpipe/batch_iter/utils.py
|
samokhinv/deep_pipe
|
9461b02f5f32c3e9f24490619ebccf417979cffc
|
[
"MIT"
] | 41
|
2017-09-29T22:06:21.000Z
|
2021-12-03T09:31:57.000Z
|
dpipe/batch_iter/utils.py
|
samokhinv/deep_pipe
|
9461b02f5f32c3e9f24490619ebccf417979cffc
|
[
"MIT"
] | 12
|
2017-09-08T04:40:39.000Z
|
2021-01-19T19:19:37.000Z
|
from typing import Callable, Iterable, Sequence
import numpy as np
from dpipe.im.axes import AxesLike, AxesParams
from dpipe.itertools import lmap, squeeze_first
from dpipe.im import pad_to_shape
def pad_batch_equal(batch, padding_values: AxesParams = 0, ratio: AxesParams = 0.5):
"""
Pad each element of ``batch`` to obtain a correctly shaped array.
References
----------
`pad_to_shape`
"""
max_shapes = np.max(lmap(np.shape, batch), axis=0)
# if not scalars
if max_shapes.size != 0:
batch = [pad_to_shape(x, max_shapes, padding_values=padding_values, ratio=ratio) for x in batch]
return np.array(batch)
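# Illustrative example (not in the original module): the shorter array is
# zero-padded so the batch stacks into a single array.
#     pad_batch_equal([np.array([1, 2, 3]), np.array([4])]).shape  # -> (2, 3)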
def unpack_args(func: Callable, *args, **kwargs):
"""
Returns a function that takes an iterable and unpacks it while calling ``func``.
``args`` and ``kwargs`` are passed to ``func`` as additional arguments.
Examples
--------
>>> def add(x, y):
>>> return x + y
>>>
>>> add_ = unpack_args(add)
>>> add(1, 2) == add_([1, 2])
>>> True
"""
def wrapper(xs, *args_, **kwargs_):
return func(*xs, *args_, *args, **kwargs_, **kwargs)
return wrapper
def multiply(func: Callable, *args, **kwargs):
"""
Returns a function that takes an iterable and maps ``func`` over it.
Useful when multiple batches require the same function.
``args`` and ``kwargs`` are passed to ``func`` as additional arguments.
"""
def wrapped(xs: Iterable, *args_, **kwargs_) -> tuple:
return tuple(func(x, *args_, *args, **kwargs_, **kwargs) for x in xs)
return wrapped
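# Illustrative example (not in the original module): apply one function to every
# element of a batch tuple.
#     sqr_all = multiply(np.square)
#     sqr_all([1, 2, 3])  # -> (1, 4, 9)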
def apply_at(index: AxesLike, func: Callable, *args, **kwargs):
"""
Returns a function that takes an iterable and applies ``func`` to the values at the corresponding ``index``.
``args`` and ``kwargs`` are passed to ``func`` as additional arguments.
Examples
--------
>>> first_sqr = apply_at(0, np.square)
>>> first_sqr([3, 2, 1])
>>> (9, 2, 1)
"""
index = set(np.atleast_1d(index).tolist())
def wrapped(xs: Sequence, *args_, **kwargs_) -> tuple:
index_ = {i + len(xs) if i < 0 else i for i in index}
for idx in index_:
if idx < 0 or idx >= len(xs):
raise IndexError(f'Index {idx} out of bounds.')
return tuple(func(x, *args_, *args, **kwargs_, **kwargs) if i in index_ else x for i, x in enumerate(xs))
return wrapped
def zip_apply(*functions: Callable, **kwargs):
"""
Returns a function that takes an iterable and zips ``functions`` over it.
``kwargs`` are passed to each function as additional arguments.
Examples
--------
>>> zipper = zip_apply(np.square, np.sqrt)
>>> zipper([4, 9])
>>> (16, 3)
"""
def wrapped(xs: Sequence, *args, **kwargs_) -> tuple:
return tuple(func(x, *args, **kwargs_, **kwargs) for func, x in zip(functions, xs))
return wrapped
def random_apply(p: float, func: Callable, *args, **kwargs):
"""
Returns a function that applies ``func`` with a given probability ``p``.
``args`` and ``kwargs`` are passed to ``func`` as additional arguments.
"""
def wrapped(*args_, **kwargs_):
if np.random.binomial(1, p):
return func(*args_, *args, **kwargs_, **kwargs)
return squeeze_first(args_)
return wrapped
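# Illustrative example (not in the original module): a stochastic augmentation that
# flips an array along its last axis with probability 0.5.
#     maybe_flip = random_apply(0.5, np.flip, axis=-1)
#     maybe_flip(np.array([1, 2, 3]))  # -> [3 2 1] or the unchanged input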
def sample_args(func: Callable, *args: Callable, **kwargs: Callable):
"""
Returns a function that samples arguments for ``func`` from ``args`` and ``kwargs``.
Each argument in ``args`` and ``kwargs`` must be a callable that samples a random value.
Examples
--------
>>> from scipy.ndimage import rotate
>>>
>>> random_rotate = sample_args(rotate, angle=np.random.normal)
>>> random_rotate(x)
>>> # same as
>>> rotate(x, angle=np.random.normal())
"""
def wrapped(*args_, **kwargs_):
return func(*args_, *([arg() for arg in args]), **kwargs_, **{name: arg() for name, arg in kwargs.items()})
return wrapped
| 28.757143
| 115
| 0.60929
|
from typing import Callable, Iterable, Sequence
import numpy as np
from dpipe.im.axes import AxesLike, AxesParams
from dpipe.itertools import lmap, squeeze_first
from dpipe.im import pad_to_shape
def pad_batch_equal(batch, padding_values: AxesParams = 0, ratio: AxesParams = 0.5):
max_shapes = np.max(lmap(np.shape, batch), axis=0)
if max_shapes.size != 0:
batch = [pad_to_shape(x, max_shapes, padding_values=padding_values, ratio=ratio) for x in batch]
return np.array(batch)
def unpack_args(func: Callable, *args, **kwargs):
def wrapper(xs, *args_, **kwargs_):
return func(*xs, *args_, *args, **kwargs_, **kwargs)
return wrapper
def multiply(func: Callable, *args, **kwargs):
def wrapped(xs: Iterable, *args_, **kwargs_) -> tuple:
return tuple(func(x, *args_, *args, **kwargs_, **kwargs) for x in xs)
return wrapped
def apply_at(index: AxesLike, func: Callable, *args, **kwargs):
index = set(np.atleast_1d(index).tolist())
def wrapped(xs: Sequence, *args_, **kwargs_) -> tuple:
index_ = {i + len(xs) if i < 0 else i for i in index}
for idx in index_:
if idx < 0 or idx >= len(xs):
raise IndexError(f'Index {idx} out of bounds.')
return tuple(func(x, *args_, *args, **kwargs_, **kwargs) if i in index_ else x for i, x in enumerate(xs))
return wrapped
def zip_apply(*functions: Callable, **kwargs):
def wrapped(xs: Sequence, *args, **kwargs_) -> tuple:
return tuple(func(x, *args, **kwargs_, **kwargs) for func, x in zip(functions, xs))
return wrapped
def random_apply(p: float, func: Callable, *args, **kwargs):
def wrapped(*args_, **kwargs_):
if np.random.binomial(1, p):
return func(*args_, *args, **kwargs_, **kwargs)
return squeeze_first(args_)
return wrapped
def sample_args(func: Callable, *args: Callable, **kwargs: Callable):
def wrapped(*args_, **kwargs_):
return func(*args_, *([arg() for arg in args]), **kwargs_, **{name: arg() for name, arg in kwargs.items()})
return wrapped
| true
| true
|
7909a11fc479c66853b4ae093eecadc669af2de4
| 1,429
|
py
|
Python
|
4+Median+of+Two+Sorted+Arrays/alg.py
|
xiaoh12/leetcode
|
537e7d2b5b9013ae2e1a9cdda1e1916e77922611
|
[
"MIT"
] | 1
|
2018-11-21T15:36:12.000Z
|
2018-11-21T15:36:12.000Z
|
4+Median+of+Two+Sorted+Arrays/alg.py
|
xiaoh12/leetcode
|
537e7d2b5b9013ae2e1a9cdda1e1916e77922611
|
[
"MIT"
] | null | null | null |
4+Median+of+Two+Sorted+Arrays/alg.py
|
xiaoh12/leetcode
|
537e7d2b5b9013ae2e1a9cdda1e1916e77922611
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#-*- coding: UTF-8 -*-
###########################################################################
#
# Copyright (c) 2018 www.codingchen.com, Inc. All Rights Reserved
#
##########################################################################
'''
@brief leetcode algorithm
@author chenhui(hui.chen6789@gmail.com)
@date 2018/11/07 21:30:33
'''
class Solution:
def findMedianSortedArrays(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: float
"""
odd = (len(nums1) + len(nums2)) % 2
if odd:
half = (len(nums1) + len(nums2)) // 2
else:
half = (len(nums1) + len(nums2)) // 2 - 1
for _ in range(half):
__ = self.pop_num(nums1, nums2)
if odd:
return float(self.pop_num(nums1, nums2))
else:
t1 = self.pop_num(nums1, nums2)
t2 = self.pop_num(nums1, nums2)
return (t1 + t2) / 2
def pop_num(self, nums1, nums2):
if len(nums1) == 0:
return nums2.pop(0)
elif len(nums2) == 0:
return nums1.pop(0)
elif nums1[0] > nums2[0]:
return nums2.pop(0)
elif nums1[0] <= nums2[0]:
return nums1.pop(0)
if __name__ == '__main__':
s = Solution()
nums1 = [1, 2]
nums2 = [3, 4]
print(s.findMedianSortedArrays(nums1, nums2))
| 29.163265
| 75
| 0.46746
| true
| true
|
|
7909a177346599e23a62e0426f9357144afec2af
| 796
|
py
|
Python
|
desafio/urls.py
|
NathanMilhomen/vagas
|
552a1a8fd6d2997b0944774e344df513e99bec2e
|
[
"MIT"
] | null | null | null |
desafio/urls.py
|
NathanMilhomen/vagas
|
552a1a8fd6d2997b0944774e344df513e99bec2e
|
[
"MIT"
] | null | null | null |
desafio/urls.py
|
NathanMilhomen/vagas
|
552a1a8fd6d2997b0944774e344df513e99bec2e
|
[
"MIT"
] | null | null | null |
"""desafio URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('vagas.urls')),
]
| 33.166667
| 77
| 0.701005
|
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('vagas.urls')),
]
| true
| true
|
7909a17fd3a77c88bc70486bdab1efe7848b5bc8
| 109
|
py
|
Python
|
nginx-flask/webapp/run.py
|
simonwuchj/docker-nginx-uwsgi-flask-mysql
|
cad1c32ff2fd5fbdec498ec7f87244947edeeb33
|
[
"MIT"
] | 1
|
2021-01-08T12:25:03.000Z
|
2021-01-08T12:25:03.000Z
|
nginx-flask/webapp/run.py
|
simonwuchj/docker-nginx-uwsgi-flask-mysql
|
cad1c32ff2fd5fbdec498ec7f87244947edeeb33
|
[
"MIT"
] | null | null | null |
nginx-flask/webapp/run.py
|
simonwuchj/docker-nginx-uwsgi-flask-mysql
|
cad1c32ff2fd5fbdec498ec7f87244947edeeb33
|
[
"MIT"
] | null | null | null |
from app import create_app
app = create_app()
if __name__ == '__main__':
app.run(debug=True, port=5000)
| 18.166667
| 34
| 0.706422
|
from app import create_app
app = create_app()
if __name__ == '__main__':
app.run(debug=True, port=5000)
| true
| true
|
7909a192ed6b4952cc237a81b3b7ed15f6c0fb61
| 9,361
|
py
|
Python
|
baselines/a2c/utils.py
|
zeuseyera/baselines-kr
|
c9926418d2d8efee21ef20d548366eaaaa193011
|
[
"MIT"
] | null | null | null |
baselines/a2c/utils.py
|
zeuseyera/baselines-kr
|
c9926418d2d8efee21ef20d548366eaaaa193011
|
[
"MIT"
] | null | null | null |
baselines/a2c/utils.py
|
zeuseyera/baselines-kr
|
c9926418d2d8efee21ef20d548366eaaaa193011
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import tensorflow as tf
from collections import deque
def sample(logits):
    # Gumbel-max trick: adding -log(-log(U)) noise and taking the argmax draws a
    # sample from the categorical distribution defined by the logits.
    noise = tf.random_uniform(tf.shape(logits))
    return tf.argmax(logits - tf.log(-tf.log(noise)), 1)
def cat_entropy(logits):
a0 = logits - tf.reduce_max(logits, 1, keepdims=True)
ea0 = tf.exp(a0)
z0 = tf.reduce_sum(ea0, 1, keepdims=True)
p0 = ea0 / z0
return tf.reduce_sum(p0 * (tf.log(z0) - a0), 1)
def cat_entropy_softmax(p0):
return - tf.reduce_sum(p0 * tf.log(p0 + 1e-6), axis = 1)
def ortho_init(scale=1.0):
def _ortho_init(shape, dtype, partition_info=None):
#lasagne ortho init for tf
shape = tuple(shape)
if len(shape) == 2:
flat_shape = shape
elif len(shape) == 4: # assumes NHWC
flat_shape = (np.prod(shape[:-1]), shape[-1])
else:
raise NotImplementedError
a = np.random.normal(0.0, 1.0, flat_shape)
u, _, v = np.linalg.svd(a, full_matrices=False)
q = u if u.shape == flat_shape else v # pick the one with the correct shape
q = q.reshape(shape)
return (scale * q[:shape[0], :shape[1]]).astype(np.float32)
return _ortho_init
def conv(x, scope, *, nf, rf, stride, pad='VALID', init_scale=1.0, data_format='NHWC', one_dim_bias=False):
if data_format == 'NHWC':
channel_ax = 3
strides = [1, stride, stride, 1]
bshape = [1, 1, 1, nf]
elif data_format == 'NCHW':
channel_ax = 1
strides = [1, 1, stride, stride]
bshape = [1, nf, 1, 1]
else:
raise NotImplementedError
bias_var_shape = [nf] if one_dim_bias else [1, nf, 1, 1]
nin = x.get_shape()[channel_ax].value
wshape = [rf, rf, nin, nf]
with tf.variable_scope(scope):
w = tf.get_variable("w", wshape, initializer=ortho_init(init_scale))
b = tf.get_variable("b", bias_var_shape, initializer=tf.constant_initializer(0.0))
if not one_dim_bias and data_format == 'NHWC':
b = tf.reshape(b, bshape)
return tf.nn.conv2d(x, w, strides=strides, padding=pad, data_format=data_format) + b
def fc(x, scope, nh, *, init_scale=1.0, init_bias=0.0):
with tf.variable_scope(scope):
nin = x.get_shape()[1].value
w = tf.get_variable("w", [nin, nh], initializer=ortho_init(init_scale))
b = tf.get_variable("b", [nh], initializer=tf.constant_initializer(init_bias))
return tf.matmul(x, w)+b
def batch_to_seq(h, nbatch, nsteps, flat=False):
if flat:
h = tf.reshape(h, [nbatch, nsteps])
else:
h = tf.reshape(h, [nbatch, nsteps, -1])
return [tf.squeeze(v, [1]) for v in tf.split(axis=1, num_or_size_splits=nsteps, value=h)]
def seq_to_batch(h, flat = False):
shape = h[0].get_shape().as_list()
if not flat:
assert(len(shape) > 1)
nh = h[0].get_shape()[-1].value
return tf.reshape(tf.concat(axis=1, values=h), [-1, nh])
else:
return tf.reshape(tf.stack(values=h, axis=1), [-1])
def lstm(xs, ms, s, scope, nh, init_scale=1.0):
nbatch, nin = [v.value for v in xs[0].get_shape()]
with tf.variable_scope(scope):
wx = tf.get_variable("wx", [nin, nh*4], initializer=ortho_init(init_scale))
wh = tf.get_variable("wh", [nh, nh*4], initializer=ortho_init(init_scale))
b = tf.get_variable("b", [nh*4], initializer=tf.constant_initializer(0.0))
c, h = tf.split(axis=1, num_or_size_splits=2, value=s)
for idx, (x, m) in enumerate(zip(xs, ms)):
c = c*(1-m)
h = h*(1-m)
z = tf.matmul(x, wx) + tf.matmul(h, wh) + b
i, f, o, u = tf.split(axis=1, num_or_size_splits=4, value=z)
i = tf.nn.sigmoid(i)
f = tf.nn.sigmoid(f)
o = tf.nn.sigmoid(o)
u = tf.tanh(u)
c = f*c + i*u
h = o*tf.tanh(c)
xs[idx] = h
s = tf.concat(axis=1, values=[c, h])
return xs, s
def _ln(x, g, b, e=1e-5, axes=[1]):
u, s = tf.nn.moments(x, axes=axes, keep_dims=True)
x = (x-u)/tf.sqrt(s+e)
x = x*g+b
return x
def lnlstm(xs, ms, s, scope, nh, init_scale=1.0):
nbatch, nin = [v.value for v in xs[0].get_shape()]
with tf.variable_scope(scope):
wx = tf.get_variable("wx", [nin, nh*4], initializer=ortho_init(init_scale))
gx = tf.get_variable("gx", [nh*4], initializer=tf.constant_initializer(1.0))
bx = tf.get_variable("bx", [nh*4], initializer=tf.constant_initializer(0.0))
wh = tf.get_variable("wh", [nh, nh*4], initializer=ortho_init(init_scale))
gh = tf.get_variable("gh", [nh*4], initializer=tf.constant_initializer(1.0))
bh = tf.get_variable("bh", [nh*4], initializer=tf.constant_initializer(0.0))
b = tf.get_variable("b", [nh*4], initializer=tf.constant_initializer(0.0))
gc = tf.get_variable("gc", [nh], initializer=tf.constant_initializer(1.0))
bc = tf.get_variable("bc", [nh], initializer=tf.constant_initializer(0.0))
c, h = tf.split(axis=1, num_or_size_splits=2, value=s)
for idx, (x, m) in enumerate(zip(xs, ms)):
c = c*(1-m)
h = h*(1-m)
z = _ln(tf.matmul(x, wx), gx, bx) + _ln(tf.matmul(h, wh), gh, bh) + b
i, f, o, u = tf.split(axis=1, num_or_size_splits=4, value=z)
i = tf.nn.sigmoid(i)
f = tf.nn.sigmoid(f)
o = tf.nn.sigmoid(o)
u = tf.tanh(u)
c = f*c + i*u
h = o*tf.tanh(_ln(c, gc, bc))
xs[idx] = h
s = tf.concat(axis=1, values=[c, h])
return xs, s
def conv_to_fc(x):
nh = np.prod([v.value for v in x.get_shape()[1:]])
x = tf.reshape(x, [-1, nh])
return x
def discount_with_dones(rewards, dones, gamma):
discounted = []
r = 0
for reward, done in zip(rewards[::-1], dones[::-1]):
r = reward + gamma*r*(1.-done) # fixed off by one bug
discounted.append(r)
return discounted[::-1]
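# Illustrative example (not in the original module): returns are discounted with
# gamma and reset at episode boundaries marked by done=1.
#     discount_with_dones([1, 1, 1], [0, 0, 1], gamma=0.9)  # -> approx. [2.71, 1.9, 1.0]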
def find_trainable_variables(key):
return tf.trainable_variables(key)
def make_path(f):
return os.makedirs(f, exist_ok=True)
def constant(p):
return 1
def linear(p):
return 1-p
def middle_drop(p):
eps = 0.75
if 1-p<eps:
return eps*0.1
return 1-p
def double_linear_con(p):
p *= 2
eps = 0.125
if 1-p<eps:
return eps
return 1-p
def double_middle_drop(p):
eps1 = 0.75
eps2 = 0.25
if 1-p<eps1:
if 1-p<eps2:
return eps2*0.5
return eps1*0.1
return 1-p
schedules = {
'linear':linear,
'constant':constant,
'double_linear_con': double_linear_con,
'middle_drop': middle_drop,
'double_middle_drop': double_middle_drop
}
class Scheduler(object):
def __init__(self, v, nvalues, schedule):
self.n = 0.
self.v = v
self.nvalues = nvalues
self.schedule = schedules[schedule]
def value(self):
current_value = self.v*self.schedule(self.n/self.nvalues)
self.n += 1.
return current_value
def value_steps(self, steps):
return self.v*self.schedule(steps/self.nvalues)
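# Illustrative example (not in the original module): a learning rate that decays
# linearly from 7e-4 towards 0 over 100 calls to value().
#     lr = Scheduler(v=7e-4, nvalues=100, schedule='linear')
#     lr.value()  # -> 7e-4 on the first call, smaller on each later call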
class EpisodeStats:
def __init__(self, nsteps, nenvs):
self.episode_rewards = []
for i in range(nenvs):
self.episode_rewards.append([])
self.lenbuffer = deque(maxlen=40) # rolling buffer for episode lengths
self.rewbuffer = deque(maxlen=40) # rolling buffer for episode rewards
self.nsteps = nsteps
self.nenvs = nenvs
def feed(self, rewards, masks):
rewards = np.reshape(rewards, [self.nenvs, self.nsteps])
masks = np.reshape(masks, [self.nenvs, self.nsteps])
for i in range(0, self.nenvs):
for j in range(0, self.nsteps):
self.episode_rewards[i].append(rewards[i][j])
if masks[i][j]:
l = len(self.episode_rewards[i])
s = sum(self.episode_rewards[i])
self.lenbuffer.append(l)
self.rewbuffer.append(s)
self.episode_rewards[i] = []
def mean_length(self):
if self.lenbuffer:
return np.mean(self.lenbuffer)
else:
return 0 # on the first params dump, no episodes are finished
def mean_reward(self):
if self.rewbuffer:
return np.mean(self.rewbuffer)
else:
return 0
# For ACER
def get_by_index(x, idx):
assert(len(x.get_shape()) == 2)
assert(len(idx.get_shape()) == 1)
idx_flattened = tf.range(0, x.shape[0]) * x.shape[1] + idx
y = tf.gather(tf.reshape(x, [-1]), # flatten input
idx_flattened) # use flattened indices
return y
def check_shape(ts,shapes):
i = 0
for (t,shape) in zip(ts,shapes):
assert t.get_shape().as_list()==shape, "id " + str(i) + " shape " + str(t.get_shape()) + str(shape)
i += 1
def avg_norm(t):
return tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square(t), axis=-1)))
def gradient_add(g1, g2, param):
print([g1, g2, param.name])
assert (not (g1 is None and g2 is None)), param.name
if g1 is None:
return g2
elif g2 is None:
return g1
else:
return g1 + g2
def q_explained_variance(qpred, q):
_, vary = tf.nn.moments(q, axes=[0, 1])
_, varpred = tf.nn.moments(q - qpred, axes=[0, 1])
check_shape([vary, varpred], [[]] * 2)
return 1.0 - (varpred / vary)
| 32.616725
| 107
| 0.587651
|
import os
import numpy as np
import tensorflow as tf
from collections import deque
def sample(logits):
noise = tf.random_uniform(tf.shape(logits))
return tf.argmax(logits - tf.log(-tf.log(noise)), 1)
def cat_entropy(logits):
a0 = logits - tf.reduce_max(logits, 1, keepdims=True)
ea0 = tf.exp(a0)
z0 = tf.reduce_sum(ea0, 1, keepdims=True)
p0 = ea0 / z0
return tf.reduce_sum(p0 * (tf.log(z0) - a0), 1)
def cat_entropy_softmax(p0):
return - tf.reduce_sum(p0 * tf.log(p0 + 1e-6), axis = 1)
def ortho_init(scale=1.0):
def _ortho_init(shape, dtype, partition_info=None):
shape = tuple(shape)
if len(shape) == 2:
flat_shape = shape
elif len(shape) == 4:
flat_shape = (np.prod(shape[:-1]), shape[-1])
else:
raise NotImplementedError
a = np.random.normal(0.0, 1.0, flat_shape)
u, _, v = np.linalg.svd(a, full_matrices=False)
q = u if u.shape == flat_shape else v
q = q.reshape(shape)
return (scale * q[:shape[0], :shape[1]]).astype(np.float32)
return _ortho_init
def conv(x, scope, *, nf, rf, stride, pad='VALID', init_scale=1.0, data_format='NHWC', one_dim_bias=False):
if data_format == 'NHWC':
channel_ax = 3
strides = [1, stride, stride, 1]
bshape = [1, 1, 1, nf]
elif data_format == 'NCHW':
channel_ax = 1
strides = [1, 1, stride, stride]
bshape = [1, nf, 1, 1]
else:
raise NotImplementedError
bias_var_shape = [nf] if one_dim_bias else [1, nf, 1, 1]
nin = x.get_shape()[channel_ax].value
wshape = [rf, rf, nin, nf]
with tf.variable_scope(scope):
w = tf.get_variable("w", wshape, initializer=ortho_init(init_scale))
b = tf.get_variable("b", bias_var_shape, initializer=tf.constant_initializer(0.0))
if not one_dim_bias and data_format == 'NHWC':
b = tf.reshape(b, bshape)
return tf.nn.conv2d(x, w, strides=strides, padding=pad, data_format=data_format) + b
def fc(x, scope, nh, *, init_scale=1.0, init_bias=0.0):
with tf.variable_scope(scope):
nin = x.get_shape()[1].value
w = tf.get_variable("w", [nin, nh], initializer=ortho_init(init_scale))
b = tf.get_variable("b", [nh], initializer=tf.constant_initializer(init_bias))
return tf.matmul(x, w)+b
def batch_to_seq(h, nbatch, nsteps, flat=False):
if flat:
h = tf.reshape(h, [nbatch, nsteps])
else:
h = tf.reshape(h, [nbatch, nsteps, -1])
return [tf.squeeze(v, [1]) for v in tf.split(axis=1, num_or_size_splits=nsteps, value=h)]
def seq_to_batch(h, flat = False):
shape = h[0].get_shape().as_list()
if not flat:
assert(len(shape) > 1)
nh = h[0].get_shape()[-1].value
return tf.reshape(tf.concat(axis=1, values=h), [-1, nh])
else:
return tf.reshape(tf.stack(values=h, axis=1), [-1])
def lstm(xs, ms, s, scope, nh, init_scale=1.0):
nbatch, nin = [v.value for v in xs[0].get_shape()]
with tf.variable_scope(scope):
wx = tf.get_variable("wx", [nin, nh*4], initializer=ortho_init(init_scale))
wh = tf.get_variable("wh", [nh, nh*4], initializer=ortho_init(init_scale))
b = tf.get_variable("b", [nh*4], initializer=tf.constant_initializer(0.0))
c, h = tf.split(axis=1, num_or_size_splits=2, value=s)
for idx, (x, m) in enumerate(zip(xs, ms)):
c = c*(1-m)
h = h*(1-m)
z = tf.matmul(x, wx) + tf.matmul(h, wh) + b
i, f, o, u = tf.split(axis=1, num_or_size_splits=4, value=z)
i = tf.nn.sigmoid(i)
f = tf.nn.sigmoid(f)
o = tf.nn.sigmoid(o)
u = tf.tanh(u)
c = f*c + i*u
h = o*tf.tanh(c)
xs[idx] = h
s = tf.concat(axis=1, values=[c, h])
return xs, s
def _ln(x, g, b, e=1e-5, axes=[1]):
u, s = tf.nn.moments(x, axes=axes, keep_dims=True)
x = (x-u)/tf.sqrt(s+e)
x = x*g+b
return x
def lnlstm(xs, ms, s, scope, nh, init_scale=1.0):
nbatch, nin = [v.value for v in xs[0].get_shape()]
with tf.variable_scope(scope):
wx = tf.get_variable("wx", [nin, nh*4], initializer=ortho_init(init_scale))
gx = tf.get_variable("gx", [nh*4], initializer=tf.constant_initializer(1.0))
bx = tf.get_variable("bx", [nh*4], initializer=tf.constant_initializer(0.0))
wh = tf.get_variable("wh", [nh, nh*4], initializer=ortho_init(init_scale))
gh = tf.get_variable("gh", [nh*4], initializer=tf.constant_initializer(1.0))
bh = tf.get_variable("bh", [nh*4], initializer=tf.constant_initializer(0.0))
b = tf.get_variable("b", [nh*4], initializer=tf.constant_initializer(0.0))
gc = tf.get_variable("gc", [nh], initializer=tf.constant_initializer(1.0))
bc = tf.get_variable("bc", [nh], initializer=tf.constant_initializer(0.0))
c, h = tf.split(axis=1, num_or_size_splits=2, value=s)
for idx, (x, m) in enumerate(zip(xs, ms)):
c = c*(1-m)
h = h*(1-m)
z = _ln(tf.matmul(x, wx), gx, bx) + _ln(tf.matmul(h, wh), gh, bh) + b
i, f, o, u = tf.split(axis=1, num_or_size_splits=4, value=z)
i = tf.nn.sigmoid(i)
f = tf.nn.sigmoid(f)
o = tf.nn.sigmoid(o)
u = tf.tanh(u)
c = f*c + i*u
h = o*tf.tanh(_ln(c, gc, bc))
xs[idx] = h
s = tf.concat(axis=1, values=[c, h])
return xs, s
def conv_to_fc(x):
nh = np.prod([v.value for v in x.get_shape()[1:]])
x = tf.reshape(x, [-1, nh])
return x
def discount_with_dones(rewards, dones, gamma):
discounted = []
r = 0
for reward, done in zip(rewards[::-1], dones[::-1]):
r = reward + gamma*r*(1.-done)
discounted.append(r)
return discounted[::-1]
def find_trainable_variables(key):
return tf.trainable_variables(key)
def make_path(f):
return os.makedirs(f, exist_ok=True)
def constant(p):
return 1
def linear(p):
return 1-p
def middle_drop(p):
eps = 0.75
if 1-p<eps:
return eps*0.1
return 1-p
def double_linear_con(p):
p *= 2
eps = 0.125
if 1-p<eps:
return eps
return 1-p
def double_middle_drop(p):
eps1 = 0.75
eps2 = 0.25
if 1-p<eps1:
if 1-p<eps2:
return eps2*0.5
return eps1*0.1
return 1-p
schedules = {
'linear':linear,
'constant':constant,
'double_linear_con': double_linear_con,
'middle_drop': middle_drop,
'double_middle_drop': double_middle_drop
}
class Scheduler(object):
def __init__(self, v, nvalues, schedule):
self.n = 0.
self.v = v
self.nvalues = nvalues
self.schedule = schedules[schedule]
def value(self):
current_value = self.v*self.schedule(self.n/self.nvalues)
self.n += 1.
return current_value
def value_steps(self, steps):
return self.v*self.schedule(steps/self.nvalues)
class EpisodeStats:
def __init__(self, nsteps, nenvs):
self.episode_rewards = []
for i in range(nenvs):
self.episode_rewards.append([])
self.lenbuffer = deque(maxlen=40)
self.rewbuffer = deque(maxlen=40)
self.nsteps = nsteps
self.nenvs = nenvs
def feed(self, rewards, masks):
rewards = np.reshape(rewards, [self.nenvs, self.nsteps])
masks = np.reshape(masks, [self.nenvs, self.nsteps])
for i in range(0, self.nenvs):
for j in range(0, self.nsteps):
self.episode_rewards[i].append(rewards[i][j])
if masks[i][j]:
l = len(self.episode_rewards[i])
s = sum(self.episode_rewards[i])
self.lenbuffer.append(l)
self.rewbuffer.append(s)
self.episode_rewards[i] = []
def mean_length(self):
if self.lenbuffer:
return np.mean(self.lenbuffer)
else:
return 0
def mean_reward(self):
if self.rewbuffer:
return np.mean(self.rewbuffer)
else:
return 0
def get_by_index(x, idx):
assert(len(x.get_shape()) == 2)
assert(len(idx.get_shape()) == 1)
idx_flattened = tf.range(0, x.shape[0]) * x.shape[1] + idx
y = tf.gather(tf.reshape(x, [-1]),
idx_flattened)
return y
def check_shape(ts,shapes):
i = 0
for (t,shape) in zip(ts,shapes):
assert t.get_shape().as_list()==shape, "id " + str(i) + " shape " + str(t.get_shape()) + str(shape)
i += 1
def avg_norm(t):
return tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square(t), axis=-1)))
def gradient_add(g1, g2, param):
print([g1, g2, param.name])
assert (not (g1 is None and g2 is None)), param.name
if g1 is None:
return g2
elif g2 is None:
return g1
else:
return g1 + g2
def q_explained_variance(qpred, q):
_, vary = tf.nn.moments(q, axes=[0, 1])
_, varpred = tf.nn.moments(q - qpred, axes=[0, 1])
check_shape([vary, varpred], [[]] * 2)
return 1.0 - (varpred / vary)
| true
| true
|
7909a42d2071e44e24486848e6edd0deb2ab5d53
| 4,165
|
py
|
Python
|
jax_cfd/base/time_stepping.py
|
google/jax-cfd
|
c28e55eb8f4afcc21a8ab77b7b30b661ad6bba76
|
[
"Apache-2.0"
] | 244
|
2021-05-18T18:49:14.000Z
|
2022-03-30T18:27:21.000Z
|
jax_cfd/base/time_stepping.py
|
google/jax-cfd
|
c28e55eb8f4afcc21a8ab77b7b30b661ad6bba76
|
[
"Apache-2.0"
] | 14
|
2021-06-24T22:15:44.000Z
|
2022-03-30T06:22:52.000Z
|
jax_cfd/base/time_stepping.py
|
google/jax-cfd
|
c28e55eb8f4afcc21a8ab77b7b30b661ad6bba76
|
[
"Apache-2.0"
] | 36
|
2021-05-29T09:30:44.000Z
|
2022-03-28T12:33:40.000Z
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time stepping for Navier-Stokes equations."""
import dataclasses
from typing import Callable, Sequence, TypeVar
import jax
from jax_cfd.base import tree_math
PyTreeState = TypeVar("PyTreeState")
TimeStepFn = Callable[[PyTreeState], PyTreeState]
class ExplicitNavierStokesODE:
"""Spatially discretized version of Navier-Stokes.
The equation is given by:
∂u/∂t = explicit_terms(u)
0 = incompressibility_constraint(u)
"""
def __init__(self, explicit_terms, pressure_projection):
self.explicit_terms = explicit_terms
self.pressure_projection = pressure_projection
def explicit_terms(self, state):
"""Explicitly evaluate the ODE."""
raise NotImplementedError
def pressure_projection(self, state):
"""Enforce the incompressibility constraint."""
raise NotImplementedError
@dataclasses.dataclass
class ButcherTableau:
a: Sequence[Sequence[float]]
b: Sequence[float]
# TODO(shoyer): add c, when we support time-dependent equations.
def __post_init__(self):
if len(self.a) + 1 != len(self.b):
raise ValueError("inconsistent Butcher tableau")
def navier_stokes_rk(
tableau: ButcherTableau,
equation: ExplicitNavierStokesODE,
time_step: float,
) -> TimeStepFn:
"""Create a forward Runge-Kutta time-stepper for incompressible Navier-Stokes.
This function implements the reference method (equations 16-21), rather than
the fast projection method, from:
"Fast-Projection Methods for the Incompressible Navier–Stokes Equations"
Fluids 2020, 5, 222; doi:10.3390/fluids5040222
Args:
tableau: Butcher tableau.
equation: equation to use.
time_step: overall time-step size.
Returns:
Function that advances one time-step forward.
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.pytree_to_vector_fun(equation.explicit_terms)
P = tree_math.pytree_to_vector_fun(equation.pressure_projection)
a = tableau.a
b = tableau.b
num_steps = len(b)
@tree_math.vector_to_pytree_fun
def step_fn(u0):
u = [None] * num_steps
k = [None] * num_steps
u[0] = u0
k[0] = F(u0)
for i in range(1, num_steps):
u_star = u0 + dt * sum(a[i-1][j] * k[j] for j in range(i) if a[i-1][j])
u[i] = P(u_star)
k[i] = F(u[i])
u_star = u0 + dt * sum(b[j] * k[j] for j in range(num_steps) if b[j])
u_final = P(u_star)
return u_final
return step_fn
def forward_euler(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[], b=[1]),
equation,
time_step),
name="forward_euler",
)
def midpoint_rk2(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1/2]], b=[0, 1]),
equation=equation,
time_step=time_step,
),
name="midpoint_rk2",
)
def heun_rk2(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1]], b=[1/2, 1/2]),
equation=equation,
time_step=time_step,
),
name="heun_rk2",
)
def classic_rk4(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1/2], [0, 1/2], [0, 0, 1]],
b=[1/6, 1/3, 1/3, 1/6]),
equation=equation,
time_step=time_step,
),
name="classic_rk4",
)
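# Illustrative usage sketch (not part of the original module): my_explicit_terms,
# my_pressure_projection and u0 are hypothetical placeholders for a discretized
# right-hand side, a projection operator and an initial velocity pytree.
#     ode = ExplicitNavierStokesODE(my_explicit_terms, my_pressure_projection)
#     step = classic_rk4(ode, time_step=1e-3)
#     u1 = step(u0)  # one time step; the projection enforces incompressibility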
| 26.194969
| 80
| 0.677791
|
import dataclasses
from typing import Callable, Sequence, TypeVar
import jax
from jax_cfd.base import tree_math
PyTreeState = TypeVar("PyTreeState")
TimeStepFn = Callable[[PyTreeState], PyTreeState]
class ExplicitNavierStokesODE:
def __init__(self, explicit_terms, pressure_projection):
self.explicit_terms = explicit_terms
self.pressure_projection = pressure_projection
def explicit_terms(self, state):
raise NotImplementedError
def pressure_projection(self, state):
raise NotImplementedError
@dataclasses.dataclass
class ButcherTableau:
a: Sequence[Sequence[float]]
b: Sequence[float]
def __post_init__(self):
if len(self.a) + 1 != len(self.b):
raise ValueError("inconsistent Butcher tableau")
def navier_stokes_rk(
tableau: ButcherTableau,
equation: ExplicitNavierStokesODE,
time_step: float,
) -> TimeStepFn:
dt = time_step
F = tree_math.pytree_to_vector_fun(equation.explicit_terms)
P = tree_math.pytree_to_vector_fun(equation.pressure_projection)
a = tableau.a
b = tableau.b
num_steps = len(b)
@tree_math.vector_to_pytree_fun
def step_fn(u0):
u = [None] * num_steps
k = [None] * num_steps
u[0] = u0
k[0] = F(u0)
for i in range(1, num_steps):
u_star = u0 + dt * sum(a[i-1][j] * k[j] for j in range(i) if a[i-1][j])
u[i] = P(u_star)
k[i] = F(u[i])
u_star = u0 + dt * sum(b[j] * k[j] for j in range(num_steps) if b[j])
u_final = P(u_star)
return u_final
return step_fn
def forward_euler(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[], b=[1]),
equation,
time_step),
name="forward_euler",
)
def midpoint_rk2(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1/2]], b=[0, 1]),
equation=equation,
time_step=time_step,
),
name="midpoint_rk2",
)
def heun_rk2(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1]], b=[1/2, 1/2]),
equation=equation,
time_step=time_step,
),
name="heun_rk2",
)
def classic_rk4(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1/2], [0, 1/2], [0, 0, 1]],
b=[1/6, 1/3, 1/3, 1/6]),
equation=equation,
time_step=time_step,
),
name="classic_rk4",
)
| true
| true
|
7909a4f4205bf95d34bbc759a08e7b7f13100771
| 160
|
py
|
Python
|
Mundo 1/Ex04.py
|
legna7/Python
|
52e0b642d1b7acc592ec82dd360c5697fb0765db
|
[
"MIT"
] | null | null | null |
Mundo 1/Ex04.py
|
legna7/Python
|
52e0b642d1b7acc592ec82dd360c5697fb0765db
|
[
"MIT"
] | null | null | null |
Mundo 1/Ex04.py
|
legna7/Python
|
52e0b642d1b7acc592ec82dd360c5697fb0765db
|
[
"MIT"
] | null | null | null |
a = input('Digite algo: ')
print(type(a))
print(a.isnumeric())
print(a.capitalize())
print(a.isalnum())
print(a.isdecimal())
print(a.islower())
print(a.upper())
| 20
| 26
| 0.68125
|
a = input('Digite algo: ')
print(type(a))
print(a.isnumeric())
print(a.capitalize())
print(a.isalnum())
print(a.isdecimal())
print(a.islower())
print(a.upper())
| true
| true
|
7909a5c38761ef61f6e73713d9934023aa1d6999
| 104
|
py
|
Python
|
sabueso/protein/show_function.py
|
dprada/sabueso
|
14843cf3522b5b89db5b61c1541a7015f114dd53
|
[
"MIT"
] | null | null | null |
sabueso/protein/show_function.py
|
dprada/sabueso
|
14843cf3522b5b89db5b61c1541a7015f114dd53
|
[
"MIT"
] | 2
|
2022-01-31T21:22:17.000Z
|
2022-02-04T20:20:12.000Z
|
sabueso/protein/show_function.py
|
dprada/sabueso
|
14843cf3522b5b89db5b61c1541a7015f114dd53
|
[
"MIT"
] | 1
|
2021-07-20T15:01:14.000Z
|
2021-07-20T15:01:14.000Z
|
def show_function(protein):
    from sabueso.entity.protein import get_function_card
    # Build the function card for the given protein entity before returning it.
    card = get_function_card(protein)
    return card
| 14.857143
| 56
| 0.769231
|
def show_function(protein):
    from sabueso.entity.protein import get_function_card
    card = get_function_card(protein)
    return card
| true
| true
|
7909a6034f89833d11ab2fff5f398036172680f8
| 168
|
py
|
Python
|
assignment_3/5.py
|
gauravjuvekar/ppl
|
fc5592623fa294c18a6e24444b9e06e2a08b2f6c
|
[
"WTFPL"
] | null | null | null |
assignment_3/5.py
|
gauravjuvekar/ppl
|
fc5592623fa294c18a6e24444b9e06e2a08b2f6c
|
[
"WTFPL"
] | null | null | null |
assignment_3/5.py
|
gauravjuvekar/ppl
|
fc5592623fa294c18a6e24444b9e06e2a08b2f6c
|
[
"WTFPL"
] | null | null | null |
#!/usr/bin/env python3
# Golfing for the fun of it
print('\n'.join(['{:^80}'.format(x)for x in['*']+[' '.join('*'*x)for x in range(3,int(input("Enter depth: "))+2)]]))
| 42
| 116
| 0.577381
|
print('\n'.join(['{:^80}'.format(x)for x in['*']+[' '.join('*'*x)for x in range(3,int(input("Enter depth: "))+2)]]))
| true
| true
|
7909a6a11add20f8ca7192bd012d6cc2891d69f5
| 2,052
|
py
|
Python
|
resources/tests/test_admin_period_inline.py
|
HotStew/respa
|
04f39efb15b4f4206a122e665f8377c7198e1f25
|
[
"MIT"
] | 49
|
2015-10-21T06:25:31.000Z
|
2022-03-20T07:24:20.000Z
|
resources/tests/test_admin_period_inline.py
|
HotStew/respa
|
04f39efb15b4f4206a122e665f8377c7198e1f25
|
[
"MIT"
] | 728
|
2015-06-24T13:26:54.000Z
|
2022-03-24T12:18:41.000Z
|
resources/tests/test_admin_period_inline.py
|
HotStew/respa
|
04f39efb15b4f4206a122e665f8377c7198e1f25
|
[
"MIT"
] | 46
|
2015-06-26T10:52:57.000Z
|
2021-12-17T09:38:25.000Z
|
# -*- coding: utf-8 -*-
from datetime import date, time
import pytest
from django.contrib.admin import site as admin_site
from resources.admin.period_inline import PeriodModelForm, prefix_weekday
from resources.models import Period, Resource
from resources.models.unit import Unit
from resources.tests.utils import assert_response_contains, get_form_data
@pytest.mark.django_db
@pytest.mark.parametrize("commit", (False, True))
def test_period_model_form(space_resource, commit):
period = Period(resource=space_resource, start=date(2015, 8, 1), end=date(2015, 11, 1), name="plop")
period.full_clean()
period.save()
for wd in range(7):
period.days.create(weekday=wd, opens=time(9, wd * 2), closes=time(12 + wd))
pmf = PeriodModelForm(instance=period)
data = get_form_data(pmf, prepared=True)
# Make every day open at 06, set closed on wednesdays
for key in list(data.keys()):
if key.startswith(prefix_weekday(2, "")):
data[key] = ""
elif key.endswith("opens"):
data[key] = "06:00"
pmf = PeriodModelForm(instance=period, data=data)
assert pmf.is_valid()
period = pmf.save(commit=commit)
if not commit:
period.save()
pmf.save_m2m()
assert all(day.opens.hour == 6 for day in period.days.all())
assert not period.days.filter(weekday=2).exists() # Weekdays _got_ closed, yeah?
@pytest.mark.django_db
@pytest.mark.parametrize("model", (Resource, Unit))
def test_period_inline_containing_admins_work(rf, admin_user, model, space_resource, test_unit):
if model is Resource:
instance = space_resource
elif model is Unit:
instance = test_unit
else:
raise NotImplementedError("Unexpected parametrization")
admin = admin_site._registry[model] # Sorry for accessing a private member :(
request = rf.get("/")
request.user = admin_user
response = admin.change_view(request, instance.pk)
assert_response_contains(response, prefix_weekday(2, "opens")) # should have a weekday field
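# run with pytest; rf and admin_user are standard pytest-django fixtures, while space_resource and test_unit are assumed to come from the project's conftest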
| 36.642857
| 104
| 0.703704
|
from datetime import date, time
import pytest
from django.contrib.admin import site as admin_site
from resources.admin.period_inline import PeriodModelForm, prefix_weekday
from resources.models import Period, Resource
from resources.models.unit import Unit
from resources.tests.utils import assert_response_contains, get_form_data
@pytest.mark.django_db
@pytest.mark.parametrize("commit", (False, True))
def test_period_model_form(space_resource, commit):
period = Period(resource=space_resource, start=date(2015, 8, 1), end=date(2015, 11, 1), name="plop")
period.full_clean()
period.save()
for wd in range(7):
period.days.create(weekday=wd, opens=time(9, wd * 2), closes=time(12 + wd))
pmf = PeriodModelForm(instance=period)
data = get_form_data(pmf, prepared=True)
for key in list(data.keys()):
if key.startswith(prefix_weekday(2, "")):
data[key] = ""
elif key.endswith("opens"):
data[key] = "06:00"
pmf = PeriodModelForm(instance=period, data=data)
assert pmf.is_valid()
period = pmf.save(commit=commit)
if not commit:
period.save()
pmf.save_m2m()
assert all(day.opens.hour == 6 for day in period.days.all())
assert not period.days.filter(weekday=2).exists()
@pytest.mark.django_db
@pytest.mark.parametrize("model", (Resource, Unit))
def test_period_inline_containing_admins_work(rf, admin_user, model, space_resource, test_unit):
if model is Resource:
instance = space_resource
elif model is Unit:
instance = test_unit
else:
raise NotImplementedError("Unexpected parametrization")
admin = admin_site._registry[model]
request = rf.get("/")
request.user = admin_user
response = admin.change_view(request, instance.pk)
assert_response_contains(response, prefix_weekday(2, "opens"))
| true
| true
|
7909a7287c260626bcd39624ff164901fce9b1df
| 27,184
|
py
|
Python
|
scripts/cinder-consistency.py
|
DEiselt/openstack-nannies
|
1349c03ff30ca5abefc46877178bc97d8551ac48
|
[
"Apache-2.0"
] | null | null | null |
scripts/cinder-consistency.py
|
DEiselt/openstack-nannies
|
1349c03ff30ca5abefc46877178bc97d8551ac48
|
[
"Apache-2.0"
] | null | null | null |
scripts/cinder-consistency.py
|
DEiselt/openstack-nannies
|
1349c03ff30ca5abefc46877178bc97d8551ac48
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) 2018 SAP SE
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# this script checks for volume attachments of already deleted volumes in the cinder db
import argparse
import configparser
import datetime
import logging
import os
import sys
from openstack import connection, exceptions
from sqlalchemy import and_, MetaData, select, Table, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
# get all instances from nova
def get_nova_instances(conn):
nova_instances = dict()
# get all instances from nova
try:
for nova_instance in conn.compute.servers(details=False, all_projects=1):
nova_instances[nova_instance.id] = nova_instance
if not nova_instances:
raise RuntimeError('- PLEASE CHECK MANUALLY - did not get any nova instances back from the nova api - this should in theory never happen ...')
except exceptions.HttpException as e:
log.warn("- PLEASE CHECK MANUALLY - got an http exception connecting to openstack: %s", str(e))
sys.exit(1)
except exceptions.SDKException as e:
log.warn("- PLEASE CHECK MANUALLY - got an sdk exception connecting to openstack: %s", str(e))
sys.exit(1)
#for i in nova_instances:
# print nova_instances[i].id
if not nova_instances:
raise RuntimeError('Did not get any nova instances back.')
return nova_instances
# get all non-deleted volume attachments and the instance uuid each one points to
def get_orphan_volume_attachments(meta):
orphan_volume_attachments = {}
orphan_volume_attachment_t = Table('volume_attachment', meta, autoload=True)
columns = [orphan_volume_attachment_t.c.id, orphan_volume_attachment_t.c.instance_uuid]
orphan_volume_attachment_q = select(columns=columns, whereclause=and_(orphan_volume_attachment_t.c.deleted == 0))
# return a dict indexed by orphan_volume_attachment_id and with the value nova_instance_uuid for non deleted orphan_volume_attachments
for (orphan_volume_attachment_id, nova_instance_uuid) in orphan_volume_attachment_q.execute():
orphan_volume_attachments[orphan_volume_attachment_id] = nova_instance_uuid
return orphan_volume_attachments
# get all the volume attachments in the cinder db for already deleted instances in nova
def get_wrong_orphan_volume_attachments(nova_instances, orphan_volume_attachments):
wrong_orphan_volume_attachments = {}
for orphan_volume_attachment_id in orphan_volume_attachments:
if nova_instances.get(orphan_volume_attachments[orphan_volume_attachment_id]) is None:
wrong_orphan_volume_attachments[orphan_volume_attachment_id] = orphan_volume_attachments[orphan_volume_attachment_id]
return wrong_orphan_volume_attachments
# delete volume attachments in the cinder db for already deleted instances in nova
def fix_wrong_orphan_volume_attachments(meta, wrong_orphan_volume_attachments, fix_limit):
if len(wrong_orphan_volume_attachments) <= int(fix_limit):
orphan_volume_attachment_t = Table('volume_attachment', meta, autoload=True)
for orphan_volume_attachment_id in wrong_orphan_volume_attachments:
log.info ("-- action: deleting orphan volume attachment id: %s", orphan_volume_attachment_id)
now = datetime.datetime.utcnow()
delete_orphan_volume_attachment_q = orphan_volume_attachment_t.update().\
where(orphan_volume_attachment_t.c.id == orphan_volume_attachment_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_orphan_volume_attachment_q.execute()
else:
log.warn("- PLEASE CHECK MANUALLY - too many (more than %s) wrong orphan volume attachments - denying to fix them automatically", str(fix_limit))
# get all the volumes in state "error_deleting"
def get_error_deleting_volumes(meta):
error_deleting_volumes = []
volumes_t = Table('volumes', meta, autoload=True)
error_deleting_volumes_q = select(columns=[volumes_t.c.id]).where(and_(volumes_t.c.status == "error_deleting",volumes_t.c.deleted == 0))
# convert the query result into a list
for i in error_deleting_volumes_q.execute():
error_deleting_volumes.append(i[0])
return error_deleting_volumes
# delete all the volumes in state "error_deleting"
def fix_error_deleting_volumes(meta, error_deleting_volumes):
volumes_t = Table('volumes', meta, autoload=True)
volume_attachment_t = Table('volume_attachment', meta, autoload=True)
volume_metadata_t = Table('volume_metadata', meta, autoload=True)
volume_admin_metadata_t = Table('volume_admin_metadata', meta, autoload=True)
for error_deleting_volumes_id in error_deleting_volumes:
now = datetime.datetime.utcnow()
log.info("-- action: deleting possible volume admin metadata for volume id: %s", error_deleting_volumes_id)
delete_volume_admin_metadata_q = volume_admin_metadata_t.update().\
where(volume_admin_metadata_t.c.volume_id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_admin_metadata_q.execute()
log.info("-- action: deleting possible volume metadata for volume id: %s", error_deleting_volumes_id)
delete_volume_metadata_q = volume_metadata_t.update().\
where(volume_metadata_t.c.volume_id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_metadata_q.execute()
log.info("-- action: deleting possible volume attachments for volume id: %s", error_deleting_volumes_id)
delete_volume_attachment_q = volume_attachment_t.update().\
where(volume_attachment_t.c.volume_id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_attachment_q.execute()
log.info("-- action: deleting volume id: %s", error_deleting_volumes_id)
delete_volume_q = volumes_t.update().\
where(volumes_t.c.id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_q.execute()
# get all the snapshots in state "error_deleting"
def get_error_deleting_snapshots(meta):
error_deleting_snapshots = []
snapshots_t = Table('snapshots', meta, autoload=True)
error_deleting_snapshots_q = select(columns=[snapshots_t.c.id]).where(and_(snapshots_t.c.status == "error_deleting",snapshots_t.c.deleted == 0))
# convert the query result into a list
for i in error_deleting_snapshots_q.execute():
error_deleting_snapshots.append(i[0])
return error_deleting_snapshots
# delete all the snapshots in state "error_deleting"
def fix_error_deleting_snapshots(meta, error_deleting_snapshots):
snapshots_t = Table('snapshots', meta, autoload=True)
for error_deleting_snapshots_id in error_deleting_snapshots:
log.info("-- action: deleting snapshot id: %s", error_deleting_snapshots_id)
now = datetime.datetime.utcnow()
delete_snapshot_q = snapshots_t.update().\
where(snapshots_t.c.id == error_deleting_snapshots_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_snapshot_q.execute()
# get all the rows with a volume_admin_metadata still defined where the corresponding volume is already deleted
def get_wrong_volume_admin_metadata(meta):
wrong_admin_metadata = {}
volume_admin_metadata_t = Table('volume_admin_metadata', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
admin_metadata_join = volume_admin_metadata_t.join(volumes_t,volume_admin_metadata_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_admin_metadata_t.c.id, volume_admin_metadata_t.c.deleted]
wrong_volume_admin_metadata_q = select(columns=columns).select_from(admin_metadata_join).\
where(and_(volumes_t.c.deleted == 1, volume_admin_metadata_t.c.deleted == 0))
# return a dict indexed by volume_admin_metadata_id and with the value volume_id for non deleted volume_admin_metadata
for (volume_id, volume_deleted, volume_admin_metadata_id, volume_admin_metadata_deleted) in wrong_volume_admin_metadata_q.execute():
wrong_admin_metadata[volume_admin_metadata_id] = volume_id
return wrong_admin_metadata
# delete volume_admin_metadata still defined where the corresponding volume is already deleted
def fix_wrong_volume_admin_metadata(meta, wrong_admin_metadata):
volume_admin_metadata_t = Table('volume_admin_metadata', meta, autoload=True)
for volume_admin_metadata_id in wrong_admin_metadata:
log.info("-- action: deleting volume_admin_metadata id: %s", volume_admin_metadata_id)
now = datetime.datetime.utcnow()
delete_volume_admin_metadata_q = volume_admin_metadata_t.update().\
where(volume_admin_metadata_t.c.id == volume_admin_metadata_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_admin_metadata_q.execute()
# get all the rows with a volume_glance_metadata still defined where the corresponding volume is already deleted
def get_wrong_volume_glance_metadata(meta):
wrong_glance_metadata = {}
volume_glance_metadata_t = Table('volume_glance_metadata', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
glance_metadata_join = volume_glance_metadata_t.join(volumes_t,volume_glance_metadata_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_glance_metadata_t.c.id, volume_glance_metadata_t.c.deleted]
wrong_volume_glance_metadata_q = select(columns=columns).select_from(glance_metadata_join).\
where(and_(volumes_t.c.deleted == 1, volume_glance_metadata_t.c.deleted == 0))
# return a dict indexed by volume_glance_metadata_id and with the value volume_id for non deleted volume_glance_metadata
for (volume_id, volume_deleted, volume_glance_metadata_id, volume_glance_metadata_deleted) in wrong_volume_glance_metadata_q.execute():
wrong_glance_metadata[volume_glance_metadata_id] = volume_id
return wrong_glance_metadata
# delete volume_glance_metadata still defined where the corresponding volume is already deleted
def fix_wrong_volume_glance_metadata(meta, wrong_glance_metadata):
volume_glance_metadata_t = Table('volume_glance_metadata', meta, autoload=True)
for volume_glance_metadata_id in wrong_glance_metadata:
log.info("-- action: deleting volume_glance_metadata id: %s", volume_glance_metadata_id)
now = datetime.datetime.utcnow()
delete_volume_glance_metadata_q = volume_glance_metadata_t.update().\
where(volume_glance_metadata_t.c.id == volume_glance_metadata_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_glance_metadata_q.execute()
# get all the rows with a volume_metadata still defined where the corresponding volume is already deleted
def get_wrong_volume_metadata(meta):
wrong_metadata = {}
volume_metadata_t = Table('volume_metadata', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
metadata_join = volume_metadata_t.join(volumes_t,volume_metadata_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_metadata_t.c.id, volume_metadata_t.c.deleted]
wrong_volume_metadata_q = select(columns=columns).select_from(metadata_join).\
where(and_(volumes_t.c.deleted == 1, volume_metadata_t.c.deleted == 0))
# return a dict indexed by volume_metadata_id and with the value volume_id for non deleted volume_metadata
for (volume_id, volume_deleted, volume_metadata_id, volume_metadata_deleted) in wrong_volume_metadata_q.execute():
wrong_metadata[volume_metadata_id] = volume_id
return wrong_metadata
# delete volume_metadata still defined where the corresponding volume is already deleted
def fix_wrong_volume_metadata(meta, wrong_metadata):
volume_metadata_t = Table('volume_metadata', meta, autoload=True)
for volume_metadata_id in wrong_metadata:
log.info("-- action: deleting volume_metadata id: %s", volume_metadata_id)
now = datetime.datetime.utcnow()
delete_volume_metadata_q = volume_metadata_t.update().\
where(volume_metadata_t.c.id == volume_metadata_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_metadata_q.execute()
# get all the rows with a volume attachment still defined where the corresponding volume is already deleted
def get_wrong_volume_attachments(meta):
wrong_attachments = {}
volume_attachment_t = Table('volume_attachment', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
attachment_join = volume_attachment_t.join(volumes_t,volume_attachment_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_attachment_t.c.id, volume_attachment_t.c.deleted]
wrong_volume_attachment_q = select(columns=columns).select_from(attachment_join).\
where(and_(volumes_t.c.deleted == 1, volume_attachment_t.c.deleted == 0))
# return a dict indexed by volume_attachment_id and with the value volume_id for non deleted volume_attachments
for (volume_id, volume_deleted, volume_attachment_id, volume_attachment_deleted) in wrong_volume_attachment_q.execute():
wrong_attachments[volume_attachment_id] = volume_id
return wrong_attachments
# delete volume attachment still defined where the corresponding volume is already deleted
def fix_wrong_volume_attachments(meta, wrong_attachments, fix_limit):
if len(wrong_attachments) <= int(fix_limit):
volume_attachment_t = Table('volume_attachment', meta, autoload=True)
for volume_attachment_id in wrong_attachments:
log.info("-- action: deleting volume attachment id: %s", volume_attachment_id)
now = datetime.datetime.utcnow()
delete_volume_attachment_q = volume_attachment_t.update().\
where(volume_attachment_t.c.id == volume_attachment_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_attachment_q.execute()
else:
log.warn("- PLEASE CHECK MANUALLY - too many (more than %s) wrong volume attachments - denying to fix them automatically", str(fix_limit))
# get all the rows which have the deleted flag set but no deleted_at timestamp
def get_missing_deleted_at(meta, table_names):
missing_deleted_at = {}
for t in table_names:
a_table_t = Table(t, meta, autoload=True)
a_table_select_deleted_at_q = a_table_t.select().where(
and_(a_table_t.c.deleted == 1, a_table_t.c.deleted_at == None))
for row in a_table_select_deleted_at_q.execute():
missing_deleted_at[row.id] = t
return missing_deleted_at
# set deleted_at to the current time for rows marked as deleted that are missing it
def fix_missing_deleted_at(meta, table_names):
now = datetime.datetime.utcnow()
for t in table_names:
a_table_t = Table(t, meta, autoload=True)
log.info("- action: fixing columns with missing deleted_at times in the %s table", t)
a_table_set_deleted_at_q = a_table_t.update().where(
and_(a_table_t.c.deleted == 1, a_table_t.c.deleted_at == None)).values(
deleted_at=now)
a_table_set_deleted_at_q.execute()
# get all services already marked as deleted that are still referenced by non-deleted volumes
def get_deleted_services_still_used_in_volumes(meta):
deleted_services_still_used_in_volumes = {}
services_t = Table('services', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
services_volumes_join = services_t.join(volumes_t,services_t.c.uuid == volumes_t.c.service_uuid)
columns = [services_t.c.uuid, services_t.c.deleted, volumes_t.c.id, volumes_t.c.deleted]
deleted_services_still_used_in_volumes_q = select(columns=columns).select_from(services_volumes_join).\
where(and_(volumes_t.c.deleted == 0, services_t.c.deleted == 1))
# return a dict indexed by service_uuid and with the value volume_id for deleted but still referenced services
for (service_uuid, service_deleted, volume_id, volume_deleted) in deleted_services_still_used_in_volumes_q.execute():
deleted_services_still_used_in_volumes[service_uuid] = volume_id
return deleted_services_still_used_in_volumes
# undelete services that are marked as deleted but still referenced by non-deleted volumes
def fix_deleted_services_still_used_in_volumes(meta, deleted_services_still_used_in_volumes):
services_t = Table('services', meta, autoload=True)
for deleted_services_still_used_in_volumes_id in deleted_services_still_used_in_volumes:
log.info("-- action: undeleting service uuid: %s", deleted_services_still_used_in_volumes_id)
undelete_services_q = services_t.update().where(services_t.c.uuid == deleted_services_still_used_in_volumes_id).values(deleted=0,deleted_at=None)
undelete_services_q.execute()
# establish an openstack connection
def makeOsConnection():
try:
conn = connection.Connection(auth_url=os.getenv('OS_AUTH_URL'),
project_name=os.getenv('OS_PROJECT_NAME'),
project_domain_name=os.getenv('OS_PROJECT_DOMAIN_NAME'),
username=os.getenv('OS_USERNAME'),
user_domain_name=os.getenv('OS_USER_DOMAIN_NAME'),
password=os.getenv('OS_PASSWORD'),
identity_api_version="3")
except Exception as e:
log.warn("- PLEASE CHECK MANUALLY - problems connecting to openstack: %s",
str(e))
sys.exit(1)
return conn
# establish a database connection and return the handle
def makeConnection(db_url):
engine = create_engine(db_url)
engine.connect()
Session = sessionmaker(bind=engine)
thisSession = Session()
metadata = MetaData()
metadata.bind = engine
Base = declarative_base()
return thisSession, metadata, Base
# return the database connection string from the config file
def get_db_url(config_file):
parser = configparser.SafeConfigParser()
try:
parser.read(config_file)
db_url = parser.get('database', 'connection', raw=True)
except:
log.info("ERROR: Check Cinder configuration file.")
sys.exit(2)
return db_url
# cmdline handling
def parse_cmdline_args():
parser = argparse.ArgumentParser()
parser.add_argument("--config",
default='./cinder.conf',
help='configuration file')
parser.add_argument("--dry-run",
action="store_true",
help='print only what would be done without actually doing it')
parser.add_argument("--fix-limit",
default=25,
help='maximum number of inconsistencies to fix automatically - if there are more, automatic fixing is denied')
return parser.parse_args()
def main():
try:
args = parse_cmdline_args()
except Exception as e:
log.error("Check command line arguments (%s)", e.strerror)
# connect to openstack
conn = makeOsConnection()
# connect to the DB
db_url = get_db_url(args.config)
cinder_session, cinder_metadata, cinder_Base = makeConnection(db_url)
# fixing volume attachments at no longer existing instances
orphan_volume_attachments = get_orphan_volume_attachments(cinder_metadata)
nova_instances = get_nova_instances(conn)
wrong_orphan_volume_attachments = get_wrong_orphan_volume_attachments(nova_instances, orphan_volume_attachments)
if len(wrong_orphan_volume_attachments) != 0:
log.info("- orphan volume attachments found:")
# print out what we would delete
for orphan_volume_attachment_id in wrong_orphan_volume_attachments:
log.info("-- orphan volume attachment (id in cinder db: %s) for non existent instance in nova: %s", orphan_volume_attachment_id,
orphan_volume_attachments[orphan_volume_attachment_id])
if not args.dry_run:
log.info("- deleting orphan volume attachment inconsistencies found")
fix_wrong_orphan_volume_attachments(cinder_metadata, wrong_orphan_volume_attachments, args.fix_limit)
else:
log.info("- no orphan volume attachments found")
# fixing possible volumes in state "error-deleting"
error_deleting_volumes = get_error_deleting_volumes(cinder_metadata)
if len(error_deleting_volumes) != 0:
log.info("- volumes in state error_deleting found")
# print out what we would delete
for error_deleting_volumes_id in error_deleting_volumes:
log.info("-- volume id: %s", error_deleting_volumes_id)
if not args.dry_run:
log.info("- deleting volumes in state error_deleting")
fix_error_deleting_volumes(cinder_metadata, error_deleting_volumes)
else:
log.info("- no volumes in state error_deleting found")
# fixing possible snapshots in state "error-deleting"
error_deleting_snapshots = get_error_deleting_snapshots(cinder_metadata)
if len(error_deleting_snapshots) != 0:
log.info("- snapshots in state error_deleting found")
# print out what we would delete
for error_deleting_snapshots_id in error_deleting_snapshots:
log.info("-- snapshot id: %s", error_deleting_snapshots_id)
if not args.dry_run:
log.info("- deleting snapshots in state error_deleting")
fix_error_deleting_snapshots(cinder_metadata, error_deleting_snapshots)
else:
log.info("- no snapshots in state error_deleting found")
# fixing possible wrong admin_metadata entries
wrong_admin_metadata = get_wrong_volume_admin_metadata(cinder_metadata)
if len(wrong_admin_metadata) != 0:
log.info("- volume_admin_metadata inconsistencies found")
# print out what we would delete
for volume_admin_metadata_id in wrong_admin_metadata:
log.info("-- volume_admin_metadata id: %s - deleted volume id: %s", volume_admin_metadata_id, wrong_admin_metadata[volume_admin_metadata_id])
if not args.dry_run:
log.info("- removing volume_admin_metadata inconsistencies found")
fix_wrong_volume_admin_metadata(cinder_metadata, wrong_admin_metadata)
else:
log.info("- volume_admin_metadata entries are consistent")
# fixing possible wrong glance_metadata entries
wrong_glance_metadata = get_wrong_volume_glance_metadata(cinder_metadata)
if len(wrong_glance_metadata) != 0:
log.info("- volume_glance_metadata inconsistencies found")
# print out what we would delete
for volume_glance_metadata_id in wrong_glance_metadata:
log.info("-- volume_glance_metadata id: %s - deleted volume id: %s", volume_glance_metadata_id, wrong_glance_metadata[volume_glance_metadata_id])
if not args.dry_run:
log.info("- removing volume_glance_metadata inconsistencies found")
fix_wrong_volume_glance_metadata(cinder_metadata, wrong_glance_metadata)
else:
log.info("- volume_glance_metadata entries are consistent")
# fixing possible wrong metadata entries
wrong_metadata = get_wrong_volume_metadata(cinder_metadata)
if len(wrong_metadata) != 0:
log.info("- volume_metadata inconsistencies found")
# print out what we would delete
for volume_metadata_id in wrong_metadata:
log.info("-- volume_metadata id: %s - deleted volume id: %s", volume_metadata_id, wrong_metadata[volume_metadata_id])
if not args.dry_run:
log.info("- removing volume_metadata inconsistencies found")
fix_wrong_volume_metadata(cinder_metadata, wrong_metadata)
else:
log.info("- volume_metadata entries are consistent")
# fixing possible wrong attachment entries
wrong_attachments = get_wrong_volume_attachments(cinder_metadata)
if len(wrong_attachments) != 0:
log.info("- volume attachment inconsistencies found")
# print out what we would delete
for volume_attachment_id in wrong_attachments:
log.info("-- volume attachment id: %s - deleted volume id: %s", volume_attachment_id, wrong_attachments[volume_attachment_id])
if not args.dry_run:
log.info("- removing volume attachment inconsistencies found")
fix_wrong_volume_attachments(cinder_metadata, wrong_attachments, args.fix_limit)
else:
log.info("- volume attachments are consistent")
# fixing possible missing deleted_at timestamps in some tables
# tables which sometimes have missing deleted_at values
table_names = [ 'snapshots', 'volume_attachment' ]
missing_deleted_at = get_missing_deleted_at(cinder_metadata, table_names)
if len(missing_deleted_at) != 0:
log.info("- missing deleted_at values found:")
# print out what we would delete
for missing_deleted_at_id in missing_deleted_at:
log.info("--- id %s of the %s table is missing deleted_at time", missing_deleted_at_id, missing_deleted_at[missing_deleted_at_id])
if not args.dry_run:
log.info("- setting missing deleted_at values")
fix_missing_deleted_at(cinder_metadata, table_names)
else:
log.info("- no missing deleted_at values")
deleted_services_still_used_in_volumes = get_deleted_services_still_used_in_volumes(cinder_metadata)
if len(deleted_services_still_used_in_volumes) != 0:
log.info("- deleted services still used in volumes found:")
# print out what we would delete
for deleted_services_still_used_in_volumes_id in deleted_services_still_used_in_volumes:
log.info("--- deleted service uuid %s still used in volumes table entry %s", deleted_services_still_used_in_volumes_id, deleted_services_still_used_in_volumes[deleted_services_still_used_in_volumes_id])
if not args.dry_run:
log.info("- undeleting service uuid still used in volumes table")
fix_deleted_services_still_used_in_volumes(cinder_metadata, deleted_services_still_used_in_volumes)
else:
log.info("- deleted services still used in volumes")
if __name__ == "__main__":
main()
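# typical invocation (example values assumed), relying on the OS_* environment variables read above:
#   python cinder-consistency.py --config /etc/cinder/cinder.conf --dry-run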
| 48.716846
| 214
| 0.739038
|
import argparse
import configparser
import datetime
import logging
import os
import sys
from openstack import connection, exceptions
from sqlalchemy import and_, MetaData, select, Table, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
def get_nova_instances(conn):
nova_instances = dict()
try:
for nova_instance in conn.compute.servers(details=False, all_projects=1):
nova_instances[nova_instance.id] = nova_instance
if not nova_instances:
raise RuntimeError('- PLEASE CHECK MANUALLY - did not get any nova instances back from the nova api - this should in theory never happen ...')
except exceptions.HttpException as e:
log.warn("- PLEASE CHECK MANUALLY - got an http exception connecting to openstack: %s", str(e))
sys.exit(1)
except exceptions.SDKException as e:
log.warn("- PLEASE CHECK MANUALLY - got an sdk exception connecting to openstack: %s", str(e))
sys.exit(1)
if not nova_instances:
raise RuntimeError('Did not get any nova instances back.')
return nova_instances
def get_orphan_volume_attachments(meta):
orphan_volume_attachments = {}
orphan_volume_attachment_t = Table('volume_attachment', meta, autoload=True)
columns = [orphan_volume_attachment_t.c.id, orphan_volume_attachment_t.c.instance_uuid]
orphan_volume_attachment_q = select(columns=columns, whereclause=and_(orphan_volume_attachment_t.c.deleted == 0))
for (orphan_volume_attachment_id, nova_instance_uuid) in orphan_volume_attachment_q.execute():
orphan_volume_attachments[orphan_volume_attachment_id] = nova_instance_uuid
return orphan_volume_attachments
def get_wrong_orphan_volume_attachments(nova_instances, orphan_volume_attachments):
wrong_orphan_volume_attachments = {}
for orphan_volume_attachment_id in orphan_volume_attachments:
if nova_instances.get(orphan_volume_attachments[orphan_volume_attachment_id]) is None:
wrong_orphan_volume_attachments[orphan_volume_attachment_id] = orphan_volume_attachments[orphan_volume_attachment_id]
return wrong_orphan_volume_attachments
def fix_wrong_orphan_volume_attachments(meta, wrong_orphan_volume_attachments, fix_limit):
if len(wrong_orphan_volume_attachments) <= int(fix_limit):
orphan_volume_attachment_t = Table('volume_attachment', meta, autoload=True)
for orphan_volume_attachment_id in wrong_orphan_volume_attachments:
log.info ("-- action: deleting orphan volume attachment id: %s", orphan_volume_attachment_id)
now = datetime.datetime.utcnow()
delete_orphan_volume_attachment_q = orphan_volume_attachment_t.update().\
where(orphan_volume_attachment_t.c.id == orphan_volume_attachment_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_orphan_volume_attachment_q.execute()
else:
log.warn("- PLEASE CHECK MANUALLY - too many (more than %s) wrong orphan volume attachments - denying to fix them automatically", str(fix_limit))
def get_error_deleting_volumes(meta):
error_deleting_volumes = []
volumes_t = Table('volumes', meta, autoload=True)
error_deleting_volumes_q = select(columns=[volumes_t.c.id]).where(and_(volumes_t.c.status == "error_deleting",volumes_t.c.deleted == 0))
for i in error_deleting_volumes_q.execute():
error_deleting_volumes.append(i[0])
return error_deleting_volumes
def fix_error_deleting_volumes(meta, error_deleting_volumes):
volumes_t = Table('volumes', meta, autoload=True)
volume_attachment_t = Table('volume_attachment', meta, autoload=True)
volume_metadata_t = Table('volume_metadata', meta, autoload=True)
volume_admin_metadata_t = Table('volume_admin_metadata', meta, autoload=True)
for error_deleting_volumes_id in error_deleting_volumes:
now = datetime.datetime.utcnow()
log.info("-- action: deleting possible volume admin metadata for volume id: %s", error_deleting_volumes_id)
delete_volume_admin_metadata_q = volume_admin_metadata_t.update().\
where(volume_admin_metadata_t.c.volume_id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_admin_metadata_q.execute()
log.info("-- action: deleting possible volume metadata for volume id: %s", error_deleting_volumes_id)
delete_volume_metadata_q = volume_metadata_t.update().\
where(volume_metadata_t.c.volume_id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_metadata_q.execute()
log.info("-- action: deleting possible volume attachments for volume id: %s", error_deleting_volumes_id)
delete_volume_attachment_q = volume_attachment_t.update().\
where(volume_attachment_t.c.volume_id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_attachment_q.execute()
log.info("-- action: deleting volume id: %s", error_deleting_volumes_id)
delete_volume_q = volumes_t.update().\
where(volumes_t.c.id == error_deleting_volumes_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_q.execute()
def get_error_deleting_snapshots(meta):
error_deleting_snapshots = []
snapshots_t = Table('snapshots', meta, autoload=True)
error_deleting_snapshots_q = select(columns=[snapshots_t.c.id]).where(and_(snapshots_t.c.status == "error_deleting",snapshots_t.c.deleted == 0))
for i in error_deleting_snapshots_q.execute():
error_deleting_snapshots.append(i[0])
return error_deleting_snapshots
def fix_error_deleting_snapshots(meta, error_deleting_snapshots):
snapshots_t = Table('snapshots', meta, autoload=True)
for error_deleting_snapshots_id in error_deleting_snapshots:
log.info("-- action: deleting snapshot id: %s", error_deleting_snapshots_id)
now = datetime.datetime.utcnow()
delete_snapshot_q = snapshots_t.update().\
where(snapshots_t.c.id == error_deleting_snapshots_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_snapshot_q.execute()
def get_wrong_volume_admin_metadata(meta):
wrong_admin_metadata = {}
volume_admin_metadata_t = Table('volume_admin_metadata', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
admin_metadata_join = volume_admin_metadata_t.join(volumes_t,volume_admin_metadata_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_admin_metadata_t.c.id, volume_admin_metadata_t.c.deleted]
wrong_volume_admin_metadata_q = select(columns=columns).select_from(admin_metadata_join).\
where(and_(volumes_t.c.deleted == 1, volume_admin_metadata_t.c.deleted == 0))
for (volume_id, volume_deleted, volume_admin_metadata_id, volume_admin_metadata_deleted) in wrong_volume_admin_metadata_q.execute():
wrong_admin_metadata[volume_admin_metadata_id] = volume_id
return wrong_admin_metadata
def fix_wrong_volume_admin_metadata(meta, wrong_admin_metadata):
volume_admin_metadata_t = Table('volume_admin_metadata', meta, autoload=True)
for volume_admin_metadata_id in wrong_admin_metadata:
log.info("-- action: deleting volume_admin_metadata id: %s", volume_admin_metadata_id)
now = datetime.datetime.utcnow()
delete_volume_admin_metadata_q = volume_admin_metadata_t.update().\
where(volume_admin_metadata_t.c.id == volume_admin_metadata_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_admin_metadata_q.execute()
def get_wrong_volume_glance_metadata(meta):
wrong_glance_metadata = {}
volume_glance_metadata_t = Table('volume_glance_metadata', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
glance_metadata_join = volume_glance_metadata_t.join(volumes_t,volume_glance_metadata_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_glance_metadata_t.c.id, volume_glance_metadata_t.c.deleted]
wrong_volume_glance_metadata_q = select(columns=columns).select_from(glance_metadata_join).\
where(and_(volumes_t.c.deleted == 1, volume_glance_metadata_t.c.deleted == 0))
for (volume_id, volume_deleted, volume_glance_metadata_id, volume_glance_metadata_deleted) in wrong_volume_glance_metadata_q.execute():
wrong_glance_metadata[volume_glance_metadata_id] = volume_id
return wrong_glance_metadata
def fix_wrong_volume_glance_metadata(meta, wrong_glance_metadata):
volume_glance_metadata_t = Table('volume_glance_metadata', meta, autoload=True)
for volume_glance_metadata_id in wrong_glance_metadata:
log.info("-- action: deleting volume_glance_metadata id: %s", volume_glance_metadata_id)
now = datetime.datetime.utcnow()
delete_volume_glance_metadata_q = volume_glance_metadata_t.update().\
where(volume_glance_metadata_t.c.id == volume_glance_metadata_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_glance_metadata_q.execute()
def get_wrong_volume_metadata(meta):
wrong_metadata = {}
volume_metadata_t = Table('volume_metadata', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
metadata_join = volume_metadata_t.join(volumes_t,volume_metadata_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_metadata_t.c.id, volume_metadata_t.c.deleted]
wrong_volume_metadata_q = select(columns=columns).select_from(metadata_join).\
where(and_(volumes_t.c.deleted == 1, volume_metadata_t.c.deleted == 0))
for (volume_id, volume_deleted, volume_metadata_id, volume_metadata_deleted) in wrong_volume_metadata_q.execute():
wrong_metadata[volume_metadata_id] = volume_id
return wrong_metadata
def fix_wrong_volume_metadata(meta, wrong_metadata):
volume_metadata_t = Table('volume_metadata', meta, autoload=True)
for volume_metadata_id in wrong_metadata:
log.info("-- action: deleting volume_metadata id: %s", volume_metadata_id)
now = datetime.datetime.utcnow()
delete_volume_metadata_q = volume_metadata_t.update().\
where(volume_metadata_t.c.id == volume_metadata_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_metadata_q.execute()
def get_wrong_volume_attachments(meta):
wrong_attachments = {}
volume_attachment_t = Table('volume_attachment', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
attachment_join = volume_attachment_t.join(volumes_t,volume_attachment_t.c.volume_id == volumes_t.c.id)
columns = [volumes_t.c.id, volumes_t.c.deleted, volume_attachment_t.c.id, volume_attachment_t.c.deleted]
wrong_volume_attachment_q = select(columns=columns).select_from(attachment_join).\
where(and_(volumes_t.c.deleted == 1, volume_attachment_t.c.deleted == 0))
for (volume_id, volume_deleted, volume_attachment_id, volume_attachment_deleted) in wrong_volume_attachment_q.execute():
wrong_attachments[volume_attachment_id] = volume_id
return wrong_attachments
def fix_wrong_volume_attachments(meta, wrong_attachments, fix_limit):
if len(wrong_attachments) <= int(fix_limit):
volume_attachment_t = Table('volume_attachment', meta, autoload=True)
for volume_attachment_id in wrong_attachments:
log.info("-- action: deleting volume attachment id: %s", volume_attachment_id)
now = datetime.datetime.utcnow()
delete_volume_attachment_q = volume_attachment_t.update().\
where(volume_attachment_t.c.id == volume_attachment_id).values(updated_at=now, deleted_at=now, deleted=1)
delete_volume_attachment_q.execute()
else:
log.warn("- PLEASE CHECK MANUALLY - too many (more than %s) wrong volume attachments - denying to fix them automatically", str(fix_limit))
def get_missing_deleted_at(meta, table_names):
missing_deleted_at = {}
for t in table_names:
a_table_t = Table(t, meta, autoload=True)
a_table_select_deleted_at_q = a_table_t.select().where(
and_(a_table_t.c.deleted == 1, a_table_t.c.deleted_at == None))
for row in a_table_select_deleted_at_q.execute():
missing_deleted_at[row.id] = t
return missing_deleted_at
def fix_missing_deleted_at(meta, table_names):
now = datetime.datetime.utcnow()
for t in table_names:
a_table_t = Table(t, meta, autoload=True)
log.info("- action: fixing columns with missing deleted_at times in the %s table", t)
a_table_set_deleted_at_q = a_table_t.update().where(
and_(a_table_t.c.deleted == 1, a_table_t.c.deleted_at == None)).values(
deleted_at=now)
a_table_set_deleted_at_q.execute()
def get_deleted_services_still_used_in_volumes(meta):
deleted_services_still_used_in_volumes = {}
services_t = Table('services', meta, autoload=True)
volumes_t = Table('volumes', meta, autoload=True)
services_volumes_join = services_t.join(volumes_t,services_t.c.uuid == volumes_t.c.service_uuid)
columns = [services_t.c.uuid, services_t.c.deleted, volumes_t.c.id, volumes_t.c.deleted]
deleted_services_still_used_in_volumes_q = select(columns=columns).select_from(services_volumes_join).\
where(and_(volumes_t.c.deleted == 0, services_t.c.deleted == 1))
for (service_uuid, service_deleted, volume_id, volume_deleted) in deleted_services_still_used_in_volumes_q.execute():
deleted_services_still_used_in_volumes[service_uuid] = volume_id
return deleted_services_still_used_in_volumes
def fix_deleted_services_still_used_in_volumes(meta, deleted_services_still_used_in_volumes):
services_t = Table('services', meta, autoload=True)
for deleted_services_still_used_in_volumes_id in deleted_services_still_used_in_volumes:
log.info("-- action: undeleting service uuid: %s", deleted_services_still_used_in_volumes_id)
undelete_services_q = services_t.update().where(services_t.c.uuid == deleted_services_still_used_in_volumes_id).values(deleted=0,deleted_at=None)
undelete_services_q.execute()
def makeOsConnection():
try:
conn = connection.Connection(auth_url=os.getenv('OS_AUTH_URL'),
project_name=os.getenv('OS_PROJECT_NAME'),
project_domain_name=os.getenv('OS_PROJECT_DOMAIN_NAME'),
username=os.getenv('OS_USERNAME'),
user_domain_name=os.getenv('OS_USER_DOMAIN_NAME'),
password=os.getenv('OS_PASSWORD'),
identity_api_version="3")
except Exception as e:
log.warn("- PLEASE CHECK MANUALLY - problems connecting to openstack: %s",
str(e))
sys.exit(1)
return conn
def makeConnection(db_url):
engine = create_engine(db_url)
engine.connect()
Session = sessionmaker(bind=engine)
thisSession = Session()
metadata = MetaData()
metadata.bind = engine
Base = declarative_base()
return thisSession, metadata, Base
def get_db_url(config_file):
parser = configparser.SafeConfigParser()
try:
parser.read(config_file)
db_url = parser.get('database', 'connection', raw=True)
except:
log.info("ERROR: Check Cinder configuration file.")
sys.exit(2)
return db_url
def parse_cmdline_args():
parser = argparse.ArgumentParser()
parser.add_argument("--config",
default='./cinder.conf',
help='configuration file')
parser.add_argument("--dry-run",
action="store_true",
help='print only what would be done without actually doing it')
parser.add_argument("--fix-limit",
default=25,
help='maximum number of inconsistencies to fix automatically - if there are more, automatic fixing is denied')
return parser.parse_args()
def main():
try:
args = parse_cmdline_args()
except Exception as e:
log.error("Check command line arguments (%s)", e.strerror)
conn = makeOsConnection()
db_url = get_db_url(args.config)
cinder_session, cinder_metadata, cinder_Base = makeConnection(db_url)
orphan_volume_attachments = get_orphan_volume_attachments(cinder_metadata)
nova_instances = get_nova_instances(conn)
wrong_orphan_volume_attachments = get_wrong_orphan_volume_attachments(nova_instances, orphan_volume_attachments)
if len(wrong_orphan_volume_attachments) != 0:
log.info("- orphan volume attachments found:")
for orphan_volume_attachment_id in wrong_orphan_volume_attachments:
log.info("-- orphan volume attachment (id in cinder db: %s) for non existent instance in nova: %s", orphan_volume_attachment_id,
orphan_volume_attachments[orphan_volume_attachment_id])
if not args.dry_run:
log.info("- deleting orphan volume attachment inconsistencies found")
fix_wrong_orphan_volume_attachments(cinder_metadata, wrong_orphan_volume_attachments, args.fix_limit)
else:
log.info("- no orphan volume attachments found")
error_deleting_volumes = get_error_deleting_volumes(cinder_metadata)
if len(error_deleting_volumes) != 0:
log.info("- volumes in state error_deleting found")
for error_deleting_volumes_id in error_deleting_volumes:
log.info("-- volume id: %s", error_deleting_volumes_id)
if not args.dry_run:
log.info("- deleting volumes in state error_deleting")
fix_error_deleting_volumes(cinder_metadata, error_deleting_volumes)
else:
log.info("- no volumes in state error_deleting found")
error_deleting_snapshots = get_error_deleting_snapshots(cinder_metadata)
if len(error_deleting_snapshots) != 0:
log.info("- snapshots in state error_deleting found")
for error_deleting_snapshots_id in error_deleting_snapshots:
log.info("-- snapshot id: %s", error_deleting_snapshots_id)
if not args.dry_run:
log.info("- deleting snapshots in state error_deleting")
fix_error_deleting_snapshots(cinder_metadata, error_deleting_snapshots)
else:
log.info("- no snapshots in state error_deleting found")
wrong_admin_metadata = get_wrong_volume_admin_metadata(cinder_metadata)
if len(wrong_admin_metadata) != 0:
log.info("- volume_admin_metadata inconsistencies found")
for volume_admin_metadata_id in wrong_admin_metadata:
log.info("-- volume_admin_metadata id: %s - deleted volume id: %s", volume_admin_metadata_id, wrong_admin_metadata[volume_admin_metadata_id])
if not args.dry_run:
log.info("- removing volume_admin_metadata inconsistencies found")
fix_wrong_volume_admin_metadata(cinder_metadata, wrong_admin_metadata)
else:
log.info("- volume_admin_metadata entries are consistent")
wrong_glance_metadata = get_wrong_volume_glance_metadata(cinder_metadata)
if len(wrong_glance_metadata) != 0:
log.info("- volume_glance_metadata inconsistencies found")
for volume_glance_metadata_id in wrong_glance_metadata:
log.info("-- volume_glance_metadata id: %s - deleted volume id: %s", volume_glance_metadata_id, wrong_glance_metadata[volume_glance_metadata_id])
if not args.dry_run:
log.info("- removing volume_glance_metadata inconsistencies found")
fix_wrong_volume_glance_metadata(cinder_metadata, wrong_glance_metadata)
else:
log.info("- volume_glance_metadata entries are consistent")
wrong_metadata = get_wrong_volume_metadata(cinder_metadata)
if len(wrong_metadata) != 0:
log.info("- volume_metadata inconsistencies found")
for volume_metadata_id in wrong_metadata:
log.info("-- volume_metadata id: %s - deleted volume id: %s", volume_metadata_id, wrong_metadata[volume_metadata_id])
if not args.dry_run:
log.info("- removing volume_metadata inconsistencies found")
fix_wrong_volume_metadata(cinder_metadata, wrong_metadata)
else:
log.info("- volume_metadata entries are consistent")
wrong_attachments = get_wrong_volume_attachments(cinder_metadata)
if len(wrong_attachments) != 0:
log.info("- volume attachment inconsistencies found")
for volume_attachment_id in wrong_attachments:
log.info("-- volume attachment id: %s - deleted volume id: %s", volume_attachment_id, wrong_attachments[volume_attachment_id])
if not args.dry_run:
log.info("- removing volume attachment inconsistencies found")
fix_wrong_volume_attachments(cinder_metadata, wrong_attachments, args.fix_limit)
else:
log.info("- volume attachments are consistent")
table_names = [ 'snapshots', 'volume_attachment' ]
missing_deleted_at = get_missing_deleted_at(cinder_metadata, table_names)
if len(missing_deleted_at) != 0:
log.info("- missing deleted_at values found:")
for missing_deleted_at_id in missing_deleted_at:
log.info("--- id %s of the %s table is missing deleted_at time", missing_deleted_at_id, missing_deleted_at[missing_deleted_at_id])
if not args.dry_run:
log.info("- setting missing deleted_at values")
fix_missing_deleted_at(cinder_metadata, table_names)
else:
log.info("- no missing deleted_at values")
deleted_services_still_used_in_volumes = get_deleted_services_still_used_in_volumes(cinder_metadata)
if len(deleted_services_still_used_in_volumes) != 0:
log.info("- deleted services still used in volumes found:")
for deleted_services_still_used_in_volumes_id in deleted_services_still_used_in_volumes:
log.info("--- deleted service uuid %s still used in volumes table entry %s", deleted_services_still_used_in_volumes_id, deleted_services_still_used_in_volumes[deleted_services_still_used_in_volumes_id])
if not args.dry_run:
log.info("- undeleting service uuid still used in volumes table")
fix_deleted_services_still_used_in_volumes(cinder_metadata, deleted_services_still_used_in_volumes)
else:
log.info("- deleted services still used in volumes")
if __name__ == "__main__":
main()
| true
| true
|
7909a789a7ab5367ea1b024bb539ed8e05c99f13
| 9,981
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/apimanagement/v20200601preview/api_operation.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/apimanagement/v20200601preview/api_operation.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/apimanagement/v20200601preview/api_operation.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['ApiOperation']
class ApiOperation(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
method: Optional[pulumi.Input[str]] = None,
operation_id: Optional[pulumi.Input[str]] = None,
policies: Optional[pulumi.Input[str]] = None,
request: Optional[pulumi.Input[pulumi.InputType['RequestContractArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
responses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ResponseContractArgs']]]]] = None,
service_name: Optional[pulumi.Input[str]] = None,
template_parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ParameterContractArgs']]]]] = None,
url_template: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Api Operation details.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_id: API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
:param pulumi.Input[str] description: Description of the operation. May include HTML formatting tags.
:param pulumi.Input[str] display_name: Operation Name.
:param pulumi.Input[str] method: A valid HTTP operation method. Typical HTTP methods like GET, PUT, POST, but not limited to them.
:param pulumi.Input[str] operation_id: Operation identifier within an API. Must be unique in the current API Management service instance.
:param pulumi.Input[str] policies: Operation Policies
:param pulumi.Input[pulumi.InputType['RequestContractArgs']] request: An entity containing request details.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ResponseContractArgs']]]] responses: Array of Operation responses.
:param pulumi.Input[str] service_name: The name of the API Management service.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ParameterContractArgs']]]] template_parameters: Collection of URL template parameters.
:param pulumi.Input[str] url_template: Relative URL template identifying the target resource for this operation. May include parameters. Example: /customers/{cid}/orders/{oid}/?date={date}
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if api_id is None and not opts.urn:
raise TypeError("Missing required property 'api_id'")
__props__['api_id'] = api_id
__props__['description'] = description
if display_name is None and not opts.urn:
raise TypeError("Missing required property 'display_name'")
__props__['display_name'] = display_name
if method is None and not opts.urn:
raise TypeError("Missing required property 'method'")
__props__['method'] = method
__props__['operation_id'] = operation_id
__props__['policies'] = policies
__props__['request'] = request
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['responses'] = responses
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__['service_name'] = service_name
__props__['template_parameters'] = template_parameters
if url_template is None and not opts.urn:
raise TypeError("Missing required property 'url_template'")
__props__['url_template'] = url_template
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/latest:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20160707:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20161010:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:ApiOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:ApiOperation")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ApiOperation, __self__).__init__(
'azure-nextgen:apimanagement/v20200601preview:ApiOperation',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ApiOperation':
"""
Get an existing ApiOperation resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
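# the engine resolves the resource's output properties when it reads the existing state; only the id is needed here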
return ApiOperation(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the operation. May include HTML formatting tags.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
Operation Name.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def method(self) -> pulumi.Output[str]:
"""
A valid HTTP operation method. Typical HTTP methods like GET, PUT, POST, but not limited to them.
"""
return pulumi.get(self, "method")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def policies(self) -> pulumi.Output[Optional[str]]:
"""
Operation Policies
"""
return pulumi.get(self, "policies")
@property
@pulumi.getter
def request(self) -> pulumi.Output[Optional['outputs.RequestContractResponse']]:
"""
An entity containing request details.
"""
return pulumi.get(self, "request")
@property
@pulumi.getter
def responses(self) -> pulumi.Output[Optional[Sequence['outputs.ResponseContractResponse']]]:
"""
Array of Operation responses.
"""
return pulumi.get(self, "responses")
@property
@pulumi.getter(name="templateParameters")
def template_parameters(self) -> pulumi.Output[Optional[Sequence['outputs.ParameterContractResponse']]]:
"""
Collection of URL template parameters.
"""
return pulumi.get(self, "template_parameters")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type for API Management resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="urlTemplate")
def url_template(self) -> pulumi.Output[str]:
"""
Relative URL template identifying the target resource for this operation. May include parameters. Example: /customers/{cid}/orders/{oid}/?date={date}
"""
return pulumi.get(self, "url_template")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
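# --- Illustrative usage sketch (not part of the generated SDK) ---
# A minimal, hypothetical example of declaring this resource from a Pulumi
# program, assuming the generated package is importable as
# pulumi_azure_nextgen.apimanagement.v20200601preview. The resource group,
# service name, API id, operation id and URL template below are illustrative
# assumptions, not values taken from this module.
import pulumi
import pulumi_azure_nextgen.apimanagement.v20200601preview as apimanagement

example_op = apimanagement.ApiOperation(
    "exampleOperation",
    resource_group_name="example-rg",        # assumed resource group
    service_name="example-apim",             # assumed API Management service
    api_id="example-api",                    # assumed API identifier
    operation_id="get-customer-orders",      # assumed operation id
    display_name="Get customer orders",
    method="GET",
    url_template="/customers/{cid}/orders",
    template_parameters=[apimanagement.ParameterContractArgs(
        name="cid",
        type="string",
        required=True,
    )],
)

pulumi.export("operation_name", example_op.name)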
| 47.985577
| 794
| 0.660655
|
| true
| true
|
7909a8382b0dbafadeefa95be20e04969bf424ca
| 11,198
|
py
|
Python
|
chia/wallet/wallet_puzzle_store.py
|
santiagoferreira33/mainchia
|
16917701fd93cebab25bf054cf7c17967052ef2e
|
[
"Apache-2.0"
] | 6
|
2021-05-21T10:29:32.000Z
|
2021-07-10T12:51:34.000Z
|
chia/wallet/wallet_puzzle_store.py
|
santiagoferreira33/mainchia
|
16917701fd93cebab25bf054cf7c17967052ef2e
|
[
"Apache-2.0"
] | 28
|
2021-07-13T21:07:14.000Z
|
2022-03-29T21:10:38.000Z
|
chia/wallet/wallet_puzzle_store.py
|
santiagoferreira33/mainchia
|
16917701fd93cebab25bf054cf7c17967052ef2e
|
[
"Apache-2.0"
] | 2
|
2021-05-18T15:33:58.000Z
|
2021-05-28T21:15:09.000Z
|
import asyncio
import logging
from typing import List, Optional, Set, Tuple
import aiosqlite
from blspy import G1Element
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.db_wrapper import DBWrapper
from chia.util.ints import uint32
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.util.wallet_types import WalletType
log = logging.getLogger(__name__)
class WalletPuzzleStore:
"""
WalletPuzzleStore keeps track of all generated puzzle_hashes and their derivation path / wallet.
"""
db_connection: aiosqlite.Connection
lock: asyncio.Lock
cache_size: uint32
all_puzzle_hashes: Set[bytes32]
db_wrapper: DBWrapper
@classmethod
async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(600000)):
self = cls()
self.cache_size = cache_size
self.db_wrapper = db_wrapper
self.db_connection = self.db_wrapper.db
await self.db_connection.execute("pragma journal_mode=wal")
await self.db_connection.execute("pragma synchronous=2")
await self.db_connection.execute(
(
"CREATE TABLE IF NOT EXISTS derivation_paths("
"derivation_index int,"
" pubkey text,"
" puzzle_hash text PRIMARY_KEY,"
" wallet_type int,"
" wallet_id int,"
" used tinyint)"
)
)
await self.db_connection.execute(
"CREATE INDEX IF NOT EXISTS derivation_index_index on derivation_paths(derivation_index)"
)
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS ph on derivation_paths(puzzle_hash)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS pubkey on derivation_paths(pubkey)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS wallet_type on derivation_paths(wallet_type)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS wallet_id on derivation_paths(wallet_id)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS used on derivation_paths(wallet_type)")
await self.db_connection.commit()
# Lock
self.lock = asyncio.Lock() # external
await self._init_cache()
return self
async def close(self):
await self.db_connection.close()
async def _init_cache(self):
self.all_puzzle_hashes = await self.get_all_puzzle_hashes()
async def _clear_database(self):
cursor = await self.db_connection.execute("DELETE FROM derivation_paths")
await cursor.close()
await self.db_connection.commit()
async def add_derivation_paths(self, records: List[DerivationRecord]) -> None:
"""
Insert many derivation paths into the database.
"""
async with self.db_wrapper.lock:
sql_records = []
for record in records:
self.all_puzzle_hashes.add(record.puzzle_hash)
sql_records.append(
(
record.index,
bytes(record.pubkey).hex(),
record.puzzle_hash.hex(),
record.wallet_type,
record.wallet_id,
0,
),
)
cursor = await self.db_connection.executemany(
"INSERT OR REPLACE INTO derivation_paths VALUES(?, ?, ?, ?, ?, ?)",
sql_records,
)
await cursor.close()
await self.db_connection.commit()
async def get_derivation_record(self, index: uint32, wallet_id: uint32) -> Optional[DerivationRecord]:
"""
Returns the derivation record by index and wallet id.
"""
cursor = await self.db_connection.execute(
"SELECT * FROM derivation_paths WHERE derivation_index=? and wallet_id=?;",
(
index,
wallet_id,
),
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return DerivationRecord(
uint32(row[0]),
bytes32.fromhex(row[2]),
G1Element.from_bytes(bytes.fromhex(row[1])),
WalletType(row[3]),
uint32(row[4]),
)
return None
async def get_derivation_record_for_puzzle_hash(self, puzzle_hash: str) -> Optional[DerivationRecord]:
"""
        Returns the derivation record for the given puzzle hash.
"""
cursor = await self.db_connection.execute(
"SELECT * FROM derivation_paths WHERE puzzle_hash=?;",
(puzzle_hash,),
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return DerivationRecord(
uint32(row[0]),
bytes32.fromhex(row[2]),
G1Element.from_bytes(bytes.fromhex(row[1])),
WalletType(row[3]),
uint32(row[4]),
)
return None
async def set_used_up_to(self, index: uint32, in_transaction=False) -> None:
"""
        Sets all derivation paths up to and including the given index to used so we don't use them again.
"""
if not in_transaction:
await self.db_wrapper.lock.acquire()
try:
cursor = await self.db_connection.execute(
"UPDATE derivation_paths SET used=1 WHERE derivation_index<=?",
(index,),
)
await cursor.close()
finally:
if not in_transaction:
await self.db_connection.commit()
self.db_wrapper.lock.release()
async def puzzle_hash_exists(self, puzzle_hash: bytes32) -> bool:
"""
Checks if passed puzzle_hash is present in the db.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
await cursor.close()
return row is not None
async def one_of_puzzle_hashes_exists(self, puzzle_hashes: List[bytes32]) -> bool:
"""
Checks if one of the passed puzzle_hashes is present in the db.
"""
if len(puzzle_hashes) < 1:
return False
for ph in puzzle_hashes:
if ph in self.all_puzzle_hashes:
return True
return False
async def index_for_pubkey(self, pubkey: G1Element) -> Optional[uint32]:
"""
        Returns the derivation index for the given pubkey.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE pubkey=?", (bytes(pubkey).hex(),)
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return uint32(row[0])
return None
async def index_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[uint32]:
"""
        Returns the derivation index for the puzzle_hash.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return uint32(row[0])
return None
async def index_for_puzzle_hash_and_wallet(self, puzzle_hash: bytes32, wallet_id: uint32) -> Optional[uint32]:
"""
        Returns the derivation index for the puzzle_hash and wallet_id.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=? and wallet_id=?;",
(
puzzle_hash.hex(),
wallet_id,
),
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return uint32(row[0])
return None
async def wallet_info_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[Tuple[uint32, WalletType]]:
"""
        Returns the wallet id and wallet type for the puzzle_hash.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return row[4], WalletType(row[3])
return None
async def get_all_puzzle_hashes(self) -> Set[bytes32]:
"""
Return a set containing all puzzle_hashes we generated.
"""
cursor = await self.db_connection.execute("SELECT * from derivation_paths")
rows = await cursor.fetchall()
await cursor.close()
result: Set[bytes32] = set()
for row in rows:
result.add(bytes32(bytes.fromhex(row[2])))
return result
async def get_last_derivation_path(self) -> Optional[uint32]:
"""
Returns the last derivation path by derivation_index.
"""
cursor = await self.db_connection.execute("SELECT MAX(derivation_index) FROM derivation_paths;")
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return uint32(row[0])
return None
async def get_last_derivation_path_for_wallet(self, wallet_id: int) -> Optional[uint32]:
"""
Returns the last derivation path by derivation_index.
"""
cursor = await self.db_connection.execute(
f"SELECT MAX(derivation_index) FROM derivation_paths WHERE wallet_id={wallet_id};"
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return uint32(row[0])
return None
async def get_current_derivation_record_for_wallet(self, wallet_id: uint32) -> Optional[DerivationRecord]:
"""
Returns the current derivation record by derivation_index.
"""
cursor = await self.db_connection.execute(
f"SELECT MAX(derivation_index) FROM derivation_paths WHERE wallet_id={wallet_id} and used=1;"
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
index = uint32(row[0])
return await self.get_derivation_record(index, wallet_id)
return None
async def get_unused_derivation_path(self) -> Optional[uint32]:
"""
Returns the first unused derivation path by derivation_index.
"""
cursor = await self.db_connection.execute("SELECT MIN(derivation_index) FROM derivation_paths WHERE used=0;")
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return uint32(row[0])
return None
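# --- Illustrative sketch (standalone, not the chia wallet API) ---
# set_used_up_to / get_unused_derivation_path above implement a simple
# "used up to this index" watermark over derivation_index. A minimal
# standalone demonstration of the same pattern with the stdlib sqlite3
# module; the table and column names mirror the schema above, but this is
# a toy example, not chia code.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE derivation_paths(derivation_index int, puzzle_hash text, used tinyint)"
)
conn.executemany(
    "INSERT INTO derivation_paths VALUES(?, ?, ?)",
    [(i, f"hash-{i}", 0) for i in range(5)],
)

# Mark everything up to index 2 as used (the watermark).
conn.execute("UPDATE derivation_paths SET used=1 WHERE derivation_index<=?", (2,))

# The first unused index is then the minimum index with used=0.
row = conn.execute(
    "SELECT MIN(derivation_index) FROM derivation_paths WHERE used=0"
).fetchone()
print(row[0])  # -> 3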
| 32.74269
| 117
| 0.59841
|
| true
| true
|
7909a898d4e7c6b67303f014f5cd22ed7fddf740
| 222
|
py
|
Python
|
Hackerrank/Practice/Python/9.erros and exceptions/66.Incorrect Regex.py
|
kushagra1212/Competitive-Programming
|
5b68774c617d6abdf1b29893b1b13d47f62161e8
|
[
"MIT"
] | 994
|
2017-02-28T06:13:47.000Z
|
2022-03-31T10:49:00.000Z
|
Hackerrank_python/9.erros and exceptions/66.Incorrect Regex.py
|
devesh17m/Competitive-Programming
|
2d459dc8dc5ac628d94700b739988b0ea364cb71
|
[
"MIT"
] | 16
|
2018-01-01T02:59:55.000Z
|
2021-11-22T12:49:16.000Z
|
Hackerrank_python/9.erros and exceptions/66.Incorrect Regex.py
|
devesh17m/Competitive-Programming
|
2d459dc8dc5ac628d94700b739988b0ea364cb71
|
[
"MIT"
] | 325
|
2017-06-15T03:32:43.000Z
|
2022-03-28T22:43:42.000Z
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
import re
for _ in range(int(input())):
    try:
        re.compile(input())
        print(True)
    except re.error:
        print(False)
| 24.666667
| 69
| 0.581081
|
| true
| true
|
7909aa1b8c84fa1900a1c97151d4010a8dc7ab22
| 2,563
|
py
|
Python
|
main.py
|
imgVOID/autograding-api
|
7c2f5491607d5d76880827c73565f9f5be5a33ad
|
[
"Apache-2.0"
] | 5
|
2021-11-08T18:55:09.000Z
|
2022-02-27T19:14:35.000Z
|
main.py
|
imgVOID/autograde-py
|
7c2f5491607d5d76880827c73565f9f5be5a33ad
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
imgVOID/autograde-py
|
7c2f5491607d5d76880827c73565f9f5be5a33ad
|
[
"Apache-2.0"
] | 2
|
2021-12-23T05:11:31.000Z
|
2021-12-26T13:42:21.000Z
|
from os.path import dirname, abspath
from datetime import timedelta
from fastapi import FastAPI, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from routers.tasks import router_tasks
from routers.checks import router_checks, limiter
from routers.topics import router_topic
from routers.auth import router_users
from database.config import database
from schemas.auth import Token
from utilities.docker_scripts import DockerUtils
from utilities.app_metadata import tags_metadata, app_metadata_description
from utilities.auth_scripts import AuthUtils
# FastAPI app instance
app = FastAPI(title='Autograding-API',
description=app_metadata_description,
version='0.0.1',
contact={
"name": "Maria Hladka",
"url": "https://github.com/imgVOID",
"email": "imgvoid@gmail.com",
},
license_info={
"name": "Apache 2.0",
"url": "https://www.apache.org/licenses/LICENSE-2.0.html",
}, openapi_tags=tags_metadata)
# Save main app directory
APP_ROOT = dirname(abspath(__file__))
# Fix Docker dockerfile problems on the app startup
DockerUtils.fix_docker_bug()
# Connecting routers to the app
app.include_router(router_tasks)
app.include_router(router_checks)
app.include_router(router_topic)
app.include_router(router_users)
# Connecting rate limiter to the app
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
@app.on_event("startup")
async def startup():
await database.connect()
@app.on_event("shutdown")
async def shutdown():
await database.disconnect()
@app.post("/auth/token", response_model=Token, summary="Grab the Bearer token")
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
user = await AuthUtils.authenticate_user(form_data.username, form_data.password)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_expires = timedelta(minutes=AuthUtils.ACCESS_TOKEN_EXPIRE_MINUTES)
access_token = await AuthUtils.create_access_token(
data={"sub": user.email}, expires_delta=access_token_expires
)
return {"access_token": access_token, "token_type": "bearer"}
| 36.098592
| 84
| 0.730784
|
| true
| true
|
7909ab951053c7b14daa53e38a7864549876e9d8
| 791
|
py
|
Python
|
2020/01/test.py
|
mr-bigbang/advent-of-code
|
5fa0f78c70b10c66e516b21c08335e63e71d9e95
|
[
"MIT"
] | null | null | null |
2020/01/test.py
|
mr-bigbang/advent-of-code
|
5fa0f78c70b10c66e516b21c08335e63e71d9e95
|
[
"MIT"
] | 2
|
2020-12-04T00:58:16.000Z
|
2020-12-05T21:08:59.000Z
|
2020/01/test.py
|
mr-bigbang/advent-of-code
|
5fa0f78c70b10c66e516b21c08335e63e71d9e95
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import unittest
import code
class TestDay01(unittest.TestCase):
# Part 01
def test_example01(self):
expense_report = [1721, 299]
expected = 514579
result = code.part01(expense_report)
self.assertEqual(result, expected)
    # Don't pair a single 1010 entry (2020/2) with itself
def test_duplicate(self):
expense_report = [1010, 1721, 299]
expected = 514579
result = code.part01(expense_report)
self.assertEqual(result, expected)
# Part 02
def test_example02(self):
expense_report = [979, 366, 675]
expected = 241861950
result = code.part02(expense_report)
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
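# --- Illustrative sketch of the module under test (assumption) ---
# The tests above import a local module named `code` exposing part01 and
# part02. A minimal implementation consistent with those expectations:
# part01 returns the product of the pair summing to 2020, part02 the
# product of the triple summing to 2020. Using combinations() also keeps
# the duplicate test honest, since an entry is never paired with itself.
from itertools import combinations
from math import prod


def part01(expense_report):
    for a, b in combinations(expense_report, 2):
        if a + b == 2020:
            return a * b
    return None


def part02(expense_report):
    for triple in combinations(expense_report, 3):
        if sum(triple) == 2020:
            return prod(triple)
    return None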
| 23.264706
| 44
| 0.635904
|
| true
| true
|
7909ab9ab3d311bc36162af2d7c7d570e8554468
| 89
|
py
|
Python
|
ytvideo/apps.py
|
LSM2016/Bilibili-
|
21ab048d18c790b531cddb63129ef41a0ffecb4d
|
[
"MIT"
] | 1
|
2021-03-18T05:55:33.000Z
|
2021-03-18T05:55:33.000Z
|
ytvideo/apps.py
|
LSM2016/Bilibili-
|
21ab048d18c790b531cddb63129ef41a0ffecb4d
|
[
"MIT"
] | null | null | null |
ytvideo/apps.py
|
LSM2016/Bilibili-
|
21ab048d18c790b531cddb63129ef41a0ffecb4d
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class YtvideoConfig(AppConfig):
name = 'ytvideo'
| 14.833333
| 33
| 0.752809
|
| true
| true
|
7909abc5d934bd1954bab11cdca70fa396622b8f
| 25,852
|
py
|
Python
|
app/modules/users/models.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | null | null | null |
app/modules/users/models.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 2
|
2021-03-16T20:28:06.000Z
|
2021-03-29T15:54:11.000Z
|
app/modules/users/models.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
User database models
--------------------
"""
import enum
import logging
import uuid
from flask import current_app
from sqlalchemy_utils import types as column_types
from flask_login import current_user # NOQA
from app.extensions import db, FeatherModel
from app.extensions.auth import security
from app.extensions.edm import EDMObjectMixin
from app.extensions.api.parameters import _get_is_static_role_property
import app.extensions.logging as AuditLog
log = logging.getLogger(__name__)
class UserEDMMixin(EDMObjectMixin):
# fmt: off
# Name of the module, used for knowing what to sync i.e user.list, user.data
EDM_NAME = 'user'
# The EDM attribute for the version, if reported
EDM_VERSION_ATTRIBUTE = 'version'
#
EDM_LOG_ATTRIBUTES = [
'emailAddress',
]
EDM_ATTRIBUTE_MAPPING = {
# Ignored
'id' : None,
'lastLogin' : None,
'username' : None,
# Attributes
'acceptedUserAgreement' : 'accepted_user_agreement',
'affiliation' : 'affiliation',
'emailAddress' : 'email',
'fullName' : 'full_name',
'receiveEmails' : 'receive_notification_emails',
'sharing' : 'shares_data',
'userURL' : 'website',
'version' : 'version',
# Functions
'organizations' : '_process_edm_user_organization',
'profileImageUrl' : '_process_edm_user_profile_url',
}
# fmt: on
@classmethod
def ensure_edm_obj(cls, guid):
user = User.query.filter(User.guid == guid).first()
is_new = user is None
if is_new:
email = '%s@localhost' % (guid,)
password = User.initial_random_password()
user = User(
guid=guid,
email=email,
password=password,
version=None,
is_active=True,
in_alpha=True,
)
with db.session.begin():
db.session.add(user)
db.session.refresh(user)
return user, is_new
def _process_edm_user_profile_url(self, url):
# TODO is this actually needed
log.warning('User._process_edm_profile_url() not implemented yet')
def _process_edm_user_organization(self, org):
# TODO is this actually needed
log.warning('User._process_edm_user_organization() not implemented yet')
class User(db.Model, FeatherModel, UserEDMMixin):
"""
User database model.
TODO:
* Upgrade to HoustonModel after full transition for Users out of EDM is
complete
"""
def __init__(self, *args, **kwargs):
if 'password' not in kwargs:
raise ValueError('User must have a password')
super().__init__(*args, **kwargs)
guid = db.Column(
db.GUID, default=uuid.uuid4, primary_key=True
) # pylint: disable=invalid-name
version = db.Column(db.BigInteger, default=None, nullable=True)
email = db.Column(
db.String(length=120), index=True, unique=True, default='', nullable=False
)
password = db.Column(
column_types.PasswordType(max_length=128, schemes=('bcrypt',)), nullable=False
    ) # can be migrated from EDM field "password"
full_name = db.Column(
db.String(length=120), default='', nullable=False
) # can be migrated from EDM field "fullName"
website = db.Column(
db.String(length=120), nullable=True
) # can be migrated from EDM field "userURL"
location = db.Column(db.String(length=120), default='', nullable=True)
affiliation = db.Column(
db.String(length=120), default='', nullable=True
) # can be migrated from BE field "affiliation"
forum_id = db.Column(db.String(length=120), default='', nullable=True)
locale = db.Column(db.String(length=20), default='EN', nullable=True)
accepted_user_agreement = db.Column(
db.Boolean, default=False, nullable=False
) # can be migrated from EDM field "acceptedUserAgreement"
use_usa_date_format = db.Column(db.Boolean, default=True, nullable=False)
show_email_in_profile = db.Column(db.Boolean, default=False, nullable=False)
receive_notification_emails = db.Column(
db.Boolean, default=True, nullable=False
) # can be migrated from BE field "receiveEmails"
receive_newsletter_emails = db.Column(db.Boolean, default=False, nullable=False)
shares_data = db.Column(
db.Boolean, default=True, nullable=False
) # can be migrated from BE field "sharing"
default_identification_catalogue = db.Column(
db.GUID, nullable=True
) # this may just be a string, however EDM wants to do ID catalogues
profile_fileupload_guid = db.Column(
db.GUID, db.ForeignKey('file_upload.guid'), nullable=True
)
# 'FileUpload' failed to locate a name (class not yet loaded)
# so explicitly import FileUpload here
from app.modules.fileuploads.models import FileUpload
profile_fileupload = db.relationship(FileUpload)
organization_membership_enrollments = db.relationship(
'OrganizationUserMembershipEnrollment', back_populates='user'
)
organization_moderator_enrollments = db.relationship(
'OrganizationUserModeratorEnrollment', back_populates='user'
)
project_membership_enrollments = db.relationship(
'ProjectUserMembershipEnrollment', back_populates='user'
)
user_collaboration_associations = db.relationship(
'CollaborationUserAssociations', back_populates='user'
)
asset_groups = db.relationship(
'AssetGroup',
back_populates='owner',
primaryjoin='User.guid == AssetGroup.owner_guid',
order_by='AssetGroup.guid',
)
submitted_asset_groups = db.relationship(
'AssetGroup',
back_populates='submitter',
primaryjoin='User.guid == AssetGroup.submitter_guid',
order_by='AssetGroup.guid',
)
owned_encounters = db.relationship(
'Encounter',
back_populates='owner',
primaryjoin='User.guid == Encounter.owner_guid',
order_by='Encounter.guid',
)
submitted_encounters = db.relationship(
'Encounter',
back_populates='submitter',
primaryjoin='User.guid == Encounter.submitter_guid',
order_by='Encounter.guid',
)
owned_organizations = db.relationship(
'Organization',
back_populates='owner',
primaryjoin='User.guid == Organization.owner_guid',
order_by='Organization.guid',
)
owned_projects = db.relationship(
'Project',
back_populates='owner',
primaryjoin='User.guid == Project.owner_guid',
order_by='Project.guid',
)
# User may have many notifications
notifications = db.relationship(
'Notification',
back_populates='recipient',
primaryjoin='User.guid == Notification.recipient_guid',
order_by='Notification.guid',
)
# All User specific Notification Preferences will be held in one instance
notification_preferences = db.relationship(
'UserNotificationPreferences',
back_populates='user',
primaryjoin='User.guid == UserNotificationPreferences.user_guid',
order_by='UserNotificationPreferences.guid',
)
PUBLIC_USER_EMAIL = 'public@localhost'
class StaticRoles(enum.Enum):
# pylint: disable=missing-docstring,unsubscriptable-object
DATA_MANAGER = (0x100000, 'DataManager', 'DataManager', 'is_data_manager')
USER_MANAGER = (0x80000, 'UserManager', 'UserManager', 'is_user_manager')
CONTRIBUTOR = (0x40000, 'Contributor', 'Contributor', 'is_contributor')
RESEARCHER = (0x20000, 'Researcher', 'Researcher', 'is_researcher')
EXPORTER = (0x10000, 'Exporter', 'Exporter', 'is_exporter')
INTERNAL = (0x08000, 'Internal', 'Internal', 'is_internal')
ADMIN = (0x04000, 'Site Administrator', 'Admin', 'is_admin')
STAFF = (0x02000, 'Staff Member', 'Staff', 'is_staff')
ACTIVE = (0x01000, 'Active Account', 'Active', 'is_active')
SETUP = (0x00800, 'Account in Setup', 'Setup', 'in_setup')
RESET = (0x00400, 'Account in Password Reset', 'Reset', 'in_reset')
ALPHA = (0x00200, 'Enrolled in Alpha', 'Alpha', 'in_alpha')
BETA = (0x00100, 'Enrolled in Beta', 'Beta', 'in_beta')
@property
def mask(self):
return self.value[0]
@property
def title(self):
return self.value[1]
@property
def shorthand(self):
return self.value[2]
static_roles = db.Column(db.Integer, default=0, nullable=False)
is_contributor = _get_is_static_role_property(
'is_contributor', StaticRoles.CONTRIBUTOR
)
is_user_manager = _get_is_static_role_property(
'is_user_manager', StaticRoles.USER_MANAGER
)
is_data_manager = _get_is_static_role_property(
'is_data_manager', StaticRoles.DATA_MANAGER
)
is_researcher = _get_is_static_role_property('is_researcher', StaticRoles.RESEARCHER)
is_exporter = _get_is_static_role_property('is_exporter', StaticRoles.EXPORTER)
is_internal = _get_is_static_role_property('is_internal', StaticRoles.INTERNAL)
is_admin = _get_is_static_role_property('is_admin', StaticRoles.ADMIN)
is_staff = _get_is_static_role_property('is_staff', StaticRoles.STAFF)
is_active = _get_is_static_role_property('is_active', StaticRoles.ACTIVE)
in_beta = _get_is_static_role_property('in_beta', StaticRoles.BETA)
in_alpha = _get_is_static_role_property('in_alpha', StaticRoles.ALPHA)
in_reset = _get_is_static_role_property('in_reset', StaticRoles.RESET)
in_setup = _get_is_static_role_property('in_setup', StaticRoles.SETUP)
@property
def is_privileged(self):
return self.is_staff or self.is_internal
def get_state(self):
state = []
state += [self.StaticRoles.ACTIVE.shorthand] if self.is_active else []
state += [self.StaticRoles.SETUP.shorthand] if self.in_setup else []
state += [self.StaticRoles.RESET.shorthand] if self.in_reset else []
state += [self.StaticRoles.ALPHA.shorthand] if self.in_alpha else []
state += [self.StaticRoles.BETA.shorthand] if self.in_beta else []
return state
def get_roles(self):
roles = []
roles += [self.StaticRoles.DATA_MANAGER.shorthand] if self.is_data_manager else []
roles += [self.StaticRoles.USER_MANAGER.shorthand] if self.is_user_manager else []
roles += [self.StaticRoles.INTERNAL.shorthand] if self.is_internal else []
roles += [self.StaticRoles.ADMIN.shorthand] if self.is_admin else []
roles += [self.StaticRoles.STAFF.shorthand] if self.is_staff else []
roles += [self.StaticRoles.CONTRIBUTOR.shorthand] if self.is_contributor else []
roles += [self.StaticRoles.RESEARCHER.shorthand] if self.is_researcher else []
roles += [self.StaticRoles.EXPORTER.shorthand] if self.is_exporter else []
return roles
def __repr__(self):
state = ', '.join(self.get_state())
roles = ', '.join(self.get_roles())
return (
'<{class_name}('
'guid={self.guid}, '
'email="{self.email}", '
'name="{self.full_name}", '
'state={state}, '
'roles={roles}'
')>'.format(
class_name=self.__class__.__name__, self=self, state=state, roles=roles
)
)
@classmethod
def get_admins(cls):
# used for first run admin creation
users = cls.query.all() # NOQA
admin_users = []
for user in users:
# TODO: Remove the check below at a later point after default admin create is removed
if user.email.endswith('@localhost'):
continue
if user.is_admin:
admin_users.append(user)
return admin_users
@classmethod
def admin_user_initialized(cls):
# used for first run admin creation
return len(cls.get_admins()) > 0
@classmethod
def ensure_user(
cls,
email,
password,
is_internal=False,
is_admin=False,
is_staff=False,
is_researcher=False,
is_contributor=True,
is_user_manager=False,
is_exporter=False,
is_active=True,
in_beta=False,
in_alpha=False,
update=False,
**kwargs,
):
"""
Create a new user.
"""
from app.extensions import db
user = User.find(email=email)
if user is None:
user = User(
password=password,
email=email,
is_internal=is_internal,
is_admin=is_admin,
is_staff=is_staff,
is_active=is_active,
is_researcher=is_researcher,
is_contributor=is_contributor,
is_user_manager=is_user_manager,
is_exporter=is_exporter,
in_beta=in_beta,
in_alpha=in_alpha,
**kwargs,
)
with db.session.begin():
db.session.add(user)
log.info('New user created: %r' % (user,))
elif update:
user.password = password
user.is_internal = is_internal
user.is_admin = is_admin
user.is_staff = is_staff
user.is_researcher = is_researcher
user.is_contributor = is_contributor
user.is_user_manager = is_user_manager
user.is_exporter = is_exporter
user.is_active = is_active
user.in_beta = in_beta
user.in_alpha = in_alpha
with db.session.begin():
db.session.merge(user)
log.info('Updated user: %r' % (user,))
db.session.refresh(user)
return user
@classmethod
def find(cls, email=None, password=None, edm_login_fallback=True):
# Look-up via email
if email is None:
return None
email_candidates = [
email,
'%s@localhost' % (email,),
]
for email_candidate in email_candidates:
user = cls.query.filter(User.email == email_candidate).first()
if password is None:
# If no password was provided to check, return any user account we find
if user is not None:
return user
else:
# Check local Houston password first
if user is not None:
# We found the user, check their provided password
if user.password == password:
return user
# As a fallback, check all EDMs if the user can login
if edm_login_fallback:
# We want to check the EDM even if we don't have a local user record
if current_app.edm.check_user_login(email_candidate, password):
log.info('User authenticated via EDM: %r' % (email_candidate,))
if user is not None:
# We authenticated a local user against an EDM (but the local password failed)
if user.password != password:
# The user passed the login with an EDM, update local password
log.warning(
"Updating user's local password: %r" % (user,)
)
user = user.set_password(password)
return user
else:
log.critical(
'The user authenticated via EDM but has no local user record'
)
# Try syncing all users from EDM
cls.edm_sync_all()
# If the user was just synced, go grab it (recursively) and return
user = cls.find(email=email, edm_login_fallback=False)
return user
# If we have gotten here, one of these things happened:
# 1) the user wasn't found
# 2) the user's password was provided and was incorrect
# 3) the user authenticated against the EDM but has no local user record
return None
@classmethod
def query_search(cls, search=None):
from sqlalchemy import or_, and_
from app.modules.auth.models import Code, CodeTypes
if search is not None:
search = search.strip().split(' ')
search = [term.strip() for term in search]
search = [term for term in search if len(term) > 0]
or_terms = []
for term in search:
codes = (
Code.query.filter_by(code_type=CodeTypes.checkin)
.filter(
Code.accept_code.contains(term),
)
.all()
)
code_users = set([])
for code in codes:
if not code.is_expired:
code_users.add(code.user.guid)
or_term = or_(
cls.guid.in_(code_users),
cls.email.contains(term),
cls.affiliation.contains(term),
cls.forum_id.contains(term),
cls.full_name.contains(term),
)
or_terms.append(or_term)
users = cls.query.filter(and_(*or_terms))
else:
users = cls.query
return users
@property
def is_authenticated(self):
return True
@property
def is_anonymous(self):
return False
@property
def is_email_confirmed(self):
from app.modules.auth.models import Code, CodeTypes
# Get any codes that fit this request
code = (
Code.query.filter_by(user=self, code_type=CodeTypes.email)
.order_by(Code.created.desc())
.first()
)
if code is None:
return False
return code.is_resolved
def get_org_memberships(self):
return [
enrollment.organization
for enrollment in self.organization_membership_enrollments
]
def get_org_moderatorships(self):
return [
enrollment.organization
for enrollment in self.organization_moderator_enrollments
]
def get_projects(self):
return [enrollment.project for enrollment in self.project_membership_enrollments]
def get_collaborations_as_json(self):
from app.modules.collaborations.schemas import DetailedCollaborationSchema
json_resp = []
for collab_assoc in self.user_collaboration_associations:
json_resp.append(
DetailedCollaborationSchema().dump(collab_assoc.collaboration).data
)
return json_resp
def get_notification_preferences(self):
from app.modules.notifications.models import UserNotificationPreferences
# User preferences are the system ones plus the ones stored in this class
# Return the combination to the REST API
preferences = UserNotificationPreferences.get_user_preferences(self)
return preferences
def unprocessed_asset_groups(self):
return [
asset_group.guid
for asset_group in self.asset_groups
if not asset_group.is_processed()
]
def unprocessed_sightings(self):
from app.modules.sightings.models import SightingStage
return [
sighting.guid
for sighting in self.get_sightings()
if not sighting.stage == SightingStage.processed
]
def get_id(self):
return self.guid
def has_static_role(self, role):
return (self.static_roles & role.mask) != 0
def set_static_role(self, role):
if self.has_static_role(role):
return
self.static_roles |= role.mask
def unset_static_role(self, role):
if not self.has_static_role(role):
return
self.static_roles ^= role.mask
def check_owner(self, user):
return self == user
def check_supervisor(self, user):
return self.check_owner(user)
def get_codes(self, code_type, **kwargs):
# This import for Code needs to be local
from app.modules.auth.models import Code
code = Code.get(self, code_type, **kwargs)
return code
def get_invite_code(self):
# This import for Code needs to be local
from app.modules.auth.models import CodeTypes
return self.get_codes(CodeTypes.invite, replace=True)
def get_email_confirmation_code(self):
# This import for Code needs to be local
from app.modules.auth.models import CodeTypes
return self.get_codes(CodeTypes.email, replace=True)
def get_account_recovery_code(self):
# This import for Code needs to be local
from app.modules.auth.models import CodeTypes
return self.get_codes(CodeTypes.recover, replace=True, replace_ttl=None)
def set_password(self, password):
if password is None:
# This function "sets" the password, it's the responsibility of the caller to ensure it's valid
raise ValueError('Empty password not allowed')
self.password = password
with db.session.begin():
db.session.merge(self)
db.session.refresh(self)
return self
def lockout(self):
from app.modules.auth.models import OAuth2Client, OAuth2Grant, OAuth2Token, Code
# Disable permissions
self.is_staff = False
self.is_admin = False
self.is_active = False
self.in_reset = False
self.in_setup = False
with db.session.begin():
db.session.merge(self)
db.session.refresh(self)
# Logout of sessions and API keys
auth_list = []
auth_list += OAuth2Token.query.filter_by(user_guid=self.guid).all()
auth_list += OAuth2Grant.query.filter_by(user_guid=self.guid).all()
auth_list += OAuth2Client.query.filter_by(user_guid=self.guid).all()
auth_list += Code.query.filter_by(user_guid=self.guid).all()
for auth_ in auth_list:
auth_.delete()
return self
def owns_object(self, obj):
from app.modules.assets.models import Asset
from app.modules.asset_groups.models import AssetGroup
from app.modules.encounters.models import Encounter
from app.modules.sightings.models import Sighting
from app.modules.projects.models import Project
from app.modules.individuals.models import Individual
from app.modules.notifications.models import Notification
ret_val = False
if isinstance(obj, User):
ret_val = obj == self
        # AssetGroup, Encounter, Project and Notification all have an owner field, check that
elif isinstance(obj, (AssetGroup, Encounter, Project, Notification)):
ret_val = obj.owner == self
elif isinstance(obj, Asset):
# assets are not owned directly by the user but the asset_group they're in is.
            # TODO: need to understand whether assets still have an asset_group once they become part of an encounter
if obj.asset_group is not None:
ret_val = obj.asset_group.owner is self
elif isinstance(obj, Sighting):
# decided (2021-03-12) that "owner" of a Sighting is not applicable therefore always False
# permissions must be handled in ways not dependent on ownership
ret_val = False
elif isinstance(obj, Individual):
for encounter in obj.get_encounters():
if encounter.get_owner() is self:
ret_val = True
break
return ret_val
def get_my_annotations(self):
annotations = []
for encounter in self.owned_encounters:
annotations.extend(encounter.annotations)
return annotations
def get_all_encounters(self):
annotations = self.get_my_annotations()
# TODO add collaboration annotations
return annotations
def delete(self):
with db.session.begin():
# TODO: Ensure proper cleanup
for asset_group in self.asset_groups:
asset_group.delete()
AuditLog.delete_object(log, self)
db.session.delete(self)
@classmethod
def initial_random_password(cls):
return security.generate_random(128)
@classmethod
def get_public_user(cls):
return User.ensure_user(
email=User.PUBLIC_USER_EMAIL,
password=User.initial_random_password(),
full_name='Public User',
is_internal=True,
)
def get_sightings(self):
sightings = []
for encounter in self.owned_encounters:
sighting = encounter.get_sighting()
if sighting:
sightings.append(encounter.get_sighting())
sighting_set = set(sightings)
return list(sighting_set)
USER_ROLES = [
role.value[-1]
for role in User.StaticRoles.__members__.values()
if role.value[-1] not in ('in_setup', 'in_reset')
]
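# --- Illustrative sketch (standalone, not the houston User model) ---
# StaticRoles packs each role into one bit of the integer column
# static_roles; has/set/unset_static_role are plain bitwise operations.
# A minimal standalone demonstration of the same pattern, reusing a few of
# the masks defined above:
import enum


class Roles(enum.Enum):
    ADMIN = 0x04000
    STAFF = 0x02000
    ACTIVE = 0x01000


static_roles = 0
static_roles |= Roles.ACTIVE.value             # set_static_role
static_roles |= Roles.ADMIN.value

print(bool(static_roles & Roles.ADMIN.value))  # has_static_role -> True
print(bool(static_roles & Roles.STAFF.value))  # -> False

static_roles ^= Roles.ADMIN.value              # unset_static_role (only when set)
print(bool(static_roles & Roles.ADMIN.value))  # -> False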
| 34.561497
| 112
| 0.609005
|
import enum
import logging
import uuid
from flask import current_app
from sqlalchemy_utils import types as column_types
from flask_login import current_user
from app.extensions import db, FeatherModel
from app.extensions.auth import security
from app.extensions.edm import EDMObjectMixin
from app.extensions.api.parameters import _get_is_static_role_property
import app.extensions.logging as AuditLog
log = logging.getLogger(__name__)
class UserEDMMixin(EDMObjectMixin):
EDM_NAME = 'user'
EDM_VERSION_ATTRIBUTE = 'version'
EDM_LOG_ATTRIBUTES = [
'emailAddress',
]
EDM_ATTRIBUTE_MAPPING = {
'id' : None,
'lastLogin' : None,
'username' : None,
'acceptedUserAgreement' : 'accepted_user_agreement',
'affiliation' : 'affiliation',
'emailAddress' : 'email',
'fullName' : 'full_name',
'receiveEmails' : 'receive_notification_emails',
'sharing' : 'shares_data',
'userURL' : 'website',
'version' : 'version',
'organizations' : '_process_edm_user_organization',
'profileImageUrl' : '_process_edm_user_profile_url',
}
@classmethod
def ensure_edm_obj(cls, guid):
user = User.query.filter(User.guid == guid).first()
is_new = user is None
if is_new:
email = '%s@localhost' % (guid,)
password = User.initial_random_password()
user = User(
guid=guid,
email=email,
password=password,
version=None,
is_active=True,
in_alpha=True,
)
with db.session.begin():
db.session.add(user)
db.session.refresh(user)
return user, is_new
def _process_edm_user_profile_url(self, url):
log.warning('User._process_edm_profile_url() not implemented yet')
def _process_edm_user_organization(self, org):
log.warning('User._process_edm_user_organization() not implemented yet')
class User(db.Model, FeatherModel, UserEDMMixin):
def __init__(self, *args, **kwargs):
if 'password' not in kwargs:
raise ValueError('User must have a password')
super().__init__(*args, **kwargs)
guid = db.Column(
db.GUID, default=uuid.uuid4, primary_key=True
)
version = db.Column(db.BigInteger, default=None, nullable=True)
email = db.Column(
db.String(length=120), index=True, unique=True, default='', nullable=False
)
password = db.Column(
column_types.PasswordType(max_length=128, schemes=('bcrypt',)), nullable=False
)
full_name = db.Column(
db.String(length=120), default='', nullable=False
)
website = db.Column(
db.String(length=120), nullable=True
)
location = db.Column(db.String(length=120), default='', nullable=True)
affiliation = db.Column(
db.String(length=120), default='', nullable=True
)
forum_id = db.Column(db.String(length=120), default='', nullable=True)
locale = db.Column(db.String(length=20), default='EN', nullable=True)
accepted_user_agreement = db.Column(
db.Boolean, default=False, nullable=False
)
use_usa_date_format = db.Column(db.Boolean, default=True, nullable=False)
show_email_in_profile = db.Column(db.Boolean, default=False, nullable=False)
receive_notification_emails = db.Column(
db.Boolean, default=True, nullable=False
)
receive_newsletter_emails = db.Column(db.Boolean, default=False, nullable=False)
shares_data = db.Column(
db.Boolean, default=True, nullable=False
)
default_identification_catalogue = db.Column(
db.GUID, nullable=True
)
profile_fileupload_guid = db.Column(
db.GUID, db.ForeignKey('file_upload.guid'), nullable=True
)
from app.modules.fileuploads.models import FileUpload
profile_fileupload = db.relationship(FileUpload)
organization_membership_enrollments = db.relationship(
'OrganizationUserMembershipEnrollment', back_populates='user'
)
organization_moderator_enrollments = db.relationship(
'OrganizationUserModeratorEnrollment', back_populates='user'
)
project_membership_enrollments = db.relationship(
'ProjectUserMembershipEnrollment', back_populates='user'
)
user_collaboration_associations = db.relationship(
'CollaborationUserAssociations', back_populates='user'
)
asset_groups = db.relationship(
'AssetGroup',
back_populates='owner',
primaryjoin='User.guid == AssetGroup.owner_guid',
order_by='AssetGroup.guid',
)
submitted_asset_groups = db.relationship(
'AssetGroup',
back_populates='submitter',
primaryjoin='User.guid == AssetGroup.submitter_guid',
order_by='AssetGroup.guid',
)
owned_encounters = db.relationship(
'Encounter',
back_populates='owner',
primaryjoin='User.guid == Encounter.owner_guid',
order_by='Encounter.guid',
)
submitted_encounters = db.relationship(
'Encounter',
back_populates='submitter',
primaryjoin='User.guid == Encounter.submitter_guid',
order_by='Encounter.guid',
)
owned_organizations = db.relationship(
'Organization',
back_populates='owner',
primaryjoin='User.guid == Organization.owner_guid',
order_by='Organization.guid',
)
owned_projects = db.relationship(
'Project',
back_populates='owner',
primaryjoin='User.guid == Project.owner_guid',
order_by='Project.guid',
)
notifications = db.relationship(
'Notification',
back_populates='recipient',
primaryjoin='User.guid == Notification.recipient_guid',
order_by='Notification.guid',
)
notification_preferences = db.relationship(
'UserNotificationPreferences',
back_populates='user',
primaryjoin='User.guid == UserNotificationPreferences.user_guid',
order_by='UserNotificationPreferences.guid',
)
PUBLIC_USER_EMAIL = 'public@localhost'
class StaticRoles(enum.Enum):
DATA_MANAGER = (0x100000, 'DataManager', 'DataManager', 'is_data_manager')
USER_MANAGER = (0x80000, 'UserManager', 'UserManager', 'is_user_manager')
CONTRIBUTOR = (0x40000, 'Contributor', 'Contributor', 'is_contributor')
RESEARCHER = (0x20000, 'Researcher', 'Researcher', 'is_researcher')
EXPORTER = (0x10000, 'Exporter', 'Exporter', 'is_exporter')
INTERNAL = (0x08000, 'Internal', 'Internal', 'is_internal')
ADMIN = (0x04000, 'Site Administrator', 'Admin', 'is_admin')
STAFF = (0x02000, 'Staff Member', 'Staff', 'is_staff')
ACTIVE = (0x01000, 'Active Account', 'Active', 'is_active')
SETUP = (0x00800, 'Account in Setup', 'Setup', 'in_setup')
RESET = (0x00400, 'Account in Password Reset', 'Reset', 'in_reset')
ALPHA = (0x00200, 'Enrolled in Alpha', 'Alpha', 'in_alpha')
BETA = (0x00100, 'Enrolled in Beta', 'Beta', 'in_beta')
@property
def mask(self):
return self.value[0]
@property
def title(self):
return self.value[1]
@property
def shorthand(self):
return self.value[2]
static_roles = db.Column(db.Integer, default=0, nullable=False)
is_contributor = _get_is_static_role_property(
'is_contributor', StaticRoles.CONTRIBUTOR
)
is_user_manager = _get_is_static_role_property(
'is_user_manager', StaticRoles.USER_MANAGER
)
is_data_manager = _get_is_static_role_property(
'is_data_manager', StaticRoles.DATA_MANAGER
)
is_researcher = _get_is_static_role_property('is_researcher', StaticRoles.RESEARCHER)
is_exporter = _get_is_static_role_property('is_exporter', StaticRoles.EXPORTER)
is_internal = _get_is_static_role_property('is_internal', StaticRoles.INTERNAL)
is_admin = _get_is_static_role_property('is_admin', StaticRoles.ADMIN)
is_staff = _get_is_static_role_property('is_staff', StaticRoles.STAFF)
is_active = _get_is_static_role_property('is_active', StaticRoles.ACTIVE)
in_beta = _get_is_static_role_property('in_beta', StaticRoles.BETA)
in_alpha = _get_is_static_role_property('in_alpha', StaticRoles.ALPHA)
in_reset = _get_is_static_role_property('in_reset', StaticRoles.RESET)
in_setup = _get_is_static_role_property('in_setup', StaticRoles.SETUP)
@property
def is_privileged(self):
return self.is_staff or self.is_internal
def get_state(self):
state = []
state += [self.StaticRoles.ACTIVE.shorthand] if self.is_active else []
state += [self.StaticRoles.SETUP.shorthand] if self.in_setup else []
state += [self.StaticRoles.RESET.shorthand] if self.in_reset else []
state += [self.StaticRoles.ALPHA.shorthand] if self.in_alpha else []
state += [self.StaticRoles.BETA.shorthand] if self.in_beta else []
return state
def get_roles(self):
roles = []
roles += [self.StaticRoles.DATA_MANAGER.shorthand] if self.is_data_manager else []
roles += [self.StaticRoles.USER_MANAGER.shorthand] if self.is_user_manager else []
roles += [self.StaticRoles.INTERNAL.shorthand] if self.is_internal else []
roles += [self.StaticRoles.ADMIN.shorthand] if self.is_admin else []
roles += [self.StaticRoles.STAFF.shorthand] if self.is_staff else []
roles += [self.StaticRoles.CONTRIBUTOR.shorthand] if self.is_contributor else []
roles += [self.StaticRoles.RESEARCHER.shorthand] if self.is_researcher else []
roles += [self.StaticRoles.EXPORTER.shorthand] if self.is_exporter else []
return roles
def __repr__(self):
state = ', '.join(self.get_state())
roles = ', '.join(self.get_roles())
return (
'<{class_name}('
'guid={self.guid}, '
'email="{self.email}", '
'name="{self.full_name}", '
'state={state}, '
'roles={roles}'
')>'.format(
class_name=self.__class__.__name__, self=self, state=state, roles=roles
)
)
@classmethod
def get_admins(cls):
users = cls.query.all()
admin_users = []
for user in users:
if user.email.endswith('@localhost'):
continue
if user.is_admin:
admin_users.append(user)
return admin_users
@classmethod
def admin_user_initialized(cls):
return len(cls.get_admins()) > 0
@classmethod
def ensure_user(
cls,
email,
password,
is_internal=False,
is_admin=False,
is_staff=False,
is_researcher=False,
is_contributor=True,
is_user_manager=False,
is_exporter=False,
is_active=True,
in_beta=False,
in_alpha=False,
update=False,
**kwargs,
):
from app.extensions import db
user = User.find(email=email)
if user is None:
user = User(
password=password,
email=email,
is_internal=is_internal,
is_admin=is_admin,
is_staff=is_staff,
is_active=is_active,
is_researcher=is_researcher,
is_contributor=is_contributor,
is_user_manager=is_user_manager,
is_exporter=is_exporter,
in_beta=in_beta,
in_alpha=in_alpha,
**kwargs,
)
with db.session.begin():
db.session.add(user)
log.info('New user created: %r' % (user,))
elif update:
user.password = password
user.is_internal = is_internal
user.is_admin = is_admin
user.is_staff = is_staff
user.is_researcher = is_researcher
user.is_contributor = is_contributor
user.is_user_manager = is_user_manager
user.is_exporter = is_exporter
user.is_active = is_active
user.in_beta = in_beta
user.in_alpha = in_alpha
with db.session.begin():
db.session.merge(user)
log.info('Updated user: %r' % (user,))
db.session.refresh(user)
return user
@classmethod
def find(cls, email=None, password=None, edm_login_fallback=True):
if email is None:
return None
email_candidates = [
email,
'%s@localhost' % (email,),
]
for email_candidate in email_candidates:
user = cls.query.filter(User.email == email_candidate).first()
if password is None:
if user is not None:
return user
else:
if user is not None:
if user.password == password:
return user
if edm_login_fallback:
if current_app.edm.check_user_login(email_candidate, password):
log.info('User authenticated via EDM: %r' % (email_candidate,))
if user is not None:
# We authenticated a local user against an EDM (but the local password failed)
if user.password != password:
# The user passed the login with an EDM, update local password
log.warning(
"Updating user's local password: %r" % (user,)
)
user = user.set_password(password)
return user
else:
log.critical(
'The user authenticated via EDM but has no local user record'
)
cls.edm_sync_all()
user = cls.find(email=email, edm_login_fallback=False)
return user
# 2) the user's password was provided and was incorrect
return None
@classmethod
def query_search(cls, search=None):
from sqlalchemy import or_, and_
from app.modules.auth.models import Code, CodeTypes
if search is not None:
search = search.strip().split(' ')
search = [term.strip() for term in search]
search = [term for term in search if len(term) > 0]
or_terms = []
for term in search:
codes = (
Code.query.filter_by(code_type=CodeTypes.checkin)
.filter(
Code.accept_code.contains(term),
)
.all()
)
code_users = set([])
for code in codes:
if not code.is_expired:
code_users.add(code.user.guid)
or_term = or_(
cls.guid.in_(code_users),
cls.email.contains(term),
cls.affiliation.contains(term),
cls.forum_id.contains(term),
cls.full_name.contains(term),
)
or_terms.append(or_term)
users = cls.query.filter(and_(*or_terms))
else:
users = cls.query
return users
@property
def is_authenticated(self):
return True
@property
def is_anonymous(self):
return False
@property
def is_email_confirmed(self):
from app.modules.auth.models import Code, CodeTypes
code = (
Code.query.filter_by(user=self, code_type=CodeTypes.email)
.order_by(Code.created.desc())
.first()
)
if code is None:
return False
return code.is_resolved
def get_org_memberships(self):
return [
enrollment.organization
for enrollment in self.organization_membership_enrollments
]
def get_org_moderatorships(self):
return [
enrollment.organization
for enrollment in self.organization_moderator_enrollments
]
def get_projects(self):
return [enrollment.project for enrollment in self.project_membership_enrollments]
def get_collaborations_as_json(self):
from app.modules.collaborations.schemas import DetailedCollaborationSchema
json_resp = []
for collab_assoc in self.user_collaboration_associations:
json_resp.append(
DetailedCollaborationSchema().dump(collab_assoc.collaboration).data
)
return json_resp
def get_notification_preferences(self):
from app.modules.notifications.models import UserNotificationPreferences
preferences = UserNotificationPreferences.get_user_preferences(self)
return preferences
def unprocessed_asset_groups(self):
return [
asset_group.guid
for asset_group in self.asset_groups
if not asset_group.is_processed()
]
def unprocessed_sightings(self):
from app.modules.sightings.models import SightingStage
return [
sighting.guid
for sighting in self.get_sightings()
if not sighting.stage == SightingStage.processed
]
def get_id(self):
return self.guid
def has_static_role(self, role):
return (self.static_roles & role.mask) != 0
def set_static_role(self, role):
if self.has_static_role(role):
return
self.static_roles |= role.mask
def unset_static_role(self, role):
if not self.has_static_role(role):
return
self.static_roles ^= role.mask
def check_owner(self, user):
return self == user
def check_supervisor(self, user):
return self.check_owner(user)
def get_codes(self, code_type, **kwargs):
from app.modules.auth.models import Code
code = Code.get(self, code_type, **kwargs)
return code
def get_invite_code(self):
from app.modules.auth.models import CodeTypes
return self.get_codes(CodeTypes.invite, replace=True)
def get_email_confirmation_code(self):
from app.modules.auth.models import CodeTypes
return self.get_codes(CodeTypes.email, replace=True)
def get_account_recovery_code(self):
from app.modules.auth.models import CodeTypes
return self.get_codes(CodeTypes.recover, replace=True, replace_ttl=None)
def set_password(self, password):
if password is None:
raise ValueError('Empty password not allowed')
self.password = password
with db.session.begin():
db.session.merge(self)
db.session.refresh(self)
return self
def lockout(self):
from app.modules.auth.models import OAuth2Client, OAuth2Grant, OAuth2Token, Code
self.is_staff = False
self.is_admin = False
self.is_active = False
self.in_reset = False
self.in_setup = False
with db.session.begin():
db.session.merge(self)
db.session.refresh(self)
auth_list = []
auth_list += OAuth2Token.query.filter_by(user_guid=self.guid).all()
auth_list += OAuth2Grant.query.filter_by(user_guid=self.guid).all()
auth_list += OAuth2Client.query.filter_by(user_guid=self.guid).all()
auth_list += Code.query.filter_by(user_guid=self.guid).all()
for auth_ in auth_list:
auth_.delete()
return self
def owns_object(self, obj):
from app.modules.assets.models import Asset
from app.modules.asset_groups.models import AssetGroup
from app.modules.encounters.models import Encounter
from app.modules.sightings.models import Sighting
from app.modules.projects.models import Project
from app.modules.individuals.models import Individual
from app.modules.notifications.models import Notification
ret_val = False
if isinstance(obj, User):
ret_val = obj == self
elif isinstance(obj, (AssetGroup, Encounter, Project, Notification)):
ret_val = obj.owner == self
elif isinstance(obj, Asset):
            # TODO: need to understand whether, once assets become part of an encounter, they still have an asset_group
if obj.asset_group is not None:
ret_val = obj.asset_group.owner is self
elif isinstance(obj, Sighting):
# decided (2021-03-12) that "owner" of a Sighting is not applicable therefore always False
# permissions must be handled in ways not dependent on ownership
ret_val = False
elif isinstance(obj, Individual):
for encounter in obj.get_encounters():
if encounter.get_owner() is self:
ret_val = True
break
return ret_val
def get_my_annotations(self):
annotations = []
for encounter in self.owned_encounters:
annotations.extend(encounter.annotations)
return annotations
def get_all_encounters(self):
annotations = self.get_my_annotations()
# TODO add collaboration annotations
return annotations
def delete(self):
with db.session.begin():
# TODO: Ensure proper cleanup
for asset_group in self.asset_groups:
asset_group.delete()
AuditLog.delete_object(log, self)
db.session.delete(self)
@classmethod
def initial_random_password(cls):
return security.generate_random(128)
@classmethod
def get_public_user(cls):
return User.ensure_user(
email=User.PUBLIC_USER_EMAIL,
password=User.initial_random_password(),
full_name='Public User',
is_internal=True,
)
def get_sightings(self):
sightings = []
for encounter in self.owned_encounters:
sighting = encounter.get_sighting()
if sighting:
sightings.append(encounter.get_sighting())
sighting_set = set(sightings)
return list(sighting_set)
USER_ROLES = [
role.value[-1]
for role in User.StaticRoles.__members__.values()
if role.value[-1] not in ('in_setup', 'in_reset')
]
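# Editor's illustrative sketch (assumption: standalone snippet, not part of the
# original module). It mirrors how has_static_role / set_static_role /
# unset_static_role above manipulate the static_roles bitmask, using the enum's
# ADMIN (0x04000) and ACTIVE (0x01000) mask values as plain integers.
_ADMIN_MASK, _ACTIVE_MASK = 0x04000, 0x01000
_static_roles = 0
_static_roles |= _ADMIN_MASK                   # set_static_role(ADMIN)
_static_roles |= _ACTIVE_MASK                  # set_static_role(ACTIVE)
assert (_static_roles & _ADMIN_MASK) != 0      # has_static_role(ADMIN) is True
_static_roles ^= _ADMIN_MASK                   # unset_static_role(ADMIN)
assert (_static_roles & _ADMIN_MASK) == 0
assert (_static_roles & _ACTIVE_MASK) != 0     # ACTIVE is unaffected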
| true
| true
|
7909abdb3aa096b054ad176300e4c289d51afb9c
| 3,036
|
py
|
Python
|
multitask_benchmark/train/mpnn.py
|
Michaelvll/pna
|
45828fb6d7299f212c230b35b00850883c584457
|
[
"MIT"
] | 249
|
2020-04-14T08:43:58.000Z
|
2022-03-27T01:14:44.000Z
|
multitask_benchmark/train/mpnn.py
|
KonstantinKlepikov/pna
|
45828fb6d7299f212c230b35b00850883c584457
|
[
"MIT"
] | 12
|
2020-04-16T10:15:27.000Z
|
2022-03-22T21:43:04.000Z
|
multitask_benchmark/train/mpnn.py
|
KonstantinKlepikov/pna
|
45828fb6d7299f212c230b35b00850883c584457
|
[
"MIT"
] | 42
|
2020-04-14T23:10:57.000Z
|
2022-02-25T09:02:29.000Z
|
from __future__ import division
from __future__ import print_function
from models.pytorch.pna.layer import PNALayer
from multitask_benchmark.util.train import execute_train, build_arg_parser
# Training settings
parser = build_arg_parser()
parser.add_argument('--self_loop', action='store_true', default=False, help='Whether to add self loops in aggregators')
parser.add_argument('--towers', type=int, default=4, help='Number of towers in MPNN layers')
parser.add_argument('--aggregation', type=str, default='sum', help='Type of aggregation')
parser.add_argument('--pretrans_layers', type=int, default=1, help='Number of MLP layers before aggregation')
parser.add_argument('--posttrans_layers', type=int, default=1, help='Number of MLP layers after aggregation')
args = parser.parse_args()
# The MPNNs can be considered a particular case of PNA networks with a single aggregator and no scalers (identity)
execute_train(gnn_args=dict(nfeat=None,
nhid=args.hidden,
nodes_out=None,
graph_out=None,
dropout=args.dropout,
device=None,
first_conv_descr=dict(layer_type=PNALayer,
args=dict(
aggregators=[args.aggregation],
scalers=['identity'], avg_d=None,
towers=args.towers,
self_loop=args.self_loop,
divide_input=False,
pretrans_layers=args.pretrans_layers,
posttrans_layers=args.posttrans_layers
)),
middle_conv_descr=dict(layer_type=PNALayer,
args=dict(
aggregators=[args.aggregation],
scalers=['identity'],
avg_d=None, towers=args.towers,
self_loop=args.self_loop,
divide_input=True,
pretrans_layers=args.pretrans_layers,
posttrans_layers=args.posttrans_layers
)),
fc_layers=args.fc_layers,
conv_layers=args.conv_layers,
skip=args.skip,
gru=args.gru,
fixed=args.fixed,
variable=args.variable), args=args)
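# Editor's illustrative note (not part of the original script): the comment above
# describes these MPNNs as PNA layers with a single aggregator and only the
# 'identity' scaler. In plain numpy terms (hypothetical shapes) that amounts to:
#     neighbour_feats = np.random.randn(5, 16)   # 5 neighbours, 16 features each
#     aggregated = neighbour_feats.sum(axis=0)   # single aggregator: 'sum'
#     scaled = aggregated                        # 'identity' scaler: no rescaling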
| 60.72
| 119
| 0.452899
|
from __future__ import division
from __future__ import print_function
from models.pytorch.pna.layer import PNALayer
from multitask_benchmark.util.train import execute_train, build_arg_parser
parser = build_arg_parser()
parser.add_argument('--self_loop', action='store_true', default=False, help='Whether to add self loops in aggregators')
parser.add_argument('--towers', type=int, default=4, help='Number of towers in MPNN layers')
parser.add_argument('--aggregation', type=str, default='sum', help='Type of aggregation')
parser.add_argument('--pretrans_layers', type=int, default=1, help='Number of MLP layers before aggregation')
parser.add_argument('--posttrans_layers', type=int, default=1, help='Number of MLP layers after aggregation')
args = parser.parse_args()
execute_train(gnn_args=dict(nfeat=None,
nhid=args.hidden,
nodes_out=None,
graph_out=None,
dropout=args.dropout,
device=None,
first_conv_descr=dict(layer_type=PNALayer,
args=dict(
aggregators=[args.aggregation],
scalers=['identity'], avg_d=None,
towers=args.towers,
self_loop=args.self_loop,
divide_input=False,
pretrans_layers=args.pretrans_layers,
posttrans_layers=args.posttrans_layers
)),
middle_conv_descr=dict(layer_type=PNALayer,
args=dict(
aggregators=[args.aggregation],
scalers=['identity'],
avg_d=None, towers=args.towers,
self_loop=args.self_loop,
divide_input=True,
pretrans_layers=args.pretrans_layers,
posttrans_layers=args.posttrans_layers
)),
fc_layers=args.fc_layers,
conv_layers=args.conv_layers,
skip=args.skip,
gru=args.gru,
fixed=args.fixed,
variable=args.variable), args=args)
| true
| true
|
7909abdd20008c00b8dd3490e556423a1825818c
| 265
|
py
|
Python
|
setup.py
|
hisashim/pyxsltp
|
647c939921328b5e46c8d1f5262f14c037abf3f4
|
[
"MIT"
] | null | null | null |
setup.py
|
hisashim/pyxsltp
|
647c939921328b5e46c8d1f5262f14c037abf3f4
|
[
"MIT"
] | null | null | null |
setup.py
|
hisashim/pyxsltp
|
647c939921328b5e46c8d1f5262f14c037abf3f4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
sys.dont_write_bytecode=True
from distutils.core import setup
from pyxsltp import __version__
setup(
name = "pyxsltp",
version = __version__,
py_modules = ['pyxsltp'],
scripts = ['pyxsltp'],
)
| 16.5625
| 32
| 0.671698
|
import sys
sys.dont_write_bytecode=True
from distutils.core import setup
from pyxsltp import __version__
setup(
name = "pyxsltp",
version = __version__,
py_modules = ['pyxsltp'],
scripts = ['pyxsltp'],
)
| true
| true
|
7909ac6ebed762c73ef81e2e2fbf36179641e940
| 1,142
|
py
|
Python
|
Particle-On-Potential-MC-sampling/Run_MuellerBrownPotential-WithMetaD.py
|
valsson/MD-MC-Codes-2016
|
b1e5438313fe6fec7c1bbc8fb8ea979ce964f7dc
|
[
"MIT"
] | 3
|
2016-09-30T15:27:16.000Z
|
2020-12-27T02:51:35.000Z
|
Particle-On-Potential-MC-sampling/Run_MuellerBrownPotential-WithMetaD.py
|
valsson/MD-MC-Codes-2016
|
b1e5438313fe6fec7c1bbc8fb8ea979ce964f7dc
|
[
"MIT"
] | null | null | null |
Particle-On-Potential-MC-sampling/Run_MuellerBrownPotential-WithMetaD.py
|
valsson/MD-MC-Codes-2016
|
b1e5438313fe6fec7c1bbc8fb8ea979ce964f7dc
|
[
"MIT"
] | 1
|
2020-12-27T02:51:37.000Z
|
2020-12-27T02:51:37.000Z
|
import numpy as np
import time
import matplotlib.pyplot as plt
from MuellerBrownPotential import MuellerBrownPotential
from LogExpOfHarmonicWellsPotential import LogExpOfHarmonicWellsPotential
from MonteCarloSimulator import MonteCarloSimulator
from MetadynamicsBias import MetadynamicsBias
T = 1.0
NumMCmoves = 10000
kB = 1.0
kBT = kB * T
initialHeight = (kBT/2.0)
sigma = [ 0.2,0.2 ]
pace = 1
biasfactor = 10.0
# potential = LogExpOfHarmonicWellsPotential()
potential = MuellerBrownPotential()
MetaD = MetadynamicsBias(
Temperature = T,
Sigma = sigma,
InitialHeight = initialHeight,
Pace = pace,
Biasfactor = biasfactor
)
MCsim = MonteCarloSimulator(
potentialClass=potential,
Temperature = T,
externalBiasClass = MetaD
)
MCsim.resetRun()
MCsim.setPosition( potential.getMinima()[0] )
MCsim.runMC(NumMCmoves)
print ' '
MCsim.printAverageAcceptence()
print ' '
MCsim.printTrajectoryMeanAndStddev()
print ' '
MCsim.plotPotentialAndTrajectory()
MCsim.plotTrajectoryTimeSeries()
MCsim.plotTrajectoryHistogramAndFES()
| 23.791667
| 73
| 0.718914
|
import numpy as np
import time
import matplotlib.pyplot as plt
from MuellerBrownPotential import MuellerBrownPotential
from LogExpOfHarmonicWellsPotential import LogExpOfHarmonicWellsPotential
from MonteCarloSimulator import MonteCarloSimulator
from MetadynamicsBias import MetadynamicsBias
T = 1.0
NumMCmoves = 10000
kB = 1.0
kBT = kB * T
initialHeight = (kBT/2.0)
sigma = [ 0.2,0.2 ]
pace = 1
biasfactor = 10.0
potential = MuellerBrownPotential()
MetaD = MetadynamicsBias(
Temperature = T,
Sigma = sigma,
InitialHeight = initialHeight,
Pace = pace,
Biasfactor = biasfactor
)
MCsim = MonteCarloSimulator(
potentialClass=potential,
Temperature = T,
externalBiasClass = MetaD
)
MCsim.resetRun()
MCsim.setPosition( potential.getMinima()[0] )
MCsim.runMC(NumMCmoves)
print ' '
MCsim.printAverageAcceptence()
print ' '
MCsim.printTrajectoryMeanAndStddev()
print ' '
MCsim.plotPotentialAndTrajectory()
MCsim.plotTrajectoryTimeSeries()
MCsim.plotTrajectoryHistogramAndFES()
| false
| true
|
7909ad6158304323711973e33fe4175537cf42ae
| 7,969
|
py
|
Python
|
nautobot/core/runner/runner.py
|
jtschichold/nautobot
|
71c61cbbd23301111c281ed1d908a026ea2e4e07
|
[
"Apache-2.0"
] | 1
|
2022-01-11T10:33:50.000Z
|
2022-01-11T10:33:50.000Z
|
nautobot/core/runner/runner.py
|
jtschichold/nautobot
|
71c61cbbd23301111c281ed1d908a026ea2e4e07
|
[
"Apache-2.0"
] | null | null | null |
nautobot/core/runner/runner.py
|
jtschichold/nautobot
|
71c61cbbd23301111c281ed1d908a026ea2e4e07
|
[
"Apache-2.0"
] | null | null | null |
"""
logan.runner
~~~~~~~~~~~~
:copyright: (c) 2012 David Cramer.
:license: Apache License 2.0, see NOTICE for more details.
"""
import argparse
import os
import re
import sys
from django.core import management
from nautobot import __version__
from . import importer
from .settings import create_default_settings
__configured = False
def sanitize_name(project):
project = project.replace(" ", "-")
return re.sub("[^A-Z0-9a-z_-]", "-", project)
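# Editor's illustrative example (not part of the original module):
#     sanitize_name("My Project!") -> "My-Project-"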
def parse_command_args(args):
"""
This parses the arguments and returns a tuple containing:
(args, command, command_args)
For example, "--config=bar start --with=baz" would return:
(['--config=bar'], 'start', ['--with=baz'])
"""
index = None
for arg_i, arg in enumerate(args):
if not arg.startswith("-"):
index = arg_i
break
# Unable to parse any arguments
if index is None:
return (args, None, [])
return (args[:index], args[index], args[(index + 1) :])
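# Editor's illustrative examples (not part of the original module), echoing the
# docstring above:
#     parse_command_args(["--config=bar", "start", "--with=baz"])
#     -> (["--config=bar"], "start", ["--with=baz"])
#     parse_command_args(["--config=bar"])   # no command present
#     -> (["--config=bar"], None, [])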
def is_configured():
global __configured
return __configured
def configure_app(
config_path=None,
project=None,
default_config_path=None,
default_settings=None,
settings_initializer=None,
settings_envvar=None,
initializer=None,
allow_extras=True,
config_module_name=None,
runner_name=None,
on_configure=None,
):
"""
    :param project: should represent the canonical name for the project, generally
    the same name it is assigned in distutils.
    :param default_config_path: the default location for the configuration file.
    :param default_settings: default settings to load (think inheritance).
:param settings_initializer: a callback function which should return a string
representing the default settings template to generate.
:param initializer: a callback function which will be executed before the command
is executed. It is passed a dictionary of various configuration attributes.
"""
global __configured
project_filename = sanitize_name(project)
if default_config_path is None:
default_config_path = "~/%s/%s.conf.py" % (project_filename, project_filename)
if settings_envvar is None:
settings_envvar = project_filename.upper() + "_CONF"
if config_module_name is None:
config_module_name = project_filename + "_config"
# normalize path
if settings_envvar in os.environ:
default_config_path = os.environ.get(settings_envvar)
else:
default_config_path = os.path.normpath(os.path.abspath(os.path.expanduser(default_config_path)))
if not config_path:
config_path = default_config_path
config_path = os.path.expanduser(config_path)
if not os.path.exists(config_path):
if runner_name:
raise ValueError(
"Configuration file does not exist. Use '%s init' to initialize the file." % (runner_name,)
)
raise ValueError("Configuration file does not exist at %r" % (config_path,))
os.environ["DJANGO_SETTINGS_MODULE"] = config_module_name
def settings_callback(settings):
if initializer is None:
return
try:
initializer(
{
"project": project,
"config_path": config_path,
"settings": settings,
}
)
except Exception:
# XXX: Django doesn't like various errors in this path
import sys
import traceback
traceback.print_exc()
sys.exit(1)
importer.install(
config_module_name,
config_path,
default_settings,
allow_extras=allow_extras,
callback=settings_callback,
)
__configured = True
# HACK(dcramer): we need to force access of django.conf.settings to
# ensure we don't hit any import-driven recursive behavior
from django.conf import settings
hasattr(settings, "INSTALLED_APPS")
if on_configure:
on_configure(
{
"project": project,
"config_path": config_path,
"settings": settings,
}
)
class VerboseHelpFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
"""Argparse Formatter that includes newlines and shows argument defaults."""
def run_app(**kwargs):
sys_args = sys.argv
# The established command for running this program
runner_name = os.path.basename(sys_args[0])
default_config_path = kwargs.get("default_config_path")
# Primary parser
parser = management.CommandParser(
description=kwargs.pop("description"),
formatter_class=VerboseHelpFormatter,
add_help=False,
)
parser.add_argument(
"-c",
"--config",
metavar="CONFIG",
help="Path to the configuration file",
)
parser.add_argument(
"--version",
action="version",
version=__version__,
)
# This block of code here is done in this way because of the built in Django
# management command parsing not playing well unless you have a Django
# config with SECRET_KEY defined.
# Parse out `--config` here first capturing any unparsed args for passing to
# Django parser.
args, unparsed_args = parser.parse_known_args()
# Now add the sub-parser for "init" command
subparsers = parser.add_subparsers(help=False, dest="command", metavar="")
init_parser = subparsers.add_parser(
"init",
help="Initialize a new configuration",
)
init_parser.add_argument(
"config_path",
default=default_config_path,
nargs="?",
help="Path to output generated configuration file",
)
# Try to use our parser first, to process custom arguments
try:
args = parser.parse_args()
command = args.command
command_args = sys.argv[1:]
# Fallback to passing through to Django management commands
# except RuntimeError as err:
except management.CommandError as err:
if "invalid choice" not in str(err):
raise
# Rewrite sys_args to have the unparsed args (if any)
sys_args = sys_args[:1] + unparsed_args
_, command, command_args = parse_command_args(sys_args[1:])
# If we don't get a command of some sort, print help and exit dirty
if not command:
parser.print_help()
parser.exit(1)
# The `init` command is reserved for initializing configuration
if command == "init":
settings_initializer = kwargs.get("settings_initializer")
config_path = os.path.expanduser(args.config_path)
# Check if the config already exists; alert user and exit if exists.
if os.path.exists(config_path):
print(
f"A configuration already exists at {config_path}. Please backup and remove it or choose another path."
)
return
# Create the config
try:
create_default_settings(config_path, settings_initializer)
except OSError as e:
raise e.__class__("Unable to write default settings file to %r" % config_path)
print("Configuration file created at %r" % config_path)
return
# Fetch config path from `--config` if provided, otherwise we want it to
# default to None so that the underlying machinery in `configure_app` will
# process default path or environment variable.
config_path = args.config
    # Overlay our config w/ defaults
try:
configure_app(config_path=config_path, **kwargs)
except ValueError as err:
parser.exit(status=2, message=str(err) + "\n")
# Call Django management command
management.execute_from_command_line([runner_name, command] + command_args)
# Exit cleanly
sys.exit(0)
if __name__ == "__main__":
run_app()
| 29.083942
| 119
| 0.651148
|
import argparse
import os
import re
import sys
from django.core import management
from nautobot import __version__
from . import importer
from .settings import create_default_settings
__configured = False
def sanitize_name(project):
project = project.replace(" ", "-")
return re.sub("[^A-Z0-9a-z_-]", "-", project)
def parse_command_args(args):
index = None
for arg_i, arg in enumerate(args):
if not arg.startswith("-"):
index = arg_i
break
if index is None:
return (args, None, [])
return (args[:index], args[index], args[(index + 1) :])
def is_configured():
global __configured
return __configured
def configure_app(
config_path=None,
project=None,
default_config_path=None,
default_settings=None,
settings_initializer=None,
settings_envvar=None,
initializer=None,
allow_extras=True,
config_module_name=None,
runner_name=None,
on_configure=None,
):
global __configured
project_filename = sanitize_name(project)
if default_config_path is None:
default_config_path = "~/%s/%s.conf.py" % (project_filename, project_filename)
if settings_envvar is None:
settings_envvar = project_filename.upper() + "_CONF"
if config_module_name is None:
config_module_name = project_filename + "_config"
if settings_envvar in os.environ:
default_config_path = os.environ.get(settings_envvar)
else:
default_config_path = os.path.normpath(os.path.abspath(os.path.expanduser(default_config_path)))
if not config_path:
config_path = default_config_path
config_path = os.path.expanduser(config_path)
if not os.path.exists(config_path):
if runner_name:
raise ValueError(
"Configuration file does not exist. Use '%s init' to initialize the file." % (runner_name,)
)
raise ValueError("Configuration file does not exist at %r" % (config_path,))
os.environ["DJANGO_SETTINGS_MODULE"] = config_module_name
def settings_callback(settings):
if initializer is None:
return
try:
initializer(
{
"project": project,
"config_path": config_path,
"settings": settings,
}
)
except Exception:
import sys
import traceback
traceback.print_exc()
sys.exit(1)
importer.install(
config_module_name,
config_path,
default_settings,
allow_extras=allow_extras,
callback=settings_callback,
)
__configured = True
# HACK(dcramer): we need to force access of django.conf.settings to
# ensure we don't hit any import-driven recursive behavior
from django.conf import settings
hasattr(settings, "INSTALLED_APPS")
if on_configure:
on_configure(
{
"project": project,
"config_path": config_path,
"settings": settings,
}
)
class VerboseHelpFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
def run_app(**kwargs):
sys_args = sys.argv
runner_name = os.path.basename(sys_args[0])
default_config_path = kwargs.get("default_config_path")
parser = management.CommandParser(
description=kwargs.pop("description"),
formatter_class=VerboseHelpFormatter,
add_help=False,
)
parser.add_argument(
"-c",
"--config",
metavar="CONFIG",
help="Path to the configuration file",
)
parser.add_argument(
"--version",
action="version",
version=__version__,
)
args, unparsed_args = parser.parse_known_args()
subparsers = parser.add_subparsers(help=False, dest="command", metavar="")
init_parser = subparsers.add_parser(
"init",
help="Initialize a new configuration",
)
init_parser.add_argument(
"config_path",
default=default_config_path,
nargs="?",
help="Path to output generated configuration file",
)
try:
args = parser.parse_args()
command = args.command
command_args = sys.argv[1:]
except management.CommandError as err:
if "invalid choice" not in str(err):
raise
sys_args = sys_args[:1] + unparsed_args
_, command, command_args = parse_command_args(sys_args[1:])
if not command:
parser.print_help()
parser.exit(1)
# The `init` command is reserved for initializing configuration
if command == "init":
settings_initializer = kwargs.get("settings_initializer")
config_path = os.path.expanduser(args.config_path)
# Check if the config already exists; alert user and exit if exists.
if os.path.exists(config_path):
print(
f"A configuration already exists at {config_path}. Please backup and remove it or choose another path."
)
return
# Create the config
try:
create_default_settings(config_path, settings_initializer)
except OSError as e:
raise e.__class__("Unable to write default settings file to %r" % config_path)
print("Configuration file created at %r" % config_path)
return
# Fetch config path from `--config` if provided, otherwise we want it to
# default to None so that the underlying machinery in `configure_app` will
# process default path or environment variable.
config_path = args.config
    # Overlay our config w/ defaults
try:
configure_app(config_path=config_path, **kwargs)
except ValueError as err:
parser.exit(status=2, message=str(err) + "\n")
# Call Django management command
management.execute_from_command_line([runner_name, command] + command_args)
# Exit cleanly
sys.exit(0)
if __name__ == "__main__":
run_app()
| true
| true
|
7909af8d6a7f5a564ddb44af1fea43bf16529228
| 1,919
|
py
|
Python
|
data_ops.py
|
samiulshuvo/se_relativisticgan
|
5501c4d96faa03eb3c1fd776b232b68940183f4d
|
[
"MIT"
] | 55
|
2019-02-17T11:40:22.000Z
|
2022-03-30T10:10:00.000Z
|
data_ops.py
|
samiulshuvo/se_relativisticgan
|
5501c4d96faa03eb3c1fd776b232b68940183f4d
|
[
"MIT"
] | 14
|
2019-02-17T11:47:12.000Z
|
2021-03-21T11:15:14.000Z
|
data_ops.py
|
deepakbaby/se_relativisticgan
|
5501c4d96faa03eb3c1fd776b232b68940183f4d
|
[
"MIT"
] | 18
|
2018-11-03T15:05:10.000Z
|
2022-01-12T03:46:30.000Z
|
"""
Data processing routines
Deepak Baby, UGent, June 2018
deepak.baby@ugent.be
"""
import numpy as np
def reconstruct_wav(wavmat, stride_factor=0.5):
"""
Reconstructs the audiofile from sliced matrix wavmat
"""
window_length = wavmat.shape[1]
window_stride = int(stride_factor * window_length)
wav_length = (wavmat.shape[0] -1 ) * window_stride + window_length
wav_recon = np.zeros((1,wav_length))
#print ("wav recon shape " + str(wav_recon.shape))
for k in range (wavmat.shape[0]):
wav_beg = k * window_stride
wav_end = wav_beg + window_length
wav_recon[0, wav_beg:wav_end] += wavmat[k, :]
# now compute the scaling factor for multiple instances
noverlap = int(np.ceil(1/stride_factor))
scale_ = (1/float(noverlap)) * np.ones((1, wav_length))
for s in range(noverlap-1):
s_beg = s * window_stride
s_end = s_beg + window_stride
scale_[0, s_beg:s_end] = 1/ (s+1)
scale_[0, -s_beg - 1 : -s_end:-1] = 1/ (s+1)
return wav_recon * scale_
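# Editor's illustrative note (not part of the original module): with the default
# stride_factor of 0.5, a wavmat of shape (num_windows, window_length) is
# overlap-added back into a (1, wav_length) array where
#     wav_length = (num_windows - 1) * int(0.5 * window_length) + window_length
# e.g. a (3, 4) wavmat reconstructs to an array of shape (1, 8).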
def pre_emph(x, coeff=0.95):
"""
Apply pre_emph on 2d data (batch_size x window_length)
"""
#print ("x shape: " + str(x.shape))
x0 = x[:, 0]
x0 = np.expand_dims(x0, axis=1)
diff = x[:, 1:] - coeff * x[:, :-1]
x_preemph = np.concatenate((x0, diff), axis=1)
if not x.shape == x_preemph.shape:
print ("ERROR: Pre-emphasis is wrong")
#print ("x_preemph shape: " + str(x_preemph.shape))
return x_preemph
def de_emph(y, coeff=0.95):
"""
Apply de_emphasis on test data: works only on 1d data
"""
if coeff <= 0:
return y
x = np.zeros((y.shape[0],), dtype=np.float32)
#print("in_shape" + str(y.shape))
x[0] = y[0]
for n in range(1, y.shape[0], 1):
x[n] = coeff * x[n - 1] + y[n]
return x
def data_preprocess(wav, preemph=0.95):
wav = (2./65535.) * (wav.astype('float32') - 32767) + 1.
if preemph > 0:
wav = pre_emph(wav, coeff=preemph)
return wav.astype('float32')
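if __name__ == '__main__':
    # Editor's illustrative sketch (not part of the original module): de_emph
    # inverts pre_emph row-wise, up to float rounding, using only the functions
    # defined above and arbitrary random data.
    _x = np.random.randn(1, 8).astype('float32')
    _y = pre_emph(_x, coeff=0.95)
    _x_rec = de_emph(_y[0, :], coeff=0.95)
    assert np.allclose(_x[0, :], _x_rec, atol=1e-4)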
| 29.075758
| 68
| 0.642522
|
import numpy as np
def reconstruct_wav(wavmat, stride_factor=0.5):
window_length = wavmat.shape[1]
window_stride = int(stride_factor * window_length)
wav_length = (wavmat.shape[0] -1 ) * window_stride + window_length
wav_recon = np.zeros((1,wav_length))
for k in range (wavmat.shape[0]):
wav_beg = k * window_stride
wav_end = wav_beg + window_length
wav_recon[0, wav_beg:wav_end] += wavmat[k, :]
noverlap = int(np.ceil(1/stride_factor))
scale_ = (1/float(noverlap)) * np.ones((1, wav_length))
for s in range(noverlap-1):
s_beg = s * window_stride
s_end = s_beg + window_stride
scale_[0, s_beg:s_end] = 1/ (s+1)
scale_[0, -s_beg - 1 : -s_end:-1] = 1/ (s+1)
return wav_recon * scale_
def pre_emph(x, coeff=0.95):
x0 = x[:, 0]
x0 = np.expand_dims(x0, axis=1)
diff = x[:, 1:] - coeff * x[:, :-1]
x_preemph = np.concatenate((x0, diff), axis=1)
if not x.shape == x_preemph.shape:
print ("ERROR: Pre-emphasis is wrong")
return x_preemph
def de_emph(y, coeff=0.95):
if coeff <= 0:
return y
x = np.zeros((y.shape[0],), dtype=np.float32)
x[0] = y[0]
for n in range(1, y.shape[0], 1):
x[n] = coeff * x[n - 1] + y[n]
return x
def data_preprocess(wav, preemph=0.95):
wav = (2./65535.) * (wav.astype('float32') - 32767) + 1.
if preemph > 0:
wav = pre_emph(wav, coeff=preemph)
return wav.astype('float32')
| true
| true
|
7909b0835b03ed2a02876ab4ba46c2114932bbe2
| 571
|
py
|
Python
|
src/name_collect.py
|
StrinTH/DrHelp
|
76cdcd549f6c8ad6315e5c4557793c622a833c6a
|
[
"MIT"
] | 1
|
2021-12-02T15:04:08.000Z
|
2021-12-02T15:04:08.000Z
|
src/name_collect.py
|
StrinTH/DrHelp
|
76cdcd549f6c8ad6315e5c4557793c622a833c6a
|
[
"MIT"
] | null | null | null |
src/name_collect.py
|
StrinTH/DrHelp
|
76cdcd549f6c8ad6315e5c4557793c622a833c6a
|
[
"MIT"
] | 1
|
2021-12-02T15:04:09.000Z
|
2021-12-02T15:04:09.000Z
|
from soup import soup_collector
def name_collector(spl_id, spl_type):
soup = soup_collector(spl_id, spl_type)
sample_info_type = soup.findAll('a')
#unwanted till now START
try:
sample_info_name1 = sample_info_type[0].get('name').split('_')[1].strip()
sample_info_name2 = sample_info_type[0].get('name').split('_')[2].strip()
sample_info_name = sample_info_name1 + "_" + sample_info_name2
except:
sample_info_name = sample_info_type[0].get('name').split('_')[1].strip()
return sample_info_name
#END
#intro
| 33.588235
| 81
| 0.677758
|
from soup import soup_collector
def name_collector(spl_id, spl_type):
soup = soup_collector(spl_id, spl_type)
sample_info_type = soup.findAll('a')
try:
sample_info_name1 = sample_info_type[0].get('name').split('_')[1].strip()
sample_info_name2 = sample_info_type[0].get('name').split('_')[2].strip()
sample_info_name = sample_info_name1 + "_" + sample_info_name2
except:
sample_info_name = sample_info_type[0].get('name').split('_')[1].strip()
return sample_info_name
| true
| true
|
7909b0e5bf3a815a35845be4461c057339225a5f
| 2,030
|
py
|
Python
|
src/sequencer_osx/sequencer_osx.py
|
NFJones/python-midi
|
baf306104e993286cce8cff912736459e9f880ee
|
[
"MIT"
] | 1
|
2021-09-07T12:34:35.000Z
|
2021-09-07T12:34:35.000Z
|
src/sequencer_osx/sequencer_osx.py
|
NFJones/python-midi
|
baf306104e993286cce8cff912736459e9f880ee
|
[
"MIT"
] | null | null | null |
src/sequencer_osx/sequencer_osx.py
|
NFJones/python-midi
|
baf306104e993286cce8cff912736459e9f880ee
|
[
"MIT"
] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.31
#
# Don't modify this file, modify the SWIG interface instead.
# This file is compatible with both classic and new-style classes.
import _sequencer_osx
import new
new_instancemethod = new.instancemethod
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if name == "thisown":
return self.this.own(value)
if name == "this":
if type(value).__name__ == "PySwigObject":
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static) or hasattr(self, name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if name == "thisown":
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError(name)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (
self.__class__.__module__,
self.__class__.__name__,
strthis,
)
import types
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
del types
_MIDIGetNumberOfDevices = _sequencer_osx._MIDIGetNumberOfDevices
_MIDIClientCreate = _sequencer_osx._MIDIClientCreate
_MIDIClientDispose = _sequencer_osx._MIDIClientDispose
_MIDISourceCreate = _sequencer_osx._MIDISourceCreate
_MIDIOutputPortCreate = _sequencer_osx._MIDIOutputPortCreate
_MIDIPortConnectSource = _sequencer_osx._MIDIPortConnectSource
| 26.025641
| 70
| 0.698522
|
# This file is compatible with both classic and new-style classes.
import _sequencer_osx
import new
new_instancemethod = new.instancemethod
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if name == "thisown":
return self.this.own(value)
if name == "this":
if type(value).__name__ == "PySwigObject":
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static) or hasattr(self, name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if name == "thisown":
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError(name)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (
self.__class__.__module__,
self.__class__.__name__,
strthis,
)
import types
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
del types
_MIDIGetNumberOfDevices = _sequencer_osx._MIDIGetNumberOfDevices
_MIDIClientCreate = _sequencer_osx._MIDIClientCreate
_MIDIClientDispose = _sequencer_osx._MIDIClientDispose
_MIDISourceCreate = _sequencer_osx._MIDISourceCreate
_MIDIOutputPortCreate = _sequencer_osx._MIDIOutputPortCreate
_MIDIPortConnectSource = _sequencer_osx._MIDIPortConnectSource
| true
| true
|
7909b180475865b79fdf49655ea6e1c4768a2cbd
| 4,436
|
py
|
Python
|
management_layer/metrics.py
|
hedleyroos/core-management-layer
|
2a25bf5fb44fd511b8b2626ec09a4bc05098334c
|
[
"BSD-3-Clause"
] | null | null | null |
management_layer/metrics.py
|
hedleyroos/core-management-layer
|
2a25bf5fb44fd511b8b2626ec09a4bc05098334c
|
[
"BSD-3-Clause"
] | 90
|
2018-01-23T10:30:01.000Z
|
2019-01-31T10:53:42.000Z
|
management_layer/metrics.py
|
hedleyroos/core-management-layer
|
2a25bf5fb44fd511b8b2626ec09a4bc05098334c
|
[
"BSD-3-Clause"
] | 1
|
2021-08-17T14:16:23.000Z
|
2021-08-17T14:16:23.000Z
|
import asyncio
import functools
import logging
from types import FunctionType, ModuleType
from typing import Type
from prometheus_client import Histogram, Counter
logger = logging.getLogger(__name__)
H = Histogram(f"management_layer_call_duration_seconds", "API call duration (s)",
["call"])
def _prometheus_module_metric_decorator(f: FunctionType):
"""
A Prometheus decorator adding timing metrics to a function.
This decorator will work on both asynchronous and synchronous functions.
Note, however, that this function will turn synchronous functions into
asynchronous ones when used as a decorator.
:param f: The function for which to capture metrics
"""
module_ = f.__module__.split(".")[-1]
call_key = "{}_{}".format(module_, f.__name__)
@functools.wraps(f)
async def wrapper(*args, **kwargs):
with H.labels(call=call_key).time():
if asyncio.iscoroutinefunction(f):
return await f(*args, **kwargs)
else:
return f(*args, **kwargs)
return wrapper
def _prometheus_class_metric_decorator(f: FunctionType):
"""
A Prometheus decorator adding timing metrics to a function in a class.
This decorator will work on both asynchronous and synchronous functions.
Note, however, that this function will turn synchronous functions into
asynchronous ones when used as a decorator.
:param f: The function for which to capture metrics
"""
@functools.wraps(f)
async def wrapper(*args, **kwargs):
with H.labels(call=f.__name__).time():
if asyncio.iscoroutinefunction(f):
return await f(*args, **kwargs)
else:
return f(*args, **kwargs)
return wrapper
def add_prometheus_metrics_for_module(module_: ModuleType):
"""
Convenience function applying the Prometheus metrics decorator to the
specified module's functions.
:param module_: The module to which the instrumentation will be applied
"""
decorate_all_in_module(module_, _prometheus_module_metric_decorator, [])
def add_prometheus_metrics_for_class(klass: Type):
"""
Convenience function applying the Prometheus metrics decorator to the
specified class functions.
:param klass: The class to which the instrumentation will be applied
"""
decorate_all_in_class(klass, _prometheus_class_metric_decorator, [])
def decorate_all_in_module(module_: ModuleType, decorator: FunctionType, whitelist: list):
"""
Decorate all functions in a module with the specified decorator
:param module_: The module to interrogate
:param decorator: The decorator to apply
:param whitelist: Functions not to be decorated.
"""
for name in dir(module_):
if name not in whitelist:
obj = getattr(module_, name)
if isinstance(obj, FunctionType) or asyncio.iscoroutinefunction(obj):
# We only check functions that are defined in the module we
# specified. Some of the functions in the module may have been
# imported from other modules. These are ignored.
if obj.__module__ == module_.__name__:
logger.debug(f"Adding metrics to {module_}:{name}")
setattr(module_, name, decorator(obj))
else:
logger.debug(f"No metrics on {module_}:{name} because it belongs to another "
f"module")
else:
logger.debug(f"No metrics on {module_}:{name} because it is not a coroutine or "
f"function")
def decorate_all_in_class(klass: Type, decorator: FunctionType, whitelist: list):
"""
Decorate all functions in a class with the specified decorator
:param klass: The class to interrogate
:param decorator: The decorator to apply
:param whitelist: Functions not to be decorated.
"""
for name in dir(klass):
if name not in whitelist:
obj = getattr(klass, name)
if isinstance(obj, FunctionType) or asyncio.iscoroutinefunction(obj):
logger.debug(f"Adding metrics to {klass}:{name}")
setattr(klass, name, decorator(obj))
else:
logger.debug(f"No metrics on {klass}:{name} because it is not a coroutine or "
f"function")
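if __name__ == "__main__":
    # Editor's illustrative sketch (not part of the original module): after
    # add_prometheus_metrics_for_class, instrumented methods become coroutines
    # and must be awaited. The Demo class below is hypothetical.
    class Demo:
        def work(self):
            return 42

    add_prometheus_metrics_for_class(Demo)
    assert asyncio.run(Demo().work()) == 42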
| 38.241379
| 97
| 0.656673
|
import asyncio
import functools
import logging
from types import FunctionType, ModuleType
from typing import Type
from prometheus_client import Histogram, Counter
logger = logging.getLogger(__name__)
H = Histogram(f"management_layer_call_duration_seconds", "API call duration (s)",
["call"])
def _prometheus_module_metric_decorator(f: FunctionType):
module_ = f.__module__.split(".")[-1]
call_key = "{}_{}".format(module_, f.__name__)
@functools.wraps(f)
async def wrapper(*args, **kwargs):
with H.labels(call=call_key).time():
if asyncio.iscoroutinefunction(f):
return await f(*args, **kwargs)
else:
return f(*args, **kwargs)
return wrapper
def _prometheus_class_metric_decorator(f: FunctionType):
@functools.wraps(f)
async def wrapper(*args, **kwargs):
with H.labels(call=f.__name__).time():
if asyncio.iscoroutinefunction(f):
return await f(*args, **kwargs)
else:
return f(*args, **kwargs)
return wrapper
def add_prometheus_metrics_for_module(module_: ModuleType):
decorate_all_in_module(module_, _prometheus_module_metric_decorator, [])
def add_prometheus_metrics_for_class(klass: Type):
decorate_all_in_class(klass, _prometheus_class_metric_decorator, [])
def decorate_all_in_module(module_: ModuleType, decorator: FunctionType, whitelist: list):
for name in dir(module_):
if name not in whitelist:
obj = getattr(module_, name)
if isinstance(obj, FunctionType) or asyncio.iscoroutinefunction(obj):
if obj.__module__ == module_.__name__:
logger.debug(f"Adding metrics to {module_}:{name}")
setattr(module_, name, decorator(obj))
else:
logger.debug(f"No metrics on {module_}:{name} because it belongs to another "
f"module")
else:
logger.debug(f"No metrics on {module_}:{name} because it is not a coroutine or "
f"function")
def decorate_all_in_class(klass: Type, decorator: FunctionType, whitelist: list):
for name in dir(klass):
if name not in whitelist:
obj = getattr(klass, name)
if isinstance(obj, FunctionType) or asyncio.iscoroutinefunction(obj):
logger.debug(f"Adding metrics to {klass}:{name}")
setattr(klass, name, decorator(obj))
else:
logger.debug(f"No metrics on {klass}:{name} because it is not a coroutine or "
f"function")
| true
| true
|
7909b19a066340a1529eebfe78168bc8bd103b80
| 2,079
|
py
|
Python
|
pycad/web.py
|
CaptObvious/PyCAD
|
faf143798cbf6ffb4d6808681687a587d7b922a7
|
[
"WTFPL"
] | null | null | null |
pycad/web.py
|
CaptObvious/PyCAD
|
faf143798cbf6ffb4d6808681687a587d7b922a7
|
[
"WTFPL"
] | null | null | null |
pycad/web.py
|
CaptObvious/PyCAD
|
faf143798cbf6ffb4d6808681687a587d7b922a7
|
[
"WTFPL"
] | null | null | null |
import json
import re
from flask import g, redirect, send_file, session
from flask_openid import OpenID
from flask_session import Session
import pycad.controllers as controllers
import pycad.models as models
import pycad.schemas as schemas
from pycad import app
from pycad.config import config
STEAM_ID_REGEX = re.compile("steamcommunity.com/openid/id/(.*?)$")
app.config["SESSION_TYPE"] = config["session_storage_type"]
app.secret_key = config["secret_key"]
Session(app)
oid = OpenID(app)
@app.route("/login_redirect")
@oid.loginhandler
def login():
if session.get("user_id", None) is not None:
return redirect(oid.get_next_url())
return oid.try_login("https://steamcommunity.com/openid")
@oid.after_login
def after_login(response):
session["user_id"] = STEAM_ID_REGEX.search(response.identity_url).group(1)
g.user = models.User.get_or_create(session["user_id"])
return redirect(oid.get_next_url())
@app.route("/logout")
def logout():
session.pop("user_id", None)
g.user = None
return redirect(oid.get_next_url())
@app.route("/")
@app.route("/<path:path>")
def index(path="index.html"):
allowed_prefixes = [
"css",
"fonts",
"images",
"js"
]
# If they're not requesting static assets we return index.html as this allows single page applications to
# be deeplinked
if path.split("/")[0] not in allowed_prefixes:
return send_file("../static/index.html")
return send_file("../static/" + path)
@app.route("/api")
@app.route("/api/<path:path>")
def api(path=""):
return "API! " + path + "<br />" + session.get("user_id", ""), 200
@app.route("/api/user")
def current_user():
if not (session.get("user_id", None)):
return "Unauthorized.", 401
return user_by_id(session["user_id"])
@app.route("/api/user/<user_id>")
def user_by_id(user_id):
user = models.User.get_or_create(user_id)
return json.dumps(schemas.UserSchema().dump(user), indent=4)
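# Editor's illustrative note (not part of the original module): STEAM_ID_REGEX
# above extracts the numeric id from an OpenID identity URL (made-up id):
#     STEAM_ID_REGEX.search(
#         "https://steamcommunity.com/openid/id/76561198000000000").group(1)
#     -> "76561198000000000"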
| 24.458824
| 110
| 0.655604
|
import json
import re
from flask import g, redirect, send_file, session
from flask_openid import OpenID
from flask_session import Session
import pycad.controllers as controllers
import pycad.models as models
import pycad.schemas as schemas
from pycad import app
from pycad.config import config
STEAM_ID_REGEX = re.compile("steamcommunity.com/openid/id/(.*?)$")
app.config["SESSION_TYPE"] = config["session_storage_type"]
app.secret_key = config["secret_key"]
Session(app)
oid = OpenID(app)
@app.route("/login_redirect")
@oid.loginhandler
def login():
if session.get("user_id", None) is not None:
return redirect(oid.get_next_url())
return oid.try_login("https://steamcommunity.com/openid")
@oid.after_login
def after_login(response):
session["user_id"] = STEAM_ID_REGEX.search(response.identity_url).group(1)
g.user = models.User.get_or_create(session["user_id"])
return redirect(oid.get_next_url())
@app.route("/logout")
def logout():
session.pop("user_id", None)
g.user = None
return redirect(oid.get_next_url())
@app.route("/")
@app.route("/<path:path>")
def index(path="index.html"):
allowed_prefixes = [
"css",
"fonts",
"images",
"js"
]
# be deeplinked
if path.split("/")[0] not in allowed_prefixes:
return send_file("../static/index.html")
return send_file("../static/" + path)
@app.route("/api")
@app.route("/api/<path:path>")
def api(path=""):
return "API! " + path + "<br />" + session.get("user_id", ""), 200
@app.route("/api/user")
def current_user():
if not (session.get("user_id", None)):
return "Unauthorized.", 401
return user_by_id(session["user_id"])
@app.route("/api/user/<user_id>")
def user_by_id(user_id):
user = models.User.get_or_create(user_id)
return json.dumps(schemas.UserSchema().dump(user), indent=4)
| true
| true
|
7909b229c2c15bce6eb24f4deac67c97ce8d66dd
| 1,861
|
py
|
Python
|
Python/shutdown.py
|
gwyatt40/Engineering_4_Notebook
|
9925b8290b61c143918ad1c1a9eeeccc368dd83a
|
[
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | null | null | null |
Python/shutdown.py
|
gwyatt40/Engineering_4_Notebook
|
9925b8290b61c143918ad1c1a9eeeccc368dd83a
|
[
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | null | null | null |
Python/shutdown.py
|
gwyatt40/Engineering_4_Notebook
|
9925b8290b61c143918ad1c1a9eeeccc368dd83a
|
[
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | null | null | null |
# import time and GPIO
import time
import RPi.GPIO as GPIO
# set pin
reset_shutdown_pin = 18 # changed from original pin
# suppress warnings
GPIO.setwarnings(False)
# GPIO Mode for pin numbers
GPIO.setmode(GPIO.BCM)
# use internal pull up resistor
GPIO.setup(reset_shutdown_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# define restart function
def restart():
print("restarting Pi")
command = "/usr/bin/sudo /sbin/shutdown -r now"
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print(output)
# define shutdown function
def shut_down():
print("shutting down")
command = "/usr/bin/sudo /sbin/shutdown -h now" #shutdown command
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print(output)
while True:
# delay so doesn't use too much processing power
time.sleep(0.5)
# waits for button press w/ debounce
channel = GPIO.wait_for_edge(reset_shutdown_pin, GPIO.FALLING, bouncetime=200)
if channel is None:
print('Timeout occurred')
else:
print('Edge detected on channel', channel)
        # For troubleshooting, uncomment this line to output button status on
        # the command line.
#print('GPIO state is = ', GPIO.input(reset_shutdown_pin))
counter = 0
while GPIO.input(reset_shutdown_pin) == False:
            # For troubleshooting, uncomment this line to view the counter. If it
            # reaches a value above 4, we will restart.
#print(counter)
counter += 1
time.sleep(0.5)
# long button press = shutdown
if counter > 4:
shut_down() # run shutdown function
# short button press = restart
restart() # run restart function
| 35.113208
| 82
| 0.670607
|
import time
import RPi.GPIO as GPIO
reset_shutdown_pin = 18
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(reset_shutdown_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def restart():
print("restarting Pi")
command = "/usr/bin/sudo /sbin/shutdown -r now"
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print(output)
def shut_down():
print("shutting down")
command = "/usr/bin/sudo /sbin/shutdown -h now"
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print(output)
while True:
time.sleep(0.5)
# waits for button press w/ debounce
channel = GPIO.wait_for_edge(reset_shutdown_pin, GPIO.FALLING, bouncetime=200)
if channel is None:
print('Timeout occurred')
else:
print('Edge detected on channel', channel)
        # For troubleshooting, uncomment this line to output button status on
        # the command line.
#print('GPIO state is = ', GPIO.input(reset_shutdown_pin))
counter = 0
while GPIO.input(reset_shutdown_pin) == False:
            # For troubleshooting, uncomment this line to view the counter. If it
            # reaches a value above 4, we will restart.
#print(counter)
counter += 1
time.sleep(0.5)
# long button press = shutdown
if counter > 4:
shut_down() # run shutdown function
# short button press = restart
restart() # run restart function
| false
| true
|
7909b25e555d9147a186e2a33ff0ab82fd77802b
| 951
|
py
|
Python
|
cowait/utils/version.py
|
ProgHaj/cowait
|
e95c30faab8caf8b0413de4e1784529a3a06475d
|
[
"Apache-2.0"
] | 2
|
2021-08-11T08:51:42.000Z
|
2021-08-11T08:55:19.000Z
|
cowait/utils/version.py
|
ProgHaj/cowait
|
e95c30faab8caf8b0413de4e1784529a3a06475d
|
[
"Apache-2.0"
] | null | null | null |
cowait/utils/version.py
|
ProgHaj/cowait
|
e95c30faab8caf8b0413de4e1784529a3a06475d
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
import re
import pkg_resources
VERSION_FORMAT = re.compile('([0-9]+)\\.([0-9]+)\\.([0-9]+)')
class Version(object):
def __init__(self, major: int, minor: int, revision: int):
self.major = major
self.minor = minor
self.revision = revision
def __str__(self) -> str:
return f'{self.major}.{self.minor}.{self.revision}'
def is_compatible(self):
if self.minor < 3:
return False
return True
@staticmethod
def current():
return Version.parse(version_string())
@staticmethod
def parse(version: str) -> Version:
if re.match(VERSION_FORMAT, version) is None:
raise ValueError(f'Illegal version string {version}')
major, minor, rev = map(lambda v: int(v), version.split('.'))
return Version(major, minor, rev)
def version_string():
return pkg_resources.require("cowait")[0].version
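if __name__ == '__main__':
    # Editor's illustrative sketch (not part of the original module): parsing
    # and the minor-version compatibility check, with arbitrary version strings.
    v = Version.parse('1.3.0')
    assert str(v) == '1.3.0' and v.is_compatible()
    assert not Version.parse('1.2.9').is_compatible()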
| 26.416667
| 69
| 0.624606
|
from __future__ import annotations
import re
import pkg_resources
VERSION_FORMAT = re.compile('([0-9]+)\\.([0-9]+)\\.([0-9]+)')
class Version(object):
def __init__(self, major: int, minor: int, revision: int):
self.major = major
self.minor = minor
self.revision = revision
def __str__(self) -> str:
return f'{self.major}.{self.minor}.{self.revision}'
def is_compatible(self):
if self.minor < 3:
return False
return True
@staticmethod
def current():
return Version.parse(version_string())
@staticmethod
def parse(version: str) -> Version:
if re.match(VERSION_FORMAT, version) is None:
raise ValueError(f'Illegal version string {version}')
major, minor, rev = map(lambda v: int(v), version.split('.'))
return Version(major, minor, rev)
def version_string():
return pkg_resources.require("cowait")[0].version
| true
| true
|
7909b318d3d5b680fe11b41753618c7279e8ab33
| 21,334
|
py
|
Python
|
src/oci/jms/models/jre_usage.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/jms/models/jre_usage.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/jms/models/jre_usage.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class JreUsage(object):
"""
Java Runtime usage during a specified time period. A Java Runtime is identified by its vendor and version.
"""
#: A constant which can be used with the security_status property of a JreUsage.
#: This constant has a value of "UNKNOWN"
SECURITY_STATUS_UNKNOWN = "UNKNOWN"
#: A constant which can be used with the security_status property of a JreUsage.
#: This constant has a value of "UP_TO_DATE"
SECURITY_STATUS_UP_TO_DATE = "UP_TO_DATE"
#: A constant which can be used with the security_status property of a JreUsage.
#: This constant has a value of "UPDATE_REQUIRED"
SECURITY_STATUS_UPDATE_REQUIRED = "UPDATE_REQUIRED"
#: A constant which can be used with the security_status property of a JreUsage.
#: This constant has a value of "UPGRADE_REQUIRED"
SECURITY_STATUS_UPGRADE_REQUIRED = "UPGRADE_REQUIRED"
def __init__(self, **kwargs):
"""
Initializes a new JreUsage object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param id:
The value to assign to the id property of this JreUsage.
:type id: str
:param fleet_id:
The value to assign to the fleet_id property of this JreUsage.
:type fleet_id: str
:param managed_instance_id:
The value to assign to the managed_instance_id property of this JreUsage.
:type managed_instance_id: str
:param security_status:
The value to assign to the security_status property of this JreUsage.
Allowed values for this property are: "UNKNOWN", "UP_TO_DATE", "UPDATE_REQUIRED", "UPGRADE_REQUIRED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type security_status: str
:param release_date:
The value to assign to the release_date property of this JreUsage.
:type release_date: datetime
:param end_of_support_life_date:
The value to assign to the end_of_support_life_date property of this JreUsage.
:type end_of_support_life_date: datetime
:param vendor:
The value to assign to the vendor property of this JreUsage.
:type vendor: str
:param distribution:
The value to assign to the distribution property of this JreUsage.
:type distribution: str
:param version:
The value to assign to the version property of this JreUsage.
:type version: str
:param operating_systems:
The value to assign to the operating_systems property of this JreUsage.
:type operating_systems: list[oci.jms.models.OperatingSystem]
:param approximate_installation_count:
The value to assign to the approximate_installation_count property of this JreUsage.
:type approximate_installation_count: int
:param approximate_application_count:
The value to assign to the approximate_application_count property of this JreUsage.
:type approximate_application_count: int
:param approximate_managed_instance_count:
The value to assign to the approximate_managed_instance_count property of this JreUsage.
:type approximate_managed_instance_count: int
:param time_start:
The value to assign to the time_start property of this JreUsage.
:type time_start: datetime
:param time_end:
The value to assign to the time_end property of this JreUsage.
:type time_end: datetime
:param time_first_seen:
The value to assign to the time_first_seen property of this JreUsage.
:type time_first_seen: datetime
:param time_last_seen:
The value to assign to the time_last_seen property of this JreUsage.
:type time_last_seen: datetime
"""
self.swagger_types = {
'id': 'str',
'fleet_id': 'str',
'managed_instance_id': 'str',
'security_status': 'str',
'release_date': 'datetime',
'end_of_support_life_date': 'datetime',
'vendor': 'str',
'distribution': 'str',
'version': 'str',
'operating_systems': 'list[OperatingSystem]',
'approximate_installation_count': 'int',
'approximate_application_count': 'int',
'approximate_managed_instance_count': 'int',
'time_start': 'datetime',
'time_end': 'datetime',
'time_first_seen': 'datetime',
'time_last_seen': 'datetime'
}
self.attribute_map = {
'id': 'id',
'fleet_id': 'fleetId',
'managed_instance_id': 'managedInstanceId',
'security_status': 'securityStatus',
'release_date': 'releaseDate',
'end_of_support_life_date': 'endOfSupportLifeDate',
'vendor': 'vendor',
'distribution': 'distribution',
'version': 'version',
'operating_systems': 'operatingSystems',
'approximate_installation_count': 'approximateInstallationCount',
'approximate_application_count': 'approximateApplicationCount',
'approximate_managed_instance_count': 'approximateManagedInstanceCount',
'time_start': 'timeStart',
'time_end': 'timeEnd',
'time_first_seen': 'timeFirstSeen',
'time_last_seen': 'timeLastSeen'
}
self._id = None
self._fleet_id = None
self._managed_instance_id = None
self._security_status = None
self._release_date = None
self._end_of_support_life_date = None
self._vendor = None
self._distribution = None
self._version = None
self._operating_systems = None
self._approximate_installation_count = None
self._approximate_application_count = None
self._approximate_managed_instance_count = None
self._time_start = None
self._time_end = None
self._time_first_seen = None
self._time_last_seen = None
@property
def id(self):
"""
Gets the id of this JreUsage.
The internal identifier of the Java Runtime.
:return: The id of this JreUsage.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this JreUsage.
The internal identifier of the Java Runtime.
:param id: The id of this JreUsage.
:type: str
"""
self._id = id
@property
def fleet_id(self):
"""
Gets the fleet_id of this JreUsage.
The `OCID`__ of the related fleet. This property value is present only for /actions/listJreUsage.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The fleet_id of this JreUsage.
:rtype: str
"""
return self._fleet_id
@fleet_id.setter
def fleet_id(self, fleet_id):
"""
Sets the fleet_id of this JreUsage.
The `OCID`__ of the related fleet. This property value is present only for /actions/listJreUsage.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param fleet_id: The fleet_id of this JreUsage.
:type: str
"""
self._fleet_id = fleet_id
@property
def managed_instance_id(self):
"""
Gets the managed_instance_id of this JreUsage.
The `OCID`__ of the related managed instance. This property value is present only for /actions/listJreUsage.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The managed_instance_id of this JreUsage.
:rtype: str
"""
return self._managed_instance_id
@managed_instance_id.setter
def managed_instance_id(self, managed_instance_id):
"""
Sets the managed_instance_id of this JreUsage.
The `OCID`__ of the related managed instance. This property value is present only for /actions/listJreUsage.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param managed_instance_id: The managed_instance_id of this JreUsage.
:type: str
"""
self._managed_instance_id = managed_instance_id
@property
def security_status(self):
"""
Gets the security_status of this JreUsage.
The security status of the Java Runtime.
Allowed values for this property are: "UNKNOWN", "UP_TO_DATE", "UPDATE_REQUIRED", "UPGRADE_REQUIRED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The security_status of this JreUsage.
:rtype: str
"""
return self._security_status
@security_status.setter
def security_status(self, security_status):
"""
Sets the security_status of this JreUsage.
The security status of the Java Runtime.
:param security_status: The security_status of this JreUsage.
:type: str
"""
allowed_values = ["UNKNOWN", "UP_TO_DATE", "UPDATE_REQUIRED", "UPGRADE_REQUIRED"]
if not value_allowed_none_or_none_sentinel(security_status, allowed_values):
security_status = 'UNKNOWN_ENUM_VALUE'
self._security_status = security_status
@property
def release_date(self):
"""
Gets the release_date of this JreUsage.
The release date of the Java Runtime (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:return: The release_date of this JreUsage.
:rtype: datetime
"""
return self._release_date
@release_date.setter
def release_date(self, release_date):
"""
Sets the release_date of this JreUsage.
The release date of the Java Runtime (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:param release_date: The release_date of this JreUsage.
:type: datetime
"""
self._release_date = release_date
@property
def end_of_support_life_date(self):
"""
Gets the end_of_support_life_date of this JreUsage.
The End of Support Life (EOSL) date of the Java Runtime (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:return: The end_of_support_life_date of this JreUsage.
:rtype: datetime
"""
return self._end_of_support_life_date
@end_of_support_life_date.setter
def end_of_support_life_date(self, end_of_support_life_date):
"""
Sets the end_of_support_life_date of this JreUsage.
The End of Support Life (EOSL) date of the Java Runtime (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:param end_of_support_life_date: The end_of_support_life_date of this JreUsage.
:type: datetime
"""
self._end_of_support_life_date = end_of_support_life_date
@property
def vendor(self):
"""
**[Required]** Gets the vendor of this JreUsage.
The vendor of the Java Runtime.
:return: The vendor of this JreUsage.
:rtype: str
"""
return self._vendor
@vendor.setter
def vendor(self, vendor):
"""
Sets the vendor of this JreUsage.
The vendor of the Java Runtime.
:param vendor: The vendor of this JreUsage.
:type: str
"""
self._vendor = vendor
@property
def distribution(self):
"""
**[Required]** Gets the distribution of this JreUsage.
The distribution of a Java Runtime is the name of the lineage of product to which it belongs, for example _Java(TM) SE Runtime Environment_.
:return: The distribution of this JreUsage.
:rtype: str
"""
return self._distribution
@distribution.setter
def distribution(self, distribution):
"""
Sets the distribution of this JreUsage.
The distribution of a Java Runtime is the name of the lineage of product to which it belongs, for example _Java(TM) SE Runtime Environment_.
:param distribution: The distribution of this JreUsage.
:type: str
"""
self._distribution = distribution
@property
def version(self):
"""
**[Required]** Gets the version of this JreUsage.
The version of the Java Runtime.
:return: The version of this JreUsage.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this JreUsage.
The version of the Java Runtime.
:param version: The version of this JreUsage.
:type: str
"""
self._version = version
@property
def operating_systems(self):
"""
Gets the operating_systems of this JreUsage.
The operating systems that have this Java Runtime installed.
:return: The operating_systems of this JreUsage.
:rtype: list[oci.jms.models.OperatingSystem]
"""
return self._operating_systems
@operating_systems.setter
def operating_systems(self, operating_systems):
"""
Sets the operating_systems of this JreUsage.
The operating systems that have this Java Runtime installed.
:param operating_systems: The operating_systems of this JreUsage.
:type: list[oci.jms.models.OperatingSystem]
"""
self._operating_systems = operating_systems
@property
def approximate_installation_count(self):
"""
Gets the approximate_installation_count of this JreUsage.
The approximate count of installations that are installations of this Java Runtime.
:return: The approximate_installation_count of this JreUsage.
:rtype: int
"""
return self._approximate_installation_count
@approximate_installation_count.setter
def approximate_installation_count(self, approximate_installation_count):
"""
Sets the approximate_installation_count of this JreUsage.
The approximate count of installations that are installations of this Java Runtime.
:param approximate_installation_count: The approximate_installation_count of this JreUsage.
:type: int
"""
self._approximate_installation_count = approximate_installation_count
@property
def approximate_application_count(self):
"""
Gets the approximate_application_count of this JreUsage.
The approximate count of the applications running on this Java Runtime.
:return: The approximate_application_count of this JreUsage.
:rtype: int
"""
return self._approximate_application_count
@approximate_application_count.setter
def approximate_application_count(self, approximate_application_count):
"""
Sets the approximate_application_count of this JreUsage.
The approximate count of the applications running on this Java Runtime.
:param approximate_application_count: The approximate_application_count of this JreUsage.
:type: int
"""
self._approximate_application_count = approximate_application_count
@property
def approximate_managed_instance_count(self):
"""
Gets the approximate_managed_instance_count of this JreUsage.
The approximate count of the managed instances that report this Java Runtime.
:return: The approximate_managed_instance_count of this JreUsage.
:rtype: int
"""
return self._approximate_managed_instance_count
@approximate_managed_instance_count.setter
def approximate_managed_instance_count(self, approximate_managed_instance_count):
"""
Sets the approximate_managed_instance_count of this JreUsage.
The approximate count of the managed instances that report this Java Runtime.
:param approximate_managed_instance_count: The approximate_managed_instance_count of this JreUsage.
:type: int
"""
self._approximate_managed_instance_count = approximate_managed_instance_count
@property
def time_start(self):
"""
Gets the time_start of this JreUsage.
Lower bound of the specified time period filter. JMS provides a view of the data that is _per day_. The query uses only the date element of the parameter.
:return: The time_start of this JreUsage.
:rtype: datetime
"""
return self._time_start
@time_start.setter
def time_start(self, time_start):
"""
Sets the time_start of this JreUsage.
Lower bound of the specified time period filter. JMS provides a view of the data that is _per day_. The query uses only the date element of the parameter.
:param time_start: The time_start of this JreUsage.
:type: datetime
"""
self._time_start = time_start
@property
def time_end(self):
"""
Gets the time_end of this JreUsage.
Upper bound of the specified time period filter. JMS provides a view of the data that is _per day_. The query uses only the date element of the parameter.
:return: The time_end of this JreUsage.
:rtype: datetime
"""
return self._time_end
@time_end.setter
def time_end(self, time_end):
"""
Sets the time_end of this JreUsage.
Upper bound of the specified time period filter. JMS provides a view of the data that is _per day_. The query uses only the date element of the parameter.
:param time_end: The time_end of this JreUsage.
:type: datetime
"""
self._time_end = time_end
@property
def time_first_seen(self):
"""
Gets the time_first_seen of this JreUsage.
The date and time the resource was _first_ reported to JMS.
This is potentially _before_ the specified time period provided by the filters.
For example, a resource can be first reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:return: The time_first_seen of this JreUsage.
:rtype: datetime
"""
return self._time_first_seen
@time_first_seen.setter
def time_first_seen(self, time_first_seen):
"""
Sets the time_first_seen of this JreUsage.
The date and time the resource was _first_ reported to JMS.
This is potentially _before_ the specified time period provided by the filters.
For example, a resource can be first reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:param time_first_seen: The time_first_seen of this JreUsage.
:type: datetime
"""
self._time_first_seen = time_first_seen
@property
def time_last_seen(self):
"""
Gets the time_last_seen of this JreUsage.
The date and time the resource was _last_ reported to JMS.
This is potentially _after_ the specified time period provided by the filters.
For example, a resource can be last reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:return: The time_last_seen of this JreUsage.
:rtype: datetime
"""
return self._time_last_seen
@time_last_seen.setter
def time_last_seen(self, time_last_seen):
"""
Sets the time_last_seen of this JreUsage.
The date and time the resource was _last_ reported to JMS.
This is potentially _after_ the specified time period provided by the filters.
For example, a resource can be last reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:param time_last_seen: The time_last_seen of this JreUsage.
:type: datetime
"""
self._time_last_seen = time_last_seen
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 34.465267
| 245
| 0.662089
|
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class JreUsage(object):
SECURITY_STATUS_UNKNOWN = "UNKNOWN"
SECURITY_STATUS_UP_TO_DATE = "UP_TO_DATE"
SECURITY_STATUS_UPDATE_REQUIRED = "UPDATE_REQUIRED"
SECURITY_STATUS_UPGRADE_REQUIRED = "UPGRADE_REQUIRED"
def __init__(self, **kwargs):
self.swagger_types = {
'id': 'str',
'fleet_id': 'str',
'managed_instance_id': 'str',
'security_status': 'str',
'release_date': 'datetime',
'end_of_support_life_date': 'datetime',
'vendor': 'str',
'distribution': 'str',
'version': 'str',
'operating_systems': 'list[OperatingSystem]',
'approximate_installation_count': 'int',
'approximate_application_count': 'int',
'approximate_managed_instance_count': 'int',
'time_start': 'datetime',
'time_end': 'datetime',
'time_first_seen': 'datetime',
'time_last_seen': 'datetime'
}
self.attribute_map = {
'id': 'id',
'fleet_id': 'fleetId',
'managed_instance_id': 'managedInstanceId',
'security_status': 'securityStatus',
'release_date': 'releaseDate',
'end_of_support_life_date': 'endOfSupportLifeDate',
'vendor': 'vendor',
'distribution': 'distribution',
'version': 'version',
'operating_systems': 'operatingSystems',
'approximate_installation_count': 'approximateInstallationCount',
'approximate_application_count': 'approximateApplicationCount',
'approximate_managed_instance_count': 'approximateManagedInstanceCount',
'time_start': 'timeStart',
'time_end': 'timeEnd',
'time_first_seen': 'timeFirstSeen',
'time_last_seen': 'timeLastSeen'
}
self._id = None
self._fleet_id = None
self._managed_instance_id = None
self._security_status = None
self._release_date = None
self._end_of_support_life_date = None
self._vendor = None
self._distribution = None
self._version = None
self._operating_systems = None
self._approximate_installation_count = None
self._approximate_application_count = None
self._approximate_managed_instance_count = None
self._time_start = None
self._time_end = None
self._time_first_seen = None
self._time_last_seen = None
@property
def id(self):
return self._id
@id.setter
def id(self, id):
self._id = id
@property
def fleet_id(self):
return self._fleet_id
@fleet_id.setter
def fleet_id(self, fleet_id):
self._fleet_id = fleet_id
@property
def managed_instance_id(self):
return self._managed_instance_id
@managed_instance_id.setter
def managed_instance_id(self, managed_instance_id):
self._managed_instance_id = managed_instance_id
@property
def security_status(self):
return self._security_status
@security_status.setter
def security_status(self, security_status):
allowed_values = ["UNKNOWN", "UP_TO_DATE", "UPDATE_REQUIRED", "UPGRADE_REQUIRED"]
if not value_allowed_none_or_none_sentinel(security_status, allowed_values):
security_status = 'UNKNOWN_ENUM_VALUE'
self._security_status = security_status
@property
def release_date(self):
return self._release_date
@release_date.setter
def release_date(self, release_date):
self._release_date = release_date
@property
def end_of_support_life_date(self):
return self._end_of_support_life_date
@end_of_support_life_date.setter
def end_of_support_life_date(self, end_of_support_life_date):
self._end_of_support_life_date = end_of_support_life_date
@property
def vendor(self):
return self._vendor
@vendor.setter
def vendor(self, vendor):
self._vendor = vendor
@property
def distribution(self):
return self._distribution
@distribution.setter
def distribution(self, distribution):
self._distribution = distribution
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
@property
def operating_systems(self):
return self._operating_systems
@operating_systems.setter
def operating_systems(self, operating_systems):
self._operating_systems = operating_systems
@property
def approximate_installation_count(self):
return self._approximate_installation_count
@approximate_installation_count.setter
def approximate_installation_count(self, approximate_installation_count):
self._approximate_installation_count = approximate_installation_count
@property
def approximate_application_count(self):
return self._approximate_application_count
@approximate_application_count.setter
def approximate_application_count(self, approximate_application_count):
self._approximate_application_count = approximate_application_count
@property
def approximate_managed_instance_count(self):
return self._approximate_managed_instance_count
@approximate_managed_instance_count.setter
def approximate_managed_instance_count(self, approximate_managed_instance_count):
self._approximate_managed_instance_count = approximate_managed_instance_count
@property
def time_start(self):
return self._time_start
@time_start.setter
def time_start(self, time_start):
self._time_start = time_start
@property
def time_end(self):
return self._time_end
@time_end.setter
def time_end(self, time_end):
self._time_end = time_end
@property
def time_first_seen(self):
return self._time_first_seen
@time_first_seen.setter
def time_first_seen(self, time_first_seen):
self._time_first_seen = time_first_seen
@property
def time_last_seen(self):
return self._time_last_seen
@time_last_seen.setter
def time_last_seen(self, time_last_seen):
self._time_last_seen = time_last_seen
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
7909b3419825cedff9c3e7e1130631d52fa8e9a6
| 1,150
|
py
|
Python
|
selection/layers/utils.py
|
icc2115/dl-selection
|
e39ef0e73bf631e413bac48db791aed617dd7e32
|
[
"MIT"
] | 8
|
2021-03-08T08:46:23.000Z
|
2022-02-28T12:19:50.000Z
|
selection/layers/utils.py
|
icc2115/dl-selection
|
e39ef0e73bf631e413bac48db791aed617dd7e32
|
[
"MIT"
] | null | null | null |
selection/layers/utils.py
|
icc2115/dl-selection
|
e39ef0e73bf631e413bac48db791aed617dd7e32
|
[
"MIT"
] | 2
|
2021-04-18T08:24:16.000Z
|
2022-02-28T12:19:54.000Z
|
import torch
import torch.nn.functional as F
def clamp_probs(probs):
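    # Clamp probabilities into the open interval (0, 1) by machine epsilon so the
    # log() calls in the samplers below never receive exactly 0 or 1.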
eps = torch.finfo(probs.dtype).eps
return torch.clamp(probs, min=eps, max=1-eps)
def concrete_sample(logits, temperature, shape=torch.Size([])):
'''
Sampling for Concrete distribution.
See Eq. 10 of Maddison et al., 2017.
'''
uniform_shape = torch.Size(shape) + logits.shape
u = clamp_probs(torch.rand(uniform_shape, dtype=torch.float32,
device=logits.device))
gumbels = - torch.log(- torch.log(u))
scores = (logits + gumbels) / temperature
return scores.softmax(dim=-1)
def bernoulli_concrete_sample(logits, temperature, shape=torch.Size([])):
'''
Sampling for BinConcrete distribution.
See PyTorch source code, differs from Eq. 16 of Maddison et al., 2017.
'''
uniform_shape = torch.Size(shape) + logits.shape
u = clamp_probs(torch.rand(uniform_shape, dtype=torch.float32,
device=logits.device))
return torch.sigmoid((F.logsigmoid(logits) - F.logsigmoid(-logits)
+ torch.log(u) - torch.log(1 - u)) / temperature)
| 34.848485
| 75
| 0.643478
|
import torch
import torch.nn.functional as F
def clamp_probs(probs):
eps = torch.finfo(probs.dtype).eps
return torch.clamp(probs, min=eps, max=1-eps)
def concrete_sample(logits, temperature, shape=torch.Size([])):
uniform_shape = torch.Size(shape) + logits.shape
u = clamp_probs(torch.rand(uniform_shape, dtype=torch.float32,
device=logits.device))
gumbels = - torch.log(- torch.log(u))
scores = (logits + gumbels) / temperature
return scores.softmax(dim=-1)
def bernoulli_concrete_sample(logits, temperature, shape=torch.Size([])):
uniform_shape = torch.Size(shape) + logits.shape
u = clamp_probs(torch.rand(uniform_shape, dtype=torch.float32,
device=logits.device))
return torch.sigmoid((F.logsigmoid(logits) - F.logsigmoid(-logits)
+ torch.log(u) - torch.log(1 - u)) / temperature)
| true
| true
|
7909b601f941d8b4ac0f27ce0115d87f03f7ac93
| 18
|
py
|
Python
|
tridesclous/version.py
|
caniko/tridesclous
|
9f412a42697561e3c7d8e3a35249cd13240239a0
|
[
"MIT"
] | 36
|
2016-01-27T22:27:12.000Z
|
2022-01-28T08:49:27.000Z
|
tridesclous/version.py
|
caniko/tridesclous
|
9f412a42697561e3c7d8e3a35249cd13240239a0
|
[
"MIT"
] | 87
|
2015-12-14T08:16:16.000Z
|
2022-03-22T14:35:55.000Z
|
tridesclous/version.py
|
caniko/tridesclous
|
9f412a42697561e3c7d8e3a35249cd13240239a0
|
[
"MIT"
] | 31
|
2015-11-10T14:37:28.000Z
|
2022-03-30T06:41:19.000Z
|
version = '1.6.4'
| 9
| 17
| 0.555556
|
version = '1.6.4'
| true
| true
|
7909b6a057cc685593eb5e717d4c920f4ad95040
| 1,354
|
py
|
Python
|
src/m1r_functions.py
|
ColinBalitewicz/03-AccumulatorsAndFunctionsWithParameters
|
fd7be18090b7b472cd181ad855032805da2f9cb7
|
[
"MIT"
] | null | null | null |
src/m1r_functions.py
|
ColinBalitewicz/03-AccumulatorsAndFunctionsWithParameters
|
fd7be18090b7b472cd181ad855032805da2f9cb7
|
[
"MIT"
] | null | null | null |
src/m1r_functions.py
|
ColinBalitewicz/03-AccumulatorsAndFunctionsWithParameters
|
fd7be18090b7b472cd181ad855032805da2f9cb7
|
[
"MIT"
] | null | null | null |
###############################################################################
#
# DONE:
#
# 1. READ the code below.
# 2. TRACE (by hand) the execution of the code,
# predicting what will get printed.
# 3. Run the code and compare your prediction to what actually was printed.
# 4. Decide whether you are 100% clear on the CONCEPTS and the NOTATIONS for:
# -- DEFINING a function that has PARAMETERS
# -- CALLING a function with actual ARGUMENTS.
#
# *****************************************************************************
# If you are NOT 100% clear on the above concepts,
# ask your instructor or a student assistant about them during class.
# *****************************************************************************
#
# After you have completed the above, mark this _TODO_ as DONE.
#
###############################################################################
def main():
hello("Snow White")
goodbye("Bashful")
hello("Grumpy")
hello("Sleepy")
hello_and_goodbye("Magic Mirror", "Cruel Queen")
def hello(friend):
print("Hello,", friend, "- how are things?")
def goodbye(friend):
print("Goodbye,", friend, '- see you later!')
print(' Ciao!')
print(' Bai bai!')
def hello_and_goodbye(person1, person2):
hello(person1)
goodbye(person2)
main()
| 28.808511
| 79
| 0.5
| true
| true
|
|
7909b772e038b647505e7007add8a614faaecd7e
| 14,656
|
py
|
Python
|
tutorials/image/cifar10/cifar10.py
|
blahster/tf-models
|
eaa4a000ef8e5f094764c42a590bb1c49b7b6f7c
|
[
"Apache-2.0"
] | 443
|
2017-11-15T15:26:44.000Z
|
2019-05-06T11:08:20.000Z
|
tutorials/image/cifar10/cifar10.py
|
blahster/tf-models
|
eaa4a000ef8e5f094764c42a590bb1c49b7b6f7c
|
[
"Apache-2.0"
] | 46
|
2017-11-22T15:27:58.000Z
|
2019-04-29T02:53:30.000Z
|
tutorials/image/cifar10/cifar10.py
|
blahster/tf-models
|
eaa4a000ef8e5f094764c42a590bb1c49b7b6f7c
|
[
"Apache-2.0"
] | 177
|
2017-11-16T10:46:44.000Z
|
2019-04-22T12:37:37.000Z
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builds the CIFAR-10 network.
Summary of available functions:
# Compute input images and labels for training. If you would like to run
# evaluations, use inputs() instead.
inputs, labels = distorted_inputs()
# Compute inference on the model inputs to make a prediction.
predictions = inference(inputs)
# Compute the total loss of the prediction with respect to the labels.
loss = loss(predictions, labels)
# Create a graph to run one step of training with respect to the loss.
train_op = train(loss, global_step)
"""
# pylint: disable=missing-docstring
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
import sys
import tarfile
from six.moves import urllib
import tensorflow as tf
import cifar10_input
FLAGS = tf.app.flags.FLAGS
# Basic model parameters.
tf.app.flags.DEFINE_integer('batch_size', 128,
"""Number of images to process in a batch.""")
tf.app.flags.DEFINE_string('data_dir', '/tmp/cifar10_data',
"""Path to the CIFAR-10 data directory.""")
tf.app.flags.DEFINE_boolean('use_fp16', False,
"""Train the model using fp16.""")
# Global constants describing the CIFAR-10 data set.
IMAGE_SIZE = cifar10_input.IMAGE_SIZE
NUM_CLASSES = cifar10_input.NUM_CLASSES
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = cifar10_input.NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = cifar10_input.NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
# Constants describing the training process.
MOVING_AVERAGE_DECAY = 0.9999 # The decay to use for the moving average.
NUM_EPOCHS_PER_DECAY = 350.0 # Epochs after which learning rate decays.
LEARNING_RATE_DECAY_FACTOR = 0.1 # Learning rate decay factor.
INITIAL_LEARNING_RATE = 0.1 # Initial learning rate.
# If a model is trained with multiple GPUs, prefix all Op names with tower_name
# to differentiate the operations. Note that this prefix is removed from the
# names of the summaries when visualizing a model.
TOWER_NAME = 'tower'
DATA_URL = 'http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz'
def _activation_summary(x):
"""Helper to create summaries for activations.
Creates a summary that provides a histogram of activations.
Creates a summary that measures the sparsity of activations.
Args:
x: Tensor
Returns:
nothing
"""
# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
# session. This helps the clarity of presentation on tensorboard.
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.summary.histogram(tensor_name + '/activations', x)
tf.summary.scalar(tensor_name + '/sparsity',
tf.nn.zero_fraction(x))
def _variable_on_cpu(name, shape, initializer):
"""Helper to create a Variable stored on CPU memory.
Args:
name: name of the variable
shape: list of ints
initializer: initializer for Variable
Returns:
Variable Tensor
"""
with tf.device('/cpu:0'):
dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
return var
def _variable_with_weight_decay(name, shape, stddev, wd):
"""Helper to create an initialized Variable with weight decay.
Note that the Variable is initialized with a truncated normal distribution.
A weight decay is added only if one is specified.
Args:
name: name of the variable
shape: list of ints
stddev: standard deviation of a truncated Gaussian
wd: add L2Loss weight decay multiplied by this float. If None, weight
decay is not added for this Variable.
Returns:
Variable Tensor
"""
dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
var = _variable_on_cpu(
name,
shape,
tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))
if wd is not None:
weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
return var
def distorted_inputs():
"""Construct distorted input for CIFAR training using the Reader ops.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
Raises:
ValueError: If no data_dir
"""
if not FLAGS.data_dir:
raise ValueError('Please supply a data_dir')
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
images, labels = cifar10_input.distorted_inputs(data_dir=data_dir,
batch_size=FLAGS.batch_size)
if FLAGS.use_fp16:
images = tf.cast(images, tf.float16)
labels = tf.cast(labels, tf.float16)
return images, labels
def inputs(eval_data):
"""Construct input for CIFAR evaluation using the Reader ops.
Args:
eval_data: bool, indicating if one should use the train or eval data set.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
Raises:
ValueError: If no data_dir
"""
if not FLAGS.data_dir:
raise ValueError('Please supply a data_dir')
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
images, labels = cifar10_input.inputs(eval_data=eval_data,
data_dir=data_dir,
batch_size=FLAGS.batch_size)
if FLAGS.use_fp16:
images = tf.cast(images, tf.float16)
labels = tf.cast(labels, tf.float16)
return images, labels
def inference(images):
"""Build the CIFAR-10 model.
Args:
images: Images returned from distorted_inputs() or inputs().
Returns:
Logits.
"""
# We instantiate all variables using tf.get_variable() instead of
# tf.Variable() in order to share variables across multiple GPU training runs.
# If we only ran this model on a single GPU, we could simplify this function
# by replacing all instances of tf.get_variable() with tf.Variable().
#
# conv1
with tf.variable_scope('conv1') as scope:
kernel = _variable_with_weight_decay('weights',
shape=[5, 5, 3, 64],
stddev=5e-2,
wd=0.0)
conv = tf.nn.conv2d(images, kernel, [1, 1, 1, 1], padding='SAME')
biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.0))
pre_activation = tf.nn.bias_add(conv, biases)
conv1 = tf.nn.relu(pre_activation, name=scope.name)
_activation_summary(conv1)
# pool1
pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1],
padding='SAME', name='pool1')
# norm1
norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm1')
# conv2
with tf.variable_scope('conv2') as scope:
kernel = _variable_with_weight_decay('weights',
shape=[5, 5, 64, 64],
stddev=5e-2,
wd=0.0)
conv = tf.nn.conv2d(norm1, kernel, [1, 1, 1, 1], padding='SAME')
biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.1))
pre_activation = tf.nn.bias_add(conv, biases)
conv2 = tf.nn.relu(pre_activation, name=scope.name)
_activation_summary(conv2)
# norm2
norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm2')
# pool2
pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1], padding='SAME', name='pool2')
# local3
with tf.variable_scope('local3') as scope:
# Move everything into depth so we can perform a single matrix multiply.
reshape = tf.reshape(pool2, [FLAGS.batch_size, -1])
dim = reshape.get_shape()[1].value
weights = _variable_with_weight_decay('weights', shape=[dim, 384],
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [384], tf.constant_initializer(0.1))
local3 = tf.nn.relu(tf.matmul(reshape, weights) + biases, name=scope.name)
_activation_summary(local3)
# local4
with tf.variable_scope('local4') as scope:
weights = _variable_with_weight_decay('weights', shape=[384, 192],
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [192], tf.constant_initializer(0.1))
local4 = tf.nn.relu(tf.matmul(local3, weights) + biases, name=scope.name)
_activation_summary(local4)
# linear layer(WX + b),
# We don't apply softmax here because
# tf.nn.sparse_softmax_cross_entropy_with_logits accepts the unscaled logits
# and performs the softmax internally for efficiency.
with tf.variable_scope('softmax_linear') as scope:
weights = _variable_with_weight_decay('weights', [192, NUM_CLASSES],
stddev=1/192.0, wd=0.0)
biases = _variable_on_cpu('biases', [NUM_CLASSES],
tf.constant_initializer(0.0))
softmax_linear = tf.add(tf.matmul(local4, weights), biases, name=scope.name)
_activation_summary(softmax_linear)
return softmax_linear
def loss(logits, labels):
"""Add L2Loss to all the trainable variables.
Add summary for "Loss" and "Loss/avg".
Args:
logits: Logits from inference().
labels: Labels from distorted_inputs or inputs(). 1-D tensor
of shape [batch_size]
Returns:
Loss tensor of type float.
"""
# Calculate the average cross entropy loss across the batch.
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
# The total loss is defined as the cross entropy loss plus all of the weight
# decay terms (L2 loss).
return tf.add_n(tf.get_collection('losses'), name='total_loss')
def _add_loss_summaries(total_loss):
"""Add summaries for losses in CIFAR-10 model.
Generates moving average for all losses and associated summaries for
visualizing the performance of the network.
Args:
total_loss: Total loss from loss().
Returns:
loss_averages_op: op for generating moving averages of losses.
"""
# Compute the moving average of all individual losses and the total loss.
loss_averages = tf.train.ExponentialMovingAverage(0.9, name='avg')
losses = tf.get_collection('losses')
loss_averages_op = loss_averages.apply(losses + [total_loss])
# Attach a scalar summary to all individual losses and the total loss; do the
# same for the averaged version of the losses.
for l in losses + [total_loss]:
# Name each loss as '(raw)' and name the moving average version of the loss
# as the original loss name.
tf.summary.scalar(l.op.name + ' (raw)', l)
tf.summary.scalar(l.op.name, loss_averages.average(l))
return loss_averages_op
def train(total_loss, global_step):
"""Train CIFAR-10 model.
Create an optimizer and apply to all trainable variables. Add moving
average for all trainable variables.
Args:
total_loss: Total loss from loss().
global_step: Integer Variable counting the number of training steps
processed.
Returns:
train_op: op for training.
"""
# Variables that affect learning rate.
num_batches_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN / FLAGS.batch_size
decay_steps = int(num_batches_per_epoch * NUM_EPOCHS_PER_DECAY)
# Decay the learning rate exponentially based on the number of steps.
lr = tf.train.exponential_decay(INITIAL_LEARNING_RATE,
global_step,
decay_steps,
LEARNING_RATE_DECAY_FACTOR,
staircase=True)
tf.summary.scalar('learning_rate', lr)
# Generate moving averages of all losses and associated summaries.
loss_averages_op = _add_loss_summaries(total_loss)
# Compute gradients.
with tf.control_dependencies([loss_averages_op]):
opt = tf.train.GradientDescentOptimizer(lr)
grads = opt.compute_gradients(total_loss)
# Apply gradients.
apply_gradient_op = opt.apply_gradients(grads, global_step=global_step)
# Add histograms for trainable variables.
for var in tf.trainable_variables():
tf.summary.histogram(var.op.name, var)
# Add histograms for gradients.
for grad, var in grads:
if grad is not None:
tf.summary.histogram(var.op.name + '/gradients', grad)
# Track the moving averages of all trainable variables.
variable_averages = tf.train.ExponentialMovingAverage(
MOVING_AVERAGE_DECAY, global_step)
variables_averages_op = variable_averages.apply(tf.trainable_variables())
with tf.control_dependencies([apply_gradient_op, variables_averages_op]):
train_op = tf.no_op(name='train')
return train_op
def maybe_download_and_extract():
"""Download and extract the tarball from Alex's website."""
dest_directory = FLAGS.data_dir
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
filename = DATA_URL.split('/')[-1]
filepath = os.path.join(dest_directory, filename)
if not os.path.exists(filepath):
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' % (filename,
float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)
print()
statinfo = os.stat(filepath)
print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
extracted_dir_path = os.path.join(dest_directory, 'cifar-10-batches-bin')
if not os.path.exists(extracted_dir_path):
tarfile.open(filepath, 'r:gz').extractall(dest_directory)
| 36.64
| 81
| 0.683816
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
import sys
import tarfile
from six.moves import urllib
import tensorflow as tf
import cifar10_input
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', 128,
"""Number of images to process in a batch.""")
tf.app.flags.DEFINE_string('data_dir', '/tmp/cifar10_data',
"""Path to the CIFAR-10 data directory.""")
tf.app.flags.DEFINE_boolean('use_fp16', False,
"""Train the model using fp16.""")
IMAGE_SIZE = cifar10_input.IMAGE_SIZE
NUM_CLASSES = cifar10_input.NUM_CLASSES
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = cifar10_input.NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = cifar10_input.NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
MOVING_AVERAGE_DECAY = 0.9999
NUM_EPOCHS_PER_DECAY = 350.0
LEARNING_RATE_DECAY_FACTOR = 0.1
INITIAL_LEARNING_RATE = 0.1
TOWER_NAME = 'tower'
DATA_URL = 'http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz'
def _activation_summary(x):
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.summary.histogram(tensor_name + '/activations', x)
tf.summary.scalar(tensor_name + '/sparsity',
tf.nn.zero_fraction(x))
def _variable_on_cpu(name, shape, initializer):
with tf.device('/cpu:0'):
dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
return var
def _variable_with_weight_decay(name, shape, stddev, wd):
dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
var = _variable_on_cpu(
name,
shape,
tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))
if wd is not None:
weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
return var
def distorted_inputs():
if not FLAGS.data_dir:
raise ValueError('Please supply a data_dir')
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
images, labels = cifar10_input.distorted_inputs(data_dir=data_dir,
batch_size=FLAGS.batch_size)
if FLAGS.use_fp16:
images = tf.cast(images, tf.float16)
labels = tf.cast(labels, tf.float16)
return images, labels
def inputs(eval_data):
if not FLAGS.data_dir:
raise ValueError('Please supply a data_dir')
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
images, labels = cifar10_input.inputs(eval_data=eval_data,
data_dir=data_dir,
batch_size=FLAGS.batch_size)
if FLAGS.use_fp16:
images = tf.cast(images, tf.float16)
labels = tf.cast(labels, tf.float16)
return images, labels
def inference(images):
with tf.variable_scope('conv1') as scope:
kernel = _variable_with_weight_decay('weights',
shape=[5, 5, 3, 64],
stddev=5e-2,
wd=0.0)
conv = tf.nn.conv2d(images, kernel, [1, 1, 1, 1], padding='SAME')
biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.0))
pre_activation = tf.nn.bias_add(conv, biases)
conv1 = tf.nn.relu(pre_activation, name=scope.name)
_activation_summary(conv1)
pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1],
padding='SAME', name='pool1')
norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm1')
with tf.variable_scope('conv2') as scope:
kernel = _variable_with_weight_decay('weights',
shape=[5, 5, 64, 64],
stddev=5e-2,
wd=0.0)
conv = tf.nn.conv2d(norm1, kernel, [1, 1, 1, 1], padding='SAME')
biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.1))
pre_activation = tf.nn.bias_add(conv, biases)
conv2 = tf.nn.relu(pre_activation, name=scope.name)
_activation_summary(conv2)
norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm2')
pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1], padding='SAME', name='pool2')
with tf.variable_scope('local3') as scope:
reshape = tf.reshape(pool2, [FLAGS.batch_size, -1])
dim = reshape.get_shape()[1].value
weights = _variable_with_weight_decay('weights', shape=[dim, 384],
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [384], tf.constant_initializer(0.1))
local3 = tf.nn.relu(tf.matmul(reshape, weights) + biases, name=scope.name)
_activation_summary(local3)
with tf.variable_scope('local4') as scope:
weights = _variable_with_weight_decay('weights', shape=[384, 192],
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [192], tf.constant_initializer(0.1))
local4 = tf.nn.relu(tf.matmul(local3, weights) + biases, name=scope.name)
_activation_summary(local4)
# tf.nn.sparse_softmax_cross_entropy_with_logits accepts the unscaled logits
# and performs the softmax internally for efficiency.
with tf.variable_scope('softmax_linear') as scope:
weights = _variable_with_weight_decay('weights', [192, NUM_CLASSES],
stddev=1/192.0, wd=0.0)
biases = _variable_on_cpu('biases', [NUM_CLASSES],
tf.constant_initializer(0.0))
softmax_linear = tf.add(tf.matmul(local4, weights), biases, name=scope.name)
_activation_summary(softmax_linear)
return softmax_linear
def loss(logits, labels):
# Calculate the average cross entropy loss across the batch.
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
# The total loss is defined as the cross entropy loss plus all of the weight
# decay terms (L2 loss).
return tf.add_n(tf.get_collection('losses'), name='total_loss')
def _add_loss_summaries(total_loss):
# Compute the moving average of all individual losses and the total loss.
loss_averages = tf.train.ExponentialMovingAverage(0.9, name='avg')
losses = tf.get_collection('losses')
loss_averages_op = loss_averages.apply(losses + [total_loss])
# Attach a scalar summary to all individual losses and the total loss; do the
# same for the averaged version of the losses.
for l in losses + [total_loss]:
# Name each loss as '(raw)' and name the moving average version of the loss
# as the original loss name.
tf.summary.scalar(l.op.name + ' (raw)', l)
tf.summary.scalar(l.op.name, loss_averages.average(l))
return loss_averages_op
def train(total_loss, global_step):
# Variables that affect learning rate.
num_batches_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN / FLAGS.batch_size
decay_steps = int(num_batches_per_epoch * NUM_EPOCHS_PER_DECAY)
# Decay the learning rate exponentially based on the number of steps.
lr = tf.train.exponential_decay(INITIAL_LEARNING_RATE,
global_step,
decay_steps,
LEARNING_RATE_DECAY_FACTOR,
staircase=True)
tf.summary.scalar('learning_rate', lr)
# Generate moving averages of all losses and associated summaries.
loss_averages_op = _add_loss_summaries(total_loss)
# Compute gradients.
with tf.control_dependencies([loss_averages_op]):
opt = tf.train.GradientDescentOptimizer(lr)
grads = opt.compute_gradients(total_loss)
# Apply gradients.
apply_gradient_op = opt.apply_gradients(grads, global_step=global_step)
# Add histograms for trainable variables.
for var in tf.trainable_variables():
tf.summary.histogram(var.op.name, var)
# Add histograms for gradients.
for grad, var in grads:
if grad is not None:
tf.summary.histogram(var.op.name + '/gradients', grad)
# Track the moving averages of all trainable variables.
variable_averages = tf.train.ExponentialMovingAverage(
MOVING_AVERAGE_DECAY, global_step)
variables_averages_op = variable_averages.apply(tf.trainable_variables())
with tf.control_dependencies([apply_gradient_op, variables_averages_op]):
train_op = tf.no_op(name='train')
return train_op
def maybe_download_and_extract():
dest_directory = FLAGS.data_dir
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
filename = DATA_URL.split('/')[-1]
filepath = os.path.join(dest_directory, filename)
if not os.path.exists(filepath):
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' % (filename,
float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)
print()
statinfo = os.stat(filepath)
print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
extracted_dir_path = os.path.join(dest_directory, 'cifar-10-batches-bin')
if not os.path.exists(extracted_dir_path):
tarfile.open(filepath, 'r:gz').extractall(dest_directory)
| true
| true
|
7909bb858eccad7af1a2b8501e8ca2b651ca5d17
| 3,931
|
py
|
Python
|
py/lib/jumpplus_downloader.py
|
Lutwidse/jumpplus-downloader
|
e6c466ca90dda41e9e265b41b7e75c545e6ed9c8
|
[
"MIT"
] | null | null | null |
py/lib/jumpplus_downloader.py
|
Lutwidse/jumpplus-downloader
|
e6c466ca90dda41e9e265b41b7e75c545e6ed9c8
|
[
"MIT"
] | null | null | null |
py/lib/jumpplus_downloader.py
|
Lutwidse/jumpplus-downloader
|
e6c466ca90dda41e9e265b41b7e75c545e6ed9c8
|
[
"MIT"
] | null | null | null |
import requests, json, os, time
from PIL import Image
from io import BytesIO
import img2pdf
class jumpplus_downloader:
def __init__(self):
self.file=0
self.h=1200
self.w=760
def auto_list_download(self, url, next=False, sleeptime=20,pdfConversion=True):
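        # Fetch the chapter metadata, then descramble and save every "main" page
        # image under the chapter title; optionally bundle the pages into a PDF.
        # sleeptime throttles the per-page requests.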
self.json_download(url)
self.file=0
if os.path.isdir(self.list["readableProduct"]["title"])!=True:
os.mkdir(self.list["readableProduct"]["title"])
for page in self.list["readableProduct"]["pageStructure"]["pages"]:
time.sleep(sleeptime)
if page["type"]=="main":
self.h=page["height"]
self.w=page["width"]
self.download(page["src"],False)
self.processing()
self.output("./"+self.list["readableProduct"]["title"]+"/")
if pdfConversion:
self.convertToPdf()
if self.list["readableProduct"]["nextReadableProductUri"]!=None and next==True:
self.auto_list_download(self.list["readableProduct"]["nextReadableProductUri"],True)
def json_download(self,url):
        # Spoof a browser User-Agent so the server accepts the request.
session=requests.session()
headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"}
json_data=session.get(url+".json",headers=headers).text
self.list=json.loads(json_data)
def json_localread(self, filepath):
with open(filepath) as json_file:
json_data=json.load(json_file)
self.list=json_data
def download(self,url,fakeque=False):
if fakeque:
print("Emulating Download : " + url)
self.img=url
else:
session=requests.session()
headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"}
self.img=requests.get(url)
def processing(self):
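        # The page image appears to be delivered as a scrambled 4x4 grid of tiles.
        # Each tile (computed over the image size minus a 24x16 px margin) is cropped
        # and re-pasted with its row and column indices swapped, i.e. the tile grid
        # is transposed, which restores the readable page.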
readImage=Image.open(BytesIO(self.img.content))
imageSize=readImage.size
width=imageSize[0]-24
height=imageSize[1]-16
buff=[]
counterX=0
counterY=0
for wx in range(4):
inbuff=[]
for lx in range(4):
cropped=readImage.crop(box=(width/4*counterX,height/4*counterY, width/4*(counterX+1),height/4*(counterY+1)))
inbuff.append(cropped)
counterY+=1
buff.append(inbuff)
counterX+=1
counterY=0
self.converted_img=Image.new("RGB",(int(width),int(height)))
counterX=0
counterY=0
for wdx in buff:
for ldx in wdx:
print(str(counterY))
self.converted_img.paste(ldx, (int(width/4*counterX) , int(height/4*counterY)))
counterX+=1
counterX=0
print("Current Y Counter:"+str(counterY))
counterY+=1
def output(self, file="./"):
self.converted_img.save(file+str(self.file)+".png")
self.file+=1
def convertToPdf(self):
directory="./"+self.list["readableProduct"]["title"]+"/"
sourceDir=os.listdir(directory)
imgcount=0
img=[]
filextend=sourceDir[0].split(".")
filextend=(str(".")+str(filextend[1]))
for images in sourceDir:
img.append(directory + str(imgcount) + filextend )
imgcount=imgcount+1
with open("./"+self.list["readableProduct"]["title"]+".pdf","wb") as f:
f.write(img2pdf.convert(img))
#A simple Json Dumper for debugging.
def dumpSimplifiedJson(self,jsObject):
f=open("JSON.json","w")
json.dump(jsObject, f, ensure_ascii=False, indent=4, sort_keys=True, separators=(',',': '))
| 35.414414
| 159
| 0.583821
|
import requests, json, os, time
from PIL import Image
from io import BytesIO
import img2pdf
class jumpplus_downloader:
def __init__(self):
self.file=0
self.h=1200
self.w=760
def auto_list_download(self, url, next=False, sleeptime=20,pdfConversion=True):
self.json_download(url)
self.file=0
if os.path.isdir(self.list["readableProduct"]["title"])!=True:
os.mkdir(self.list["readableProduct"]["title"])
for page in self.list["readableProduct"]["pageStructure"]["pages"]:
time.sleep(sleeptime)
if page["type"]=="main":
self.h=page["height"]
self.w=page["width"]
self.download(page["src"],False)
self.processing()
self.output("./"+self.list["readableProduct"]["title"]+"/")
if pdfConversion:
self.convertToPdf()
if self.list["readableProduct"]["nextReadableProductUri"]!=None and next==True:
self.auto_list_download(self.list["readableProduct"]["nextReadableProductUri"],True)
def json_download(self,url):
session=requests.session()
headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"}
json_data=session.get(url+".json",headers=headers).text
self.list=json.loads(json_data)
def json_localread(self, filepath):
with open(filepath) as json_file:
json_data=json.load(json_file)
self.list=json_data
def download(self,url,fakeque=False):
if fakeque:
print("Emulating Download : " + url)
self.img=url
else:
session=requests.session()
headers={"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"}
self.img=requests.get(url)
def processing(self):
readImage=Image.open(BytesIO(self.img.content))
imageSize=readImage.size
width=imageSize[0]-24
height=imageSize[1]-16
buff=[]
counterX=0
counterY=0
for wx in range(4):
inbuff=[]
for lx in range(4):
cropped=readImage.crop(box=(width/4*counterX,height/4*counterY, width/4*(counterX+1),height/4*(counterY+1)))
inbuff.append(cropped)
counterY+=1
buff.append(inbuff)
counterX+=1
counterY=0
self.converted_img=Image.new("RGB",(int(width),int(height)))
counterX=0
counterY=0
for wdx in buff:
for ldx in wdx:
print(str(counterY))
self.converted_img.paste(ldx, (int(width/4*counterX) , int(height/4*counterY)))
counterX+=1
counterX=0
print("Current Y Counter:"+str(counterY))
counterY+=1
def output(self, file="./"):
self.converted_img.save(file+str(self.file)+".png")
self.file+=1
def convertToPdf(self):
directory="./"+self.list["readableProduct"]["title"]+"/"
sourceDir=os.listdir(directory)
imgcount=0
img=[]
filextend=sourceDir[0].split(".")
filextend=(str(".")+str(filextend[1]))
for images in sourceDir:
img.append(directory + str(imgcount) + filextend )
imgcount=imgcount+1
with open("./"+self.list["readableProduct"]["title"]+".pdf","wb") as f:
f.write(img2pdf.convert(img))
    def dumpSimplifiedJson(self,jsObject):
        with open("JSON.json","w") as f:
            json.dump(jsObject, f, ensure_ascii=False, indent=4, sort_keys=True, separators=(',',': '))
| true
| true
|
7909bc5935c7986764219a514137459e4a7165df
| 1,094
|
py
|
Python
|
config.py
|
AnnabelNkir/My_Hello_World
|
6e37b4a90ce69a589444a66c8db5b261b75a9db1
|
[
"Unlicense"
] | null | null | null |
config.py
|
AnnabelNkir/My_Hello_World
|
6e37b4a90ce69a589444a66c8db5b261b75a9db1
|
[
"Unlicense"
] | null | null | null |
config.py
|
AnnabelNkir/My_Hello_World
|
6e37b4a90ce69a589444a66c8db5b261b75a9db1
|
[
"Unlicense"
] | null | null | null |
import os
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_TRACK_MODIFICATIONS = False
UPLOADED_PHOTOS_DEST = 'app/static/photos'
# email configurations
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")
# simple mde configurations
SIMPLEMDE_JS_IIFE = True
SIMPLEMDE_USE_CDN = True
@staticmethod
def init_app(app):
pass
class TestConfig(Config):
pass
class ProdConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
if SQLALCHEMY_DATABASE_URI and SQLALCHEMY_DATABASE_URI.startswith("postgres://"):
SQLALCHEMY_DATABASE_URI = SQLALCHEMY_DATABASE_URI.replace("postgres://", "postgresql://", 1)
pass
class DevConfig(Config):
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://moringa:Anna123!@localhost/blogapp1'
DEBUG = True
config_options = {
'development': DevConfig,
'production': ProdConfig,
'test': TestConfig
}
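A minimal sketch of how config_options is typically consumed in a Flask application factory; the create_app function below is hypothetical and not part of this file:

from flask import Flask

def create_app(config_name):
    app = Flask(__name__)
    # config_name is one of 'development', 'production' or 'test'
    app.config.from_object(config_options[config_name])
    config_options[config_name].init_app(app)
    return app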
| 22.791667
| 100
| 0.707495
|
import os
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_TRACK_MODIFICATIONS = False
UPLOADED_PHOTOS_DEST = 'app/static/photos'
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")
SIMPLEMDE_JS_IIFE = True
SIMPLEMDE_USE_CDN = True
@staticmethod
def init_app(app):
pass
class TestConfig(Config):
pass
class ProdConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
if SQLALCHEMY_DATABASE_URI and SQLALCHEMY_DATABASE_URI.startswith("postgres://"):
SQLALCHEMY_DATABASE_URI = SQLALCHEMY_DATABASE_URI.replace("postgres://", "postgresql://", 1)
pass
class DevConfig(Config):
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://moringa:Anna123!@localhost/blogapp1'
DEBUG = True
config_options = {
'development': DevConfig,
'production': ProdConfig,
'test': TestConfig
}
| true
| true
|
7909bcb785a9dae468718714a34bf72a9f4ab3ae
| 92
|
py
|
Python
|
2015/03/quiz-ace/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
2015/03/quiz-ace/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | null | null | null |
2015/03/quiz-ace/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
COPY_GOOGLE_DOC_KEY = '1mORX04vNiG06X52RdbbdBBZ9RSu4cV3ak9a0jamvjW0'
| 23
| 68
| 0.847826
|
COPY_GOOGLE_DOC_KEY = '1mORX04vNiG06X52RdbbdBBZ9RSu4cV3ak9a0jamvjW0'
| true
| true
|
7909bccdc0b40c2892adeab78b7ea3435524e54d
| 8,126
|
py
|
Python
|
cotyledon/_service.py
|
y-gupta/cotyledon
|
319faa2673a986733d9a7622bee29e187f2e7391
|
[
"Apache-2.0"
] | 71
|
2016-11-14T15:02:41.000Z
|
2022-02-20T06:41:59.000Z
|
cotyledon/_service.py
|
y-gupta/cotyledon
|
319faa2673a986733d9a7622bee29e187f2e7391
|
[
"Apache-2.0"
] | 26
|
2016-07-14T13:42:15.000Z
|
2022-03-18T07:03:39.000Z
|
cotyledon/_service.py
|
y-gupta/cotyledon
|
319faa2673a986733d9a7622bee29e187f2e7391
|
[
"Apache-2.0"
] | 17
|
2016-05-02T15:48:42.000Z
|
2021-11-24T15:26:08.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import random
import signal
import sys
import threading
from cotyledon import _utils
LOG = logging.getLogger(__name__)
class Service(object):
"""Base class for a service
This class will be executed in a new child process/worker
:py:class:`ServiceWorker` of a :py:class:`ServiceManager`. It registers
    signals to manage the reloading and the ending of the process.
Methods :py:meth:`run`, :py:meth:`terminate` and :py:meth:`reload` are
optional.
"""
name = None
"""Service name used in the process title and the log messages in additionnal
of the worker_id."""
graceful_shutdown_timeout = None
"""Timeout after which a gracefully shutdown service will exit. zero means
endless wait. None means same as ServiceManager that launch the service"""
def __init__(self, worker_id):
"""Create a new Service
:param worker_id: the identifier of this service instance
:type worker_id: int
The identifier of the worker can be used for workload repartition
because it's consistent and always the same.
For example, if the number of workers for this service is 3,
        one will get 0, the second will get 1 and the last will get 2.
        If the worker with worker_id 1 dies, the newly spawned process will get 1 again.
"""
super(Service, self).__init__()
self._initialize(worker_id)
def _initialize(self, worker_id):
if getattr(self, '_initialized', False):
return
self._initialized = True
if self.name is None:
self.name = self.__class__.__name__
self.worker_id = worker_id
self.pid = os.getpid()
self._signal_lock = threading.Lock()
# Only used by oslo_config_glue for now, so we don't need
        # to have a list of hooks
self._on_reload_internal_hook = self._noop_hook
def _noop_hook(self, service):
pass
def terminate(self):
"""Gracefully shutdown the service
This method will be executed when the Service has to shutdown cleanly.
If not implemented the process will just end with status 0.
To customize the exit code, the :py:class:`SystemExit` exception can be
used.
Any exceptions raised by this method will be logged and the worker will
exit with status 1.
"""
def reload(self):
"""Reloading of the service
This method will be executed when the Service receives a SIGHUP.
If not implemented the process will just end with status 0 and
:py:class:`ServiceRunner` will start a new fresh process for this
service with the same worker_id.
Any exceptions raised by this method will be logged and the worker will
exit with status 1.
"""
os.kill(os.getpid(), signal.SIGTERM)
def run(self):
"""Method representing the service activity
If not implemented the process will just wait to receive an ending
signal.
        This method is run in a thread and can block or return as needed.
Any exceptions raised by this method will be logged and the worker will
exit with status 1.
"""
    # Helpers to run application methods in a safe way when signals are
    # received
def _reload(self):
with _utils.exit_on_exception():
if self._signal_lock.acquire(False):
try:
self._on_reload_internal_hook(self)
self.reload()
finally:
self._signal_lock.release()
def _terminate(self):
with _utils.exit_on_exception(), self._signal_lock:
self.terminate()
sys.exit(0)
def _run(self):
with _utils.exit_on_exception():
self.run()
class ServiceConfig(object):
def __init__(self, service_id, service, workers, args, kwargs):
self.service = service
self.workers = workers
self.args = args
self.kwargs = kwargs
self.service_id = service_id
class ServiceWorker(_utils.SignalManager):
"""Service Worker Wrapper
This represents the child process spawned by ServiceManager
    All methods implemented here must run in the main thread.
"""
@classmethod
def create_and_wait(cls, *args, **kwargs):
sw = cls(*args, **kwargs)
sw.wait_forever()
def __init__(self, config, service_id, worker_id, parent_pipe,
started_hooks, graceful_shutdown_timeout):
super(ServiceWorker, self).__init__()
self._ready = threading.Event()
_utils.spawn(self._watch_parent_process, parent_pipe)
# Reseed random number generator
random.seed()
args = tuple() if config.args is None else config.args
kwargs = dict() if config.kwargs is None else config.kwargs
self.service = config.service(worker_id, *args, **kwargs)
self.service._initialize(worker_id)
if self.service.graceful_shutdown_timeout is None:
self.service.graceful_shutdown_timeout = graceful_shutdown_timeout
self.title = "%(name)s(%(worker_id)d) [%(pid)d]" % dict(
name=self.service.name, worker_id=worker_id, pid=os.getpid())
# Set process title
_utils.setproctitle(
"%(pname)s: %(name)s worker(%(worker_id)d)" % dict(
pname=_utils.get_process_name(), name=self.service.name,
worker_id=worker_id))
        # We are ready, tell them
self._ready.set()
_utils.run_hooks('new_worker', started_hooks, service_id, worker_id,
self.service)
def _watch_parent_process(self, parent_pipe):
# This will block until the write end is closed when the parent
# dies unexpectedly
parent_pipe[1].close()
try:
parent_pipe[0].recv()
except EOFError:
pass
if self._ready.is_set():
LOG.info('Parent process has died unexpectedly, %s exiting'
% self.title)
if os.name == "posix":
os.kill(os.getpid(), signal.SIGTERM)
else:
# Fallback to process signal later
self._signals_received.appendleft(signal.SIGTERM)
else:
os._exit(0)
def _alarm(self):
LOG.info('Graceful shutdown timeout (%d) exceeded, '
'exiting %s now.' %
(self.service.graceful_shutdown_timeout,
self.title))
os._exit(1)
def _on_signal_received(self, sig):
# Code below must not block to return to select.select() and catch
# next signals
if sig == _utils.SIGALRM:
self._alarm()
elif sig == signal.SIGTERM:
LOG.info('Caught SIGTERM signal, '
'graceful exiting of service %s' % self.title)
if self.service.graceful_shutdown_timeout > 0:
if os.name == "posix":
signal.alarm(self.service.graceful_shutdown_timeout)
else:
threading.Timer(self.service.graceful_shutdown_timeout,
self._alarm).start()
_utils.spawn(self.service._terminate)
elif sig == _utils.SIGHUP:
_utils.spawn(self.service._reload)
def wait_forever(self):
LOG.debug("Run service %s" % self.title)
_utils.spawn(self.service._run)
super(ServiceWorker, self)._wait_forever()
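A minimal sketch of how the Service base class above is usually subclassed and launched; it assumes cotyledon's public ServiceManager API, and the PrinterService name is purely illustrative:

import time

import cotyledon


class PrinterService(cotyledon.Service):
    name = "printer"

    def __init__(self, worker_id):
        super(PrinterService, self).__init__(worker_id)
        self._shutdown = False

    def run(self):
        # run() executes in a thread and may block; loop until terminate()
        # flips the flag set below.
        while not self._shutdown:
            print("%s worker %d is alive" % (self.name, self.worker_id))
            time.sleep(1)

    def terminate(self):
        self._shutdown = True


manager = cotyledon.ServiceManager()
manager.add(PrinterService, workers=2)
manager.run()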
| 33.303279
| 81
| 0.629461
|
import logging
import os
import random
import signal
import sys
import threading
from cotyledon import _utils
LOG = logging.getLogger(__name__)
class Service(object):
name = None
graceful_shutdown_timeout = None
def __init__(self, worker_id):
super(Service, self).__init__()
self._initialize(worker_id)
def _initialize(self, worker_id):
if getattr(self, '_initialized', False):
return
self._initialized = True
if self.name is None:
self.name = self.__class__.__name__
self.worker_id = worker_id
self.pid = os.getpid()
self._signal_lock = threading.Lock()
        # to have a list of hooks
self._on_reload_internal_hook = self._noop_hook
def _noop_hook(self, service):
pass
def terminate(self):
def reload(self):
os.kill(os.getpid(), signal.SIGTERM)
def run(self):
        # Helpers to run application methods in a safe way when signals are
        # received
def _reload(self):
with _utils.exit_on_exception():
if self._signal_lock.acquire(False):
try:
self._on_reload_internal_hook(self)
self.reload()
finally:
self._signal_lock.release()
def _terminate(self):
with _utils.exit_on_exception(), self._signal_lock:
self.terminate()
sys.exit(0)
def _run(self):
with _utils.exit_on_exception():
self.run()
class ServiceConfig(object):
def __init__(self, service_id, service, workers, args, kwargs):
self.service = service
self.workers = workers
self.args = args
self.kwargs = kwargs
self.service_id = service_id
class ServiceWorker(_utils.SignalManager):
@classmethod
def create_and_wait(cls, *args, **kwargs):
sw = cls(*args, **kwargs)
sw.wait_forever()
def __init__(self, config, service_id, worker_id, parent_pipe,
started_hooks, graceful_shutdown_timeout):
super(ServiceWorker, self).__init__()
self._ready = threading.Event()
_utils.spawn(self._watch_parent_process, parent_pipe)
# Reseed random number generator
random.seed()
args = tuple() if config.args is None else config.args
kwargs = dict() if config.kwargs is None else config.kwargs
self.service = config.service(worker_id, *args, **kwargs)
self.service._initialize(worker_id)
if self.service.graceful_shutdown_timeout is None:
self.service.graceful_shutdown_timeout = graceful_shutdown_timeout
self.title = "%(name)s(%(worker_id)d) [%(pid)d]" % dict(
name=self.service.name, worker_id=worker_id, pid=os.getpid())
# Set process title
_utils.setproctitle(
"%(pname)s: %(name)s worker(%(worker_id)d)" % dict(
pname=_utils.get_process_name(), name=self.service.name,
worker_id=worker_id))
        # We are ready, tell them
self._ready.set()
_utils.run_hooks('new_worker', started_hooks, service_id, worker_id,
self.service)
def _watch_parent_process(self, parent_pipe):
# This will block until the write end is closed when the parent
# dies unexpectedly
parent_pipe[1].close()
try:
parent_pipe[0].recv()
except EOFError:
pass
if self._ready.is_set():
LOG.info('Parent process has died unexpectedly, %s exiting'
% self.title)
if os.name == "posix":
os.kill(os.getpid(), signal.SIGTERM)
else:
# Fallback to process signal later
self._signals_received.appendleft(signal.SIGTERM)
else:
os._exit(0)
def _alarm(self):
LOG.info('Graceful shutdown timeout (%d) exceeded, '
'exiting %s now.' %
(self.service.graceful_shutdown_timeout,
self.title))
os._exit(1)
def _on_signal_received(self, sig):
# Code below must not block to return to select.select() and catch
# next signals
if sig == _utils.SIGALRM:
self._alarm()
elif sig == signal.SIGTERM:
LOG.info('Caught SIGTERM signal, '
'graceful exiting of service %s' % self.title)
if self.service.graceful_shutdown_timeout > 0:
if os.name == "posix":
signal.alarm(self.service.graceful_shutdown_timeout)
else:
threading.Timer(self.service.graceful_shutdown_timeout,
self._alarm).start()
_utils.spawn(self.service._terminate)
elif sig == _utils.SIGHUP:
_utils.spawn(self.service._reload)
def wait_forever(self):
LOG.debug("Run service %s" % self.title)
_utils.spawn(self.service._run)
super(ServiceWorker, self)._wait_forever()
| true
| true
|
7909bdef63b6b3ecf9529a20f73e40e5fcec6b6d
| 2,304
|
py
|
Python
|
h/util/db.py
|
tgiardina/rpp-h
|
fece590f901b052a59c19a24acfeba52cee33c84
|
[
"BSD-2-Clause"
] | null | null | null |
h/util/db.py
|
tgiardina/rpp-h
|
fece590f901b052a59c19a24acfeba52cee33c84
|
[
"BSD-2-Clause"
] | null | null | null |
h/util/db.py
|
tgiardina/rpp-h
|
fece590f901b052a59c19a24acfeba52cee33c84
|
[
"BSD-2-Clause"
] | null | null | null |
from functools import lru_cache
import sqlalchemy
class lru_cache_in_transaction: # noqa: N801
"""
Decorator to wrap a function with a memoizing callable that saves up to
the `maxsize` most recent calls. The underlying cache is automatically
cleared at the end of the database transaction.
Since a dictionary is used to cache results, the positional and keyword
arguments to the function must be hashable.
For documentation of the `maxsize` and `typed` arguments, see the
documentation of :py:func:`functools.lru_cache`.
Example::
@lru_cache_in_transaction(session)
def fetch_user(userid):
return session.query(models.User).filter_by(userid=userid).one_or_none()
fetch_user('acct:foo@example.com') # => executes a query
fetch_user('acct:foo@example.com') # => returns cached value
fetch_user('acct:bar@example.com') # => executes a query
session.commit()
fetch_user('acct:foo@example.com') # => executes a query
"""
def __init__(self, session, maxsize=128, typed=False):
self._session = session
self._maxsize = maxsize
self._typed = typed
def __call__(self, func):
decorator = lru_cache(maxsize=self._maxsize, typed=self._typed)
wrapped = decorator(func)
on_transaction_end(self._session)(wrapped.cache_clear)
return wrapped
def on_transaction_end(session):
"""
Decorator for a function which should run after a top-level transaction ended.
Transactions that are either implicitly or explicitly committed or rolled back will be
closed at the end of a Pyramid view. This is here for cleaning up caches so that
code after the view, exception views for example, will not be able to access
detached instances.
Example usage:
.. code-block:: python
@util.db.on_transaction_end(session)
def flush_cache():
self._cache = {}
"""
def decorate(func):
def _handler(_, transaction):
# We only clear the cache when the top-level transaction finishes.
if transaction.parent is None:
func()
sqlalchemy.event.listen(session, "after_transaction_end", _handler)
return func
return decorate
| 31.135135
| 90
| 0.676215
|
from functools import lru_cache
import sqlalchemy
class lru_cache_in_transaction:
def __init__(self, session, maxsize=128, typed=False):
self._session = session
self._maxsize = maxsize
self._typed = typed
def __call__(self, func):
decorator = lru_cache(maxsize=self._maxsize, typed=self._typed)
wrapped = decorator(func)
on_transaction_end(self._session)(wrapped.cache_clear)
return wrapped
def on_transaction_end(session):
def decorate(func):
def _handler(_, transaction):
if transaction.parent is None:
func()
sqlalchemy.event.listen(session, "after_transaction_end", _handler)
return func
return decorate
| true
| true
|
7909c13ed17cdeeb071b18dbf3357bd20509b0a0
| 1,158
|
py
|
Python
|
great_international/migrations/0005_internationalukhqpages.py
|
kaedroho/dit-directory-cms
|
67c15eeed19e7b3583f1fce1969230ddf83b6813
|
[
"MIT"
] | 6
|
2018-03-20T11:19:07.000Z
|
2021-10-05T07:53:11.000Z
|
great_international/migrations/0005_internationalukhqpages.py
|
kaedroho/dit-directory-cms
|
67c15eeed19e7b3583f1fce1969230ddf83b6813
|
[
"MIT"
] | 802
|
2018-02-05T14:16:13.000Z
|
2022-02-10T10:59:21.000Z
|
great_international/migrations/0005_internationalukhqpages.py
|
kaedroho/dit-directory-cms
|
67c15eeed19e7b3583f1fce1969230ddf83b6813
|
[
"MIT"
] | 6
|
2019-01-22T13:19:37.000Z
|
2019-07-01T10:35:26.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-02-12 11:43
from __future__ import unicode_literals
import core.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0040_page_draft_title'),
('great_international', '0004_merge_20190212_1003'),
]
operations = [
migrations.CreateModel(
name='InternationalUKHQPages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
]
| 38.6
| 285
| 0.645941
|
from __future__ import unicode_literals
import core.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0040_page_draft_title'),
('great_international', '0004_merge_20190212_1003'),
]
operations = [
migrations.CreateModel(
name='InternationalUKHQPages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
]
| true
| true
|
7909c154f27799067353744113dbc182d0e307d3
| 12,731
|
py
|
Python
|
conductor/conductor/solver/optimizer/optimizer.py
|
aalsudais/optf-has
|
c3e070b6ebc713a571c10d7a5cd87e5053047136
|
[
"Apache-2.0"
] | null | null | null |
conductor/conductor/solver/optimizer/optimizer.py
|
aalsudais/optf-has
|
c3e070b6ebc713a571c10d7a5cd87e5053047136
|
[
"Apache-2.0"
] | null | null | null |
conductor/conductor/solver/optimizer/optimizer.py
|
aalsudais/optf-has
|
c3e070b6ebc713a571c10d7a5cd87e5053047136
|
[
"Apache-2.0"
] | null | null | null |
#
# -------------------------------------------------------------------------
# Copyright (c) 2015-2017 AT&T Intellectual Property
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -------------------------------------------------------------------------
#
from oslo_config import cfg
from oslo_log import log
import copy
import time
from conductor import service
# from conductor.solver.optimizer import decision_path as dpath
# from conductor.solver.optimizer import best_first
# from conductor.solver.optimizer import greedy
from conductor.solver.optimizer import fit_first
from conductor.solver.optimizer import random_pick
from conductor.solver.request import demand
from conductor.solver.triage_tool.triage_data import TriageData
LOG = log.getLogger(__name__)
CONF = cfg.CONF
SOLVER_OPTS = [
]
CONF.register_opts(SOLVER_OPTS, group='solver')
class Optimizer(object):
# FIXME(gjung): _requests should be request (no underscore, one item)
def __init__(self, conf, _requests=None, _begin_time=None):
self.conf = conf
# start time of solving the plan
if _begin_time is not None:
self._begin_time = _begin_time
# self.search = greedy.Greedy(self.conf)
self.search = None
# self.search = best_first.BestFirst(self.conf)
if _requests is not None:
self.requests = _requests
        # Were the 'simulators' ever used? It doesn't look like it.
        # Since solver/simulator code needs cleansing before being moved to ONAP,
        # I see no value in keeping this piece of code, which is not letting us do
        # that cleanup. Also, Shankar has confirmed the solver/simulators folder needs
        # to go away. Commenting out for now - maybe it should be removed permanently.
        # Shankar (TODO).
# else:
# ''' for simulation '''
# req_sim = request_simulator.RequestSimulator(self.conf)
# req_sim.generate_requests()
# self.requests = req_sim.requests
def get_solution(self, num_solutions):
LOG.debug("search start for max {} solutions".format(num_solutions))
for rk in self.requests:
request = self.requests[rk]
LOG.debug("--- request = {}".format(rk))
decision_list = list()
LOG.debug("1. sort demands")
demand_list = self._sort_demands(request)
for d in demand_list:
LOG.debug(" demand = {}".format(d.name))
LOG.debug("2. search")
rand_counter = 10
while num_solutions == 'all' or num_solutions > 0:
LOG.debug("searching for the solution {}".format(len(decision_list) + 1))
st = time.time()
_copy_demand_list = copy.deepcopy(demand_list)
if not request.objective.goal:
LOG.debug("No objective function is provided. "
"Random pick algorithm is used")
self.search = random_pick.RandomPick(self.conf)
best_path = self.search.search(demand_list, request)
else:
LOG.debug("Fit first algorithm is used")
self.search = fit_first.FitFirst(self.conf)
best_path = self.search.search(demand_list,
request.objective, request)
LOG.debug("search delay = {} sec".format(time.time() - st))
demand_list = copy.deepcopy(_copy_demand_list)
if best_path is not None:
self.search.print_decisions(best_path)
rand_counter = 10
elif not request.objective.goal and rand_counter > 0 and self._has_candidates(request):
                    # RandomPick gave no candidates after applying constraints. If there are any candidates left,
                    # let's try again several times until some solution is found. When one of the demands is not unique,
                    # it persists in the list all the time. In order to prevent an infinite loop we need to have a counter.
rand_counter -= 1
LOG.debug("Incomplete random solution - repeat {}".format(rand_counter))
continue
else:
LOG.debug("no solution found")
break
# add the current solution to decision_list
decision_list.append(best_path.decisions)
#remove the candidate with "uniqueness = true"
self._remove_unique_candidate(request, best_path, demand_list)
if num_solutions != 'all':
num_solutions -= 1
self.search.triageSolver.getSolution(decision_list)
return decision_list
def _has_candidates(self, request):
for demand_name, demand in request.demands.items():
LOG.debug("Req Available resources: {} {}".format(demand_name, len(request.demands[demand_name].resources)))
if len(demand.resources) == 0:
LOG.debug("No more candidates for demand {}".format(demand_name))
return False
return True
def _remove_unique_candidate(self, _request, current_decision, demand_list):
        # This method removes previously solved/used candidates from consideration
        # when Conductor needs to provide multiple solutions to the user/client
for demand_name, candidate_attr in current_decision.decisions.items():
candidate_uniqueness = candidate_attr.get('uniqueness')
if candidate_uniqueness and candidate_uniqueness == 'true':
                # if the candidate uniqueness is 'true', then remove
# that solved candidate from the translated candidates list
_request.demands[demand_name].resources.pop(candidate_attr.get('candidate_id'))
# update the demand_list
for demand in demand_list:
if(getattr(demand, 'name') == demand_name):
demand.resources = _request.demands[demand_name].resources
def _sort_demands(self, _request):
LOG.debug(" _sort_demands")
demand_list = []
# first, find loc-demand dependencies
# using constraints and objective functions
open_demand_list = []
for key in _request.constraints:
c = _request.constraints[key]
if c.constraint_type == "access_distance":
for dk in c.demand_list:
if _request.demands[dk].sort_base != 1:
_request.demands[dk].sort_base = 1
open_demand_list.append(_request.demands[dk])
for op in _request.objective.operand_list:
if op.function.func_type == "latency_between": #TODO do i need to include the region_group here?
if isinstance(op.function.loc_a, demand.Location):
if _request.demands[op.function.loc_z.name].sort_base != 1:
_request.demands[op.function.loc_z.name].sort_base = 1
open_demand_list.append(op.function.loc_z)
elif isinstance(op.function.loc_z, demand.Location):
if _request.demands[op.function.loc_a.name].sort_base != 1:
_request.demands[op.function.loc_a.name].sort_base = 1
open_demand_list.append(op.function.loc_a)
elif op.function.func_type == "distance_between":
if isinstance(op.function.loc_a, demand.Location):
if _request.demands[op.function.loc_z.name].sort_base != 1:
_request.demands[op.function.loc_z.name].sort_base = 1
open_demand_list.append(op.function.loc_z)
elif isinstance(op.function.loc_z, demand.Location):
if _request.demands[op.function.loc_a.name].sort_base != 1:
_request.demands[op.function.loc_a.name].sort_base = 1
open_demand_list.append(op.function.loc_a)
if len(open_demand_list) == 0:
init_demand = self._exist_not_sorted_demand(_request.demands)
open_demand_list.append(init_demand)
# second, find demand-demand dependencies
while True:
d_list = self._get_depended_demands(open_demand_list, _request)
for d in d_list:
demand_list.append(d)
init_demand = self._exist_not_sorted_demand(_request.demands)
if init_demand is None:
break
open_demand_list.append(init_demand)
return demand_list
def _get_depended_demands(self, _open_demand_list, _request):
demand_list = []
while True:
if len(_open_demand_list) == 0:
break
d = _open_demand_list.pop(0)
if d.sort_base != 1:
d.sort_base = 1
demand_list.append(d)
for key in _request.constraints:
c = _request.constraints[key]
# FIXME(snarayanan): "aic" only to be known by conductor-data
if c.constraint_type == "aic_distance":
if d.name in c.demand_list:
for dk in c.demand_list:
if dk != d.name and \
_request.demands[dk].sort_base != 1:
_request.demands[dk].sort_base = 1
_open_demand_list.append(
_request.demands[dk])
for op in _request.objective.operand_list:
if op.function.func_type == "latency_between": #TODO
if op.function.loc_a.name == d.name:
if op.function.loc_z.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_z.name].sort_base != 1:
_request.demands[
op.function.loc_z.name].sort_base = 1
_open_demand_list.append(op.function.loc_z)
elif op.function.loc_z.name == d.name:
if op.function.loc_a.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_a.name].sort_base != 1:
_request.demands[
op.function.loc_a.name].sort_base = 1
_open_demand_list.append(op.function.loc_a)
elif op.function.func_type == "distance_between":
if op.function.loc_a.name == d.name:
if op.function.loc_z.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_z.name].sort_base != 1:
_request.demands[
op.function.loc_z.name].sort_base = 1
_open_demand_list.append(op.function.loc_z)
elif op.function.loc_z.name == d.name:
if op.function.loc_a.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_a.name].sort_base != 1:
_request.demands[
op.function.loc_a.name].sort_base = 1
_open_demand_list.append(op.function.loc_a)
return demand_list
def _exist_not_sorted_demand(self, _demands):
not_sorted_demand = None
for key in _demands:
demand = _demands[key]
if demand.sort_base != 1:
not_sorted_demand = demand
break
return not_sorted_demand
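A minimal sketch of how the Optimizer above might be driven; translated_requests is a placeholder for the mapping of request keys to solver request objects that conductor builds elsewhere:

opt = Optimizer(CONF, _requests=translated_requests)
# Ask for at most two distinct placements; pass 'all' to keep going until
# the unique candidates are exhausted.
for solution in opt.get_solution(num_solutions=2):
    # Each solution maps a demand name to the chosen candidate's attributes.
    for demand_name, candidate in solution.items():
        print(demand_name, candidate.get('candidate_id'))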
| 44.513986
| 120
| 0.562093
|
from oslo_config import cfg
from oslo_log import log
import copy
import time
from conductor import service
from conductor.solver.optimizer import fit_first
from conductor.solver.optimizer import random_pick
from conductor.solver.request import demand
from conductor.solver.triage_tool.triage_data import TriageData
LOG = log.getLogger(__name__)
CONF = cfg.CONF
SOLVER_OPTS = [
]
CONF.register_opts(SOLVER_OPTS, group='solver')
class Optimizer(object):
def __init__(self, conf, _requests=None, _begin_time=None):
self.conf = conf
if _begin_time is not None:
self._begin_time = _begin_time
self.search = None
if _requests is not None:
self.requests = _requests
        # Since solver/simulator code needs cleansing before being moved to ONAP,
        # I see no value in keeping this piece of code, which is not letting us do
        # that cleanup. Also, Shankar has confirmed the solver/simulators folder needs
        # to go away. Commenting out for now - maybe it should be removed permanently.
        # Shankar (TODO).
# else:
# ''' for simulation '''
# req_sim = request_simulator.RequestSimulator(self.conf)
# req_sim.generate_requests()
# self.requests = req_sim.requests
def get_solution(self, num_solutions):
LOG.debug("search start for max {} solutions".format(num_solutions))
for rk in self.requests:
request = self.requests[rk]
LOG.debug("--- request = {}".format(rk))
decision_list = list()
LOG.debug("1. sort demands")
demand_list = self._sort_demands(request)
for d in demand_list:
LOG.debug(" demand = {}".format(d.name))
LOG.debug("2. search")
rand_counter = 10
while num_solutions == 'all' or num_solutions > 0:
LOG.debug("searching for the solution {}".format(len(decision_list) + 1))
st = time.time()
_copy_demand_list = copy.deepcopy(demand_list)
if not request.objective.goal:
LOG.debug("No objective function is provided. "
"Random pick algorithm is used")
self.search = random_pick.RandomPick(self.conf)
best_path = self.search.search(demand_list, request)
else:
LOG.debug("Fit first algorithm is used")
self.search = fit_first.FitFirst(self.conf)
best_path = self.search.search(demand_list,
request.objective, request)
LOG.debug("search delay = {} sec".format(time.time() - st))
demand_list = copy.deepcopy(_copy_demand_list)
if best_path is not None:
self.search.print_decisions(best_path)
rand_counter = 10
elif not request.objective.goal and rand_counter > 0 and self._has_candidates(request):
# RandomPick gave no candidates after applying constraints. If there are any candidates left
                    # let's try again several times until some solution is found. When one of the demands is not unique
rand_counter -= 1
LOG.debug("Incomplete random solution - repeat {}".format(rand_counter))
continue
else:
LOG.debug("no solution found")
break
decision_list.append(best_path.decisions)
self._remove_unique_candidate(request, best_path, demand_list)
if num_solutions != 'all':
num_solutions -= 1
self.search.triageSolver.getSolution(decision_list)
return decision_list
def _has_candidates(self, request):
for demand_name, demand in request.demands.items():
LOG.debug("Req Available resources: {} {}".format(demand_name, len(request.demands[demand_name].resources)))
if len(demand.resources) == 0:
LOG.debug("No more candidates for demand {}".format(demand_name))
return False
return True
def _remove_unique_candidate(self, _request, current_decision, demand_list):
for demand_name, candidate_attr in current_decision.decisions.items():
candidate_uniqueness = candidate_attr.get('uniqueness')
if candidate_uniqueness and candidate_uniqueness == 'true':
_request.demands[demand_name].resources.pop(candidate_attr.get('candidate_id'))
for demand in demand_list:
if(getattr(demand, 'name') == demand_name):
demand.resources = _request.demands[demand_name].resources
def _sort_demands(self, _request):
LOG.debug(" _sort_demands")
demand_list = []
open_demand_list = []
for key in _request.constraints:
c = _request.constraints[key]
if c.constraint_type == "access_distance":
for dk in c.demand_list:
if _request.demands[dk].sort_base != 1:
_request.demands[dk].sort_base = 1
open_demand_list.append(_request.demands[dk])
for op in _request.objective.operand_list:
if op.function.func_type == "latency_between":
if isinstance(op.function.loc_a, demand.Location):
if _request.demands[op.function.loc_z.name].sort_base != 1:
_request.demands[op.function.loc_z.name].sort_base = 1
open_demand_list.append(op.function.loc_z)
elif isinstance(op.function.loc_z, demand.Location):
if _request.demands[op.function.loc_a.name].sort_base != 1:
_request.demands[op.function.loc_a.name].sort_base = 1
open_demand_list.append(op.function.loc_a)
elif op.function.func_type == "distance_between":
if isinstance(op.function.loc_a, demand.Location):
if _request.demands[op.function.loc_z.name].sort_base != 1:
_request.demands[op.function.loc_z.name].sort_base = 1
open_demand_list.append(op.function.loc_z)
elif isinstance(op.function.loc_z, demand.Location):
if _request.demands[op.function.loc_a.name].sort_base != 1:
_request.demands[op.function.loc_a.name].sort_base = 1
open_demand_list.append(op.function.loc_a)
if len(open_demand_list) == 0:
init_demand = self._exist_not_sorted_demand(_request.demands)
open_demand_list.append(init_demand)
while True:
d_list = self._get_depended_demands(open_demand_list, _request)
for d in d_list:
demand_list.append(d)
init_demand = self._exist_not_sorted_demand(_request.demands)
if init_demand is None:
break
open_demand_list.append(init_demand)
return demand_list
def _get_depended_demands(self, _open_demand_list, _request):
demand_list = []
while True:
if len(_open_demand_list) == 0:
break
d = _open_demand_list.pop(0)
if d.sort_base != 1:
d.sort_base = 1
demand_list.append(d)
for key in _request.constraints:
c = _request.constraints[key]
if c.constraint_type == "aic_distance":
if d.name in c.demand_list:
for dk in c.demand_list:
if dk != d.name and \
_request.demands[dk].sort_base != 1:
_request.demands[dk].sort_base = 1
_open_demand_list.append(
_request.demands[dk])
for op in _request.objective.operand_list:
if op.function.func_type == "latency_between":
if op.function.loc_a.name == d.name:
if op.function.loc_z.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_z.name].sort_base != 1:
_request.demands[
op.function.loc_z.name].sort_base = 1
_open_demand_list.append(op.function.loc_z)
elif op.function.loc_z.name == d.name:
if op.function.loc_a.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_a.name].sort_base != 1:
_request.demands[
op.function.loc_a.name].sort_base = 1
_open_demand_list.append(op.function.loc_a)
elif op.function.func_type == "distance_between":
if op.function.loc_a.name == d.name:
if op.function.loc_z.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_z.name].sort_base != 1:
_request.demands[
op.function.loc_z.name].sort_base = 1
_open_demand_list.append(op.function.loc_z)
elif op.function.loc_z.name == d.name:
if op.function.loc_a.name in \
_request.demands.keys():
if _request.demands[
op.function.loc_a.name].sort_base != 1:
_request.demands[
op.function.loc_a.name].sort_base = 1
_open_demand_list.append(op.function.loc_a)
return demand_list
def _exist_not_sorted_demand(self, _demands):
not_sorted_demand = None
for key in _demands:
demand = _demands[key]
if demand.sort_base != 1:
not_sorted_demand = demand
break
return not_sorted_demand
| true
| true
|
7909c1b5f43eb4ec1ea4c09b60122f8dce395a43
| 147
|
py
|
Python
|
hhcms/apps/account/tests.py
|
youngershen/hhcms
|
748bfcaaf250584b2b7233f271644ca33f8ff80b
|
[
"MIT"
] | null | null | null |
hhcms/apps/account/tests.py
|
youngershen/hhcms
|
748bfcaaf250584b2b7233f271644ca33f8ff80b
|
[
"MIT"
] | null | null | null |
hhcms/apps/account/tests.py
|
youngershen/hhcms
|
748bfcaaf250584b2b7233f271644ca33f8ff80b
|
[
"MIT"
] | 1
|
2018-07-15T05:33:34.000Z
|
2018-07-15T05:33:34.000Z
|
from django.test import TestCase
# Create your tests here.
class Account(TestCase):
def test_register(self):
self.assertTrue(True)
| 14.7
| 32
| 0.714286
|
from django.test import TestCase
class Account(TestCase):
def test_register(self):
self.assertTrue(True)
| true
| true
|
7909c1f371a4e74a39785fb31956682f9faa2871
| 16,126
|
py
|
Python
|
mayan/apps/document_states/tests/test_workflow_transition_views.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 2
|
2021-09-12T19:41:19.000Z
|
2021-09-12T19:41:20.000Z
|
mayan/apps/document_states/tests/test_workflow_transition_views.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 37
|
2021-09-13T01:00:12.000Z
|
2021-10-02T03:54:30.000Z
|
mayan/apps/document_states/tests/test_workflow_transition_views.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 1
|
2021-09-22T13:17:30.000Z
|
2021-09-22T13:17:30.000Z
|
from mayan.apps.documents.tests.base import GenericDocumentViewTestCase
from mayan.apps.testing.tests.base import GenericViewTestCase
from ..events import event_workflow_template_edited
from ..models import WorkflowTransition
from ..permissions import (
permission_workflow_template_edit, permission_workflow_template_view
)
from .literals import TEST_WORKFLOW_TEMPLATE_TRANSITION_LABEL
from .mixins.workflow_instance_mixins import WorkflowInstanceViewTestMixin
from .mixins.workflow_template_mixins import (
WorkflowTemplateTestMixin, WorkflowTemplateViewTestMixin
)
from .mixins.workflow_template_transition_mixins import (
WorkflowTransitionEventViewTestMixin, WorkflowTransitionFieldTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowTransitionViewTestMixin
)
class WorkflowTransitionViewTestCase(
WorkflowTemplateTestMixin, WorkflowTemplateViewTestMixin,
WorkflowTransitionViewTestMixin, GenericViewTestCase
):
def setUp(self):
super().setUp()
self._create_test_workflow_template()
self._create_test_workflow_template_state()
self._create_test_workflow_template_state()
def test_workflow_template_transition_create_view_no_permission(self):
self._clear_events()
response = self._request_test_workflow_template_transition_create_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(WorkflowTransition.objects.count(), 0)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_create_view_with_access(self):
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_create_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(WorkflowTransition.objects.count(), 1)
self.assertEqual(
WorkflowTransition.objects.all()[0].label,
TEST_WORKFLOW_TEMPLATE_TRANSITION_LABEL
)
self.assertEqual(
WorkflowTransition.objects.all()[0].origin_state,
self.test_workflow_template_states[0]
)
self.assertEqual(
WorkflowTransition.objects.all()[0].destination_state,
self.test_workflow_template_states[1]
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object, self.test_workflow_template_transition
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_delete_view_no_permission(self):
self._create_test_workflow_template_transition()
self._clear_events()
response = self._request_test_workflow_template_transition_delete_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_workflow_template_transition in WorkflowTransition.objects.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_delete_view_with_access(self):
self._create_test_workflow_template_transition()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_delete_view()
self.assertEqual(response.status_code, 302)
self.assertFalse(
self.test_workflow_template_transition in WorkflowTransition.objects.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, None)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_edit_view_no_permission(self):
self._create_test_workflow_template_transition()
test_workflow_template_transition_label = self.test_workflow_template_transition.label
self._clear_events()
response = self._request_test_workflow_template_transition_edit_view()
self.assertEqual(response.status_code, 404)
self.test_workflow_template_transition.refresh_from_db()
self.assertEqual(
self.test_workflow_template_transition.label,
test_workflow_template_transition_label
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_edit_view_with_access(self):
self._create_test_workflow_template_transition()
test_workflow_template_transition_label = self.test_workflow_template_transition.label
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_edit_view()
self.assertEqual(response.status_code, 302)
self.test_workflow_template_transition.refresh_from_db()
self.assertNotEqual(
self.test_workflow_template_transition.label,
test_workflow_template_transition_label
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object, self.test_workflow_template_transition
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_list_view_no_permission(self):
self._create_test_workflow_template_transition()
self._clear_events()
response = self._request_test_workflow_template_transition_list_view()
self.assertNotContains(
response=response,
text=self.test_workflow_template_transition.label,
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_list_view_with_access(self):
self._create_test_workflow_template_transition()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_view
)
self._clear_events()
response = self._request_test_workflow_template_transition_list_view()
self.assertContains(
response=response,
text=self.test_workflow_template_transition.label,
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
class WorkflowTransitionEventViewTestCase(
WorkflowInstanceViewTestMixin, WorkflowTemplateTestMixin,
WorkflowTransitionEventViewTestMixin, GenericDocumentViewTestCase
):
auto_upload_test_document = False
def setUp(self):
super().setUp()
self._create_test_workflow_template()
self.test_workflow_template.document_types.add(
self.test_document_type
)
self._create_test_workflow_template_state()
self._create_test_workflow_template_state()
self._create_test_workflow_template_transition()
self._create_test_workflow_template_transition()
self._create_test_document_stub()
self.test_workflow_instance = self.test_document.workflows.first()
def test_workflow_template_transition_event_list_view_no_permission(self):
self._clear_events()
response = self._request_test_workflow_template_transition_event_list_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_event_list_view_with_access(self):
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_event_list_view()
self.assertEqual(response.status_code, 200)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
class WorkflowTransitionFieldViewTestCase(
WorkflowTemplateTestMixin, WorkflowTransitionFieldTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowTransitionViewTestMixin,
GenericViewTestCase
):
def setUp(self):
super().setUp()
self._create_test_workflow_template()
self._create_test_workflow_template_state()
self._create_test_workflow_template_state()
self._create_test_workflow_template_transition()
def test_workflow_template_transition_field_create_view_no_permission(self):
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self._clear_events()
response = self._request_workflow_template_transition_field_create_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_create_view_with_access(self):
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_workflow_template_transition_field_create_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count + 1
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object,
self.test_workflow_template_transition_field
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_field_delete_view_no_permission(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self._clear_events()
response = self._request_workflow_template_transition_field_delete_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_delete_view_with_access(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_workflow_template_transition_field_delete_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count - 1
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, None)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_field_edit_view_no_permission(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_label = self.test_workflow_template_transition_field.label
self._clear_events()
response = self._request_workflow_template_transition_field_edit_view()
self.assertEqual(response.status_code, 404)
self.test_workflow_template_transition_field.refresh_from_db()
self.assertEqual(
workflow_template_transition_field_label,
self.test_workflow_template_transition_field.label
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_edit_view_with_access(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_label = self.test_workflow_template_transition_field.label
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_workflow_template_transition_field_edit_view()
self.assertEqual(response.status_code, 302)
self.test_workflow_template_transition_field.refresh_from_db()
self.assertNotEqual(
workflow_template_transition_field_label,
self.test_workflow_template_transition_field.label
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object,
self.test_workflow_template_transition_field
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_field_list_view_no_permission(self):
self._create_test_workflow_template_transition_field()
self._clear_events()
response = self._request_test_workflow_template_transition_field_list_view()
self.assertNotContains(
response=response,
text=self.test_workflow_template_transition_field.label,
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_list_view_with_access(self):
self._create_test_workflow_template_transition_field()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_field_list_view()
self.assertContains(
response=response,
text=self.test_workflow_template_transition_field.label,
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
| 38.21327
| 105
| 0.708483
|
from mayan.apps.documents.tests.base import GenericDocumentViewTestCase
from mayan.apps.testing.tests.base import GenericViewTestCase
from ..events import event_workflow_template_edited
from ..models import WorkflowTransition
from ..permissions import (
permission_workflow_template_edit, permission_workflow_template_view
)
from .literals import TEST_WORKFLOW_TEMPLATE_TRANSITION_LABEL
from .mixins.workflow_instance_mixins import WorkflowInstanceViewTestMixin
from .mixins.workflow_template_mixins import (
WorkflowTemplateTestMixin, WorkflowTemplateViewTestMixin
)
from .mixins.workflow_template_transition_mixins import (
WorkflowTransitionEventViewTestMixin, WorkflowTransitionFieldTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowTransitionViewTestMixin
)
class WorkflowTransitionViewTestCase(
WorkflowTemplateTestMixin, WorkflowTemplateViewTestMixin,
WorkflowTransitionViewTestMixin, GenericViewTestCase
):
def setUp(self):
super().setUp()
self._create_test_workflow_template()
self._create_test_workflow_template_state()
self._create_test_workflow_template_state()
def test_workflow_template_transition_create_view_no_permission(self):
self._clear_events()
response = self._request_test_workflow_template_transition_create_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(WorkflowTransition.objects.count(), 0)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_create_view_with_access(self):
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_create_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(WorkflowTransition.objects.count(), 1)
self.assertEqual(
WorkflowTransition.objects.all()[0].label,
TEST_WORKFLOW_TEMPLATE_TRANSITION_LABEL
)
self.assertEqual(
WorkflowTransition.objects.all()[0].origin_state,
self.test_workflow_template_states[0]
)
self.assertEqual(
WorkflowTransition.objects.all()[0].destination_state,
self.test_workflow_template_states[1]
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object, self.test_workflow_template_transition
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_delete_view_no_permission(self):
self._create_test_workflow_template_transition()
self._clear_events()
response = self._request_test_workflow_template_transition_delete_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_workflow_template_transition in WorkflowTransition.objects.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_delete_view_with_access(self):
self._create_test_workflow_template_transition()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_delete_view()
self.assertEqual(response.status_code, 302)
self.assertFalse(
self.test_workflow_template_transition in WorkflowTransition.objects.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, None)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_edit_view_no_permission(self):
self._create_test_workflow_template_transition()
test_workflow_template_transition_label = self.test_workflow_template_transition.label
self._clear_events()
response = self._request_test_workflow_template_transition_edit_view()
self.assertEqual(response.status_code, 404)
self.test_workflow_template_transition.refresh_from_db()
self.assertEqual(
self.test_workflow_template_transition.label,
test_workflow_template_transition_label
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_edit_view_with_access(self):
self._create_test_workflow_template_transition()
test_workflow_template_transition_label = self.test_workflow_template_transition.label
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_edit_view()
self.assertEqual(response.status_code, 302)
self.test_workflow_template_transition.refresh_from_db()
self.assertNotEqual(
self.test_workflow_template_transition.label,
test_workflow_template_transition_label
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object, self.test_workflow_template_transition
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_list_view_no_permission(self):
self._create_test_workflow_template_transition()
self._clear_events()
response = self._request_test_workflow_template_transition_list_view()
self.assertNotContains(
response=response,
text=self.test_workflow_template_transition.label,
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_list_view_with_access(self):
self._create_test_workflow_template_transition()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_view
)
self._clear_events()
response = self._request_test_workflow_template_transition_list_view()
self.assertContains(
response=response,
text=self.test_workflow_template_transition.label,
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
class WorkflowTransitionEventViewTestCase(
WorkflowInstanceViewTestMixin, WorkflowTemplateTestMixin,
WorkflowTransitionEventViewTestMixin, GenericDocumentViewTestCase
):
auto_upload_test_document = False
def setUp(self):
super().setUp()
self._create_test_workflow_template()
self.test_workflow_template.document_types.add(
self.test_document_type
)
self._create_test_workflow_template_state()
self._create_test_workflow_template_state()
self._create_test_workflow_template_transition()
self._create_test_workflow_template_transition()
self._create_test_document_stub()
self.test_workflow_instance = self.test_document.workflows.first()
def test_workflow_template_transition_event_list_view_no_permission(self):
self._clear_events()
response = self._request_test_workflow_template_transition_event_list_view()
self.assertEqual(response.status_code, 404)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_event_list_view_with_access(self):
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_event_list_view()
self.assertEqual(response.status_code, 200)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
class WorkflowTransitionFieldViewTestCase(
WorkflowTemplateTestMixin, WorkflowTransitionFieldTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowTransitionViewTestMixin,
GenericViewTestCase
):
def setUp(self):
super().setUp()
self._create_test_workflow_template()
self._create_test_workflow_template_state()
self._create_test_workflow_template_state()
self._create_test_workflow_template_transition()
def test_workflow_template_transition_field_create_view_no_permission(self):
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self._clear_events()
response = self._request_workflow_template_transition_field_create_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_create_view_with_access(self):
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_workflow_template_transition_field_create_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count + 1
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object,
self.test_workflow_template_transition_field
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_field_delete_view_no_permission(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self._clear_events()
response = self._request_workflow_template_transition_field_delete_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_delete_view_with_access(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_count = self.test_workflow_template_transition.fields.count()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_workflow_template_transition_field_delete_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_template_transition.fields.count(),
workflow_template_transition_field_count - 1
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, None)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_field_edit_view_no_permission(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_label = self.test_workflow_template_transition_field.label
self._clear_events()
response = self._request_workflow_template_transition_field_edit_view()
self.assertEqual(response.status_code, 404)
self.test_workflow_template_transition_field.refresh_from_db()
self.assertEqual(
workflow_template_transition_field_label,
self.test_workflow_template_transition_field.label
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_edit_view_with_access(self):
self._create_test_workflow_template_transition_field()
workflow_template_transition_field_label = self.test_workflow_template_transition_field.label
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_workflow_template_transition_field_edit_view()
self.assertEqual(response.status_code, 302)
self.test_workflow_template_transition_field.refresh_from_db()
self.assertNotEqual(
workflow_template_transition_field_label,
self.test_workflow_template_transition_field.label
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(
events[0].action_object,
self.test_workflow_template_transition_field
)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_workflow_template_transition_field_list_view_no_permission(self):
self._create_test_workflow_template_transition_field()
self._clear_events()
response = self._request_test_workflow_template_transition_field_list_view()
self.assertNotContains(
response=response,
text=self.test_workflow_template_transition_field.label,
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_workflow_template_transition_field_list_view_with_access(self):
self._create_test_workflow_template_transition_field()
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_workflow_template_transition_field_list_view()
self.assertContains(
response=response,
text=self.test_workflow_template_transition_field.label,
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
| true
| true
|
7909c29c35d849d451c85ed206bcb40b5bd17018
| 2,156
|
py
|
Python
|
sdb/commands/zfs/internal/__init__.py
|
cneira/sdb
|
3e6c69ab6ab498204aa4a66ea985a61ea1d2f12a
|
[
"Apache-2.0"
] | 47
|
2019-08-21T23:27:41.000Z
|
2021-11-21T03:13:27.000Z
|
sdb/commands/zfs/internal/__init__.py
|
cneira/sdb
|
3e6c69ab6ab498204aa4a66ea985a61ea1d2f12a
|
[
"Apache-2.0"
] | 228
|
2019-08-21T20:13:24.000Z
|
2022-02-01T04:42:20.000Z
|
sdb/commands/zfs/internal/__init__.py
|
cneira/sdb
|
3e6c69ab6ab498204aa4a66ea985a61ea1d2f12a
|
[
"Apache-2.0"
] | 22
|
2019-08-23T20:17:53.000Z
|
2022-02-03T12:44:53.000Z
|
#
# Copyright 2019 Delphix
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=missing-docstring
import os
import drgn
import sdb
def enum_lookup(enum_type_name: str, value: int) -> str:
"""return a string which is the short name of the enum value
(truncating off the common prefix) """
fields = sdb.get_type(enum_type_name).type.enumerators
enum_string: str = fields[value].name
prefix = os.path.commonprefix([f[0] for f in fields])
return enum_string[prefix.rfind("_") + 1:]
def nicenum(num: int, suffix: str = "B") -> str:
for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]:
if num < 1024:
return "{}{}{}".format(int(num), unit, suffix)
num = int(num / 1024)
return "{}{}{}".format(int(num), "Y", suffix)
def P2PHASE(x: drgn.Object, align: int) -> int:
return int(x & (align - 1))
def BF64_DECODE(x: drgn.Object, low: int, length: int) -> int:
return int(P2PHASE(x >> low, 1 << length))
def BF64_GET(x: drgn.Object, low: int, length: int) -> int:
return BF64_DECODE(x, low, length)
def WEIGHT_IS_SPACEBASED(weight: int) -> bool:
return weight == 0 or (BF64_GET(weight, 60, 1) != 0)
def WEIGHT_GET_INDEX(weight: int) -> int:
return BF64_GET((weight), 54, 6)
def WEIGHT_GET_COUNT(weight: int) -> int:
return BF64_GET((weight), 0, 54)
METASLAB_WEIGHT_PRIMARY = int(1 << 63)
METASLAB_WEIGHT_SECONDARY = int(1 << 62)
METASLAB_WEIGHT_CLAIM = int(1 << 61)
METASLAB_WEIGHT_TYPE = int(1 << 60)
METASLAB_ACTIVE_MASK = (METASLAB_WEIGHT_PRIMARY | METASLAB_WEIGHT_SECONDARY |
METASLAB_WEIGHT_CLAIM)
BTREE_LEAF_SIZE = 4096
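A minimal sketch of how the bit-field helpers above compose, using plain Python integers in place of drgn objects (the weight value below is invented purely for illustration):
weight = METASLAB_WEIGHT_PRIMARY | (3 << 54) | 1000  # hypothetical metaslab weight
print(WEIGHT_GET_INDEX(weight))      # 3     -> bits 54..59 hold the index
print(WEIGHT_GET_COUNT(weight))      # 1000  -> bits 0..53 hold the count
print(WEIGHT_IS_SPACEBASED(weight))  # False -> bit 60 (METASLAB_WEIGHT_TYPE) is clear
print(nicenum(3 * 1024 * 1024))      # 3MB   -> human-readable byte count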
| 29.534247
| 77
| 0.674397
|
import os
import drgn
import sdb
def enum_lookup(enum_type_name: str, value: int) -> str:
fields = sdb.get_type(enum_type_name).type.enumerators
enum_string: str = fields[value].name
prefix = os.path.commonprefix([f[0] for f in fields])
return enum_string[prefix.rfind("_") + 1:]
def nicenum(num: int, suffix: str = "B") -> str:
for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]:
if num < 1024:
return "{}{}{}".format(int(num), unit, suffix)
num = int(num / 1024)
return "{}{}{}".format(int(num), "Y", suffix)
def P2PHASE(x: drgn.Object, align: int) -> int:
return int(x & (align - 1))
def BF64_DECODE(x: drgn.Object, low: int, length: int) -> int:
return int(P2PHASE(x >> low, 1 << length))
def BF64_GET(x: drgn.Object, low: int, length: int) -> int:
return BF64_DECODE(x, low, length)
def WEIGHT_IS_SPACEBASED(weight: int) -> bool:
return weight == 0 or (BF64_GET(weight, 60, 1) != 0)
def WEIGHT_GET_INDEX(weight: int) -> int:
return BF64_GET((weight), 54, 6)
def WEIGHT_GET_COUNT(weight: int) -> int:
return BF64_GET((weight), 0, 54)
METASLAB_WEIGHT_PRIMARY = int(1 << 63)
METASLAB_WEIGHT_SECONDARY = int(1 << 62)
METASLAB_WEIGHT_CLAIM = int(1 << 61)
METASLAB_WEIGHT_TYPE = int(1 << 60)
METASLAB_ACTIVE_MASK = (METASLAB_WEIGHT_PRIMARY | METASLAB_WEIGHT_SECONDARY |
METASLAB_WEIGHT_CLAIM)
BTREE_LEAF_SIZE = 4096
| true
| true
|
7909c50b0b308c2834eb338e5909cd9e5b9467d1
| 5,527
|
py
|
Python
|
07-AdventOfCode2021/18/day-18.py
|
StrangeGirlMurph/CodingProjects
|
8400a610c0a54a2721a73824df7aab4e92ec891d
|
[
"MIT"
] | null | null | null |
07-AdventOfCode2021/18/day-18.py
|
StrangeGirlMurph/CodingProjects
|
8400a610c0a54a2721a73824df7aab4e92ec891d
|
[
"MIT"
] | null | null | null |
07-AdventOfCode2021/18/day-18.py
|
StrangeGirlMurph/CodingProjects
|
8400a610c0a54a2721a73824df7aab4e92ec891d
|
[
"MIT"
] | null | null | null |
import ast
import math
import copy
with open('input.txt', 'r') as f:
lines = f.readlines()
lines = [line[:-1] for line in lines]
lines = [ast.literal_eval(line) for line in lines]
def reduce(num):
explodeCriteriaMet = True
splitCriteriaMet = True
while explodeCriteriaMet or splitCriteriaMet:
explodeCriteriaMet = False
splitCriteriaMet = False
# check for pair nested inside four pairs
for idx1, val1 in enumerate(num):
if explodeCriteriaMet:
break
if type(val1) == list:
for idx2, val2 in enumerate(val1):
if explodeCriteriaMet:
break
if type(val2) == list:
for idx3, val3 in enumerate(val2):
if explodeCriteriaMet:
break
if type(val3) == list:
for idx4, val4 in enumerate(val3):
if type(val4) == list:
explodeCriteriaMet = True
num = explode(num, val4, [idx1, idx2, idx3, idx4])
break
if explodeCriteriaMet:
continue
# regular number is 10 or greater
for idx1, val1 in enumerate(num):
if splitCriteriaMet:
break
if type(val1) == list:
for idx2, val2 in enumerate(val1):
if splitCriteriaMet:
break
if type(val2) == list:
for idx3, val3 in enumerate(val2):
if splitCriteriaMet:
break
if type(val3) == list:
for idx4, val4 in enumerate(val3):
if val4 >= 10:
splitCriteriaMet = True
num = split(num, val4, [idx1, idx2, idx3, idx4])
break
elif val3 >= 10:
splitCriteriaMet = True
num = split(num, val3, [idx1, idx2, idx3])
break
elif val2 >= 10:
splitCriteriaMet = True
num = split(num, val2, [idx1, idx2])
break
elif val1 >= 10:
splitCriteriaMet = True
num = split(num, val1, [idx1])
break
return num
def split(num, value, idx):
# print("SPLIT")
# print("IDX:", idx)
# print("VAL:", value)
pair = [math.floor(value/2), math.ceil(value/2)]
if len(idx) == 4:
num[idx[0]][idx[1]][idx[2]][idx[3]] = pair
elif len(idx) == 3:
num[idx[0]][idx[1]][idx[2]] = pair
elif len(idx) == 2:
num[idx[0]][idx[1]] = pair
elif len(idx) == 1:
num[idx[0]] = pair
return num
def getValueAtIndex(num, idx):
for i in idx:
num = num[i]
return num
def changeValueAtIndex(num, idx, value):
if len(idx) == 5:
num[idx[0]][idx[1]][idx[2]][idx[3]][idx[4]] += value
elif len(idx) == 4:
num[idx[0]][idx[1]][idx[2]][idx[3]] += value
elif len(idx) == 3:
num[idx[0]][idx[1]][idx[2]] += value
elif len(idx) == 2:
num[idx[0]][idx[1]] += value
elif len(idx) == 1:
num[idx[0]] += value
return num
def explode(num, item, idx):
# store values to add:
left = item[0]
right = item[1]
# the exploding pair is replaced with the regular number 0
num[idx[0]][idx[1]][idx[2]][idx[3]] = 0
# adding the values to their neighbors
# left:
for index, indexValue in enumerate(idx[:: -1]):
if indexValue != 0: # there is a neighbour to the left when the indexValue is 1
idxLeft = idx[: 4-index]
idxLeft[-1] = 0
while type(getValueAtIndex(num, idxLeft)) == list:
idxLeft.append(1)
num = changeValueAtIndex(num, idxLeft, left)
break
# right
for index, indexValue in enumerate(idx[:: -1]):
if indexValue != 1: # there is a neighbour to the right when the indexValue is 0
idxRight = idx[: 4-index]
idxRight[-1] = 1
while type(getValueAtIndex(num, idxRight)) == list:
idxRight.append(0)
num = changeValueAtIndex(num, idxRight, right)
break
return num
def calculateSum(num1, num2):
sum = [copy.deepcopy(num1), copy.deepcopy(num2)]
return reduce(sum)
def calculateMagnitude(num):
left = num[0]
right = num[1]
if type(left) == list:
left = calculateMagnitude(left)
if type(right) == list:
right = calculateMagnitude(right)
return (3*left + 2*right)
# part 1
sum = lines[0]
for line in lines[1:]:
sum = calculateSum(sum, line)
print("What is the magnitude of the final sum?", calculateMagnitude(sum))
# part 2
largest = 0
for x in lines:
for y in lines:
if x != y:
magnitude = calculateMagnitude(calculateSum(x, y))
if magnitude > largest:
largest = magnitude
print("What is the largest magnitude of any sum of two different snailfish numbers from the homework assignment?", largest)
| 29.089474
| 123
| 0.486159
|
import ast
import math
import copy
with open('input.txt', 'r') as f:
lines = f.readlines()
lines = [line[:-1] for line in lines]
lines = [ast.literal_eval(line) for line in lines]
def reduce(num):
explodeCriteriaMet = True
splitCriteriaMet = True
while explodeCriteriaMet or splitCriteriaMet:
explodeCriteriaMet = False
splitCriteriaMet = False
for idx1, val1 in enumerate(num):
if explodeCriteriaMet:
break
if type(val1) == list:
for idx2, val2 in enumerate(val1):
if explodeCriteriaMet:
break
if type(val2) == list:
for idx3, val3 in enumerate(val2):
if explodeCriteriaMet:
break
if type(val3) == list:
for idx4, val4 in enumerate(val3):
if type(val4) == list:
explodeCriteriaMet = True
num = explode(num, val4, [idx1, idx2, idx3, idx4])
break
if explodeCriteriaMet:
continue
for idx1, val1 in enumerate(num):
if splitCriteriaMet:
break
if type(val1) == list:
for idx2, val2 in enumerate(val1):
if splitCriteriaMet:
break
if type(val2) == list:
for idx3, val3 in enumerate(val2):
if splitCriteriaMet:
break
if type(val3) == list:
for idx4, val4 in enumerate(val3):
if val4 >= 10:
splitCriteriaMet = True
num = split(num, val4, [idx1, idx2, idx3, idx4])
break
elif val3 >= 10:
splitCriteriaMet = True
num = split(num, val3, [idx1, idx2, idx3])
break
elif val2 >= 10:
splitCriteriaMet = True
num = split(num, val2, [idx1, idx2])
break
elif val1 >= 10:
splitCriteriaMet = True
num = split(num, val1, [idx1])
break
return num
def split(num, value, idx):
pair = [math.floor(value/2), math.ceil(value/2)]
if len(idx) == 4:
num[idx[0]][idx[1]][idx[2]][idx[3]] = pair
elif len(idx) == 3:
num[idx[0]][idx[1]][idx[2]] = pair
elif len(idx) == 2:
num[idx[0]][idx[1]] = pair
elif len(idx) == 1:
num[idx[0]] = pair
return num
def getValueAtIndex(num, idx):
for i in idx:
num = num[i]
return num
def changeValueAtIndex(num, idx, value):
if len(idx) == 5:
num[idx[0]][idx[1]][idx[2]][idx[3]][idx[4]] += value
elif len(idx) == 4:
num[idx[0]][idx[1]][idx[2]][idx[3]] += value
elif len(idx) == 3:
num[idx[0]][idx[1]][idx[2]] += value
elif len(idx) == 2:
num[idx[0]][idx[1]] += value
elif len(idx) == 1:
num[idx[0]] += value
return num
def explode(num, item, idx):
left = item[0]
right = item[1]
num[idx[0]][idx[1]][idx[2]][idx[3]] = 0
for index, indexValue in enumerate(idx[:: -1]):
if indexValue != 0:
idxLeft = idx[: 4-index]
idxLeft[-1] = 0
while type(getValueAtIndex(num, idxLeft)) == list:
idxLeft.append(1)
num = changeValueAtIndex(num, idxLeft, left)
break
for index, indexValue in enumerate(idx[:: -1]):
if indexValue != 1:
idxRight = idx[: 4-index]
idxRight[-1] = 1
while type(getValueAtIndex(num, idxRight)) == list:
idxRight.append(0)
num = changeValueAtIndex(num, idxRight, right)
break
return num
def calculateSum(num1, num2):
sum = [copy.deepcopy(num1), copy.deepcopy(num2)]
return reduce(sum)
def calculateMagnitude(num):
left = num[0]
right = num[1]
if type(left) == list:
left = calculateMagnitude(left)
if type(right) == list:
right = calculateMagnitude(right)
return (3*left + 2*right)
sum = lines[0]
for line in lines[1:]:
sum = calculateSum(sum, line)
print("What is the magnitude of the final sum?", calculateMagnitude(sum))
largest = 0
for x in lines:
for y in lines:
if x != y:
magnitude = calculateMagnitude(calculateSum(x, y))
if magnitude > largest:
largest = magnitude
print("What is the largest magnitude of any sum of two different snailfish numbers from the homework assignment?", largest)
| true
| true
|
7909c52ea6bc126a74a6e80ca330265aae3cdf15
| 4,281
|
py
|
Python
|
braindecode/samplers/ssl.py
|
lapaill/braindecode
|
d5d6e34baef1c8df092e77d1f3e757b53d0e69ea
|
[
"BSD-3-Clause"
] | 301
|
2020-01-15T16:40:59.000Z
|
2022-03-31T05:28:00.000Z
|
braindecode/samplers/ssl.py
|
Mrswolf/braindecode
|
d1781c465239c45eccbf5f92e7d7a627ff985e16
|
[
"BSD-3-Clause"
] | 325
|
2020-01-12T21:36:55.000Z
|
2022-03-21T11:59:01.000Z
|
braindecode/samplers/ssl.py
|
Mrswolf/braindecode
|
d1781c465239c45eccbf5f92e7d7a627ff985e16
|
[
"BSD-3-Clause"
] | 98
|
2020-01-12T21:22:42.000Z
|
2022-03-24T14:36:08.000Z
|
"""
Self-supervised learning samplers.
"""
# Authors: Hubert Banville <hubert.jbanville@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
from . import RecordingSampler
class RelativePositioningSampler(RecordingSampler):
"""Sample examples for the relative positioning task from [Banville2020]_.
Sample examples as tuples of two window indices, with a label indicating
whether the windows are close or far, as defined by tau_pos and tau_neg.
Parameters
----------
metadata : pd.DataFrame
See RecordingSampler.
tau_pos : int
Size of the positive context, in samples. A positive pair contains two
windows x1 and x2 which are separated by at most `tau_pos` samples.
tau_neg : int
Size of the negative context, in samples. A negative pair contains two
windows x1 and x2 which are separated by at least `tau_neg` samples and
at most `tau_max` samples. Ignored if `same_rec_neg` is False.
n_examples : int
Number of pairs to extract.
tau_max : int | None
See `tau_neg`.
same_rec_neg : bool
If True, sample negative pairs from within the same recording. If
False, sample negative pairs from two different recordings.
random_state : None | np.RandomState | int
Random state.
References
----------
.. [Banville2020] Banville, H., Chehab, O., Hyvärinen, A., Engemann, D. A.,
& Gramfort, A. (2020). Uncovering the structure of clinical EEG
signals with self-supervised learning.
arXiv preprint arXiv:2007.16104.
"""
def __init__(self, metadata, tau_pos, tau_neg, n_examples, tau_max=None,
same_rec_neg=True, random_state=None):
super().__init__(metadata, random_state=random_state)
self.tau_pos = tau_pos
self.tau_neg = tau_neg
self.tau_max = np.inf if tau_max is None else tau_max
self.n_examples = n_examples
self.same_rec_neg = same_rec_neg
if not same_rec_neg and self.n_recordings < 2:
raise ValueError('More than one recording must be available when '
'using across-recording negative sampling.')
def _sample_pair(self):
"""Sample a pair of two windows.
"""
# Sample first window
win_ind1, rec_ind1 = self.sample_window()
ts1 = self.metadata.iloc[win_ind1]['i_start_in_trial']
ts = self.info.iloc[rec_ind1]['i_start_in_trial']
# Decide whether the pair will be positive or negative
pair_type = self.rng.binomial(1, 0.5)
win_ind2 = None
if pair_type == 0: # Negative example
if self.same_rec_neg:
mask = (
((ts <= ts1 - self.tau_neg) & (ts >= ts1 - self.tau_max)) |
((ts >= ts1 + self.tau_neg) & (ts <= ts1 + self.tau_max))
)
else:
rec_ind2 = rec_ind1
while rec_ind2 == rec_ind1:
win_ind2, rec_ind2 = self.sample_window()
elif pair_type == 1: # Positive example
mask = (ts >= ts1 - self.tau_pos) & (ts <= ts1 + self.tau_pos)
if win_ind2 is None:
mask[ts == ts1] = False # same window cannot be sampled twice
if sum(mask) == 0:
raise NotImplementedError
win_ind2 = self.rng.choice(self.info.iloc[rec_ind1]['index'][mask])
return win_ind1, win_ind2, float(pair_type)
def presample(self):
"""Presample examples.
Once presampled, the examples are the same from one epoch to another.
"""
self.examples = [self._sample_pair() for _ in range(self.n_examples)]
return self
def __iter__(self):
"""Iterate over pairs.
Yields
------
(int): position of the first window in the dataset.
(int): position of the second window in the dataset.
(float): 0 for negative pair, 1 for positive pair.
"""
for i in range(self.n_examples):
if hasattr(self, 'examples'):
yield self.examples[i]
else:
yield self._sample_pair()
def __len__(self):
return self.n_examples
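To make the positive/negative context definition from the docstring concrete, here is a self-contained sketch of the masking logic _sample_pair applies; the window start times are invented for illustration:
import numpy as np
ts = np.array([0, 500, 1000, 1500, 4000, 8000])  # hypothetical window starts, in samples
ts1, tau_pos, tau_neg, tau_max = 1000, 1000, 3000, np.inf
pos_mask = (ts >= ts1 - tau_pos) & (ts <= ts1 + tau_pos)
neg_mask = (((ts <= ts1 - tau_neg) & (ts >= ts1 - tau_max)) |
            ((ts >= ts1 + tau_neg) & (ts <= ts1 + tau_max)))
print(ts[pos_mask])  # [   0  500 1000 1500] -> candidates for a positive ("close") pair
print(ts[neg_mask])  # [4000 8000] -> candidates for a negative ("far") pair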
| 35.97479
| 79
| 0.605466
|
import numpy as np
from . import RecordingSampler
class RelativePositioningSampler(RecordingSampler):
def __init__(self, metadata, tau_pos, tau_neg, n_examples, tau_max=None,
same_rec_neg=True, random_state=None):
super().__init__(metadata, random_state=random_state)
self.tau_pos = tau_pos
self.tau_neg = tau_neg
self.tau_max = np.inf if tau_max is None else tau_max
self.n_examples = n_examples
self.same_rec_neg = same_rec_neg
if not same_rec_neg and self.n_recordings < 2:
raise ValueError('More than one recording must be available when '
'using across-recording negative sampling.')
def _sample_pair(self):
win_ind1, rec_ind1 = self.sample_window()
ts1 = self.metadata.iloc[win_ind1]['i_start_in_trial']
ts = self.info.iloc[rec_ind1]['i_start_in_trial']
pair_type = self.rng.binomial(1, 0.5)
win_ind2 = None
if pair_type == 0:
if self.same_rec_neg:
mask = (
((ts <= ts1 - self.tau_neg) & (ts >= ts1 - self.tau_max)) |
((ts >= ts1 + self.tau_neg) & (ts <= ts1 + self.tau_max))
)
else:
rec_ind2 = rec_ind1
while rec_ind2 == rec_ind1:
win_ind2, rec_ind2 = self.sample_window()
elif pair_type == 1:
mask = (ts >= ts1 - self.tau_pos) & (ts <= ts1 + self.tau_pos)
if win_ind2 is None:
mask[ts == ts1] = False
if sum(mask) == 0:
raise NotImplementedError
win_ind2 = self.rng.choice(self.info.iloc[rec_ind1]['index'][mask])
return win_ind1, win_ind2, float(pair_type)
def presample(self):
self.examples = [self._sample_pair() for _ in range(self.n_examples)]
return self
def __iter__(self):
for i in range(self.n_examples):
if hasattr(self, 'examples'):
yield self.examples[i]
else:
yield self._sample_pair()
def __len__(self):
return self.n_examples
| true
| true
|
7909c5394eceb5543fd39451738e64db09f1e5a5
| 6,930
|
py
|
Python
|
data-clean/clean.py
|
willidert/aux_est_micro
|
6882794efa609f4171d86a0de1599bb97ac1ff2b
|
[
"MIT"
] | 1
|
2021-10-02T13:11:01.000Z
|
2021-10-02T13:11:01.000Z
|
data-clean/clean.py
|
willidert/aux_est_micro
|
6882794efa609f4171d86a0de1599bb97ac1ff2b
|
[
"MIT"
] | 4
|
2021-06-25T22:31:23.000Z
|
2021-07-19T03:54:02.000Z
|
data-clean/clean.py
|
willidert/auxilio-estudantil-microservice
|
6882794efa609f4171d86a0de1599bb97ac1ff2b
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
def gloriosafuncao(df):
df = pd.DataFrame([df])
numerico = [
11, "email", 1, 2, 3, 7,
8, 9, 12, 10, 13, 14,
15, 16, 17, 18, 19, 20, 21, 4, 5, 6
]
df.columns = numerico
labels = [
'email',
'PPI',
'ProgramasSociais',
'ModalidadeEnsino',
# 'Beneficiario',
'QtdDependentes',
'EscolaridadePai',
'EscolaridadeMae',
'RendaPerCapita',
'AtividadeRemunerada',
'SituacaoFinanceira',
'QtdResponsaveisFinanceiros',
'CondicaoTrabalho',
'CondicaoRenda',
'MoraCidadeCampus',
'CondMoradia',
'TipoTransporte',
'NConducoes',
'DoencaCronica',
'Medicacao',
'Deficiencia',
'FDoencaCronica',
'FMedicacao',
]
nomes_ordenados = [df.columns.to_list()[0]] + df.columns.to_list()[2:]
nomes_ordenados.sort()
nomes_ordenados = [df.columns.to_list()[1]] + nomes_ordenados
df = df[nomes_ordenados]
df.columns = labels
condicoes = [
'Desempregado',
'Trabalhador Informal',
'Trabalhador Autônomo',
'Aposentado',
'Empregado CLT',
# 'Pescador/agricultor familiar',
'Beneficiário INSS',
'Funcionário Público'
]
rotulos = [
'Desempregado',
'Informal',
'Autonomo',
'Aposentado',
'CLT',
# 'PescAgriF',
'INSS',
'FuncionarioPublico'
]
for rotulo, cond in zip(rotulos, condicoes):
df[rotulo] = df['CondicaoTrabalho'].map(
lambda x: 'sim' if cond in x else 'nao')
df['MoraCidadeCampus'] = df['MoraCidadeCampus'].apply(
lambda x: x.split(',')[0].lower())
df['TipoTransporte'] = df['TipoTransporte'].apply(
lambda x: ''.join(x.split()[1]).capitalize())
df['AteDois'] = df['QtdResponsaveisFinanceiros']\
.apply(lambda x: 'sim' if ' '
.join(x.split()[:-1]) == '1' or ' '
.join(x.split()[:-1]) == '2' else 'nao')
df[['TipoTransporte', 'QtdResponsaveisFinanceiros',
'MoraCidadeCampus', 'AteDois']].head()
binario = [
'PPI',
'ProgramasSociais',
# 'Beneficiario',
'AtividadeRemunerada',
'MoraCidadeCampus',
'DoencaCronica',
'Medicacao',
'Deficiencia',
'FDoencaCronica',
'FMedicacao',
'AteDois',
'Desempregado',
'Informal',
'Autonomo',
'Aposentado',
'CLT',
# 'PescAgriF',
'INSS',
'FuncionarioPublico'
]
df_binario = pd.DataFrame()
for elemento in binario:
df_binario[elemento] = df[elemento].replace(
['sim', 'nao'], [1, 0]).astype(int)
modalidade_map = {
'Graduação': 1,
'Médio Integrado EJA': 2,
'Médio Técnico Integrado': 4,
'Técnico Subsequente': 3,
}
transporte_map = {
'Pé': 1,
'Próprio': 1,
'Público': 2,
'Alternativo': 3
}
escolaridade_map = {
'Desconheço': 4,
'Não se aplica': 4,
'Sem escolaridade': 4,
'Ensino fundamental': 3,
'Ensino médio': 2,
'Ensino superior': 1,
}
moradia_map = {
'Própria': 1,
'Cedida': 2,
'Financiada': 3,
'Alugada': 4,
'Outros': 4
}
categorias = df['RendaPerCapita'].astype(
'category').cat.categories.tolist()
valores = [3, 2, 9, 8, 7, 6, 5, 4, 10, 1]
renda_percapita_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['SituacaoFinanceira'].astype(
'category').cat.categories.tolist()
valores = [4, 2, 2, 1, 4, 5, 1]
situacao_fin_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['QtdDependentes'].astype(
'category').cat.categories.tolist()
valores = [2, 3, 4, 5, 1]
dependentes_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['NConducoes'].astype('category').cat.categories.tolist()
valores = [2, 3, 1]
conducoes_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['CondicaoRenda'].astype('category').cat.categories.tolist()
valores = [1, 2, 3]
cond_renda_map = {k: v for k, v in zip(categorias, valores)}
labels = [
'CondMoradia',
'TipoTransporte',
'RendaPerCapita',
'SituacaoFinanceira',
'NConducoes',
'CondicaoRenda',
"ModalidadeEnsino",
"EscolaridadeMae",
"EscolaridadePai",
"QtdDependentes"
]
label_encode = df[labels].copy()
label_encode['CondMoradia'].replace(moradia_map, inplace=True)
label_encode['TipoTransporte'].replace(transporte_map, inplace=True)
label_encode['EscolaridadePai'].replace(escolaridade_map, inplace=True)
label_encode['EscolaridadeMae'].replace(escolaridade_map, inplace=True)
label_encode['SituacaoFinanceira'].replace(situacao_fin_map, inplace=True)
label_encode['RendaPerCapita'].replace(renda_percapita_map, inplace=True)
label_encode['QtdDependentes'].replace(dependentes_map, inplace=True)
label_encode['NConducoes'].replace(conducoes_map, inplace=True)
label_encode['CondicaoRenda'].replace(cond_renda_map, inplace=True)
label_encode['ModalidadeEnsino'].replace(modalidade_map, inplace=True)
qtd = pd.DataFrame()
qtd_res = ['ResFin_1', 'ResFin_2', 'ResFin_3', 'ResFin_4ouMais']
opcs = [
'1 membro',
'2 membros',
'3 membros',
'4 ou mais membros'
]
df['QtdResponsaveisFinanceiros'].replace(opcs, qtd_res)
for iqtd in qtd_res:
qtd[iqtd] = df['QtdResponsaveisFinanceiros'].map(
lambda x: int(1) if iqtd in x else int(0))
dados_limpos = pd.concat([df_binario, label_encode, qtd], axis=1)
ordem = ['PPI',
'ProgramasSociais',
'AtividadeRemunerada',
'MoraCidadeCampus',
'DoencaCronica',
'Medicacao',
'Deficiencia',
'FDoencaCronica',
'FMedicacao',
'AteDois',
'Desempregado',
'Informal',
'Autonomo',
'Aposentado',
'CLT',
'INSS',
'FuncionarioPublico',
'ModalidadeEnsino',
'CondMoradia',
'TipoTransporte',
'EscolaridadeMae',
'EscolaridadePai',
'RendaPerCapita',
'SituacaoFinanceira',
'QtdDependentes',
'NConducoes',
'CondicaoRenda',
'ResFin_1',
'ResFin_2',
'ResFin_3',
'ResFin_4ouMais']
dados_limpos = dados_limpos[ordem]
dados_limpos['email'] = df['email']
return np.array(dados_limpos.loc[0]).reshape(1, -1)
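The yes/no columns above are binarized with a plain pandas replace; a tiny illustration of that step on a toy column (not real form data):
import pandas as pd
col = pd.Series(['sim', 'nao', 'sim'])
print(col.replace(['sim', 'nao'], [1, 0]).astype(int).tolist())  # [1, 0, 1]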
| 27.72
| 79
| 0.553535
|
import pandas as pd
import numpy as np
def gloriosafuncao(df):
df = pd.DataFrame([df])
numerico = [
11, "email", 1, 2, 3, 7,
8, 9, 12, 10, 13, 14,
15, 16, 17, 18, 19, 20, 21, 4, 5, 6
]
df.columns = numerico
labels = [
'email',
'PPI',
'ProgramasSociais',
'ModalidadeEnsino',
'QtdDependentes',
'EscolaridadePai',
'EscolaridadeMae',
'RendaPerCapita',
'AtividadeRemunerada',
'SituacaoFinanceira',
'QtdResponsaveisFinanceiros',
'CondicaoTrabalho',
'CondicaoRenda',
'MoraCidadeCampus',
'CondMoradia',
'TipoTransporte',
'NConducoes',
'DoencaCronica',
'Medicacao',
'Deficiencia',
'FDoencaCronica',
'FMedicacao',
]
nomes_ordenados = [df.columns.to_list()[0]] + df.columns.to_list()[2:]
nomes_ordenados.sort()
nomes_ordenados = [df.columns.to_list()[1]] + nomes_ordenados
df = df[nomes_ordenados]
df.columns = labels
condicoes = [
'Desempregado',
'Trabalhador Informal',
'Trabalhador Autônomo',
'Aposentado',
'Empregado CLT',
'Beneficiário INSS',
'Funcionário Público'
]
rotulos = [
'Desempregado',
'Informal',
'Autonomo',
'Aposentado',
'CLT',
'INSS',
'FuncionarioPublico'
]
for rotulo, cond in zip(rotulos, condicoes):
df[rotulo] = df['CondicaoTrabalho'].map(
lambda x: 'sim' if cond in x else 'nao')
df['MoraCidadeCampus'] = df['MoraCidadeCampus'].apply(
lambda x: x.split(',')[0].lower())
df['TipoTransporte'] = df['TipoTransporte'].apply(
lambda x: ''.join(x.split()[1]).capitalize())
df['AteDois'] = df['QtdResponsaveisFinanceiros']\
.apply(lambda x: 'sim' if ' '
.join(x.split()[:-1]) == '1' or ' '
.join(x.split()[:-1]) == '2' else 'nao')
df[['TipoTransporte', 'QtdResponsaveisFinanceiros',
'MoraCidadeCampus', 'AteDois']].head()
binario = [
'PPI',
'ProgramasSociais',
'AtividadeRemunerada',
'MoraCidadeCampus',
'DoencaCronica',
'Medicacao',
'Deficiencia',
'FDoencaCronica',
'FMedicacao',
'AteDois',
'Desempregado',
'Informal',
'Autonomo',
'Aposentado',
'CLT',
'INSS',
'FuncionarioPublico'
]
df_binario = pd.DataFrame()
for elemento in binario:
df_binario[elemento] = df[elemento].replace(
['sim', 'nao'], [1, 0]).astype(int)
modalidade_map = {
'Graduação': 1,
'Médio Integrado EJA': 2,
'Médio Técnico Integrado': 4,
'Técnico Subsequente': 3,
}
transporte_map = {
'Pé': 1,
'Próprio': 1,
'Público': 2,
'Alternativo': 3
}
escolaridade_map = {
'Desconheço': 4,
'Não se aplica': 4,
'Sem escolaridade': 4,
'Ensino fundamental': 3,
'Ensino médio': 2,
'Ensino superior': 1,
}
moradia_map = {
'Própria': 1,
'Cedida': 2,
'Financiada': 3,
'Alugada': 4,
'Outros': 4
}
categorias = df['RendaPerCapita'].astype(
'category').cat.categories.tolist()
valores = [3, 2, 9, 8, 7, 6, 5, 4, 10, 1]
renda_percapita_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['SituacaoFinanceira'].astype(
'category').cat.categories.tolist()
valores = [4, 2, 2, 1, 4, 5, 1]
situacao_fin_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['QtdDependentes'].astype(
'category').cat.categories.tolist()
valores = [2, 3, 4, 5, 1]
dependentes_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['NConducoes'].astype('category').cat.categories.tolist()
valores = [2, 3, 1]
conducoes_map = {k: v for k, v in zip(categorias, valores)}
categorias = df['CondicaoRenda'].astype('category').cat.categories.tolist()
valores = [1, 2, 3]
cond_renda_map = {k: v for k, v in zip(categorias, valores)}
labels = [
'CondMoradia',
'TipoTransporte',
'RendaPerCapita',
'SituacaoFinanceira',
'NConducoes',
'CondicaoRenda',
"ModalidadeEnsino",
"EscolaridadeMae",
"EscolaridadePai",
"QtdDependentes"
]
label_encode = df[labels].copy()
label_encode['CondMoradia'].replace(moradia_map, inplace=True)
label_encode['TipoTransporte'].replace(transporte_map, inplace=True)
label_encode['EscolaridadePai'].replace(escolaridade_map, inplace=True)
label_encode['EscolaridadeMae'].replace(escolaridade_map, inplace=True)
label_encode['SituacaoFinanceira'].replace(situacao_fin_map, inplace=True)
label_encode['RendaPerCapita'].replace(renda_percapita_map, inplace=True)
label_encode['QtdDependentes'].replace(dependentes_map, inplace=True)
label_encode['NConducoes'].replace(conducoes_map, inplace=True)
label_encode['CondicaoRenda'].replace(cond_renda_map, inplace=True)
label_encode['ModalidadeEnsino'].replace(modalidade_map, inplace=True)
qtd = pd.DataFrame()
qtd_res = ['ResFin_1', 'ResFin_2', 'ResFin_3', 'ResFin_4ouMais']
opcs = [
'1 membro',
'2 membros',
'3 membros',
'4 ou mais membros'
]
df['QtdResponsaveisFinanceiros'].replace(opcs, qtd_res)
for iqtd in qtd_res:
qtd[iqtd] = df['QtdResponsaveisFinanceiros'].map(
lambda x: int(1) if iqtd in x else int(0))
dados_limpos = pd.concat([df_binario, label_encode, qtd], axis=1)
ordem = ['PPI',
'ProgramasSociais',
'AtividadeRemunerada',
'MoraCidadeCampus',
'DoencaCronica',
'Medicacao',
'Deficiencia',
'FDoencaCronica',
'FMedicacao',
'AteDois',
'Desempregado',
'Informal',
'Autonomo',
'Aposentado',
'CLT',
'INSS',
'FuncionarioPublico',
'ModalidadeEnsino',
'CondMoradia',
'TipoTransporte',
'EscolaridadeMae',
'EscolaridadePai',
'RendaPerCapita',
'SituacaoFinanceira',
'QtdDependentes',
'NConducoes',
'CondicaoRenda',
'ResFin_1',
'ResFin_2',
'ResFin_3',
'ResFin_4ouMais']
dados_limpos = dados_limpos[ordem]
dados_limpos['email'] = df['email']
return np.array(dados_limpos.loc[0]).reshape(1, -1)
| true
| true
|
7909c6ba4b4300f7b64c63e2349a0c4da2c0d76f
| 4,371
|
py
|
Python
|
cride/users/serializers/users.py
|
eocode/Rider-App
|
9629f76f97c605a3f40486a4d93707afbaf22563
|
[
"MIT"
] | 9
|
2020-05-10T05:56:40.000Z
|
2022-01-24T08:49:27.000Z
|
cride/users/serializers/users.py
|
eocode/Rider-App
|
9629f76f97c605a3f40486a4d93707afbaf22563
|
[
"MIT"
] | 6
|
2020-04-10T20:26:38.000Z
|
2021-06-10T20:02:11.000Z
|
cride/users/serializers/users.py
|
eocode/Rider-App
|
9629f76f97c605a3f40486a4d93707afbaf22563
|
[
"MIT"
] | 5
|
2020-04-24T11:38:25.000Z
|
2021-01-02T09:41:04.000Z
|
"""Users serializers"""
# Django
from django.conf import settings
from django.contrib.auth import password_validation, authenticate
from django.core.validators import RegexValidator
# Serializers
from cride.users.serializers.profiles import ProfileModelSerializer
# Django REST Framework
from rest_framework import serializers
from rest_framework.authtoken.models import Token
from rest_framework.validators import UniqueValidator
# Models
from cride.users.models import User, Profile
# Task
from cride.taskapp.task import send_confirmation_email
# Utilities
import jwt
class UserModelSerializer(serializers.ModelSerializer):
"""User model serializer"""
profile = ProfileModelSerializer(read_only=True)
class Meta:
"""Meta class."""
model = User
fields = (
'username',
'first_name',
'last_name',
'email',
'phone_number',
'profile'
)
class UserSignUpSerializer(serializers.Serializer):
"""User sign up serializer.
Handle sign up data validation and user/profile creation.
"""
email = serializers.EmailField(
validators=[UniqueValidator(queryset=User.objects.all())]
)
username = serializers.CharField(
min_length=4,
max_length=20,
validators=[UniqueValidator(queryset=User.objects.all())]
)
# Phone number
phone_regex = RegexValidator(
regex=r'\+?1?\d{9,15}$',
message="Phone number must be entered in the format: +999999999. Up to 15 digits allowed."
)
phone_number = serializers.CharField(validators=[phone_regex])
# Password
password = serializers.CharField(min_length=8, max_length=64)
password_confirmation = serializers.CharField(min_length=8, max_length=64)
# Name
first_name = serializers.CharField(min_length=2, max_length=30)
last_name = serializers.CharField(min_length=2, max_length=30)
def validate(self, data):
"""Verify passwords match."""
passwd = data['password']
passwd_conf = data['password_confirmation']
if passwd != passwd_conf:
raise serializers.ValidationError("Passwords don't match.")
password_validation.validate_password(passwd)
return data
def create(self, data):
"""Handle user and profile creation."""
data.pop('password_confirmation')
user = User.objects.create_user(**data, is_verified=False, is_client=True)
profile = Profile.objects.create(user=user)
send_confirmation_email.delay(user_pk=user.pk)
return user
class UserLoginSerializer(serializers.Serializer):
"""User Login serializer
Handle the login request data.
"""
email = serializers.EmailField()
password = serializers.CharField(min_length=8, max_length=64)
def validate(self, data):
"""Check credentials"""
user = authenticate(username=data['email'], password=data['password'])
if not user:
raise serializers.ValidationError('Invalid credentials')
if not user.is_verified:
raise serializers.ValidationError('Account is not active yet')
self.context['user'] = user
return data
def create(self, data):
"""Generate or retrieve new token"""
token, created = Token.objects.get_or_create(user=self.context['user'])
return self.context['user'], token.key
class AccountVerificationSerializer(serializers.Serializer):
"""Account verification serializer"""
token = serializers.CharField()
def validate_token(self, data):
"""Verify token is valid"""
try:
payload = jwt.decode(data, settings.SECRET_KEY, algorithms=['HS256'])
except jwt.ExpiredSignatureError:
raise serializers.ValidationError('Verification link has expired.')
except jwt.exceptions.PyJWTError:
raise serializers.ValidationError('Invalid token')
if payload['type'] != 'email_confirmation':
raise serializers.ValidationError('Invalid token')
self.context['payload'] = payload
return data
def save(self):
"""Update user's verified status"""
payload = self.context['payload']
user = User.objects.get(username=payload['user'])
user.is_verified = True
user.save()
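As a quick note on the phone validation above, the RegexValidator accepts optionally prefixed numbers of 9 to 15 digits; a standalone check of the same pattern (the example numbers are made up):
import re
pattern = re.compile(r'\+?1?\d{9,15}$')  # same regex as phone_regex above
print(bool(pattern.search('+59891234567')))  # True  - accepted format
print(bool(pattern.search('phone-123')))     # False - rejected by the validator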
| 30.566434
| 98
| 0.672615
|
from django.conf import settings
from django.contrib.auth import password_validation, authenticate
from django.core.validators import RegexValidator
from cride.users.serializers.profiles import ProfileModelSerializer
from rest_framework import serializers
from rest_framework.authtoken.models import Token
from rest_framework.validators import UniqueValidator
from cride.users.models import User, Profile
from cride.taskapp.task import send_confirmation_email
import jwt
class UserModelSerializer(serializers.ModelSerializer):
profile = ProfileModelSerializer(read_only=True)
class Meta:
model = User
fields = (
'username',
'first_name',
'last_name',
'email',
'phone_number',
'profile'
)
class UserSignUpSerializer(serializers.Serializer):
email = serializers.EmailField(
validators=[UniqueValidator(queryset=User.objects.all())]
)
username = serializers.CharField(
min_length=4,
max_length=20,
validators=[UniqueValidator(queryset=User.objects.all())]
)
phone_regex = RegexValidator(
regex=r'\+?1?\d{9,15}$',
message="Phone number must be entered in the format: +999999999. Up to 15 digits allowed."
)
phone_number = serializers.CharField(validators=[phone_regex])
password = serializers.CharField(min_length=8, max_length=64)
password_confirmation = serializers.CharField(min_length=8, max_length=64)
first_name = serializers.CharField(min_length=2, max_length=30)
last_name = serializers.CharField(min_length=2, max_length=30)
def validate(self, data):
passwd = data['password']
passwd_conf = data['password_confirmation']
if passwd != passwd_conf:
raise serializers.ValidationError("Passwords don't match.")
password_validation.validate_password(passwd)
return data
def create(self, data):
data.pop('password_confirmation')
user = User.objects.create_user(**data, is_verified=False, is_client=True)
profile = Profile.objects.create(user=user)
send_confirmation_email.delay(user_pk=user.pk)
return user
class UserLoginSerializer(serializers.Serializer):
email = serializers.EmailField()
password = serializers.CharField(min_length=8, max_length=64)
def validate(self, data):
user = authenticate(username=data['email'], password=data['password'])
if not user:
raise serializers.ValidationError('Invalid credentials')
if not user.is_verified:
raise serializers.ValidationError('Account is not active yet')
self.context['user'] = user
return data
def create(self, data):
token, created = Token.objects.get_or_create(user=self.context['user'])
return self.context['user'], token.key
class AccountVerificationSerializer(serializers.Serializer):
token = serializers.CharField()
def validate_token(self, data):
try:
payload = jwt.decode(data, settings.SECRET_KEY, algorithms=['HS256'])
except jwt.ExpiredSignatureError:
raise serializers.ValidationError('Verification link has expired.')
except jwt.exceptions.PyJWTError:
raise serializers.ValidationError('Invalid token')
if payload['type'] != 'email_confirmation':
raise serializers.ValidationError('Invalid token')
self.context['payload'] = payload
return data
def save(self):
payload = self.context['payload']
user = User.objects.get(username=payload['user'])
user.is_verified = True
user.save()
| true
| true
|
7909c6c2a0341977808eaf310bae19b6dd3dfc34
| 1,474
|
py
|
Python
|
datasets/grid/testexpt.py
|
saraswat/TensorLog
|
c56cebfa33b5123d5340a7b429e333da09d223d8
|
[
"Apache-2.0"
] | 108
|
2016-05-24T16:49:56.000Z
|
2022-02-02T19:06:14.000Z
|
datasets/grid/testexpt.py
|
saraswat/TensorLog
|
c56cebfa33b5123d5340a7b429e333da09d223d8
|
[
"Apache-2.0"
] | 12
|
2016-09-07T18:04:38.000Z
|
2020-12-07T01:18:08.000Z
|
datasets/grid/testexpt.py
|
saraswat/TensorLog
|
c56cebfa33b5123d5340a7b429e333da09d223d8
|
[
"Apache-2.0"
] | 22
|
2016-06-17T18:59:18.000Z
|
2020-05-28T02:13:59.000Z
|
import unittest
import tfexpt
import expt
from tensorlog import matrixdb
from tensorlog import program
from tensorlog import dataset
class TestNative(unittest.TestCase):
def setUp(self):
(self.n,self.maxD,self.epochs) = (16,8,20)
(self.factFile,trainFile,testFile) = expt.genInputs(self.n)
# (self.factFile,self.trainFile,self.testFile) = ('inputs/g16.cfacts','inputs/g16-train.exam','inputs/g16-test.exam')
self.db = matrixdb.MatrixDB.loadFile(self.factFile)
self.prog = program.Program.loadRules("grid.ppr",self.db)
self.trainData = dataset.Dataset.loadExamples(self.prog.db,trainFile)
self.testData = dataset.Dataset.loadExamples(self.prog.db,testFile)
def testIt(self):
acc,loss = expt.accExpt(self.prog,self.trainData,self.testData,self.n,self.maxD,self.epochs)
print('acc',acc)
self.assertTrue(acc >= 0.85)
times = expt.timingExpt(self.prog)
for t in times:
print('time',t)
self.assertTrue(t < 0.05)
class TestAccTF(unittest.TestCase):
def setUp(self):
(self.n,self.maxD,self.epochs) = (16,8,20)
(self.factFile,self.trainFile,self.testFile) = expt.genInputs(self.n)
(self.tlog,self.trainData,self.testData) = tfexpt.setup_tlog(self.maxD,self.factFile,self.trainFile,self.testFile)
def testIt(self):
acc = tfexpt.trainAndTest(self.tlog,self.trainData,self.testData,self.epochs)
print('acc',acc)
self.assertTrue(acc >= 0.85)
if __name__ == "__main__":
unittest.main()
| 33.5
| 120
| 0.71981
|
import unittest
import tfexpt
import expt
from tensorlog import matrixdb
from tensorlog import program
from tensorlog import dataset
class TestNative(unittest.TestCase):
def setUp(self):
(self.n,self.maxD,self.epochs) = (16,8,20)
(self.factFile,trainFile,testFile) = expt.genInputs(self.n)
self.db = matrixdb.MatrixDB.loadFile(self.factFile)
self.prog = program.Program.loadRules("grid.ppr",self.db)
self.trainData = dataset.Dataset.loadExamples(self.prog.db,trainFile)
self.testData = dataset.Dataset.loadExamples(self.prog.db,testFile)
def testIt(self):
acc,loss = expt.accExpt(self.prog,self.trainData,self.testData,self.n,self.maxD,self.epochs)
print('acc',acc)
self.assertTrue(acc >= 0.85)
times = expt.timingExpt(self.prog)
for t in times:
print('time',t)
self.assertTrue(t < 0.05)
class TestAccTF(unittest.TestCase):
def setUp(self):
(self.n,self.maxD,self.epochs) = (16,8,20)
(self.factFile,self.trainFile,self.testFile) = expt.genInputs(self.n)
(self.tlog,self.trainData,self.testData) = tfexpt.setup_tlog(self.maxD,self.factFile,self.trainFile,self.testFile)
def testIt(self):
acc = tfexpt.trainAndTest(self.tlog,self.trainData,self.testData,self.epochs)
print('acc',acc)
self.assertTrue(acc >= 0.85)
if __name__ == "__main__":
unittest.main()
| true
| true
|
7909c844f56cd69e22c2359eca04f9f718929574
| 59
|
py
|
Python
|
otk/asbt1/__init__.py
|
draustin/otk
|
c6e91423ec79b85b380ee9385f6d27c91f92503d
|
[
"MIT"
] | 7
|
2020-05-17T14:26:42.000Z
|
2022-02-14T04:52:54.000Z
|
otk/asbt1/__init__.py
|
uamhforever/otk
|
c6e91423ec79b85b380ee9385f6d27c91f92503d
|
[
"MIT"
] | 17
|
2020-04-10T22:50:00.000Z
|
2020-06-18T04:54:19.000Z
|
otk/asbt1/__init__.py
|
uamhforever/otk
|
c6e91423ec79b85b380ee9385f6d27c91f92503d
|
[
"MIT"
] | 1
|
2022-02-14T04:52:45.000Z
|
2022-02-14T04:52:45.000Z
|
from ._tracing import Beam
from ._sbt import trace_surfaces
| 29.5
| 32
| 0.847458
|
from ._tracing import Beam
from ._sbt import trace_surfaces
| true
| true
|
7909ca043608913d17d046dcb573a8db53c1e96f
| 225
|
py
|
Python
|
response/slack/decorators/__init__.py
|
qubitdigital/response
|
9ac9d11f60d108739043697aa19478474cb94a09
|
[
"MIT"
] | null | null | null |
response/slack/decorators/__init__.py
|
qubitdigital/response
|
9ac9d11f60d108739043697aa19478474cb94a09
|
[
"MIT"
] | 9
|
2021-03-19T13:56:39.000Z
|
2021-06-10T20:48:16.000Z
|
response/slack/decorators/__init__.py
|
qubitdigital/response
|
9ac9d11f60d108739043697aa19478474cb94a09
|
[
"MIT"
] | null | null | null |
from .action_handler import *
from .event_handler import *
from .headline_post_action import *
from .incident_command import *
from .keyword_handler import *
from .incident_notification import *
from .dialog_handler import *
| 28.125
| 36
| 0.813333
|
from .action_handler import *
from .event_handler import *
from .headline_post_action import *
from .incident_command import *
from .keyword_handler import *
from .incident_notification import *
from .dialog_handler import *
| true
| true
|
7909cb31dce0f5d0d244a16c56e9e7a864d3c124
| 2,372
|
py
|
Python
|
src/gui/SubVision.py
|
bochkovoi/AHP
|
b51dc598f8f7a65a2ade039d887dccfa6d070f1e
|
[
"MIT"
] | null | null | null |
src/gui/SubVision.py
|
bochkovoi/AHP
|
b51dc598f8f7a65a2ade039d887dccfa6d070f1e
|
[
"MIT"
] | null | null | null |
src/gui/SubVision.py
|
bochkovoi/AHP
|
b51dc598f8f7a65a2ade039d887dccfa6d070f1e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PyQt5 import QtWidgets, QtGui, QtCore
import sys, os.path as op
path1 = op.join( op.abspath(op.dirname(__file__)), '..', 'Structure')
path2 = op.join( op.abspath(op.dirname(__file__)), '..')
sys.path.append(path1)
sys.path.append(path2)
from Structure import *
from VisObject import *
class SubVision( QtWidgets.QWidget ):
""" Базовый класс-окно для показа подчиненных объектов """
def __init__( self, main_object, is_change=True, parent=None ):
super().__init__( parent=parent )
        #Set the main object
self.__obj = main_object
        #Set whether elements can be modified (default - Yes)
self.is_change = is_change
self.initUI()
def initUI( self ):
        ''' Initialize the window contents '''
        #Add the data list widget and fill it with the subordinate objects
self.sub_objs = QtWidgets.QListWidget( )
for obj in self.__obj.sub_objects:
            #Create a list item
a = QtWidgets.QListWidgetItem()
            #Attach the object subordinate to the base object
a.sub_obj = obj
            #Set the item text to the subordinate object's name
a.setText( obj.name )
            #Add the item to the list
self.sub_objs.addItem( a )
        #Create the form layout and add the list of subordinate objects to it
self.form = QtWidgets.QFormLayout()
self.form.addRow(self.sub_objs)
self.setLayout(self.form)
        #Connect the double-click signal to the handler
self.sub_objs.itemDoubleClicked.connect( self.isDoubleClicked )
def isDoubleClicked( self, obj ):
        #If editing is allowed, open the edit window, otherwise the read-only view window
if self.is_change:
sub_window = ChangeVisObject( obj.sub_obj, parent=self )
else:
sub_window = SimpleVisObject( obj.sub_obj, parent=self )
sub_window.setWindowTitle( "Редактирование объекта: " + obj.sub_obj.name )
        #Disable this window, or its parent if there is one
if self.parent() is None:
self.setEnabled( False )
else:
self.parent().setEnabled( False )
        #Enable the child window and show it
sub_window.setEnabled( True )
sub_window.show()
| 38.885246
| 85
| 0.643339
|
from PyQt5 import QtWidgets, QtGui, QtCore
import sys, os.path as op
path1 = op.join( op.abspath(op.dirname(__file__)), '..', 'Structure')
path2 = op.join( op.abspath(op.dirname(__file__)), '..')
sys.path.append(path1)
sys.path.append(path2)
from Structure import *
from VisObject import *
class SubVision( QtWidgets.QWidget ):
def __init__( self, main_object, is_change=True, parent=None ):
super().__init__( parent=parent )
self.__obj = main_object
self.is_change = is_change
self.initUI()
def initUI( self ):
self.sub_objs = QtWidgets.QListWidget( )
for obj in self.__obj.sub_objects:
a = QtWidgets.QListWidgetItem()
a.sub_obj = obj
a.setText( obj.name )
self.sub_objs.addItem( a )
self.form = QtWidgets.QFormLayout()
self.form.addRow(self.sub_objs)
self.setLayout(self.form)
self.sub_objs.itemDoubleClicked.connect( self.isDoubleClicked )
def isDoubleClicked( self, obj ):
if self.is_change:
sub_window = ChangeVisObject( obj.sub_obj, parent=self )
else:
sub_window = SimpleVisObject( obj.sub_obj, parent=self )
sub_window.setWindowTitle( "Редактирование объекта: " + obj.sub_obj.name )
if self.parent() is None:
self.setEnabled( False )
else:
self.parent().setEnabled( False )
sub_window.setEnabled( True )
sub_window.show()
| true
| true
|
7909cb38f536b0176e1f5661c9e55e2740118f12
| 3,079
|
py
|
Python
|
huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/create_configuration_response.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | 1
|
2021-11-03T07:54:50.000Z
|
2021-11-03T07:54:50.000Z
|
huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/create_configuration_response.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/create_configuration_response.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class CreateConfigurationResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'configuration': 'ConfigurationSummaryForCreate'
}
attribute_map = {
'configuration': 'configuration'
}
def __init__(self, configuration=None):
"""CreateConfigurationResponse - a model defined in huaweicloud sdk"""
super(CreateConfigurationResponse, self).__init__()
self._configuration = None
self.discriminator = None
if configuration is not None:
self.configuration = configuration
@property
def configuration(self):
"""Gets the configuration of this CreateConfigurationResponse.
:return: The configuration of this CreateConfigurationResponse.
:rtype: ConfigurationSummaryForCreate
"""
return self._configuration
@configuration.setter
def configuration(self, configuration):
"""Sets the configuration of this CreateConfigurationResponse.
:param configuration: The configuration of this CreateConfigurationResponse.
:type: ConfigurationSummaryForCreate
"""
self._configuration = configuration
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateConfigurationResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 28.247706
| 84
| 0.583956
|
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class CreateConfigurationResponse(SdkResponse):
sensitive_list = []
openapi_types = {
'configuration': 'ConfigurationSummaryForCreate'
}
attribute_map = {
'configuration': 'configuration'
}
def __init__(self, configuration=None):
super(CreateConfigurationResponse, self).__init__()
self._configuration = None
self.discriminator = None
if configuration is not None:
self.configuration = configuration
@property
def configuration(self):
return self._configuration
@configuration.setter
def configuration(self, configuration):
self._configuration = configuration
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, CreateConfigurationResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
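The generated model above is essentially a typed wrapper whose to_dict()/to_str() methods walk openapi_types. Below is a minimal usage sketch, not part of the dataset entry: it assumes the huaweicloudsdkcore and huaweicloudsdkrds packages are installed, infers the import path from the repository layout, and substitutes a hypothetical stub for ConfigurationSummaryForCreate, whose definition is not shown here.

from huaweicloudsdkrds.v3.model.create_configuration_response import (
    CreateConfigurationResponse,
)

class _StubConfiguration:
    # Hypothetical stand-in exposing the to_dict() hook that to_dict() looks for.
    def to_dict(self):
        return {"id": "cfg-123", "name": "example"}

resp = CreateConfigurationResponse(configuration=_StubConfiguration())
print(resp.to_dict())  # {'configuration': {'id': 'cfg-123', 'name': 'example'}}
print(resp)            # __repr__ pretty-prints the same dict via to_str()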
7909cb433663fa2d5959177dd2cdcd5af536c083
| 1,255
|
py
|
Python
|
1_boston.py
|
ZXTFINAL/deeplearning
|
52208b43fc8f9a1ea8508b1c07140c70e1529459
|
[
"Apache-2.0"
] | 2
|
2021-04-26T07:31:57.000Z
|
2022-01-24T06:38:50.000Z
|
1_boston.py
|
ZXTFINAL/deeplearning
|
52208b43fc8f9a1ea8508b1c07140c70e1529459
|
[
"Apache-2.0"
] | null | null | null |
1_boston.py
|
ZXTFINAL/deeplearning
|
52208b43fc8f9a1ea8508b1c07140c70e1529459
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
dataset = load_boston()
X = dataset.data
y = dataset.target
mean = X.mean(axis=0)
std = X.std(axis=0)
X = (X-mean)/std
# print(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
n_train = X_train.shape[0]
n_features = X_train.shape[1]
# initialize the weights and bias
w = np.random.rand(n_features)
b = 1.1
lr = 0.001
epoches = 3000
def model(x):
y_hat = w.dot(x)+b
return y_hat
def loss_funtion(X, y):
total_loss = 0
n_samples = len(X)
for i in range(n_samples):
xi = X[i]
yi = y[i]
yi_hat = model(xi)
total_loss += abs(yi_hat-yi)**2
avg_loss = (1/n_samples)*total_loss
return avg_loss
reg = 0.5
for epoch in range(epoches):
sum_w = 0.0
sum_b = 0.0
for i in range(n_train):
xi = X_train[i]
yi = y_train[i]
yi_hat = model(xi)
sum_w += (yi_hat-yi)*xi
sum_b += (yi_hat - yi)
grad_w = (2/n_train)*sum_w+(2.0*reg*w)
    grad_b = (2/n_train)*sum_b  # the bias term is not regularized
w = w-lr*grad_w
b = b-lr*grad_b
train_loss = loss_funtion(X_train, y_train)
test_loss = loss_funtion(X_test, y_test)
print(train_loss)
print(test_loss)
| 21.271186
| 72
| 0.636653
|
import numpy as np
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
dataset = load_boston()
X = dataset.data
y = dataset.target
mean = X.mean(axis=0)
std = X.std(axis=0)
X = (X-mean)/std
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
n_train = X_train.shape[0]
n_features = X_train.shape[1]
w = np.random.rand(n_features)
b = 1.1
lr = 0.001
epoches = 3000
def model(x):
y_hat = w.dot(x)+b
return y_hat
def loss_funtion(X, y):
total_loss = 0
n_samples = len(X)
for i in range(n_samples):
xi = X[i]
yi = y[i]
yi_hat = model(xi)
total_loss += abs(yi_hat-yi)**2
avg_loss = (1/n_samples)*total_loss
return avg_loss
reg = 0.5
for epoch in range(epoches):
sum_w = 0.0
sum_b = 0.0
for i in range(n_train):
xi = X_train[i]
yi = y_train[i]
yi_hat = model(xi)
sum_w += (yi_hat-yi)*xi
sum_b += (yi_hat - yi)
grad_w = (2/n_train)*sum_w+(2.0*reg*w)
grad_b = (2/n_train)*sum_b
w = w-lr*grad_w
b = b-lr*grad_b
train_loss = loss_funtion(X_train, y_train)
test_loss = loss_funtion(X_test, y_test)
print(train_loss)
print(test_loss)
| true
| true
|
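The training loop above accumulates the batch gradient one sample at a time. For readers who prefer the closed form, here is an equivalent vectorized sketch (not part of the original script); it reuses the same learning-rate and L2-penalty conventions and fabricates small random arrays purely for illustration.

import numpy as np

def gradient_step(X, y, w, b, lr=0.001, reg=0.5):
    # Vectorized form of the per-sample loop above:
    #   grad_w = (2/n) * X^T (Xw + b - y) + 2*reg*w
    #   grad_b = (2/n) * sum(Xw + b - y)        (bias is not regularized)
    n = X.shape[0]
    residual = X.dot(w) + b - y
    grad_w = (2.0 / n) * X.T.dot(residual) + 2.0 * reg * w
    grad_b = (2.0 / n) * residual.sum()
    return w - lr * grad_w, b - lr * grad_b

# Illustrative call with made-up shapes matching the Boston data (13 features).
X_demo = np.random.rand(10, 13)
y_demo = np.random.rand(10)
w_demo, b_demo = gradient_step(X_demo, y_demo, np.random.rand(13), 1.1)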
7909cbc06e25c672eddf4706f64f472276ea30a7
| 1,392
|
py
|
Python
|
iot_message/tests/test_plain_cryptor.py
|
bkosciow/python_iot-1
|
f3cd2bdbb75cb9afe13fecb603b5b8c026d23500
|
[
"MIT"
] | null | null | null |
iot_message/tests/test_plain_cryptor.py
|
bkosciow/python_iot-1
|
f3cd2bdbb75cb9afe13fecb603b5b8c026d23500
|
[
"MIT"
] | null | null | null |
iot_message/tests/test_plain_cryptor.py
|
bkosciow/python_iot-1
|
f3cd2bdbb75cb9afe13fecb603b5b8c026d23500
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#pylint: skip-file
from nose.tools import assert_equal
from iot_message.cryptor.plain import Cryptor
from iot_message.message import Message
__author__ = 'Bartosz Kościów'
import iot_message.factory as factory
class TestCryptorPlain(object):
def setUp(self):
Message.chip_id = 'pc'
Message.node_name = 'Turkusik'
Message.drop_unencrypted = False
Message.encoders = []
Message.decoders = {}
def test_encode_message(self):
Message.add_encoder(Cryptor())
msg = factory.MessageFactory.create()
inp = {"event": "channel.on", "parameters": {"channel": 0}, "response": "", "targets": ["node-north"]}
msg.set(inp)
msg.encrypt()
assert_equal(inp["event"], msg.data["event"])
assert_equal(inp["parameters"], msg.data["parameters"])
assert_equal(inp["targets"], msg.data["targets"])
def test_decrypt_message(self):
Message.add_decoder(Cryptor())
inp = """{"protocol": "iot:1", "node": "Turkusik", "chip_id": "pc", "event": "message.plain", "parameters": ["a"], "response": "", "targets": ["Turkusik"]}"""
msg = factory.MessageFactory.create(inp)
assert_equal(msg.data["event"], "message.plain")
assert_equal(msg.data["parameters"], ["a"])
assert_equal(msg.data["targets"], ['Turkusik'])
| 33.95122
| 166
| 0.630747
|
from nose.tools import assert_equal
from iot_message.cryptor.plain import Cryptor
from iot_message.message import Message
__author__ = 'Bartosz Kościów'
import iot_message.factory as factory
class TestCryptorPlain(object):
def setUp(self):
Message.chip_id = 'pc'
Message.node_name = 'Turkusik'
Message.drop_unencrypted = False
Message.encoders = []
Message.decoders = {}
def test_encode_message(self):
Message.add_encoder(Cryptor())
msg = factory.MessageFactory.create()
inp = {"event": "channel.on", "parameters": {"channel": 0}, "response": "", "targets": ["node-north"]}
msg.set(inp)
msg.encrypt()
assert_equal(inp["event"], msg.data["event"])
assert_equal(inp["parameters"], msg.data["parameters"])
assert_equal(inp["targets"], msg.data["targets"])
def test_decrypt_message(self):
Message.add_decoder(Cryptor())
inp = """{"protocol": "iot:1", "node": "Turkusik", "chip_id": "pc", "event": "message.plain", "parameters": ["a"], "response": "", "targets": ["Turkusik"]}"""
msg = factory.MessageFactory.create(inp)
assert_equal(msg.data["event"], "message.plain")
assert_equal(msg.data["parameters"], ["a"])
assert_equal(msg.data["targets"], ['Turkusik'])
| true
| true
|
7909cbdda24cae4c4415c0b2b3edc44a297c2cd7
| 2,742
|
py
|
Python
|
flash/image/embedding/vissl/transforms/utilities.py
|
Isaac-Flath/lightning-flash
|
320f87707587d92a13c8831778864b33af4fe421
|
[
"Apache-2.0"
] | null | null | null |
flash/image/embedding/vissl/transforms/utilities.py
|
Isaac-Flath/lightning-flash
|
320f87707587d92a13c8831778864b33af4fe421
|
[
"Apache-2.0"
] | null | null | null |
flash/image/embedding/vissl/transforms/utilities.py
|
Isaac-Flath/lightning-flash
|
320f87707587d92a13c8831778864b33af4fe421
|
[
"Apache-2.0"
] | null | null | null |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from flash.core.data.data_source import DefaultDataKeys
def vissl_collate_helper(samples):
result = []
for batch_ele in samples:
_batch_ele_dict = {}
_batch_ele_dict.update(batch_ele)
_batch_ele_dict[DefaultDataKeys.INPUT] = -1
result.append(_batch_ele_dict)
return torch.utils.data._utils.collate.default_collate(result)
def multicrop_collate_fn(samples):
"""Multi-crop collate function for VISSL integration.
Run custom collate on a single key since VISSL transforms affect only DefaultDataKeys.INPUT
"""
result = vissl_collate_helper(samples)
inputs = [[] for _ in range(len(samples[0][DefaultDataKeys.INPUT]))]
for batch_ele in samples:
multi_crop_imgs = batch_ele[DefaultDataKeys.INPUT]
for idx, crop in enumerate(multi_crop_imgs):
inputs[idx].append(crop)
for idx, ele in enumerate(inputs):
inputs[idx] = torch.stack(ele)
result[DefaultDataKeys.INPUT] = inputs
return result
def simclr_collate_fn(samples):
"""Multi-crop collate function for VISSL integration.
Run custom collate on a single key since VISSL transforms affect only DefaultDataKeys.INPUT
"""
result = vissl_collate_helper(samples)
inputs = []
num_views = len(samples[0][DefaultDataKeys.INPUT])
view_idx = 0
while view_idx < num_views:
for batch_ele in samples:
imgs = batch_ele[DefaultDataKeys.INPUT]
inputs.append(imgs[view_idx])
view_idx += 1
result[DefaultDataKeys.INPUT] = torch.stack(inputs)
return result
def moco_collate_fn(samples):
"""MOCO collate function for VISSL integration.
Run custom collate on a single key since VISSL transforms affect only DefaultDataKeys.INPUT
"""
result = vissl_collate_helper(samples)
inputs = []
for batch_ele in samples:
inputs.append(torch.stack(batch_ele[DefaultDataKeys.INPUT]))
result[DefaultDataKeys.INPUT] = torch.stack(inputs).squeeze()[:, 0, :, :, :].squeeze()
result["data_momentum"] = torch.stack(inputs).squeeze()[:, 1, :, :, :].squeeze()
return result
| 30.131868
| 95
| 0.707877
|
import torch
from flash.core.data.data_source import DefaultDataKeys
def vissl_collate_helper(samples):
result = []
for batch_ele in samples:
_batch_ele_dict = {}
_batch_ele_dict.update(batch_ele)
_batch_ele_dict[DefaultDataKeys.INPUT] = -1
result.append(_batch_ele_dict)
return torch.utils.data._utils.collate.default_collate(result)
def multicrop_collate_fn(samples):
result = vissl_collate_helper(samples)
inputs = [[] for _ in range(len(samples[0][DefaultDataKeys.INPUT]))]
for batch_ele in samples:
multi_crop_imgs = batch_ele[DefaultDataKeys.INPUT]
for idx, crop in enumerate(multi_crop_imgs):
inputs[idx].append(crop)
for idx, ele in enumerate(inputs):
inputs[idx] = torch.stack(ele)
result[DefaultDataKeys.INPUT] = inputs
return result
def simclr_collate_fn(samples):
result = vissl_collate_helper(samples)
inputs = []
num_views = len(samples[0][DefaultDataKeys.INPUT])
view_idx = 0
while view_idx < num_views:
for batch_ele in samples:
imgs = batch_ele[DefaultDataKeys.INPUT]
inputs.append(imgs[view_idx])
view_idx += 1
result[DefaultDataKeys.INPUT] = torch.stack(inputs)
return result
def moco_collate_fn(samples):
result = vissl_collate_helper(samples)
inputs = []
for batch_ele in samples:
inputs.append(torch.stack(batch_ele[DefaultDataKeys.INPUT]))
result[DefaultDataKeys.INPUT] = torch.stack(inputs).squeeze()[:, 0, :, :, :].squeeze()
result["data_momentum"] = torch.stack(inputs).squeeze()[:, 1, :, :, :].squeeze()
return result
| true
| true
|
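The collate helpers above only make sense applied to a batch of sample dicts keyed by DefaultDataKeys. The sketch below shows one hypothetical call to multicrop_collate_fn; it assumes torch and lightning-flash are installed, that the module path mirrors the repository layout, and the tensor shapes are invented.

import torch
from flash.core.data.data_source import DefaultDataKeys
from flash.image.embedding.vissl.transforms.utilities import multicrop_collate_fn

# Two samples, each carrying two crops of different resolutions under INPUT.
samples = [
    {DefaultDataKeys.INPUT: [torch.randn(3, 32, 32), torch.randn(3, 16, 16)]},
    {DefaultDataKeys.INPUT: [torch.randn(3, 32, 32), torch.randn(3, 16, 16)]},
]

batch = multicrop_collate_fn(samples)
# One batched tensor per crop resolution: (2, 3, 32, 32) and (2, 3, 16, 16).
print([t.shape for t in batch[DefaultDataKeys.INPUT]])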
7909cd5410e1cf6286cb0446d96f361dd895483b
| 855
|
py
|
Python
|
TencentYoutuyun/conf.py
|
walle13/Face_recognition
|
651bed13971b439bc555b612458709082eb50a49
|
[
"MIT"
] | 28
|
2017-12-02T05:09:06.000Z
|
2021-12-23T15:36:27.000Z
|
TencentYoutuyun/conf.py
|
walle13/Face_recognition
|
651bed13971b439bc555b612458709082eb50a49
|
[
"MIT"
] | null | null | null |
TencentYoutuyun/conf.py
|
walle13/Face_recognition
|
651bed13971b439bc555b612458709082eb50a49
|
[
"MIT"
] | 6
|
2017-12-02T05:25:08.000Z
|
2021-12-23T15:36:28.000Z
|
# -*- coding: utf-8 -*-
import pkg_resources
import platform
API_YOUTU_END_POINT = 'http://api.youtu.qq.com/'
API_TENCENTYUN_END_POINT = 'https://youtu.api.qcloud.com/'
API_YOUTU_VIP_END_POINT = 'https://vip-api.youtu.qq.com/'
APPID = 'xxx'
SECRET_ID = 'xxx'
SECRET_KEY = 'xx'
USER_ID = 'xx'
_config = {
'end_point':API_YOUTU_END_POINT,
'appid':APPID,
'secret_id':SECRET_ID,
'secret_key':SECRET_KEY,
'userid':USER_ID,
}
def get_app_info():
return _config
def set_app_info(appid=None, secret_id=None, secret_key=None, userid=None, end_point=None):
if appid:
_config['appid'] = appid
if secret_id:
_config['secret_id'] = secret_id
if secret_key:
_config['secret_key'] = secret_key
if userid:
_config['userid'] = userid
if end_point:
_config['end_point'] = end_point
| 22.5
| 91
| 0.666667
|
import pkg_resources
import platform
API_YOUTU_END_POINT = 'http://api.youtu.qq.com/'
API_TENCENTYUN_END_POINT = 'https://youtu.api.qcloud.com/'
API_YOUTU_VIP_END_POINT = 'https://vip-api.youtu.qq.com/'
APPID = 'xxx'
SECRET_ID = 'xxx'
SECRET_KEY = 'xx'
USER_ID = 'xx'
_config = {
'end_point':API_YOUTU_END_POINT,
'appid':APPID,
'secret_id':SECRET_ID,
'secret_key':SECRET_KEY,
'userid':USER_ID,
}
def get_app_info():
return _config
def set_app_info(appid=None, secret_id=None, secret_key=None, userid=None, end_point=None):
if appid:
_config['appid'] = appid
if secret_id:
_config['secret_id'] = secret_id
if secret_key:
_config['secret_key'] = secret_key
if userid:
_config['userid'] = userid
if end_point:
_config['end_point'] = end_point
| true
| true
|
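The module above is a small mutable settings store: set_app_info() overwrites only the fields that are passed, and get_app_info() returns the shared dict. A minimal usage sketch follows, assuming the TencentYoutuyun package is importable; the credential values are placeholders.

from TencentYoutuyun import conf

conf.set_app_info(appid='1000001', secret_id='my-secret-id',
                  secret_key='my-secret-key', userid='demo-user')

settings = conf.get_app_info()
print(settings['appid'])      # '1000001'
print(settings['end_point'])  # still API_YOUTU_END_POINT unless overridden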
7909cd5dbf00a00a8c2f0815dea33ccee71e579d
| 2,099
|
py
|
Python
|
python/iviz/Util.py
|
eddy-ilg/iviz
|
8c392dcc75a6563c5d076bbbd84152273a3c0f71
|
[
"MIT"
] | null | null | null |
python/iviz/Util.py
|
eddy-ilg/iviz
|
8c392dcc75a6563c5d076bbbd84152273a3c0f71
|
[
"MIT"
] | null | null | null |
python/iviz/Util.py
|
eddy-ilg/iviz
|
8c392dcc75a6563c5d076bbbd84152273a3c0f71
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
from PyQt5.QtGui import QImage, qRgb, QPixmap
import numpy as np
gray_color_table = [qRgb(i, i, i) for i in range(256)]
def toQImage(data, copy=True):
if data is None:
return QImage()
data = data.copy()
data[data>255] = 255
data[data<0] = 0
data = data.astype(np.uint8)
if data.dtype == np.uint8:
if len(data.shape) == 2:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_Indexed8)
qim.setColorTable(gray_color_table)
return qim.copy() if copy else qim
elif len(data.shape) == 3:
if data.shape[2] == 1:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_Grayscale8)
return qim.copy() if copy else qim
if data.shape[2] == 3:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_RGB888)
return qim.copy() if copy else qim
elif data.shape[2] == 4:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_ARGB32)
return qim.copy() if copy else qim
else:
raise Exception("Conversion of %d channel array to QImage not implemented" % data.shape[2])
raise Exception("Conversion of %d dimension array to QImage not implemented" % len(data.shape))
def toQPixmap(data):
if data is None: return QPixmap()
elif isinstance(data, QPixmap): return data
    elif isinstance(data, QImage): return QPixmap.fromImage(data)
elif hasattr(data, 'pixmap'): return data.pixmap()
else: return QPixmap.fromImage(toQImage(data))
def qPixmapToNumpy(pixmap):
image = pixmap.toImage()
image = image.convertToFormat(QImage.Format.Format_RGB32)
width = image.width()
height = image.height()
ptr = image.bits()
ptr.setsize(height * width * 4)
arr = np.frombuffer(ptr, np.uint8).reshape((height, width, 4))
return arr[:, :, 0:3].copy()
| 35.576271
| 112
| 0.618866
|
from PyQt5.QtGui import QImage, qRgb, QPixmap
import numpy as np
gray_color_table = [qRgb(i, i, i) for i in range(256)]
def toQImage(data, copy=True):
if data is None:
return QImage()
data = data.copy()
data[data>255] = 255
data[data<0] = 0
data = data.astype(np.uint8)
if data.dtype == np.uint8:
if len(data.shape) == 2:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_Indexed8)
qim.setColorTable(gray_color_table)
return qim.copy() if copy else qim
elif len(data.shape) == 3:
if data.shape[2] == 1:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_Grayscale8)
return qim.copy() if copy else qim
if data.shape[2] == 3:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_RGB888)
return qim.copy() if copy else qim
elif data.shape[2] == 4:
qim = QImage(data.data, data.shape[1], data.shape[0], data.strides[0], QImage.Format_ARGB32)
return qim.copy() if copy else qim
else:
raise Exception("Conversion of %d channel array to QImage not implemented" % data.shape[2])
raise Exception("Conversion of %d dimension array to QImage not implemented" % len(data.shape))
def toQPixmap(data):
if data is None: return QPixmap()
elif isinstance(data, QPixmap): return data
    elif isinstance(data, QImage): return QPixmap.fromImage(data)
elif hasattr(data, 'pixmap'): return data.pixmap()
else: return QPixmap.fromImage(toQImage(data))
def qPixmapToNumpy(pixmap):
image = pixmap.toImage()
image = image.convertToFormat(QImage.Format.Format_RGB32)
width = image.width()
height = image.height()
ptr = image.bits()
ptr.setsize(height * width * 4)
arr = np.frombuffer(ptr, np.uint8).reshape((height, width, 4))
return arr[:, :, 0:3].copy()
| true
| true
|
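toQImage() above clips values to [0, 255], casts to uint8 and picks a QImage format from the array shape. The sketch below exercises it on a fabricated RGB array; it assumes PyQt5 and numpy are installed and that the module is importable as iviz.Util (inferred from the repository layout). Converting further to QPixmap would additionally require a running QApplication, so the sketch stops at QImage.

import numpy as np
from iviz.Util import toQImage

# Fabricated float image; out-of-range values are clipped by toQImage.
rgb = (np.random.rand(48, 64, 3) * 300).astype(np.float32)
qimage = toQImage(rgb)
print(qimage.width(), qimage.height())  # 64 48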
7909cde157769993cb1c4d789af67ba4b666042c
| 3,378
|
py
|
Python
|
tools/train.py
|
ar90n/ttfnet
|
99dbfa888f90c8161c2c1666b2d17cdb144dbc30
|
[
"Apache-2.0"
] | 3
|
2019-12-02T01:13:10.000Z
|
2021-11-10T04:50:45.000Z
|
tools/train.py
|
ar90n/ttfnet
|
99dbfa888f90c8161c2c1666b2d17cdb144dbc30
|
[
"Apache-2.0"
] | null | null | null |
tools/train.py
|
ar90n/ttfnet
|
99dbfa888f90c8161c2c1666b2d17cdb144dbc30
|
[
"Apache-2.0"
] | 3
|
2020-08-20T03:14:11.000Z
|
2021-03-01T06:19:10.000Z
|
from __future__ import division
import argparse
import os
import torch
from mmcv import Config
from mmdet import __version__
from mmdet.apis import (get_root_logger, init_dist, set_random_seed,
train_detector)
from mmdet.datasets import build_dataset
from mmdet.models import build_detector
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work_dir', help='the dir to save logs and models')
parser.add_argument(
'--resume_from', help='the checkpoint file to resume from')
parser.add_argument(
'--validate',
action='store_true',
help='whether to evaluate the checkpoint during training')
parser.add_argument(
'--gpus',
type=int,
default=1,
help='number of gpus to use '
'(only applicable to non-distributed training)')
parser.add_argument('--seed', type=int, default=None, help='random seed')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
parser.add_argument(
'--autoscale-lr',
action='store_true',
help='automatically scale lr with the number of gpus')
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
return args
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
# set cudnn_benchmark
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
# update configs according to CLI args
if args.work_dir is not None:
cfg.work_dir = args.work_dir
if args.resume_from is not None:
cfg.resume_from = args.resume_from
cfg.gpus = args.gpus
if args.autoscale_lr:
# apply the linear scaling rule (https://arxiv.org/abs/1706.02677)
cfg.optimizer['lr'] = cfg.optimizer['lr'] * cfg.gpus / 8
# init distributed env first, since logger depends on the dist info.
if args.launcher == 'none':
distributed = False
else:
distributed = True
init_dist(args.launcher, **cfg.dist_params)
# init logger before other steps
logger = get_root_logger(cfg.log_level)
logger.info('Distributed training: {}'.format(distributed))
# set random seeds
if args.seed is not None:
logger.info('Set random seed to {}'.format(args.seed))
set_random_seed(args.seed)
model = build_detector(
cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
train_dataset = build_dataset(cfg.data.train)
if cfg.checkpoint_config is not None:
# save mmdet version, config file content and class names in
# checkpoints as meta data
cfg.checkpoint_config.meta = dict(
mmdet_version=__version__,
config=cfg.text,
CLASSES=train_dataset.CLASSES)
# add an attribute for visualization convenience
model.CLASSES = train_dataset.CLASSES
train_detector(
model,
train_dataset,
cfg,
distributed=distributed,
validate=args.validate,
logger=logger)
if __name__ == '__main__':
main()
| 31.570093
| 77
| 0.657194
|
from __future__ import division
import argparse
import os
import torch
from mmcv import Config
from mmdet import __version__
from mmdet.apis import (get_root_logger, init_dist, set_random_seed,
train_detector)
from mmdet.datasets import build_dataset
from mmdet.models import build_detector
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work_dir', help='the dir to save logs and models')
parser.add_argument(
'--resume_from', help='the checkpoint file to resume from')
parser.add_argument(
'--validate',
action='store_true',
help='whether to evaluate the checkpoint during training')
parser.add_argument(
'--gpus',
type=int,
default=1,
help='number of gpus to use '
'(only applicable to non-distributed training)')
parser.add_argument('--seed', type=int, default=None, help='random seed')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
parser.add_argument(
'--autoscale-lr',
action='store_true',
help='automatically scale lr with the number of gpus')
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
return args
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
if args.work_dir is not None:
cfg.work_dir = args.work_dir
if args.resume_from is not None:
cfg.resume_from = args.resume_from
cfg.gpus = args.gpus
if args.autoscale_lr:
cfg.optimizer['lr'] = cfg.optimizer['lr'] * cfg.gpus / 8
if args.launcher == 'none':
distributed = False
else:
distributed = True
init_dist(args.launcher, **cfg.dist_params)
logger = get_root_logger(cfg.log_level)
logger.info('Distributed training: {}'.format(distributed))
if args.seed is not None:
logger.info('Set random seed to {}'.format(args.seed))
set_random_seed(args.seed)
model = build_detector(
cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
train_dataset = build_dataset(cfg.data.train)
if cfg.checkpoint_config is not None:
cfg.checkpoint_config.meta = dict(
mmdet_version=__version__,
config=cfg.text,
CLASSES=train_dataset.CLASSES)
model.CLASSES = train_dataset.CLASSES
train_detector(
model,
train_dataset,
cfg,
distributed=distributed,
validate=args.validate,
logger=logger)
if __name__ == '__main__':
main()
| true
| true
|
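One detail of the script above worth spelling out is the --autoscale-lr flag, which multiplies the configured learning rate by gpus / 8 (the linear scaling rule). A tiny standalone illustration with a hypothetical base rate:

def autoscale_lr(base_lr, gpus, reference_gpus=8):
    # Linear scaling rule: lr grows proportionally with the number of GPUs.
    return base_lr * gpus / reference_gpus

print(autoscale_lr(0.016, 1))   # 0.002
print(autoscale_lr(0.016, 16))  # 0.032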
7909cfcaafa38dfbf9d18b8994f6398dd4eeac4e
| 12,791
|
py
|
Python
|
model/recommendation_functions.py
|
dalpengholic/Udacity_Recommendations_with_IBM
|
8c620b733bf91b7b97b607373d0e6ff86934d03d
|
[
"MIT"
] | null | null | null |
model/recommendation_functions.py
|
dalpengholic/Udacity_Recommendations_with_IBM
|
8c620b733bf91b7b97b607373d0e6ff86934d03d
|
[
"MIT"
] | null | null | null |
model/recommendation_functions.py
|
dalpengholic/Udacity_Recommendations_with_IBM
|
8c620b733bf91b7b97b607373d0e6ff86934d03d
|
[
"MIT"
] | null | null | null |
import pandas as pd
import re
import nltk
from nltk.corpus import stopwords
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics.pairwise import linear_kernel
def email_mapper(df):
coded_dict = dict()
cter = 1
email_encoded = []
for val in df['email']:
if val not in coded_dict:
coded_dict[val] = cter
cter+=1
email_encoded.append(coded_dict[val])
return email_encoded
def create_user_item_matrix(df):
'''
INPUT:
df - pandas dataframe with article_id, title, user_id columns
OUTPUT:
user_item - user item matrix
Description:
Return a matrix with user ids as rows and article ids on the columns with 1 values where a user interacted with
an article and a 0 otherwise
'''
# Fill in the function here
user_item = df.groupby('user_id')['article_id'].value_counts().unstack()
user_item[user_item.isna() == False] = 1
return user_item # return the user_item matrix
def get_top_articles(n, df):
'''
INPUT:
n - (int) the number of top articles to return
df - (pandas dataframe) df as defined at the top of the notebook
OUTPUT:
top_articles - (list) A list of the top 'n' article titles
'''
article_id_grouped_df = df.groupby(['title'])
top_articles = article_id_grouped_df['user_id'].count().sort_values(ascending=False).iloc[:n].index.tolist()
return top_articles # Return the top article titles from df (not df_content)
def get_top_article_ids(n, df):
'''
INPUT:
n - (int) the number of top articles to return
df - (pandas dataframe) df as defined at the top of the notebook
OUTPUT:
    top_articles_ids - (list) A list of the top 'n' article ids
'''
article_id_grouped_df = df.groupby(['article_id'])
top_articles_ids = article_id_grouped_df['user_id'].count().sort_values(ascending=False).iloc[:n].index.tolist()
return top_articles_ids # Return the top article ids
def user_user_recs(user_id, user_item, df, m=10):
'''
INPUT:
user_id - (int) a user id
m - (int) the number of recommendations you want for the user
OUTPUT:
recs - (list) a list of recommendations for the user by article id
rec_names - (list) a list of recommendations for the user by article title
Description:
Loops through the users based on closeness to the input user_id
For each user - finds articles the user hasn't seen before and provides them as recs
Does this until m recommendations are found
Notes:
* Choose the users that have the most total article interactions
before choosing those with fewer article interactions.
* Choose articles with the articles with the most total interactions
before choosing those with fewer total interactions.
'''
def get_user_articles_names_ids(user_id):
'''
INPUT:
user_id
OUTPUT:
article_ids - (list) a list of the article ids seen by the user
article_names - (list) a list of article names associated with the list of article ids
                        (taken from the title column in df)
Description:
Provides a list of the article_ids and article titles that have been seen by a user
'''
# Your code here
article_ids = user_item.loc[user_id][user_item.loc[user_id] ==1].index.tolist()
article_names = []
for i in article_ids:
try:
title = df[df['article_id'] == i]['title'].unique()[0]
except IndexError:
title ="None"
article_names.append(title)
article_ids = list(map(str, article_ids))
return article_ids, article_names # return the ids and names
def find_similar_users():
'''
OUTPUT:
similar_users - (list) an ordered list where the closest users (largest dot product users)
are listed first
Description:
        Computes the similarity of the provided user to every other user based on the dot product
        Returns an ordered list with the most similar users first
'''
# compute similarity of each user to the provided user
user_item_tmp = user_item.copy()
user_item_tmp[user_item_tmp.isna() == True] = 0 # 1. Make Nan to 0
row = user_item_tmp.loc[user_id] # 2. Select a row
result_dot = row@user_item_tmp.T # 3. Dot product of each of row of the matrix
result_dot.drop(labels = [user_id], inplace=True) # remove the own user's id
most_similar_users = result_dot.sort_values(ascending=False).index.tolist() # sort by similarity # create list of just the ids
return most_similar_users # return a list of the users in order from most to least similar
def get_top_sorted_users(most_similar_users):
'''
INPUT:
most_similar_users - (list) an ordered list where the closest users (largest dot product users)
are listed first
OUTPUT:
neighbors_df - (pandas dataframe) a dataframe with:
neighbor_id - is a neighbor user_id
similarity - measure of the similarity of each user to the provided user_id
                        num_interactions - the number of articles viewed by the user
Other Details - sort the neighbors_df by the similarity and then by number of interactions where
highest of each is higher in the dataframe
'''
# Make neighbor_id column
df_user_id_grouped =df.groupby("user_id")
df_user_id_grouped['article_id'].count().sort_values(ascending=False)
neighbors_df = pd.DataFrame()
neighbors_df['neighbor_id'] = most_similar_users
# make similarity column
user_item_tmp = user_item.copy()
user_item_tmp[user_item_tmp.isna() == True] = 0 # 1. Make Nan to 0
row = user_item_tmp.loc[user_id] # Select a row
result_dot = row@user_item_tmp.T # Dot product of each of row of the matrix
result_dot.drop(labels = [user_id], inplace=True) # remove the own user's id
similarity = result_dot.sort_values(ascending=False).values.tolist()[0:10]
neighbors_df['similarity'] = similarity
# Make num_interactions column
num_interactions = []
for i in neighbors_df['neighbor_id']:
counted_interaction = df_user_id_grouped['article_id'].count().loc[i]
num_interactions.append(counted_interaction)
neighbors_df['num_interactions'] = num_interactions
neighbors_df = neighbors_df.sort_values(by=['similarity', 'num_interactions'], ascending=False)
return neighbors_df # Return the dataframe specified in the doc_string
recs = []
rec_names =[]
counter = 0
# Get seen article ids and names from selected user id
article_ids, article_names = get_user_articles_names_ids(user_id)
# Make set to find unseen articles
seen_ids_set = set(article_ids)
most_similar_users = find_similar_users()[0:10]
neighbors_df = get_top_sorted_users(most_similar_users)
# Find similar users of the selected user
similar_users_list = neighbors_df['neighbor_id'] # Get neighbor_df
# Make recommendation list
for sim_user in similar_users_list:
if counter < m:
# Get seen article ids and names from similar users
sim_article_ids, sim_article_names = get_user_articles_names_ids(sim_user)
# Make dict (key: article_ids, value:article_names)
sim_user_dict = dict(zip(sim_article_ids, sim_article_names))
# Make set to find unseen articles
sim_seen_ids_set = set(sim_article_ids)
# Create set of unseen articles_ids
unseen_ids_set = sim_seen_ids_set.difference(seen_ids_set)
for i in unseen_ids_set:
if counter < m:
recs.append(i)
rec_names.append(sim_user_dict[i])
counter += 1
return recs, rec_names
###
def make_Tfidf_array(df_content):
def tokenize(text):
'''
        Splits text into separate words, lowercases each word, and strips surrounding whitespace.
        The function also removes irrelevant stopwords.
Input:
1. text: text message
Output:
1. Clean_tokens : list of tokenized clean words
'''
        # Get rid of special characters
text = re.sub(r"[^a-zA-Z0-9]", " ", text)
# Tokenize
tokens = word_tokenize(text)
# Lemmatize
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok, pos='v').lower().strip()
clean_tokens.append(clean_tok)
# Remove stop words
stopwords = nltk.corpus.stopwords.words('english')
clean_tokens = [token for token in clean_tokens if token not in stopwords]
return clean_tokens
corpus = df_content['doc_description']
df_content['doc_description'].fillna(df_content['doc_full_name'], inplace=True)
stop_words = stopwords.words("english")
lemmatizer = WordNetLemmatizer()
# Text Processing, Feature Extraction
vect = TfidfVectorizer(tokenizer=tokenize)
# get counts of each token (word) in text data
X = vect.fit_transform(corpus)
X = X.toarray()
return vect, X
def make_content_recs(article_id, df_content, df, m=10):
'''
INPUT:
article_id = (int) a article id in df_content
m - (int) the number of recommendations you want for the user
df_content - (pandas dataframe) df_content as defined at the top of the notebook
df - (pandas dataframe) df as defined at the top of the notebook
OUTPUT:
recs - (list) a list of recommendations for the user by article id
rec_names - (list) a list of recommendations for the user by article title
'''
def tokenize(text):
'''
        Splits text into separate words, lowercases each word, and strips surrounding whitespace.
        The function also removes irrelevant stopwords.
Input:
1. text: text message
Output:
1. Clean_tokens : list of tokenized clean words
'''
        # Get rid of special characters
text = re.sub(r"[^a-zA-Z0-9]", " ", text)
# Tokenize
tokens = word_tokenize(text)
# Lemmatize
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok, pos='v').lower().strip()
clean_tokens.append(clean_tok)
# Remove stop words
stopwords = nltk.corpus.stopwords.words('english')
clean_tokens = [token for token in clean_tokens if token not in stopwords]
return clean_tokens
vect, X = make_Tfidf_array(df_content)
if article_id in df_content.article_id:
cosine_similarity = linear_kernel(X, X)
df_similarity = pd.DataFrame(cosine_similarity[article_id], columns=['similarity'])
df_similarity_modified = df_similarity.drop(article_id)
recs = df_similarity_modified.similarity.sort_values(ascending=False).index[0:10].tolist()
rec_names = []
for i in recs:
name = df_content[df_content['article_id'] == i]['doc_full_name'].values[0]
rec_names.append(name)
else:
tfidf_feature_name = vect.get_feature_names()
# Get title of the document of interest
booktitle = df[df['article_id'] == article_id]['title'].values[0]
# Tokenize the title
booktitle_tokenized = tokenize(booktitle)
X_slice_list = []
for i in booktitle_tokenized:
if i in tfidf_feature_name:
X_slice_list.append(tfidf_feature_name.index(i))
X_slice_list.sort()
X_sliced = X[:,X_slice_list]
check_df = pd.DataFrame(X_sliced, columns=X_slice_list)
check_df['sum'] = check_df.sum(axis=1)
recs = check_df.sort_values("sum", ascending=False)[0:10].index.tolist()
rec_names = []
for i in recs:
name = df_content[df_content['article_id'] == i]['doc_full_name'].values[0]
rec_names.append(name)
return recs, rec_names
| 37.400585
| 135
| 0.642718
|
import pandas as pd
import re
import nltk
from nltk.corpus import stopwords
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics.pairwise import linear_kernel
def email_mapper(df):
coded_dict = dict()
cter = 1
email_encoded = []
for val in df['email']:
if val not in coded_dict:
coded_dict[val] = cter
cter+=1
email_encoded.append(coded_dict[val])
return email_encoded
def create_user_item_matrix(df):
user_item = df.groupby('user_id')['article_id'].value_counts().unstack()
user_item[user_item.isna() == False] = 1
return user_item
def get_top_articles(n, df):
article_id_grouped_df = df.groupby(['title'])
top_articles = article_id_grouped_df['user_id'].count().sort_values(ascending=False).iloc[:n].index.tolist()
return top_articles
def get_top_article_ids(n, df):
article_id_grouped_df = df.groupby(['article_id'])
top_articles_ids = article_id_grouped_df['user_id'].count().sort_values(ascending=False).iloc[:n].index.tolist()
return top_articles_ids
def user_user_recs(user_id, user_item, df, m=10):
def get_user_articles_names_ids(user_id):
article_ids = user_item.loc[user_id][user_item.loc[user_id] ==1].index.tolist()
article_names = []
for i in article_ids:
try:
title = df[df['article_id'] == i]['title'].unique()[0]
except IndexError:
title ="None"
article_names.append(title)
article_ids = list(map(str, article_ids))
return article_ids, article_names
def find_similar_users():
user_item_tmp = user_item.copy()
user_item_tmp[user_item_tmp.isna() == True] = 0
row = user_item_tmp.loc[user_id]
result_dot = row@user_item_tmp.T
result_dot.drop(labels = [user_id], inplace=True)
most_similar_users = result_dot.sort_values(ascending=False).index.tolist() # sort by similarity # create list of just the ids
return most_similar_users # return a list of the users in order from most to least similar
def get_top_sorted_users(most_similar_users):
# Make neighbor_id column
df_user_id_grouped =df.groupby("user_id")
df_user_id_grouped['article_id'].count().sort_values(ascending=False)
neighbors_df = pd.DataFrame()
neighbors_df['neighbor_id'] = most_similar_users
# make similarity column
user_item_tmp = user_item.copy()
user_item_tmp[user_item_tmp.isna() == True] = 0 # 1. Make Nan to 0
row = user_item_tmp.loc[user_id] # Select a row
result_dot = row@user_item_tmp.T # Dot product of each of row of the matrix
result_dot.drop(labels = [user_id], inplace=True) # remove the own user's id
similarity = result_dot.sort_values(ascending=False).values.tolist()[0:10]
neighbors_df['similarity'] = similarity
num_interactions = []
for i in neighbors_df['neighbor_id']:
counted_interaction = df_user_id_grouped['article_id'].count().loc[i]
num_interactions.append(counted_interaction)
neighbors_df['num_interactions'] = num_interactions
neighbors_df = neighbors_df.sort_values(by=['similarity', 'num_interactions'], ascending=False)
return neighbors_df
recs = []
rec_names =[]
counter = 0
article_ids, article_names = get_user_articles_names_ids(user_id)
seen_ids_set = set(article_ids)
most_similar_users = find_similar_users()[0:10]
neighbors_df = get_top_sorted_users(most_similar_users)
similar_users_list = neighbors_df['neighbor_id']
for sim_user in similar_users_list:
if counter < m:
sim_article_ids, sim_article_names = get_user_articles_names_ids(sim_user)
sim_user_dict = dict(zip(sim_article_ids, sim_article_names))
sim_seen_ids_set = set(sim_article_ids)
unseen_ids_set = sim_seen_ids_set.difference(seen_ids_set)
for i in unseen_ids_set:
if counter < m:
recs.append(i)
rec_names.append(sim_user_dict[i])
counter += 1
return recs, rec_names
def make_Tfidf_array(df_content):
def tokenize(text):
text = re.sub(r"[^a-zA-Z0-9]", " ", text)
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok, pos='v').lower().strip()
clean_tokens.append(clean_tok)
stopwords = nltk.corpus.stopwords.words('english')
clean_tokens = [token for token in clean_tokens if token not in stopwords]
return clean_tokens
corpus = df_content['doc_description']
df_content['doc_description'].fillna(df_content['doc_full_name'], inplace=True)
stop_words = stopwords.words("english")
lemmatizer = WordNetLemmatizer()
vect = TfidfVectorizer(tokenizer=tokenize)
X = vect.fit_transform(corpus)
X = X.toarray()
return vect, X
def make_content_recs(article_id, df_content, df, m=10):
def tokenize(text):
text = re.sub(r"[^a-zA-Z0-9]", " ", text)
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok, pos='v').lower().strip()
clean_tokens.append(clean_tok)
stopwords = nltk.corpus.stopwords.words('english')
clean_tokens = [token for token in clean_tokens if token not in stopwords]
return clean_tokens
vect, X = make_Tfidf_array(df_content)
if article_id in df_content.article_id:
cosine_similarity = linear_kernel(X, X)
df_similarity = pd.DataFrame(cosine_similarity[article_id], columns=['similarity'])
df_similarity_modified = df_similarity.drop(article_id)
recs = df_similarity_modified.similarity.sort_values(ascending=False).index[0:10].tolist()
rec_names = []
for i in recs:
name = df_content[df_content['article_id'] == i]['doc_full_name'].values[0]
rec_names.append(name)
else:
tfidf_feature_name = vect.get_feature_names()
booktitle = df[df['article_id'] == article_id]['title'].values[0]
booktitle_tokenized = tokenize(booktitle)
X_slice_list = []
for i in booktitle_tokenized:
if i in tfidf_feature_name:
X_slice_list.append(tfidf_feature_name.index(i))
X_slice_list.sort()
X_sliced = X[:,X_slice_list]
check_df = pd.DataFrame(X_sliced, columns=X_slice_list)
check_df['sum'] = check_df.sum(axis=1)
recs = check_df.sort_values("sum", ascending=False)[0:10].index.tolist()
rec_names = []
for i in recs:
name = df_content[df_content['article_id'] == i]['doc_full_name'].values[0]
rec_names.append(name)
return recs, rec_names
| true
| true
|
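The module above expects an interaction log with user_id, article_id and title columns. Here is a minimal, fabricated example of the rank-based helpers; it assumes pandas, nltk and scikit-learn are installed and that the file is importable as model.recommendation_functions (inferred from the repository path).

import pandas as pd
from model.recommendation_functions import (
    create_user_item_matrix, get_top_articles, get_top_article_ids,
)

# Fabricated interaction log: one row per (user, article) view.
df = pd.DataFrame({
    'user_id':    [1, 1, 2, 2, 3],
    'article_id': [10.0, 11.0, 10.0, 12.0, 10.0],
    'title':      ['intro to ml', 'pandas tips', 'intro to ml',
                   'deep learning', 'intro to ml'],
})

user_item = create_user_item_matrix(df)  # users x articles, 1 where a view exists
print(get_top_articles(2, df))           # ['intro to ml', ...]
print(get_top_article_ids(2, df))        # [10.0, ...]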
7909d0092d2c063f7117e359ede7eeb7a11af4a9
| 2,594
|
py
|
Python
|
openshift/installer/vendored/openshift-ansible-3.5.91/lookup_plugins/oo_option.py
|
fahlmant/openshift-tools
|
dbb4f16ccde3404c36c23108c45ca7b67138ee12
|
[
"Apache-2.0"
] | 2
|
2015-02-04T07:24:39.000Z
|
2015-05-03T10:27:56.000Z
|
openshift/installer/vendored/openshift-ansible-3.5.91/lookup_plugins/oo_option.py
|
fahlmant/openshift-tools
|
dbb4f16ccde3404c36c23108c45ca7b67138ee12
|
[
"Apache-2.0"
] | 3
|
2016-12-01T23:01:36.000Z
|
2016-12-02T00:16:48.000Z
|
openshift/installer/vendored/openshift-ansible-3.5.91/lookup_plugins/oo_option.py
|
fahlmant/openshift-tools
|
dbb4f16ccde3404c36c23108c45ca7b67138ee12
|
[
"Apache-2.0"
] | 2
|
2018-10-16T05:11:13.000Z
|
2018-11-07T01:46:29.000Z
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
'''
oo_option lookup plugin for openshift-ansible
Usage:
- debug:
msg: "{{ lookup('oo_option', '<key>') | default('<default_value>', True) }}"
This returns, by order of priority:
* if it exists, the `cli_<key>` ansible variable. This variable is set by `bin/cluster --option <key>=<value> …`
* if it exists, the environment variable named `<key>`
* if none of the above conditions are met, empty string is returned
'''
import os
# pylint: disable=no-name-in-module,import-error,unused-argument,unused-variable,super-init-not-called,too-few-public-methods,missing-docstring
try:
# ansible-2.0
from ansible.plugins.lookup import LookupBase
except ImportError:
# ansible-1.9.x
class LookupBase(object):
def __init__(self, basedir=None, runner=None, **kwargs):
self.runner = runner
self.basedir = self.runner.basedir
def get_basedir(self, variables):
return self.basedir
# Reason: disable too-few-public-methods because the `run` method is the only
# one required by the Ansible API
# Status: permanently disabled
# pylint: disable=too-few-public-methods
class LookupModule(LookupBase):
''' oo_option lookup plugin main class '''
# Reason: disable unused-argument because Ansible is calling us with many
# parameters we are not interested in.
# The lookup plugins of Ansible have this kwargs “catch-all” parameter
# which is not used
# Status: permanently disabled unless Ansible API evolves
# pylint: disable=unused-argument
def __init__(self, basedir=None, **kwargs):
''' Constructor '''
self.basedir = basedir
# Reason: disable unused-argument because Ansible is calling us with many
# parameters we are not interested in.
# The lookup plugins of Ansible have this kwargs “catch-all” parameter
# which is not used
# Status: permanently disabled unless Ansible API evolves
# pylint: disable=unused-argument
def run(self, terms, variables, **kwargs):
''' Main execution path '''
ret = []
for term in terms:
option_name = term.split()[0]
cli_key = 'cli_' + option_name
if 'vars' in variables and cli_key in variables['vars']:
ret.append(variables['vars'][cli_key])
elif option_name in os.environ:
ret.append(os.environ[option_name])
else:
ret.append('')
return ret
| 33.688312
| 143
| 0.653045
|
import os
try:
from ansible.plugins.lookup import LookupBase
except ImportError:
class LookupBase(object):
def __init__(self, basedir=None, runner=None, **kwargs):
self.runner = runner
self.basedir = self.runner.basedir
def get_basedir(self, variables):
return self.basedir
class LookupModule(LookupBase):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, variables, **kwargs):
ret = []
for term in terms:
option_name = term.split()[0]
cli_key = 'cli_' + option_name
if 'vars' in variables and cli_key in variables['vars']:
ret.append(variables['vars'][cli_key])
elif option_name in os.environ:
ret.append(os.environ[option_name])
else:
ret.append('')
return ret
| true
| true
|
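The plugin's resolution order (cli_<key> variable, then environment variable, then empty string) can be exercised outside Ansible as well. A minimal sketch, assuming the file is importable as oo_option purely for the purpose of the demonstration; the option names are made up.

import os
from oo_option import LookupModule

lookup = LookupModule(basedir='.')

# 1. A cli_<key> ansible variable wins ...
print(lookup.run(['deployment_type'], {'vars': {'cli_deployment_type': 'origin'}}))  # ['origin']

# 2. ... otherwise the environment variable of the same name ...
os.environ['deployment_type'] = 'openshift-enterprise'
print(lookup.run(['deployment_type'], {'vars': {}}))  # ['openshift-enterprise']

# 3. ... and an empty string when neither is set.
print(lookup.run(['missing_option'], {'vars': {}}))   # ['']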
7909d04ed29d3cb13bb51d7196e34c9b4d97aec3
| 7,615
|
py
|
Python
|
generated/intermediate/ansible-module-rest/azure_rm_apimanagementapiexport_info.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
generated/intermediate/ansible-module-rest/azure_rm_apimanagementapiexport_info.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
generated/intermediate/ansible-module-rest/azure_rm_apimanagementapiexport_info.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_apimanagementapiexport_info
version_added: '2.9'
short_description: Get ApiExport info.
description:
- Get info of ApiExport.
options:
resource_group:
description:
- The name of the resource group.
required: true
type: str
service_name:
description:
- The name of the API Management service.
required: true
type: str
api_id:
description:
- >-
API revision identifier. Must be unique in the current API Management
service instance. Non-current revision has ;rev=n as a suffix where n is
the revision number.
required: true
type: str
format:
description:
- >-
Format in which to export the Api Details to the Storage Blob with Sas
Key valid for 5 minutes.
required: true
type: str
export:
description:
- Query parameter required to export the API details.
required: true
type: str
id:
description:
- ResourceId of the API which was exported.
type: str
value:
description:
- The object defining the schema of the exported Api Detail
type: dict
suboptions:
link:
description:
- >-
Link to the Storage Blob containing the result of the export
operation. The Blob Uri is only valid for 5 minutes.
type: str
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: ApiManagementGetApiExportInOpenApi2dot0
azure_rm_apimanagementapiexport_info:
resource_group: myResourceGroup
service_name: myService
api_id: myApi
format: swagger-link
export: 'true'
- name: ApiManagementGetApiExportInOpenApi3dot0
azure_rm_apimanagementapiexport_info:
resource_group: myResourceGroup
service_name: myService
api_id: myApi
format: openapi-link
export: 'true'
'''
RETURN = '''
api_export:
description: >-
A list of dict results where the key is the name of the ApiExport and the
values are the facts for that ApiExport.
returned: always
type: complex
contains:
apiexport_name:
description: The key is the name of the server that the values relate to.
type: complex
contains:
id:
description:
- ResourceId of the API which was exported.
returned: always
type: str
sample: null
format:
description:
- >-
Format in which the Api Details are exported to the Storage Blob
with Sas Key valid for 5 minutes.
returned: always
type: str
sample: null
value:
description:
- The object defining the schema of the exported Api Detail
returned: always
type: dict
sample: null
contains:
link:
description:
- >-
Link to the Storage Blob containing the result of the export
operation. The Blob Uri is only valid for 5 minutes.
returned: always
type: str
sample: null
'''
import time
import json
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.azure_rm_common_rest import GenericRestClient
from copy import deepcopy
from msrestazure.azure_exceptions import CloudError
class AzureRMApiExportInfo(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
                required=True
),
service_name=dict(
type='str',
                required=True
),
api_id=dict(
type='str',
                required=True
),
format=dict(
type='str',
                required=True
),
export=dict(
type='str',
                required=True
)
)
self.resource_group = None
self.service_name = None
self.api_id = None
self.format = None
self.export = None
self.id = None
self.value = None
self.results = dict(changed=False)
self.mgmt_client = None
self.state = None
self.url = None
self.status_code = [200]
self.query_parameters = {}
self.query_parameters['api-version'] = '2019-01-01'
self.header_parameters = {}
self.header_parameters['Content-Type'] = 'application/json; charset=utf-8'
self.mgmt_client = None
super(AzureRMApiExportInfo, self).__init__(self.module_arg_spec, supports_tags=True)
def exec_module(self, **kwargs):
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
self.mgmt_client = self.get_mgmt_svc_client(GenericRestClient,
base_url=self._cloud_environment.endpoints.resource_manager)
if (self.resource_group is not None and
self.service_name is not None and
self.api_id is not None and
self.format is not None and
self.export is not None):
self.results['api_export'] = self.format_item(self.get())
return self.results
def get(self):
response = None
results = {}
# prepare url
self.url = ('/subscriptions' +
'/{{ subscription_id }}' +
'/resourceGroups' +
'/{{ resource_group }}' +
'/providers' +
'/Microsoft.ApiManagement' +
'/service' +
'/{{ service_name }}' +
'/apis' +
'/{{ api_name }}')
self.url = self.url.replace('{{ subscription_id }}', self.subscription_id)
self.url = self.url.replace('{{ resource_group }}', self.resource_group)
self.url = self.url.replace('{{ service_name }}', self.service_name)
        self.url = self.url.replace('{{ api_name }}', self.api_id)
try:
response = self.mgmt_client.query(self.url,
'GET',
self.query_parameters,
self.header_parameters,
None,
self.status_code,
600,
30)
results['temp_item'] = json.loads(response.text)
# self.log('Response : {0}'.format(response))
except CloudError as e:
self.log('Could not get info for @(Model.ModuleOperationNameUpper).')
return results
    def format_item(self, item):
return item
def main():
AzureRMApiExportInfo()
if __name__ == '__main__':
main()
| 29.980315
| 113
| 0.548785
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_apimanagementapiexport_info
version_added: '2.9'
short_description: Get ApiExport info.
description:
- Get info of ApiExport.
options:
resource_group:
description:
- The name of the resource group.
required: true
type: str
service_name:
description:
- The name of the API Management service.
required: true
type: str
api_id:
description:
- >-
API revision identifier. Must be unique in the current API Management
service instance. Non-current revision has ;rev=n as a suffix where n is
the revision number.
required: true
type: str
format:
description:
- >-
Format in which to export the Api Details to the Storage Blob with Sas
Key valid for 5 minutes.
required: true
type: str
export:
description:
- Query parameter required to export the API details.
required: true
type: str
id:
description:
- ResourceId of the API which was exported.
type: str
value:
description:
- The object defining the schema of the exported Api Detail
type: dict
suboptions:
link:
description:
- >-
Link to the Storage Blob containing the result of the export
operation. The Blob Uri is only valid for 5 minutes.
type: str
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: ApiManagementGetApiExportInOpenApi2dot0
azure_rm_apimanagementapiexport_info:
resource_group: myResourceGroup
service_name: myService
api_id: myApi
format: swagger-link
export: 'true'
- name: ApiManagementGetApiExportInOpenApi3dot0
azure_rm_apimanagementapiexport_info:
resource_group: myResourceGroup
service_name: myService
api_id: myApi
format: openapi-link
export: 'true'
'''
RETURN = '''
api_export:
description: >-
A list of dict results where the key is the name of the ApiExport and the
values are the facts for that ApiExport.
returned: always
type: complex
contains:
apiexport_name:
description: The key is the name of the server that the values relate to.
type: complex
contains:
id:
description:
- ResourceId of the API which was exported.
returned: always
type: str
sample: null
format:
description:
- >-
Format in which the Api Details are exported to the Storage Blob
with Sas Key valid for 5 minutes.
returned: always
type: str
sample: null
value:
description:
- The object defining the schema of the exported Api Detail
returned: always
type: dict
sample: null
contains:
link:
description:
- >-
Link to the Storage Blob containing the result of the export
operation. The Blob Uri is only valid for 5 minutes.
returned: always
type: str
sample: null
'''
import time
import json
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.azure_rm_common_rest import GenericRestClient
from copy import deepcopy
from msrestazure.azure_exceptions import CloudError
class AzureRMApiExportInfo(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
                required=True
),
service_name=dict(
type='str',
                required=True
),
api_id=dict(
type='str',
                required=True
),
format=dict(
type='str',
                required=True
),
export=dict(
type='str',
                required=True
)
)
self.resource_group = None
self.service_name = None
self.api_id = None
self.format = None
self.export = None
self.id = None
self.value = None
self.results = dict(changed=False)
self.mgmt_client = None
self.state = None
self.url = None
self.status_code = [200]
self.query_parameters = {}
self.query_parameters['api-version'] = '2019-01-01'
self.header_parameters = {}
self.header_parameters['Content-Type'] = 'application/json; charset=utf-8'
self.mgmt_client = None
super(AzureRMApiExportInfo, self).__init__(self.module_arg_spec, supports_tags=True)
def exec_module(self, **kwargs):
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
self.mgmt_client = self.get_mgmt_svc_client(GenericRestClient,
base_url=self._cloud_environment.endpoints.resource_manager)
if (self.resource_group is not None and
self.service_name is not None and
self.api_id is not None and
self.format is not None and
self.export is not None):
self.results['api_export'] = self.format_item(self.get())
return self.results
def get(self):
response = None
results = {}
self.url = ('/subscriptions' +
'/{{ subscription_id }}' +
'/resourceGroups' +
'/{{ resource_group }}' +
'/providers' +
'/Microsoft.ApiManagement' +
'/service' +
'/{{ service_name }}' +
'/apis' +
'/{{ api_name }}')
self.url = self.url.replace('{{ subscription_id }}', self.subscription_id)
self.url = self.url.replace('{{ resource_group }}', self.resource_group)
self.url = self.url.replace('{{ service_name }}', self.service_name)
        self.url = self.url.replace('{{ api_name }}', self.api_id)
try:
response = self.mgmt_client.query(self.url,
'GET',
self.query_parameters,
self.header_parameters,
None,
self.status_code,
600,
30)
results['temp_item'] = json.loads(response.text)
except CloudError as e:
self.log('Could not get info for @(Model.ModuleOperationNameUpper).')
return results
    def format_item(self, item):
return item
def main():
AzureRMApiExportInfo()
if __name__ == '__main__':
main()
| true
| true
|
7909d08b73a5d759fe402b271e78c50a1f845e1f
| 28,848
|
py
|
Python
|
lib/filemanager.py
|
jhkennedy/processflow
|
c404bd3ad043fd6ae18d4f24d735777574faa660
|
[
"MIT"
] | null | null | null |
lib/filemanager.py
|
jhkennedy/processflow
|
c404bd3ad043fd6ae18d4f24d735777574faa660
|
[
"MIT"
] | null | null | null |
lib/filemanager.py
|
jhkennedy/processflow
|
c404bd3ad043fd6ae18d4f24d735777574faa660
|
[
"MIT"
] | null | null | null |
import os
import re
import sys
import threading
import logging
import random
from time import sleep
from peewee import *
from enum import IntEnum
from threading import Thread
from models import DataFile
from lib.jobstatus import JobStatus
from lib.util import print_debug
from lib.util import print_line
from lib.util import print_message
class FileStatus(IntEnum):
PRESENT = 0
NOT_PRESENT = 1
IN_TRANSIT = 2
class FileManager(object):
"""
Manage all files required by jobs
"""
def __init__(self, event_list, config, database='processflow.db'):
"""
Parameters:
database (str): the path to where to create the sqlite database file
config (dict): the global configuration dict
"""
self._event_list = event_list
self._db_path = database
self._config = config
if os.path.exists(database):
os.remove(database)
DataFile._meta.database.init(database)
if DataFile.table_exists():
DataFile.drop_table()
DataFile.create_table()
self.thread_list = list()
self.kill_event = threading.Event()
def __str__(self):
# TODO: make this better
return str({
'db_path': self._db_path,
})
def get_endpoints(self):
"""
Return a list of globus endpoints for all cases
"""
q = (DataFile
.select()
.where(
DataFile.transfer_type == 'globus'))
endpoints = list()
for x in q.execute():
if x.remote_uuid not in endpoints:
endpoints.append(x.remote_uuid)
return endpoints
def write_database(self):
"""
Write out a human readable version of the database for debug purposes
"""
file_list_path = os.path.join(
self._config['global']['project_path'],
'output',
'file_list.txt')
with open(file_list_path, 'w') as fp:
try:
for case in self._config['simulations']:
if case in ['start_year', 'end_year', 'comparisons']:
continue
fp.write('+++++++++++++++++++++++++++++++++++++++++++++')
fp.write('\n\t{case}\t\n'.format(case=case))
fp.write('+++++++++++++++++++++++++++++++++++++++++++++\n')
q = (DataFile
.select(DataFile.datatype)
.where(DataFile.case == case)
.distinct())
for df_type in q.execute():
_type = df_type.datatype
fp.write('===================================\n')
fp.write('\t' + _type + ':\n')
datafiles = (DataFile
.select()
.where(
(DataFile.datatype == _type) &
(DataFile.case == case)))
for datafile in datafiles.execute():
filestr = '-------------------------------------'
filestr += '\n\t name: ' + datafile.name + '\n\t local_status: '
if datafile.local_status == 0:
filestr += ' present, '
elif datafile.local_status == 1:
filestr += ' missing, '
else:
filestr += ' in transit, '
filestr += '\n\t remote_status: '
if datafile.remote_status == 0:
filestr += ' present'
elif datafile.remote_status == 1:
filestr += ' missing'
else:
filestr += ' in transit'
filestr += '\n\t local_size: ' + \
str(datafile.local_size)
filestr += '\n\t local_path: ' + datafile.local_path
filestr += '\n\t remote_path: ' + datafile.remote_path
filestr += '\n\t year: ' + str(datafile.year)
filestr += '\n\t month: ' + str(datafile.month) + '\n'
fp.write(filestr)
except Exception as e:
print_debug(e)
def check_data_ready(self, data_required, case, start_year=None, end_year=None):
try:
for datatype in data_required:
if start_year and end_year:
q = (DataFile
.select()
.where(
(DataFile.year >= start_year) &
(DataFile.year <= end_year) &
(DataFile.case == case) &
(DataFile.datatype == datatype)))
else:
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.datatype == datatype)))
datafiles = q.execute()
for df in datafiles:
if not os.path.exists(df.local_path) and df.local_status == FileStatus.PRESENT.value:
df.local_status = FileStatus.NOT_PRESENT.value
df.save()
elif os.path.exists(df.local_path) and df.local_status == FileStatus.NOT_PRESENT.value:
df.local_status = FileStatus.PRESENT.value
df.save()
if df.local_status != FileStatus.PRESENT.value:
return False
return True
except Exception as e:
print_debug(e)
def render_file_string(self, data_type, data_type_option, case, year=None, month=None):
"""
Takes strings from the data_types dict and replaces the keywords with the appropriate values
"""
# setup the replacement dict
start_year = int(self._config['simulations']['start_year'])
end_year = int(self._config['simulations']['end_year'])
replace = {
'PROJECT_PATH': self._config['global']['project_path'],
'REMOTE_PATH': self._config['simulations'][case].get('remote_path', ''),
'CASEID': case,
'REST_YR': '{:04d}'.format(start_year + 1),
'START_YR': '{:04d}'.format(start_year),
'END_YR': '{:04d}'.format(end_year),
'LOCAL_PATH': self._config['simulations'][case].get('local_path', '')
}
if year is not None:
replace['YEAR'] = '{:04d}'.format(year)
if month is not None:
replace['MONTH'] = '{:02d}'.format(month)
if self._config['data_types'][data_type].get(case):
if self._config['data_types'][data_type][case].get(data_type_option):
instring = self._config['data_types'][data_type][case][data_type_option]
for item in self._config['simulations'][case]:
if item.upper() in self._config['data_types'][data_type][case][data_type_option]:
instring = instring.replace(item.upper(), self._config['simulations'][case][item])
return instring
instring = self._config['data_types'][data_type][data_type_option]
for string, val in replace.items():
if string in instring:
instring = instring.replace(string, val)
return instring
def populate_file_list(self):
"""
Populate the database with the required DataFile entries
"""
msg = 'Creating file table'
print_line(
line=msg,
event_list=self._event_list)
newfiles = list()
start_year = int(self._config['simulations']['start_year'])
end_year = int(self._config['simulations']['end_year'])
with DataFile._meta.database.atomic():
# for each case
for case in self._config['simulations']:
if case in ['start_year', 'end_year', 'comparisons']:
continue
# for each data type
for _type in self._config['data_types']:
data_types_for_case = self._config['simulations'][case]['data_types']
if 'all' not in data_types_for_case:
if _type not in data_types_for_case:
continue
# setup the base local_path
local_path = self.render_file_string(
data_type=_type,
data_type_option='local_path',
case=case)
new_files = list()
if self._config['data_types'][_type].get('monthly') and self._config['data_types'][_type]['monthly'] in ['True', 'true', '1', 1]:
# handle monthly data
for year in range(start_year, end_year + 1):
for month in range(1, 13):
filename = self.render_file_string(
data_type=_type,
data_type_option='file_format',
case=case,
year=year,
month=month)
r_path = self.render_file_string(
data_type=_type,
data_type_option='remote_path',
case=case,
year=year,
month=month)
new_files.append({
'name': filename,
'remote_path': os.path.join(r_path, filename),
'local_path': os.path.join(local_path, filename),
'local_status': FileStatus.NOT_PRESENT.value,
'case': case,
'remote_status': FileStatus.NOT_PRESENT.value,
'year': year,
'month': month,
'datatype': _type,
'local_size': 0,
'transfer_type': self._config['simulations'][case]['transfer_type'],
'remote_uuid': self._config['simulations'][case].get('remote_uuid', ''),
'remote_hostname': self._config['simulations'][case].get('remote_hostname', '')
})
else:
# handle one-off data
filename = self.render_file_string(
data_type=_type,
data_type_option='file_format',
case=case)
r_path = self.render_file_string(
data_type=_type,
data_type_option='remote_path',
case=case)
new_files.append({
'name': filename,
'remote_path': os.path.join(r_path, filename),
'local_path': os.path.join(local_path, filename),
'local_status': FileStatus.NOT_PRESENT.value,
'case': case,
'remote_status': FileStatus.NOT_PRESENT.value,
'year': 0,
'month': 0,
'datatype': _type,
'local_size': 0,
'transfer_type': self._config['simulations'][case]['transfer_type'],
'remote_uuid': self._config['simulations'][case].get('remote_uuid', ''),
'remote_hostname': self._config['simulations'][case].get('remote_hostname', '')
})
tail, _ = os.path.split(new_files[0]['local_path'])
if not os.path.exists(tail):
os.makedirs(tail)
step = 50
for idx in range(0, len(new_files), step):
DataFile.insert_many(
new_files[idx: idx + step]).execute()
msg = 'Database update complete'
print_line(msg, self._event_list)
def verify_remote_files(self, client, case):
"""
Check that the user supplied file paths are valid for remote files
Parameters:
client: either an ssh_client or a globus_client
case: the case to check remote paths for
"""
if not self._config['global']['verify']:
return True
msg = 'verifying remote file paths'
print_line(msg, self._event_list)
data_types_to_verify = []
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.local_status != FileStatus.PRESENT.value)))
for datafile in q.execute():
if datafile.datatype not in data_types_to_verify:
data_types_to_verify.append(datafile.datatype)
found_all = True
for datatype in data_types_to_verify:
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.datatype == datatype)))
files = q.execute()
remote_path, _ = os.path.split(files[0].remote_path)
msg = 'Checking {} files in {}'.format(datatype, remote_path)
print_line(msg, self._event_list)
if files[0].transfer_type == 'globus':
from lib.globus_interface import get_ls as globus_ls
remote_contents = globus_ls(
client=client,
path=remote_path,
endpoint=self._config['simulations'][case]['remote_uuid'])
elif files[0].transfer_type == 'sftp':
from lib.ssh_interface import get_ls as ssh_ls
remote_contents = ssh_ls(
client=client,
remote_path=remote_path)
remote_names = [x['name'] for x in remote_contents]
for df in files:
if df.name not in remote_names:
msg = 'Unable to find file {name} at {remote_path}'.format(
name=df.name,
remote_path=remote_path)
print_message(msg, 'error')
found_all = False
if not found_all:
return False
else:
msg = 'found all remote files for {}'.format(case)
print_message(msg, 'ok')
return True
def terminate_transfers(self):
self.kill_event.set()
for thread in self.thread_list:
msg = 'terminating {}, this may take a moment'.format(thread.name)
print_line(msg, self._event_list)
thread.join()
def print_db(self):
for df in DataFile.select():
            print({
                'case': df.case,
                'type': df.datatype,
                'name': df.name,
                'local_path': df.local_path,
                'remote_path': df.remote_path,
                'transfer_type': df.transfer_type,
            })
def add_files(self, data_type, file_list):
"""
Add files to the database
Parameters:
data_type (str): the data_type of the new files
file_list (list): a list of dictionaries in the format
local_path (str): path to the file,
case (str): the case these files belong to
name (str): the filename
remote_path (str): the remote path of these files, optional
transfer_type (str): the transfer type of these files, optional
year (int): the year of the file, optional
month (int): the month of the file, optional
remote_uuid (str): remote globus endpoint id, optional
remote_hostname (str): remote hostname for sftp transfer, optional
"""
try:
new_files = list()
for file in file_list:
new_files.append({
'name': file['name'],
'local_path': file['local_path'],
'local_status': file.get('local_status', FileStatus.NOT_PRESENT.value),
'datatype': data_type,
'case': file['case'],
'year': file.get('year', 0),
'month': file.get('month', 0),
'remote_uuid': file.get('remote_uuid', ''),
'remote_hostname': file.get('remote_hostname', ''),
'remote_path': file.get('remote_path', ''),
'remote_status': FileStatus.NOT_PRESENT.value,
'local_size': 0,
'transfer_type': file.get('transfer_type', 'local')
})
step = 50
for idx in range(0, len(new_files), step):
DataFile.insert_many(
new_files[idx: idx + step]).execute()
except Exception as e:
print_debug(e)
def update_local_status(self):
"""
Update the database with the local status of the expected files
        Return True if there was new local data found, False otherwise
"""
try:
query = (DataFile
.select()
.where(
(DataFile.local_status == FileStatus.NOT_PRESENT.value) |
(DataFile.local_status == FileStatus.IN_TRANSIT.value)))
printed = False
change = False
for datafile in query.execute():
marked = False
if os.path.exists(datafile.local_path):
if datafile.local_status == FileStatus.NOT_PRESENT.value or datafile.local_status == FileStatus.IN_TRANSIT.value:
datafile.local_status = FileStatus.PRESENT.value
marked = True
change = True
else:
if datafile.transfer_type == 'local':
msg = '{case} transfer_type is local, but {filename} is not present'.format(
case=datafile.case, filename=datafile.name)
logging.error(msg)
if not printed:
print_line(msg, self._event_list)
printed = True
if datafile.local_status == FileStatus.PRESENT.value:
datafile.local_status = FileStatus.NOT_PRESENT.value
marked = True
if marked:
datafile.save()
except Exception as e:
print_debug(e)
return change
def all_data_local(self):
"""
Returns True if all data is local, False otherwise
"""
try:
query = (DataFile
.select()
.where(
(DataFile.local_status == FileStatus.NOT_PRESENT.value) |
(DataFile.local_status == FileStatus.IN_TRANSIT.value)))
missing_data = query.execute()
# if any of the data is missing, not all data is local
if missing_data:
logging.debug('All data is not local, missing the following')
logging.debug([x.name for x in missing_data])
return False
except Exception as e:
print_debug(e)
logging.debug('All data is local')
return True
def transfer_needed(self, event_list, event, config):
"""
        Start a transfer job for any files that aren't local but do exist remotely
Globus user must already be logged in
"""
        # required files don't exist locally but do exist remotely,
        # or exist locally with a different local and remote size
target_files = list()
try:
q = (DataFile
.select(DataFile.case)
.where(
DataFile.local_status == FileStatus.NOT_PRESENT.value))
caselist = [x.case for x in q.execute()]
if not caselist or len(caselist) == 0:
return
cases = list()
for case in caselist:
if case not in cases:
cases.append(case)
for case in cases:
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.local_status == FileStatus.NOT_PRESENT.value)))
                required_files = [x for x in q.execute()
                                  if x.transfer_type != 'local']
if not required_files:
msg = 'ERROR: all missing files are marked as local'
print_line(msg, event_list)
return
                # mark files as in-transit so we don't double-copy
                # can't do a bulk update since there may be too many records for the db to handle
step = 50
for idx in range(0, len(required_files), step):
q = (DataFile
.update({DataFile.local_status: FileStatus.IN_TRANSIT})
.where(DataFile.name << [x.name for x in required_files[idx: step + idx]]))
q.execute()
for file in required_files:
target_files.append({
'local_path': file.local_path,
'remote_path': file.remote_path,
})
if required_files[0].transfer_type == 'globus':
from lib.globus_interface import transfer as globus_transfer
from globus_cli.services.transfer import get_client as get_globus_client
msg = 'Starting globus file transfer of {} files'.format(
len(required_files))
print_line(msg, event_list)
msg = 'See https://www.globus.org/app/activity for transfer details'
print_line(msg, event_list)
client = get_globus_client()
if not self.verify_remote_files(client=client, case=case):
return False
remote_uuid = required_files[0].remote_uuid
local_uuid = self._config['global']['local_globus_uuid']
thread_name = '{}_globus_transfer'.format(required_files[0].case)
_args = (client, remote_uuid,
local_uuid, target_files,
self.kill_event)
thread = Thread(
target=globus_transfer,
name=thread_name,
args=_args)
self.thread_list.append(thread)
thread.start()
elif required_files[0].transfer_type == 'sftp':
from lib.ssh_interface import get_ssh_client
msg = 'Starting sftp file transfer of {} files'.format(
len(required_files))
print_line(msg, event_list)
client = get_ssh_client(required_files[0].remote_hostname)
if not self.verify_remote_files(client=client, case=case):
return False
thread_name = '{}_sftp_transfer'.format(required_files[0].case)
_args = (target_files, client, self.kill_event)
thread = Thread(
target=self._ssh_transfer,
name=thread_name,
args=_args)
self.thread_list.append(thread)
thread.start()
except Exception as e:
print_debug(e)
return False
def _ssh_transfer(self, target_files, client, event):
from lib.ssh_interface import transfer as ssh_transfer
sftp_client = client.open_sftp()
for file in target_files:
if event.is_set():
return
_, filename = os.path.split(file['local_path'])
msg = 'sftp transfer from {} to {}'.format(
file['remote_path'], file['local_path'])
logging.info(msg)
msg = 'starting sftp transfer for {}'.format(filename)
print_line(msg, self._event_list)
ssh_transfer(sftp_client, file)
msg = 'sftp transfer complete for {}'.format(filename)
print_line(msg, self._event_list)
msg = self.report_files_local()
print_line(msg, self._event_list)
def report_files_local(self):
"""
        Return a string in the format 'X of Y files available locally' where X is the number of files present locally and Y is the total
"""
q = (DataFile
.select(DataFile.local_status)
.where(DataFile.local_status == FileStatus.PRESENT.value))
local = len([x.local_status for x in q.execute()])
q = (DataFile.select(DataFile.local_status))
total = len([x.local_status for x in q.execute()])
msg = '{local}/{total} files available locally or {prec:.2f}%'.format(
local=local, total=total, prec=((local*1.0)/total)*100)
return msg
def get_file_paths_by_year(self, datatype, case, start_year=None, end_year=None):
"""
Return paths to files that match the given type, start, and end year
Parameters:
datatype (str): the type of data
case (str): the name of the case to return files for
start_year (int): the first year to return data for
end_year (int): the last year to return data for
"""
try:
if start_year and end_year:
if datatype in ['climo_regrid', 'climo_native', 'ts_regrid', 'ts_native']:
query = (DataFile
.select()
.where(
(DataFile.month == end_year) &
(DataFile.year == start_year) &
(DataFile.case == case) &
(DataFile.datatype == datatype) &
(DataFile.local_status == FileStatus.PRESENT.value)))
else:
query = (DataFile
.select()
.where(
(DataFile.year <= end_year) &
(DataFile.year >= start_year) &
(DataFile.case == case) &
(DataFile.datatype == datatype) &
(DataFile.local_status == FileStatus.PRESENT.value)))
else:
query = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.datatype == datatype) &
(DataFile.local_status == FileStatus.PRESENT.value)))
datafiles = query.execute()
if datafiles is None or len(datafiles) == 0:
return None
return [x.local_path for x in datafiles]
except Exception as e:
print_debug(e)
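# --- Illustrative sketch (not part of the original module) ---
# FileManager.render_file_string() above fills keywords such as PROJECT_PATH,
# CASEID, YEAR and MONTH into the path and file_format strings taken from the
# config. The standalone function below mimics that substitution on a made-up
# template; the keyword names mirror the ones used above, the values are
# hypothetical.
def _render_example():
    template = 'PROJECT_PATH/input/CASEID/CASEID.cam.h0.YEAR-MONTH.nc'
    replace = {
        'PROJECT_PATH': '/p/projects/demo',
        'CASEID': 'piControl_ne30',
        'YEAR': '{:04d}'.format(1),
        'MONTH': '{:02d}'.format(2),
    }
    rendered = template
    for key, val in replace.items():
        rendered = rendered.replace(key, val)
    # -> '/p/projects/demo/input/piControl_ne30/piControl_ne30.cam.h0.0001-02.nc'
    return rendered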
| 44.313364
| 149
| 0.475284
|
import os
import re
import sys
import threading
import logging
import random
from time import sleep
from peewee import *
from enum import IntEnum
from threading import Thread
from models import DataFile
from lib.jobstatus import JobStatus
from lib.util import print_debug
from lib.util import print_line
from lib.util import print_message
class FileStatus(IntEnum):
PRESENT = 0
NOT_PRESENT = 1
IN_TRANSIT = 2
class FileManager(object):
"""
Manage all files required by jobs
"""
def __init__(self, event_list, config, database='processflow.db'):
"""
Parameters:
database (str): the path to where to create the sqlite database file
config (dict): the global configuration dict
"""
self._event_list = event_list
self._db_path = database
self._config = config
if os.path.exists(database):
os.remove(database)
DataFile._meta.database.init(database)
if DataFile.table_exists():
DataFile.drop_table()
DataFile.create_table()
self.thread_list = list()
self.kill_event = threading.Event()
def __str__(self):
return str({
'db_path': self._db_path,
})
def get_endpoints(self):
"""
Return a list of globus endpoints for all cases
"""
q = (DataFile
.select()
.where(
DataFile.transfer_type == 'globus'))
endpoints = list()
for x in q.execute():
if x.remote_uuid not in endpoints:
endpoints.append(x.remote_uuid)
return endpoints
def write_database(self):
"""
Write out a human readable version of the database for debug purposes
"""
file_list_path = os.path.join(
self._config['global']['project_path'],
'output',
'file_list.txt')
with open(file_list_path, 'w') as fp:
try:
for case in self._config['simulations']:
if case in ['start_year', 'end_year', 'comparisons']:
continue
fp.write('+++++++++++++++++++++++++++++++++++++++++++++')
fp.write('\n\t{case}\t\n'.format(case=case))
fp.write('+++++++++++++++++++++++++++++++++++++++++++++\n')
q = (DataFile
.select(DataFile.datatype)
.where(DataFile.case == case)
.distinct())
for df_type in q.execute():
_type = df_type.datatype
fp.write('===================================\n')
fp.write('\t' + _type + ':\n')
datafiles = (DataFile
.select()
.where(
(DataFile.datatype == _type) &
(DataFile.case == case)))
for datafile in datafiles.execute():
filestr = '-------------------------------------'
filestr += '\n\t name: ' + datafile.name + '\n\t local_status: '
if datafile.local_status == 0:
filestr += ' present, '
elif datafile.local_status == 1:
filestr += ' missing, '
else:
filestr += ' in transit, '
filestr += '\n\t remote_status: '
if datafile.remote_status == 0:
filestr += ' present'
elif datafile.remote_status == 1:
filestr += ' missing'
else:
filestr += ' in transit'
filestr += '\n\t local_size: ' + \
str(datafile.local_size)
filestr += '\n\t local_path: ' + datafile.local_path
filestr += '\n\t remote_path: ' + datafile.remote_path
filestr += '\n\t year: ' + str(datafile.year)
filestr += '\n\t month: ' + str(datafile.month) + '\n'
fp.write(filestr)
except Exception as e:
print_debug(e)
def check_data_ready(self, data_required, case, start_year=None, end_year=None):
try:
for datatype in data_required:
if start_year and end_year:
q = (DataFile
.select()
.where(
(DataFile.year >= start_year) &
(DataFile.year <= end_year) &
(DataFile.case == case) &
(DataFile.datatype == datatype)))
else:
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.datatype == datatype)))
datafiles = q.execute()
for df in datafiles:
if not os.path.exists(df.local_path) and df.local_status == FileStatus.PRESENT.value:
df.local_status = FileStatus.NOT_PRESENT.value
df.save()
elif os.path.exists(df.local_path) and df.local_status == FileStatus.NOT_PRESENT.value:
df.local_status = FileStatus.PRESENT.value
df.save()
if df.local_status != FileStatus.PRESENT.value:
return False
return True
except Exception as e:
print_debug(e)
def render_file_string(self, data_type, data_type_option, case, year=None, month=None):
"""
Takes strings from the data_types dict and replaces the keywords with the appropriate values
"""
start_year = int(self._config['simulations']['start_year'])
end_year = int(self._config['simulations']['end_year'])
replace = {
'PROJECT_PATH': self._config['global']['project_path'],
'REMOTE_PATH': self._config['simulations'][case].get('remote_path', ''),
'CASEID': case,
'REST_YR': '{:04d}'.format(start_year + 1),
'START_YR': '{:04d}'.format(start_year),
'END_YR': '{:04d}'.format(end_year),
'LOCAL_PATH': self._config['simulations'][case].get('local_path', '')
}
if year is not None:
replace['YEAR'] = '{:04d}'.format(year)
if month is not None:
replace['MONTH'] = '{:02d}'.format(month)
if self._config['data_types'][data_type].get(case):
if self._config['data_types'][data_type][case].get(data_type_option):
instring = self._config['data_types'][data_type][case][data_type_option]
for item in self._config['simulations'][case]:
if item.upper() in self._config['data_types'][data_type][case][data_type_option]:
instring = instring.replace(item.upper(), self._config['simulations'][case][item])
return instring
instring = self._config['data_types'][data_type][data_type_option]
for string, val in replace.items():
if string in instring:
instring = instring.replace(string, val)
return instring
def populate_file_list(self):
"""
Populate the database with the required DataFile entries
"""
msg = 'Creating file table'
print_line(
line=msg,
event_list=self._event_list)
newfiles = list()
start_year = int(self._config['simulations']['start_year'])
end_year = int(self._config['simulations']['end_year'])
with DataFile._meta.database.atomic():
for case in self._config['simulations']:
if case in ['start_year', 'end_year', 'comparisons']:
continue
for _type in self._config['data_types']:
data_types_for_case = self._config['simulations'][case]['data_types']
if 'all' not in data_types_for_case:
if _type not in data_types_for_case:
continue
local_path = self.render_file_string(
data_type=_type,
data_type_option='local_path',
case=case)
new_files = list()
if self._config['data_types'][_type].get('monthly') and self._config['data_types'][_type]['monthly'] in ['True', 'true', '1', 1]:
for year in range(start_year, end_year + 1):
for month in range(1, 13):
filename = self.render_file_string(
data_type=_type,
data_type_option='file_format',
case=case,
year=year,
month=month)
r_path = self.render_file_string(
data_type=_type,
data_type_option='remote_path',
case=case,
year=year,
month=month)
new_files.append({
'name': filename,
'remote_path': os.path.join(r_path, filename),
'local_path': os.path.join(local_path, filename),
'local_status': FileStatus.NOT_PRESENT.value,
'case': case,
'remote_status': FileStatus.NOT_PRESENT.value,
'year': year,
'month': month,
'datatype': _type,
'local_size': 0,
'transfer_type': self._config['simulations'][case]['transfer_type'],
'remote_uuid': self._config['simulations'][case].get('remote_uuid', ''),
'remote_hostname': self._config['simulations'][case].get('remote_hostname', '')
})
else:
filename = self.render_file_string(
data_type=_type,
data_type_option='file_format',
case=case)
r_path = self.render_file_string(
data_type=_type,
data_type_option='remote_path',
case=case)
new_files.append({
'name': filename,
'remote_path': os.path.join(r_path, filename),
'local_path': os.path.join(local_path, filename),
'local_status': FileStatus.NOT_PRESENT.value,
'case': case,
'remote_status': FileStatus.NOT_PRESENT.value,
'year': 0,
'month': 0,
'datatype': _type,
'local_size': 0,
'transfer_type': self._config['simulations'][case]['transfer_type'],
'remote_uuid': self._config['simulations'][case].get('remote_uuid', ''),
'remote_hostname': self._config['simulations'][case].get('remote_hostname', '')
})
tail, _ = os.path.split(new_files[0]['local_path'])
if not os.path.exists(tail):
os.makedirs(tail)
step = 50
for idx in range(0, len(new_files), step):
DataFile.insert_many(
new_files[idx: idx + step]).execute()
msg = 'Database update complete'
print_line(msg, self._event_list)
def verify_remote_files(self, client, case):
"""
Check that the user supplied file paths are valid for remote files
Parameters:
client: either an ssh_client or a globus_client
case: the case to check remote paths for
"""
if not self._config['global']['verify']:
return True
msg = 'verifying remote file paths'
print_line(msg, self._event_list)
data_types_to_verify = []
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.local_status != FileStatus.PRESENT.value)))
for datafile in q.execute():
if datafile.datatype not in data_types_to_verify:
data_types_to_verify.append(datafile.datatype)
found_all = True
for datatype in data_types_to_verify:
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.datatype == datatype)))
files = q.execute()
remote_path, _ = os.path.split(files[0].remote_path)
msg = 'Checking {} files in {}'.format(datatype, remote_path)
print_line(msg, self._event_list)
if files[0].transfer_type == 'globus':
from lib.globus_interface import get_ls as globus_ls
remote_contents = globus_ls(
client=client,
path=remote_path,
endpoint=self._config['simulations'][case]['remote_uuid'])
elif files[0].transfer_type == 'sftp':
from lib.ssh_interface import get_ls as ssh_ls
remote_contents = ssh_ls(
client=client,
remote_path=remote_path)
remote_names = [x['name'] for x in remote_contents]
for df in files:
if df.name not in remote_names:
msg = 'Unable to find file {name} at {remote_path}'.format(
name=df.name,
remote_path=remote_path)
print_message(msg, 'error')
found_all = False
if not found_all:
return False
else:
msg = 'found all remote files for {}'.format(case)
print_message(msg, 'ok')
return True
def terminate_transfers(self):
self.kill_event.set()
for thread in self.thread_list:
msg = 'terminating {}, this may take a moment'.format(thread.name)
print_line(msg, self._event_list)
thread.join()
def print_db(self):
for df in DataFile.select():
            print({
                'case': df.case,
                'type': df.datatype,
                'name': df.name,
                'local_path': df.local_path,
                'remote_path': df.remote_path,
                'transfer_type': df.transfer_type,
            })
def add_files(self, data_type, file_list):
"""
Add files to the database
Parameters:
data_type (str): the data_type of the new files
file_list (list): a list of dictionaries in the format
local_path (str): path to the file,
case (str): the case these files belong to
name (str): the filename
remote_path (str): the remote path of these files, optional
transfer_type (str): the transfer type of these files, optional
year (int): the year of the file, optional
month (int): the month of the file, optional
remote_uuid (str): remote globus endpoint id, optional
remote_hostname (str): remote hostname for sftp transfer, optional
"""
try:
new_files = list()
for file in file_list:
new_files.append({
'name': file['name'],
'local_path': file['local_path'],
'local_status': file.get('local_status', FileStatus.NOT_PRESENT.value),
'datatype': data_type,
'case': file['case'],
'year': file.get('year', 0),
'month': file.get('month', 0),
'remote_uuid': file.get('remote_uuid', ''),
'remote_hostname': file.get('remote_hostname', ''),
'remote_path': file.get('remote_path', ''),
'remote_status': FileStatus.NOT_PRESENT.value,
'local_size': 0,
'transfer_type': file.get('transfer_type', 'local')
})
step = 50
for idx in range(0, len(new_files), step):
DataFile.insert_many(
new_files[idx: idx + step]).execute()
except Exception as e:
print_debug(e)
def update_local_status(self):
"""
Update the database with the local status of the expected files
        Return True if there was new local data found, False otherwise
"""
try:
query = (DataFile
.select()
.where(
(DataFile.local_status == FileStatus.NOT_PRESENT.value) |
(DataFile.local_status == FileStatus.IN_TRANSIT.value)))
printed = False
change = False
for datafile in query.execute():
marked = False
if os.path.exists(datafile.local_path):
if datafile.local_status == FileStatus.NOT_PRESENT.value or datafile.local_status == FileStatus.IN_TRANSIT.value:
datafile.local_status = FileStatus.PRESENT.value
marked = True
change = True
else:
if datafile.transfer_type == 'local':
msg = '{case} transfer_type is local, but {filename} is not present'.format(
case=datafile.case, filename=datafile.name)
logging.error(msg)
if not printed:
print_line(msg, self._event_list)
printed = True
if datafile.local_status == FileStatus.PRESENT.value:
datafile.local_status = FileStatus.NOT_PRESENT.value
marked = True
if marked:
datafile.save()
except Exception as e:
print_debug(e)
return change
def all_data_local(self):
"""
Returns True if all data is local, False otherwise
"""
try:
query = (DataFile
.select()
.where(
(DataFile.local_status == FileStatus.NOT_PRESENT.value) |
(DataFile.local_status == FileStatus.IN_TRANSIT.value)))
missing_data = query.execute()
if missing_data:
logging.debug('All data is not local, missing the following')
logging.debug([x.name for x in missing_data])
return False
except Exception as e:
print_debug(e)
logging.debug('All data is local')
return True
def transfer_needed(self, event_list, event, config):
"""
        Start a transfer job for any files that aren't local but do exist remotely
Globus user must already be logged in
"""
target_files = list()
try:
q = (DataFile
.select(DataFile.case)
.where(
DataFile.local_status == FileStatus.NOT_PRESENT.value))
caselist = [x.case for x in q.execute()]
if not caselist or len(caselist) == 0:
return
cases = list()
for case in caselist:
if case not in cases:
cases.append(case)
for case in cases:
q = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.local_status == FileStatus.NOT_PRESENT.value)))
                required_files = [x for x in q.execute()
                                  if x.transfer_type != 'local']
if not required_files:
msg = 'ERROR: all missing files are marked as local'
print_line(msg, event_list)
return
step = 50
for idx in range(0, len(required_files), step):
q = (DataFile
.update({DataFile.local_status: FileStatus.IN_TRANSIT})
.where(DataFile.name << [x.name for x in required_files[idx: step + idx]]))
q.execute()
for file in required_files:
target_files.append({
'local_path': file.local_path,
'remote_path': file.remote_path,
})
if required_files[0].transfer_type == 'globus':
from lib.globus_interface import transfer as globus_transfer
from globus_cli.services.transfer import get_client as get_globus_client
msg = 'Starting globus file transfer of {} files'.format(
len(required_files))
print_line(msg, event_list)
msg = 'See https://www.globus.org/app/activity for transfer details'
print_line(msg, event_list)
client = get_globus_client()
if not self.verify_remote_files(client=client, case=case):
return False
remote_uuid = required_files[0].remote_uuid
local_uuid = self._config['global']['local_globus_uuid']
thread_name = '{}_globus_transfer'.format(required_files[0].case)
_args = (client, remote_uuid,
local_uuid, target_files,
self.kill_event)
thread = Thread(
target=globus_transfer,
name=thread_name,
args=_args)
self.thread_list.append(thread)
thread.start()
elif required_files[0].transfer_type == 'sftp':
from lib.ssh_interface import get_ssh_client
msg = 'Starting sftp file transfer of {} files'.format(
len(required_files))
print_line(msg, event_list)
client = get_ssh_client(required_files[0].remote_hostname)
if not self.verify_remote_files(client=client, case=case):
return False
thread_name = '{}_sftp_transfer'.format(required_files[0].case)
_args = (target_files, client, self.kill_event)
thread = Thread(
target=self._ssh_transfer,
name=thread_name,
args=_args)
self.thread_list.append(thread)
thread.start()
except Exception as e:
print_debug(e)
return False
def _ssh_transfer(self, target_files, client, event):
from lib.ssh_interface import transfer as ssh_transfer
sftp_client = client.open_sftp()
for file in target_files:
if event.is_set():
return
_, filename = os.path.split(file['local_path'])
msg = 'sftp transfer from {} to {}'.format(
file['remote_path'], file['local_path'])
logging.info(msg)
msg = 'starting sftp transfer for {}'.format(filename)
print_line(msg, self._event_list)
ssh_transfer(sftp_client, file)
msg = 'sftp transfer complete for {}'.format(filename)
print_line(msg, self._event_list)
msg = self.report_files_local()
print_line(msg, self._event_list)
def report_files_local(self):
"""
        Return a string in the format 'X of Y files available locally' where X is the number of files present locally and Y is the total
"""
q = (DataFile
.select(DataFile.local_status)
.where(DataFile.local_status == FileStatus.PRESENT.value))
local = len([x.local_status for x in q.execute()])
q = (DataFile.select(DataFile.local_status))
total = len([x.local_status for x in q.execute()])
msg = '{local}/{total} files available locally or {prec:.2f}%'.format(
local=local, total=total, prec=((local*1.0)/total)*100)
return msg
def get_file_paths_by_year(self, datatype, case, start_year=None, end_year=None):
"""
Return paths to files that match the given type, start, and end year
Parameters:
datatype (str): the type of data
case (str): the name of the case to return files for
start_year (int): the first year to return data for
end_year (int): the last year to return data for
"""
try:
if start_year and end_year:
if datatype in ['climo_regrid', 'climo_native', 'ts_regrid', 'ts_native']:
query = (DataFile
.select()
.where(
(DataFile.month == end_year) &
(DataFile.year == start_year) &
(DataFile.case == case) &
(DataFile.datatype == datatype) &
(DataFile.local_status == FileStatus.PRESENT.value)))
else:
query = (DataFile
.select()
.where(
(DataFile.year <= end_year) &
(DataFile.year >= start_year) &
(DataFile.case == case) &
(DataFile.datatype == datatype) &
(DataFile.local_status == FileStatus.PRESENT.value)))
else:
query = (DataFile
.select()
.where(
(DataFile.case == case) &
(DataFile.datatype == datatype) &
(DataFile.local_status == FileStatus.PRESENT.value)))
datafiles = query.execute()
if datafiles is None or len(datafiles) == 0:
return None
return [x.local_path for x in datafiles]
except Exception as e:
print_debug(e)
| false
| true
|
7909d146b3b34368cf10072ffae5d2b5584f814a
| 3,565
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/streptomycesspnrrlf5008.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-09-10T18:31:58.000Z
|
2022-03-24T04:28:04.000Z
|
bindings/python/ensmallen/datasets/string/streptomycesspnrrlf5008.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/streptomycesspnrrlf5008.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Streptomyces sp. NRRLF5008.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def StreptomycesSpNrrlf5008(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Streptomyces sp. NRRLF5008 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of the Streptomyces sp. NRRLF5008 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="StreptomycesSpNrrlf5008",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
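# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of how this retrieval helper is typically called; the
# keyword values are just the defaults documented above. Running it would
# download the STRING data, so the call sits behind a __main__ guard.
if __name__ == "__main__":
    graph = StreptomycesSpNrrlf5008(
        directed=False,
        load_nodes=True,
        version="links.v11.5",
    )
    print(graph)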
| 33.009259
| 223
| 0.677419
|
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph
def StreptomycesSpNrrlf5008(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
return AutomaticallyRetrievedGraph(
graph_name="StreptomycesSpNrrlf5008",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| true
| true
|
7909d3751ce9c647e474a5cfae66ed8e2a5ae126
| 7,217
|
py
|
Python
|
cloudify_cli/tests/commands/test_init.py
|
TS-at-WS/cloudify-cli
|
598b54ecd67495a76678177f910cdc5eac6128d0
|
[
"Apache-2.0"
] | null | null | null |
cloudify_cli/tests/commands/test_init.py
|
TS-at-WS/cloudify-cli
|
598b54ecd67495a76678177f910cdc5eac6128d0
|
[
"Apache-2.0"
] | 10
|
2020-08-02T07:45:42.000Z
|
2021-06-11T01:03:45.000Z
|
cloudify_cli/tests/commands/test_init.py
|
TS-at-WS/cloudify-cli
|
598b54ecd67495a76678177f910cdc5eac6128d0
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
import yaml
from mock import patch
from dsl_parser.exceptions import DSLParsingLogicException
from .. import cfy
from ... import env
from ...config import config
from ...commands import init
from .test_base import CliCommandTest
from .constants import BLUEPRINTS_DIR, SAMPLE_INPUTS_PATH, \
DEFAULT_BLUEPRINT_FILE_NAME, SAMPLE_CUSTOM_NAME_ARCHIVE
class InitTest(CliCommandTest):
def test_init_initialized_directory(self):
self.use_manager()
self.invoke(
'cfy init',
err_str_segment='Environment is already initialized')
def test_init_overwrite(self):
# Config values shouldn't change between init resets
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertFalse(conf['colors'])
with open(config.CLOUDIFY_CONFIG_PATH, 'w') as f:
conf['colors'] = True
f.write(yaml.safe_dump(conf))
cfy.invoke('init -r')
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertTrue(conf['colors'])
def test_init_overwrite_hard(self):
# Config values should change between hard init resets
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertFalse(conf['colors'])
with open(config.CLOUDIFY_CONFIG_PATH, 'w') as f:
conf['colors'] = True
f.write(yaml.safe_dump(conf))
self.invoke('cfy init -r --hard')
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertFalse(conf['colors'])
def test_init_overwrite_on_initial_init(self):
# Simply verifying the overwrite flag doesn't break the first init
cfy.purge_dot_cloudify()
self.invoke('cfy init -r')
def test_init_invalid_blueprint_path(self):
self.invoke(
'cfy init idonotexist.yaml',
err_str_segment='You must provide either a path to a local file',
)
def test_init_default_outputs(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
DEFAULT_BLUEPRINT_FILE_NAME
)
self.invoke('cfy init {0}'.format(blueprint_path))
cfy.register_commands()
output = self.invoke(
'cfy deployments outputs -b local').logs.split('\n')
self.assertIn(' "key1": "default_val1", ', output)
self.assertIn(' "key2": "default_val2", ', output)
self.assertIn(' "key3": "default_val3", ', output)
self.assertIn(' "param": null, ', output)
self.assertIn(' "custom_param": null, ', output)
self.assertIn(' "provider_context": null', output)
def test_init_default_inputs(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
DEFAULT_BLUEPRINT_FILE_NAME
)
command = 'cfy init {0}'.format(blueprint_path)
self.invoke(command)
cfy.register_commands()
output = self.invoke(
'cfy deployments inputs -b local').logs.split('\n')
self.assertIn(' "key1": "default_val1", ', output)
self.assertIn(' "key2": "default_val2", ', output)
self.assertIn(' "key3": "default_val3"', output)
def test_init_with_inputs(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
DEFAULT_BLUEPRINT_FILE_NAME
)
command = 'cfy init {0} -i {1} -i key3=val3'.format(
blueprint_path,
SAMPLE_INPUTS_PATH
)
self.invoke(command)
cfy.register_commands()
output = self.invoke(
'cfy deployments inputs -b local').logs.split('\n')
self.assertIn(' "key1": "val1", ', output)
self.assertIn(' "key2": "val2", ', output)
self.assertIn(' "key3": "val3"', output)
def test_init_validate_definitions_version_false(self):
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
with open(config.CLOUDIFY_CONFIG_PATH, 'w') as f:
conf['validate_definitions_version'] = False
f.write(yaml.safe_dump(conf))
self.invoke(
'cfy init {0}/local/blueprint_validate_definitions_version.yaml'
.format(BLUEPRINTS_DIR)
)
def test_init_validate_definitions_version_true(self):
self.invoke(
'cfy init {0}/local/blueprint_validate_definitions_version.yaml'
.format(BLUEPRINTS_DIR),
err_str_segment='description not supported in version',
exception=DSLParsingLogicException
)
@patch('cloudify.workflows.local.init_env')
@patch('cloudify_cli.local._install_plugins')
def test_init_install_plugins(self, install_plugins_mock, *_):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
'blueprint_with_plugins.yaml'
)
command = 'cfy init {0} --install-plugins'.format(blueprint_path)
self.invoke(command)
install_plugins_mock.assert_called_with(blueprint_path=blueprint_path)
@patch('cloudify.workflows.local.init_env')
def test_init_with_empty_requirements(self, *_):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
'blueprint_without_plugins.yaml'
)
command = 'cfy init {0} --install-plugins'.format(blueprint_path)
self.invoke(command)
def test_init_missing_plugins(self):
# TODO: put back possible solutions
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
'blueprint_with_plugins.yaml'
)
self.invoke(
'cfy init {0}'.format(blueprint_path),
err_str_segment='mapping error: No module named tasks',
exception=ImportError
)
def test_no_init(self):
# make sure no error is thrown
cfy.purge_dot_cloudify()
self.invoke('cfy profiles list')
def test_init_blueprint_archive_default_name(self):
self.invoke(
'cfy init {0}'.format(SAMPLE_CUSTOM_NAME_ARCHIVE),
err_str_segment='Could not find `blueprint.yaml`'
)
def test_init_blueprint_archive(self):
self.invoke(
'cfy init {0} -b local -n simple_blueprint.yaml'
.format(SAMPLE_CUSTOM_NAME_ARCHIVE)
)
cfy.register_commands()
output = self.invoke(
'cfy deployments inputs -b local').logs.split('\n')
self.assertIn(' "key1": "default_val1", ', output)
self.assertIn(' "key2": "default_val2", ', output)
self.assertIn(' "key3": "default_val3"', output)
def test_set_config(self):
shutil.rmtree(env.CLOUDIFY_WORKDIR)
os.makedirs(env.CLOUDIFY_WORKDIR)
self.assertFalse(os.path.isfile(
os.path.join(env.CLOUDIFY_WORKDIR, 'config.yaml')))
init.set_config()
self.assertTrue(os.path.isfile(
os.path.join(env.CLOUDIFY_WORKDIR, 'config.yaml')))
| 33.882629
| 78
| 0.618678
|
import os
import shutil
import yaml
from mock import patch
from dsl_parser.exceptions import DSLParsingLogicException
from .. import cfy
from ... import env
from ...config import config
from ...commands import init
from .test_base import CliCommandTest
from .constants import BLUEPRINTS_DIR, SAMPLE_INPUTS_PATH, \
DEFAULT_BLUEPRINT_FILE_NAME, SAMPLE_CUSTOM_NAME_ARCHIVE
class InitTest(CliCommandTest):
def test_init_initialized_directory(self):
self.use_manager()
self.invoke(
'cfy init',
err_str_segment='Environment is already initialized')
def test_init_overwrite(self):
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertFalse(conf['colors'])
with open(config.CLOUDIFY_CONFIG_PATH, 'w') as f:
conf['colors'] = True
f.write(yaml.safe_dump(conf))
cfy.invoke('init -r')
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertTrue(conf['colors'])
def test_init_overwrite_hard(self):
# Config values should change between hard init resets
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertFalse(conf['colors'])
with open(config.CLOUDIFY_CONFIG_PATH, 'w') as f:
conf['colors'] = True
f.write(yaml.safe_dump(conf))
self.invoke('cfy init -r --hard')
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
self.assertFalse(conf['colors'])
def test_init_overwrite_on_initial_init(self):
# Simply verifying the overwrite flag doesn't break the first init
cfy.purge_dot_cloudify()
self.invoke('cfy init -r')
def test_init_invalid_blueprint_path(self):
self.invoke(
'cfy init idonotexist.yaml',
err_str_segment='You must provide either a path to a local file',
)
def test_init_default_outputs(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
DEFAULT_BLUEPRINT_FILE_NAME
)
self.invoke('cfy init {0}'.format(blueprint_path))
cfy.register_commands()
output = self.invoke(
'cfy deployments outputs -b local').logs.split('\n')
self.assertIn(' "key1": "default_val1", ', output)
self.assertIn(' "key2": "default_val2", ', output)
self.assertIn(' "key3": "default_val3", ', output)
self.assertIn(' "param": null, ', output)
self.assertIn(' "custom_param": null, ', output)
self.assertIn(' "provider_context": null', output)
def test_init_default_inputs(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
DEFAULT_BLUEPRINT_FILE_NAME
)
command = 'cfy init {0}'.format(blueprint_path)
self.invoke(command)
cfy.register_commands()
output = self.invoke(
'cfy deployments inputs -b local').logs.split('\n')
self.assertIn(' "key1": "default_val1", ', output)
self.assertIn(' "key2": "default_val2", ', output)
self.assertIn(' "key3": "default_val3"', output)
def test_init_with_inputs(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
DEFAULT_BLUEPRINT_FILE_NAME
)
command = 'cfy init {0} -i {1} -i key3=val3'.format(
blueprint_path,
SAMPLE_INPUTS_PATH
)
self.invoke(command)
cfy.register_commands()
output = self.invoke(
'cfy deployments inputs -b local').logs.split('\n')
self.assertIn(' "key1": "val1", ', output)
self.assertIn(' "key2": "val2", ', output)
self.assertIn(' "key3": "val3"', output)
def test_init_validate_definitions_version_false(self):
with open(config.CLOUDIFY_CONFIG_PATH) as f:
conf = yaml.safe_load(f.read())
with open(config.CLOUDIFY_CONFIG_PATH, 'w') as f:
conf['validate_definitions_version'] = False
f.write(yaml.safe_dump(conf))
self.invoke(
'cfy init {0}/local/blueprint_validate_definitions_version.yaml'
.format(BLUEPRINTS_DIR)
)
def test_init_validate_definitions_version_true(self):
self.invoke(
'cfy init {0}/local/blueprint_validate_definitions_version.yaml'
.format(BLUEPRINTS_DIR),
err_str_segment='description not supported in version',
exception=DSLParsingLogicException
)
@patch('cloudify.workflows.local.init_env')
@patch('cloudify_cli.local._install_plugins')
def test_init_install_plugins(self, install_plugins_mock, *_):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
'blueprint_with_plugins.yaml'
)
command = 'cfy init {0} --install-plugins'.format(blueprint_path)
self.invoke(command)
install_plugins_mock.assert_called_with(blueprint_path=blueprint_path)
@patch('cloudify.workflows.local.init_env')
def test_init_with_empty_requirements(self, *_):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
'blueprint_without_plugins.yaml'
)
command = 'cfy init {0} --install-plugins'.format(blueprint_path)
self.invoke(command)
def test_init_missing_plugins(self):
blueprint_path = os.path.join(
BLUEPRINTS_DIR,
'local',
'blueprint_with_plugins.yaml'
)
self.invoke(
'cfy init {0}'.format(blueprint_path),
err_str_segment='mapping error: No module named tasks',
exception=ImportError
)
def test_no_init(self):
cfy.purge_dot_cloudify()
self.invoke('cfy profiles list')
def test_init_blueprint_archive_default_name(self):
self.invoke(
'cfy init {0}'.format(SAMPLE_CUSTOM_NAME_ARCHIVE),
err_str_segment='Could not find `blueprint.yaml`'
)
def test_init_blueprint_archive(self):
self.invoke(
'cfy init {0} -b local -n simple_blueprint.yaml'
.format(SAMPLE_CUSTOM_NAME_ARCHIVE)
)
cfy.register_commands()
output = self.invoke(
'cfy deployments inputs -b local').logs.split('\n')
self.assertIn(' "key1": "default_val1", ', output)
self.assertIn(' "key2": "default_val2", ', output)
self.assertIn(' "key3": "default_val3"', output)
def test_set_config(self):
shutil.rmtree(env.CLOUDIFY_WORKDIR)
os.makedirs(env.CLOUDIFY_WORKDIR)
self.assertFalse(os.path.isfile(
os.path.join(env.CLOUDIFY_WORKDIR, 'config.yaml')))
init.set_config()
self.assertTrue(os.path.isfile(
os.path.join(env.CLOUDIFY_WORKDIR, 'config.yaml')))
| true
| true
|
7909d3a36dfa02f2152928b0b9545a1f800d97b5
| 350
|
py
|
Python
|
socialdistribution/users/urls.py
|
deasisrj1/CMPUT404-Project-BetterSocial
|
f5197a757b69f10d0b911a32159f7fc5641fe7bd
|
[
"Apache-2.0"
] | null | null | null |
socialdistribution/users/urls.py
|
deasisrj1/CMPUT404-Project-BetterSocial
|
f5197a757b69f10d0b911a32159f7fc5641fe7bd
|
[
"Apache-2.0"
] | 2
|
2021-10-29T20:18:57.000Z
|
2021-12-04T14:57:34.000Z
|
socialdistribution/users/urls.py
|
CMPUT404-F21T0/CMPUT404-Project-BetterSocial
|
04a621915108a434d50e900165cefdb0d4cca45c
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path, include
from . import views
app_name = 'users'
urlpatterns = [
path('login/', views.LoginView.as_view(), name = 'login'),
path('logout/', views.LogoutView.as_view(), name = 'logout'),
path('register/', views.RegisterView.as_view(), name = 'register'),
path('', include('django.contrib.auth.urls')),
]
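# --- Illustrative note (not part of the original module) ---
# With app_name = 'users', these routes are reversed with namespaced names,
# e.g. reverse('users:login') in Python or {% url 'users:register' %} in a
# template. The tiny helper below only demonstrates the call and is not used
# by the app.
def _login_url_example():
    from django.urls import reverse
    return reverse('users:login')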
| 26.923077
| 71
| 0.66
|
from django.urls import path, include
from . import views
app_name = 'users'
urlpatterns = [
path('login/', views.LoginView.as_view(), name = 'login'),
path('logout/', views.LogoutView.as_view(), name = 'logout'),
path('register/', views.RegisterView.as_view(), name = 'register'),
path('', include('django.contrib.auth.urls')),
]
| true
| true
|
7909d499da32e75252c56242f8dd4fc4938aa132
| 9,704
|
py
|
Python
|
strategy/alert/alert.py
|
firebird631/siis
|
8d64e8fb67619aaa5c0a62fda9de51dedcd47796
|
[
"PostgreSQL"
] | null | null | null |
strategy/alert/alert.py
|
firebird631/siis
|
8d64e8fb67619aaa5c0a62fda9de51dedcd47796
|
[
"PostgreSQL"
] | null | null | null |
strategy/alert/alert.py
|
firebird631/siis
|
8d64e8fb67619aaa5c0a62fda9de51dedcd47796
|
[
"PostgreSQL"
] | null | null | null |
# @date 2020-02-29
# @author Frederic Scherma, All rights reserved without prejudices.
# @license Copyright (c) 2020 Dream Overflow
# Strategy alert base model
from __future__ import annotations
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from strategy.strategytrader import StrategyTrader
from datetime import datetime
from common.utils import timeframe_to_str
from instrument.instrument import Instrument
import logging
logger = logging.getLogger('siis.strategy.alert')
class Alert(object):
"""
Strategy alert.
    An alert is any possible event from a strategy, and is user configurable.
The direction of the alert is optional.
"""
__slots__ = '_timeframe', '_id', '_created', '_dir', '_expiry', '_countdown', '_message'
VERSION = "1.0.0"
ALERT_UNDEFINED = 0
ALERT_PRICE_CROSS = 1
ALERT_PRICE_CROSS_UP = 2
ALERT_PRICE_CROSS_DOWN = 3
ALERT_PRICE_PCT_CHANGE = 4
ALERT_PRICE_PCT_CHANGE_UP = 5
ALERT_PRICE_PCT_CHANGE_DOWN = 6
PRICE_SRC_BID = 0
PRICE_SRC_ASK = 1
PRICE_SRC_MID = 2
NAME = "undefined"
REGION = ALERT_UNDEFINED
def __init__(self, created: float, timeframe: float):
self._id = -1 # alert unique identifier
self._created = created # creation timestamp (always defined)
self._expiry = 0.0 # expiration timestamp (<=0 never)
self._countdown = -1 # max trigger occurrences, -1 mean forever (until expiry)
self._timeframe = timeframe # specific timeframe or 0 for any
self._message = "" # optional user short message
@classmethod
def name(cls) -> str:
"""
String type name of the alert.
"""
return cls.NAME
@classmethod
def alert(cls) -> int:
"""
Integer type of alert.
"""
return cls.ALERT_UNDEFINED
@classmethod
def version(cls):
return cls.VERSION
@property
def id(self):
"""
Unique alert identifier.
"""
return self._id
@property
def created(self) -> float:
"""
Creation timestamp.
"""
return self._created
@property
def expiry(self) -> float:
"""
Expiry timestamp in second.
"""
return self._expiry
@property
def timeframe(self) -> float:
"""
Timeframe to check for.
"""
return self._timeframe
@property
def countdown(self) -> int:
"""
Expiry countdown integer. -1 for infinite. 0 means terminated.
"""
return self._countdown
@property
def message(self) -> str:
return self._message
#
# setters
#
def set_id(self, _id: int):
self._id = _id
def set_expiry(self, expiry: float):
self._expiry = expiry
def set_countdown(self, countdown: int):
self._countdown = countdown
@message.setter
def message(self, message: str):
        self._message = message
#
# processing
#
def test_alert(self, timestamp: float, bid: float, ask: float, timeframes: dict):
"""
        Each time the market price changes, perform this test. If the test passes then
        the alert is executed and removed from the list, or kept if it is a persistent alert (until its expiry).
        @return The result dict from test() if the signal passes, else None.
"""
if 0 < self._expiry <= timestamp:
# alert expired
return None
if self._timeframe > 0 and self._timeframe not in timeframes:
# missing timeframe
return None
if self._countdown == 0:
# countdown reached 0 previously
return None
result = self.test(timestamp, bid, ask, timeframes)
if result and self._countdown > 0:
# dec countdown
self._countdown -= 1
return result
#
# overrides
#
def init(self, parameters: dict):
"""
Override this method to setup alert parameters from the parameters dict.
"""
pass
def check(self) -> bool:
"""
Perform an integrity check on the data defined to the alert.
        @return True if the check passes.
"""
return True
def test(self, timestamp: float, bid: float, ask: float, timeframes: dict) -> Union[dict, None]:
"""
Perform the test of the alert on the last price and timeframes data.
        @return A valid dict with the trigger condition if triggered, else None
"""
return None
def can_delete(self, timestamp: float, bid: float, ask: float) -> bool:
"""
        By default performs a test on the expiration time, but more deletion cases can be added,
like a cancellation price trigger.
@param timestamp float Current timestamp
@param bid float last bid price
@param ask float last ask price
"""
return (0 < self._expiry <= timestamp) or self._countdown == 0
def str_info(self, instrument: Instrument) -> str:
"""
Override this method to implement the single line message info of the alert.
"""
return ""
#
# helpers
#
def basetime(self, timestamp: float) -> float:
"""
Base time of the candle related to the signal timestamp.
"""
return Instrument.basetime(self._timeframe, timestamp)
def timeframe_to_str(self) -> str:
return timeframe_to_str(self._timeframe)
def created_to_str(self) -> str:
return datetime.fromtimestamp(self._created).strftime('%Y-%m-%d %H:%M:%S')
def expiry_to_str(self) -> str:
if self._expiry > 0:
return datetime.fromtimestamp(self._expiry).strftime('%Y-%m-%d %H:%M:%S')
else:
return "never"
def countdown_to_str(self) -> str:
if self._countdown >= 0:
return str(self._countdown)
else:
return "inf"
def condition_str(self, instrument: Instrument) -> str:
"""
Dump a string with alert condition details.
"""
return ""
def cancellation_str(self, instrument: Instrument) -> str:
"""
Dump a string with alert cancellation details.
"""
return ""
#
# dumps for notify/history
#
def dump_timestamp(self, timestamp: float, v1: bool = False):
if v1:
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')
else:
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%S.%fZ')
def dumps_notify(self, timestamp: float, alert_result: dict, strategy_trader: StrategyTrader) -> dict:
"""
Dumps to dict for notify/history.
"""
return {
'version': self.version(),
'alert': self.alert(),
'name': self.name(),
'id': self._id,
'app-name': strategy_trader.strategy.name,
'app-id': strategy_trader.strategy.identifier,
'timestamp': timestamp,
'market-id': strategy_trader.instrument.market_id,
'symbol': strategy_trader.instrument.symbol,
'timeframe': timeframe_to_str(self._timeframe),
'message': self._message,
'trigger': 0, # 1 for up, -1 for down
'last-price': strategy_trader.instrument.format_price(strategy_trader.instrument.market_price),
'reason': "", # alert specific detail of the trigger
}
#
# persistence
#
def parameters(self) -> dict:
"""
Override this method and add specific parameters to be displayed in a UI or a table.
"""
return {
'name': self.name(),
'id': self._id,
'created': self.created_to_str(),
'timeframe': self.timeframe_to_str(),
'expiry': self.expiry_to_str(),
'countdown': self.countdown_to_str(),
'message': self._message
}
def dumps(self) -> dict:
"""
Override this method to add specific parameters when dumping to the persistence model.
"""
return {
'version': self.version(), # str version (M.m.s)
'alert': self.alert(), # integer type
'name': self.name(), # str type
'id': self._id, # previous integer unique id
'created': self._created, # created timestamp datetime.utcfromtimestamp(self._created).strftime('%Y-%m-%dT%H:%M:%SZ'),
'timeframe': self._timeframe, # timeframe_to_str(self._timeframe),
'expiry': self._expiry, # datetime.utcfromtimestamp(self._expiry).strftime('%Y-%m-%dT%H:%M:%SZ'),
'countdown': self._countdown, # integer countdown
'message': self._message # str user message
}
def loads(self, data: dict):
"""
Override this method to add specific parameters when loading from the persistence model.
"""
self._id = data.get('id', -1)
self._created = data.get('created', 0) # datetime.strptime(data.get('created', '1970-01-01T00:00:00Z'), '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=UTC()).timestamp()
self._timeframe = data.get('timeframe') # timeframe_from_str(data.get('timeframe', 't'))
self._expiry = data.get('expiry', 0) # datetime.strptime(data.get('expiry', '1970-01-01T00:00:00Z'), '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=UTC())..timestamp()
self._countdown = data.get('countdown', -1)
self._message = data.get('message', "")
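# --- Illustrative usage sketch (added; not part of the original file) ---
# A minimal example of how a concrete alert might implement test(), assuming the
# module above is importable as strategy.alert. The subclass name, the trigger
# price attribute and the returned dict keys are hypothetical illustrations only.
from strategy.alert import Alert

class PriceCrossUpAlertExample(Alert):
    NAME = "price-cross-up-example"

    def __init__(self, created, timeframe, price=0.0):
        super().__init__(created, timeframe)
        self._price = price  # hypothetical trigger price

    def test(self, timestamp, bid, ask, timeframes):
        # trigger once the bid price reaches or crosses above the configured price
        if bid >= self._price > 0.0:
            return {'trigger': 1, 'reason': "bid >= {}".format(self._price)}
        return None

# alert = PriceCrossUpAlertExample(created=0.0, timeframe=0.0, price=100.0)
# alert.set_countdown(1)  # trigger at most once
# result = alert.test_alert(timestamp=1.0, bid=101.0, ask=101.5, timeframes={})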
| 30.043344
| 168
| 0.588108
|
from __future__ import annotations
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from strategy.strategytrader import StrategyTrader
from datetime import datetime
from common.utils import timeframe_to_str
from instrument.instrument import Instrument
import logging
logger = logging.getLogger('siis.strategy.alert')
class Alert(object):
__slots__ = '_timeframe', '_id', '_created', '_dir', '_expiry', '_countdown', '_message'
VERSION = "1.0.0"
ALERT_UNDEFINED = 0
ALERT_PRICE_CROSS = 1
ALERT_PRICE_CROSS_UP = 2
ALERT_PRICE_CROSS_DOWN = 3
ALERT_PRICE_PCT_CHANGE = 4
ALERT_PRICE_PCT_CHANGE_UP = 5
ALERT_PRICE_PCT_CHANGE_DOWN = 6
PRICE_SRC_BID = 0
PRICE_SRC_ASK = 1
PRICE_SRC_MID = 2
NAME = "undefined"
REGION = ALERT_UNDEFINED
def __init__(self, created: float, timeframe: float):
self._id = -1
self._created = created
self._expiry = 0.0
self._countdown = -1
self._timeframe = timeframe
self._message = ""
@classmethod
def name(cls) -> str:
return cls.NAME
@classmethod
def alert(cls) -> int:
return cls.ALERT_UNDEFINED
@classmethod
def version(cls):
return cls.VERSION
@property
def id(self):
return self._id
@property
def created(self) -> float:
return self._created
@property
def expiry(self) -> float:
return self._expiry
@property
def timeframe(self) -> float:
return self._timeframe
@property
def countdown(self) -> int:
return self._countdown
@property
def message(self) -> str:
return self._message
def set_id(self, _id: int):
self._id = _id
def set_expiry(self, expiry: float):
self._expiry = expiry
def set_countdown(self, countdown: int):
self._countdown = countdown
@message.setter
def message(self, message: str):
self._message = message
def test_alert(self, timestamp: float, bid: float, ask: float, timeframes: dict):
if 0 < self._expiry <= timestamp:
return None
if self._timeframe > 0 and self._timeframe not in timeframes:
return None
if self._countdown == 0:
return None
result = self.test(timestamp, bid, ask, timeframes)
if result and self._countdown > 0:
self._countdown -= 1
return result
def init(self, parameters: dict):
pass
def check(self) -> bool:
return True
def test(self, timestamp: float, bid: float, ask: float, timeframes: dict) -> Union[dict, None]:
return None
def can_delete(self, timestamp: float, bid: float, ask: float) -> bool:
return (0 < self._expiry <= timestamp) or self._countdown == 0
def str_info(self, instrument: Instrument) -> str:
return ""
def basetime(self, timestamp: float) -> float:
return Instrument.basetime(self._timeframe, timestamp)
def timeframe_to_str(self) -> str:
return timeframe_to_str(self._timeframe)
def created_to_str(self) -> str:
return datetime.fromtimestamp(self._created).strftime('%Y-%m-%d %H:%M:%S')
def expiry_to_str(self) -> str:
if self._expiry > 0:
return datetime.fromtimestamp(self._expiry).strftime('%Y-%m-%d %H:%M:%S')
else:
return "never"
def countdown_to_str(self) -> str:
if self._countdown >= 0:
return str(self._countdown)
else:
return "inf"
def condition_str(self, instrument: Instrument) -> str:
return ""
def cancellation_str(self, instrument: Instrument) -> str:
return ""
def dump_timestamp(self, timestamp: float, v1: bool = False):
if v1:
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')
else:
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%S.%fZ')
def dumps_notify(self, timestamp: float, alert_result: dict, strategy_trader: StrategyTrader) -> dict:
return {
'version': self.version(),
'alert': self.alert(),
'name': self.name(),
'id': self._id,
'app-name': strategy_trader.strategy.name,
'app-id': strategy_trader.strategy.identifier,
'timestamp': timestamp,
'market-id': strategy_trader.instrument.market_id,
'symbol': strategy_trader.instrument.symbol,
'timeframe': timeframe_to_str(self._timeframe),
'message': self._message,
'trigger': 0,
'last-price': strategy_trader.instrument.format_price(strategy_trader.instrument.market_price),
'reason': "",
}
def parameters(self) -> dict:
return {
'name': self.name(),
'id': self._id,
'created': self.created_to_str(),
'timeframe': self.timeframe_to_str(),
'expiry': self.expiry_to_str(),
'countdown': self.countdown_to_str(),
'message': self._message
}
def dumps(self) -> dict:
return {
'version': self.version(),
'alert': self.alert(),
'name': self.name(),
'id': self._id,
'created': self._created,
'timeframe': self._timeframe,
'expiry': self._expiry,
'countdown': self._countdown,
'message': self._message
}
def loads(self, data: dict):
self._id = data.get('id', -1)
self._created = data.get('created', 0)
self._timeframe = data.get('timeframe')
self._expiry = data.get('expiry', 0)
self._countdown = data.get('countdown', -1)
self._message = data.get('message', "")
| true
| true
|
7909d53a4c754f960ae092ce1092951259f9e987
| 247
|
py
|
Python
|
chill/examples/chill/testcases/permute1.script.py
|
CompOpt4Apps/Artifact-DataDepSimplify
|
4fa1bf2bda2902fec50a54ee79ae405a554fc9f4
|
[
"MIT"
] | 5
|
2019-05-20T03:35:41.000Z
|
2021-09-16T22:22:13.000Z
|
chill/examples/chill/testcases/permute1.script.py
|
CompOpt4Apps/Artifact-DataDepSimplify
|
4fa1bf2bda2902fec50a54ee79ae405a554fc9f4
|
[
"MIT"
] | null | null | null |
chill/examples/chill/testcases/permute1.script.py
|
CompOpt4Apps/Artifact-DataDepSimplify
|
4fa1bf2bda2902fec50a54ee79ae405a554fc9f4
|
[
"MIT"
] | null | null | null |
#
# example from CHiLL manual page 14
#
# permute 3 loops
#
from chill import *
source('permute123456.c')
destination('permute1modified.c')
procedure('mm')
loop(0)
known('ambn > 0')
known('an > 0')
known('bm > 0')
permute([3,1,2])
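# --- Illustrative sketch (added; not part of the original script) ---
# Rough Python illustration of the assumed effect of permute([3,1,2]) on the
# matrix-multiply loop nest in mm: the original third loop becomes the outermost
# one, followed by the first and then the second. This is one reading of the
# CHiLL manual example, not output produced by CHiLL, and the loop bounds below
# (an, bm, ambn) are only placeholders for the variables used in the script.
def mm_original(A, B, C, an, bm, ambn):
    for i in range(an):             # loop 1
        for j in range(bm):         # loop 2
            for k in range(ambn):   # loop 3
                C[i][j] += A[i][k] * B[k][j]

def mm_permuted(A, B, C, an, bm, ambn):
    for k in range(ambn):           # original loop 3, now outermost
        for i in range(an):         # original loop 1
            for j in range(bm):     # original loop 2
                C[i][j] += A[i][k] * B[k][j]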
| 9.88
| 36
| 0.631579
|
from chill import *
source('permute123456.c')
destination('permute1modified.c')
procedure('mm')
loop(0)
known('ambn > 0')
known('an > 0')
known('bm > 0')
permute([3,1,2])
| true
| true
|
7909d53db4adf4b24a7158e34c229a91da9b0369
| 2,880
|
py
|
Python
|
lib/datasets/factory.py
|
hinthornw/faster_rcnn_symbols
|
96b0341363c92631da4f02efcf6c5659d1421535
|
[
"MIT"
] | null | null | null |
lib/datasets/factory.py
|
hinthornw/faster_rcnn_symbols
|
96b0341363c92631da4f02efcf6c5659d1421535
|
[
"MIT"
] | null | null | null |
lib/datasets/factory.py
|
hinthornw/faster_rcnn_symbols
|
96b0341363c92631da4f02efcf6c5659d1421535
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
# pylint: disable-all
# flake8: noqa
"""Factory method for easily getting imdbs by name."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__sets = {}
from datasets.pascal_voc import pascal_voc
from datasets.coco import coco
from datasets.imagenet import imagenet
from datasets.vg import vg
from datasets.ads import ads
import numpy as np
# Set up ads dataset
for split in ['train', 'val']:
name = 'pitt_ads_{}'.format(split)
__sets[name] = (lambda split=split : ads(split))
# Set up voc_<year>_<split>
for year in ['2007', '2012']:
for split in ['train', 'val', 'trainval', 'test']:
name = 'voc_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: pascal_voc(split, year))
# Set up coco_2014_<split>
for year in ['2014']:
for split in ['train', 'val', 'minival', 'valminusminival', 'trainval']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
# Set up coco_2014_cap_<split>
for year in ['2014']:
for split in ['train', 'val', 'capval', 'valminuscapval', 'trainval']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
# Set up coco_2015_<split>
for year in ['2015']:
for split in ['test', 'test-dev']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
# Set up vg_<split>
# for version in ['1600-400-20']:
# for split in ['minitrain', 'train', 'minival', 'val', 'test']:
# name = 'vg_{}_{}'.format(version,split)
# __sets[name] = (lambda split=split, version=version: vg(version, split))
for version in ['150-50-20', '150-50-50', '500-150-80', '750-250-150', '1750-700-450', '1600-400-20']:
for split in ['minitrain', 'smalltrain', 'train', 'minival', 'smallval', 'val', 'test']:
name = 'vg_{}_{}'.format(version,split)
__sets[name] = (lambda split=split, version=version: vg(version, split))
# set up imagenet.
for split in ['train', 'val', 'val1', 'val2', 'test']:
name = 'imagenet_{}'.format(split)
devkit_path = 'data/imagenet/ILSVRC/devkit'
data_path = 'data/imagenet/ILSVRC'
__sets[name] = (lambda split=split, devkit_path=devkit_path, data_path=data_path: imagenet(split,devkit_path,data_path))
def get_imdb(name):
"""Get an imdb (image database) by name."""
if name not in __sets:
raise KeyError('Unknown dataset: {}'.format(name))
return __sets[name]()
def list_imdbs():
"""List all registered imdbs."""
return list(__sets.keys())
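# --- Illustrative usage sketch (added; not part of the original file) ---
# The factory maps dataset names to zero-argument constructors held in __sets;
# get_imdb() looks a name up and instantiates it (the underlying data must be on
# disk for the constructor to succeed). Assuming this module is importable as
# datasets.factory:
from datasets.factory import get_imdb, list_imdbs

print(list_imdbs())                    # all registered names, e.g. 'voc_2007_trainval'
imdb = get_imdb('voc_2007_trainval')   # builds pascal_voc('trainval', '2007')
# get_imdb('no_such_dataset')          # would raise KeyError('Unknown dataset: ...')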
| 36
| 124
| 0.638889
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__sets = {}
from datasets.pascal_voc import pascal_voc
from datasets.coco import coco
from datasets.imagenet import imagenet
from datasets.vg import vg
from datasets.ads import ads
import numpy as np
for split in ['train', 'val']:
name = 'pitt_ads_{}'.format(split)
__sets[name] = (lambda split=split : ads(split))
for year in ['2007', '2012']:
for split in ['train', 'val', 'trainval', 'test']:
name = 'voc_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: pascal_voc(split, year))
for year in ['2014']:
for split in ['train', 'val', 'minival', 'valminusminival', 'trainval']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
for year in ['2014']:
for split in ['train', 'val', 'capval', 'valminuscapval', 'trainval']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
for year in ['2015']:
for split in ['test', 'test-dev']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
for version in ['150-50-20', '150-50-50', '500-150-80', '750-250-150', '1750-700-450', '1600-400-20']:
for split in ['minitrain', 'smalltrain', 'train', 'minival', 'smallval', 'val', 'test']:
name = 'vg_{}_{}'.format(version,split)
__sets[name] = (lambda split=split, version=version: vg(version, split))
for split in ['train', 'val', 'val1', 'val2', 'test']:
name = 'imagenet_{}'.format(split)
devkit_path = 'data/imagenet/ILSVRC/devkit'
data_path = 'data/imagenet/ILSVRC'
__sets[name] = (lambda split=split, devkit_path=devkit_path, data_path=data_path: imagenet(split,devkit_path,data_path))
def get_imdb(name):
if name not in __sets:
raise KeyError('Unknown dataset: {}'.format(name))
return __sets[name]()
def list_imdbs():
return list(__sets.keys())
| true
| true
|
7909d66efaa04574a0e2a6612298cc287ae163d2
| 1,356
|
py
|
Python
|
ObjectOrientedProgramming/OOPpart4.py
|
JeffreyAsuncion/TWT_TheCompletePythonCourse
|
3d0d7e086f697763bb479e66569c0f2c04762482
|
[
"MIT"
] | null | null | null |
ObjectOrientedProgramming/OOPpart4.py
|
JeffreyAsuncion/TWT_TheCompletePythonCourse
|
3d0d7e086f697763bb479e66569c0f2c04762482
|
[
"MIT"
] | null | null | null |
ObjectOrientedProgramming/OOPpart4.py
|
JeffreyAsuncion/TWT_TheCompletePythonCourse
|
3d0d7e086f697763bb479e66569c0f2c04762482
|
[
"MIT"
] | null | null | null |
# Overloading Methods
class Point():
def __init__(self, x=0, y=0):
self.x = x
self.y = y
self.coords = (self.x, self.y)
def move(self, x, y):
self.x += x
self.y += y
# Overload __dunder__
def __add__(self, p):
return Point(self.x + p.x, self.y + p.y)
def __sub__(self, p):
return Point(self.x - p.x, self.y - p.y)
def __mul__(self, p):
return Point(self.x * p.x, self.y * p.y)
def length(self):
import math
return math.sqrt(self.x**2 + self.y**2)
def __gt__(self, p):
return self.length() > p.length()
def __ge__(self, p):
return self.length() >= p.length()
def __lt__(self, p):
return self.length() < p.length()
def __le__(self, p):
return self.length() <= p.length()
def __eq__(self, p):
# this math does not always work out correctly; remember float comparisons
#return self.length() == p.length
return self.x == p.x and self.y == p.y
# need __str__ to represent the output of the overloaded operators
def __str__(self):
return "(" + str(self.x) + "," + str(self.y) + ")"
p1 = Point(3,4)
p2 = Point(3,2)
p3 = Point(1,3)
p4 = Point(0,1)
p5 = p1 + p2
p6 = p4 - p1
p7 = p2 * p3
print(p5, p6, p7)
print(p1 == p2)
print(p1 > p2)
print(p4 <= p3)
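# --- Worked example of the expected output (added; not part of the original file) ---
# p5 = (3,4)+(3,2) = (6,6), p6 = (0,1)-(3,4) = (-3,-3), p7 = (3,2)*(1,3) = (3,6);
# p1 == p2 compares coordinates (4 != 2 -> False), while p1 > p2 and p4 <= p3
# compare vector lengths (5.0 > 3.61 and 1.0 <= 3.16), so the script prints:
#
# (6,6) (-3,-3) (3,6)
# False
# True
# True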
| 21.52381
| 80
| 0.539823
|
class Point():
def __init__(self, x=0, y=0):
self.x = x
self.y = y
self.coords = (self.x, self.y)
def move(self, x, y):
self.x += x
self.y += y
def __add__(self, p):
return Point(self.x + p.x, self.y + p.y)
def __sub__(self, p):
return Point(self.x - p.x, self.y - p.y)
def __mul__(self, p):
return Point(self.x * p.x, self.y * p.y)
def length(self):
import math
return math.sqrt(self.x**2 + self.y**2)
def __gt__(self, p):
return self.length() > p.length()
def __ge__(self, p):
return self.length() >= p.length()
def __lt__(self, p):
return self.length() < p.length()
def __le__(self, p):
return self.length() <= p.length()
def __eq__(self, p):
return self.x == p.x and self.y == p.y
def __str__(self):
return "(" + str(self.x) + "," + str(self.y) + ")"
p1 = Point(3,4)
p2 = Point(3,2)
p3 = Point(1,3)
p4 = Point(0,1)
p5 = p1 + p2
p6 = p4 - p1
p7 = p2 * p3
print(p5, p6, p7)
print(p1 == p2)
print(p1 > p2)
print(p4 <= p3)
| true
| true
|
7909d68b2a665d8ced8eac8feb3bd4a22b116e3d
| 9,568
|
py
|
Python
|
vl/model/training.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | 1
|
2018-02-23T16:49:30.000Z
|
2018-02-23T16:49:30.000Z
|
vl/model/training.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | null | null | null |
vl/model/training.py
|
hurwitzlab/viral-learning
|
8d7aebc0d58fa32a429f4a47593452ee2722ba82
|
[
"MIT"
] | null | null | null |
"""
Training and validation method for arbitrary models.
"""
import io
import os
import sys
import time
from keras import Sequential
from keras.layers import Dense, Dropout, BatchNormalization
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import pandas as pd
plt.switch_backend('agg')
def train_and_evaluate(model, model_name, training_epochs, the_data):
print('model.metrics_names: {}'.format(model.metrics_names))
total_steps = training_epochs * the_data.get_training_mini_batches_per_epoch()
training_index = pd.RangeIndex(start=0, stop=total_steps, name='Training Step')
training_metrics_df = pd.DataFrame(
data=np.zeros((total_steps, len(model.metrics_names))),
columns=model.metrics_names,
index=training_index)
# evaluate the model on the dev set(s) after each epoch
dev_index = pd.RangeIndex(start=0, stop=training_epochs, name='Epoch')
dev_columns = pd.MultiIndex.from_product(
iterables=(the_data.get_dev_set_names(), model.metrics_names),
names=('dev set', 'metric'))
dev_metrics_df = pd.DataFrame(
data=np.zeros((training_epochs, len(the_data.get_dev_set_names()) * len(model.metrics_names))),
columns=dev_columns,
index=dev_index)
print(dev_metrics_df.head())
steps_per_epoch = the_data.get_training_mini_batches_per_epoch()
# n counts number of training iterations
n = 0
t0 = time.time()
##with h5py.File(the_data.fp, 'r', libver='latest', swmr=True) as train_test_file:
# train for all epochs
t00 = time.time()
##for train_X, train_y, step, epoch in the_data.get_training_mini_batches(data_file=train_test_file, yield_state=True):
for train_X, train_y, step, epoch in the_data.get_training_data_generator()(yield_state=True):
if epoch > training_epochs:
print('completed {} training epochs in {:5.2f}s'.format(training_epochs, time.time()-t0))
break
else:
# train on one mini batch
print('training on batch {} ({})'.format(step, steps_per_epoch))
training_metrics = model.train_on_batch(train_X, train_y)
training_metrics_df.loc[n, model.metrics_names] = training_metrics
n += 1
# look at performance on dev data after each epoch
# re-plot the training and dev metrics after each epoch
if step == steps_per_epoch:
print('completed training epoch {} in {:5.2f}s'.format(epoch, time.time()-t00))
print('{} steps per epoch'.format(steps_per_epoch))
print('{:5.2f}s per step'.format((time.time()-t00)/steps_per_epoch))
print(training_metrics_df.loc[n-2:n])
t00 = time.time()
print('evaluate the model on the dev set(s)')
#evaluate_dev_sets(epoch=epoch, model=model, the_data=the_data, train_test_file=train_test_file, dev_metrics_df=dev_metrics_df)
evaluate_dev_sets(epoch=epoch, model=model, the_data=the_data, dev_metrics_df=dev_metrics_df)
plot_training_and_dev_metrics(
training_metrics_df,
dev_metrics_df,
model_name=model_name,
steps_per_epoch=steps_per_epoch,
epoch_count=training_epochs,
output_fp=model_name + '.pdf')
return training_metrics_df, dev_metrics_df
def evaluate_dev_sets(epoch, model, the_data, dev_metrics_df):
for dev_steps, dev_set_name, dev_generator in the_data.get_dev_generators():
sys.stdout.write('.')
# print('dev set: "{}"'.format(dev_set_name))
# print(' dev steps: {}'.format(dev_steps))
dev_metrics = model.evaluate_generator(generator=dev_generator, steps=dev_steps)
dev_metrics_df.loc[epoch - 1, (dev_set_name, model.metrics_names)] = dev_metrics
sys.stdout.write('\n')
print('dev metrics:\n{}'.format(dev_metrics_df.loc[epoch - 1]))
def build_layer(model_name, layer_type, kwargs):
if layer_type == 'Dense':
model_name.write('_dns_{}'.format(kwargs['units']))
if 'kernel_regularizer' in kwargs:
# the l2 field is an ndarray with shape ()
# indexing with [] gives error 'too many indices'
# the item() method is the first way I found to extract the float value from l2
model_name.write('_l2_{:6.4f}'.format(kwargs['kernel_regularizer'].l2.item()))
layer = Dense(**kwargs)
elif layer_type == 'Dropout':
model_name.write('_drp_{:3.2f}'.format(kwargs['rate']))
layer = Dropout(**kwargs)
elif layer_type == 'BatchNormalization':
model_name.write('_bn')
layer = BatchNormalization(**kwargs)
else:
raise Exception()
return layer
def build_model(layers, model=None, input_dim=None):
"""
Build and return a Sequential model with Dense layers given by the layers argument.
Arguments
model (keras.Sequential) model to which layers will be added
input_dim (int) dimension of input
layers (tuple) sequence of (layer_type, kwargs) 2-tuples, one per layer, such as (('Dense', {'units': 64, 'activation': 'relu'}), ('Dense', {'units': 1, 'activation': 'sigmoid'}))
Return
model_name (str) a name for the model
model (Model) a compiled model
"""
if model is None:
model = Sequential()
model_name = io.StringIO()
layer_type, kwargs = layers[0]
if input_dim is None:
pass
else:
kwargs['input_dim'] = input_dim
for layer_type, kwargs in layers:
layer = build_layer(model_name, layer_type, kwargs)
model.add(layer)
model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
# trim the leading '_' from the model name - lazy!
return model_name.getvalue()[1:], model
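# --- Illustrative usage sketch (added; not part of the original module) ---
# build_model() expects each entry of `layers` to be a (layer_type, kwargs) pair that
# build_layer() understands ('Dense', 'Dropout' or 'BatchNormalization'). A minimal,
# hypothetical call could look like:
#
#   model_name, model = build_model(
#       layers=(('Dense', {'units': 64, 'activation': 'relu'}),
#               ('Dropout', {'rate': 0.5}),
#               ('Dense', {'units': 1, 'activation': 'sigmoid'})),
#       input_dim=100)
#
# which, given the naming scheme in build_layer(), would produce a model name
# like 'dns_64_drp_0.50_dns_1'.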
def plot_training_and_dev_metrics(training_metrics_df, dev_metrics_df, model_name, steps_per_epoch, epoch_count, output_fp):
# generate network-specific accuracy and loss keys
output_dp, output_filename = os.path.split(output_fp)
output_basename, output_ext = os.path.splitext(output_filename)
##separate_plots_fp = os.path.join(output_dp, output_basename + '_separate' + output_ext)
##sorted_training_history_list = sorted(training_history_list, key=lambda h: h[2]['val_acc'][-1], reverse=True)
with PdfPages(output_fp) as pdfpages:
#for model_name, layers, history, t in sorted_training_history_list:
#training_accuracy_loss = {}
#validation_accuracy_loss = {}
#training_accuracy_loss['acc ' + model_name] = history['acc']
#training_accuracy_loss['loss ' + model_name] = history['loss']
#validation_accuracy_loss['val_acc ' + model_name] = history['val_acc']
#validation_accuracy_loss['val_loss ' + model_name] = history['val_loss']
#training_df = pd.DataFrame(
# data=training_accuracy_loss,
# index=[b + 1 for b in range(epoch_count * batches_per_epoch)])
#training_df.index.name = 'batch'
#validation_df = pd.DataFrame(
# data=validation_accuracy_loss,
# index=[(e + 1) * batches_per_epoch for e in range(epoch_count)])
#validation_df.index.name = 'batch'
fig, ax1 = plt.subplots()
legend = []
#for loss_column in [column for column in training_df.columns if 'loss' in column and model_name in column]:
#for training_metric_column in training_metrics_df.columns:
#print('training metric column: {}'.format(training_metric_column))
ax1.plot(training_metrics_df.index, training_metrics_df.loc[:, 'loss'], color='tab:blue', alpha=0.8)
legend.append('training loss')
#for loss_column in [column for column in validation_df.columns if
# 'loss' in column and model_name in column]:
# print('validation loss column: {}'.format(loss_column))
# ax1.plot(validation_df.index, validation_df.loc[:, loss_column], color='tab:orange', alpha=0.8)
# legend.append('val_loss')
ax1.set_xlabel('epoch')
tick_spacing = steps_per_epoch
ax1.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
ax1.set_xticklabels([0] + list(range(epoch_count+1)))
ax1.set_ylabel('loss')
ax1.legend(legend, loc='lower left')
ax2 = ax1.twinx()
legend = []
#for acc_column in [column for column in training_metrics_df.columns if 'acc' in column]:
#print('training acc column: {}'.format(acc_column))
ax2.plot(training_metrics_df.index, training_metrics_df.loc[:, 'acc'], color='tab:purple', alpha=0.8)
legend.append('training acc')
for dev_acc_column in [column for column in dev_metrics_df.columns if 'acc' in column]:
print('validation acc column: {}'.format(dev_acc_column))
ax2.plot([steps_per_epoch * (n + 1) for n in dev_metrics_df.index], dev_metrics_df.loc[:, dev_acc_column], alpha=0.8)
legend.append(dev_acc_column)
ax2.set_title('Training and Development Metrics\n{}'.format(model_name))
ax2.set_ylim(0.0, 1.0)
ax2.set_ylabel('accuracy')
print(legend)
ax2.legend(legend, loc='lower right')
pdfpages.savefig()
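# --- Illustrative sketch (added; not part of the original module) ---
# train_and_evaluate() only duck-types its data source; the methods below are the
# ones this module actually calls on `the_data`. The class name and the constant
# values are hypothetical, shown only to document the expected interface.
class ExampleDataSource(object):
    def get_training_mini_batches_per_epoch(self):
        return 10  # mini-batches (steps) per training epoch

    def get_dev_set_names(self):
        return ['dev']  # first level of the dev metrics MultiIndex columns

    def get_training_data_generator(self):
        # must return a callable; calling it with yield_state=True must yield
        # (train_X, train_y, step, epoch) tuples, step running from 1 to steps-per-epoch
        def generator(yield_state=True):
            raise NotImplementedError
        return generator

    def get_dev_generators(self):
        # must yield (dev_steps, dev_set_name, generator) tuples compatible with
        # model.evaluate_generator()
        raise NotImplementedError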
| 42.524444
| 139
| 0.650397
|
import io
import os
import sys
import time
from keras import Sequential
from keras.layers import Dense, Dropout, BatchNormalization
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import pandas as pd
plt.switch_backend('agg')
def train_and_evaluate(model, model_name, training_epochs, the_data):
print('model.metrics_names: {}'.format(model.metrics_names))
total_steps = training_epochs * the_data.get_training_mini_batches_per_epoch()
training_index = pd.RangeIndex(start=0, stop=total_steps, name='Training Step')
training_metrics_df = pd.DataFrame(
data=np.zeros((total_steps, len(model.metrics_names))),
columns=model.metrics_names,
index=training_index)
dev_index = pd.RangeIndex(start=0, stop=training_epochs, name='Epoch')
dev_columns = pd.MultiIndex.from_product(
iterables=(the_data.get_dev_set_names(), model.metrics_names),
names=('dev set', 'metric'))
dev_metrics_df = pd.DataFrame(
data=np.zeros((training_epochs, len(the_data.get_dev_set_names()) * len(model.metrics_names))),
columns=dev_columns,
index=dev_index)
print(dev_metrics_df.head())
steps_per_epoch = the_data.get_training_mini_batches_per_epoch()
n = 0
t0 = time.time()
t00 = time.time()
for train_X, train_y, step, epoch in the_data.get_training_data_generator()(yield_state=True):
if epoch > training_epochs:
print('completed {} training epochs in {:5.2f}s'.format(training_epochs, time.time()-t0))
break
else:
print('training on batch {} ({})'.format(step, steps_per_epoch))
training_metrics = model.train_on_batch(train_X, train_y)
training_metrics_df.loc[n, model.metrics_names] = training_metrics
n += 1
if step == steps_per_epoch:
print('completed training epoch {} in {:5.2f}s'.format(epoch, time.time()-t00))
print('{} steps per epoch'.format(steps_per_epoch))
print('{:5.2f}s per step'.format((time.time()-t00)/steps_per_epoch))
print(training_metrics_df.loc[n-2:n])
t00 = time.time()
print('evaluate the model on the dev set(s)')
evaluate_dev_sets(epoch=epoch, model=model, the_data=the_data, dev_metrics_df=dev_metrics_df)
plot_training_and_dev_metrics(
training_metrics_df,
dev_metrics_df,
model_name=model_name,
steps_per_epoch=steps_per_epoch,
epoch_count=training_epochs,
output_fp=model_name + '.pdf')
return training_metrics_df, dev_metrics_df
def evaluate_dev_sets(epoch, model, the_data, dev_metrics_df):
for dev_steps, dev_set_name, dev_generator in the_data.get_dev_generators():
sys.stdout.write('.')
dev_metrics = model.evaluate_generator(generator=dev_generator, steps=dev_steps)
dev_metrics_df.loc[epoch - 1, (dev_set_name, model.metrics_names)] = dev_metrics
sys.stdout.write('\n')
print('dev metrics:\n{}'.format(dev_metrics_df.loc[epoch - 1]))
def build_layer(model_name, layer_type, kwargs):
if layer_type == 'Dense':
model_name.write('_dns_{}'.format(kwargs['units']))
if 'kernel_regularizer' in kwargs:
model_name.write('_l2_{:6.4f}'.format(kwargs['kernel_regularizer'].l2.item()))
layer = Dense(**kwargs)
elif layer_type == 'Dropout':
model_name.write('_drp_{:3.2f}'.format(kwargs['rate']))
layer = Dropout(**kwargs)
elif layer_type == 'BatchNormalization':
model_name.write('_bn')
layer = BatchNormalization(**kwargs)
else:
raise Exception()
return layer
def build_model(layers, model=None, input_dim=None):
if model is None:
model = Sequential()
model_name = io.StringIO()
layer_type, kwargs = layers[0]
if input_dim is None:
pass
else:
kwargs['input_dim'] = input_dim
for layer_type, kwargs in layers:
layer = build_layer(model_name, layer_type, kwargs)
model.add(layer)
model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
return model_name.getvalue()[1:], model
def plot_training_and_dev_metrics(training_metrics_df, dev_metrics_df, model_name, steps_per_epoch, epoch_count, output_fp):
output_dp, output_filename = os.path.split(output_fp)
output_basename, output_ext = os.path.splitext(output_filename)
with PdfPages(output_fp) as pdfpages:
fig, ax1 = plt.subplots()
legend = []
ax1.plot(training_metrics_df.index, training_metrics_df.loc[:, 'loss'], color='tab:blue', alpha=0.8)
legend.append('training loss')
ax1.set_xlabel('epoch')
tick_spacing = steps_per_epoch
ax1.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
ax1.set_xticklabels([0] + list(range(epoch_count+1)))
ax1.set_ylabel('loss')
ax1.legend(legend, loc='lower left')
ax2 = ax1.twinx()
legend = []
ax2.plot(training_metrics_df.index, training_metrics_df.loc[:, 'acc'], color='tab:purple', alpha=0.8)
legend.append('training acc')
for dev_acc_column in [column for column in dev_metrics_df.columns if 'acc' in column]:
print('validation acc column: {}'.format(dev_acc_column))
ax2.plot([steps_per_epoch * (n + 1) for n in dev_metrics_df.index], dev_metrics_df.loc[:, dev_acc_column], alpha=0.8)
legend.append(dev_acc_column)
ax2.set_title('Training and Development Metrics\n{}'.format(model_name))
ax2.set_ylim(0.0, 1.0)
ax2.set_ylabel('accuracy')
print(legend)
ax2.legend(legend, loc='lower right')
pdfpages.savefig()
| true
| true
|
7909d893850400373ab832f8318d8b2680920671
| 5,419
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/models/security_rule.py
|
CharaD7/azure-sdk-for-python
|
9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c
|
[
"MIT"
] | null | null | null |
azure-mgmt-network/azure/mgmt/network/models/security_rule.py
|
CharaD7/azure-sdk-for-python
|
9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c
|
[
"MIT"
] | null | null | null |
azure-mgmt-network/azure/mgmt/network/models/security_rule.py
|
CharaD7/azure-sdk-for-python
|
9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class SecurityRule(SubResource):
"""Network security rule.
:param id: Resource Id
:type id: str
:param description: Gets or sets a description for this rule. Restricted
to 140 chars.
:type description: str
:param protocol: Gets or sets Network protocol this rule applies to. Can
be Tcp, Udp or All(*). Possible values include: 'Tcp', 'Udp', '*'
:type protocol: str or :class:`SecurityRuleProtocol
<azure.mgmt.network.models.SecurityRuleProtocol>`
:param source_port_range: Gets or sets Source Port or Range. Integer or
range between 0 and 65535. Asterisk '*' can also be used to match all
ports.
:type source_port_range: str
:param destination_port_range: Gets or sets Destination Port or Range.
Integer or range between 0 and 65535. Asterisk '*' can also be used to
match all ports.
:type destination_port_range: str
:param source_address_prefix: Gets or sets source address prefix. CIDR or
source IP range. Asterisk '*' can also be used to match all source IPs.
Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and
'Internet' can also be used. If this is an ingress rule, specifies where
network traffic originates from.
:type source_address_prefix: str
:param destination_address_prefix: Gets or sets destination address
prefix. CIDR or source IP range. Asterisk '*' can also be used to match
all source IPs. Default tags such as 'VirtualNetwork',
'AzureLoadBalancer' and 'Internet' can also be used.
:type destination_address_prefix: str
:param access: Gets or sets network traffic is allowed or denied.
Possible values are 'Allow' and 'Deny'. Possible values include:
'Allow', 'Deny'
:type access: str or :class:`SecurityRuleAccess
<azure.mgmt.network.models.SecurityRuleAccess>`
:param priority: Gets or sets the priority of the rule. The value can be
between 100 and 4096. The priority number must be unique for each rule
in the collection. The lower the priority number, the higher the
priority of the rule.
:type priority: int
:param direction: Gets or sets the direction of the rule. Inbound or
Outbound. The direction specifies if the rule will be evaluated on incoming
or outgoing traffic. Possible values include: 'Inbound', 'Outbound'
:type direction: str or :class:`SecurityRuleDirection
<azure.mgmt.network.models.SecurityRuleDirection>`
:param provisioning_state: Gets provisioning state of the PublicIP
resource Updating/Deleting/Failed
:type provisioning_state: str
:param name: Gets name of the resource that is unique within a resource
group. This name can be used to access the resource
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated
:type etag: str
"""
_validation = {
'protocol': {'required': True},
'source_address_prefix': {'required': True},
'destination_address_prefix': {'required': True},
'access': {'required': True},
'direction': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'source_port_range': {'key': 'properties.sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'properties.destinationPortRange', 'type': 'str'},
'source_address_prefix': {'key': 'properties.sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'properties.destinationAddressPrefix', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'direction': {'key': 'properties.direction', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, protocol, source_address_prefix, destination_address_prefix, access, direction, id=None, description=None, source_port_range=None, destination_port_range=None, priority=None, provisioning_state=None, name=None, etag=None):
super(SecurityRule, self).__init__(id=id)
self.description = description
self.protocol = protocol
self.source_port_range = source_port_range
self.destination_port_range = destination_port_range
self.source_address_prefix = source_address_prefix
self.destination_address_prefix = destination_address_prefix
self.access = access
self.priority = priority
self.direction = direction
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
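# --- Illustrative usage sketch (added; not part of the original module) ---
# Constructing a rule with the required arguments listed in _validation. Only the
# constructor signature comes from the class above; the import path and the field
# values are illustrative assumptions.
from azure.mgmt.network.models import SecurityRule

rule = SecurityRule(
    protocol='Tcp',
    source_address_prefix='*',
    destination_address_prefix='10.0.0.0/24',
    access='Allow',
    direction='Inbound',
    description='Allow inbound TCP on port 22',
    source_port_range='*',
    destination_port_range='22',
    priority=100,
    name='allow-ssh',
)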
| 49.263636
| 245
| 0.671342
|
from .sub_resource import SubResource
class SecurityRule(SubResource):
_validation = {
'protocol': {'required': True},
'source_address_prefix': {'required': True},
'destination_address_prefix': {'required': True},
'access': {'required': True},
'direction': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'source_port_range': {'key': 'properties.sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'properties.destinationPortRange', 'type': 'str'},
'source_address_prefix': {'key': 'properties.sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'properties.destinationAddressPrefix', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'direction': {'key': 'properties.direction', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, protocol, source_address_prefix, destination_address_prefix, access, direction, id=None, description=None, source_port_range=None, destination_port_range=None, priority=None, provisioning_state=None, name=None, etag=None):
super(SecurityRule, self).__init__(id=id)
self.description = description
self.protocol = protocol
self.source_port_range = source_port_range
self.destination_port_range = destination_port_range
self.source_address_prefix = source_address_prefix
self.destination_address_prefix = destination_address_prefix
self.access = access
self.priority = priority
self.direction = direction
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
| true
| true
|
7909d8e3528fc6544e58911ecb9cf37704d993b8
| 137
|
py
|
Python
|
clickhouse/datadog_checks/clickhouse/__about__.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 1
|
2021-01-28T01:45:37.000Z
|
2021-01-28T01:45:37.000Z
|
clickhouse/datadog_checks/clickhouse/__about__.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 3
|
2021-01-27T04:56:40.000Z
|
2021-02-26T06:29:22.000Z
|
clickhouse/datadog_checks/clickhouse/__about__.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 1
|
2021-04-07T16:58:27.000Z
|
2021-04-07T16:58:27.000Z
|
# (C) Datadog, Inc. 2019-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
__version__ = '2.3.0'
| 27.4
| 59
| 0.722628
|
__version__ = '2.3.0'
| true
| true
|
7909d8f02396dbf44f9dd65fac201b5674af416c
| 36,560
|
py
|
Python
|
tests/components/history/test_init.py
|
basicpail/core
|
5cc54618c5af3f75c08314bf2375cc7ac40d2b7e
|
[
"Apache-2.0"
] | 5
|
2020-12-15T04:09:01.000Z
|
2022-03-11T21:34:24.000Z
|
tests/components/history/test_init.py
|
basicpail/core
|
5cc54618c5af3f75c08314bf2375cc7ac40d2b7e
|
[
"Apache-2.0"
] | 77
|
2020-07-16T16:43:09.000Z
|
2022-03-31T06:14:37.000Z
|
tests/components/history/test_init.py
|
Vaarlion/core
|
f3de8b9f28de01abf72c0f5bb0b457eb1841f201
|
[
"Apache-2.0"
] | 11
|
2020-12-16T13:48:14.000Z
|
2022-02-01T00:28:05.000Z
|
"""The tests the History component."""
# pylint: disable=protected-access,invalid-name
from datetime import timedelta
import json
from unittest.mock import patch, sentinel
import pytest
from pytest import approx
from homeassistant.components import history, recorder
from homeassistant.components.recorder.history import get_significant_states
from homeassistant.components.recorder.models import process_timestamp
import homeassistant.core as ha
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM
from tests.common import init_recorder_component
from tests.components.recorder.common import trigger_db_commit, wait_recording_done
@pytest.mark.usefixtures("hass_history")
def test_setup():
"""Test setup method of history."""
# Verification occurs in the fixture
pass
def test_get_significant_states(hass_history):
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_history
zero, four, states = record_states(hass)
hist = get_significant_states(hass, zero, four, filters=history.Filters())
assert states == hist
def test_get_significant_states_minimal_response(hass_history):
"""Test that only significant states are returned.
When minimal responses is set only the first and
last states return a complete state.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_history
zero, four, states = record_states(hass)
hist = get_significant_states(
hass, zero, four, filters=history.Filters(), minimal_response=True
)
# The second media_player.test state is reduced
# down to last_changed and state when minimal_response
# is set. We use JSONEncoder to make sure that are
# pre-encoded last_changed is always the same as what
# will happen with encoding a native state
input_state = states["media_player.test"][1]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
states["media_player.test"][1] = {
"last_changed": orig_last_changed,
"state": orig_state,
}
assert states == hist
def test_get_significant_states_with_initial(hass_history):
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_history
zero, four, states = record_states(hass)
one = zero + timedelta(seconds=1)
one_and_half = zero + timedelta(seconds=1.5)
for entity_id in states:
if entity_id == "media_player.test":
states[entity_id] = states[entity_id][1:]
for state in states[entity_id]:
if state.last_changed == one:
state.last_changed = one_and_half
hist = get_significant_states(
hass,
one_and_half,
four,
filters=history.Filters(),
include_start_time_state=True,
)
assert states == hist
def test_get_significant_states_without_initial(hass_history):
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_history
zero, four, states = record_states(hass)
one = zero + timedelta(seconds=1)
one_and_half = zero + timedelta(seconds=1.5)
for entity_id in states:
states[entity_id] = list(
filter(lambda s: s.last_changed != one, states[entity_id])
)
del states["media_player.test2"]
hist = get_significant_states(
hass,
one_and_half,
four,
filters=history.Filters(),
include_start_time_state=False,
)
assert states == hist
def test_get_significant_states_entity_id(hass_history):
"""Test that only significant states are returned for one entity."""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = get_significant_states(
hass, zero, four, ["media_player.test"], filters=history.Filters()
)
assert states == hist
def test_get_significant_states_multiple_entity_ids(hass_history):
"""Test that only significant states are returned for one entity."""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = get_significant_states(
hass,
zero,
four,
["media_player.test", "thermostat.test"],
filters=history.Filters(),
)
assert states == hist
def test_get_significant_states_exclude_domain(hass_history):
"""Test if significant states are returned when excluding domains.
We should get back every thermostat change that includes an attribute
change, but no media player changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_EXCLUDE: {history.CONF_DOMAINS: ["media_player"]}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude_entity(hass_history):
"""Test if significant states are returned when excluding entities.
We should get back every thermostat and script changes, but no media
player changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_EXCLUDE: {history.CONF_ENTITIES: ["media_player.test"]}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude(hass_history):
"""Test significant states when excluding entities and domains.
We should not get back every thermostat and media player test changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test"]
del states["thermostat.test2"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_EXCLUDE: {
history.CONF_DOMAINS: ["thermostat"],
history.CONF_ENTITIES: ["media_player.test"],
}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude_include_entity(hass_history):
"""Test significant states when excluding domains and include entities.
We should not get back every thermostat and media player test changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {
history.CONF_ENTITIES: ["media_player.test", "thermostat.test"]
},
history.CONF_EXCLUDE: {history.CONF_DOMAINS: ["thermostat"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_domain(hass_history):
"""Test if significant states are returned when including domains.
We should get back every thermostat and script changes, but no media
player changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {history.CONF_DOMAINS: ["thermostat", "script"]}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_entity(hass_history):
"""Test if significant states are returned when including entities.
We should only get back changes of the media_player.test entity.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {history.CONF_ENTITIES: ["media_player.test"]}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include(hass_history):
"""Test significant states when including domains and entities.
We should only get back changes of the media_player.test entity and the
thermostat domain.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {
history.CONF_DOMAINS: ["thermostat"],
history.CONF_ENTITIES: ["media_player.test"],
}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude_domain(hass_history):
"""Test if significant states when excluding and including domains.
We should not get back any changes since we include only the
media_player domain but also exclude it.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {history.CONF_DOMAINS: ["media_player"]},
history.CONF_EXCLUDE: {history.CONF_DOMAINS: ["media_player"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude_entity(hass_history):
"""Test if significant states when excluding and including domains.
We should not get back any changes since we include only
media_player.test but also exclude it.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {history.CONF_ENTITIES: ["media_player.test"]},
history.CONF_EXCLUDE: {history.CONF_ENTITIES: ["media_player.test"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude(hass_history):
"""Test if significant states when in/excluding domains and entities.
We should only get back changes of the media_player.test2 entity.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
history.CONF_INCLUDE: {
history.CONF_DOMAINS: ["media_player"],
history.CONF_ENTITIES: ["thermostat.test"],
},
history.CONF_EXCLUDE: {
history.CONF_DOMAINS: ["thermostat"],
history.CONF_ENTITIES: ["media_player.test"],
},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_are_ordered(hass_history):
"""Test order of results from get_significant_states.
When entity ids are given, the results should be returned with the data
in the same order.
"""
hass = hass_history
zero, four, _states = record_states(hass)
entity_ids = ["media_player.test", "media_player.test2"]
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
assert list(hist.keys()) == entity_ids
entity_ids = ["media_player.test2", "media_player.test"]
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
assert list(hist.keys()) == entity_ids
def test_get_significant_states_only(hass_history):
"""Test significant states when significant_states_only is set."""
hass = hass_history
entity_id = "sensor.test"
def set_state(state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=4)
points = []
for i in range(1, 4):
points.append(start + timedelta(minutes=i))
states = []
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("123", attributes={"attribute": 10.64})
with patch(
"homeassistant.components.recorder.dt_util.utcnow", return_value=points[0]
):
# Attributes are different, state not
states.append(set_state("123", attributes={"attribute": 21.42}))
with patch(
"homeassistant.components.recorder.dt_util.utcnow", return_value=points[1]
):
# state is different, attributes not
states.append(set_state("32", attributes={"attribute": 21.42}))
with patch(
"homeassistant.components.recorder.dt_util.utcnow", return_value=points[2]
):
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = get_significant_states(hass, start, significant_changes_only=True)
assert len(hist[entity_id]) == 2
assert states[0] not in hist[entity_id]
assert states[1] in hist[entity_id]
assert states[2] in hist[entity_id]
hist = get_significant_states(hass, start, significant_changes_only=False)
assert len(hist[entity_id]) == 3
assert states == hist[entity_id]
def check_significant_states(hass, zero, four, states, config):
"""Check if significant states are retrieved."""
filters = history.Filters()
exclude = config[history.DOMAIN].get(history.CONF_EXCLUDE)
if exclude:
filters.excluded_entities = exclude.get(history.CONF_ENTITIES, [])
filters.excluded_domains = exclude.get(history.CONF_DOMAINS, [])
include = config[history.DOMAIN].get(history.CONF_INCLUDE)
if include:
filters.included_entities = include.get(history.CONF_ENTITIES, [])
filters.included_domains = include.get(history.CONF_DOMAINS, [])
hist = get_significant_states(hass, zero, four, filters=filters)
assert states == hist
def record_states(hass):
"""Record some test states.
We inject a bunch of state updates from media player, zone and
thermostat.
"""
mp = "media_player.test"
mp2 = "media_player.test2"
mp3 = "media_player.test3"
therm = "thermostat.test"
therm2 = "thermostat.test2"
zone = "zone.home"
script_c = "script.can_cancel_this_one"
def set_state(entity_id, state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)
zero = dt_util.utcnow()
one = zero + timedelta(seconds=1)
two = one + timedelta(seconds=1)
three = two + timedelta(seconds=1)
four = three + timedelta(seconds=1)
states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []}
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
states[mp].append(
set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
)
states[mp].append(
set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
)
states[mp2].append(
set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
)
states[mp3].append(
set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)})
)
states[therm].append(
set_state(therm, 20, attributes={"current_temperature": 19.5})
)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=two):
# This state will be skipped because it only differs in time
set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
# This state will be skipped because domain is excluded
set_state(zone, "zoning")
states[script_c].append(
set_state(script_c, "off", attributes={"can_cancel": True})
)
states[therm].append(
set_state(therm, 21, attributes={"current_temperature": 19.8})
)
states[therm2].append(
set_state(therm2, 20, attributes={"current_temperature": 19})
)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
states[mp].append(
set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)})
)
states[mp3].append(
set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)})
)
# Attributes changed even though state is the same
states[therm].append(
set_state(therm, 21, attributes={"current_temperature": 20})
)
return zero, four, states
async def test_fetch_period_api(hass, hass_client):
"""Test the fetch period view for history."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "history", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(f"/api/history/period/{dt_util.utcnow().isoformat()}")
assert response.status == 200
async def test_fetch_period_api_with_use_include_order(hass, hass_client):
"""Test the fetch period view for history with include order."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass, "history", {history.DOMAIN: {history.CONF_ORDER: True}}
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(f"/api/history/period/{dt_util.utcnow().isoformat()}")
assert response.status == 200
async def test_fetch_period_api_with_minimal_response(hass, hass_client):
"""Test the fetch period view for history with minimal_response."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "history", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}?minimal_response"
)
assert response.status == 200
async def test_fetch_period_api_with_no_timestamp(hass, hass_client):
"""Test the fetch period view for history with no timestamp."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "history", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get("/api/history/period")
assert response.status == 200
async def test_fetch_period_api_with_include_order(hass, hass_client):
"""Test the fetch period view for history."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{
"history": {
"use_include_order": True,
"include": {"entities": ["light.kitchen"]},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
params={"filter_entity_id": "non.existing,something.else"},
)
assert response.status == 200
async def test_fetch_period_api_with_entity_glob_include(hass, hass_client):
"""Test the fetch period view for history."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{
"history": {
"include": {"entity_globs": ["light.k*"]},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.nomatch", "on")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == 200
response_json = await response.json()
assert response_json[0][0]["entity_id"] == "light.kitchen"
async def test_fetch_period_api_with_entity_glob_exclude(hass, hass_client):
"""Test the fetch period view for history."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{
"history": {
"exclude": {
"entity_globs": ["light.k*"],
"domains": "switch",
"entities": "media_player.test",
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.match", "on")
hass.states.async_set("switch.match", "on")
hass.states.async_set("media_player.test", "on")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0][0]["entity_id"] == "light.cow"
assert response_json[1][0]["entity_id"] == "light.match"
async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_client):
"""Test the fetch period view for history."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{
"history": {
"exclude": {
"entity_globs": ["light.many*"],
},
"include": {
"entity_globs": ["light.m*"],
"domains": "switch",
"entities": "media_player.test",
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.match", "on")
hass.states.async_set("light.many_state_changes", "on")
hass.states.async_set("switch.match", "on")
hass.states.async_set("media_player.test", "on")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0][0]["entity_id"] == "light.match"
assert response_json[1][0]["entity_id"] == "media_player.test"
assert response_json[2][0]["entity_id"] == "switch.match"
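# Note on precedence (descriptive comment, derived from the assertions above):
# "light.many_state_changes" matches the include glob "light.m*" but is still
# filtered out by the exclude glob "light.many*", so the exclude wins when
# both match the same entity.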
async def test_entity_ids_limit_via_api(hass, hass_client):
"""Test limiting history to entity_ids."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{"history": {}},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.nomatch", "on")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow",
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0][0]["entity_id"] == "light.kitchen"
assert response_json[1][0]["entity_id"] == "light.cow"
async def test_entity_ids_limit_via_api_with_skip_initial_state(hass, hass_client):
"""Test limiting history to entity_ids with skip_initial_state."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{"history": {}},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.nomatch", "on")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state",
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
when = dt_util.utcnow() - timedelta(minutes=1)
response = await client.get(
f"/api/history/period/{when.isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state",
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0][0]["entity_id"] == "light.kitchen"
assert response_json[1][0]["entity_id"] == "light.cow"
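# Descriptive note (derived from the two requests above): skip_initial_state
# drops the back-filled state at the start of the requested period, so a
# period starting after the states were set returns nothing, while a period
# starting a minute earlier still returns the recorded changes.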
POWER_SENSOR_ATTRIBUTES = {
"device_class": "power",
"state_class": "measurement",
"unit_of_measurement": "kW",
}
PRESSURE_SENSOR_ATTRIBUTES = {
"device_class": "pressure",
"state_class": "measurement",
"unit_of_measurement": "hPa",
}
TEMPERATURE_SENSOR_ATTRIBUTES = {
"device_class": "temperature",
"state_class": "measurement",
"unit_of_measurement": "°C",
}
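# The expected values in the parametrization below are plain unit conversions
# of the input state (illustrative arithmetic only, not used by the tests):
#   10 kW                -> 10 * 1000            = 10000 W
#   10 °C (imperial)     -> 10 * 9 / 5 + 32      = 50 °F
#   1000 hPa (metric)    -> 1000 * 100           = 100000 Pa
#   1000 hPa (imperial)  -> 100000 Pa / 6894.757 ~ 14.503774 psi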
@pytest.mark.parametrize(
"units, attributes, state, value",
[
(IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, 10, 10000),
(METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, 10, 10000),
(IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, 10, 50),
(METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, 10, 10),
(IMPERIAL_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, 1000, 14.503774389728312),
(METRIC_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, 1000, 100000),
],
)
async def test_statistics_during_period(
hass, hass_ws_client, units, attributes, state, value
):
"""Test statistics_during_period."""
now = dt_util.utcnow()
hass.config.units = units
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "history", {})
await async_setup_component(hass, "sensor", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.states.async_set("sensor.test", state, attributes=attributes)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(period="hourly", start=now)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_ws_client()
await client.send_json(
{
"id": 1,
"type": "history/statistics_during_period",
"start_time": now.isoformat(),
"end_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {}
client = await hass_ws_client()
await client.send_json(
{
"id": 1,
"type": "history/statistics_during_period",
"start_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
"last_reset": None,
"state": None,
"sum": None,
}
]
}
async def test_statistics_during_period_bad_start_time(hass, hass_ws_client):
"""Test statistics_during_period."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{"history": {}},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_ws_client()
await client.send_json(
{
"id": 1,
"type": "history/statistics_during_period",
"start_time": "cats",
}
)
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == "invalid_start_time"
async def test_statistics_during_period_bad_end_time(hass, hass_ws_client):
"""Test statistics_during_period."""
now = dt_util.utcnow()
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(
hass,
"history",
{"history": {}},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_ws_client()
await client.send_json(
{
"id": 1,
"type": "history/statistics_during_period",
"start_time": now.isoformat(),
"end_time": "dogs",
}
)
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == "invalid_end_time"
@pytest.mark.parametrize(
"units, attributes, unit",
[
(IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"),
(METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"),
(IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°F"),
(METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°C"),
(IMPERIAL_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, "psi"),
(METRIC_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, "Pa"),
],
)
async def test_list_statistic_ids(hass, hass_ws_client, units, attributes, unit):
"""Test list_statistic_ids."""
now = dt_util.utcnow()
hass.config.units = units
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "history", {"history": {}})
await async_setup_component(hass, "sensor", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_ws_client()
await client.send_json({"id": 1, "type": "history/list_statistic_ids"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == []
hass.states.async_set("sensor.test", 10, attributes=attributes)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await client.send_json({"id": 2, "type": "history/list_statistic_ids"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == [
{"statistic_id": "sensor.test", "unit_of_measurement": unit}
]
hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(period="hourly", start=now)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
# Remove the state, statistics will now be fetched from the database
hass.states.async_remove("sensor.test")
await hass.async_block_till_done()
await client.send_json({"id": 3, "type": "history/list_statistic_ids"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == [
{"statistic_id": "sensor.test", "unit_of_measurement": unit}
]
await client.send_json(
{"id": 4, "type": "history/list_statistic_ids", "statistic_type": "dogs"}
)
response = await client.receive_json()
assert not response["success"]
await client.send_json(
{"id": 5, "type": "history/list_statistic_ids", "statistic_type": "mean"}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == [
{"statistic_id": "sensor.test", "unit_of_measurement": unit}
]
await client.send_json(
{"id": 6, "type": "history/list_statistic_ids", "statistic_type": "sum"}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == []
7909da1a5d0f4cf1f5e4ed6e5b1b891963861163
| 7,707
|
py
|
Python
|
src/core/toga/app.py
|
UncleGoogle/toga
|
dc2c150670b692c26184d3267a40624133be4a4e
|
[
"BSD-3-Clause"
] | null | null | null |
src/core/toga/app.py
|
UncleGoogle/toga
|
dc2c150670b692c26184d3267a40624133be4a4e
|
[
"BSD-3-Clause"
] | null | null | null |
src/core/toga/app.py
|
UncleGoogle/toga
|
dc2c150670b692c26184d3267a40624133be4a4e
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import signal
import sys
from builtins import id as identifier
from toga.command import CommandSet
from toga.handlers import wrapped_handler
from toga.icons import Icon
from toga.platform import get_platform_factory
from toga.window import Window
class MainWindow(Window):
_WINDOW_CLASS = 'MainWindow'
def __init__(self, id=None, title=None, position=(100, 100), size=(640, 480), factory=None):
super().__init__(id=id, title=title, position=position, size=size, factory=factory)
class App:
""" The App is the top level of any GUI program. It is the manager of all
the other bits of the GUI app: the main window and events that window
generates like user input.
When you create an App you need to provide it a name, an id for uniqueness
(by convention, the identifier is a "reversed domain name") and an
optional startup function which should run once the App has initialised.
The startup function typically constructs some initial user interface.
Once the app is created you should invoke the main_loop() method, which
will hand over execution of your program to Toga to make the App interface
do its thing.
Args:
name (str): The name of the application.
app_id (str): The unique application identifier, the reversed domain name, e.g. 'org.beeware.me'
icon (str): Path to the icon for the application.
id (str): The DOM identifier for the app (optional)
startup(``callable``): The callback method before starting the app, typically to add the components.
Must be a ``callable`` that expects a single argument of :class:`toga.App`.
factory (:obj:`module`): A Python module that is able to return an
implementation of this class with the same name. (optional and normally not needed)
Examples:
>>> # Here is the absolute minimum App::
>>> app = toga.App('Empty App', 'org.beeware.empty')
>>> app.main_loop()
"""
app = None
def __init__(self, name, app_id,
id=None, icon=None, startup=None, on_exit=None, factory=None):
self.factory = get_platform_factory(factory)
# Keep an accessible copy of the app instance
App.app = self
App.app_module = self.__module__.split('.')[0]
App.app_dir = os.path.dirname(sys.modules[App.app_module].__file__)
self.name = name
self._app_id = app_id
self._id = id if id else identifier(self)
self.commands = CommandSet(factory=self.factory)
self._startup_method = startup
self.default_icon = Icon('tiberius', system=True)
self.icon = icon
self._main_window = None
self._on_exit = None
self._full_screen_windows = None
self._impl = self._create_impl()
self.on_exit = on_exit
def _create_impl(self):
return self.factory.App(interface=self)
@property
def app_id(self):
""" The identifier for the app.
This is the reversed domain name, often used for targeting resources, etc.
Returns:
The identifier as a ``str``.
"""
return self._app_id
@property
def id(self):
""" The DOM identifier for the app. This id can be used to target CSS directives.
Returns:
The identifier for the app as a ``str``.
"""
return self._id
@property
def icon(self):
""" The Icon for the app. On setting, the icon is loaded automatically.
Returns:
The icon of the app ``toga.Icon``.
"""
return self._icon
@icon.setter
def icon(self, name):
self._icon = Icon.load(name, default=self.default_icon)
@property
def main_window(self):
"""The main Windows for the app.
Returns:
The main Window of the app.
"""
return self._main_window
@main_window.setter
def main_window(self, window):
self._main_window = window
window.app = self
@property
def current_window(self):
"""Return the currently active content window"""
return self._impl.current_window().interface
@property
def is_full_screen(self):
"""Is the app currently in full screen mode?"""
return self._full_screen_windows is not None
def set_full_screen(self, *windows):
"""Make one or more windows full screen.
Full screen is not the same as "maximized"; full screen mode
is when all window borders and other chrome are no longer
visible.
Args:
windows: The list of windows to go full screen,
in order of allocation to screens. If the number of
windows exceeds the number of available displays,
those windows will not be visible. If no windows
are specified, the app will exit full screen mode.
"""
if not windows:
self.exit_full_screen()
else:
self._impl.enter_full_screen(windows)
self._full_screen_windows = windows
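# Hypothetical usage (``app`` is an App instance with a main window):
#   app.set_full_screen(app.main_window)   # enter full screen on one window
#   app.set_full_screen()                  # no arguments -> exit full screen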
def exit_full_screen(self):
"""Exit full screen mode."""
if self.is_full_screen:
self._impl.exit_full_screen(self._full_screen_windows)
self._full_screen_windows = None
def show_cursor(self):
"""Show cursor."""
self._impl.show_cursor()
def hide_cursor(self):
"""Hide cursor from view."""
self._impl.hide_cursor()
def startup(self):
""" Create and show the main window for the application
"""
self.main_window = MainWindow(title=self.name, factory=self.factory)
if self._startup_method:
self.main_window.content = self._startup_method(self)
self.main_window.show()
def main_loop(self):
""" Invoke the application to handle user input.
This method typically only returns once the application is exiting.
"""
# Modify signal handlers to make sure Ctrl-C is caught and handled.
signal.signal(signal.SIGINT, signal.SIG_DFL)
self._impl.main_loop()
def exit(self):
""" Quit the application gracefully.
"""
self._impl.exit()
@property
def on_exit(self):
"""The handler to invoke before the application exits.
Returns:
The function ``callable`` that is called on application exit.
"""
return self._on_exit
@on_exit.setter
def on_exit(self, handler):
"""Set the handler to invoke before the app exits.
Args:
handler (:obj:`callable`): The handler to invoke before the app exits.
"""
self._on_exit = wrapped_handler(self, handler)
self._impl.set_on_exit(self._on_exit)
class DocumentApp(App):
"""
A document-based application.
Definition and arguments are the same as a base App, plus the following:
Args:
document_types (:obj:`list` of :obj:`str`): Document types.
"""
def __init__(self, name, app_id,
id=None, icon=None, startup=None, document_types=None, on_exit=None, factory=None):
self.document_types = document_types
self._documents = []
super().__init__(name, app_id,
id=id, icon=icon, startup=startup, on_exit=on_exit, factory=factory)
def _create_impl(self):
return self.factory.DocumentApp(interface=self)
@property
def documents(self):
""" Return the list of documents associated with this app.
Returns:
A ``list`` of ``str``.
"""
return self._documents
7909da2113e4b7f7be284adf6f8e50e17cb15a0f
| 12,656
|
py
|
Python
|
a_storage/ingest/in_dbnsfp4.py
|
ForomePlatform/Anfisa-Annotations
|
33844f1ee3f56ce7419d5f4b783388a97bf8665a
|
[
"Apache-2.0"
] | null | null | null |
a_storage/ingest/in_dbnsfp4.py
|
ForomePlatform/Anfisa-Annotations
|
33844f1ee3f56ce7419d5f4b783388a97bf8665a
|
[
"Apache-2.0"
] | 3
|
2022-03-28T13:44:24.000Z
|
2022-03-28T13:53:57.000Z
|
a_storage/ingest/in_dbnsfp4.py
|
evgeniyabrosin/Anfisa-Annotations
|
b111a0d397408cad4a0ee8dd00203ce1e6b2464c
|
[
"Apache-2.0"
] | 3
|
2019-02-18T17:05:06.000Z
|
2022-03-22T19:42:38.000Z
|
import sys, gzip, logging
from .in_util import TimeReport, detectFileChrom, extendFileList, dumpReader
#========================================
# Schema for AStorage
#========================================
_TRASCRIPT_PROPERTIES = [
{"name": "Ensembl_geneid", "tp": "str", "opt": "repeat"},
{"name": "Ensembl_transcriptid", "tp": "str", "opt": "repeat"},
{"name": "Ensembl_proteinid", "tp": "str", "opt": "repeat"},
{"name": "refcodon", "tp": "str", "opt": "repeat"},
{"name": "codonpos", "tp": "str", "opt": "repeat"},
{"name": "FATHMM_score", "tp": "num"},
{"name": "FATHMM_pred", "tp": "str", "opt": "dict"},
{"name": "GENCODE_basic", "tp": "str"},
{"name": "HGVSc_ANNOVAR", "tp": "str"},
{"name": "HGVSp_ANNOVAR", "tp": "str"},
{"name": "HGVSc_snpEff", "tp": "str"},
{"name": "HGVSp_snpEff", "tp": "str"},
{"name": "MPC_score", "tp": "num"},
{"name": "MutationTaster_score", "tp": "num"},
{"name": "MutationAssessor_pred", "tp": "str", "opt": "dict"},
{"name": "Polyphen2_HDIV_score", "tp": "num"},
{"name": "Polyphen2_HDIV_pred", "tp": "str", "opt": "dict"},
{"name": "Polyphen2_HVAR_score", "tp": "num"},
{"name": "Polyphen2_HVAR_pred", "tp": "str", "opt": "dict"},
{"name": "SIFT_score", "tp": "num"},
{"name": "SIFT_pred", "tp": "str", "opt": "dict"},
{"name": "SIFT4G_score", "tp": "num"},
{"name": "SIFT4G_pred", "tp": "str", "opt": "dict"},
{"name": "Uniprot_acc", "tp": "str"}
]
#===============================================
_FACETS_PROPERTIES = [
{"name": "MetaLR_score", "tp": "num"},
{"name": "MetaLR_rankscore", "tp": "num"},
{"name": "MetaLR_pred", "opt": "dict", "tp": "str"},
{"name": "MutPred_score", "tp": "str"},
{"name": "MutPred_rankscore", "tp": "num"},
{"name": "MutPred_protID", "tp": "str"},
{"name": "MutPred_AAchange", "tp": "str"},
{"name": "MutPred_Top5features", "tp": "str"},
{"name": "MPC_rankscore", "tp": "num"},
{"name": "PrimateAI_score", "tp": "num"},
{"name": "PrimateAI_rankscore", "tp": "num"},
{"name": "REVEL_score", "tp": "num"},
{"name": "SIFT4G_converted_rankscore", "tp": "num"},
{
"name": "transcripts", "tp": "list",
"item": {
"tp": "dict", "items": _TRASCRIPT_PROPERTIES
}
}
]
#===============================================
_VARIANT_PROPERTIES = [
{"name": "ALT", "tp": "str", "opt": "gene"},
{"name": "REF", "tp": "str", "opt": "gene"},
{"name": "CADD_raw", "tp": "num"},
{"name": "CADD_phred", "tp": "num"},
{"name": "DANN_score", "tp": "num"},
{"name": "DANN_rankscore", "tp": "num"},
{"name": "Eigen_raw_coding", "tp": "num"},
{"name": "Eigen_raw_coding_rankscore", "tp": "num"},
{"name": "Eigen_phred_coding", "tp": "num"},
{"name": "Eigen_PC_raw_coding", "tp": "num"},
{"name": "Eigen_PC_raw_coding_rankscore", "tp": "num"},
{"name": "Eigen_PC_phred_coding", "tp": "num"},
{"name": "GTEx_V7_gene", "tp": "str", "opt": "repeat"},
{"name": "GTEx_V7_tissue", "tp": "str"},
{"name": "MutationTaster_score", "tp": "str"},
{"name": "MutationTaster_pred", "tp": "str"},
{"name": "PrimateAI_pred", "tp": "str", "opt": "dict"},
{"name": "Geuvadis_eQTL_target_gene", "tp": "str"},
{
"name": "facets",
"tp": "list",
"item": {
"tp": "dict",
"items": _FACETS_PROPERTIES
}
}
]
#===============================================
SCHEMA_DBNSFP_4 = {
"name": "DBNSFP",
"key": "hg38",
"io": {
"block-type": "page-cluster",
"max-var-count": 50
},
"filter-list": {"ref": "REF", "alt": "ALT"},
"top": {
"tp": "list",
"item": {
"tp": "dict",
"items": _VARIANT_PROPERTIES
}
}
}
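# Shape of one ingested record (informal sketch based on DataCollector below;
# the field values are invented for illustration):
#
#   key:   ("chr1", 12345)
#   value: [                                # one entry per REF/ALT variant
#       {"REF": "A", "ALT": "C", ...,       # VARIANT_TAB fields
#        "facets": [                        # one entry per input line
#            {"MetaLR_score": 0.1, ...,     # FACET_TAB fields
#             "transcripts": [              # one dict per ';'-separated value
#                 {"Ensembl_geneid": "ENSG...", ...}]}]}]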
#========================================
# Ingest logic
#========================================
VARIANT_TAB = [
["REF", str],
["ALT", str],
["MutationTaster_score", str],
["MutationTaster_pred", str],
["PrimateAI_pred", str],
["CADD_raw", float],
["CADD_phred", float],
["DANN_score", float],
["DANN_rankscore", float],
["Eigen_raw_coding", float],
["Eigen_raw_coding_rankscore", float],
["Eigen_phred_coding", float],
["Eigen_PC_raw_coding", float],
["Eigen_PC_raw_coding_rankscore", float],
["Eigen_PC_phred_coding", float],
["GTEx_V7_gene", str],
["GTEx_V7_tissue", str],
["Geuvadis_eQTL_target_gene", str]
]
#========================================
FACET_TAB = [
["refcodon", str],
["codonpos", str],
["SIFT4G_converted_rankscore", float],
["MetaLR_score", float],
["MetaLR_rankscore", float],
["MetaLR_pred", str],
["REVEL_score", float],
["MutPred_score", str],
["MutPred_rankscore", float],
["MutPred_protID", str],
["MutPred_AAchange", str],
["MutPred_Top5features", str],
["MPC_rankscore", float],
["PrimateAI_score", float],
["PrimateAI_rankscore", float]
]
#========================================
TRANSCRIPT_TAB = [
["Ensembl_geneid", str],
["Ensembl_transcriptid", str],
["Ensembl_proteinid", str],
["Uniprot_acc", str],
["HGVSc_ANNOVAR", str],
["HGVSp_ANNOVAR", str],
["HGVSc_snpEff", str],
["HGVSp_snpEff", str],
["GENCODE_basic", str],
["SIFT_score", float],
["SIFT_pred", str],
["SIFT4G_score", float],
["SIFT4G_pred", str],
["Polyphen2_HDIV_score", float],
["Polyphen2_HDIV_pred", str],
["Polyphen2_HVAR_score", float],
["Polyphen2_HVAR_pred", str],
["MutationAssessor_score", float],
["MutationAssessor_pred", str],
["FATHMM_score", float],
["FATHMM_pred", str],
["MPC_score", float]
]
ALL_TABS = [VARIANT_TAB, FACET_TAB, TRANSCRIPT_TAB]
#========================================
FLD_NAME_MAP = {
"ref": "REF",
"alt": "ALT",
"Eigen_pred_coding": "Eigen_phred_coding"
}
def _normFieldName(name):
global FLD_NAME_MAP
name = name.replace('-', '_')
return FLD_NAME_MAP.get(name, name)
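# For example (values taken from FLD_NAME_MAP above):
#   _normFieldName("Eigen-pred-coding") -> "Eigen_phred_coding"
#   _normFieldName("ref") -> "REF"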
#========================================
def setupFields(field_line):
global ALL_TABS, FLD_NAME_MAP
assert field_line.startswith('#')
field_names = field_line[1:].split()
assert field_names[0].startswith("chr")
assert field_names[1].startswith("pos")
fields_idxs = {_normFieldName(name): idx
for idx, name in enumerate(field_names)}
errors = 0
for tab in ALL_TABS:
for field_info in tab:
idx = fields_idxs.get(field_info[0])
if idx is None:
errors += 1
logging.error("No field registered: %s" % field_info[0])
else:
if len(field_info) == 2:
field_info.append(idx)
else:
field_info[2] = idx
if errors > 0:
logging.info("Available fields:\n=====\n"
+ "\n".join(sorted(fields_idxs.keys())))
assert errors == 0
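# Illustrative header (abridged): a first line such as
#   "#chr pos(1-based) ref alt ... CADD_raw ..."
# is parsed so that every entry of VARIANT_TAB / FACET_TAB / TRANSCRIPT_TAB
# gains a third element holding the column index of that field in the data
# lines that follow; missing fields are logged and abort the run.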
#========================================
def iterFields(fields, properties_tab):
for name, tp, idx in properties_tab:
val = fields[idx]
if val == '.':
yield name, None
else:
yield name, tp(val)
def iterDeepFields(fields, properties_tab):
for name, tp, idx in properties_tab:
val_seq = []
for val in fields[idx].split(';'):
if val == '.':
val_seq.append(None)
else:
val_seq.append(tp(val))
yield name, val_seq
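# Illustrative sketch (not in the original source): for a properties entry
# ["SIFT_score", float, 5] and a column value "0.01;.;0.2", iterDeepFields
# yields ("SIFT_score", [0.01, None, 0.2]) -- one value per transcript --
# whereas iterFields converts the whole column with a single call to the
# registered type and is therefore used only for single-valued columns.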
#========================================
class DataCollector:
def __init__(self):
self.mCounts = [0, 0, 0]
self.mCurRecord = None
def getCounts(self):
return self.mCounts
def ingestLine(self, line):
global VARIANT_TAB, FACET_TAB, TRANSCRIPT_TAB
if line.endswith('\n'):
line = line[:-1]
fields = line.split('\t')
chrom = "chr" + str(fields[0])
pos = int(fields[1])
new_record = False
if self.mCurRecord is None or (chrom, pos) != self.mCurRecord[0]:
new_record = True
new_variant = new_record
var_data = dict()
for name, val in iterFields(fields, VARIANT_TAB):
var_data[name] = val
if not new_variant and val != self.mCurRecord[1][-1][name]:
new_variant = True
facet_data = {name: val
for name, val in iterFields(fields, FACET_TAB)}
tr_data_seq = None
for name, val_seq in iterDeepFields(fields, TRANSCRIPT_TAB):
if tr_data_seq is None:
tr_data_seq = [{name: val} for val in val_seq]
else:
for idx, val in enumerate(val_seq):
tr_data_seq[idx][name] = val
if tr_data_seq is None:
tr_data_seq = []
facet_data["transcripts"] = tr_data_seq
self.mCounts[2] += len(tr_data_seq)
self.mCounts[1] += 1
ret = None
if new_record:
self.mCounts[0] += 1
var_data["facets"] = [facet_data]
ret, self.mCurRecord = self.mCurRecord, [(chrom, pos), [var_data]]
elif new_variant:
self.mCounts[0] += 1
var_data["facets"] = [facet_data]
self.mCurRecord[1].append(var_data)
else:
self.mCurRecord[1][-1]["facets"].append(facet_data)
return ret
def finishUp(self):
return self.mCurRecord
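# Minimal usage sketch (illustrative; assumes tab-separated dbNSFP data lines
# and that setupFields() has already seen the header). Lines sharing the same
# (chrom, pos) and identical variant-level values are merged: their facet data
# is appended to the current variant, ingestLine() returns the previous record
# whenever a new position starts, and finishUp() flushes the last record.
#
#   collector = DataCollector()
#   for line in data_lines:              # data_lines: iterable of dbNSFP rows
#       record = collector.ingestLine(line)
#       if record is not None:
#           handle(record)               # record == [(chrom, pos), [variant, ...]]
#   last_record = collector.finishUp()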
#========================================
#========================================
class ReaderDBNSFP4:
    def __init__(self, file_list, chrom_loc="chr"):
self.mFiles = extendFileList(file_list)
self.mChromLoc = chrom_loc
def read(self):
exceptions = 0
for chrom_file in self.mFiles:
chrom = detectFileChrom(chrom_file, self.mChromLoc)
logging.info("Evaluation of %s in %s" % (chrom, chrom_file))
with gzip.open(chrom_file, 'rt') as text_inp:
time_rep = TimeReport("chr" + chrom)
collector = DataCollector()
for line_no, line in enumerate(text_inp):
if line_no == 0:
setupFields(line)
continue
try:
info = collector.ingestLine(line)
if info is not None:
yield info
if (line_no % 10000) == 0:
total_var, _, _ = collector.getCounts()
time_rep.portion(total_var)
except IndexError:
exceptions += 1
info = collector.finishUp()
if info:
yield info
total_var, total_facets, total_tr = collector.getCounts()
time_rep.done(total_var)
logging.info("transcripts: %d, facets: %d, exceptions: %d"
% (total_tr, total_facets, exceptions))
#========================================
def reader_dbNSFP4(properties, schema_h=None):
return ReaderDBNSFP4(
properties["file_list"],
properties.get("chrom_loc", "chr"))
#========================================
if __name__ == '__main__':
logging.root.setLevel(logging.INFO)
reader = reader_dbNSFP4({"file_list": sys.argv[1]})
dumpReader(reader)
| 37.005848
| 78
| 0.459387
|
| true
| true
|
7909da388b05f366cbb53a0299ec4c56b3bbd262
| 2,182
|
py
|
Python
|
plugins/filter/php_config.py
|
manala/ansible-roles
|
30dc7d0bcea10ac4b38c6ad85ad66dbd098131f4
|
[
"MIT"
] | 138
|
2017-05-18T13:45:45.000Z
|
2022-03-23T02:33:45.000Z
|
plugins/filter/php_config.py
|
manala/ansible-roles
|
30dc7d0bcea10ac4b38c6ad85ad66dbd098131f4
|
[
"MIT"
] | 159
|
2017-05-11T09:05:26.000Z
|
2022-03-04T07:36:59.000Z
|
plugins/filter/php_config.py
|
manala/ansible-roles
|
30dc7d0bcea10ac4b38c6ad85ad66dbd098131f4
|
[
"MIT"
] | 35
|
2017-06-29T09:01:42.000Z
|
2021-11-18T11:35:00.000Z
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.errors import AnsibleFilterError
from ansible.module_utils.six import iteritems, string_types
from numbers import Number
def config(parameters, exclude=None):
exclude = exclude or []
if not isinstance(parameters, dict):
raise AnsibleFilterError('php_config expects a dict but was given a %s' % type(parameters))
    for key in exclude:
        parameters.pop(key, None)
result = ''
for key in sorted(parameters):
parameter = config_parameter(parameters, key)
if parameter:
result += '\n%s' % parameter
return result.lstrip()
def config_parameter(parameters, key, required=False, comment=False, **kwargs):
if not isinstance(parameters, dict):
raise AnsibleFilterError('php_config_parameter parameters expects a dict but was given a %s' % type(parameters))
if not isinstance(key, string_types):
raise AnsibleFilterError('php_config_parameter key expects a string but was given a %s' % type(key))
if key in parameters:
value = parameters.get(key)
else:
if required:
raise AnsibleFilterError('php_config_parameter requires a value for key %s' % key)
if isinstance(comment, string_types):
return comment
if 'default' not in kwargs:
raise AnsibleFilterError('php_config_parameter missing a default value for key %s' % key)
value = kwargs.get('default')
if value is True:
result = '%s = On' % key
elif value is False:
result = '%s = Off' % key
elif isinstance(value, (string_types, Number)):
result = '%s = %s' % (key, value)
else:
raise AnsibleFilterError('php_config_parameter value of an unknown type %s' % type(value))
if key not in parameters and comment:
result = ';' + result.replace('\n', '\n;')
return result
class FilterModule(object):
''' Manala php config jinja2 filters '''
def filters(self):
filters = {
'php_config': config,
'php_config_parameter': config_parameter,
}
return filters
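# Illustrative rendering example (not part of the original plugin): keys are
# emitted in sorted order, booleans map to On/Off, and excluded keys are
# dropped before rendering.
#
#   config({'memory_limit': '128M', 'display_errors': False})
#   # -> "display_errors = Off\nmemory_limit = 128M"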
| 33.569231
| 120
| 0.660862
|
| true
| true
|
7909daa123662f3a6b92b966f648e29f5deec9b4
| 2,328
|
py
|
Python
|
docs/conf.py
|
iancze/bettermoments
|
d97bd4ae8d11a3670ca4ee97bd658c97455bcc49
|
[
"MIT"
] | 1
|
2020-04-01T15:31:50.000Z
|
2020-04-01T15:31:50.000Z
|
docs/conf.py
|
iancze/bettermoments
|
d97bd4ae8d11a3670ca4ee97bd658c97455bcc49
|
[
"MIT"
] | null | null | null |
docs/conf.py
|
iancze/bettermoments
|
d97bd4ae8d11a3670ca4ee97bd658c97455bcc49
|
[
"MIT"
] | null | null | null |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
# -- Project information -----------------------------------------------------
project = 'bettermoments'
copyright = '2019, Richard Teague'
author = 'Richard Teague'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.napoleon',
'sphinx.ext.imgmath',
# 'nbsphinx',
]
# Mock heavy runtime dependencies so autodoc can import the package even when
# they are not installed in the documentation build environment.
autodoc_mock_imports = ['astropy', 'scipy', 'argparse', 'numpy']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
master_doc = "index"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Readthedocs.
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd:
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
| 34.746269
| 79
| 0.668814
|
| true
| true
|
7909daf9cfd323110431936ce508e9cd4abe27e7
| 775
|
py
|
Python
|
Graphs/graphs creation/directed graph/adjacency matrix/index.py
|
PawanRamaMali/LeetCode
|
457700c241a754e53e3d2f1e21c9772ec8019bdc
|
[
"MIT"
] | null | null | null |
Graphs/graphs creation/directed graph/adjacency matrix/index.py
|
PawanRamaMali/LeetCode
|
457700c241a754e53e3d2f1e21c9772ec8019bdc
|
[
"MIT"
] | null | null | null |
Graphs/graphs creation/directed graph/adjacency matrix/index.py
|
PawanRamaMali/LeetCode
|
457700c241a754e53e3d2f1e21c9772ec8019bdc
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
class Graph:
def __init__(self, numberOfNodes):
self.numberOfNodes = numberOfNodes+1
self.graph = [[0 for x in range(numberOfNodes+1)]
for y in range(numberOfNodes+1)]
    def withInBounds(self, v1, v2):
        # Valid indices run from 0 to self.numberOfNodes - 1, so the upper
        # bound must be strict to avoid an IndexError in insertEdge.
        return (0 <= v1 < self.numberOfNodes) and (0 <= v2 < self.numberOfNodes)
def insertEdge(self, v1, v2):
if(self.withInBounds(v1, v2)):
self.graph[v1][v2] = 1
def printGraph(self):
for i in range(self.numberOfNodes):
for j in range(len(self.graph[i])):
if(self.graph[i][j]):
print(i, "->", j)
g = Graph(5)
g.insertEdge(1, 2)
g.insertEdge(2, 3)
g.insertEdge(4, 5)
g.printGraph()
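# Expected output for the edges inserted above:
#   1 -> 2
#   2 -> 3
#   4 -> 5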
| 25
| 96
| 0.575484
|
| true
| true
|
7909db07c36cc57956545fc7274bc3cd67f8e0dd
| 1,286
|
py
|
Python
|
tests/test_utils.py
|
duranbe/doccano-transformer
|
1d19de73bf3b0cebadb31508e3a1864b4e1a6414
|
[
"MIT"
] | 73
|
2020-03-29T15:39:57.000Z
|
2022-02-24T08:40:11.000Z
|
tests/test_utils.py
|
duranbe/doccano-transformer
|
1d19de73bf3b0cebadb31508e3a1864b4e1a6414
|
[
"MIT"
] | 18
|
2020-03-30T10:32:24.000Z
|
2021-09-02T06:58:22.000Z
|
tests/test_utils.py
|
duranbe/doccano-transformer
|
1d19de73bf3b0cebadb31508e3a1864b4e1a6414
|
[
"MIT"
] | 22
|
2020-04-03T00:10:48.000Z
|
2022-03-11T07:14:32.000Z
|
from unittest import TestCase
from doccano_transformer import utils
class TestUtils(TestCase):
def test_get_offsets(self):
text = ' This is Doccano Transformer . '
tokens = text.split()
result = utils.get_offsets(text, tokens)
expected = [1, 6, 9, 17, 29]
self.assertListEqual(result, expected)
def test_create_bio_tags(self):
tokens = ' This is Doccano Transformer . '.split()
offsets = [1, 6, 9, 17, 29]
labels = [[9, 28, 'SOFTWARE']]
result = utils.create_bio_tags(tokens, offsets, labels)
expected = ['O', 'O', 'B-SOFTWARE', 'I-SOFTWARE', 'O']
self.assertListEqual(result, expected)
def test_convert_tokens_and_offsets_to_spacy_tokens(self):
tokens = 'This is Doccano Transformer .'.split()
offsets = [0, 5, 8, 16, 28]
spacy_tokens = utils.convert_tokens_and_offsets_to_spacy_tokens(
tokens, offsets
)
for i, (spacy_token, token, offset) in enumerate(
zip(spacy_tokens, tokens, offsets)
):
self.assertEqual(str(spacy_token), token)
self.assertEqual(len(spacy_token), len(token))
self.assertEqual(spacy_token.i, i)
self.assertEqual(spacy_token.idx, offset)
| 35.722222
| 72
| 0.623639
|
| true
| true
|
7909db7b47824caba868de90babb2a3b2552836c
| 2,726
|
py
|
Python
|
test/python/transpiler/test_decompose.py
|
jagunnels/qiskit-sdk-py
|
153cdde972e65c0f23675bbe17c93e18be27bd51
|
[
"Apache-2.0"
] | 2
|
2021-09-06T19:25:36.000Z
|
2021-11-17T10:46:12.000Z
|
test/python/transpiler/test_decompose.py
|
jagunnels/qiskit-sdk-py
|
153cdde972e65c0f23675bbe17c93e18be27bd51
|
[
"Apache-2.0"
] | null | null | null |
test/python/transpiler/test_decompose.py
|
jagunnels/qiskit-sdk-py
|
153cdde972e65c0f23675bbe17c93e18be27bd51
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
"""Test the decompose pass"""
from sympy import pi
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
from qiskit.transpiler.passes import Decompose
from qiskit.converters import circuit_to_dag
from qiskit.extensions.standard import HGate
from qiskit.extensions.standard import ToffoliGate
from qiskit.test import QiskitTestCase
class TestDecompose(QiskitTestCase):
"""Tests the decompose pass."""
def test_basic(self):
"""Test decompose a single H into u2.
"""
qr = QuantumRegister(1, 'qr')
circuit = QuantumCircuit(qr)
circuit.h(qr[0])
dag = circuit_to_dag(circuit)
pass_ = Decompose(HGate)
after_dag = pass_.run(dag)
op_nodes = after_dag.op_nodes()
self.assertEqual(len(op_nodes), 1)
self.assertEqual(op_nodes[0].name, 'u2')
def test_decompose_only_h(self):
"""Test to decompose a single H, without the rest
"""
qr = QuantumRegister(2, 'qr')
circuit = QuantumCircuit(qr)
circuit.h(qr[0])
circuit.cx(qr[0], qr[1])
dag = circuit_to_dag(circuit)
pass_ = Decompose(HGate)
after_dag = pass_.run(dag)
op_nodes = after_dag.op_nodes()
self.assertEqual(len(op_nodes), 2)
for node in op_nodes:
self.assertIn(node.name, ['cx', 'u2'])
def test_decompose_toffoli(self):
"""Test decompose CCX.
"""
qr1 = QuantumRegister(2, 'qr1')
qr2 = QuantumRegister(1, 'qr2')
circuit = QuantumCircuit(qr1, qr2)
circuit.ccx(qr1[0], qr1[1], qr2[0])
dag = circuit_to_dag(circuit)
pass_ = Decompose(ToffoliGate)
after_dag = pass_.run(dag)
op_nodes = after_dag.op_nodes()
self.assertEqual(len(op_nodes), 15)
for node in op_nodes:
self.assertIn(node.name, ['h', 't', 'tdg', 'cx'])
def test_decompose_conditional(self):
"""Test decompose a 1-qubit gates with a conditional.
"""
qr = QuantumRegister(1, 'qr')
cr = ClassicalRegister(1, 'cr')
circuit = QuantumCircuit(qr, cr)
circuit.h(qr).c_if(cr, 1)
circuit.x(qr).c_if(cr, 1)
dag = circuit_to_dag(circuit)
pass_ = Decompose(HGate)
after_dag = pass_.run(dag)
ref_circuit = QuantumCircuit(qr, cr)
ref_circuit.u2(0, pi, qr[0]).c_if(cr, 1)
ref_circuit.x(qr).c_if(cr, 1)
ref_dag = circuit_to_dag(ref_circuit)
self.assertEqual(after_dag, ref_dag)
| 32.452381
| 77
| 0.624725
|
| true
| true
|
7909db92a59a8f9f8203bc3a73eeba69fadc53c6
| 51
|
py
|
Python
|
avalara/exceptions.py
|
SendOutCards/py-avalara
|
ac13de14a49934ed461baf4620e0c3d856ff12ff
|
[
"MIT"
] | null | null | null |
avalara/exceptions.py
|
SendOutCards/py-avalara
|
ac13de14a49934ed461baf4620e0c3d856ff12ff
|
[
"MIT"
] | null | null | null |
avalara/exceptions.py
|
SendOutCards/py-avalara
|
ac13de14a49934ed461baf4620e0c3d856ff12ff
|
[
"MIT"
] | null | null | null |
class AvalaraExceptionResponse(Exception):
pass
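# Illustrative usage (assumed, not taken from the original package):
#   if response.status_code != 200:
#       raise AvalaraExceptionResponse(response.text)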
| 25.5
| 42
| 0.823529
|
| true
| true
|
7909dc1b806e2f6d50d9b99b27ec103b33dd3a90
| 16,074
|
py
|
Python
|
django/middleware/csrf.py
|
cachitas/django
|
5b618f239ceb884c9380cf42361c7cc69bf1e208
|
[
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null |
django/middleware/csrf.py
|
cachitas/django
|
5b618f239ceb884c9380cf42361c7cc69bf1e208
|
[
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null |
django/middleware/csrf.py
|
cachitas/django
|
5b618f239ceb884c9380cf42361c7cc69bf1e208
|
[
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null |
"""
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
import logging
import re
import string
from collections import defaultdict
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import DisallowedHost, ImproperlyConfigured
from django.urls import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.crypto import constant_time_compare, get_random_string
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import cached_property
from django.utils.http import is_same_domain
from django.utils.log import log_response
logger = logging.getLogger('django.security.csrf')
REASON_BAD_ORIGIN = "Origin checking failed - %s does not match any trusted origins."
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_BAD_TOKEN = "CSRF token missing or incorrect."
REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed."
REASON_INSECURE_REFERER = "Referer checking failed - Referer is insecure while host is secure."
CSRF_SECRET_LENGTH = 32
CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH
CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits
CSRF_SESSION_KEY = '_csrftoken'
def _get_failure_view():
"""Return the view to be used for CSRF rejections."""
return get_callable(settings.CSRF_FAILURE_VIEW)
def _get_new_csrf_string():
return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS)
def _mask_cipher_secret(secret):
"""
Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a
token by adding a mask and applying it to the secret.
"""
mask = _get_new_csrf_string()
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in mask))
cipher = ''.join(chars[(x + y) % len(chars)] for x, y in pairs)
return mask + cipher
def _unmask_cipher_token(token):
"""
Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length
CSRF_TOKEN_LENGTH, and that its first half is a mask), use it to decrypt
the second half to produce the original secret.
"""
mask = token[:CSRF_SECRET_LENGTH]
token = token[CSRF_SECRET_LENGTH:]
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in mask))
return ''.join(chars[x - y] for x, y in pairs) # Note negative values are ok
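# Illustrative property of the two helpers above (not an upstream comment):
# _unmask_cipher_token(_mask_cipher_secret(secret)) == secret for any secret
# drawn from CSRF_ALLOWED_CHARS, because the mask is prepended verbatim and
# each cipher character stores (secret index + mask index) modulo
# len(CSRF_ALLOWED_CHARS).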
def _get_new_csrf_token():
return _mask_cipher_secret(_get_new_csrf_string())
def get_token(request):
"""
Return the CSRF token required for a POST form. The token is an
alphanumeric value. A new token is created if one is not already set.
A side effect of calling this function is to make the csrf_protect
decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie'
header to the outgoing response. For this reason, you may need to use this
function lazily, as is done by the csrf context processor.
"""
if "CSRF_COOKIE" not in request.META:
csrf_secret = _get_new_csrf_string()
request.META["CSRF_COOKIE"] = _mask_cipher_secret(csrf_secret)
else:
csrf_secret = _unmask_cipher_token(request.META["CSRF_COOKIE"])
request.META["CSRF_COOKIE_USED"] = True
return _mask_cipher_secret(csrf_secret)
def rotate_token(request):
"""
Change the CSRF token in use for a request - should be done on login
for security purposes.
"""
request.META.update({
"CSRF_COOKIE_USED": True,
"CSRF_COOKIE": _get_new_csrf_token(),
})
request.csrf_cookie_needs_reset = True
def _sanitize_token(token):
# Allow only ASCII alphanumerics
if re.search('[^a-zA-Z0-9]', token):
return _get_new_csrf_token()
elif len(token) == CSRF_TOKEN_LENGTH:
return token
elif len(token) == CSRF_SECRET_LENGTH:
# Older Django versions set cookies to values of CSRF_SECRET_LENGTH
# alphanumeric characters. For backwards compatibility, accept
# such values as unmasked secrets.
# It's easier to mask here and be consistent later, rather than add
# different code paths in the checks, although that might be a tad more
# efficient.
return _mask_cipher_secret(token)
return _get_new_csrf_token()
def _compare_masked_tokens(request_csrf_token, csrf_token):
# Assume both arguments are sanitized -- that is, strings of
# length CSRF_TOKEN_LENGTH, all CSRF_ALLOWED_CHARS.
return constant_time_compare(
_unmask_cipher_token(request_csrf_token),
_unmask_cipher_token(csrf_token),
)
class CsrfViewMiddleware(MiddlewareMixin):
"""
Require a present and correct csrfmiddlewaretoken for POST requests that
have a CSRF cookie, and set an outgoing CSRF cookie.
This middleware should be used in conjunction with the {% csrf_token %}
template tag.
"""
@cached_property
def csrf_trusted_origins_hosts(self):
return [
urlparse(origin).netloc.lstrip('*')
for origin in settings.CSRF_TRUSTED_ORIGINS
]
@cached_property
def allowed_origins_exact(self):
return {
origin for origin in settings.CSRF_TRUSTED_ORIGINS
if '*' not in origin
}
@cached_property
def allowed_origin_subdomains(self):
"""
A mapping of allowed schemes to list of allowed netlocs, where all
subdomains of the netloc are allowed.
"""
allowed_origin_subdomains = defaultdict(list)
for parsed in (urlparse(origin) for origin in settings.CSRF_TRUSTED_ORIGINS if '*' in origin):
allowed_origin_subdomains[parsed.scheme].append(parsed.netloc.lstrip('*'))
return allowed_origin_subdomains
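    # Illustrative example (not an upstream comment): with
    # CSRF_TRUSTED_ORIGINS = ['https://*.example.com'] this property yields a
    # mapping equivalent to {'https': ['.example.com']}, and _origin_verified()
    # below then accepts any Origin whose netloc is example.com or one of its
    # subdomains via is_same_domain().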
# The _accept and _reject methods currently only exist for the sake of the
# requires_csrf_token decorator.
def _accept(self, request):
# Avoid checking the request twice by adding a custom attribute to
# request. This will be relevant when both decorator and middleware
# are used.
request.csrf_processing_done = True
return None
def _reject(self, request, reason):
response = _get_failure_view()(request, reason=reason)
log_response(
'Forbidden (%s): %s', reason, request.path,
response=response,
request=request,
logger=logger,
)
return response
def _get_token(self, request):
if settings.CSRF_USE_SESSIONS:
try:
return request.session.get(CSRF_SESSION_KEY)
except AttributeError:
raise ImproperlyConfigured(
'CSRF_USE_SESSIONS is enabled, but request.session is not '
'set. SessionMiddleware must appear before CsrfViewMiddleware '
'in MIDDLEWARE.'
)
else:
try:
cookie_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
return None
csrf_token = _sanitize_token(cookie_token)
if csrf_token != cookie_token:
# Cookie token needed to be replaced;
# the cookie needs to be reset.
request.csrf_cookie_needs_reset = True
return csrf_token
def _set_token(self, request, response):
if settings.CSRF_USE_SESSIONS:
if request.session.get(CSRF_SESSION_KEY) != request.META['CSRF_COOKIE']:
request.session[CSRF_SESSION_KEY] = request.META['CSRF_COOKIE']
else:
response.set_cookie(
settings.CSRF_COOKIE_NAME,
request.META['CSRF_COOKIE'],
max_age=settings.CSRF_COOKIE_AGE,
domain=settings.CSRF_COOKIE_DOMAIN,
path=settings.CSRF_COOKIE_PATH,
secure=settings.CSRF_COOKIE_SECURE,
httponly=settings.CSRF_COOKIE_HTTPONLY,
samesite=settings.CSRF_COOKIE_SAMESITE,
)
# Set the Vary header since content varies with the CSRF cookie.
patch_vary_headers(response, ('Cookie',))
def _origin_verified(self, request):
request_origin = request.META['HTTP_ORIGIN']
good_origin = '%s://%s' % (
'https' if request.is_secure() else 'http',
request.get_host(),
)
if request_origin == good_origin:
return True
if request_origin in self.allowed_origins_exact:
return True
try:
parsed_origin = urlparse(request_origin)
except ValueError:
return False
request_scheme = parsed_origin.scheme
request_netloc = parsed_origin.netloc
return any(
is_same_domain(request_netloc, host)
for host in self.allowed_origin_subdomains.get(request_scheme, ())
)
def process_request(self, request):
csrf_token = self._get_token(request)
if csrf_token is not None:
# Use same token next time.
request.META['CSRF_COOKIE'] = csrf_token
def process_view(self, request, callback, callback_args, callback_kwargs):
if getattr(request, 'csrf_processing_done', False):
return None
# Wait until request.META["CSRF_COOKIE"] has been manipulated before
# bailing out, so that get_token still works
if getattr(callback, 'csrf_exempt', False):
return None
# Assume that anything not defined as 'safe' by RFC7231 needs protection
if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
if getattr(request, '_dont_enforce_csrf_checks', False):
# Mechanism to turn off CSRF checks for test suite.
# It comes after the creation of CSRF cookies, so that
# everything else continues to work exactly the same
# (e.g. cookies are sent, etc.), but before any
# branches that call reject().
return self._accept(request)
# Reject the request if the Origin header doesn't match an allowed
# value.
if 'HTTP_ORIGIN' in request.META:
if not self._origin_verified(request):
return self._reject(request, REASON_BAD_ORIGIN % request.META['HTTP_ORIGIN'])
elif request.is_secure():
# If the Origin header wasn't provided, reject HTTPS requests
# if the Referer header doesn't match an allowed value.
#
# Suppose user visits http://example.com/
# An active network attacker (man-in-the-middle, MITM) sends a
# POST form that targets https://example.com/detonate-bomb/ and
# submits it via JavaScript.
#
# The attacker will need to provide a CSRF cookie and token, but
# that's no problem for a MITM and the session-independent
# secret we're using. So the MITM can circumvent the CSRF
# protection. This is true for any HTTP connection, but anyone
# using HTTPS expects better! For this reason, for
# https://example.com/ we need additional protection that treats
# http://example.com/ as completely untrusted. Under HTTPS,
# Barth et al. found that the Referer header is missing for
# same-domain requests in only about 0.2% of cases or less, so
# we can use strict Referer checking.
referer = request.META.get('HTTP_REFERER')
if referer is None:
return self._reject(request, REASON_NO_REFERER)
try:
referer = urlparse(referer)
except ValueError:
return self._reject(request, REASON_MALFORMED_REFERER)
# Make sure we have a valid URL for Referer.
if '' in (referer.scheme, referer.netloc):
return self._reject(request, REASON_MALFORMED_REFERER)
# Ensure that our Referer is also secure.
if referer.scheme != 'https':
return self._reject(request, REASON_INSECURE_REFERER)
good_referer = (
settings.SESSION_COOKIE_DOMAIN
if settings.CSRF_USE_SESSIONS
else settings.CSRF_COOKIE_DOMAIN
)
if good_referer is None:
# If no cookie domain is configured, allow matching the
# current host:port exactly if it's permitted by
# ALLOWED_HOSTS.
try:
# request.get_host() includes the port.
good_referer = request.get_host()
except DisallowedHost:
pass
else:
server_port = request.get_port()
if server_port not in ('443', '80'):
good_referer = '%s:%s' % (good_referer, server_port)
# Create an iterable of all acceptable HTTP referers.
good_hosts = self.csrf_trusted_origins_hosts
if good_referer is not None:
good_hosts = (*good_hosts, good_referer)
if not any(is_same_domain(referer.netloc, host) for host in good_hosts):
reason = REASON_BAD_REFERER % referer.geturl()
return self._reject(request, reason)
# Access csrf_token via self._get_token() as rotate_token() may
# have been called by an authentication middleware during the
# process_request() phase.
csrf_token = self._get_token(request)
if csrf_token is None:
# No CSRF cookie. For POST requests, we insist on a CSRF cookie,
# and in this way we can avoid all CSRF attacks, including login
# CSRF.
return self._reject(request, REASON_NO_CSRF_COOKIE)
# Check non-cookie token for match.
request_csrf_token = ""
if request.method == "POST":
try:
request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
except OSError:
# Handle a broken connection before we've completed reading
# the POST data. process_view shouldn't raise any
# exceptions, so we'll ignore and serve the user a 403
# (assuming they're still listening, which they probably
# aren't because of the error).
pass
if request_csrf_token == "":
# Fall back to X-CSRFToken, to make things easier for AJAX,
# and possible for PUT/DELETE.
request_csrf_token = request.META.get(settings.CSRF_HEADER_NAME, '')
request_csrf_token = _sanitize_token(request_csrf_token)
if not _compare_masked_tokens(request_csrf_token, csrf_token):
return self._reject(request, REASON_BAD_TOKEN)
return self._accept(request)
def process_response(self, request, response):
if not getattr(request, 'csrf_cookie_needs_reset', False):
if getattr(response, 'csrf_cookie_set', False):
return response
if not request.META.get("CSRF_COOKIE_USED", False):
return response
# Set the CSRF cookie even if it's already set, so we renew
# the expiry timer.
self._set_token(request, response)
response.csrf_cookie_set = True
return response
| 41.321337
| 102
| 0.632885
|
| true
| true
|
7909dc61c060dc9228c3dde7555aad1a4979f61f
| 84,712
|
py
|
Python
|
test/test_utils.py
|
udaykapur/yt-dlp
|
743f39750cccf53bc320e057a6ed05e301e8ed48
|
[
"Unlicense"
] | null | null | null |
test/test_utils.py
|
udaykapur/yt-dlp
|
743f39750cccf53bc320e057a6ed05e301e8ed48
|
[
"Unlicense"
] | null | null | null |
test/test_utils.py
|
udaykapur/yt-dlp
|
743f39750cccf53bc320e057a6ed05e301e8ed48
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python3
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Various small unit tests
import io
import itertools
import json
import xml.etree.ElementTree
from yt_dlp.compat import (
compat_chr,
compat_etree_fromstring,
compat_getenv,
compat_HTMLParseError,
compat_os_name,
compat_setenv,
)
from yt_dlp.utils import (
Config,
DateRange,
ExtractorError,
InAdvancePagedList,
LazyList,
OnDemandPagedList,
age_restricted,
args_to_str,
base_url,
caesar,
clean_html,
clean_podcast_url,
cli_bool_option,
cli_option,
cli_valueless_option,
date_from_str,
datetime_from_str,
detect_exe_version,
determine_ext,
dfxp2srt,
dict_get,
encode_base_n,
encode_compat_str,
encodeFilename,
escape_rfc3986,
escape_url,
expand_path,
extract_attributes,
find_xpath_attr,
fix_xml_ampersands,
float_or_none,
format_bytes,
get_element_by_attribute,
get_element_by_class,
get_element_html_by_attribute,
get_element_html_by_class,
get_element_text_and_html_by_tag,
get_elements_by_attribute,
get_elements_by_class,
get_elements_html_by_attribute,
get_elements_html_by_class,
get_elements_text_and_html_by_attribute,
int_or_none,
intlist_to_bytes,
iri_to_uri,
is_html,
js_to_json,
limit_length,
locked_file,
lowercase_escape,
match_str,
merge_dicts,
mimetype2ext,
month_by_name,
multipart_encode,
ohdave_rsa_encrypt,
orderedSet,
parse_age_limit,
parse_bitrate,
parse_codecs,
parse_count,
parse_dfxp_time_expr,
parse_duration,
parse_filesize,
parse_iso8601,
parse_qs,
parse_resolution,
pkcs1pad,
prepend_extension,
read_batch_urls,
remove_end,
remove_quotes,
remove_start,
render_table,
replace_extension,
rot47,
sanitize_filename,
sanitize_path,
sanitize_url,
sanitized_Request,
shell_quote,
smuggle_url,
str_to_int,
strip_jsonp,
strip_or_none,
subtitles_filename,
timeconvert,
unescapeHTML,
unified_strdate,
unified_timestamp,
unsmuggle_url,
update_url_query,
uppercase_escape,
url_basename,
url_or_none,
urlencode_postdata,
urljoin,
urshift,
version_tuple,
xpath_attr,
xpath_element,
xpath_text,
xpath_with_ns,
)
class TestUtil(unittest.TestCase):
def test_timeconvert(self):
self.assertTrue(timeconvert('') is None)
self.assertTrue(timeconvert('bougrg') is None)
def test_sanitize_filename(self):
self.assertEqual(sanitize_filename(''), '')
self.assertEqual(sanitize_filename('abc'), 'abc')
self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')
self.assertEqual(sanitize_filename('123'), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de'))
self.assertFalse('/' in sanitize_filename('abc/de///'))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
self.assertEqual('yes no', sanitize_filename('yes? no'))
self.assertEqual('this - that', sanitize_filename('this: that'))
self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
aumlaut = 'ä'
self.assertEqual(sanitize_filename(aumlaut), aumlaut)
tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
self.assertEqual(sanitize_filename(tests), tests)
self.assertEqual(
sanitize_filename('New World record at 0:12:34'),
'New World record at 0_12_34')
self.assertEqual(sanitize_filename('--gasdgf'), '--gasdgf')
self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
self.assertEqual(sanitize_filename('--gasdgf', is_id=False), '_-gasdgf')
self.assertEqual(sanitize_filename('.gasdgf'), '.gasdgf')
self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
self.assertEqual(sanitize_filename('.gasdgf', is_id=False), 'gasdgf')
forbidden = '"\0\\/'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc))
def test_sanitize_filename_restricted(self):
self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')
self.assertEqual(sanitize_filename('123', restricted=True), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))
tests = 'aäb\u4e2d\u56fd\u7684c'
self.assertEqual(sanitize_filename(tests, restricted=True), 'aab_c')
self.assertTrue(sanitize_filename('\xf6', restricted=True) != '') # No empty filename
forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))
# Handle a common case more neatly
self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
# .. but make sure the file name is never empty
self.assertTrue(sanitize_filename('-', restricted=True) != '')
self.assertTrue(sanitize_filename(':', restricted=True) != '')
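        # Accented Latin characters are transliterated to plain ASCII in restricted mode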
self.assertEqual(sanitize_filename(
'ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ', restricted=True),
'AAAAAAAECEEEEIIIIDNOOOOOOOOEUUUUUYTHssaaaaaaaeceeeeiiiionooooooooeuuuuuythy')
def test_sanitize_ids(self):
self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
def test_sanitize_path(self):
if sys.platform != 'win32':
return
self.assertEqual(sanitize_path('abc'), 'abc')
self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
self.assertEqual(sanitize_path('abc|def'), 'abc#def')
self.assertEqual(sanitize_path('<>:"|?*'), '#######')
self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')
self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(
sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')
self.assertEqual(
sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
self.assertEqual(sanitize_path('../abc'), '..\\abc')
self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
self.assertEqual(sanitize_path('./abc'), 'abc')
self.assertEqual(sanitize_path('./../abc'), '..\\abc')
def test_sanitize_url(self):
self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
self.assertEqual(sanitize_url('httpss://foo.bar'), 'https://foo.bar')
self.assertEqual(sanitize_url('rmtps://foo.bar'), 'rtmps://foo.bar')
self.assertEqual(sanitize_url('https://foo.bar'), 'https://foo.bar')
self.assertEqual(sanitize_url('foo bar'), 'foo bar')
def test_extract_basic_auth(self):
auth_header = lambda url: sanitized_Request(url).get_header('Authorization')
self.assertFalse(auth_header('http://foo.bar'))
self.assertFalse(auth_header('http://:foo.bar'))
self.assertEqual(auth_header('http://@foo.bar'), 'Basic Og==')
self.assertEqual(auth_header('http://:pass@foo.bar'), 'Basic OnBhc3M=')
self.assertEqual(auth_header('http://user:@foo.bar'), 'Basic dXNlcjo=')
self.assertEqual(auth_header('http://user:pass@foo.bar'), 'Basic dXNlcjpwYXNz')
def test_expand_path(self):
def env(var):
return f'%{var}%' if sys.platform == 'win32' else f'${var}'
compat_setenv('yt_dlp_EXPATH_PATH', 'expanded')
self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded')
self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME'))
self.assertEqual(expand_path('~'), compat_getenv('HOME'))
self.assertEqual(
expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
'%s/expanded' % compat_getenv('HOME'))
def test_prepend_extension(self):
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
def test_replace_extension(self):
self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
def test_subtitles_filename(self):
self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt'), 'abc.en.vtt')
self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt', 'ext'), 'abc.en.vtt')
self.assertEqual(subtitles_filename('abc.unexpected_ext', 'en', 'vtt', 'ext'), 'abc.unexpected_ext.en.vtt')
def test_remove_start(self):
self.assertEqual(remove_start(None, 'A - '), None)
self.assertEqual(remove_start('A - B', 'A - '), 'B')
self.assertEqual(remove_start('B - A', 'A - '), 'B - A')
def test_remove_end(self):
self.assertEqual(remove_end(None, ' - B'), None)
self.assertEqual(remove_end('A - B', ' - B'), 'A')
self.assertEqual(remove_end('B - A', ' - B'), 'B - A')
def test_remove_quotes(self):
self.assertEqual(remove_quotes(None), None)
self.assertEqual(remove_quotes('"'), '"')
self.assertEqual(remove_quotes("'"), "'")
self.assertEqual(remove_quotes(';'), ';')
self.assertEqual(remove_quotes('";'), '";')
self.assertEqual(remove_quotes('""'), '')
self.assertEqual(remove_quotes('";"'), ';')
def test_ordered_set(self):
self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(orderedSet([]), [])
self.assertEqual(orderedSet([1]), [1])
# keep the list ordered
self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
def test_unescape_html(self):
self.assertEqual(unescapeHTML('%20;'), '%20;')
        self.assertEqual(unescapeHTML('&#x2F;'), '/')
        self.assertEqual(unescapeHTML('&#47;'), '/')
        self.assertEqual(unescapeHTML('&eacute;'), 'é')
        self.assertEqual(unescapeHTML('�'), '�')
        self.assertEqual(unescapeHTML('&a&quot;'), '&a"')
        # HTML5 entities
        self.assertEqual(unescapeHTML('&period;&apos;'), '.\'')
def test_date_from_str(self):
self.assertEqual(date_from_str('yesterday'), date_from_str('now-1day'))
self.assertEqual(date_from_str('now+7day'), date_from_str('now+1week'))
self.assertEqual(date_from_str('now+14day'), date_from_str('now+2week'))
self.assertEqual(date_from_str('20200229+365day'), date_from_str('20200229+1year'))
self.assertEqual(date_from_str('20210131+28day'), date_from_str('20210131+1month'))
def test_datetime_from_str(self):
self.assertEqual(datetime_from_str('yesterday', precision='day'), datetime_from_str('now-1day', precision='auto'))
self.assertEqual(datetime_from_str('now+7day', precision='day'), datetime_from_str('now+1week', precision='auto'))
self.assertEqual(datetime_from_str('now+14day', precision='day'), datetime_from_str('now+2week', precision='auto'))
self.assertEqual(datetime_from_str('20200229+365day', precision='day'), datetime_from_str('20200229+1year', precision='auto'))
self.assertEqual(datetime_from_str('20210131+28day', precision='day'), datetime_from_str('20210131+1month', precision='auto'))
self.assertEqual(datetime_from_str('20210131+59day', precision='day'), datetime_from_str('20210131+2month', precision='auto'))
self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
def test_daterange(self):
_20century = DateRange("19000101", "20000101")
self.assertFalse("17890714" in _20century)
_ac = DateRange("00010101")
self.assertTrue("19690721" in _ac)
_firstmilenium = DateRange(end="10000101")
self.assertTrue("07110427" in _firstmilenium)
def test_unified_dates(self):
self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
self.assertEqual(unified_strdate('8/7/2009'), '20090708')
self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
self.assertEqual(unified_strdate('1968 12 10'), '19681210')
self.assertEqual(unified_strdate('1968-12-10'), '19681210')
self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
self.assertEqual(
unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
'20141126')
self.assertEqual(
unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
'20150202')
self.assertEqual(unified_strdate('Feb 14th 2016 5:45PM'), '20160214')
self.assertEqual(unified_strdate('25-09-2014'), '20140925')
self.assertEqual(unified_strdate('27.02.2016 17:30'), '20160227')
self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
self.assertEqual(unified_strdate('Feb 7, 2016 at 6:35 pm'), '20160207')
self.assertEqual(unified_strdate('July 15th, 2013'), '20130715')
self.assertEqual(unified_strdate('September 1st, 2013'), '20130901')
self.assertEqual(unified_strdate('Sep 2nd, 2013'), '20130902')
self.assertEqual(unified_strdate('November 3rd, 2019'), '20191103')
self.assertEqual(unified_strdate('October 23rd, 2005'), '20051023')
def test_unified_timestamps(self):
self.assertEqual(unified_timestamp('December 21, 2010'), 1292889600)
self.assertEqual(unified_timestamp('8/7/2009'), 1247011200)
self.assertEqual(unified_timestamp('Dec 14, 2012'), 1355443200)
self.assertEqual(unified_timestamp('2012/10/11 01:56:38 +0000'), 1349920598)
self.assertEqual(unified_timestamp('1968 12 10'), -33436800)
self.assertEqual(unified_timestamp('1968-12-10'), -33436800)
self.assertEqual(unified_timestamp('28/01/2014 21:00:00 +0100'), 1390939200)
self.assertEqual(
unified_timestamp('11/26/2014 11:30:00 AM PST', day_first=False),
1417001400)
self.assertEqual(
unified_timestamp('2/2/2015 6:47:40 PM', day_first=False),
1422902860)
self.assertEqual(unified_timestamp('Feb 14th 2016 5:45PM'), 1455471900)
self.assertEqual(unified_timestamp('25-09-2014'), 1411603200)
self.assertEqual(unified_timestamp('27.02.2016 17:30'), 1456594200)
self.assertEqual(unified_timestamp('UNKNOWN DATE FORMAT'), None)
self.assertEqual(unified_timestamp('May 16, 2016 11:15 PM'), 1463440500)
self.assertEqual(unified_timestamp('Feb 7, 2016 at 6:35 pm'), 1454870100)
self.assertEqual(unified_timestamp('2017-03-30T17:52:41Q'), 1490896361)
self.assertEqual(unified_timestamp('Sep 11, 2013 | 5:49 AM'), 1378878540)
self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
def test_determine_ext(self):
self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')
self.assertEqual(determine_ext('foobar', None), None)
def test_find_xpath_attr(self):
testxml = '''<root>
<node/>
<node x="a"/>
<node x="a" y="c" />
<node x="b" y="d" />
<node x="" />
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])
def test_xpath_with_ns(self):
testxml = '''<root xmlns:media="http://example.com/">
<media:song>
<media:author>The Author</media:author>
<url>http://server.com/download.mp3</url>
</media:song>
</root>'''
doc = compat_etree_fromstring(testxml)
find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
self.assertTrue(find('media:song') is not None)
self.assertEqual(find('media:song/media:author').text, 'The Author')
self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
def test_xpath_element(self):
doc = xml.etree.ElementTree.Element('root')
div = xml.etree.ElementTree.SubElement(doc, 'div')
p = xml.etree.ElementTree.SubElement(div, 'p')
p.text = 'Foo'
self.assertEqual(xpath_element(doc, 'div/p'), p)
self.assertEqual(xpath_element(doc, ['div/p']), p)
self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p)
self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default')
self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default')
self.assertTrue(xpath_element(doc, 'div/bar') is None)
self.assertTrue(xpath_element(doc, ['div/bar']) is None)
self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None)
self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True)
def test_xpath_text(self):
testxml = '''<root>
<div>
<p>Foo</p>
</div>
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default')
self.assertTrue(xpath_text(doc, 'div/bar') is None)
self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)
def test_xpath_attr(self):
testxml = '''<root>
<div>
<p x="a">Foo</p>
</div>
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a')
self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None)
self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None)
self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default')
self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default')
self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True)
self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True)
def test_smuggle_url(self):
data = {"ö": "ö", "abc": [3]}
url = 'https://foo.bar/baz?x=y#a'
smug_url = smuggle_url(url, data)
unsmug_url, unsmug_data = unsmuggle_url(smug_url)
self.assertEqual(url, unsmug_url)
self.assertEqual(data, unsmug_data)
res_url, res_data = unsmuggle_url(url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, None)
smug_url = smuggle_url(url, {'a': 'b'})
smug_smug_url = smuggle_url(smug_url, {'c': 'd'})
res_url, res_data = unsmuggle_url(smug_smug_url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, {'a': 'b', 'c': 'd'})
def test_shell_quote(self):
args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
self.assertEqual(
shell_quote(args),
"""ffmpeg -i 'ñ€ß'"'"'.mp4'""" if compat_os_name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')
def test_float_or_none(self):
self.assertEqual(float_or_none('42.42'), 42.42)
self.assertEqual(float_or_none('42'), 42.0)
self.assertEqual(float_or_none(''), None)
self.assertEqual(float_or_none(None), None)
self.assertEqual(float_or_none([]), None)
self.assertEqual(float_or_none(set()), None)
def test_int_or_none(self):
self.assertEqual(int_or_none('42'), 42)
self.assertEqual(int_or_none(''), None)
self.assertEqual(int_or_none(None), None)
self.assertEqual(int_or_none([]), None)
self.assertEqual(int_or_none(set()), None)
def test_str_to_int(self):
self.assertEqual(str_to_int('123,456'), 123456)
self.assertEqual(str_to_int('123.456'), 123456)
self.assertEqual(str_to_int(523), 523)
self.assertEqual(str_to_int('noninteger'), None)
self.assertEqual(str_to_int([]), None)
def test_url_basename(self):
self.assertEqual(url_basename('http://foo.de/'), '')
self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
self.assertEqual(
url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
'trailer.mp4')
def test_base_url(self):
self.assertEqual(base_url('http://foo.de/'), 'http://foo.de/')
self.assertEqual(base_url('http://foo.de/bar'), 'http://foo.de/')
self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
def test_urljoin(self):
self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(b'http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(b'http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('//foo.de/', '/a/b/c.txt'), '//foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
self.assertEqual(urljoin(None, 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(None, '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
self.assertEqual(urljoin('', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(['foobar'], 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', None), None)
self.assertEqual(urljoin('http://foo.de/', ''), None)
self.assertEqual(urljoin('http://foo.de/', ['foobar']), None)
self.assertEqual(urljoin('http://foo.de/a/b/c.txt', '.././../d.txt'), 'http://foo.de/d.txt')
self.assertEqual(urljoin('http://foo.de/a/b/c.txt', 'rtmp://foo.de'), 'rtmp://foo.de')
self.assertEqual(urljoin(None, 'rtmp://foo.de'), 'rtmp://foo.de')
def test_url_or_none(self):
self.assertEqual(url_or_none(None), None)
self.assertEqual(url_or_none(''), None)
self.assertEqual(url_or_none('foo'), None)
self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
self.assertEqual(url_or_none('https://foo.de'), 'https://foo.de')
self.assertEqual(url_or_none('http$://foo.de'), None)
self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
self.assertEqual(url_or_none('//foo.de'), '//foo.de')
self.assertEqual(url_or_none('s3://foo.de'), None)
self.assertEqual(url_or_none('rtmpte://foo.de'), 'rtmpte://foo.de')
self.assertEqual(url_or_none('mms://foo.de'), 'mms://foo.de')
self.assertEqual(url_or_none('rtspu://foo.de'), 'rtspu://foo.de')
self.assertEqual(url_or_none('ftps://foo.de'), 'ftps://foo.de')
def test_parse_age_limit(self):
self.assertEqual(parse_age_limit(None), None)
self.assertEqual(parse_age_limit(False), None)
self.assertEqual(parse_age_limit('invalid'), None)
self.assertEqual(parse_age_limit(0), 0)
self.assertEqual(parse_age_limit(18), 18)
self.assertEqual(parse_age_limit(21), 21)
self.assertEqual(parse_age_limit(22), None)
self.assertEqual(parse_age_limit('18'), 18)
self.assertEqual(parse_age_limit('18+'), 18)
self.assertEqual(parse_age_limit('PG-13'), 13)
self.assertEqual(parse_age_limit('TV-14'), 14)
self.assertEqual(parse_age_limit('TV-MA'), 17)
self.assertEqual(parse_age_limit('TV14'), 14)
self.assertEqual(parse_age_limit('TV_G'), 0)
def test_parse_duration(self):
self.assertEqual(parse_duration(None), None)
self.assertEqual(parse_duration(False), None)
self.assertEqual(parse_duration('invalid'), None)
self.assertEqual(parse_duration('1'), 1)
self.assertEqual(parse_duration('1337:12'), 80232)
self.assertEqual(parse_duration('9:12:43'), 33163)
self.assertEqual(parse_duration('12:00'), 720)
self.assertEqual(parse_duration('00:01:01'), 61)
self.assertEqual(parse_duration('x:y'), None)
self.assertEqual(parse_duration('3h11m53s'), 11513)
self.assertEqual(parse_duration('3h 11m 53s'), 11513)
self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
self.assertEqual(parse_duration('3 hours, 11 minutes, 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours, 11 mins, 53 secs'), 11513)
self.assertEqual(parse_duration('62m45s'), 3765)
self.assertEqual(parse_duration('6m59s'), 419)
self.assertEqual(parse_duration('49s'), 49)
self.assertEqual(parse_duration('0h0m0s'), 0)
self.assertEqual(parse_duration('0m0s'), 0)
self.assertEqual(parse_duration('0s'), 0)
self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
self.assertEqual(parse_duration('T30M38S'), 1838)
self.assertEqual(parse_duration('5 s'), 5)
self.assertEqual(parse_duration('3 min'), 180)
self.assertEqual(parse_duration('2.5 hours'), 9000)
self.assertEqual(parse_duration('02:03:04'), 7384)
self.assertEqual(parse_duration('01:02:03:04'), 93784)
self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
self.assertEqual(parse_duration('87 Min.'), 5220)
self.assertEqual(parse_duration('PT1H0.040S'), 3600.04)
self.assertEqual(parse_duration('PT00H03M30SZ'), 210)
self.assertEqual(parse_duration('P0Y0M0DT0H4M20.880S'), 260.88)
self.assertEqual(parse_duration('01:02:03:050'), 3723.05)
self.assertEqual(parse_duration('103:050'), 103.05)
def test_fix_xml_ampersands(self):
        self.assertEqual(
            fix_xml_ampersands('"&x=y&z=a'), '"&amp;x=y&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('"&amp;x=y&wrong;&z=a'),
            '"&amp;x=y&wrong;&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('&amp;&apos;&gt;&lt;&quot;'),
            '&amp;&apos;&gt;&lt;&quot;')
        self.assertEqual(
            fix_xml_ampersands('&#1234;&#x1abC;'), '&#1234;&#x1abC;')
        self.assertEqual(fix_xml_ampersands('&#&#'), '&amp;#&amp;#')
def test_paged_list(self):
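        # testPL builds a paged list of `size` integers with `pagesize` items per page and
        # checks getslice(*sliceargs) against `expected` for both paged-list implementations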
def testPL(size, pagesize, sliceargs, expected):
def get_page(pagenum):
firstid = pagenum * pagesize
upto = min(size, pagenum * pagesize + pagesize)
yield from range(firstid, upto)
pl = OnDemandPagedList(get_page, pagesize)
got = pl.getslice(*sliceargs)
self.assertEqual(got, expected)
iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
got = iapl.getslice(*sliceargs)
self.assertEqual(got, expected)
testPL(5, 2, (), [0, 1, 2, 3, 4])
testPL(5, 2, (1,), [1, 2, 3, 4])
testPL(5, 2, (2,), [2, 3, 4])
testPL(5, 2, (4,), [4])
testPL(5, 2, (0, 3), [0, 1, 2])
testPL(5, 2, (1, 4), [1, 2, 3])
testPL(5, 2, (2, 99), [2, 3, 4])
testPL(5, 2, (20, 99), [])
def test_read_batch_urls(self):
f = io.StringIO('''\xef\xbb\xbf foo
bar\r
baz
# More after this line\r
; or after this
bam''')
self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])
def test_urlencode_postdata(self):
data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
self.assertTrue(isinstance(data, bytes))
def test_update_url_query(self):
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'quality': ['HD'], 'format': ['mp4']})),
parse_qs('http://example.com/path?quality=HD&format=mp4'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'system': ['LINUX', 'WINDOWS']})),
parse_qs('http://example.com/path?system=LINUX&system=WINDOWS'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'fields': 'id,formats,subtitles'})),
parse_qs('http://example.com/path?fields=id,formats,subtitles'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'fields': ('id,formats,subtitles', 'thumbnails')})),
parse_qs('http://example.com/path?fields=id,formats,subtitles&fields=thumbnails'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path?manifest=f4m', {'manifest': []})),
parse_qs('http://example.com/path'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path?system=LINUX&system=WINDOWS', {'system': 'LINUX'})),
parse_qs('http://example.com/path?system=LINUX'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'fields': b'id,formats,subtitles'})),
parse_qs('http://example.com/path?fields=id,formats,subtitles'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'width': 1080, 'height': 720})),
parse_qs('http://example.com/path?width=1080&height=720'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'bitrate': 5020.43})),
parse_qs('http://example.com/path?bitrate=5020.43'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'test': '第二行тест'})),
parse_qs('http://example.com/path?test=%E7%AC%AC%E4%BA%8C%E8%A1%8C%D1%82%D0%B5%D1%81%D1%82'))
def test_multipart_encode(self):
self.assertEqual(
multipart_encode({b'field': b'value'}, boundary='AAAAAA')[0],
b'--AAAAAA\r\nContent-Disposition: form-data; name="field"\r\n\r\nvalue\r\n--AAAAAA--\r\n')
self.assertEqual(
multipart_encode({'欄位'.encode(): '值'.encode()}, boundary='AAAAAA')[0],
b'--AAAAAA\r\nContent-Disposition: form-data; name="\xe6\xac\x84\xe4\xbd\x8d"\r\n\r\n\xe5\x80\xbc\r\n--AAAAAA--\r\n')
self.assertRaises(
ValueError, multipart_encode, {b'field': b'value'}, boundary='value')
def test_dict_get(self):
FALSE_VALUES = {
'none': None,
'false': False,
'zero': 0,
'empty_string': '',
'empty_list': [],
}
d = FALSE_VALUES.copy()
d['a'] = 42
self.assertEqual(dict_get(d, 'a'), 42)
self.assertEqual(dict_get(d, 'b'), None)
self.assertEqual(dict_get(d, 'b', 42), 42)
self.assertEqual(dict_get(d, ('a', )), 42)
self.assertEqual(dict_get(d, ('b', 'a', )), 42)
self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
self.assertEqual(dict_get(d, ('b', 'c', )), None)
self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
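        # Falsy values ('', 0, [], None, False) are skipped unless skip_false_values=False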
for key, false_value in FALSE_VALUES.items():
self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)
def test_merge_dicts(self):
self.assertEqual(merge_dicts({'a': 1}, {'b': 2}), {'a': 1, 'b': 2})
self.assertEqual(merge_dicts({'a': 1}, {'a': 2}), {'a': 1})
self.assertEqual(merge_dicts({'a': 1}, {'a': None}), {'a': 1})
self.assertEqual(merge_dicts({'a': 1}, {'a': ''}), {'a': 1})
self.assertEqual(merge_dicts({'a': 1}, {}), {'a': 1})
self.assertEqual(merge_dicts({'a': None}, {'a': 1}), {'a': 1})
self.assertEqual(merge_dicts({'a': ''}, {'a': 1}), {'a': ''})
self.assertEqual(merge_dicts({'a': ''}, {'a': 'abc'}), {'a': 'abc'})
self.assertEqual(merge_dicts({'a': None}, {'a': ''}, {'a': 'abc'}), {'a': 'abc'})
def test_encode_compat_str(self):
self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест')
self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест')
def test_parse_iso8601(self):
self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251)
self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None)
def test_strip_jsonp(self):
stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
d = json.loads(stripped)
self.assertEqual(d, [{"id": "532cb", "x": 3}])
stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
d = json.loads(stripped)
self.assertEqual(d, {'STATUS': 'OK'})
stripped = strip_jsonp('ps.embedHandler({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
stripped = strip_jsonp('window.cb && window.cb({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
stripped = strip_jsonp('window.cb && cb({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
stripped = strip_jsonp('({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
def test_strip_or_none(self):
self.assertEqual(strip_or_none(' abc'), 'abc')
self.assertEqual(strip_or_none('abc '), 'abc')
self.assertEqual(strip_or_none(' abc '), 'abc')
self.assertEqual(strip_or_none('\tabc\t'), 'abc')
self.assertEqual(strip_or_none('\n\tabc\n\t'), 'abc')
self.assertEqual(strip_or_none('abc'), 'abc')
self.assertEqual(strip_or_none(''), '')
self.assertEqual(strip_or_none(None), None)
self.assertEqual(strip_or_none(42), None)
self.assertEqual(strip_or_none([]), None)
def test_uppercase_escape(self):
self.assertEqual(uppercase_escape('aä'), 'aä')
self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')
def test_lowercase_escape(self):
self.assertEqual(lowercase_escape('aä'), 'aä')
self.assertEqual(lowercase_escape('\\u0026'), '&')
def test_limit_length(self):
self.assertEqual(limit_length(None, 12), None)
self.assertEqual(limit_length('foo', 12), 'foo')
self.assertTrue(
limit_length('foo bar baz asd', 12).startswith('foo bar'))
self.assertTrue('...' in limit_length('foo bar baz asd', 12))
def test_mimetype2ext(self):
self.assertEqual(mimetype2ext(None), None)
self.assertEqual(mimetype2ext('video/x-flv'), 'flv')
self.assertEqual(mimetype2ext('application/x-mpegURL'), 'm3u8')
self.assertEqual(mimetype2ext('text/vtt'), 'vtt')
self.assertEqual(mimetype2ext('text/vtt;charset=utf-8'), 'vtt')
self.assertEqual(mimetype2ext('text/html; charset=utf-8'), 'html')
self.assertEqual(mimetype2ext('audio/x-wav'), 'wav')
self.assertEqual(mimetype2ext('audio/x-wav;codec=pcm'), 'wav')
def test_month_by_name(self):
self.assertEqual(month_by_name(None), None)
self.assertEqual(month_by_name('December', 'en'), 12)
self.assertEqual(month_by_name('décembre', 'fr'), 12)
self.assertEqual(month_by_name('December'), 12)
self.assertEqual(month_by_name('décembre'), None)
self.assertEqual(month_by_name('Unknown', 'unknown'), None)
def test_parse_codecs(self):
self.assertEqual(parse_codecs(''), {})
self.assertEqual(parse_codecs('avc1.77.30, mp4a.40.2'), {
'vcodec': 'avc1.77.30',
'acodec': 'mp4a.40.2',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('mp4a.40.2'), {
'vcodec': 'none',
'acodec': 'mp4a.40.2',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('mp4a.40.5,avc1.42001e'), {
'vcodec': 'avc1.42001e',
'acodec': 'mp4a.40.5',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('avc3.640028'), {
'vcodec': 'avc3.640028',
'acodec': 'none',
'dynamic_range': None,
})
self.assertEqual(parse_codecs(', h264,,newcodec,aac'), {
'vcodec': 'h264',
'acodec': 'aac',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('av01.0.05M.08'), {
'vcodec': 'av01.0.05M.08',
'acodec': 'none',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('vp9.2'), {
'vcodec': 'vp9.2',
'acodec': 'none',
'dynamic_range': 'HDR10',
})
self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
'vcodec': 'av01.0.12M.10',
'acodec': 'none',
'dynamic_range': 'HDR10',
})
self.assertEqual(parse_codecs('dvhe'), {
'vcodec': 'dvhe',
'acodec': 'none',
'dynamic_range': 'DV',
})
self.assertEqual(parse_codecs('theora, vorbis'), {
'vcodec': 'theora',
'acodec': 'vorbis',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('unknownvcodec, unknownacodec'), {
'vcodec': 'unknownvcodec',
'acodec': 'unknownacodec',
})
self.assertEqual(parse_codecs('unknown'), {})
def test_escape_rfc3986(self):
reserved = "!*'();:@&=+$,/?#[]"
unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
self.assertEqual(escape_rfc3986(reserved), reserved)
self.assertEqual(escape_rfc3986(unreserved), unreserved)
self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
def test_escape_url(self):
self.assertEqual(
escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
)
self.assertEqual(
escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
)
self.assertEqual(
escape_url('http://тест.рф/фрагмент'),
'http://xn--e1aybc.xn--p1ai/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
)
self.assertEqual(
escape_url('http://тест.рф/абв?абв=абв#абв'),
'http://xn--e1aybc.xn--p1ai/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
)
self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
def test_js_to_json_realworld(self):
inp = '''{
'clip':{'provider':'pseudo'}
}'''
self.assertEqual(js_to_json(inp), '''{
"clip":{"provider":"pseudo"}
}''')
json.loads(js_to_json(inp))
inp = '''{
'playlist':[{'controls':{'all':null}}]
}'''
self.assertEqual(js_to_json(inp), '''{
"playlist":[{"controls":{"all":null}}]
}''')
inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"'''
self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''')
inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
json_code = js_to_json(inp)
self.assertEqual(json.loads(json_code), json.loads(inp))
inp = '''{
0:{src:'skipped', type: 'application/dash+xml'},
1:{src:'skipped', type: 'application/vnd.apple.mpegURL'},
}'''
self.assertEqual(js_to_json(inp), '''{
"0":{"src":"skipped", "type": "application/dash+xml"},
"1":{"src":"skipped", "type": "application/vnd.apple.mpegURL"}
}''')
inp = '''{"foo":101}'''
self.assertEqual(js_to_json(inp), '''{"foo":101}''')
inp = '''{"duration": "00:01:07"}'''
self.assertEqual(js_to_json(inp), '''{"duration": "00:01:07"}''')
inp = '''{segments: [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}'''
self.assertEqual(js_to_json(inp), '''{"segments": [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}''')
def test_js_to_json_edgecases(self):
on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})
on = js_to_json('{"abc": true}')
self.assertEqual(json.loads(on), {'abc': True})
# Ignore JavaScript code as well
on = js_to_json('''{
"x": 1,
y: "a",
z: some.code
}''')
d = json.loads(on)
self.assertEqual(d['x'], 1)
self.assertEqual(d['y'], 'a')
# Just drop ! prefix for now though this results in a wrong value
on = js_to_json('''{
a: !0,
b: !1,
c: !!0,
d: !!42.42,
e: !!![],
f: !"abc",
g: !"",
!42: 42
}''')
self.assertEqual(json.loads(on), {
'a': 0,
'b': 1,
'c': 0,
'd': 42.42,
'e': [],
'f': "abc",
'g': "",
'42': 42
})
on = js_to_json('["abc", "def",]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('[/*comment\n*/"abc"/*comment\n*/,/*comment\n*/"def",/*comment\n*/]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('[//comment\n"abc" //comment\n,//comment\n"def",//comment\n]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('{"abc": "def",}')
self.assertEqual(json.loads(on), {'abc': 'def'})
on = js_to_json('{/*comment\n*/"abc"/*comment\n*/:/*comment\n*/"def"/*comment\n*/,/*comment\n*/}')
self.assertEqual(json.loads(on), {'abc': 'def'})
on = js_to_json('{ 0: /* " \n */ ",]" , }')
self.assertEqual(json.loads(on), {'0': ',]'})
on = js_to_json('{ /*comment\n*/0/*comment\n*/: /* " \n */ ",]" , }')
self.assertEqual(json.loads(on), {'0': ',]'})
on = js_to_json('{ 0: // comment\n1 }')
self.assertEqual(json.loads(on), {'0': 1})
on = js_to_json(r'["<p>x<\/p>"]')
self.assertEqual(json.loads(on), ['<p>x</p>'])
on = js_to_json(r'["\xaa"]')
self.assertEqual(json.loads(on), ['\u00aa'])
on = js_to_json("['a\\\nb']")
self.assertEqual(json.loads(on), ['ab'])
on = js_to_json("/*comment\n*/[/*comment\n*/'a\\\nb'/*comment\n*/]/*comment\n*/")
self.assertEqual(json.loads(on), ['ab'])
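        # Hexadecimal and octal integer literals are converted to decimal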
on = js_to_json('{0xff:0xff}')
self.assertEqual(json.loads(on), {'255': 255})
on = js_to_json('{/*comment\n*/0xff/*comment\n*/:/*comment\n*/0xff/*comment\n*/}')
self.assertEqual(json.loads(on), {'255': 255})
on = js_to_json('{077:077}')
self.assertEqual(json.loads(on), {'63': 63})
on = js_to_json('{/*comment\n*/077/*comment\n*/:/*comment\n*/077/*comment\n*/}')
self.assertEqual(json.loads(on), {'63': 63})
on = js_to_json('{42:42}')
self.assertEqual(json.loads(on), {'42': 42})
on = js_to_json('{/*comment\n*/42/*comment\n*/:/*comment\n*/42/*comment\n*/}')
self.assertEqual(json.loads(on), {'42': 42})
on = js_to_json('{42:4.2e1}')
self.assertEqual(json.loads(on), {'42': 42.0})
on = js_to_json('{ "0x40": "0x40" }')
self.assertEqual(json.loads(on), {'0x40': '0x40'})
on = js_to_json('{ "040": "040" }')
self.assertEqual(json.loads(on), {'040': '040'})
on = js_to_json('[1,//{},\n2]')
self.assertEqual(json.loads(on), [1, 2])
def test_js_to_json_malformed(self):
self.assertEqual(js_to_json('42a1'), '42"a1"')
self.assertEqual(js_to_json('42a-1'), '42"a"-1')
def test_extract_attributes(self):
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
self.assertEqual(extract_attributes("<e x='y'>"), {'x': 'y'})
self.assertEqual(extract_attributes('<e x=y>'), {'x': 'y'})
self.assertEqual(extract_attributes('<e x="a \'b\' c">'), {'x': "a 'b' c"})
self.assertEqual(extract_attributes('<e x=\'a "b" c\'>'), {'x': 'a "b" c'})
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
self.assertEqual(extract_attributes('<e x="&">'), {'x': '&'}) # XML
self.assertEqual(extract_attributes('<e x=""">'), {'x': '"'})
self.assertEqual(extract_attributes('<e x="£">'), {'x': '£'}) # HTML 3.2
self.assertEqual(extract_attributes('<e x="λ">'), {'x': 'λ'}) # HTML 4.0
self.assertEqual(extract_attributes('<e x="&foo">'), {'x': '&foo'})
self.assertEqual(extract_attributes('<e x="\'">'), {'x': "'"})
self.assertEqual(extract_attributes('<e x=\'"\'>'), {'x': '"'})
self.assertEqual(extract_attributes('<e x >'), {'x': None})
self.assertEqual(extract_attributes('<e x=y a>'), {'x': 'y', 'a': None})
self.assertEqual(extract_attributes('<e x= y>'), {'x': 'y'})
self.assertEqual(extract_attributes('<e x=1 y=2 x=3>'), {'y': '2', 'x': '3'})
self.assertEqual(extract_attributes('<e \nx=\ny\n>'), {'x': 'y'})
self.assertEqual(extract_attributes('<e \nx=\n"y"\n>'), {'x': 'y'})
self.assertEqual(extract_attributes("<e \nx=\n'y'\n>"), {'x': 'y'})
self.assertEqual(extract_attributes('<e \nx="\ny\n">'), {'x': '\ny\n'})
self.assertEqual(extract_attributes('<e CAPS=x>'), {'caps': 'x'}) # Names lowercased
self.assertEqual(extract_attributes('<e x=1 X=2>'), {'x': '2'})
self.assertEqual(extract_attributes('<e X=1 x=2>'), {'x': '2'})
self.assertEqual(extract_attributes('<e _:funny-name1=1>'), {'_:funny-name1': '1'})
self.assertEqual(extract_attributes('<e x="Fáilte 世界 \U0001f600">'), {'x': 'Fáilte 世界 \U0001f600'})
self.assertEqual(extract_attributes('<e x="décomposé">'), {'x': 'décompose\u0301'})
# "Narrow" Python builds don't support unicode code points outside BMP.
try:
compat_chr(0x10000)
supports_outside_bmp = True
except ValueError:
supports_outside_bmp = False
if supports_outside_bmp:
self.assertEqual(extract_attributes('<e x="Smile 😀!">'), {'x': 'Smile \U0001f600!'})
# Malformed HTML should not break attributes extraction on older Python
self.assertEqual(extract_attributes('<mal"formed/>'), {})
def test_clean_html(self):
self.assertEqual(clean_html('a:\nb'), 'a: b')
self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
self.assertEqual(clean_html('a<br>\xa0b'), 'a\nb')
def test_intlist_to_bytes(self):
self.assertEqual(
intlist_to_bytes([0, 1, 127, 128, 255]),
b'\x00\x01\x7f\x80\xff')
def test_args_to_str(self):
self.assertEqual(
args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
'foo ba/r -baz \'2 be\' \'\'' if compat_os_name != 'nt' else 'foo ba/r -baz "2 be" ""'
)
def test_parse_filesize(self):
self.assertEqual(parse_filesize(None), None)
self.assertEqual(parse_filesize(''), None)
self.assertEqual(parse_filesize('91 B'), 91)
self.assertEqual(parse_filesize('foobar'), None)
self.assertEqual(parse_filesize('2 MiB'), 2097152)
self.assertEqual(parse_filesize('5 GB'), 5000000000)
self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
self.assertEqual(parse_filesize('1.2tb'), 1200000000000)
self.assertEqual(parse_filesize('1,24 KB'), 1240)
self.assertEqual(parse_filesize('1,24 kb'), 1240)
self.assertEqual(parse_filesize('8.5 megabytes'), 8500000)
def test_parse_count(self):
self.assertEqual(parse_count(None), None)
self.assertEqual(parse_count(''), None)
self.assertEqual(parse_count('0'), 0)
self.assertEqual(parse_count('1000'), 1000)
self.assertEqual(parse_count('1.000'), 1000)
self.assertEqual(parse_count('1.1k'), 1100)
self.assertEqual(parse_count('1.1 k'), 1100)
self.assertEqual(parse_count('1,1 k'), 1100)
self.assertEqual(parse_count('1.1kk'), 1100000)
self.assertEqual(parse_count('1.1kk '), 1100000)
self.assertEqual(parse_count('1,1kk'), 1100000)
self.assertEqual(parse_count('100 views'), 100)
self.assertEqual(parse_count('1,100 views'), 1100)
self.assertEqual(parse_count('1.1kk views'), 1100000)
self.assertEqual(parse_count('10M views'), 10000000)
self.assertEqual(parse_count('has 10M views'), 10000000)
def test_parse_resolution(self):
self.assertEqual(parse_resolution(None), {})
self.assertEqual(parse_resolution(''), {})
self.assertEqual(parse_resolution(' 1920x1080'), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('1920×1080 '), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('1920 x 1080'), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('720p'), {'height': 720})
self.assertEqual(parse_resolution('4k'), {'height': 2160})
self.assertEqual(parse_resolution('8K'), {'height': 4320})
self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('ep1x2'), {})
self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
def test_parse_bitrate(self):
self.assertEqual(parse_bitrate(None), None)
self.assertEqual(parse_bitrate(''), None)
self.assertEqual(parse_bitrate('300kbps'), 300)
self.assertEqual(parse_bitrate('1500kbps'), 1500)
self.assertEqual(parse_bitrate('300 kbps'), 300)
def test_version_tuple(self):
self.assertEqual(version_tuple('1'), (1,))
self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style
def test_detect_exe_version(self):
self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
configuration: --prefix=/usr --extra-'''), '1.2.1')
self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
Trying to open render node...
Success at /dev/dri/renderD128.
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
def test_age_restricted(self):
self.assertFalse(age_restricted(None, 10)) # unrestricted content
self.assertFalse(age_restricted(1, None)) # unrestricted policy
self.assertFalse(age_restricted(8, 10))
self.assertTrue(age_restricted(18, 14))
self.assertFalse(age_restricted(18, 18))
def test_is_html(self):
self.assertFalse(is_html(b'\x49\x44\x43<html'))
self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
self.assertTrue(is_html( # UTF-8 with BOM
b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
self.assertTrue(is_html( # UTF-16-LE
b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
))
self.assertTrue(is_html( # UTF-16-BE
b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
))
self.assertTrue(is_html( # UTF-32-BE
b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
self.assertTrue(is_html( # UTF-32-LE
b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
def test_render_table(self):
self.assertEqual(
render_table(
['a', 'empty', 'bcd'],
[[123, '', 4], [9999, '', 51]]),
'a empty bcd\n'
'123 4\n'
'9999 51')
self.assertEqual(
render_table(
['a', 'empty', 'bcd'],
[[123, '', 4], [9999, '', 51]],
hide_empty=True),
'a bcd\n'
'123 4\n'
'9999 51')
self.assertEqual(
render_table(
['\ta', 'bcd'],
[['1\t23', 4], ['\t9999', 51]]),
' a bcd\n'
'1 23 4\n'
'9999 51')
self.assertEqual(
render_table(
['a', 'bcd'],
[[123, 4], [9999, 51]],
delim='-'),
'a bcd\n'
'--------\n'
'123 4\n'
'9999 51')
self.assertEqual(
render_table(
['a', 'bcd'],
[[123, 4], [9999, 51]],
delim='-', extra_gap=2),
'a bcd\n'
'----------\n'
'123 4\n'
'9999 51')
def test_match_str(self):
# Unary
self.assertFalse(match_str('xy', {'x': 1200}))
self.assertTrue(match_str('!xy', {'x': 1200}))
self.assertTrue(match_str('x', {'x': 1200}))
self.assertFalse(match_str('!x', {'x': 1200}))
self.assertTrue(match_str('x', {'x': 0}))
self.assertTrue(match_str('is_live', {'is_live': True}))
self.assertFalse(match_str('is_live', {'is_live': False}))
self.assertFalse(match_str('is_live', {'is_live': None}))
self.assertFalse(match_str('is_live', {}))
self.assertFalse(match_str('!is_live', {'is_live': True}))
self.assertTrue(match_str('!is_live', {'is_live': False}))
self.assertTrue(match_str('!is_live', {'is_live': None}))
self.assertTrue(match_str('!is_live', {}))
self.assertTrue(match_str('title', {'title': 'abc'}))
self.assertTrue(match_str('title', {'title': ''}))
self.assertFalse(match_str('!title', {'title': 'abc'}))
self.assertFalse(match_str('!title', {'title': ''}))
# Numeric
self.assertFalse(match_str('x>0', {'x': 0}))
self.assertFalse(match_str('x>0', {}))
self.assertTrue(match_str('x>?0', {}))
self.assertTrue(match_str('x>1K', {'x': 1200}))
self.assertFalse(match_str('x>2K', {'x': 1200}))
self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
self.assertTrue(match_str('x > 1:0:0', {'x': 3700}))
# String
self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
self.assertTrue(match_str('y^=foo', {'y': 'foobar42'}))
self.assertFalse(match_str('y!^=foo', {'y': 'foobar42'}))
self.assertFalse(match_str('y^=bar', {'y': 'foobar42'}))
self.assertTrue(match_str('y!^=bar', {'y': 'foobar42'}))
self.assertRaises(ValueError, match_str, 'x^=42', {'x': 42})
self.assertTrue(match_str('y*=bar', {'y': 'foobar42'}))
self.assertFalse(match_str('y!*=bar', {'y': 'foobar42'}))
self.assertFalse(match_str('y*=baz', {'y': 'foobar42'}))
self.assertTrue(match_str('y!*=baz', {'y': 'foobar42'}))
self.assertTrue(match_str('y$=42', {'y': 'foobar42'}))
self.assertFalse(match_str('y$=43', {'y': 'foobar42'}))
# And
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 90, 'description': 'foo'}))
self.assertTrue(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'description': 'foo'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'dislike_count': 10}))
# Regex
self.assertTrue(match_str(r'x~=\bbar', {'x': 'foo bar'}))
self.assertFalse(match_str(r'x~=\bbar.+', {'x': 'foo bar'}))
self.assertFalse(match_str(r'x~=^FOO', {'x': 'foo bar'}))
self.assertTrue(match_str(r'x~=(?i)^FOO', {'x': 'foo bar'}))
# Quotes
self.assertTrue(match_str(r'x^="foo"', {'x': 'foo "bar"'}))
self.assertFalse(match_str(r'x^="foo "', {'x': 'foo "bar"'}))
self.assertFalse(match_str(r'x$="bar"', {'x': 'foo "bar"'}))
self.assertTrue(match_str(r'x$=" \"bar\""', {'x': 'foo "bar"'}))
# Escaping &
self.assertFalse(match_str(r'x=foo & bar', {'x': 'foo & bar'}))
self.assertTrue(match_str(r'x=foo \& bar', {'x': 'foo & bar'}))
self.assertTrue(match_str(r'x=foo \& bar & x^=foo', {'x': 'foo & bar'}))
self.assertTrue(match_str(r'x="foo \& bar" & x^=foo', {'x': 'foo & bar'}))
# Example from docs
self.assertTrue(match_str(
r"!is_live & like_count>?100 & description~='(?i)\bcats \& dogs\b'",
{'description': 'Raining Cats & Dogs'}))
# Incomplete
self.assertFalse(match_str('id!=foo', {'id': 'foo'}, True))
self.assertTrue(match_str('x', {'id': 'foo'}, True))
self.assertTrue(match_str('!x', {'id': 'foo'}, True))
self.assertFalse(match_str('x', {'id': 'foo'}, False))
def test_parse_dfxp_time_expr(self):
self.assertEqual(parse_dfxp_time_expr(None), None)
self.assertEqual(parse_dfxp_time_expr(''), None)
self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1)
self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0)
self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01:100'), 1.1)
def test_dfxp2srt(self):
dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
<p begin="1" end="2">第二行<br/>♪♪</p>
<p begin="2" dur="1"><span>Third<br/>Line</span></p>
<p begin="3" end="-1">Lines with invalid timestamps are ignored</p>
<p begin="-1" end="-1">Ignore, two</p>
<p begin="3" dur="-1">Ignored, three</p>
</div>
</body>
</tt>'''.encode()
srt_data = '''1
00:00:00,000 --> 00:00:01,000
The following line contains Chinese characters and special symbols
2
00:00:01,000 --> 00:00:02,000
第二行
♪♪
3
00:00:02,000 --> 00:00:03,000
Third
Line
'''
self.assertEqual(dfxp2srt(dfxp_data), srt_data)
dfxp_data_no_default_namespace = b'''<?xml version="1.0" encoding="UTF-8"?>
<tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The first line</p>
</div>
</body>
</tt>'''
srt_data = '''1
00:00:00,000 --> 00:00:01,000
The first line
'''
self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
dfxp_data_with_style = b'''<?xml version="1.0" encoding="utf-8"?>
<tt xmlns="http://www.w3.org/2006/10/ttaf1" xmlns:ttp="http://www.w3.org/2006/10/ttaf1#parameter" ttp:timeBase="media" xmlns:tts="http://www.w3.org/2006/10/ttaf1#style" xml:lang="en" xmlns:ttm="http://www.w3.org/2006/10/ttaf1#metadata">
<head>
<styling>
<style id="s2" style="s0" tts:color="cyan" tts:fontWeight="bold" />
<style id="s1" style="s0" tts:color="yellow" tts:fontStyle="italic" />
<style id="s3" style="s0" tts:color="lime" tts:textDecoration="underline" />
<style id="s0" tts:backgroundColor="black" tts:fontStyle="normal" tts:fontSize="16" tts:fontFamily="sansSerif" tts:color="white" />
</styling>
</head>
<body tts:textAlign="center" style="s0">
<div>
<p begin="00:00:02.08" id="p0" end="00:00:05.84">default style<span tts:color="red">custom style</span></p>
<p style="s2" begin="00:00:02.08" id="p0" end="00:00:05.84"><span tts:color="lime">part 1<br /></span><span tts:color="cyan">part 2</span></p>
<p style="s3" begin="00:00:05.84" id="p1" end="00:00:09.56">line 3<br />part 3</p>
<p style="s1" tts:textDecoration="underline" begin="00:00:09.56" id="p2" end="00:00:12.36"><span style="s2" tts:color="lime">inner<br /> </span>style</p>
</div>
</body>
</tt>'''
srt_data = '''1
00:00:02,080 --> 00:00:05,840
<font color="white" face="sansSerif" size="16">default style<font color="red">custom style</font></font>
2
00:00:02,080 --> 00:00:05,840
<b><font color="cyan" face="sansSerif" size="16"><font color="lime">part 1
</font>part 2</font></b>
3
00:00:05,840 --> 00:00:09,560
<u><font color="lime">line 3
part 3</font></u>
4
00:00:09,560 --> 00:00:12,360
<i><u><font color="yellow"><font color="lime">inner
</font>style</font></u></i>
'''
self.assertEqual(dfxp2srt(dfxp_data_with_style), srt_data)
dfxp_data_non_utf8 = '''<?xml version="1.0" encoding="UTF-16"?>
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">Line 1</p>
<p begin="1" end="2">第二行</p>
</div>
</body>
</tt>'''.encode('utf-16')
srt_data = '''1
00:00:00,000 --> 00:00:01,000
Line 1
2
00:00:01,000 --> 00:00:02,000
第二行
'''
self.assertEqual(dfxp2srt(dfxp_data_non_utf8), srt_data)
def test_cli_option(self):
self.assertEqual(cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy'), ['--proxy', '127.0.0.1:3128'])
self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), [])
self.assertEqual(cli_option({}, '--proxy', 'proxy'), [])
self.assertEqual(cli_option({'retries': 10}, '--retries', 'retries'), ['--retries', '10'])
def test_cli_valueless_option(self):
self.assertEqual(cli_valueless_option(
{'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader'])
self.assertEqual(cli_valueless_option(
{'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), [])
self.assertEqual(cli_valueless_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate'])
self.assertEqual(cli_valueless_option(
{'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), [])
self.assertEqual(cli_valueless_option(
{'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), [])
self.assertEqual(cli_valueless_option(
{'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate'])
def test_cli_bool_option(self):
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'),
['--no-check-certificate', 'true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='),
['--no-check-certificate=true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
['--check-certificate', 'false'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
['--check-certificate=false'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
['--check-certificate', 'true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
['--check-certificate=true'])
self.assertEqual(
cli_bool_option(
{}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
[])
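    # ohdave_rsa_encrypt encrypts the message bytes with public exponent e and modulus N and returns the ciphertext as a hex string.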
def test_ohdave_rsa_encrypt(self):
N = 0xab86b6371b5318aaa1d3c9e612a9f1264f372323c8c0f19875b5fc3b3fd3afcc1e5bec527aa94bfa85bffc157e4245aebda05389a5357b75115ac94f074aefcd
e = 65537
self.assertEqual(
ohdave_rsa_encrypt(b'aa111222', e, N),
'726664bd9a23fd0c70f9f1b84aab5e3905ce1e45a584e9cbcf9bcc7510338fc1986d6c599ff990d923aa43c51c0d9013cd572e13bc58f4ae48f2ed8c0b0ba881')
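    # pkcs1pad pads to [0, 2, <pseudo-random padding bytes>, 0, *data]; a target length too small for the data raises ValueError.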
def test_pkcs1pad(self):
data = [1, 2, 3]
padded_data = pkcs1pad(data, 32)
self.assertEqual(padded_data[:2], [0, 2])
self.assertEqual(padded_data[28:], [0, 1, 2, 3])
self.assertRaises(ValueError, pkcs1pad, data, 8)
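    # encode_base_n renders an integer in the given base, optionally with a custom digit table; a base exceeding the table length raises ValueError.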
def test_encode_base_n(self):
self.assertEqual(encode_base_n(0, 30), '0')
self.assertEqual(encode_base_n(80, 30), '2k')
custom_table = '9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA'
self.assertEqual(encode_base_n(0, 30, custom_table), '9')
self.assertEqual(encode_base_n(80, 30, custom_table), '7P')
self.assertRaises(ValueError, encode_base_n, 0, 70)
self.assertRaises(ValueError, encode_base_n, 0, 60, custom_table)
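    # caesar shifts each character within the supplied alphabet by the given offset; characters outside the alphabet pass through unchanged.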
def test_caesar(self):
self.assertEqual(caesar('ace', 'abcdef', 2), 'cea')
self.assertEqual(caesar('cea', 'abcdef', -2), 'ace')
self.assertEqual(caesar('ace', 'abcdef', -2), 'eac')
self.assertEqual(caesar('eac', 'abcdef', 2), 'ace')
self.assertEqual(caesar('ace', 'abcdef', 0), 'ace')
self.assertEqual(caesar('xyz', 'abcdef', 2), 'xyz')
self.assertEqual(caesar('abc', 'acegik', 2), 'ebg')
self.assertEqual(caesar('ebg', 'acegik', -2), 'abc')
def test_rot47(self):
self.assertEqual(rot47('yt-dlp'), r'JE\5=A')
self.assertEqual(rot47('YT-DLP'), r'*%\s{!')
def test_urshift(self):
self.assertEqual(urshift(3, 1), 1)
self.assertEqual(urshift(-3, 1), 2147483646)
GET_ELEMENT_BY_CLASS_TEST_STRING = '''
<span class="foo bar">nice</span>
'''
def test_get_element_by_class(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_by_class('foo', html), 'nice')
self.assertEqual(get_element_by_class('no-such-class', html), None)
def test_get_element_html_by_class(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_html_by_class('foo', html), html.strip())
self.assertEqual(get_element_by_class('no-such-class', html), None)
GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING = '''
<div itemprop="author" itemscope>foo</div>
'''
def test_get_element_by_attribute(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_by_attribute('class', 'foo bar', html), 'nice')
self.assertEqual(get_element_by_attribute('class', 'foo', html), None)
self.assertEqual(get_element_by_attribute('class', 'no-such-foo', html), None)
html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
self.assertEqual(get_element_by_attribute('itemprop', 'author', html), 'foo')
def test_get_element_html_by_attribute(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_html_by_attribute('class', 'foo bar', html), html.strip())
self.assertEqual(get_element_html_by_attribute('class', 'foo', html), None)
self.assertEqual(get_element_html_by_attribute('class', 'no-such-foo', html), None)
html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
self.assertEqual(get_element_html_by_attribute('itemprop', 'author', html), html.strip())
GET_ELEMENTS_BY_CLASS_TEST_STRING = '''
<span class="foo bar">nice</span><span class="foo bar">also nice</span>
'''
GET_ELEMENTS_BY_CLASS_RES = ['<span class="foo bar">nice</span>', '<span class="foo bar">also nice</span>']
def test_get_elements_by_class(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_by_class('foo', html), ['nice', 'also nice'])
self.assertEqual(get_elements_by_class('no-such-class', html), [])
def test_get_elements_html_by_class(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_html_by_class('foo', html), self.GET_ELEMENTS_BY_CLASS_RES)
self.assertEqual(get_elements_html_by_class('no-such-class', html), [])
def test_get_elements_by_attribute(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_by_attribute('class', 'foo bar', html), ['nice', 'also nice'])
self.assertEqual(get_elements_by_attribute('class', 'foo', html), [])
self.assertEqual(get_elements_by_attribute('class', 'no-such-foo', html), [])
def test_get_elements_html_by_attribute(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_html_by_attribute('class', 'foo bar', html), self.GET_ELEMENTS_BY_CLASS_RES)
self.assertEqual(get_elements_html_by_attribute('class', 'foo', html), [])
self.assertEqual(get_elements_html_by_attribute('class', 'no-such-foo', html), [])
def test_get_elements_text_and_html_by_attribute(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(
list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
GET_ELEMENT_BY_TAG_TEST_STRING = '''
random text lorem ipsum</p>
<div>
this should be returned
<span>this should also be returned</span>
<div>
this should also be returned
</div>
closing tag above should not trick, so this should also be returned
</div>
but this text should not be returned
'''
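    # The slice offsets below carve the outer <div> element and the inner <span> element out of the stripped test string.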
GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[32:276]
GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT = GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML[5:-6]
GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[78:119]
GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT = GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML[6:-7]
def test_get_element_text_and_html_by_tag(self):
html = self.GET_ELEMENT_BY_TAG_TEST_STRING
self.assertEqual(
get_element_text_and_html_by_tag('div', html),
(self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT, self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML))
self.assertEqual(
get_element_text_and_html_by_tag('span', html),
(self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT, self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML))
self.assertRaises(compat_HTMLParseError, get_element_text_and_html_by_tag, 'article', html)
def test_iri_to_uri(self):
self.assertEqual(
iri_to_uri('https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b'),
'https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b') # Same
self.assertEqual(
iri_to_uri('https://www.google.com/search?q=Käsesoßenrührlöffel'), # German for cheese sauce stirring spoon
'https://www.google.com/search?q=K%C3%A4seso%C3%9Fenr%C3%BChrl%C3%B6ffel')
self.assertEqual(
iri_to_uri('https://www.google.com/search?q=lt<+gt>+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~#trash=?&garbage=#'),
'https://www.google.com/search?q=lt%3C+gt%3E+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~#trash=?&garbage=#')
self.assertEqual(
iri_to_uri('http://правозащита38.рф/category/news/'),
'http://xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
self.assertEqual(
iri_to_uri('http://www.правозащита38.рф/category/news/'),
'http://www.xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
self.assertEqual(
iri_to_uri('https://i❤.ws/emojidomain/👍👏🤝💪'),
'https://xn--i-7iq.ws/emojidomain/%F0%9F%91%8D%F0%9F%91%8F%F0%9F%A4%9D%F0%9F%92%AA')
self.assertEqual(
iri_to_uri('http://日本語.jp/'),
'http://xn--wgv71a119e.jp/')
self.assertEqual(
iri_to_uri('http://导航.中国/'),
'http://xn--fet810g.xn--fiqs8s/')
def test_clean_podcast_url(self):
self.assertEqual(clean_podcast_url('https://www.podtrac.com/pts/redirect.mp3/chtbl.com/track/5899E/traffic.megaphone.fm/HSW7835899191.mp3'), 'https://traffic.megaphone.fm/HSW7835899191.mp3')
self.assertEqual(clean_podcast_url('https://play.podtrac.com/npr-344098539/edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3'), 'https://edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3')
def test_LazyList(self):
it = list(range(10))
self.assertEqual(list(LazyList(it)), it)
self.assertEqual(LazyList(it).exhaust(), it)
self.assertEqual(LazyList(it)[5], it[5])
self.assertEqual(LazyList(it)[5:], it[5:])
self.assertEqual(LazyList(it)[:5], it[:5])
self.assertEqual(LazyList(it)[::2], it[::2])
self.assertEqual(LazyList(it)[1::2], it[1::2])
self.assertEqual(LazyList(it)[5::-1], it[5::-1])
self.assertEqual(LazyList(it)[6:2:-2], it[6:2:-2])
self.assertEqual(LazyList(it)[::-1], it[::-1])
self.assertTrue(LazyList(it))
self.assertFalse(LazyList(range(0)))
self.assertEqual(len(LazyList(it)), len(it))
self.assertEqual(repr(LazyList(it)), repr(it))
self.assertEqual(str(LazyList(it)), str(it))
self.assertEqual(list(LazyList(it, reverse=True)), it[::-1])
self.assertEqual(list(reversed(LazyList(it))[::-1]), it)
self.assertEqual(list(reversed(LazyList(it))[1:3:7]), it[::-1][1:3:7])
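    # LazyList must not consume the wrapped iterator beyond the requested index; the helper checks the private cache to verify this.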
def test_LazyList_laziness(self):
def test(ll, idx, val, cache):
self.assertEqual(ll[idx], val)
self.assertEqual(getattr(ll, '_LazyList__cache'), list(cache))
ll = LazyList(range(10))
test(ll, 0, 0, range(1))
test(ll, 5, 5, range(6))
test(ll, -3, 7, range(10))
ll = LazyList(range(10), reverse=True)
test(ll, -1, 0, range(1))
test(ll, 3, 6, range(10))
ll = LazyList(itertools.count())
test(ll, 10, 10, range(11))
ll = reversed(ll)
test(ll, -15, 14, range(15))
def test_format_bytes(self):
self.assertEqual(format_bytes(0), '0.00B')
self.assertEqual(format_bytes(1000), '1000.00B')
self.assertEqual(format_bytes(1024), '1.00KiB')
self.assertEqual(format_bytes(1024**2), '1.00MiB')
self.assertEqual(format_bytes(1024**3), '1.00GiB')
self.assertEqual(format_bytes(1024**4), '1.00TiB')
self.assertEqual(format_bytes(1024**5), '1.00PiB')
self.assertEqual(format_bytes(1024**6), '1.00EiB')
self.assertEqual(format_bytes(1024**7), '1.00ZiB')
self.assertEqual(format_bytes(1024**8), '1.00YiB')
self.assertEqual(format_bytes(1024**9), '1024.00YiB')
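    # Config.hide_login_info replaces credential values (but not the option flags themselves) with 'PRIVATE'.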
def test_hide_login_info(self):
self.assertEqual(Config.hide_login_info(['-u', 'foo', '-p', 'bar']),
['-u', 'PRIVATE', '-p', 'PRIVATE'])
self.assertEqual(Config.hide_login_info(['-u']), ['-u'])
self.assertEqual(Config.hide_login_info(['-u', 'foo', '-u', 'bar']),
['-u', 'PRIVATE', '-u', 'PRIVATE'])
self.assertEqual(Config.hide_login_info(['--username=foo']),
['--username=PRIVATE'])
def test_locked_file(self):
TEXT = 'test_locked_file\n'
FILE = 'test_locked_file.ytdl'
MODES = 'war' # Order is important
try:
for lock_mode in MODES:
with locked_file(FILE, lock_mode, False) as f:
if lock_mode == 'r':
self.assertEqual(f.read(), TEXT * 2, 'Wrong file content')
else:
f.write(TEXT)
for test_mode in MODES:
testing_write = test_mode != 'r'
try:
with locked_file(FILE, test_mode, False):
pass
except (BlockingIOError, PermissionError):
if not testing_write: # FIXME
print(f'Known issue: Exclusive lock ({lock_mode}) blocks read access ({test_mode})')
continue
self.assertTrue(testing_write, f'{test_mode} is blocked by {lock_mode}')
else:
self.assertFalse(testing_write, f'{test_mode} is not blocked by {lock_mode}')
finally:
try:
os.remove(FILE)
except Exception:
pass
if __name__ == '__main__':
unittest.main()
| 46.417534
| 382
| 0.604141
|
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import io
import itertools
import json
import xml.etree.ElementTree
from yt_dlp.compat import (
compat_chr,
compat_etree_fromstring,
compat_getenv,
compat_HTMLParseError,
compat_os_name,
compat_setenv,
)
from yt_dlp.utils import (
Config,
DateRange,
ExtractorError,
InAdvancePagedList,
LazyList,
OnDemandPagedList,
age_restricted,
args_to_str,
base_url,
caesar,
clean_html,
clean_podcast_url,
cli_bool_option,
cli_option,
cli_valueless_option,
date_from_str,
datetime_from_str,
detect_exe_version,
determine_ext,
dfxp2srt,
dict_get,
encode_base_n,
encode_compat_str,
encodeFilename,
escape_rfc3986,
escape_url,
expand_path,
extract_attributes,
find_xpath_attr,
fix_xml_ampersands,
float_or_none,
format_bytes,
get_element_by_attribute,
get_element_by_class,
get_element_html_by_attribute,
get_element_html_by_class,
get_element_text_and_html_by_tag,
get_elements_by_attribute,
get_elements_by_class,
get_elements_html_by_attribute,
get_elements_html_by_class,
get_elements_text_and_html_by_attribute,
int_or_none,
intlist_to_bytes,
iri_to_uri,
is_html,
js_to_json,
limit_length,
locked_file,
lowercase_escape,
match_str,
merge_dicts,
mimetype2ext,
month_by_name,
multipart_encode,
ohdave_rsa_encrypt,
orderedSet,
parse_age_limit,
parse_bitrate,
parse_codecs,
parse_count,
parse_dfxp_time_expr,
parse_duration,
parse_filesize,
parse_iso8601,
parse_qs,
parse_resolution,
pkcs1pad,
prepend_extension,
read_batch_urls,
remove_end,
remove_quotes,
remove_start,
render_table,
replace_extension,
rot47,
sanitize_filename,
sanitize_path,
sanitize_url,
sanitized_Request,
shell_quote,
smuggle_url,
str_to_int,
strip_jsonp,
strip_or_none,
subtitles_filename,
timeconvert,
unescapeHTML,
unified_strdate,
unified_timestamp,
unsmuggle_url,
update_url_query,
uppercase_escape,
url_basename,
url_or_none,
urlencode_postdata,
urljoin,
urshift,
version_tuple,
xpath_attr,
xpath_element,
xpath_text,
xpath_with_ns,
)
class TestUtil(unittest.TestCase):
def test_timeconvert(self):
self.assertTrue(timeconvert('') is None)
self.assertTrue(timeconvert('bougrg') is None)
def test_sanitize_filename(self):
self.assertEqual(sanitize_filename(''), '')
self.assertEqual(sanitize_filename('abc'), 'abc')
self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')
self.assertEqual(sanitize_filename('123'), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de'))
self.assertFalse('/' in sanitize_filename('abc/de///'))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
self.assertEqual('yes no', sanitize_filename('yes? no'))
self.assertEqual('this - that', sanitize_filename('this: that'))
self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
aumlaut = 'ä'
self.assertEqual(sanitize_filename(aumlaut), aumlaut)
tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
self.assertEqual(sanitize_filename(tests), tests)
self.assertEqual(
sanitize_filename('New World record at 0:12:34'),
'New World record at 0_12_34')
self.assertEqual(sanitize_filename('--gasdgf'), '--gasdgf')
self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
self.assertEqual(sanitize_filename('--gasdgf', is_id=False), '_-gasdgf')
self.assertEqual(sanitize_filename('.gasdgf'), '.gasdgf')
self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
self.assertEqual(sanitize_filename('.gasdgf', is_id=False), 'gasdgf')
forbidden = '"\0\\/'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc))
def test_sanitize_filename_restricted(self):
self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')
self.assertEqual(sanitize_filename('123', restricted=True), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))
tests = 'aäb\u4e2d\u56fd\u7684c'
self.assertEqual(sanitize_filename(tests, restricted=True), 'aab_c')
self.assertTrue(sanitize_filename('\xf6', restricted=True) != '') # No empty filename
        forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))
# Handle a common case more neatly
self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
# .. but make sure the file name is never empty
self.assertTrue(sanitize_filename('-', restricted=True) != '')
self.assertTrue(sanitize_filename(':', restricted=True) != '')
self.assertEqual(sanitize_filename(
'ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ', restricted=True),
'AAAAAAAECEEEEIIIIDNOOOOOOOOEUUUUUYTHssaaaaaaaeceeeeiiiionooooooooeuuuuuythy')
def test_sanitize_ids(self):
self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
def test_sanitize_path(self):
if sys.platform != 'win32':
return
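        # sanitize_path only normalizes Windows-illegal characters, so this test is meaningful on win32 only.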
self.assertEqual(sanitize_path('abc'), 'abc')
self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc|def'), 'abc#def')
self.assertEqual(sanitize_path('<>:"|?*'), '#######')
self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')
self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(
sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')
self.assertEqual(
sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
self.assertEqual(sanitize_path('../abc'), '..\\abc')
self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
self.assertEqual(sanitize_path('./abc'), 'abc')
self.assertEqual(sanitize_path('./../abc'), '..\\abc')
def test_sanitize_url(self):
self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
self.assertEqual(sanitize_url('httpss://foo.bar'), 'https://foo.bar')
self.assertEqual(sanitize_url('rmtps://foo.bar'), 'rtmps://foo.bar')
self.assertEqual(sanitize_url('https://foo.bar'), 'https://foo.bar')
self.assertEqual(sanitize_url('foo bar'), 'foo bar')
def test_extract_basic_auth(self):
auth_header = lambda url: sanitized_Request(url).get_header('Authorization')
self.assertFalse(auth_header('http://foo.bar'))
self.assertFalse(auth_header('http://:foo.bar'))
self.assertEqual(auth_header('http://@foo.bar'), 'Basic Og==')
self.assertEqual(auth_header('http://:pass@foo.bar'), 'Basic OnBhc3M=')
self.assertEqual(auth_header('http://user:@foo.bar'), 'Basic dXNlcjo=')
self.assertEqual(auth_header('http://user:pass@foo.bar'), 'Basic dXNlcjpwYXNz')
def test_expand_path(self):
def env(var):
return f'%{var}%' if sys.platform == 'win32' else f'${var}'
compat_setenv('yt_dlp_EXPATH_PATH', 'expanded')
self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded')
self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME'))
self.assertEqual(expand_path('~'), compat_getenv('HOME'))
self.assertEqual(
expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
'%s/expanded' % compat_getenv('HOME'))
def test_prepend_extension(self):
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
def test_replace_extension(self):
self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
def test_subtitles_filename(self):
self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt'), 'abc.en.vtt')
self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt', 'ext'), 'abc.en.vtt')
self.assertEqual(subtitles_filename('abc.unexpected_ext', 'en', 'vtt', 'ext'), 'abc.unexpected_ext.en.vtt')
def test_remove_start(self):
self.assertEqual(remove_start(None, 'A - '), None)
self.assertEqual(remove_start('A - B', 'A - '), 'B')
self.assertEqual(remove_start('B - A', 'A - '), 'B - A')
def test_remove_end(self):
self.assertEqual(remove_end(None, ' - B'), None)
self.assertEqual(remove_end('A - B', ' - B'), 'A')
self.assertEqual(remove_end('B - A', ' - B'), 'B - A')
def test_remove_quotes(self):
self.assertEqual(remove_quotes(None), None)
self.assertEqual(remove_quotes('"'), '"')
self.assertEqual(remove_quotes("'"), "'")
self.assertEqual(remove_quotes(';'), ';')
self.assertEqual(remove_quotes('";'), '";')
self.assertEqual(remove_quotes('""'), '')
self.assertEqual(remove_quotes('";"'), ';')
def test_ordered_set(self):
self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(orderedSet([]), [])
self.assertEqual(orderedSet([1]), [1])
# keep the list ordered
self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
def test_unescape_html(self):
self.assertEqual(unescapeHTML('%20;'), '%20;')
        self.assertEqual(unescapeHTML('&#x2F;'), '/')
        self.assertEqual(unescapeHTML('&#47;'), '/')
        self.assertEqual(unescapeHTML('&eacute;'), 'é')
        self.assertEqual(unescapeHTML('&#2013266066;'), '&#2013266066;')
        self.assertEqual(unescapeHTML('&a&quot;'), '&a"')
# HTML5 entities
        self.assertEqual(unescapeHTML('&period;&apos;'), '.\'')
def test_date_from_str(self):
self.assertEqual(date_from_str('yesterday'), date_from_str('now-1day'))
self.assertEqual(date_from_str('now+7day'), date_from_str('now+1week'))
self.assertEqual(date_from_str('now+14day'), date_from_str('now+2week'))
self.assertEqual(date_from_str('20200229+365day'), date_from_str('20200229+1year'))
self.assertEqual(date_from_str('20210131+28day'), date_from_str('20210131+1month'))
def test_datetime_from_str(self):
self.assertEqual(datetime_from_str('yesterday', precision='day'), datetime_from_str('now-1day', precision='auto'))
self.assertEqual(datetime_from_str('now+7day', precision='day'), datetime_from_str('now+1week', precision='auto'))
self.assertEqual(datetime_from_str('now+14day', precision='day'), datetime_from_str('now+2week', precision='auto'))
self.assertEqual(datetime_from_str('20200229+365day', precision='day'), datetime_from_str('20200229+1year', precision='auto'))
self.assertEqual(datetime_from_str('20210131+28day', precision='day'), datetime_from_str('20210131+1month', precision='auto'))
self.assertEqual(datetime_from_str('20210131+59day', precision='day'), datetime_from_str('20210131+2month', precision='auto'))
self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
def test_daterange(self):
_20century = DateRange("19000101", "20000101")
self.assertFalse("17890714" in _20century)
_ac = DateRange("00010101")
self.assertTrue("19690721" in _ac)
_firstmilenium = DateRange(end="10000101")
self.assertTrue("07110427" in _firstmilenium)
def test_unified_dates(self):
self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
self.assertEqual(unified_strdate('8/7/2009'), '20090708')
self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
self.assertEqual(unified_strdate('1968 12 10'), '19681210')
self.assertEqual(unified_strdate('1968-12-10'), '19681210')
self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
self.assertEqual(
unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
'20141126')
self.assertEqual(
unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
'20150202')
self.assertEqual(unified_strdate('Feb 14th 2016 5:45PM'), '20160214')
self.assertEqual(unified_strdate('25-09-2014'), '20140925')
self.assertEqual(unified_strdate('27.02.2016 17:30'), '20160227')
self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
self.assertEqual(unified_strdate('Feb 7, 2016 at 6:35 pm'), '20160207')
self.assertEqual(unified_strdate('July 15th, 2013'), '20130715')
self.assertEqual(unified_strdate('September 1st, 2013'), '20130901')
self.assertEqual(unified_strdate('Sep 2nd, 2013'), '20130902')
self.assertEqual(unified_strdate('November 3rd, 2019'), '20191103')
self.assertEqual(unified_strdate('October 23rd, 2005'), '20051023')
def test_unified_timestamps(self):
self.assertEqual(unified_timestamp('December 21, 2010'), 1292889600)
self.assertEqual(unified_timestamp('8/7/2009'), 1247011200)
self.assertEqual(unified_timestamp('Dec 14, 2012'), 1355443200)
self.assertEqual(unified_timestamp('2012/10/11 01:56:38 +0000'), 1349920598)
self.assertEqual(unified_timestamp('1968 12 10'), -33436800)
self.assertEqual(unified_timestamp('1968-12-10'), -33436800)
self.assertEqual(unified_timestamp('28/01/2014 21:00:00 +0100'), 1390939200)
self.assertEqual(
unified_timestamp('11/26/2014 11:30:00 AM PST', day_first=False),
1417001400)
self.assertEqual(
unified_timestamp('2/2/2015 6:47:40 PM', day_first=False),
1422902860)
self.assertEqual(unified_timestamp('Feb 14th 2016 5:45PM'), 1455471900)
self.assertEqual(unified_timestamp('25-09-2014'), 1411603200)
self.assertEqual(unified_timestamp('27.02.2016 17:30'), 1456594200)
self.assertEqual(unified_timestamp('UNKNOWN DATE FORMAT'), None)
self.assertEqual(unified_timestamp('May 16, 2016 11:15 PM'), 1463440500)
self.assertEqual(unified_timestamp('Feb 7, 2016 at 6:35 pm'), 1454870100)
self.assertEqual(unified_timestamp('2017-03-30T17:52:41Q'), 1490896361)
self.assertEqual(unified_timestamp('Sep 11, 2013 | 5:49 AM'), 1378878540)
self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
def test_determine_ext(self):
self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')
self.assertEqual(determine_ext('foobar', None), None)
def test_find_xpath_attr(self):
testxml = '''<root>
<node/>
<node x="a"/>
<node x="a" y="c" />
<node x="b" y="d" />
<node x="" />
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])
def test_xpath_with_ns(self):
testxml = '''<root xmlns:media="http://example.com/">
<media:song>
<media:author>The Author</media:author>
<url>http://server.com/download.mp3</url>
</media:song>
</root>'''
doc = compat_etree_fromstring(testxml)
find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
self.assertTrue(find('media:song') is not None)
self.assertEqual(find('media:song/media:author').text, 'The Author')
self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
def test_xpath_element(self):
doc = xml.etree.ElementTree.Element('root')
div = xml.etree.ElementTree.SubElement(doc, 'div')
p = xml.etree.ElementTree.SubElement(div, 'p')
p.text = 'Foo'
self.assertEqual(xpath_element(doc, 'div/p'), p)
self.assertEqual(xpath_element(doc, ['div/p']), p)
self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p)
self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default')
self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default')
self.assertTrue(xpath_element(doc, 'div/bar') is None)
self.assertTrue(xpath_element(doc, ['div/bar']) is None)
self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None)
self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True)
def test_xpath_text(self):
testxml = '''<root>
<div>
<p>Foo</p>
</div>
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default')
self.assertTrue(xpath_text(doc, 'div/bar') is None)
self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)
def test_xpath_attr(self):
testxml = '''<root>
<div>
<p x="a">Foo</p>
</div>
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a')
self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None)
self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None)
self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default')
self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default')
self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True)
self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True)
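    # smuggle_url embeds a JSON payload in the URL fragment; unsmuggle_url recovers it, and smuggling an already-smuggled URL merges the payloads.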
def test_smuggle_url(self):
data = {"ö": "ö", "abc": [3]}
url = 'https://foo.bar/baz?x=y#a'
smug_url = smuggle_url(url, data)
unsmug_url, unsmug_data = unsmuggle_url(smug_url)
self.assertEqual(url, unsmug_url)
self.assertEqual(data, unsmug_data)
res_url, res_data = unsmuggle_url(url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, None)
smug_url = smuggle_url(url, {'a': 'b'})
smug_smug_url = smuggle_url(smug_url, {'c': 'd'})
res_url, res_data = unsmuggle_url(smug_smug_url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, {'a': 'b', 'c': 'd'})
def test_shell_quote(self):
args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
self.assertEqual(
shell_quote(args),
"""ffmpeg -i 'ñ€ß'"'"'.mp4'""" if compat_os_name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')
def test_float_or_none(self):
self.assertEqual(float_or_none('42.42'), 42.42)
self.assertEqual(float_or_none('42'), 42.0)
self.assertEqual(float_or_none(''), None)
self.assertEqual(float_or_none(None), None)
self.assertEqual(float_or_none([]), None)
self.assertEqual(float_or_none(set()), None)
def test_int_or_none(self):
self.assertEqual(int_or_none('42'), 42)
self.assertEqual(int_or_none(''), None)
self.assertEqual(int_or_none(None), None)
self.assertEqual(int_or_none([]), None)
self.assertEqual(int_or_none(set()), None)
def test_str_to_int(self):
self.assertEqual(str_to_int('123,456'), 123456)
self.assertEqual(str_to_int('123.456'), 123456)
self.assertEqual(str_to_int(523), 523)
self.assertEqual(str_to_int('noninteger'), None)
self.assertEqual(str_to_int([]), None)
def test_url_basename(self):
self.assertEqual(url_basename('http://foo.de/'), '')
self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
self.assertEqual(
url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
'trailer.mp4')
def test_base_url(self):
self.assertEqual(base_url('http://foo.de/'), 'http://foo.de/')
self.assertEqual(base_url('http://foo.de/bar'), 'http://foo.de/')
self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
def test_urljoin(self):
self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(b'http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(b'http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('//foo.de/', '/a/b/c.txt'), '//foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
self.assertEqual(urljoin(None, 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(None, '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
self.assertEqual(urljoin('', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin(['foobar'], 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
self.assertEqual(urljoin('http://foo.de/', None), None)
self.assertEqual(urljoin('http://foo.de/', ''), None)
self.assertEqual(urljoin('http://foo.de/', ['foobar']), None)
self.assertEqual(urljoin('http://foo.de/a/b/c.txt', '.././../d.txt'), 'http://foo.de/d.txt')
self.assertEqual(urljoin('http://foo.de/a/b/c.txt', 'rtmp://foo.de'), 'rtmp://foo.de')
self.assertEqual(urljoin(None, 'rtmp://foo.de'), 'rtmp://foo.de')
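    # url_or_none accepts absolute http(s)/rtmp-family/mms/rtsp/ftps URLs and protocol-relative URLs, returning None for anything else.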
def test_url_or_none(self):
self.assertEqual(url_or_none(None), None)
self.assertEqual(url_or_none(''), None)
self.assertEqual(url_or_none('foo'), None)
self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
self.assertEqual(url_or_none('https://foo.de'), 'https://foo.de')
self.assertEqual(url_or_none('http$://foo.de'), None)
self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
self.assertEqual(url_or_none('//foo.de'), '//foo.de')
self.assertEqual(url_or_none('s3://foo.de'), None)
self.assertEqual(url_or_none('rtmpte://foo.de'), 'rtmpte://foo.de')
self.assertEqual(url_or_none('mms://foo.de'), 'mms://foo.de')
self.assertEqual(url_or_none('rtspu://foo.de'), 'rtspu://foo.de')
self.assertEqual(url_or_none('ftps://foo.de'), 'ftps://foo.de')
def test_parse_age_limit(self):
self.assertEqual(parse_age_limit(None), None)
self.assertEqual(parse_age_limit(False), None)
self.assertEqual(parse_age_limit('invalid'), None)
self.assertEqual(parse_age_limit(0), 0)
self.assertEqual(parse_age_limit(18), 18)
self.assertEqual(parse_age_limit(21), 21)
self.assertEqual(parse_age_limit(22), None)
self.assertEqual(parse_age_limit('18'), 18)
self.assertEqual(parse_age_limit('18+'), 18)
self.assertEqual(parse_age_limit('PG-13'), 13)
self.assertEqual(parse_age_limit('TV-14'), 14)
self.assertEqual(parse_age_limit('TV-MA'), 17)
self.assertEqual(parse_age_limit('TV14'), 14)
self.assertEqual(parse_age_limit('TV_G'), 0)
def test_parse_duration(self):
self.assertEqual(parse_duration(None), None)
self.assertEqual(parse_duration(False), None)
self.assertEqual(parse_duration('invalid'), None)
self.assertEqual(parse_duration('1'), 1)
self.assertEqual(parse_duration('1337:12'), 80232)
self.assertEqual(parse_duration('9:12:43'), 33163)
self.assertEqual(parse_duration('12:00'), 720)
self.assertEqual(parse_duration('00:01:01'), 61)
self.assertEqual(parse_duration('x:y'), None)
self.assertEqual(parse_duration('3h11m53s'), 11513)
self.assertEqual(parse_duration('3h 11m 53s'), 11513)
self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
self.assertEqual(parse_duration('3 hours, 11 minutes, 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours, 11 mins, 53 secs'), 11513)
self.assertEqual(parse_duration('62m45s'), 3765)
self.assertEqual(parse_duration('6m59s'), 419)
self.assertEqual(parse_duration('49s'), 49)
self.assertEqual(parse_duration('0h0m0s'), 0)
self.assertEqual(parse_duration('0m0s'), 0)
self.assertEqual(parse_duration('0s'), 0)
self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
self.assertEqual(parse_duration('T30M38S'), 1838)
self.assertEqual(parse_duration('5 s'), 5)
self.assertEqual(parse_duration('3 min'), 180)
self.assertEqual(parse_duration('2.5 hours'), 9000)
self.assertEqual(parse_duration('02:03:04'), 7384)
self.assertEqual(parse_duration('01:02:03:04'), 93784)
self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
self.assertEqual(parse_duration('87 Min.'), 5220)
self.assertEqual(parse_duration('PT1H0.040S'), 3600.04)
self.assertEqual(parse_duration('PT00H03M30SZ'), 210)
self.assertEqual(parse_duration('P0Y0M0DT0H4M20.880S'), 260.88)
self.assertEqual(parse_duration('01:02:03:050'), 3723.05)
self.assertEqual(parse_duration('103:050'), 103.05)
def test_fix_xml_ampersands(self):
self.assertEqual(
            fix_xml_ampersands('"&x=y&z=a'), '"&amp;x=y&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('"&amp;x=y&wrong;&z=a'),
            '"&amp;x=y&wrong;&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('&amp;&apos;&gt;&lt;&quot;'),
            '&amp;&apos;&gt;&lt;&quot;')
        self.assertEqual(
            fix_xml_ampersands('&#1234;&#x1abC;'), '&#1234;&#x1abC;')
        self.assertEqual(fix_xml_ampersands('&#&#'), '&amp;#&amp;#')
    def test_paged_list(self):
        def testPL(size, pagesize, sliceargs, expected):
def get_page(pagenum):
firstid = pagenum * pagesize
upto = min(size, pagenum * pagesize + pagesize)
yield from range(firstid, upto)
pl = OnDemandPagedList(get_page, pagesize)
got = pl.getslice(*sliceargs)
self.assertEqual(got, expected)
iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
got = iapl.getslice(*sliceargs)
self.assertEqual(got, expected)
testPL(5, 2, (), [0, 1, 2, 3, 4])
testPL(5, 2, (1,), [1, 2, 3, 4])
testPL(5, 2, (2,), [2, 3, 4])
testPL(5, 2, (4,), [4])
testPL(5, 2, (0, 3), [0, 1, 2])
testPL(5, 2, (1, 4), [1, 2, 3])
testPL(5, 2, (2, 99), [2, 3, 4])
testPL(5, 2, (20, 99), [])
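    # read_batch_urls strips a BOM and surrounding whitespace and skips blank lines as well as comment lines starting with '#' or ';'.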
def test_read_batch_urls(self):
f = io.StringIO('''\xef\xbb\xbf foo
bar\r
baz
# More after this line\r
; or after this
bam''')
self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])
def test_urlencode_postdata(self):
data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
self.assertTrue(isinstance(data, bytes))
def test_update_url_query(self):
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'quality': ['HD'], 'format': ['mp4']})),
parse_qs('http://example.com/path?quality=HD&format=mp4'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'system': ['LINUX', 'WINDOWS']})),
parse_qs('http://example.com/path?system=LINUX&system=WINDOWS'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'fields': 'id,formats,subtitles'})),
parse_qs('http://example.com/path?fields=id,formats,subtitles'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'fields': ('id,formats,subtitles', 'thumbnails')})),
parse_qs('http://example.com/path?fields=id,formats,subtitles&fields=thumbnails'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path?manifest=f4m', {'manifest': []})),
parse_qs('http://example.com/path'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path?system=LINUX&system=WINDOWS', {'system': 'LINUX'})),
parse_qs('http://example.com/path?system=LINUX'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'fields': b'id,formats,subtitles'})),
parse_qs('http://example.com/path?fields=id,formats,subtitles'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'width': 1080, 'height': 720})),
parse_qs('http://example.com/path?width=1080&height=720'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'bitrate': 5020.43})),
parse_qs('http://example.com/path?bitrate=5020.43'))
self.assertEqual(parse_qs(update_url_query(
'http://example.com/path', {'test': '第二行тест'})),
parse_qs('http://example.com/path?test=%E7%AC%AC%E4%BA%8C%E8%A1%8C%D1%82%D0%B5%D1%81%D1%82'))
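    # multipart_encode builds a multipart/form-data body with the given boundary and raises ValueError if the boundary occurs in the data.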
def test_multipart_encode(self):
self.assertEqual(
multipart_encode({b'field': b'value'}, boundary='AAAAAA')[0],
b'--AAAAAA\r\nContent-Disposition: form-data; name="field"\r\n\r\nvalue\r\n--AAAAAA--\r\n')
self.assertEqual(
multipart_encode({'欄位'.encode(): '值'.encode()}, boundary='AAAAAA')[0],
b'--AAAAAA\r\nContent-Disposition: form-data; name="\xe6\xac\x84\xe4\xbd\x8d"\r\n\r\n\xe5\x80\xbc\r\n--AAAAAA--\r\n')
self.assertRaises(
ValueError, multipart_encode, {b'field': b'value'}, boundary='value')
def test_dict_get(self):
FALSE_VALUES = {
'none': None,
'false': False,
'zero': 0,
'empty_string': '',
'empty_list': [],
}
d = FALSE_VALUES.copy()
d['a'] = 42
self.assertEqual(dict_get(d, 'a'), 42)
self.assertEqual(dict_get(d, 'b'), None)
self.assertEqual(dict_get(d, 'b', 42), 42)
self.assertEqual(dict_get(d, ('a', )), 42)
self.assertEqual(dict_get(d, ('b', 'a', )), 42)
self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
self.assertEqual(dict_get(d, ('b', 'c', )), None)
self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
for key, false_value in FALSE_VALUES.items():
self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)
def test_merge_dicts(self):
self.assertEqual(merge_dicts({'a': 1}, {'b': 2}), {'a': 1, 'b': 2})
self.assertEqual(merge_dicts({'a': 1}, {'a': 2}), {'a': 1})
self.assertEqual(merge_dicts({'a': 1}, {'a': None}), {'a': 1})
self.assertEqual(merge_dicts({'a': 1}, {'a': ''}), {'a': 1})
self.assertEqual(merge_dicts({'a': 1}, {}), {'a': 1})
self.assertEqual(merge_dicts({'a': None}, {'a': 1}), {'a': 1})
self.assertEqual(merge_dicts({'a': ''}, {'a': 1}), {'a': ''})
self.assertEqual(merge_dicts({'a': ''}, {'a': 'abc'}), {'a': 'abc'})
self.assertEqual(merge_dicts({'a': None}, {'a': ''}, {'a': 'abc'}), {'a': 'abc'})
def test_encode_compat_str(self):
self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест')
self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест')
def test_parse_iso8601(self):
self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251)
self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None)
def test_strip_jsonp(self):
stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
d = json.loads(stripped)
self.assertEqual(d, [{"id": "532cb", "x": 3}])
stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
d = json.loads(stripped)
self.assertEqual(d, {'STATUS': 'OK'})
stripped = strip_jsonp('ps.embedHandler({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
stripped = strip_jsonp('window.cb && window.cb({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
stripped = strip_jsonp('window.cb && cb({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
stripped = strip_jsonp('({"status": "success"});')
d = json.loads(stripped)
self.assertEqual(d, {'status': 'success'})
def test_strip_or_none(self):
self.assertEqual(strip_or_none(' abc'), 'abc')
self.assertEqual(strip_or_none('abc '), 'abc')
self.assertEqual(strip_or_none(' abc '), 'abc')
self.assertEqual(strip_or_none('\tabc\t'), 'abc')
self.assertEqual(strip_or_none('\n\tabc\n\t'), 'abc')
self.assertEqual(strip_or_none('abc'), 'abc')
self.assertEqual(strip_or_none(''), '')
self.assertEqual(strip_or_none(None), None)
self.assertEqual(strip_or_none(42), None)
self.assertEqual(strip_or_none([]), None)
def test_uppercase_escape(self):
self.assertEqual(uppercase_escape('aä'), 'aä')
self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')
def test_lowercase_escape(self):
self.assertEqual(lowercase_escape('aä'), 'aä')
self.assertEqual(lowercase_escape('\\u0026'), '&')
def test_limit_length(self):
self.assertEqual(limit_length(None, 12), None)
self.assertEqual(limit_length('foo', 12), 'foo')
self.assertTrue(
limit_length('foo bar baz asd', 12).startswith('foo bar'))
self.assertTrue('...' in limit_length('foo bar baz asd', 12))
def test_mimetype2ext(self):
self.assertEqual(mimetype2ext(None), None)
self.assertEqual(mimetype2ext('video/x-flv'), 'flv')
self.assertEqual(mimetype2ext('application/x-mpegURL'), 'm3u8')
self.assertEqual(mimetype2ext('text/vtt'), 'vtt')
self.assertEqual(mimetype2ext('text/vtt;charset=utf-8'), 'vtt')
self.assertEqual(mimetype2ext('text/html; charset=utf-8'), 'html')
self.assertEqual(mimetype2ext('audio/x-wav'), 'wav')
self.assertEqual(mimetype2ext('audio/x-wav;codec=pcm'), 'wav')
def test_month_by_name(self):
self.assertEqual(month_by_name(None), None)
self.assertEqual(month_by_name('December', 'en'), 12)
self.assertEqual(month_by_name('décembre', 'fr'), 12)
self.assertEqual(month_by_name('December'), 12)
self.assertEqual(month_by_name('décembre'), None)
self.assertEqual(month_by_name('Unknown', 'unknown'), None)
def test_parse_codecs(self):
self.assertEqual(parse_codecs(''), {})
self.assertEqual(parse_codecs('avc1.77.30, mp4a.40.2'), {
'vcodec': 'avc1.77.30',
'acodec': 'mp4a.40.2',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('mp4a.40.2'), {
'vcodec': 'none',
'acodec': 'mp4a.40.2',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('mp4a.40.5,avc1.42001e'), {
'vcodec': 'avc1.42001e',
'acodec': 'mp4a.40.5',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('avc3.640028'), {
'vcodec': 'avc3.640028',
'acodec': 'none',
'dynamic_range': None,
})
self.assertEqual(parse_codecs(', h264,,newcodec,aac'), {
'vcodec': 'h264',
'acodec': 'aac',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('av01.0.05M.08'), {
'vcodec': 'av01.0.05M.08',
'acodec': 'none',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('vp9.2'), {
'vcodec': 'vp9.2',
'acodec': 'none',
'dynamic_range': 'HDR10',
})
self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
'vcodec': 'av01.0.12M.10',
'acodec': 'none',
'dynamic_range': 'HDR10',
})
self.assertEqual(parse_codecs('dvhe'), {
'vcodec': 'dvhe',
'acodec': 'none',
'dynamic_range': 'DV',
})
self.assertEqual(parse_codecs('theora, vorbis'), {
'vcodec': 'theora',
'acodec': 'vorbis',
'dynamic_range': None,
})
self.assertEqual(parse_codecs('unknownvcodec, unknownacodec'), {
'vcodec': 'unknownvcodec',
'acodec': 'unknownacodec',
})
self.assertEqual(parse_codecs('unknown'), {})
def test_escape_rfc3986(self):
reserved = "!*'();:@&=+$,/?#[]"
unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
self.assertEqual(escape_rfc3986(reserved), reserved)
self.assertEqual(escape_rfc3986(unreserved), unreserved)
self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
def test_escape_url(self):
self.assertEqual(
escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
)
self.assertEqual(
escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
)
self.assertEqual(
escape_url('http://тест.рф/фрагмент'),
'http://xn--e1aybc.xn--p1ai/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
)
self.assertEqual(
escape_url('http://тест.рф/абв?абв=абв#абв'),
'http://xn--e1aybc.xn--p1ai/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
)
self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
def test_js_to_json_realworld(self):
inp = '''{
'clip':{'provider':'pseudo'}
}'''
self.assertEqual(js_to_json(inp), '''{
"clip":{"provider":"pseudo"}
}''')
json.loads(js_to_json(inp))
inp = '''{
'playlist':[{'controls':{'all':null}}]
}'''
self.assertEqual(js_to_json(inp), '''{
"playlist":[{"controls":{"all":null}}]
}''')
inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"'''
self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''')
inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
json_code = js_to_json(inp)
self.assertEqual(json.loads(json_code), json.loads(inp))
inp = '''{
0:{src:'skipped', type: 'application/dash+xml'},
1:{src:'skipped', type: 'application/vnd.apple.mpegURL'},
}'''
self.assertEqual(js_to_json(inp), '''{
"0":{"src":"skipped", "type": "application/dash+xml"},
"1":{"src":"skipped", "type": "application/vnd.apple.mpegURL"}
}''')
inp = '''{"foo":101}'''
self.assertEqual(js_to_json(inp), '''{"foo":101}''')
inp = '''{"duration": "00:01:07"}'''
self.assertEqual(js_to_json(inp), '''{"duration": "00:01:07"}''')
inp = '''{segments: [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}'''
self.assertEqual(js_to_json(inp), '''{"segments": [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}''')
def test_js_to_json_edgecases(self):
on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})
on = js_to_json('{"abc": true}')
self.assertEqual(json.loads(on), {'abc': True})
on = js_to_json('''{
"x": 1,
y: "a",
z: some.code
}''')
d = json.loads(on)
self.assertEqual(d['x'], 1)
self.assertEqual(d['y'], 'a')
on = js_to_json('''{
a: !0,
b: !1,
c: !!0,
d: !!42.42,
e: !!![],
f: !"abc",
g: !"",
!42: 42
}''')
self.assertEqual(json.loads(on), {
'a': 0,
'b': 1,
'c': 0,
'd': 42.42,
'e': [],
'f': "abc",
'g': "",
'42': 42
})
on = js_to_json('["abc", "def",]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('[/*comment\n*/"abc"/*comment\n*/,/*comment\n*/"def",/*comment\n*/]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('[//comment\n"abc" //comment\n,//comment\n"def",//comment\n]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('{"abc": "def",}')
self.assertEqual(json.loads(on), {'abc': 'def'})
on = js_to_json('{/*comment\n*/"abc"/*comment\n*/:/*comment\n*/"def"/*comment\n*/,/*comment\n*/}')
self.assertEqual(json.loads(on), {'abc': 'def'})
on = js_to_json('{ 0: /* " \n */ ",]" , }')
self.assertEqual(json.loads(on), {'0': ',]'})
on = js_to_json('{ /*comment\n*/0/*comment\n*/: /* " \n */ ",]" , }')
self.assertEqual(json.loads(on), {'0': ',]'})
on = js_to_json('{ 0: // comment\n1 }')
self.assertEqual(json.loads(on), {'0': 1})
on = js_to_json(r'["<p>x<\/p>"]')
self.assertEqual(json.loads(on), ['<p>x</p>'])
on = js_to_json(r'["\xaa"]')
self.assertEqual(json.loads(on), ['\u00aa'])
on = js_to_json("['a\\\nb']")
self.assertEqual(json.loads(on), ['ab'])
on = js_to_json("/*comment\n*/[/*comment\n*/'a\\\nb'/*comment\n*/]/*comment\n*/")
self.assertEqual(json.loads(on), ['ab'])
on = js_to_json('{0xff:0xff}')
self.assertEqual(json.loads(on), {'255': 255})
on = js_to_json('{/*comment\n*/0xff/*comment\n*/:/*comment\n*/0xff/*comment\n*/}')
self.assertEqual(json.loads(on), {'255': 255})
on = js_to_json('{077:077}')
self.assertEqual(json.loads(on), {'63': 63})
on = js_to_json('{/*comment\n*/077/*comment\n*/:/*comment\n*/077/*comment\n*/}')
self.assertEqual(json.loads(on), {'63': 63})
on = js_to_json('{42:42}')
self.assertEqual(json.loads(on), {'42': 42})
on = js_to_json('{/*comment\n*/42/*comment\n*/:/*comment\n*/42/*comment\n*/}')
self.assertEqual(json.loads(on), {'42': 42})
on = js_to_json('{42:4.2e1}')
self.assertEqual(json.loads(on), {'42': 42.0})
on = js_to_json('{ "0x40": "0x40" }')
self.assertEqual(json.loads(on), {'0x40': '0x40'})
on = js_to_json('{ "040": "040" }')
self.assertEqual(json.loads(on), {'040': '040'})
on = js_to_json('[1,//{},\n2]')
self.assertEqual(json.loads(on), [1, 2])
def test_js_to_json_malformed(self):
self.assertEqual(js_to_json('42a1'), '42"a1"')
self.assertEqual(js_to_json('42a-1'), '42"a"-1')
def test_extract_attributes(self):
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
self.assertEqual(extract_attributes("<e x='y'>"), {'x': 'y'})
self.assertEqual(extract_attributes('<e x=y>'), {'x': 'y'})
self.assertEqual(extract_attributes('<e x="a \'b\' c">'), {'x': "a 'b' c"})
self.assertEqual(extract_attributes('<e x=\'a "b" c\'>'), {'x': 'a "b" c'})
        self.assertEqual(extract_attributes('<e x="&#121;">'), {'x': 'y'})
        self.assertEqual(extract_attributes('<e x="&#x79;">'), {'x': 'y'})
        self.assertEqual(extract_attributes('<e x="&amp;">'), {'x': '&'})
        self.assertEqual(extract_attributes('<e x="&quot;">'), {'x': '"'})
        self.assertEqual(extract_attributes('<e x="&pound;">'), {'x': '£'}) # HTML 3.2
        self.assertEqual(extract_attributes('<e x="&lambda;">'), {'x': 'λ'}) # HTML 4.0
self.assertEqual(extract_attributes('<e x="&foo">'), {'x': '&foo'})
self.assertEqual(extract_attributes('<e x="\'">'), {'x': "'"})
self.assertEqual(extract_attributes('<e x=\'"\'>'), {'x': '"'})
self.assertEqual(extract_attributes('<e x >'), {'x': None})
self.assertEqual(extract_attributes('<e x=y a>'), {'x': 'y', 'a': None})
self.assertEqual(extract_attributes('<e x= y>'), {'x': 'y'})
self.assertEqual(extract_attributes('<e x=1 y=2 x=3>'), {'y': '2', 'x': '3'})
self.assertEqual(extract_attributes('<e \nx=\ny\n>'), {'x': 'y'})
self.assertEqual(extract_attributes('<e \nx=\n"y"\n>'), {'x': 'y'})
self.assertEqual(extract_attributes("<e \nx=\n'y'\n>"), {'x': 'y'})
self.assertEqual(extract_attributes('<e \nx="\ny\n">'), {'x': '\ny\n'})
self.assertEqual(extract_attributes('<e CAPS=x>'), {'caps': 'x'}) # Names lowercased
self.assertEqual(extract_attributes('<e x=1 X=2>'), {'x': '2'})
self.assertEqual(extract_attributes('<e X=1 x=2>'), {'x': '2'})
self.assertEqual(extract_attributes('<e _:funny-name1=1>'), {'_:funny-name1': '1'})
self.assertEqual(extract_attributes('<e x="Fáilte 世界 \U0001f600">'), {'x': 'Fáilte 世界 \U0001f600'})
self.assertEqual(extract_attributes('<e x="décomposé">'), {'x': 'décompose\u0301'})
# "Narrow" Python builds don't support unicode code points outside BMP.
try:
compat_chr(0x10000)
supports_outside_bmp = True
except ValueError:
supports_outside_bmp = False
if supports_outside_bmp:
            self.assertEqual(extract_attributes('<e x="Smile &#128512;!">'), {'x': 'Smile \U0001f600!'})
# Malformed HTML should not break attributes extraction on older Python
self.assertEqual(extract_attributes('<mal"formed/>'), {})
def test_clean_html(self):
self.assertEqual(clean_html('a:\nb'), 'a: b')
self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
self.assertEqual(clean_html('a<br>\xa0b'), 'a\nb')
def test_intlist_to_bytes(self):
self.assertEqual(
intlist_to_bytes([0, 1, 127, 128, 255]),
b'\x00\x01\x7f\x80\xff')
def test_args_to_str(self):
self.assertEqual(
args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
'foo ba/r -baz \'2 be\' \'\'' if compat_os_name != 'nt' else 'foo ba/r -baz "2 be" ""'
)
def test_parse_filesize(self):
self.assertEqual(parse_filesize(None), None)
self.assertEqual(parse_filesize(''), None)
self.assertEqual(parse_filesize('91 B'), 91)
self.assertEqual(parse_filesize('foobar'), None)
self.assertEqual(parse_filesize('2 MiB'), 2097152)
self.assertEqual(parse_filesize('5 GB'), 5000000000)
self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
self.assertEqual(parse_filesize('1.2tb'), 1200000000000)
self.assertEqual(parse_filesize('1,24 KB'), 1240)
self.assertEqual(parse_filesize('1,24 kb'), 1240)
self.assertEqual(parse_filesize('8.5 megabytes'), 8500000)
def test_parse_count(self):
self.assertEqual(parse_count(None), None)
self.assertEqual(parse_count(''), None)
self.assertEqual(parse_count('0'), 0)
self.assertEqual(parse_count('1000'), 1000)
self.assertEqual(parse_count('1.000'), 1000)
self.assertEqual(parse_count('1.1k'), 1100)
self.assertEqual(parse_count('1.1 k'), 1100)
self.assertEqual(parse_count('1,1 k'), 1100)
self.assertEqual(parse_count('1.1kk'), 1100000)
self.assertEqual(parse_count('1.1kk '), 1100000)
self.assertEqual(parse_count('1,1kk'), 1100000)
self.assertEqual(parse_count('100 views'), 100)
self.assertEqual(parse_count('1,100 views'), 1100)
self.assertEqual(parse_count('1.1kk views'), 1100000)
self.assertEqual(parse_count('10M views'), 10000000)
self.assertEqual(parse_count('has 10M views'), 10000000)
def test_parse_resolution(self):
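# Accepts WIDTHxHEIGHT with 'x' or '×', shorthand like 720p/4k/8K, and 'width, height' pairs; stray patterns such as 'ep1x2' must not be mistaken for a resolution.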
self.assertEqual(parse_resolution(None), {})
self.assertEqual(parse_resolution(''), {})
self.assertEqual(parse_resolution(' 1920x1080'), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('1920×1080 '), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('1920 x 1080'), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('720p'), {'height': 720})
self.assertEqual(parse_resolution('4k'), {'height': 2160})
self.assertEqual(parse_resolution('8K'), {'height': 4320})
self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
self.assertEqual(parse_resolution('ep1x2'), {})
self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
def test_parse_bitrate(self):
self.assertEqual(parse_bitrate(None), None)
self.assertEqual(parse_bitrate(''), None)
self.assertEqual(parse_bitrate('300kbps'), 300)
self.assertEqual(parse_bitrate('1500kbps'), 1500)
self.assertEqual(parse_bitrate('300 kbps'), 300)
def test_version_tuple(self):
self.assertEqual(version_tuple('1'), (1,))
self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style
def test_detect_exe_version(self):
self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
configuration: --prefix=/usr --extra-'''), '1.2.1')
self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
Trying to open render node...
Success at /dev/dri/renderD128.
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
def test_age_restricted(self):
self.assertFalse(age_restricted(None, 10)) # unrestricted content
self.assertFalse(age_restricted(1, None)) # unrestricted policy
self.assertFalse(age_restricted(8, 10))
self.assertTrue(age_restricted(18, 14))
self.assertFalse(age_restricted(18, 18))
def test_is_html(self):
self.assertFalse(is_html(b'\x49\x44\x43<html'))
self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
self.assertTrue(is_html( # UTF-8 with BOM
b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
self.assertTrue(is_html( # UTF-16-LE
b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
))
self.assertTrue(is_html( # UTF-16-BE
b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
))
self.assertTrue(is_html( # UTF-32-BE
b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
self.assertTrue(is_html( # UTF-32-LE
b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
def test_render_table(self):
self.assertEqual(
render_table(
['a', 'empty', 'bcd'],
[[123, '', 4], [9999, '', 51]]),
'a empty bcd\n'
'123 4\n'
'9999 51')
self.assertEqual(
render_table(
['a', 'empty', 'bcd'],
[[123, '', 4], [9999, '', 51]],
hide_empty=True),
'a bcd\n'
'123 4\n'
'9999 51')
self.assertEqual(
render_table(
['\ta', 'bcd'],
[['1\t23', 4], ['\t9999', 51]]),
' a bcd\n'
'1 23 4\n'
'9999 51')
self.assertEqual(
render_table(
['a', 'bcd'],
[[123, 4], [9999, 51]],
delim='-'),
'a bcd\n'
'--------\n'
'123 4\n'
'9999 51')
self.assertEqual(
render_table(
['a', 'bcd'],
[[123, 4], [9999, 51]],
delim='-', extra_gap=2),
'a bcd\n'
'----------\n'
'123 4\n'
'9999 51')
def test_match_str(self):
# Unary
self.assertFalse(match_str('xy', {'x': 1200}))
self.assertTrue(match_str('!xy', {'x': 1200}))
self.assertTrue(match_str('x', {'x': 1200}))
self.assertFalse(match_str('!x', {'x': 1200}))
self.assertTrue(match_str('x', {'x': 0}))
self.assertTrue(match_str('is_live', {'is_live': True}))
self.assertFalse(match_str('is_live', {'is_live': False}))
self.assertFalse(match_str('is_live', {'is_live': None}))
self.assertFalse(match_str('is_live', {}))
self.assertFalse(match_str('!is_live', {'is_live': True}))
self.assertTrue(match_str('!is_live', {'is_live': False}))
self.assertTrue(match_str('!is_live', {'is_live': None}))
self.assertTrue(match_str('!is_live', {}))
self.assertTrue(match_str('title', {'title': 'abc'}))
self.assertTrue(match_str('title', {'title': ''}))
self.assertFalse(match_str('!title', {'title': 'abc'}))
self.assertFalse(match_str('!title', {'title': ''}))
# Numeric
self.assertFalse(match_str('x>0', {'x': 0}))
self.assertFalse(match_str('x>0', {}))
self.assertTrue(match_str('x>?0', {}))
self.assertTrue(match_str('x>1K', {'x': 1200}))
self.assertFalse(match_str('x>2K', {'x': 1200}))
self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
self.assertTrue(match_str('x > 1:0:0', {'x': 3700}))
# String
self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
self.assertTrue(match_str('y^=foo', {'y': 'foobar42'}))
self.assertFalse(match_str('y!^=foo', {'y': 'foobar42'}))
self.assertFalse(match_str('y^=bar', {'y': 'foobar42'}))
self.assertTrue(match_str('y!^=bar', {'y': 'foobar42'}))
self.assertRaises(ValueError, match_str, 'x^=42', {'x': 42})
self.assertTrue(match_str('y*=bar', {'y': 'foobar42'}))
self.assertFalse(match_str('y!*=bar', {'y': 'foobar42'}))
self.assertFalse(match_str('y*=baz', {'y': 'foobar42'}))
self.assertTrue(match_str('y!*=baz', {'y': 'foobar42'}))
self.assertTrue(match_str('y$=42', {'y': 'foobar42'}))
self.assertFalse(match_str('y$=43', {'y': 'foobar42'}))
# And
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 90, 'description': 'foo'}))
self.assertTrue(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'description': 'foo'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'dislike_count': 10}))
# Regex
self.assertTrue(match_str(r'x~=\bbar', {'x': 'foo bar'}))
self.assertFalse(match_str(r'x~=\bbar.+', {'x': 'foo bar'}))
self.assertFalse(match_str(r'x~=^FOO', {'x': 'foo bar'}))
self.assertTrue(match_str(r'x~=(?i)^FOO', {'x': 'foo bar'}))
# Quotes
self.assertTrue(match_str(r'x^="foo"', {'x': 'foo "bar"'}))
self.assertFalse(match_str(r'x^="foo "', {'x': 'foo "bar"'}))
self.assertFalse(match_str(r'x$="bar"', {'x': 'foo "bar"'}))
self.assertTrue(match_str(r'x$=" \"bar\""', {'x': 'foo "bar"'}))
# Escaping &
self.assertFalse(match_str(r'x=foo & bar', {'x': 'foo & bar'}))
self.assertTrue(match_str(r'x=foo \& bar', {'x': 'foo & bar'}))
self.assertTrue(match_str(r'x=foo \& bar & x^=foo', {'x': 'foo & bar'}))
self.assertTrue(match_str(r'x="foo \& bar" & x^=foo', {'x': 'foo & bar'}))
# Example from docs
self.assertTrue(match_str(
r"!is_live & like_count>?100 & description~='(?i)\bcats \& dogs\b'",
{'description': 'Raining Cats & Dogs'}))
# Incomplete
self.assertFalse(match_str('id!=foo', {'id': 'foo'}, True))
self.assertTrue(match_str('x', {'id': 'foo'}, True))
self.assertTrue(match_str('!x', {'id': 'foo'}, True))
self.assertFalse(match_str('x', {'id': 'foo'}, False))
def test_parse_dfxp_time_expr(self):
self.assertEqual(parse_dfxp_time_expr(None), None)
self.assertEqual(parse_dfxp_time_expr(''), None)
self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1)
self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0)
self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01:100'), 1.1)
def test_dfxp2srt(self):
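# dfxp2srt converts TTML/DFXP subtitles to SRT: entries with invalid or negative timestamps are dropped, TTML styling maps to <font>/<b>/<i>/<u>, and non-UTF-8 input is handled.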
dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
<p begin="1" end="2">第二行<br/>♪♪</p>
<p begin="2" dur="1"><span>Third<br/>Line</span></p>
<p begin="3" end="-1">Lines with invalid timestamps are ignored</p>
<p begin="-1" end="-1">Ignore, two</p>
<p begin="3" dur="-1">Ignored, three</p>
</div>
</body>
</tt>'''.encode()
srt_data = '''1
00:00:00,000 --> 00:00:01,000
The following line contains Chinese characters and special symbols
2
00:00:01,000 --> 00:00:02,000
第二行
♪♪
3
00:00:02,000 --> 00:00:03,000
Third
Line
'''
self.assertEqual(dfxp2srt(dfxp_data), srt_data)
dfxp_data_no_default_namespace = b'''<?xml version="1.0" encoding="UTF-8"?>
<tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The first line</p>
</div>
</body>
</tt>'''
srt_data = '''1
00:00:00,000 --> 00:00:01,000
The first line
'''
self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
dfxp_data_with_style = b'''<?xml version="1.0" encoding="utf-8"?>
<tt xmlns="http://www.w3.org/2006/10/ttaf1" xmlns:ttp="http://www.w3.org/2006/10/ttaf1#parameter" ttp:timeBase="media" xmlns:tts="http://www.w3.org/2006/10/ttaf1#style" xml:lang="en" xmlns:ttm="http://www.w3.org/2006/10/ttaf1#metadata">
<head>
<styling>
<style id="s2" style="s0" tts:color="cyan" tts:fontWeight="bold" />
<style id="s1" style="s0" tts:color="yellow" tts:fontStyle="italic" />
<style id="s3" style="s0" tts:color="lime" tts:textDecoration="underline" />
<style id="s0" tts:backgroundColor="black" tts:fontStyle="normal" tts:fontSize="16" tts:fontFamily="sansSerif" tts:color="white" />
</styling>
</head>
<body tts:textAlign="center" style="s0">
<div>
<p begin="00:00:02.08" id="p0" end="00:00:05.84">default style<span tts:color="red">custom style</span></p>
<p style="s2" begin="00:00:02.08" id="p0" end="00:00:05.84"><span tts:color="lime">part 1<br /></span><span tts:color="cyan">part 2</span></p>
<p style="s3" begin="00:00:05.84" id="p1" end="00:00:09.56">line 3<br />part 3</p>
<p style="s1" tts:textDecoration="underline" begin="00:00:09.56" id="p2" end="00:00:12.36"><span style="s2" tts:color="lime">inner<br /> </span>style</p>
</div>
</body>
</tt>'''
srt_data = '''1
00:00:02,080 --> 00:00:05,840
<font color="white" face="sansSerif" size="16">default style<font color="red">custom style</font></font>
2
00:00:02,080 --> 00:00:05,840
<b><font color="cyan" face="sansSerif" size="16"><font color="lime">part 1
</font>part 2</font></b>
3
00:00:05,840 --> 00:00:09,560
<u><font color="lime">line 3
part 3</font></u>
4
00:00:09,560 --> 00:00:12,360
<i><u><font color="yellow"><font color="lime">inner
</font>style</font></u></i>
'''
self.assertEqual(dfxp2srt(dfxp_data_with_style), srt_data)
dfxp_data_non_utf8 = '''<?xml version="1.0" encoding="UTF-16"?>
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">Line 1</p>
<p begin="1" end="2">第二行</p>
</div>
</body>
</tt>'''.encode('utf-16')
srt_data = '''1
00:00:00,000 --> 00:00:01,000
Line 1
2
00:00:01,000 --> 00:00:02,000
第二行
'''
self.assertEqual(dfxp2srt(dfxp_data_non_utf8), srt_data)
def test_cli_option(self):
self.assertEqual(cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy'), ['--proxy', '127.0.0.1:3128'])
self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), [])
self.assertEqual(cli_option({}, '--proxy', 'proxy'), [])
self.assertEqual(cli_option({'retries': 10}, '--retries', 'retries'), ['--retries', '10'])
def test_cli_valueless_option(self):
self.assertEqual(cli_valueless_option(
{'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader'])
self.assertEqual(cli_valueless_option(
{'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), [])
self.assertEqual(cli_valueless_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate'])
self.assertEqual(cli_valueless_option(
{'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), [])
self.assertEqual(cli_valueless_option(
{'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), [])
self.assertEqual(cli_valueless_option(
{'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate'])
def test_cli_bool_option(self):
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'),
['--no-check-certificate', 'true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='),
['--no-check-certificate=true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
['--check-certificate', 'false'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
['--check-certificate=false'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
['--check-certificate', 'true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
['--check-certificate=true'])
self.assertEqual(
cli_bool_option(
{}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
[])
def test_ohdave_rsa_encrypt(self):
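# Textbook (unpadded) RSA as used by some extractors: the ciphertext is pow(payload, e, N) rendered as hex, so the result is deterministic and can be compared verbatim.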
N = 0xab86b6371b5318aaa1d3c9e612a9f1264f372323c8c0f19875b5fc3b3fd3afcc1e5bec527aa94bfa85bffc157e4245aebda05389a5357b75115ac94f074aefcd
e = 65537
self.assertEqual(
ohdave_rsa_encrypt(b'aa111222', e, N),
'726664bd9a23fd0c70f9f1b84aab5e3905ce1e45a584e9cbcf9bcc7510338fc1986d6c599ff990d923aa43c51c0d9013cd572e13bc58f4ae48f2ed8c0b0ba881')
def test_pkcs1pad(self):
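# PKCS#1 v1.5 block type 2: a [0, 2] header, random filler, a zero separator, then the data; a target length that is too small raises ValueError.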
data = [1, 2, 3]
padded_data = pkcs1pad(data, 32)
self.assertEqual(padded_data[:2], [0, 2])
self.assertEqual(padded_data[28:], [0, 1, 2, 3])
self.assertRaises(ValueError, pkcs1pad, data, 8)
def test_encode_base_n(self):
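# The base must not exceed the length of the digit table (default or custom), otherwise ValueError is raised.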
self.assertEqual(encode_base_n(0, 30), '0')
self.assertEqual(encode_base_n(80, 30), '2k')
custom_table = '9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA'
self.assertEqual(encode_base_n(0, 30, custom_table), '9')
self.assertEqual(encode_base_n(80, 30, custom_table), '7P')
self.assertRaises(ValueError, encode_base_n, 0, 70)
self.assertRaises(ValueError, encode_base_n, 0, 60, custom_table)
def test_caesar(self):
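# caesar() only shifts characters present in the given alphabet; anything else ('xyz' here) passes through unchanged.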
self.assertEqual(caesar('ace', 'abcdef', 2), 'cea')
self.assertEqual(caesar('cea', 'abcdef', -2), 'ace')
self.assertEqual(caesar('ace', 'abcdef', -2), 'eac')
self.assertEqual(caesar('eac', 'abcdef', 2), 'ace')
self.assertEqual(caesar('ace', 'abcdef', 0), 'ace')
self.assertEqual(caesar('xyz', 'abcdef', 2), 'xyz')
self.assertEqual(caesar('abc', 'acegik', 2), 'ebg')
self.assertEqual(caesar('ebg', 'acegik', -2), 'abc')
def test_rot47(self):
self.assertEqual(rot47('yt-dlp'), r'JE\5=A')
self.assertEqual(rot47('YT-DLP'), r'*%\s{!')
def test_urshift(self):
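# urshift() emulates JavaScript's unsigned 32-bit right shift (>>>).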
self.assertEqual(urshift(3, 1), 1)
self.assertEqual(urshift(-3, 1), 2147483646)
GET_ELEMENT_BY_CLASS_TEST_STRING = '''
<span class="foo bar">nice</span>
'''
def test_get_element_by_class(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_by_class('foo', html), 'nice')
self.assertEqual(get_element_by_class('no-such-class', html), None)
def test_get_element_html_by_class(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_html_by_class('foo', html), html.strip())
self.assertEqual(get_element_by_class('no-such-class', html), None)
GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING = '''
<div itemprop="author" itemscope>foo</div>
'''
def test_get_element_by_attribute(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_by_attribute('class', 'foo bar', html), 'nice')
self.assertEqual(get_element_by_attribute('class', 'foo', html), None)
self.assertEqual(get_element_by_attribute('class', 'no-such-foo', html), None)
html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
self.assertEqual(get_element_by_attribute('itemprop', 'author', html), 'foo')
def test_get_element_html_by_attribute(self):
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
self.assertEqual(get_element_html_by_attribute('class', 'foo bar', html), html.strip())
self.assertEqual(get_element_html_by_attribute('class', 'foo', html), None)
self.assertEqual(get_element_html_by_attribute('class', 'no-such-foo', html), None)
html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
self.assertEqual(get_element_html_by_attribute('itemprop', 'author', html), html.strip())
GET_ELEMENTS_BY_CLASS_TEST_STRING = '''
<span class="foo bar">nice</span><span class="foo bar">also nice</span>
'''
GET_ELEMENTS_BY_CLASS_RES = ['<span class="foo bar">nice</span>', '<span class="foo bar">also nice</span>']
def test_get_elements_by_class(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_by_class('foo', html), ['nice', 'also nice'])
self.assertEqual(get_elements_by_class('no-such-class', html), [])
def test_get_elements_html_by_class(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_html_by_class('foo', html), self.GET_ELEMENTS_BY_CLASS_RES)
self.assertEqual(get_elements_html_by_class('no-such-class', html), [])
def test_get_elements_by_attribute(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_by_attribute('class', 'foo bar', html), ['nice', 'also nice'])
self.assertEqual(get_elements_by_attribute('class', 'foo', html), [])
self.assertEqual(get_elements_by_attribute('class', 'no-such-foo', html), [])
def test_get_elements_html_by_attribute(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(get_elements_html_by_attribute('class', 'foo bar', html), self.GET_ELEMENTS_BY_CLASS_RES)
self.assertEqual(get_elements_html_by_attribute('class', 'foo', html), [])
self.assertEqual(get_elements_html_by_attribute('class', 'no-such-foo', html), [])
def test_get_elements_text_and_html_by_attribute(self):
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
self.assertEqual(
list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
GET_ELEMENT_BY_TAG_TEST_STRING = '''
random text lorem ipsum</p>
<div>
this should be returned
<span>this should also be returned</span>
<div>
this should also be returned
</div>
closing tag above should not trick, so this should also be returned
</div>
but this text should not be returned
'''
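# The slices below carve the outer <div>...</div> and the inner <span>...</span> (and their text content) out of the stripped test string.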
GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[32:276]
GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT = GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML[5:-6]
GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[78:119]
GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT = GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML[6:-7]
def test_get_element_text_and_html_by_tag(self):
html = self.GET_ELEMENT_BY_TAG_TEST_STRING
self.assertEqual(
get_element_text_and_html_by_tag('div', html),
(self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT, self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML))
self.assertEqual(
get_element_text_and_html_by_tag('span', html),
(self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT, self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML))
self.assertRaises(compat_HTMLParseError, get_element_text_and_html_by_tag, 'article', html)
def test_iri_to_uri(self):
self.assertEqual(
iri_to_uri('https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b'),
'https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b') # Same
self.assertEqual(
iri_to_uri('https://www.google.com/search?q=Käsesoßenrührlöffel'), # German for cheese sauce stirring spoon
'https://www.google.com/search?q=K%C3%A4seso%C3%9Fenr%C3%BChrl%C3%B6ffel')
self.assertEqual(
iri_to_uri('https://www.google.com/search?q=lt<+gt>+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~'),
'https://www.google.com/search?q=lt%3C+gt%3E+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~')
self.assertEqual(
iri_to_uri('http://правозащита38.рф/category/news/'),
'http://xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
self.assertEqual(
iri_to_uri('http://www.правозащита38.рф/category/news/'),
'http://www.xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
self.assertEqual(
iri_to_uri('https://i❤.ws/emojidomain/👍👏🤝💪'),
'https://xn--i-7iq.ws/emojidomain/%F0%9F%91%8D%F0%9F%91%8F%F0%9F%A4%9D%F0%9F%92%AA')
self.assertEqual(
iri_to_uri('http://日本語.jp/'),
'http://xn--wgv71a119e.jp/')
self.assertEqual(
iri_to_uri('http://导航.中国/'),
'http://xn--fet810g.xn--fiqs8s/')
def test_clean_podcast_url(self):
self.assertEqual(clean_podcast_url('https://www.podtrac.com/pts/redirect.mp3/chtbl.com/track/5899E/traffic.megaphone.fm/HSW7835899191.mp3'), 'https://traffic.megaphone.fm/HSW7835899191.mp3')
self.assertEqual(clean_podcast_url('https://play.podtrac.com/npr-344098539/edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3'), 'https://edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3')
def test_LazyList(self):
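# LazyList lazily consumes an iterable while caching items, so it supports len(), indexing, slicing and reversal like a plain list.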
it = list(range(10))
self.assertEqual(list(LazyList(it)), it)
self.assertEqual(LazyList(it).exhaust(), it)
self.assertEqual(LazyList(it)[5], it[5])
self.assertEqual(LazyList(it)[5:], it[5:])
self.assertEqual(LazyList(it)[:5], it[:5])
self.assertEqual(LazyList(it)[::2], it[::2])
self.assertEqual(LazyList(it)[1::2], it[1::2])
self.assertEqual(LazyList(it)[5::-1], it[5::-1])
self.assertEqual(LazyList(it)[6:2:-2], it[6:2:-2])
self.assertEqual(LazyList(it)[::-1], it[::-1])
self.assertTrue(LazyList(it))
self.assertFalse(LazyList(range(0)))
self.assertEqual(len(LazyList(it)), len(it))
self.assertEqual(repr(LazyList(it)), repr(it))
self.assertEqual(str(LazyList(it)), str(it))
self.assertEqual(list(LazyList(it, reverse=True)), it[::-1])
self.assertEqual(list(reversed(LazyList(it))[::-1]), it)
self.assertEqual(list(reversed(LazyList(it))[1:3:7]), it[::-1][1:3:7])
def test_LazyList_laziness(self):
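# Peek at the name-mangled private cache (_LazyList__cache) to verify how much of the source iterable has actually been consumed.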
def test(ll, idx, val, cache):
self.assertEqual(ll[idx], val)
self.assertEqual(getattr(ll, '_LazyList__cache'), list(cache))
ll = LazyList(range(10))
test(ll, 0, 0, range(1))
test(ll, 5, 5, range(6))
test(ll, -3, 7, range(10))
ll = LazyList(range(10), reverse=True)
test(ll, -1, 0, range(1))
test(ll, 3, 6, range(10))
ll = LazyList(itertools.count())
test(ll, 10, 10, range(11))
ll = reversed(ll)
test(ll, -15, 14, range(15))
def test_format_bytes(self):
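# format_bytes uses binary (1024-based) units with two decimals; values below 1 KiB stay in bytes and anything from 1024 YiB up is still shown in YiB.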
self.assertEqual(format_bytes(0), '0.00B')
self.assertEqual(format_bytes(1000), '1000.00B')
self.assertEqual(format_bytes(1024), '1.00KiB')
self.assertEqual(format_bytes(1024**2), '1.00MiB')
self.assertEqual(format_bytes(1024**3), '1.00GiB')
self.assertEqual(format_bytes(1024**4), '1.00TiB')
self.assertEqual(format_bytes(1024**5), '1.00PiB')
self.assertEqual(format_bytes(1024**6), '1.00EiB')
self.assertEqual(format_bytes(1024**7), '1.00ZiB')
self.assertEqual(format_bytes(1024**8), '1.00YiB')
self.assertEqual(format_bytes(1024**9), '1024.00YiB')
def test_hide_login_info(self):
self.assertEqual(Config.hide_login_info(['-u', 'foo', '-p', 'bar']),
['-u', 'PRIVATE', '-p', 'PRIVATE'])
self.assertEqual(Config.hide_login_info(['-u']), ['-u'])
self.assertEqual(Config.hide_login_info(['-u', 'foo', '-u', 'bar']),
['-u', 'PRIVATE', '-u', 'PRIVATE'])
self.assertEqual(Config.hide_login_info(['--username=foo']),
['--username=PRIVATE'])
def test_locked_file(self):
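# Lock the file in 'w', 'a' and 'r' order: the final read must see both earlier writes, and while a lock is held competing write locks must be refused.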
TEXT = 'test_locked_file\n'
FILE = 'test_locked_file.ytdl'
MODES = 'war' # Order is important
try:
for lock_mode in MODES:
with locked_file(FILE, lock_mode, False) as f:
if lock_mode == 'r':
self.assertEqual(f.read(), TEXT * 2, 'Wrong file content')
else:
f.write(TEXT)
for test_mode in MODES:
testing_write = test_mode != 'r'
try:
with locked_file(FILE, test_mode, False):
pass
except (BlockingIOError, PermissionError):
if not testing_write: # FIXME
print(f'Known issue: Exclusive lock ({lock_mode}) blocks read access ({test_mode})')
continue
self.assertTrue(testing_write, f'{test_mode} is blocked by {lock_mode}')
else:
self.assertFalse(testing_write, f'{test_mode} is not blocked by {lock_mode}')
finally:
try:
os.remove(FILE)
except Exception:
pass
if __name__ == '__main__':
unittest.main()