repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cherrypy/cheroot | bin/pip_constraint_helpers.py | 1 | 2980 | """A set of functions helping generating pip constraint files."""
import functools
import os
import platform
import subprocess # noqa: S404
import sys
# Mapping of Python implementation names (as returned by
# ``platform.python_implementation()``, lowercased) to the short
# interpreter prefixes used in PEP 425-style Python tags.
PYTHON_IMPLEMENTATION_MAP = {  # noqa: WPS407
    'cpython': 'cp',
    'ironpython': 'ip',
    'jython': 'jy',
    'python': 'py',
    'pypy': 'pp',
}
# Name of the currently running interpreter implementation, e.g. 'CPython'.
PYTHON_IMPLEMENTATION = platform.python_implementation()
# Log helper: print diagnostics to stderr so stdout stays clean.
print_info = functools.partial(print, file=sys.stderr)
def get_runtime_python_tag():
    """Identify the Python tag of the current runtime.

    :returns: Python tag, e.g. ``cp39`` for CPython 3.9.
    """
    try:
        impl_name = sys.implementation.name
    except AttributeError:  # Python 2 has no ``sys.implementation``
        impl_name = PYTHON_IMPLEMENTATION.lower()
    # Map the implementation to its short tag prefix; fall back to the
    # raw implementation name for unknown interpreters.
    tag_prefix = PYTHON_IMPLEMENTATION_MAP.get(impl_name, impl_name)
    version_digits = ''.join(
        str(fragment) for fragment in sys.version_info[:2]
    )
    return '{!s}{!s}'.format(tag_prefix, version_digits)
def get_constraint_file_path(req_dir, toxenv, python_tag):
    """Identify the constraints filename for the current environment.

    :param req_dir: Requirements directory.
    :param toxenv: tox testenv.
    :param python_tag: Python tag.
    :returns: Constraints filename for the current environment.
    """
    platform_tag = sys.platform
    machine_tag = platform.machine().lower()
    if toxenv in {'py', 'python'}:
        # Expand the generic env name into a versioned one, e.g.
        # ``py39`` on CPython or ``pypy39`` on PyPy.
        implementation_prefix = 'py' if PYTHON_IMPLEMENTATION == 'PyPy' else ''
        toxenv = '{prefix}py{ver}'.format(
            prefix=implementation_prefix,
            ver=python_tag[2:],
        )
    if platform_tag == 'linux2':  # Python 2 reported Linux as ``linux2``
        platform_tag = 'linux'
    constraint_name = 'tox-{}-{}-{}-{}'.format(
        toxenv, python_tag, platform_tag, machine_tag,
    )
    file_name = os.path.extsep.join((constraint_name, 'txt'))
    return os.path.join(req_dir, file_name)
def make_pip_cmd(pip_args, constraint_file_path):
    """Inject a lockfile constraint into the pip command if present.

    :param pip_args: pip arguments.
    :param constraint_file_path: Path to a ``constraints.txt``-compatible file.
    :returns: pip command.
    """
    pip_cmd = [sys.executable, '-m', 'pip']
    pip_cmd.extend(pip_args)
    if not os.path.isfile(constraint_file_path):
        # No pinned constraints for this env: warn, but run pip anyway.
        print_info(
            'WARNING: The expected pinned constraints file for the current '
            'env does not exist (should be "{constraint_file_path}").'.
            format(constraint_file_path=constraint_file_path),
        )
        return pip_cmd
    return pip_cmd + ['--constraint', constraint_file_path]
def run_cmd(cmd):
    """Invoke a shell command after logging it.

    :param cmd: The command to invoke.
    """
    printable_cmd = ' '.join(cmd)
    print_info(
        'Invoking the following command: {cmd}'.format(cmd=printable_cmd),
    )
    # Raises ``CalledProcessError`` on a non-zero exit code.
    subprocess.check_call(cmd)  # noqa: S603
| bsd-3-clause | c069125bbf4f5a3154ce2d65438a050a | 27.932039 | 79 | 0.63255 | 3.599034 | false | false | false | false |
cherrypy/cheroot | cheroot/ssl/pyopenssl.py | 1 | 13231 | """
A library for integrating :doc:`pyOpenSSL <pyopenssl:index>` with Cheroot.
The :py:mod:`OpenSSL <pyopenssl:OpenSSL>` module must be importable
for SSL/TLS/HTTPS functionality.
You can obtain it from `here <https://github.com/pyca/pyopenssl>`_.
To use this module, set :py:attr:`HTTPServer.ssl_adapter
<cheroot.server.HTTPServer.ssl_adapter>` to an instance of
:py:class:`ssl.Adapter <cheroot.ssl.Adapter>`.
There are two ways to use :abbr:`TLS (Transport-Level Security)`:
Method One
----------
* :py:attr:`ssl_adapter.context
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.context>`: an instance of
:py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`.
If this is not None, it is assumed to be an :py:class:`SSL.Context
<pyopenssl:OpenSSL.SSL.Context>` instance, and will be passed to
:py:class:`SSL.Connection <pyopenssl:OpenSSL.SSL.Connection>` on bind().
The developer is responsible for forming a valid :py:class:`Context
<pyopenssl:OpenSSL.SSL.Context>` object. This
approach is to be preferred for more flexibility, e.g. if the cert and
key are streams instead of files, or need decryption, or
:py:data:`SSL.SSLv3_METHOD <pyopenssl:OpenSSL.SSL.SSLv3_METHOD>`
is desired instead of the default :py:data:`SSL.SSLv23_METHOD
<pyopenssl:OpenSSL.SSL.SSLv3_METHOD>`, etc. Consult
the :doc:`pyOpenSSL <pyopenssl:api/ssl>` documentation for
complete options.
Method Two (shortcut)
---------------------
* :py:attr:`ssl_adapter.certificate
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.certificate>`: the file name
of the server's TLS certificate.
* :py:attr:`ssl_adapter.private_key
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.private_key>`: the file name
of the server's private key file.
Both are :py:data:`None` by default. If :py:attr:`ssl_adapter.context
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.context>` is :py:data:`None`,
but ``.private_key`` and ``.certificate`` are both given and valid, they
will be read, and the context will be automatically created from them.
.. spelling::
pyopenssl
"""
import socket
import sys
import threading
import time
try:
import OpenSSL.version
from OpenSSL import SSL
from OpenSSL import crypto
try:
ssl_conn_type = SSL.Connection
except AttributeError:
ssl_conn_type = SSL.ConnectionType
except ImportError:
SSL = None
from . import Adapter
from .. import errors, server as cheroot_server
from ..makefile import StreamReader, StreamWriter
class SSLFileobjectMixin:
    """Base mixin for a TLS socket stream."""

    # Give up on a blocked TLS operation after this many seconds.
    ssl_timeout = 3
    # Delay (seconds) between retries while OpenSSL wants more I/O.
    ssl_retry = .01

    # FIXME:
    def _safe_call(self, is_reader, call, *args, **kwargs):  # noqa: C901
        """Wrap the given call with TLS error-trapping.

        is_reader: if False EOF errors will be raised. If True, EOF errors
        will return "" (to emulate normal sockets).
        """
        start = time.time()
        while True:
            try:
                return call(*args, **kwargs)
            except SSL.WantReadError:
                # Sleep and try again. This is dangerous, because it means
                # the rest of the stack has no way of differentiating
                # between a "new handshake" error and "client dropped".
                # Note this isn't an endless loop: there's a timeout below.
                # Ref: https://stackoverflow.com/a/5133568/595220
                time.sleep(self.ssl_retry)
            except SSL.WantWriteError:
                time.sleep(self.ssl_retry)
            except SSL.SysCallError as e:
                # OS-level failure under the TLS layer.
                if is_reader and e.args == (-1, 'Unexpected EOF'):
                    return b''
                errnum = e.args[0]
                if is_reader and errnum in errors.socket_errors_to_ignore:
                    return b''
                raise socket.error(errnum)
            except SSL.Error as e:
                # Protocol-level TLS failure.
                if is_reader and e.args == (-1, 'Unexpected EOF'):
                    return b''
                thirdarg = None
                try:
                    thirdarg = e.args[0][0][2]
                except IndexError:
                    pass
                if thirdarg == 'http request':
                    # The client is talking HTTP to an HTTPS server.
                    raise errors.NoSSLError()
                raise errors.FatalSSLAlert(*e.args)
            # Checked once per retry iteration; bounds the WantRead/Write
            # retry loop above.
            if time.time() - start > self.ssl_timeout:
                raise socket.timeout('timed out')

    def recv(self, size):
        """Receive message of a size from the socket."""
        return self._safe_call(
            True,
            super(SSLFileobjectMixin, self).recv,
            size,
        )

    def readline(self, size=-1):
        """Receive message of a size from the socket.

        Matches the following interface:
        https://docs.python.org/3/library/io.html#io.IOBase.readline
        """
        return self._safe_call(
            True,
            super(SSLFileobjectMixin, self).readline,
            size,
        )

    def sendall(self, *args, **kwargs):
        """Send whole message to the socket."""
        return self._safe_call(
            False,
            super(SSLFileobjectMixin, self).sendall,
            *args, **kwargs
        )

    def send(self, *args, **kwargs):
        """Send some part of message to the socket."""
        return self._safe_call(
            False,
            super(SSLFileobjectMixin, self).send,
            *args, **kwargs
        )
# Read side: combines the TLS error-trapping mixin with the buffered reader.
class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader):
    """SSL file object attached to a socket object."""
# Write side: combines the TLS error-trapping mixin with the buffered writer.
class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter):
    """SSL file object attached to a socket object."""
class SSLConnectionProxyMeta:
    """Metaclass for generating a bunch of proxy methods."""

    def __new__(mcl, name, bases, nmspc):
        """Attach a list of proxy methods to a new class."""
        # Methods of the wrapped ``SSL.Connection`` exposed on the
        # generated class; every call is serialized through ``self._lock``.
        proxy_methods = (
            'get_context', 'pending', 'send', 'write', 'recv', 'read',
            'renegotiate', 'bind', 'listen', 'connect', 'accept',
            'setblocking', 'fileno', 'close', 'get_cipher_list',
            'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
            'makefile', 'get_app_data', 'set_app_data', 'state_string',
            'sock_shutdown', 'get_peer_certificate', 'want_read',
            'want_write', 'set_connect_state', 'set_accept_state',
            'connect_ex', 'sendall', 'settimeout', 'gettimeout',
            'shutdown',
        )
        # Proxied methods whose positional arguments must be dropped
        # before delegating to the underlying connection.
        proxy_methods_no_args = (
            'shutdown',
        )
        # Attributes of the wrapped connection exposed as read-only
        # properties on the generated class.
        proxy_props = (
            'family',
        )

        def lock_decorator(method):
            """Create a proxy method for a new class."""
            def proxy_wrapper(self, *args):
                self._lock.acquire()
                try:
                    new_args = (
                        args[:] if method not in proxy_methods_no_args else []
                    )
                    return getattr(self._ssl_conn, method)(*new_args)
                finally:
                    self._lock.release()
            return proxy_wrapper
        for m in proxy_methods:
            nmspc[m] = lock_decorator(m)
            nmspc[m].__name__ = m

        def make_property(property_):
            """Create a proxy method for a new class."""
            def proxy_prop_wrapper(self):
                return getattr(self._ssl_conn, property_)
            proxy_prop_wrapper.__name__ = property_
            return property(proxy_prop_wrapper)
        for p in proxy_props:
            nmspc[p] = make_property(p)

        # Doesn't work via super() for some reason.
        # Falling back to type() instead:
        return type(name, bases, nmspc)
class SSLConnection(metaclass=SSLConnectionProxyMeta):
    r"""A thread-safe wrapper for an ``SSL.Connection``.

    :param tuple args: the arguments to create the wrapped \
        :py:class:`SSL.Connection(*args) \
        <pyopenssl:OpenSSL.SSL.Connection>`
    """

    def __init__(self, *args):
        """Initialize SSLConnection instance."""
        self._ssl_conn = SSL.Connection(*args)
        # Reentrant lock serializing all proxied calls (see the metaclass).
        self._lock = threading.RLock()
class pyOpenSSLAdapter(Adapter):
    """A wrapper for integrating pyOpenSSL with Cheroot."""

    certificate = None
    """The file name of the server's TLS certificate."""

    private_key = None
    """The file name of the server's private key file."""

    certificate_chain = None
    """Optional. The file name of CA's intermediate certificate bundle.

    This is needed for cheaper "chained root" TLS certificates,
    and should be left as :py:data:`None` if not required."""

    context = None
    """
    An instance of :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`.
    """

    ciphers = None
    """The ciphers list of TLS."""

    def __init__(
            self, certificate, private_key, certificate_chain=None,
            ciphers=None,
    ):
        """Initialize OpenSSL Adapter instance."""
        if SSL is None:
            raise ImportError('You must install pyOpenSSL to use HTTPS.')
        super(pyOpenSSLAdapter, self).__init__(
            certificate, private_key, certificate_chain, ciphers,
        )
        # WSGI environ entries computed once at bind() and merged into
        # each request (see wrap()).
        self._environ = None

    def bind(self, sock):
        """Wrap and return the given socket."""
        if self.context is None:
            self.context = self.get_context()
        conn = SSLConnection(self.context, sock)
        self._environ = self.get_environ()
        return conn

    def wrap(self, sock):
        """Wrap and return the given socket, plus WSGI environ entries."""
        # pyOpenSSL doesn't perform the handshake until the first read/write
        # forcing the handshake to complete tends to result in the connection
        # closing so we can't reliably access protocol/client cert for the env
        return sock, self._environ.copy()

    def get_context(self):
        """Return an ``SSL.Context`` from self attributes.

        Ref: :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`
        """
        # See https://code.activestate.com/recipes/442473/
        c = SSL.Context(SSL.SSLv23_METHOD)
        c.use_privatekey_file(self.private_key)
        if self.certificate_chain:
            c.load_verify_locations(self.certificate_chain)
        c.use_certificate_file(self.certificate)
        return c

    def get_environ(self):
        """Return WSGI environ entries to be merged into each request."""
        ssl_environ = {
            'wsgi.url_scheme': 'https',
            'HTTPS': 'on',
            'SSL_VERSION_INTERFACE': '%s %s/%s Python/%s' % (
                cheroot_server.HTTPServer.version,
                OpenSSL.version.__title__, OpenSSL.version.__version__,
                sys.version,
            ),
            'SSL_VERSION_LIBRARY': SSL.SSLeay_version(
                SSL.SSLEAY_VERSION,
            ).decode(),
        }
        if self.certificate:
            # Server certificate attributes
            with open(self.certificate, 'rb') as cert_file:
                cert = crypto.load_certificate(
                    crypto.FILETYPE_PEM, cert_file.read(),
                )
            ssl_environ.update({
                'SSL_SERVER_M_VERSION': cert.get_version(),
                'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
                # 'SSL_SERVER_V_START':
                #   Validity of server's certificate (start time),
                # 'SSL_SERVER_V_END':
                #   Validity of server's certificate (end time),
            })
            for prefix, dn in [
                    ('I', cert.get_issuer()),
                    ('S', cert.get_subject()),
            ]:
                # X509Name objects don't seem to have a way to get the
                # complete DN string. Use str() and slice it instead,
                # because str(dn) == "<X509Name object '/C=US/ST=...'>"
                dnstr = str(dn)[18:-2]
                wsgikey = 'SSL_SERVER_%s_DN' % prefix
                ssl_environ[wsgikey] = dnstr
                # The DN should be of the form: /k1=v1/k2=v2, but we must
                # allow for any value to contain slashes itself (in a URL).
                # Parse right-to-left so embedded slashes in values survive.
                while dnstr:
                    pos = dnstr.rfind('=')
                    dnstr, value = dnstr[:pos], dnstr[pos + 1:]
                    pos = dnstr.rfind('/')
                    dnstr, key = dnstr[:pos], dnstr[pos + 1:]
                    if key and value:
                        wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
                        ssl_environ[wsgikey] = value
        return ssl_environ

    def makefile(self, sock, mode='r', bufsize=-1):
        """Return socket file object."""
        cls = (
            SSLFileobjectStreamReader
            if 'r' in mode else
            SSLFileobjectStreamWriter
        )
        if SSL and isinstance(sock, ssl_conn_type):
            wrapped_socket = cls(sock, mode, bufsize)
            wrapped_socket.ssl_timeout = sock.gettimeout()
            return wrapped_socket
        # This is from past:
        # TODO: figure out what it's meant for
        else:
            return cheroot_server.CP_fileobject(sock, mode, bufsize)
| bsd-3-clause | af91f6309e4d71a1ea6194640e8a57f6 | 34.18883 | 79 | 0.574862 | 4.151553 | false | false | false | false |
encode/apistar | tests/test_cli.py | 2 | 5166 | import json
import os
from click.testing import CliRunner
from starlette.applications import Starlette
from starlette.responses import JSONResponse
from starlette.testclient import TestClient
from apistar.cli import cli
def test_valid_document(tmpdir):
    """Validation succeeds for a minimal well-formed OpenAPI document."""
    schema_path = os.path.join(tmpdir, "schema.json")
    minimal_document = {
        "openapi": "3.0.0",
        "info": {"title": "", "version": ""},
        "paths": {},
    }
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(minimal_document))
    outcome = CliRunner().invoke(
        cli, ["validate", "--path", schema_path, "--format", "openapi"]
    )
    assert outcome.exit_code == 0
    assert outcome.output == "✓ Valid OpenAPI schema.\n"
def test_invalid_document(tmpdir):
    """Validation reports each schema violation and exits non-zero."""
    schema_path = os.path.join(tmpdir, "schema.json")
    incomplete_document = {"openapi": "3.0.0", "info": {"version": ""}}
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(incomplete_document))
    outcome = CliRunner().invoke(
        cli, ["validate", "--path", schema_path, "--format", "openapi"]
    )
    assert outcome.exit_code != 0
    assert outcome.output == (
        "* The field 'paths' is required. (At line 1, column 1.)\n"
        "* The field 'title' is required. (At ['info'], line 1, column 30.)\n"
        "✘ Invalid OpenAPI schema.\n"
    )
def test_invalid_document_verbose(tmpdir):
    """Verbose validation annotates the document inline before failing."""
    schema_path = os.path.join(tmpdir, "schema.json")
    incomplete_document = {"openapi": "3.0.0", "info": {"version": ""}}
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(incomplete_document))
    outcome = CliRunner().invoke(
        cli,
        ["validate", "--path", schema_path, "--format", "openapi", "--verbose"],
    )
    assert outcome.exit_code != 0
    assert outcome.output == (
        '{"openapi": "3.0.0", "info": {"version": ""}}\n'
        "^ The field 'paths' is required.\n"
        " ^ The field 'title' is required.\n"
        "\n"
        "✘ Invalid OpenAPI schema.\n"
    )
def test_docs(tmpdir):
    """The docs command renders static documentation to the output dir."""
    schema_path = os.path.join(tmpdir, "schema.json")
    build_dir = os.path.join(tmpdir, "build")
    index_path = os.path.join(build_dir, "index.html")
    minimal_document = {
        "openapi": "3.0.0",
        "info": {"title": "", "version": ""},
        "paths": {},
    }
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(minimal_document))
    outcome = CliRunner().invoke(
        cli,
        [
            "docs",
            "--path",
            schema_path,
            "--format",
            "openapi",
            "--output-dir",
            build_dir,
        ],
    )
    assert outcome.exit_code == 0
    assert outcome.output == '✓ Documentation built at "%s".\n' % index_path
# Minimal ASGI application used as the target service for the
# ``request`` CLI tests below.
app = Starlette()


@app.route("/homepage")
def homepage(request):
    """Return a trivial success payload."""
    return JSONResponse({"hello": "world"})


@app.route("/error")
def error(request):
    """Return a client-error payload with a 400 status code."""
    return JSONResponse({"error": "something failed"}, status_code=400)
def test_request(tmpdir):
    """An operation can be invoked by id against the schema's server."""
    schema_path = os.path.join(tmpdir, "schema.json")
    document = {
        "openapi": "3.0.0",
        "info": {"title": "", "version": ""},
        "servers": [{"url": "https://testserver"}],
        "paths": {"/homepage": {"get": {"operationId": "example"}}},
    }
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(document))
    outcome = CliRunner().invoke(
        cli,
        ["request", "--path", schema_path, "example"],
        obj=TestClient(app),
    )
    assert outcome.exit_code == 0
    assert outcome.output == '{\n "hello": "world"\n}\n'
def test_request_verbose(tmpdir):
    """Verbose mode echoes the raw HTTP exchange alongside the body."""
    schema_path = os.path.join(tmpdir, "schema.json")
    document = {
        "openapi": "3.0.0",
        "info": {"title": "", "version": ""},
        "servers": [{"url": "https://testserver"}],
        "paths": {"/homepage": {"get": {"operationId": "example"}}},
    }
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(document))
    outcome = CliRunner().invoke(
        cli,
        ["request", "--path", schema_path, "--verbose", "example"],
        obj=TestClient(app),
    )
    assert outcome.exit_code == 0
    assert "> GET /homepage HTTP/1.1" in outcome.output
    assert "< 200 OK" in outcome.output
    assert '{\n "hello": "world"\n}\n' in outcome.output
def test_request_error(tmpdir):
    """A 4xx response is echoed and produces a non-zero exit code."""
    schema_path = os.path.join(tmpdir, "schema.json")
    document = {
        "openapi": "3.0.0",
        "info": {"title": "", "version": ""},
        "servers": [{"url": "https://testserver"}],
        "paths": {"/error": {"get": {"operationId": "example"}}},
    }
    with open(schema_path, "w") as handle:
        handle.write(json.dumps(document))
    outcome = CliRunner().invoke(
        cli,
        ["request", "--path", schema_path, "example"],
        obj=TestClient(app),
    )
    assert outcome.exit_code != 0
    assert outcome.output == '{\n "error": "something failed"\n}\n✘ 400 Bad Request\n'
| bsd-3-clause | b6bafa23fc5ce4aa9c32af320e84dd82 | 31.225 | 88 | 0.534911 | 3.590529 | false | true | false | false |
encode/apistar | apistar/compat.py | 2 | 1517 | import collections
import sys
# Jinja2 is an optional dependency: template rendering is only
# available when it is installed; otherwise the name is set to None.
try:
    import jinja2
except ImportError:
    jinja2 = None
# Pygments is an optional dependency used for syntax highlighting.
try:
    import pygments
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    def pygments_highlight(text, lang, style):
        """Highlight ``text`` as ``lang`` source and return HTML markup."""
        lexer = get_lexer_by_name(lang, stripall=False)
        formatter = HtmlFormatter(nowrap=True, style=style)
        return pygments.highlight(text, lexer, formatter)

    def pygments_css(style):
        """Return the CSS rules for the given Pygments ``style``."""
        formatter = HtmlFormatter(style=style)
        return formatter.get_style_defs(".highlight")
except ImportError:
    # Fallbacks when Pygments is missing: return the text unhighlighted
    # and no stylesheet, keeping the public API identical.
    pygments = None

    def pygments_highlight(text, lang, style):
        return text

    def pygments_css(style):
        return None
try:
    # Ideally we subclass `_TemporaryFileWrapper` to present a clear __repr__
    # for downloaded files.
    from tempfile import _TemporaryFileWrapper

    class DownloadedFile(_TemporaryFileWrapper):
        # Suggested filename for the download, if known (set by callers).
        basename = None

        def __repr__(self):
            state = "closed" if self.closed else "open"
            # The underlying file's mode is unavailable once closed.
            mode = "" if self.closed else " '%s'" % self.file.mode
            return "<DownloadedFile '%s', %s%s>" % (self.name, state, mode)

        def __str__(self):
            return self.__repr__()
except ImportError:
    # On some platforms (eg GAE) the private _TemporaryFileWrapper may not be
    # available, just use the standard `NamedTemporaryFile` function
    # in this case.
    import tempfile

    DownloadedFile = tempfile.NamedTemporaryFile
| bsd-3-clause | 10824da1426830e264e0f52802ee4f80 | 24.711864 | 77 | 0.659855 | 4.334286 | false | false | false | false |
dyninc/dyn-python | dyn/tm/tools.py | 2 | 3444 | # -*- coding: utf-8 -*-
"""The tools module is designed to be able to assist users in some of the more
common or complicated tasks one will likely find themselves needing to
accomplish via the DynECT API
"""
from dyn.compat import string_types
__author__ = 'jnappi'
def change_ip(zone, from_ip, to, v6=False, publish=False):
    """Change all occurrences of an ip address to a new ip address under the
    specified zone

    :param zone: The :class:`~dyn.tm.zones.Zone` you wish to update ips for
    :param from_ip: Either a list of ip addresses or a single ip address that
        you want updated
    :param to: Either a list of ip addresses or a single ip address that will
        overwrite from_ip
    :param v6: Boolean flag to specify if we're replacing ipv4 or ipv6
        addresses (ie, whether we're updating an ARecord or AAAARecord)
    :param publish: A boolean flag denoting whether or not to publish changes
        after making them. You can optionally leave this as *False* and process
        the returned changeset prior to publishing your changes.
    :returns: A list of tuples of the form (fqdn, old, new) where fqdn is
        the fqdn of the record that was updated, old was the old ip address,
        and new is the new ip address.
    """
    records = zone.get_all_records()
    records = records['aaaa_records'] if v6 else records['a_records']
    changeset = []
    changed = False

    def update_single_ip(f, t):
        # Rewrite every record whose address matches *f*; report whether
        # any record was actually touched.
        l_changed = False
        for rrset in records:
            if rrset.address == f:
                fqdn, orig = rrset.fqdn, rrset.address
                rrset.address = t
                changeset.append((fqdn, orig, t))
                l_changed = True
        return l_changed

    if isinstance(from_ip, string_types):
        from_ip, to = [from_ip], [to]
    for index, ip in enumerate(from_ip):
        if update_single_ip(ip, to[index]):
            # Bug fix: this previously set ``publish = True``, which both
            # clobbered the caller's *publish* flag and left ``changed``
            # False, so ``zone.publish()`` below could never run.
            changed = True
    # If we made changes, publish the zone
    if publish and changed:
        zone.publish()
    return changeset
def map_ips(zone, mapping, v6=False, publish=False):
    """Replace ip addresses throughout the specified zone per *mapping*.

    :param zone: The :class:`~dyn.tm.zones.Zone` you wish to update ips for
    :param mapping: A *dict* of the form {'old_ip': 'new_ip'}
    :param v6: Boolean flag to specify if we're replacing ipv4 or ipv6
        addresses (ie, whether we're updating an ARecord or AAAARecord)
    :param publish: A boolean flag denoting whether or not to publish changes
        after making them. You can optionally leave this as *False* and
        process the returned changeset prior to publishing your changes.
    :returns: A list of tuples of the form (fqdn, old, new) where fqdn is
        the fqdn of the record that was updated, old was the old ip address,
        and new is the new ip address.
    """
    all_records = zone.get_all_records()
    record_key = 'aaaa_records' if v6 else 'a_records'
    candidates = all_records[record_key]
    changeset = []
    anything_changed = False
    for old_address, new_address in mapping.items():
        for rec in candidates:
            if rec.address != old_address:
                continue
            changeset.append((rec.fqdn, rec.address, new_address))
            rec.address = new_address
            anything_changed = True
    # Publishing is opt-in and only happens if something was rewritten.
    if publish and anything_changed:
        zone.publish()
    return changeset
| bsd-3-clause | a17bf8a224ddbe22fcc7ec4b7949bf98 | 38.136364 | 79 | 0.64547 | 4.051765 | false | false | false | false |
dyninc/dyn-python | dyn/tm/accounts.py | 2 | 66135 | # -*- coding: utf-8 -*-
"""This module contains interfaces for all Account management features of the
REST API
"""
from dyn.tm.errors import DynectInvalidArgumentError
from dyn.tm.session import DynectSession
from dyn.compat import force_unicode
import re
__author__ = 'jnappi'
__all__ = ['get_updateusers', 'get_users', 'get_permissions_groups',
'get_contacts', 'get_notifiers', 'UpdateUser', 'User',
'PermissionsGroup', 'UserZone', 'Notifier', 'Contact']
def get_updateusers(search=None):
    """Return a ``list`` of :class:`~dyn.tm.accounts.UpdateUser` objects.

    If *search* is specified, then only
    :class:`~dyn.tm.accounts.UpdateUsers` who match those search criteria
    will be returned in the list. Otherwise, all
    :class:`~dyn.tm.accounts.UpdateUsers`'s will be returned.

    :param search: A ``dict`` of search criteria. Key's in this ``dict`` much
        map to an attribute a :class:`~dyn.tm.accounts.UpdateUsers` instance
        and the value mapped to by that key will be used as the search
        criteria for that key when searching.
    :return: a ``list`` of :class:`~dyn.tm.accounts.UpdateUser` objects
    """
    response = DynectSession.get_session().execute(
        '/UpdateUser/', 'GET', {'detail': 'Y'},
    )
    update_users = [
        UpdateUser(api=False, **user_data) for user_data in response['data']
    ]
    if search is None:
        return update_users
    matches = []
    for candidate in update_users:
        for attr, wanted in search.items():
            # NOTE: preserves historical behaviour -- one append per
            # matching attribute.
            if hasattr(candidate, attr) and getattr(candidate, attr) == wanted:
                matches.append(candidate)
    return matches
def get_users(search=None):
    """Return a ``list`` of :class:`~dyn.tm.accounts.User` objects. If *search*
    is specified, then only users who match those search parameters will be
    returned in the list. Otherwise, all :class:`~dyn.tm.accounts.User`'s will
    be returned.

    :param search: A ``dict`` of search criteria. Key's in this ``dict`` much
        map to an attribute a :class:`~dyn.tm.accounts.User` instance and the
        value mapped to by that key will be used as the search criteria for
        that key when searching.
    :return: a ``list`` of :class:`~dyn.tm.accounts.User` objects
    """
    uri = '/User/'
    api_args = {'detail': 'Y'}
    if search is not None:
        # Build a server-side search expression: criteria are ANDed
        # together as ``key:"value" AND key:"value" ...``.
        search_string = ''
        for key, val in search.items():
            if search_string != '':
                # Bug fix: the joined string was previously discarded
                # (the expression result was never assigned), so only the
                # first search criterion ever took effect.
                search_string = ' AND '.join(
                    [search_string, '{}:"{}"'.format(key, val)],
                )
            else:
                search_string = '{}:"{}"'.format(key, val)
        api_args['search'] = search_string
    response = DynectSession.get_session().execute(uri, 'GET', api_args)
    users = []
    for user in response['data']:
        # ``user_name`` is passed positionally to User; strip it from the
        # kwargs to avoid a duplicate-argument error.
        user_name = None
        if 'user_name' in user:
            user_name = user['user_name']
            del user['user_name']
        users.append(User(user_name, api=False, **user))
    return users
def get_permissions_groups(search=None):
    """Return a ``list`` of :class:`~dyn.tm.accounts.PermissionGroup` objects.

    If *search* is specified, then only
    :class:`~dyn.tm.accounts.PermissionGroup`'s that match those search
    criteria will be returned in the list. Otherwise, all
    :class:`~dyn.tm.accounts.PermissionGroup`'s will be returned.

    :param search: A ``dict`` of search criteria. Key's in this ``dict`` much
        map to an attribute a :class:`~dyn.tm.accounts.PermissionGroup`
        instance and the value mapped to by that key will be used as the
        search criteria for that key when searching.
    :return: a ``list`` of :class:`~dyn.tm.accounts.PermissionGroup` objects
    """
    response = DynectSession.get_session().execute(
        '/PermissionGroup/', 'GET', {'detail': 'Y'},
    )
    groups = [
        PermissionsGroup(None, api=False, **group_data)
        for group_data in response['data']
    ]
    if search is None:
        return groups
    filtered = []
    for group in groups:
        for attr, wanted in search.items():
            # NOTE: preserves historical behaviour -- one append per
            # matching attribute.
            if hasattr(group, attr) and getattr(group, attr) == wanted:
                filtered.append(group)
    return filtered
def get_contacts(search=None):
    """Return a ``list`` of :class:`~dyn.tm.accounts.Contact` objects.

    If *search* is specified, then only :class:`~dyn.tm.accounts.Contact`'s
    who match those search criteria will be returned in the list. Otherwise,
    all :class:`~dyn.tm.accounts.Contact`'s will be returned.

    :param search: A ``dict`` of search criteria. Key's in this ``dict`` much
        map to an attribute a :class:`~dyn.tm.accounts.Contact` instance and
        the value mapped to by that key will be used as the search criteria
        for that key when searching.
    :return: a ``list`` of :class:`~dyn.tm.accounts.Contact` objects
    """
    response = DynectSession.get_session().execute(
        '/Contact/', 'GET', {'detail': 'Y'},
    )
    contacts = []
    for contact_data in response['data']:
        # The API reports ``nickname``; the Contact object stores it
        # privately as ``_nickname``.
        if 'nickname' in contact_data:
            contact_data['_nickname'] = contact_data.pop('nickname')
        contacts.append(Contact(None, api=False, **contact_data))
    if search is None:
        return contacts
    filtered = []
    for contact in contacts:
        for attr, wanted in search.items():
            # NOTE: preserves historical behaviour -- one append per
            # matching attribute.
            if hasattr(contact, attr) and getattr(contact, attr) == wanted:
                filtered.append(contact)
    return filtered
def get_notifiers(search=None):
    """Return a ``list`` of :class:`~dyn.tm.accounts.Notifier` objects.

    If *search* is specified, then only :class:`~dyn.tm.accounts.Notifier`'s
    who match those search criteria will be returned in the list. Otherwise,
    all :class:`~dyn.tm.accounts.Notifier`'s will be returned.

    :param search: A ``dict`` of search criteria. Key's in this ``dict`` much
        map to an attribute a :class:`~dyn.tm.accounts.Notifier` instance and
        the value mapped to by that key will be used as the search criteria
        for that key when searching.
    :return: a ``list`` of :class:`~dyn.tm.accounts.Notifier` objects
    """
    response = DynectSession.get_session().execute(
        '/Notifier/', 'GET', {'detail': 'Y'},
    )
    notifiers = [
        Notifier(None, api=False, **notifier_data)
        for notifier_data in response['data']
    ]
    if search is None:
        return notifiers
    filtered = []
    for notifier in notifiers:
        for attr, wanted in search.items():
            # NOTE: preserves historical behaviour -- one append per
            # matching attribute.
            if hasattr(notifier, attr) and getattr(notifier, attr) == wanted:
                filtered.append(notifier)
    return filtered
class UpdateUser(object):
    """:class:`~dyn.tm.accounts.UpdateUser` type objects are a special form of
    a :class:`~dyn.tm.accounts.User` which are tied to a specific Dynamic DNS
    services.
    """
    def __init__(self, *args, **kwargs):
        """Create an :class:`~dyn.tm.accounts.UpdateUser` object

        :param user_name: the Username this
            :class:`~dyn.tm.accounts.UpdateUser` uses or will use to log in to
            the DynECT System. A :class:`~dyn.tm.accounts.UpdateUser`'s
            `user_name` is required for both creating and getting
            :class:`~dyn.tm.accounts.UpdateUser`'s.
        :param nickname: When creating a new
            :class:`~dyn.tm.accounts.UpdateUser` on the DynECT System, this
            `nickname` will be the System nickname for this
            :class:`~dyn.tm.accounts.UpdateUser`
        :param password: When creating a new
            :class:`~dyn.tm.accounts.UpdateUser` on the DynECT System, this
            `password` will be the password this
            :class:`~dyn.tm.accounts.UpdateUser` uses to log into the System
        """
        super(UpdateUser, self).__init__()
        self.uri = '/UpdateUser/'
        self._password = self._status = self._user_name = self._nickname = None
        if 'api' in kwargs:
            # Built from an existing API response dict: copy the
            # recognized fields straight onto the private attributes.
            good_args = ('user_name', 'status', 'password')
            for key, val in kwargs.items():
                if key in good_args:
                    setattr(self, '_' + key, val)
            self.uri = '/UpdateUser/{}/'.format(self._user_name)
        elif len(args) + len(kwargs) == 1:
            # Exactly one argument: treat it as a user_name to look up.
            self._get(*args, **kwargs)
        else:
            # Otherwise (nickname, password): create a brand-new user.
            self._post(*args, **kwargs)

    def _post(self, nickname, password):
        """Create a new :class:`~dyn.tm.accounts.UpdateUser` on the DynECT
        System
        """
        self._nickname = nickname
        self._password = password
        uri = '/UpdateUser/'
        api_args = {'nickname': self._nickname,
                    'password': self._password}
        response = DynectSession.get_session().execute(uri, 'POST', api_args)
        self._build(response['data'])
        # The System assigns the user_name, so the per-user URI can only
        # be built after the response has been processed.
        self.uri = '/UpdateUser/{}/'.format(self._user_name)

    def _get(self, user_name):
        """Get an existing :class:`~dyn.tm.accounts.UpdateUser` from the
        DynECT System
        """
        self._user_name = user_name
        self.uri = '/UpdateUser/{}/'.format(self._user_name)
        response = DynectSession.get_session().execute(self.uri, 'GET')
        self._build(response['data'])

    def _build(self, data):
        # Copy every field of an API response onto a ``_``-prefixed
        # private attribute of this instance.
        for key, val in data.items():
            setattr(self, '_' + key, val)

    def _update(self, api_args=None):
        # Issue a PUT with the given arguments and refresh the local
        # state from the response.
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])

    @property
    def user_name(self):
        """This :class:`~dyn.tm.accounts.UpdateUser`'s `user_name`. An
        :class:`~dyn.tm.accounts.UpdateUser`'s user_name is a read-only
        property which can not be updated after the :class:`UpdateUser` has
        been created.
        """
        return self._user_name

    @user_name.setter
    def user_name(self, value):
        # Read-only attribute: assignment is intentionally a no-op.
        pass

    @property
    def nickname(self):
        """This :class:`~dyn.tm.accounts.UpdateUser`s `nickname`. An
        :class:`~dyn.tm.accounts.UpdateUser`'s `nickname` is a read-only
        property which can not be updated after the
        :class:`~dyn.tm.accounts.UpdateUser` has been created.
        """
        return self._nickname

    @nickname.setter
    def nickname(self, value):
        # Read-only attribute: assignment is intentionally a no-op.
        pass

    @property
    def status(self):
        """The current `status` of an :class:`~dyn.tm.accounts.UpdateUser` will
        be one of either 'active' or 'blocked'. Blocked
        :class:`~dyn.tm.accounts.UpdateUser`'s are unable to log into the
        DynECT System, where active :class:`~dyn.tm.accounts.UpdateUser`'s are.
        """
        return self._status

    @status.setter
    def status(self, value):
        # Read-only here; use block()/unblock() to change the status.
        pass

    @property
    def password(self):
        """The current `password` for this
        :class:`~dyn.tm.accounts.UpdateUser`. An
        :class:`~dyn.tm.accounts.UpdateUser`'s `password` may be reassigned.
        """
        # Lazily re-fetch from the API if the cached value is absent.
        if self._password is None or self._password == u'':
            self._get(self._user_name)
        return self._password

    @password.setter
    def password(self, new_password):
        """Update this :class:`~dyn.tm.accounts.UpdateUser`'s password to be
        the provided password

        :param new_password: The new password to use
        """
        api_args = {'password': new_password}
        self._update(api_args)

    def block(self):
        """Set the status of this :class:`~dyn.tm.accounts.UpdateUser` to
        'blocked'. This will prevent this :class:`~dyn.tm.accounts.UpdateUser`
        from logging in until they are explicitly unblocked.
        """
        api_args = {'block': True}
        self._update(api_args)

    def unblock(self):
        """Set the status of this :class:`~dyn.tm.accounts.UpdateUser` to
        'active'. This will re-enable this :class:`~dyn.tm.accounts.UpdateUser`
        to be able to login if they were previously blocked.
        """
        api_args = {'unblock': True}
        self._update(api_args)

    def sync_password(self):
        """Pull in this :class:`~dyn.tm.accounts.UpdateUser` current password
        from the DynECT System, in the unlikely event that this
        :class:`~dyn.tm.accounts.UpdateUser` object's password may have gotten
        out of sync
        """
        api_args = {'user_name': self._user_name}
        self._update(api_args)

    def delete(self):
        """Delete this :class:`~dyn.tm.accounts.UpdateUser` from the DynECT
        System. It is important to note that this operation may not be undone.
        """
        DynectSession.get_session().execute(self.uri, 'DELETE')

    def __str__(self):
        """Custom str method"""
        return force_unicode('<UpdateUser>: {}').format(self.user_name)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override"""
        return bytes(self.__str__())
class User(object):
    """DynECT System User object"""

    def __init__(self, user_name, *args, **kwargs):
        """Create a new :class:`~dyn.tm.accounts.User` object

        :param user_name: This :class:`~dyn.tm.accounts.User`'s system
            username; used for logging into the system
        :param password: Password for this :class:`~dyn.tm.accounts.User`
            account
        :param email: This :class:`~dyn.tm.accounts.User`'s Email address
        :param first_name: This :class:`~dyn.tm.accounts.User`'s first name
        :param last_name: This :class:`~dyn.tm.accounts.User`'s last name
        :param nickname: The nickname for the `Contact` associated with this
            :class:`~dyn.tm.accounts.User`
        :param organization: This :class:`~dyn.tm.accounts.User`'s organization
        :param phone: This :class:`~dyn.tm.accounts.User`'s phone number. Can
            be of the form: (0) ( country-code ) ( local number ) ( extension )
            Only the country-code (1-3 digits) and local number (at least 7
            digits) are required. The extension can be up to 4 digits. Any
            non-digits are ignored.
        :param address: This :class:`~dyn.tm.accounts.User`'s street address
        :param address2: This :class:`~dyn.tm.accounts.User`'s street address,
            line 2
        :param city: This :class:`~dyn.tm.accounts.User`'s city, part of the
            user's address
        :param country: This :class:`~dyn.tm.accounts.User`'s country, part of
            the user's address
        :param fax: This :class:`~dyn.tm.accounts.User`'s fax number
        :param notify_email: Email address where this
            :class:`~dyn.tm.accounts.User` should receive notifications
        :param pager_email: Email address where this
            :class:`~dyn.tm.accounts.User` should receive messages destined
            for a pager
        :param post_code: Zip code or Postal code
        :param group_name: A list of permission groups this
            :class:`~dyn.tm.accounts.User` belongs to
        :param permission: A list of permissions assigned to this
            :class:`~dyn.tm.accounts.User`
        :param zone: A list of zones where this
            :class:`~dyn.tm.accounts.User`'s permissions apply
        :param forbid: A list of forbidden permissions for this
            :class:`~dyn.tm.accounts.User`
        :param status: Current status of this :class:`~dyn.tm.accounts.User`
        :param website: This :class:`~dyn.tm.accounts.User`'s website
        """
        super(User, self).__init__()
        self._user_name = user_name
        self.uri = '/User/{}/'.format(self._user_name)
        self._permission_report_uri = '/UserPermissionReport/'
        self._password = self._email = self._first_name = None
        self._last_name = self._nickname = self._organization = None
        self._phone = self._address = self._address_2 = self._city = None
        self._country = self._fax = self._notify_email = None
        self._pager_email = self._post_code = self._group_name = None
        self._zone = self._forbid = self._status = None
        self._website = None
        self._permission = []
        self.permission_groups = []
        self.groups = []
        if 'api' in kwargs:
            # Built from an existing API response: trust the supplied fields.
            del kwargs['api']
            for key, val in kwargs.items():
                if key != '_user_name':
                    setattr(self, '_' + key, val)
                else:
                    setattr(self, key, val)
        elif len(args) == 0 and len(kwargs) == 0:
            self._get()
        else:
            self._post(*args, **kwargs)

    def _post(self, password, email, first_name, last_name, nickname,
              organization, phone, address=None, address_2=None, city=None,
              country=None, fax=None, notify_email=None, pager_email=None,
              post_code=None, group_name=None, permission=None, zone=None,
              forbid=None, status=None, website=None):
        """Create a new :class:`~dyn.tm.accounts.User` object on the DynECT
        System
        """
        api_args = {'password': password, 'email': email,
                    'first_name': first_name, 'last_name': last_name,
                    'nickname': nickname, 'organization': organization,
                    'phone': phone, 'address': address,
                    'address_2': address_2, 'city': city, 'country': country,
                    'fax': fax, 'notify_email': notify_email,
                    'pager_email': pager_email, 'post_code': post_code,
                    'group_name': group_name, 'permission': permission,
                    'zone': zone, 'forbid': forbid,
                    'website': website}
        self._password = password
        self._email = email
        self._first_name = first_name
        self._last_name = last_name
        self._nickname = nickname
        self._organization = organization
        self._phone = phone
        self._address = address
        self._address_2 = address_2
        self._city = city
        self._country = country
        self._fax = fax
        self._notify_email = notify_email
        self._pager_email = pager_email
        self._post_code = post_code
        self._group_name = group_name
        self._permission = permission
        self._zone = zone
        self._forbid = forbid
        self._status = status
        self._website = website
        response = DynectSession.get_session().execute(self.uri, 'POST',
                                                       api_args)
        self._build(response['data'])

    def _get(self):
        """Get an existing :class:`~dyn.tm.accounts.User` object from the
        DynECT System
        """
        api_args = {}
        response = DynectSession.get_session().execute(self.uri, 'GET',
                                                       api_args)
        self._build(response['data'])
        self._get_permission()

    def _update_permission(self):
        # Previously a byte-for-byte duplicate of _get_permission; delegate
        # instead so the report logic lives in exactly one place.
        self._get_permission()

    def _update(self, api_args=None):
        """PUT *api_args* to this user's uri and rebuild from the response"""
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])

    def _build(self, data):
        """Private build method: store every key of *data* as a
        ``_``-prefixed private attribute.
        """
        for key, val in data.items():
            setattr(self, '_' + key, val)

    def _get_permission(self):
        """Fetch this user's permission report and rebuild the local
        permission/zone lists from it.
        """
        api_args = {'user_name': self._user_name}
        response = DynectSession.get_session().execute(
            self._permission_report_uri, 'POST', api_args)
        self._build_permission(response)

    def _build_permission(self, response):
        """Rebuild ``_permission`` and ``_zone`` from a UserPermissionReport.

        Both lists are rebuilt from scratch. Previously ``_permission`` was
        only appended to (while ``_zone`` was reset), so every permission
        sync accumulated duplicate entries.
        """
        self._permission = list()
        self._zone = list()
        for val in response['data']['allowed']:
            self._permission.append(val['name'])
            for zone in val['zone']:
                if zone['zone_name'] not in self._zone:
                    self._zone.append(zone['zone_name'])

    @property
    def user_name(self):
        """A :class:`~dyn.tm.accounts.User`'s user_name is a read-only property
        """
        return self._user_name
    @user_name.setter
    def user_name(self, value):
        # Read-only attribute: assignment is deliberately a no-op.
        pass

    @property
    def status(self):
        """A :class:`~dyn.tm.accounts.User`'s status is a read-only property.
        To change you must use the :meth:`block`/:meth:`unblock` methods
        """
        return self._status
    @status.setter
    def status(self, value):
        # Read-only attribute: assignment is deliberately a no-op.
        pass

    @property
    def email(self):
        """This :class:`~dyn.tm.accounts.User`'s Email address"""
        return self._email
    @email.setter
    def email(self, value):
        api_args = {'email': value}
        self._update(api_args)

    @property
    def first_name(self):
        """This :class:`~dyn.tm.accounts.User`'s first name"""
        return self._first_name
    @first_name.setter
    def first_name(self, value):
        api_args = {'first_name': value}
        self._update(api_args)

    @property
    def last_name(self):
        """This :class:`~dyn.tm.accounts.User`'s last name"""
        return self._last_name
    @last_name.setter
    def last_name(self, value):
        api_args = {'last_name': value}
        self._update(api_args)

    @property
    def nickname(self):
        """The nickname for the `Contact` associated with this
        :class:`~dyn.tm.accounts.User`"""
        return self._nickname
    @nickname.setter
    def nickname(self, value):
        api_args = {'nickname': value}
        self._update(api_args)

    @property
    def organization(self):
        """This :class:`~dyn.tm.accounts.User`'s organization"""
        return self._organization
    @organization.setter
    def organization(self, value):
        api_args = {'organization': value}
        self._update(api_args)

    @property
    def phone(self):
        """This :class:`~dyn.tm.accounts.User`'s phone number. Can be of the
        form: (0) ( country-code ) ( local number ) ( extension ) Only the
        country-code (1-3 digits) and local number (at least 7 digits) are
        required. The extension can be up to 4 digits. Any non-digits are
        ignored.
        """
        return self._phone
    @phone.setter
    def phone(self, value):
        api_args = {'phone': value}
        self._update(api_args)

    @property
    def address(self):
        """This :class:`~dyn.tm.accounts.User`'s street address"""
        return self._address
    @address.setter
    def address(self, value):
        api_args = {'address': value}
        self._update(api_args)

    @property
    def address_2(self):
        """This :class:`~dyn.tm.accounts.User`'s street address, line 2"""
        return self._address_2
    @address_2.setter
    def address_2(self, value):
        api_args = {'address_2': value}
        self._update(api_args)

    @property
    def city(self):
        """This :class:`~dyn.tm.accounts.User`'s city, part of the user's
        address
        """
        return self._city
    @city.setter
    def city(self, value):
        api_args = {'city': value}
        self._update(api_args)

    @property
    def country(self):
        """This :class:`~dyn.tm.accounts.User`'s country, part of the user's
        address
        """
        return self._country
    @country.setter
    def country(self, value):
        api_args = {'country': value}
        self._update(api_args)

    @property
    def fax(self):
        """This :class:`~dyn.tm.accounts.User`'s fax number"""
        return self._fax
    @fax.setter
    def fax(self, value):
        api_args = {'fax': value}
        self._update(api_args)

    @property
    def notify_email(self):
        """Email address where this :class:`~dyn.tm.accounts.User` should
        receive notifications
        """
        return self._notify_email
    @notify_email.setter
    def notify_email(self, value):
        api_args = {'notify_email': value}
        self._update(api_args)

    @property
    def pager_email(self):
        """Email address where this :class:`~dyn.tm.accounts.User` should
        receive messages destined for a pager
        """
        return self._pager_email
    @pager_email.setter
    def pager_email(self, value):
        api_args = {'pager_email': value}
        self._update(api_args)

    @property
    def post_code(self):
        """This :class:`~dyn.tm.accounts.User`'s postal code, part of the
        user's address
        """
        return self._post_code
    @post_code.setter
    def post_code(self, value):
        api_args = {'post_code': value}
        self._update(api_args)

    @property
    def group_name(self):
        """A list of permission groups this :class:`~dyn.tm.accounts.User`
        belongs to
        """
        return self._group_name
    @group_name.setter
    def group_name(self, value):
        api_args = {'group_name': value}
        self._update(api_args)

    @property
    def permission(self):
        """A list of permissions assigned to this
        :class:`~dyn.tm.accounts.User`
        """
        return self._permission
    @permission.setter
    def permission(self, value):
        api_args = {'permission': value}
        self._update(api_args)

    @property
    def zone(self):
        """A list of zones where this :class:`~dyn.tm.accounts.User`'s
        permissions apply
        """
        return self._zone
    @zone.setter
    def zone(self, value):
        api_args = {'zone': value}
        self._update(api_args)

    @property
    def forbid(self):
        """A list of forbidden permissions for this
        :class:`~dyn.tm.accounts.User`
        """
        return self._forbid
    @forbid.setter
    def forbid(self, value):
        """Apply a new list of forbidden permissions for the
        :class:`~dyn.tm.accounts.User`
        """
        api_args = {'forbid': value}
        self._update(api_args)

    @property
    def website(self):
        """This :class:`~dyn.tm.accounts.User`'s website"""
        return self._website
    @website.setter
    def website(self, value):
        api_args = {'website': value}
        self._update(api_args)

    def block(self):
        """Blocks this :class:`~dyn.tm.accounts.User` from logging in"""
        api_args = {'block': 'True'}
        uri = '/User/{}/'.format(self._user_name)
        response = DynectSession.get_session().execute(uri, 'PUT', api_args)
        self._status = response['data']['status']

    def unblock(self):
        """Restores this :class:`~dyn.tm.accounts.User` to an active status and
        re-enables their log-in
        """
        api_args = {'unblock': 'True'}
        uri = '/User/{}/'.format(self._user_name)
        response = DynectSession.get_session().execute(uri, 'PUT', api_args)
        self._status = response['data']['status']

    def add_permission(self, permission):
        """Add individual permissions to this :class:`~dyn.tm.accounts.User`

        :param permission: the permission to add
        """
        if permission not in self._permission:
            self._permission.append(permission)
        uri = '/UserPermissionEntry/{}/{}/'.format(self._user_name,
                                                   permission)
        DynectSession.get_session().execute(uri, 'POST')

    def replace_permission(self, permission=None):
        """Replaces the list of permissions for this
        :class:`~dyn.tm.accounts.User`

        :param permission: A list of permissions. Pass an empty list or omit
            the argument to clear the list of permissions of the
            :class:`~dyn.tm.accounts.User`
        """
        api_args = {}
        if permission is not None:
            api_args['permission'] = permission
            self._permission = permission
        else:
            self._permission = []
        uri = '/UserPermissionEntry/{}/'.format(self._user_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)

    def delete_permission(self, permission):
        """Remove this specific permission from the
        :class:`~dyn.tm.accounts.User`

        :param permission: the permission to remove
        """
        if permission in self._permission:
            self._permission.remove(permission)
        uri = '/UserPermissionEntry/{}/{}/'.format(self._user_name, permission)
        DynectSession.get_session().execute(uri, 'DELETE')

    def add_permissions_group(self, group):
        """Assigns the permissions group to this :class:`~dyn.tm.accounts.User`

        :param group: the permissions group to add to this
            :class:`~dyn.tm.accounts.User`
        """
        self.permission_groups.append(group)
        uri = '/UserGroupEntry/{}/{}/'.format(self._user_name, group)
        DynectSession.get_session().execute(uri, 'POST')

    def replace_permissions_group(self, groups=None):
        """Replaces the list of permissions for this
        :class:`~dyn.tm.accounts.User`

        :param groups: A list of permissions groups. Pass an empty list or omit
            the argument to clear the list of permissions groups of the
            :class:`~dyn.tm.accounts.User`
        """
        api_args = {}
        if groups is not None:
            api_args['groups'] = groups
            self.groups = groups
        else:
            self.groups = []
        uri = '/UserGroupEntry/{}/'.format(self._user_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)

    def delete_permissions_group(self, group):
        """Removes the permissions group from the
        :class:`~dyn.tm.accounts.User`

        :param group: the permissions group to remove from this
            :class:`~dyn.tm.accounts.User`
        """
        # Fixed: the membership test previously checked self.permission (the
        # individual-permission list) instead of self.permission_groups, so
        # the local group list was never pruned.
        if group in self.permission_groups:
            self.permission_groups.remove(group)
        uri = '/UserGroupEntry/{}/{}/'.format(self._user_name, group)
        DynectSession.get_session().execute(uri, 'DELETE')

    def add_zone(self, zone, recurse='Y'):
        """Add individual zones to this :class:`~dyn.tm.accounts.User`

        :param zone: the zone to add
        :param recurse: determine if permissions should be extended to
            subzones.
        """
        # Pass recurse through to the API (mirrors PermissionsGroup.add_zone);
        # previously the parameter was accepted but never sent.
        api_args = {'recurse': recurse}
        if self._zone is None or zone not in self._zone:
            uri = '/UserZoneEntry/{}/{}/'.format(self._user_name, zone)
            DynectSession.get_session().execute(uri, 'POST', api_args)
        self._get_permission()

    def replace_zones(self, zones):
        """Remove this specific zones from the
        :class:`~dyn.tm.accounts.User`

        :param zones: array of the zones to be updated
            format must be [{'zone_name':[yourzone], recurse: 'Y'},{ ...}]
            recurse is optional.
        """
        api_args = {}
        if zones is not None:
            api_args['zone'] = zones
        uri = '/UserZoneEntry/{}/'.format(self._user_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)
        self._get_permission()

    def delete_zone(self, zone):
        """Remove this specific zones from the
        :class:`~dyn.tm.accounts.User`

        :param zone: the zone to remove
        """
        uri = '/UserZoneEntry/{}/{}/'.format(self._user_name, zone)
        DynectSession.get_session().execute(uri, 'DELETE')
        self._get_permission()

    def add_forbid_rule(self, permission, zone=None):
        """Adds the forbid rule to the :class:`~dyn.tm.accounts.User`'s
        permission group

        :param permission: the permission to forbid from this
            :class:`~dyn.tm.accounts.User`
        :param zone: A list of zones where the forbid rule applies
        """
        api_args = {}
        if zone is not None:
            api_args['zone'] = zone
        uri = '/UserForbidEntry/{}/{}/'.format(self._user_name, permission)
        DynectSession.get_session().execute(uri, 'POST', api_args)

    def replace_forbid_rules(self, forbid=None):
        """Replaces the list of forbidden permissions in the
        :class:`~dyn.tm.accounts.User`'s permissions group with a new list.

        :param forbid: A list of rules to replace the forbidden rules on the
            :class:`~dyn.tm.accounts.User`'s permission group. If empty or not
            passed in, the :class:`~dyn.tm.accounts.User`'s forbid list will be
            cleared
        """
        api_args = {}
        if forbid is not None:
            api_args['forbid'] = forbid
        uri = '/UserForbidEntry/{}/'.format(self._user_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)

    def delete_forbid_rule(self, permission, zone=None):
        """Removes a forbid permissions rule from the
        :class:`~dyn.tm.accounts.User`'s permission group

        :param permission: permission
        :param zone: A list of zones where the forbid rule applies
        """
        api_args = {}
        if zone is not None:
            api_args['zone'] = zone
        uri = '/UserForbidEntry/{}/{}/'.format(self._user_name, permission)
        DynectSession.get_session().execute(uri, 'DELETE', api_args)

    def delete(self):
        """Delete this :class:`~dyn.tm.accounts.User` from the system"""
        uri = '/User/{}/'.format(self._user_name)
        DynectSession.get_session().execute(uri, 'DELETE')

    def __str__(self):
        """Custom str method"""
        return force_unicode('<User>: {}').format(self.user_name)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override

        Encode the str form explicitly: ``bytes(str)`` raises TypeError on
        Python 3 without an encoding argument.
        """
        return self.__str__().encode('utf-8')
class PermissionsGroup(object):
    """A DynECT System Permissions Group object"""

    def __init__(self, group_name, *args, **kwargs):
        """Create a new permissions Group

        :param group_name: The name of the permission group to update
        :param description: A description of the permission group
        :param group_type: The type of the permission group. Valid values:
            plain or default
        :param all_users: If 'Y', all current users will be added to the group.
            Cannot be used if user_name is passed in
        :param permission: A list of permissions that the group contains
        :param user_name: A list of users that belong to the permission group
        :param subgroup: A list of groups that belong to the permission group
        :param zone: A list of zones where the group's permissions apply
        """
        super(PermissionsGroup, self).__init__()
        self._group_name = group_name
        self._description = self._group_type = self._all_users = None
        self._permission = self._user_name = self._subgroup = self._zone = None
        self.uri = '/PermissionGroup/{}/'.format(self._group_name)
        if 'api' in kwargs:
            # Built from an existing API response: trust the supplied fields.
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) == 0 and len(kwargs) == 0:
            self._get()
        else:
            self._post(*args, **kwargs)

    def _build(self, data):
        """Map an API response payload onto this object's private attributes.

        The API reports the group's type under the JSON key ``type`` (stored
        locally as ``_group_type``) and zones as dicts, which are flattened
        to a list of zone names. Previously this loop was duplicated in
        ``_post``, ``_get`` and ``_update``.
        """
        for key, val in data.items():
            if key == 'type':
                setattr(self, '_group_type', val)
            elif key == 'zone':
                self._zone = [zone['zone_name'] for zone in val]
            else:
                setattr(self, '_' + key, val)

    def _post(self, description, group_type=None, all_users=None,
              permission=None, user_name=None, subgroup=None, zone=None):
        """Create a new :class:`~dyn.tm.accounts.PermissionsGroup` on the
        DynECT System
        """
        self._description = description
        self._group_type = group_type
        self._all_users = all_users
        self._permission = permission
        self._user_name = user_name
        self._subgroup = subgroup
        self._zone = zone
        api_args = {}
        # Any fields that were not explicitly set should not be passed through
        for key, val in self.__dict__.items():
            if val is not None and not hasattr(val, '__call__') and \
                    key.startswith('_'):
                # Fixed: was ``key is '_group_type'`` — a string identity
                # comparison that only worked via CPython interning.
                if key == '_group_type':
                    api_args['type'] = val
                else:
                    api_args[key[1:]] = val
        uri = '/PermissionGroup/{}/'.format(self._group_name)
        response = DynectSession.get_session().execute(uri, 'POST', api_args)
        self._build(response['data'])

    def _get(self):
        """Get an existing :class:`~dyn.tm.accounts.PermissionsGroup` from the
        DynECT System
        """
        response = DynectSession.get_session().execute(self.uri, 'GET')
        self._build(response['data'])

    def _update(self, api_args=None):
        """PUT *api_args* to this group's uri and rebuild from the response"""
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])

    @property
    def group_name(self):
        """The name of this permission group"""
        return self._group_name
    @group_name.setter
    def group_name(self, value):
        new_group_name = value
        api_args = {'new_group_name': new_group_name,
                    'group_name': self._group_name}
        self._update(api_args)
        # The rename changes the resource path; keep uri in sync.
        self._group_name = new_group_name
        self.uri = '/PermissionGroup/{}/'.format(self._group_name)

    @property
    def description(self):
        """A description of this permission group"""
        return self._description
    @description.setter
    def description(self, value):
        self._description = value
        api_args = {'group_name': self._group_name,
                    'description': self._description}
        self._update(api_args)

    @property
    def group_type(self):
        """The type of this permission group"""
        return self._group_type
    @group_type.setter
    def group_type(self, value):
        self._group_type = value
        api_args = {'type': self._group_type,
                    'group_name': self._group_name}
        self._update(api_args)

    @property
    def all_users(self):
        """If 'Y', all current users will be added to the group. Cannot be
        used if user_name is passed in
        """
        return self._all_users
    @all_users.setter
    def all_users(self, value):
        self._all_users = value
        api_args = {'all_users': self._all_users,
                    'group_name': self._group_name}
        self._update(api_args)

    @property
    def permission(self):
        """A list of permissions that this group contains"""
        return self._permission
    @permission.setter
    def permission(self, value):
        self._permission = value
        api_args = {'permission': self._permission,
                    'group_name': self._group_name}
        self._update(api_args)

    @property
    def user_name(self):
        """A list of users that belong to the permission group"""
        return self._user_name
    @user_name.setter
    def user_name(self, value):
        self._user_name = value
        api_args = {'user_name': self._user_name,
                    'group_name': self._group_name}
        self._update(api_args)

    @property
    def subgroup(self):
        """A list of groups that belong to the permission group"""
        return self._subgroup
    @subgroup.setter
    def subgroup(self, value):
        self._subgroup = value
        api_args = {'subgroup': self._subgroup,
                    'group_name': self._group_name}
        self._update(api_args)

    @property
    def zone(self):
        """A list of zones where the group's permissions apply"""
        return self._zone
    @zone.setter
    def zone(self, value):
        self._zone = value
        api_args = {'zone': self._zone,
                    'group_name': self._group_name}
        self._update(api_args)

    def delete(self):
        """Delete this permission group"""
        uri = '/PermissionGroup/{}/'.format(self._group_name)
        DynectSession.get_session().execute(uri, 'DELETE')

    def add_permission(self, permission):
        """Adds individual permissions to the user

        :param permission: the permission to add to this user
        """
        uri = '/PermissionGroupPermissionEntry/{}/{}/'.format(self._group_name,
                                                              permission)
        DynectSession.get_session().execute(uri, 'POST')
        self._permission.append(permission)

    def replace_permissions(self, permission=None):
        """Replaces a list of individual user permissions for the user

        :param permission: A list of permissions. Pass an empty list or omit
            the argument to clear the list of permissions of the user
        """
        api_args = {}
        if permission is not None:
            api_args['permission'] = permission
        uri = '/PermissionGroupPermissionEntry/{}/'.format(self._group_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)
        if permission:
            self._permission = permission
        else:
            self._permission = []

    def remove_permission(self, permission):
        """Removes the specific permission from the user

        :param permission: the permission to remove
        """
        uri = '/PermissionGroupPermissionEntry/{}/{}/'.format(self._group_name,
                                                              permission)
        DynectSession.get_session().execute(uri, 'DELETE')
        self._permission.remove(permission)

    def add_zone(self, zone, recurse='Y'):
        """Add a new Zone to this :class:`~dyn.tm.accounts.PermissionsGroup`

        :param zone: The name of the Zone to be added to this
            :class:`~dyn.tm.accounts.PermissionsGroup`
        :param recurse: A flag determining whether or not to add all sub-nodes
            of a Zone to this :class:`~dyn.tm.accounts.PermissionsGroup`
        """
        api_args = {'recurse': recurse}
        uri = '/PermissionGroupZoneEntry/{}/{}/'.format(self._group_name, zone)
        DynectSession.get_session().execute(uri, 'POST', api_args)
        self._zone.append(zone)

    def add_subgroup(self, name):
        """Add a new Sub group to this
        :class:`~dyn.tm.accounts.PermissionsGroup`

        :param name: The name of the :class:`~dyn.tm.accounts.PermissionsGroup`
            to be added to this :class:`~dyn.tm.accounts.PermissionsGroup`'s
            subgroups
        """
        uri = '/PermissionGroupSubgroupEntry/{}/{}/'.format(self._group_name,
                                                            name)
        DynectSession.get_session().execute(uri, 'POST')
        self._subgroup.append(name)

    def update_subgroup(self, subgroups):
        """Update the subgroups under this
        :class:`~dyn.tm.accounts.PermissionsGroup`

        :param subgroups: The subgroups with updated information
        """
        api_args = {'subgroup': subgroups}
        uri = '/PermissionGroupSubgroupEntry/{}/'.format(self._group_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)
        self._subgroup = subgroups

    def delete_subgroup(self, name):
        """Remove a Subgroup from this
        :class:`~dyn.tm.accounts.PermissionsGroup`

        :param name: The name of the :class:`~dyn.tm.accounts.PermissionsGroup`
            to be removed from this
            :class:`~dyn.tm.accounts.PermissionsGroup`'s subgroups
        """
        uri = '/PermissionGroupSubgroupEntry/{}/{}/'.format(self._group_name,
                                                            name)
        DynectSession.get_session().execute(uri, 'DELETE')
        self._subgroup.remove(name)

    def __str__(self):
        """Custom str method"""
        return force_unicode('<PermissionsGroup>: {}').format(self.group_name)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override

        Encode the str form explicitly: ``bytes(str)`` raises TypeError on
        Python 3 without an encoding argument.
        """
        return self.__str__().encode('utf-8')
class UserZone(object):
    """A DynECT system UserZoneEntry"""

    def __init__(self, user_name, zone_name, recurse='Y'):
        """Create a :class:`~dyn.tm.accounts.UserZone` entry, granting
        *user_name* permissions on *zone_name* via an immediate POST call.

        :param user_name: the user receiving zone permissions
        :param zone_name: the zone the permissions apply to
        :param recurse: 'Y' to extend permissions to subnodes of the zone
        """
        super(UserZone, self).__init__()
        self._user_name = user_name
        self._zone_name = zone_name
        self._recurse = recurse
        api_args = {'recurse': self._recurse}
        uri = '/UserZoneEntry/{}/{}/'.format(self._user_name, self._zone_name)
        # Local variable was previously misspelled ``respnose``.
        response = DynectSession.get_session().execute(uri, 'POST', api_args)
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)

    @property
    def user_name(self):
        """User_name property of :class:`~dyn.tm.accounts.UserZone` object is
        read only
        """
        return self._user_name
    @user_name.setter
    def user_name(self, value):
        # Read-only attribute: assignment is deliberately a no-op.
        pass

    @property
    def recurse(self):
        """Indicates whether or not permissions should apply to subnodes of
        the `zone_name` as well
        """
        return self._recurse
    @recurse.setter
    def recurse(self, value):
        self._recurse = value
        api_args = {'recurse': self._recurse, 'zone_name': self._zone_name}
        uri = '/UserZoneEntry/{}/'.format(self._user_name)
        DynectSession.get_session().execute(uri, 'PUT', api_args)

    def update_zones(self, zone=None):
        """Replacement list zones where the user will now have permissions.
        Pass an empty list or omit the argument to clear the user's zone
        permissions

        :param zone: a list of zone names where the user will now have
            permissions
        """
        if zone is None:
            zone = []
        api_args = {'zone': []}
        for zone_data in zone:
            api_args['zone'].append({'zone_name': zone_data})
        uri = '/UserZoneEntry/{}/'.format(self._user_name)
        response = DynectSession.get_session().execute(uri, 'PUT', api_args)
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)

    def delete(self):
        """Delete this :class:`~dyn.tm.accounts.UserZone` object from the
        DynECT System
        """
        api_args = {'recurse': self.recurse}
        uri = '/UserZoneEntry/{}/{}/'.format(self._user_name, self._zone_name)
        DynectSession.get_session().execute(uri, 'DELETE', api_args)

    def __str__(self):
        """Custom str method"""
        return force_unicode('<UserZone>: {}').format(self.user_name)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override

        Encode the str form explicitly: ``bytes(str)`` raises TypeError on
        Python 3 without an encoding argument.
        """
        return self.__str__().encode('utf-8')
class Notifier(object):
    """DynECT System Notifier"""

    def __init__(self, *args, **kwargs):
        """Create a new :class:`~dyn.tm.accounts.Notifier` object

        :param label: The label used to identify this
            :class:`~dyn.tm.accounts.Notifier`
        :param recipients: List of Recipients attached to this
            :class:`~dyn.tm.accounts.Notifier`
        :param services: List of services attached to this
            :class:`~dyn.tm.accounts.Notifier`
        :param notifier_id: The system id of this
            :class:`~dyn.tm.accounts.Notifier`
        """
        super(Notifier, self).__init__()
        self._label = self._recipients = self._services = None
        self._notifier_id = self.uri = None
        if 'api' in kwargs:
            # Built from an existing API response: trust the supplied fields.
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
            self.uri = '/Notifier/{}/'.format(self._notifier_id)
        elif len(args) + len(kwargs) > 1:
            self._post(*args, **kwargs)
        elif len(kwargs) > 0 or 'label' in kwargs:
            self._post(**kwargs)
        else:
            self._get(*args, **kwargs)

    def _post(self, label=None, recipients=None, services=None):
        """Create a new :class:`~dyn.tm.accounts.Notifier` object on the
        DynECT System
        """
        if label is None:
            raise DynectInvalidArgumentError
        uri = '/Notifier/'
        self._label = label
        self._recipients = recipients
        self._services = services
        # Send an explicit dict of API arguments; previously ``self`` (the
        # Notifier instance) was passed as the payload, which is not a valid
        # api_args mapping for execute().
        api_args = {'label': self._label,
                    'recipients': self._recipients,
                    'services': self._services}
        response = DynectSession.get_session().execute(uri, 'POST', api_args)
        self._build(response['data'])
        self.uri = '/Notifier/{}/'.format(self._notifier_id)

    def _get(self, notifier_id):
        """Get an existing :class:`~dyn.tm.accounts.Notifier` object from the
        DynECT System
        """
        self._notifier_id = notifier_id
        self.uri = '/Notifier/{}/'.format(self._notifier_id)
        response = DynectSession.get_session().execute(self.uri, 'GET')
        self._build(response['data'])

    def _build(self, data):
        """Store every key of *data* as a ``_``-prefixed private attribute."""
        for key, val in data.items():
            setattr(self, '_' + key, val)

    def _update(self, api_args=None):
        """PUT *api_args* to this notifier's uri and rebuild from response"""
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])

    @property
    def notifier_id(self):
        """The unique System id for this Notifier"""
        return self._notifier_id
    @notifier_id.setter
    def notifier_id(self, value):
        # Read-only attribute: assignment is deliberately a no-op.
        pass

    @property
    def label(self):
        """The label used to identify this :class:`~dyn.tm.accounts.Notifier`
        """
        return self._label
    @label.setter
    def label(self, value):
        self._label = value
        api_args = {'label': self._label}
        self._update(api_args)

    @property
    def recipients(self):
        """List of Recipients attached to this
        :class:`~dyn.tm.accounts.Notifier`
        """
        return self._recipients
    @recipients.setter
    def recipients(self, value):
        self._recipients = value
        api_args = {'recipients': self._recipients}
        self._update(api_args)

    @property
    def services(self):
        """List of services attached to this
        :class:`~dyn.tm.accounts.Notifier`
        """
        return self._services
    @services.setter
    def services(self, value):
        self._services = value
        api_args = {'services': self._services}
        self._update(api_args)

    def delete(self):
        """Delete this :class:`~dyn.tm.accounts.Notifier` from the Dynect
        System
        """
        DynectSession.get_session().execute(self.uri, 'DELETE')

    def __str__(self):
        """Custom str method"""
        return force_unicode('<Notifier>: {}').format(self.label)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override

        Encode the str form explicitly: ``bytes(str)`` raises TypeError on
        Python 3 without an encoding argument.
        """
        return self.__str__().encode('utf-8')
class Contact(object):
    """A DynECT System Contact"""

    def __init__(self, nickname, *args, **kwargs):
        """Create a :class:`~dyn.tm.accounts.Contact` object

        :param nickname: The nickname for this
            :class:`~dyn.tm.accounts.Contact`
        :param email: The :class:`~dyn.tm.accounts.Contact`'s email address
        :param first_name: The :class:`~dyn.tm.accounts.Contact`'s first name
        :param last_name: The :class:`~dyn.tm.accounts.Contact`'s last name
        :param organization: The :class:`~dyn.tm.accounts.Contact`'s
            organization
        :param phone: The :class:`~dyn.tm.accounts.Contact`'s phone number.
            Can be of the form: ( 0 ) ( country-code ) ( local number )
            ( extension ) Only the country-code (1-3 digits) and local
            number (at least 7 digits) are required. The extension can be
            up to 4 digits. Any non-digits are ignored.
        :param address: The :class:`~dyn.tm.accounts.Contact`'s street
            address
        :param address2: The :class:`~dyn.tm.accounts.Contact`'s street
            address, line 2
        :param city: The :class:`~dyn.tm.accounts.Contact`'s city, part of
            the user's address
        :param country: The :class:`~dyn.tm.accounts.Contact`'s country,
            part of the :class:`~dyn.tm.accounts.Contact`'s address
        :param fax: The :class:`~dyn.tm.accounts.Contact`'s fax number
        :param notify_email: Email address where the
            :class:`~dyn.tm.accounts.Contact` should receive notifications
        :param pager_email: Email address where the
            :class:`~dyn.tm.accounts.Contact` should receive messages
            destined for a pager
        :param post_code: Zip code or Postal code
        :param state: The :class:`~dyn.tm.accounts.Contact`'s state, part
            of the :class:`~dyn.tm.accounts.Contact`'s address
        :param website: The :class:`~dyn.tm.accounts.Contact`'s website
        """
        super(Contact, self).__init__()
        self._nickname = nickname
        self._email = self._first_name = self._last_name = None
        self._organization = self._address = self._address_2 = None
        self._city = self._country = self._fax = self._notify_email = None
        self._pager_email = self._phone = self._post_code = self._state = None
        self._website = None
        self.uri = '/Contact/{}/'.format(self._nickname)
        if 'api' in kwargs:
            # Built from an existing API response: copy the returned
            # fields straight onto the object, no API round-trip.
            del kwargs['api']
            for key, val in kwargs.items():
                if key != '_nickname':
                    setattr(self, '_' + key, val)
                else:
                    setattr(self, key, val)
            self.uri = '/Contact/{}/'.format(self._nickname)
        elif len(args) == 0 and len(kwargs) == 0:
            # Only a nickname was supplied: fetch the existing contact.
            self._get()
        else:
            # Additional arguments mean we are creating a new contact.
            self._post(*args, **kwargs)

    def _post(self, email, first_name, last_name, organization, address=None,
              address_2=None, city=None, country=None, fax=None,
              notify_email=None, pager_email=None, phone=None, post_code=None,
              state=None, website=None):
        """Create a new :class:`~dyn.tm.accounts.Contact` on the DynECT
        System
        """
        self._email = email
        self._first_name = first_name
        self._last_name = last_name
        self._organization = organization
        self._address = address
        self._address_2 = address_2
        self._city = city
        self._country = country
        self._fax = fax
        self._notify_email = notify_email
        self._pager_email = pager_email
        self._phone = phone
        self._post_code = post_code
        self._state = state
        self._website = website
        # The session serializes this object's fields into the POST body.
        response = DynectSession.get_session().execute(self.uri, 'POST', self)
        self._build(response['data'])

    def _get(self):
        """Get an existing :class:`~dyn.tm.accounts.Contact` from the DynECT
        System
        """
        response = DynectSession.get_session().execute(self.uri, 'GET')
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)

    def _build(self, data):
        # Copy API response fields onto this object as private attributes.
        for key, val in data.items():
            setattr(self, '_' + key, val)

    def _update(self, api_args=None):
        """Private update method which handles building this
        :class:`~dyn.tm.accounts.Contact` object from the API JSON response
        """
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])

    @property
    def nickname(self):
        """This :class:`~dyn.tm.accounts.Contact`'s DynECT System Nickname"""
        return self._nickname

    @nickname.setter
    def nickname(self, value):
        self._nickname = value
        # The API uses 'new_nickname' (not 'nickname') for renames.
        api_args = {'new_nickname': self._nickname}
        self._update(api_args)

    @property
    def email(self):
        """This :class:`~dyn.tm.accounts.Contact`'s DynECT System Email address
        """
        return self._email

    @email.setter
    def email(self, value):
        self._email = value
        api_args = {'email': self._email}
        self._update(api_args)

    @property
    def first_name(self):
        """The first name of this :class:`~dyn.tm.accounts.Contact`"""
        return self._first_name

    @first_name.setter
    def first_name(self, value):
        self._first_name = value
        api_args = {'first_name': self._first_name}
        self._update(api_args)

    @property
    def last_name(self):
        """The last name of this :class:`~dyn.tm.accounts.Contact`"""
        return self._last_name

    @last_name.setter
    def last_name(self, value):
        self._last_name = value
        api_args = {'last_name': self._last_name}
        self._update(api_args)

    @property
    def organization(self):
        """The organization this :class:`~dyn.tm.accounts.Contact` belongs to
        within the DynECT System
        """
        return self._organization

    @organization.setter
    def organization(self, value):
        self._organization = value
        api_args = {'organization': self._organization}
        self._update(api_args)

    @property
    def phone(self):
        """The phone number associated with this
        :class:`~dyn.tm.accounts.Contact`
        """
        return self._phone

    @phone.setter
    def phone(self, value):
        self._phone = value
        api_args = {'phone': self._phone}
        self._update(api_args)

    @property
    def address(self):
        """This :class:`~dyn.tm.accounts.Contact`'s street address"""
        return self._address

    @address.setter
    def address(self, value):
        self._address = value
        api_args = {'address': self._address}
        self._update(api_args)

    @property
    def address_2(self):
        """This :class:`~dyn.tm.accounts.Contact`'s street address, line 2"""
        return self._address_2

    @address_2.setter
    def address_2(self, value):
        self._address_2 = value
        api_args = {'address_2': self._address_2}
        self._update(api_args)

    @property
    def city(self):
        """This :class:`~dyn.tm.accounts.Contact`'s city"""
        return self._city

    @city.setter
    def city(self, value):
        self._city = value
        api_args = {'city': self._city}
        self._update(api_args)

    @property
    def country(self):
        """This :class:`~dyn.tm.accounts.Contact`'s Country"""
        return self._country

    @country.setter
    def country(self, value):
        self._country = value
        api_args = {'country': self._country}
        self._update(api_args)

    @property
    def fax(self):
        """The fax number associated with this
        :class:`~dyn.tm.accounts.Contact`
        """
        return self._fax

    @fax.setter
    def fax(self, value):
        self._fax = value
        api_args = {'fax': self._fax}
        self._update(api_args)

    @property
    def notify_email(self):
        """Email address where this :class:`~dyn.tm.accounts.Contact` should
        receive notifications
        """
        return self._notify_email

    @notify_email.setter
    def notify_email(self, value):
        self._notify_email = value
        api_args = {'notify_email': self._notify_email}
        self._update(api_args)

    @property
    def pager_email(self):
        """Email address where this :class:`~dyn.tm.accounts.Contact` should
        receive messages destined for a pager
        """
        return self._pager_email

    @pager_email.setter
    def pager_email(self, value):
        self._pager_email = value
        api_args = {'pager_email': self._pager_email}
        self._update(api_args)

    @property
    def post_code(self):
        """This :class:`~dyn.tm.accounts.Contacts`'s postal code, part of the
        contacts's address
        """
        return self._post_code

    @post_code.setter
    def post_code(self, value):
        self._post_code = value
        api_args = {'post_code': self._post_code}
        self._update(api_args)

    @property
    def state(self):
        """This :class:`~dyn.tm.accounts.Contact`'s state"""
        return self._state

    @state.setter
    def state(self, value):
        self._state = value
        api_args = {'state': self._state}
        self._update(api_args)

    @property
    def website(self):
        """This :class:`~dyn.tm.accounts.Contact`'s website"""
        return self._website

    @website.setter
    def website(self, value):
        self._website = value
        api_args = {'website': self._website}
        self._update(api_args)

    def delete(self):
        """Delete this :class:`~dyn.tm.accounts.Contact` from the Dynect System
        """
        DynectSession.get_session().execute(self.uri, 'DELETE')

    def __str__(self):
        """Custom str method"""
        return force_unicode('<Contact>: {}').format(self.nickname)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override"""
        # Encode explicitly: on Python 3 bytes(str) without an encoding
        # raises TypeError, so the original bytes(self.__str__()) broke.
        return self.__str__().encode('utf-8')
class IPACL(object):
    """A scoped IP ACL for logins on a customer"""

    def __init__(self, *args, **kwargs):
        """Create a :class:`~dyn.tm.accounts.IPACL` object

        :param netmasks: a list of netmasks, in CIDR
            form; no '/' assumes exact address
        :param active: Whether or not this ACL is active: 'Y' (default) or 'N'
        :param scope: The scope this :class:`~dyn.tm.accounts.IPACL` covers:
            'web' (default) or 'api'
        """
        super(IPACL, self).__init__()
        valid_scope = ['api', 'web']
        self._scope = kwargs.get('scope', 'web').lower()
        if self._scope not in valid_scope:
            raise Exception('scope can only be: {}'.format(" ".join(
                valid_scope)))
        if not isinstance(kwargs.get('netmasks', []), list):
            raise Exception('Must be list of netmasks.')
        # Netmasks are stored API-style: one space-separated string.
        self._netmasks = " ".join(kwargs.get('netmasks', []))
        self._active = kwargs.get('active', 'Y')
        if 'api' in kwargs:
            # Built from an API response: copy fields directly.
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) == 0 and len(kwargs) == 0:
            self._get()
        elif len(args) == 0 and len(kwargs) == 1 and kwargs.get('scope'):
            # BUGFIX: use .get() here -- the original kwargs['scope']
            # raised KeyError whenever the single keyword argument was
            # something other than 'scope' (e.g. only netmasks given).
            self._get(scope=self._scope)
        else:
            kwargs['netmasks'] = self._netmasks
            self._post(*args, **kwargs)

    def _post(self, netmasks=None, active=None, scope=None):
        """Create a new :class:`~dyn.tm.accounts.IPACL` on the DynECT System

        Note: the parameters are accepted for call compatibility but the
        request is built from the instance state set up in ``__init__``.
        """
        self.uri = '/CustomerIPACL/{}/'.format(self.scope)
        api_args = {'netmasks': self._netmasks, 'active': self._active}
        response = DynectSession.get_session().execute(
            self.uri, 'PUT', api_args)
        self._build(response['data'])

    def _get(self, scope='web'):
        """Get an existing :class:`~dyn.tm.accounts.IPACL` from the DynECT
        System
        """
        self._scope = scope
        self.uri = '/CustomerIPACL/{}/'.format(self._scope)
        response = DynectSession.get_session().execute(self.uri, 'GET')
        self._build(response['data'])

    def _build(self, data):
        # The API returns one entry per scope; only copy the fields of
        # the entry matching this object's scope.
        for scope in data:
            if scope['scope'] == self._scope:
                for key, val in scope.items():
                    setattr(self, '_' + key, val)

    def _update(self, api_args=None):
        """Private update method which handles building this
        :class:`~dyn.tm.accounts.IPACL` object from the API JSON response
        """
        self.uri = '/CustomerIPACL/{}/'.format(self._scope)
        response = DynectSession.get_session().execute(
            self.uri, 'PUT', api_args)
        self._build(response['data'])

    @property
    def netmasks(self):
        """The netmask list of this :class:`~dyn.tm.accounts.IPACL`"""
        # The API may hand back netmasks separated by CRLF, space or
        # comma; normalize to a list and drop empty fragments.
        return [x for x in (re.split('\r\n| |,', self._netmasks)) if x]

    @netmasks.setter
    def netmasks(self, values):
        if not isinstance(values, list):
            raise Exception('Must be list of netmasks.')
        self._netmasks = " ".join(values)
        api_args = {'netmasks': self._netmasks}
        self._update(api_args)

    @property
    def active(self):
        """The active status of this :class:`~dyn.tm.accounts.IPACL`"""
        return self._active

    @active.setter
    def active(self, value):
        self._active = value
        api_args = {'active': self._active}
        self._update(api_args)

    @property
    def scope(self):
        """The scope of this :class:`~dyn.tm.accounts.IPACL`"""
        return self._scope

    @scope.setter
    def scope(self, value):
        self._scope = value.lower()
        api_args = {'scope': self._scope}
        self._update(api_args)

    def delete(self):
        """Delete this :class:`~dyn.tm.accounts.IPACL` from the Dynect System
        """
        # Deletion is modeled as clearing the netmask list via PUT.
        api_args = {'netmasks': '', 'scope': self._scope}
        DynectSession.get_session().execute(self.uri, 'PUT', api_args)
        self._netmasks = ''

    def __str__(self):
        """Custom str method"""
        return force_unicode(
            '<IPACL>: Scope: {}, Active: {}, Netmasks: {}').format(
            self._scope, self._active, " ".join(self.netmasks))
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override"""
        # Encode explicitly: on Python 3 bytes(str) without an encoding
        # raises TypeError.
        return self.__str__().encode('utf-8')
| bsd-3-clause | f8ec730028eb0f787617a4d95119324d | 35.001633 | 79 | 0.574673 | 4.027465 | false | false | false | false |
dyninc/dyn-python | dyn/tm/services/httpredirect.py | 2 | 5118 | # -*- coding: utf-8 -*-
"""This module contains API Wrapper implementations of the HTTP Redirect
service
"""
from dyn.compat import force_unicode
from dyn.tm.session import DynectSession
__author__ = 'xorg'
__all__ = ['HTTPRedirect']
class HTTPRedirect(object):
    """HTTPRedirect is a service which sets up a redirect to the specified
    URL.//
    """

    def __init__(self, zone, fqdn, *args, **kwargs):
        """Create a new :class:`HTTPRedirect` service object

        :param zone: The zone to attach this HTTPRedirect Service to
        :param fqdn: The FQDN of the node where this service will be attached
        :param code: HTTP response code to return for redirection.
        :param url: The target URL where the client is sent. Must begin with
            either http:// or https://
        :param keep_uri: A flag indicating whether the redirection should
            include the originally requested URI.
        """
        super(HTTPRedirect, self).__init__()
        self._zone = zone
        self._fqdn = fqdn
        self._code = self._url = self._keep_uri = None
        if 'api' in kwargs:
            # Built from an API response: copy fields, no API call.
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) + len(kwargs) == 1:
            # NOTE(review): fetching an existing service requires exactly
            # one extra argument here; with zero extras we fall through to
            # _post, which then fails on missing parameters -- confirm
            # against upstream intent before changing.
            self._get()
        else:
            self._post(*args, **kwargs)

    def _get(self):
        """Build an object around an existing DynECT HTTPRedirect Service"""
        self.uri = '/HTTPRedirect/{}/{}/'.format(self._zone, self._fqdn)
        api_args = {'detail': 'Y'}
        response = DynectSession.get_session().execute(self.uri, 'GET',
                                                       api_args)
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)

    def _post(self, code, keep_uri, url):
        """Create a new HTTPRedirect Service on the DynECT System"""
        self._code = code
        self._keep_uri = keep_uri
        self._url = url
        self.uri = '/HTTPRedirect/{}/{}/'.format(self._zone, self._fqdn)
        api_args = {'code': self._code, 'keep_uri': self._keep_uri,
                    'url': self._url}
        response = DynectSession.get_session().execute(self.uri, 'POST',
                                                       api_args)
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)

    def _update(self, **kwargs):
        """Update an existing HTTPRedirect Service on the DynECT System"""
        self._code = kwargs.get('code', self._code)
        # BUGFIX: use the private attribute as the default, matching
        # _code/_url above. The original read the keep_uri *property*,
        # which triggered a full _get() API round-trip on every update.
        self._keep_uri = kwargs.get('keep_uri', self._keep_uri)
        self._url = kwargs.get('url', self._url)
        self.uri = '/HTTPRedirect/{}/{}/'.format(self._zone, self._fqdn)
        api_args = {'code': self._code, 'keep_uri': self._keep_uri,
                    'url': self._url}
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)

    @property
    def zone(self):
        """The zone that this HTTPRedirect Service is attached to is a
        read-only attribute
        """
        self._get()
        return self._zone

    @zone.setter
    def zone(self, value):
        pass

    @property
    def fqdn(self):
        """The fqdn that this HTTPRedirect Service is attached to is a
        read-only attribute
        """
        self._get()
        return self._fqdn

    @fqdn.setter
    def fqdn(self, value):
        pass

    @property
    def code(self):
        """HTTP response code to return for redirection.

        Valid values:
            301 – Permanent redirect
            302 – Temporary redirect
        """
        self._get()
        return self._code

    @code.setter
    def code(self, value):
        self._update(code=value)

    @property
    def keep_uri(self):
        """A flag indicating whether the redirection should include the
        originally requested URI.

        Valid values: Y, N
        """
        self._get()
        return self._keep_uri

    @keep_uri.setter
    def keep_uri(self, value):
        self._update(keep_uri=value)

    @property
    def url(self):
        """The target URL where the client is sent. Must begin with either
        http:// or https://
        """
        self._get()
        return self._url

    @url.setter
    def url(self, value):
        self._update(url=value)

    def delete(self, publish='Y'):
        """Delete this HTTPRedirect service from the DynECT System

        publish='N' can be passed into this function to do a soft-delete
        which will be acted upon during a zone publish.
        """
        api_args = {'publish': publish}
        DynectSession.get_session().execute(self.uri, 'DELETE', api_args)

    def __str__(self):
        """str override"""
        return force_unicode('<HTTPRedirect>: {}').format(self._fqdn)
    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override"""
        # Encode explicitly: on Python 3 bytes(str) without an encoding
        # raises TypeError.
        return self.__str__().encode('utf-8')
| bsd-3-clause | 635d84b4cffcaf760be836341e60fc9d | 32.424837 | 78 | 0.55045 | 4.181521 | false | false | false | false |
dyninc/dyn-python | dyn/tm/zones.py | 1 | 54026 | # -*- coding: utf-8 -*-
"""This module contains all Zone related API objects."""
import os
from time import sleep
from datetime import datetime
from dyn.tm.utils import unix_date
from dyn.compat import force_unicode
from dyn.tm.errors import (DynectCreateError, DynectGetError,
DynectInvalidArgumentError)
from dyn.tm.records import (ARecord, AAAARecord, ALIASRecord, CDSRecord,
CAARecord, CDNSKEYRecord, CSYNCRecord, CERTRecord,
CNAMERecord, DHCIDRecord, DNAMERecord,
DNSKEYRecord, DSRecord, KEYRecord, KXRecord,
LOCRecord, IPSECKEYRecord, MXRecord, NAPTRRecord,
PTRRecord, PXRecord, NSAPRecord, RPRecord,
NSRecord, SOARecord, SPFRecord, SRVRecord,
TLSARecord, TXTRecord, SSHFPRecord, UNKNOWNRecord)
from dyn.tm.session import DynectSession
from dyn.tm.services import (ActiveFailover, DynamicDNS, DNSSEC,
TrafficDirector, GSLB, ReverseDNS, RTTM,
HTTPRedirect, AdvancedRedirect)
from dyn.tm.task import Task
__author__ = 'jnappi'
__all__ = ['get_all_zones', 'Zone', 'SecondaryZone', 'Node',
'ExternalNameserver', 'ExternalNameserverEntry']
# Maps DynECT record-type mnemonics (as used by the API) to the client
# record classes used to instantiate them. 'UNKNOWN' is the fallback
# constructor for record types this client does not model explicitly.
RECS = {'A': ARecord, 'AAAA': AAAARecord, 'ALIAS': ALIASRecord,
        'CAA': CAARecord, 'CDS': CDSRecord, 'CDNSKEY': CDNSKEYRecord,
        'CSYNC': CSYNCRecord, 'CERT': CERTRecord, 'CNAME': CNAMERecord,
        'DHCID': DHCIDRecord, 'DNAME': DNAMERecord, 'DNSKEY': DNSKEYRecord,
        'DS': DSRecord, 'KEY': KEYRecord, 'KX': KXRecord, 'LOC': LOCRecord,
        'IPSECKEY': IPSECKEYRecord, 'MX': MXRecord, 'NAPTR': NAPTRRecord,
        'PTR': PTRRecord, 'PX': PXRecord, 'NSAP': NSAPRecord,
        'RP': RPRecord, 'NS': NSRecord, 'SOA': SOARecord, 'SPF': SPFRecord,
        'SRV': SRVRecord, 'TLSA': TLSARecord, 'TXT': TXTRecord,
        'SSHFP': SSHFPRecord, 'UNKNOWN': UNKNOWNRecord}
def get_all_zones():
    """Accessor function to retrieve a *list* of all
    :class:`~dyn.tm.zones.Zone`'s accessible to a user

    :return: a *list* of :class:`~dyn.tm.zones.Zone`'s
    """
    response = DynectSession.get_session().execute('/Zone/', 'GET',
                                                   {'detail': 'Y'})
    # api=False marks the objects as API-built so no extra calls are made
    return [Zone(zone_data['zone'], api=False, **zone_data)
            for zone_data in response['data']]
def get_all_secondary_zones():
    """Accessor function to retrieve a *list* of all :class:`SecondaryZone`'s
    accessible to a user

    :return: a *list* of :class:`~dyn.tm.zones.SecondaryZone`'s
    """
    response = DynectSession.get_session().execute('/Secondary/', 'GET',
                                                   {'detail': 'Y'})
    # pop() removes 'zone' from the kwargs before they are splatted in
    return [SecondaryZone(zone_data.pop('zone'), api=False, **zone_data)
            for zone_data in response['data']]
def get_apex(node_name, full_details=False):
    """Accessor function to retireve the apex zone name of a given node
    available to logged in user.

    :param node_name: name of the node to search for apex for.
    :param full_details: if true, returns zone_type, serial_type, and serial
        along with apex zone name
    :return: a *string* containing apex zone name, if full_details is
        :const:`False`, a :const:`dict` containing apex zone name otherwise
    """
    response = DynectSession.get_session().execute(
        '/Apex/{}'.format(node_name), 'GET', {})
    data = response['data']
    return data if full_details else data['zone']
class Zone(object):
"""A class representing a DynECT Zone"""
def __init__(self, name, *args, **kwargs):
    """Create a :class:`Zone` object. Note: When creating a new
    :class:`Zone` if no contact is specified the path to a local zone
    file must be passed to the ``file_name`` param.

    :param name: the name of the zone to create
    :param contact: Administrative contact for this zone
    :param ttl: TTL (in seconds) for records in the zone
    :param serial_style: The style of the zone's serial. Valid values:
        increment, epoch, day, minute
    :param file_name: The path to a valid RFC1035, BIND, or tinydns style
        Master file. Note: this file must be under 1mb in size.
    :param master_ip: The IP of the master server from which to fetch zone
        data for Transferring this :class:`Zone`. Note: This argument is
        required for performing a valid ZoneTransfer operation.
    :param timeout: The time, in minutes, to wait for a zone xfer to
        complete
    """
    super(Zone, self).__init__()
    self.valid_serials = ('increment', 'epoch', 'day', 'minute')
    self._name = name
    # fqdn is the zone name with a guaranteed trailing dot
    self._fqdn = self._name
    if self._fqdn and not self._fqdn.endswith('.'):
        self._fqdn += '.'
    self._contact = self._ttl = self._serial_style = self._serial = None
    self._zone = self._status = None
    self.records = {}
    self._task_id = None
    self.services = {}
    self.uri = '/Zone/{}/'.format(self._name)
    if 'api' in kwargs:
        # Built from an existing API response (e.g. get_all_zones):
        # copy the returned fields directly with no API round-trip.
        del kwargs['api']
        for key, val in kwargs.items():
            setattr(self, '_' + key, val)
        self._name = self._zone
        self.uri = '/Zone/{}/'.format(self._name)
    elif len(args) == 0 and len(kwargs) == 0:
        # Only a name supplied: fetch the existing zone.
        self._get()
    else:
        # Extra arguments: create a new zone on the DynECT System.
        self._post(*args, **kwargs)
    self._status = 'active'
def _post(self, contact=None, ttl=60, serial_style='increment',
          file_name=None, master_ip=None, timeout=None):
    """Create a new :class:`Zone` object on the DynECT System.

    Dispatches on which creation argument was provided: a zone file
    upload (``file_name``), a zone transfer (``master_ip``), or a plain
    create from ``contact``/``ttl``/``serial_style``.
    """
    # At least one creation mechanism must be supplied.
    if contact is None and file_name is None and master_ip is None:
        raise DynectInvalidArgumentError('contact', None)
    if file_name is not None:
        self._post_with_file(file_name)
    elif master_ip is not None:
        self._xfer(master_ip, timeout)
    else:
        self._contact = contact
        self._ttl = ttl
        if serial_style not in self.valid_serials:
            raise DynectInvalidArgumentError(serial_style,
                                             self.valid_serials)
        self._serial_style = serial_style
        api_args = {'zone': self._name,
                    'rname': self._contact,
                    'ttl': self._ttl,
                    'serial_style': self._serial_style}
        response = DynectSession.get_session().execute(self.uri, 'POST',
                                                       api_args)
        self._build(response['data'])
def _post_with_file(self, file_name):
    """Create a :class:`Zone` from a RFC1035 style Master file. A ZoneFile
    for BIND or tinydns will also be accepted

    :param file_name: The path to a valid ZoneFile
    :raises DynectInvalidArgumentError: if the file is over 1MB
    """
    full_path = os.path.abspath(file_name)
    file_size = os.path.getsize(full_path)
    # The API rejects master files larger than 1MB.
    if file_size > 1048576:
        raise DynectInvalidArgumentError('Zone File Size', file_size,
                                         'Under 1MB')
    uri = '/ZoneFile/{}/'.format(self.name)
    # Use a context manager so the handle is closed even if read()
    # raises; the original open/read/close leaked it on error.
    with open(full_path, 'r') as f:
        content = f.read()
    api_args = {'file': content}
    response = DynectSession.get_session().execute(uri, 'POST', api_args)
    # Zone file uploads are processed asynchronously; wait until the
    # zone becomes GET-able before building from the POST response.
    self.__poll_for_get()
    self._build(response['data'])
def _xfer(self, master_ip, timeout=None):
    """Create a :class:`Zone` by ZoneTransfer by providing an optional
    master_ip argument.
    """
    uri = '/ZoneTransfer/{}/'.format(self.name)
    api_args = {'master_ip': master_ip}
    response = DynectSession.get_session().execute(uri, 'POST', api_args)
    self._build(response['data'])
    # Poll once a minute, for at most `timeout` minutes (default 10),
    # until the transfer stops reporting 'running' with no message.
    time_out = timeout or 10
    count = 0
    while count < time_out:
        response = DynectSession.get_session().execute(uri, 'GET', {})
        if response['status'] == 'running' and response['message'] == '':
            sleep(60)
            count += 1
        else:
            break
    # Refresh this object from the (now transferred) zone.
    self._get()
def __poll_for_get(self, n_loops=10, xfer=False, xfer_master_ip=None):
    """For use ONLY by _post_with_file and _xfer. Will wait at MOST
    ``n_loops * 2`` seconds for a successful GET API response. If no
    successful get is received no error will be raised.
    """
    count = 0
    got = False
    # Retry the GET every 2 seconds until it succeeds or we give up.
    while count < n_loops:
        try:
            self._get()
            got = True
            break
        except DynectGetError:
            sleep(2)
            count += 1
    if not got and xfer:
        # For transfers, fall back to asking the ZoneTransfer endpoint
        # for the transfer status to decide success vs. failure.
        uri = '/ZoneTransfer/{}/'.format(self.name)
        api_args = {}
        if xfer_master_ip is not None:
            api_args['master_ip'] = xfer_master_ip
        response = DynectSession.get_session().execute(uri, 'GET',
                                                       api_args)
        error_labels = ['running', 'waiting', 'failed', 'canceled']
        ok_labels = ['ready', 'unpublished', 'ok']
        if response['data']['status'] in error_labels:
            raise DynectCreateError(response['msgs'])
        elif response['data']['status'] in ok_labels:
            self._get()
        else:
            pass  # Should never get here
def _get(self):
    """Fetch this :class:`Zone` from the DynECT System and populate this
    object from the response.
    """
    data = DynectSession.get_session().execute(self.uri, 'GET', {})
    self._build(data['data'])
def _build(self, data):
"""Build the variables in this object by pulling out the data from data
"""
for key, val in data.items():
if key == "task_id" and not val:
self._task_id = None
elif key == "task_id":
self._task_id = Task(val)
else:
setattr(self, '_' + key, val)
def _update(self, api_args):
    """Send *api_args* to the API via PUT and rebuild this :class:`Zone`
    from the JSON response.
    """
    data = DynectSession.get_session().execute(self.uri, 'PUT', api_args)
    self._build(data['data'])
@property
def __root_soa(self):
    """Return the SOA record associated with this Zone"""
    # Name-mangled helper; fetches the zone's single SOA record via the
    # API on every access.
    return self.get_all_records_by_type('SOA')[0]

@property
def name(self):
    """The name of this :class:`Zone`"""
    return self._name

@name.setter
def name(self, value):
    # Zone names are immutable; assignment is deliberately a no-op.
    pass

@property
def fqdn(self):
    """The name of this :class:`Zone`"""
    return self._fqdn

@fqdn.setter
def fqdn(self, value):
    pass

@property
def contact(self):
    """The email address of the primary :class:`Contact` associated with
    this :class:`Zone`
    """
    # Delegated to the root SOA record's rname field (API call).
    self._contact = self.__root_soa.rname
    return self._contact

@contact.setter
def contact(self, value):
    self.__root_soa.rname = value

@property
def ttl(self):
    """This :class:`Zone`'s default TTL"""
    # Delegated to the root SOA record (API call).
    self._ttl = self.__root_soa.ttl
    return self._ttl

@ttl.setter
def ttl(self, value):
    self.__root_soa.ttl = value

@property
def serial(self):
    """The current serial of this :class:`Zone`"""
    self._get()  # refresh from the API so the serial is current
    return self._serial

@serial.setter
def serial(self, value):
    pass

@property
def serial_style(self):
    """The current serial style of this :class:`Zone`"""
    self._get()
    return self._serial_style

@serial_style.setter
def serial_style(self, value):
    if value not in self.valid_serials:
        raise DynectInvalidArgumentError('serial_style', value,
                                         self.valid_serials)
    self.__root_soa.serial_style = value

@property
def status(self):
    """Convenience property for :class:`Zones`. If a :class:`Zones` is
    frozen the status will read as `'frozen'`, if the :class:`Zones` is not
    frozen the status will read as `'active'`. Because the API does not
    return information about whether or not a :class:`Zones` is frozen
    there will be a few cases where this status will be `None` in order to
    avoid guessing what the current status actually is.
    """
    self._get()
    return self._status

@status.setter
def status(self, value):
    pass
def freeze(self):
    """Causes the zone to become frozen. Freezing a zone prevents changes
    to the zone until it is thawed.
    """
    api_args = {'freeze': True}
    response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                   api_args)
    self._build(response['data'])
    # The API does not report frozen/active, so track it locally only
    # when the call actually succeeded.
    if response['status'] == 'success':
        self._status = 'frozen'

def thaw(self):
    """Causes the zone to become thawed. Thawing a frozen zone allows
    changes to again be made to the zone.
    """
    api_args = {'thaw': True}
    response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                   api_args)
    self._build(response['data'])
    if response['status'] == 'success':
        self._status = 'active'
def publish(self, notes=None):
    """Causes all pending changes to become part of the zone. The serial
    number increments based on its serial style and the data is pushed
    out to the nameservers.

    :param notes: optional note to record alongside the publish
    """
    payload = {'publish': True}
    if notes:
        payload['notes'] = notes
    self._update(payload)
@property
def task(self):
    """:class:`Task` for most recent system action on this :class:`Zone`.
    """
    # Refresh the task state before handing it back; None when the last
    # API response carried no task id.
    if self._task_id:
        self._task_id.refresh()
    return self._task_id
def get_notes(self, offset=None, limit=None):
    """Generates a report containing the Zone Notes for this :class:`Zone`

    :param offset: The starting point at which to retrieve the notes
    :param limit: The maximum number of notes to be retrieved
    :return: A :class:`list` of :class:`dict` containing :class:`Zone`
        Notes
    """
    report_args = {'zone': self.name}
    if offset:
        report_args['offset'] = offset
    if limit:
        report_args['limit'] = limit
    response = DynectSession.get_session().execute('/ZoneNoteReport/',
                                                   'POST', report_args)
    return response['data']
def add_record(self, name=None, record_type='A', *args, **kwargs):
    """Adds an a record with the provided name and data to this
    :class:`Zone`

    :param name: The name of the node where this record will be added
    :param record_type: The type of record you would like to add.
        Valid record_type arguments are: 'A', 'AAAA', 'CERT', 'CNAME',
        'DHCID', 'DNAME', 'DNSKEY', 'DS', 'KEY', 'KX', 'LOC', 'IPSECKEY',
        'MX', 'NAPTR', 'PTR', 'PX', 'NSAP', 'RP', 'NS', 'SOA', 'SPF',
        'SRV', and 'TXT'.
    :param args: Non-keyword arguments to pass to the Record constructor
    :param kwargs: Keyword arguments to pass to the Record constructor
    """
    # Root node when no name is given, otherwise <name>.<zone>.
    node_fqdn = self.name + '.'
    if name:
        node_fqdn = name + '.' + node_fqdn
    # noinspection PyCallingNonCallable
    rec = RECS[record_type](self.name, node_fqdn, *args, **kwargs)
    self.records.setdefault(record_type, []).append(rec)
    return rec
def add_service(self, name=None, service_type=None, *args, **kwargs):
    """Add the specified service type to this zone, or to a node under this
    zone

    :param name: The name of the :class:`Node` where this service will be
        attached to or `None` to attach it to the root :class:`Node` of
        this :class:`Zone`
    :param service_type: The type of the service you would like to create.
        Valid service_type arguments are: 'ActiveFailover', 'DDNS',
        'DNSSEC', 'DSF', 'GSLB', 'RDNS', 'RTTM', 'HTTPRedirect'
    :param args: Non-keyword arguments to pass to the Record constructor
    :param kwargs: Keyword arguments to pass to the Record constructor
    """
    constructors = {'ActiveFailover': ActiveFailover,
                    'DDNS': DynamicDNS,
                    'DNSSEC': DNSSEC,
                    'DSF': TrafficDirector,
                    'GSLB': GSLB,
                    'RDNS': ReverseDNS,
                    'RTTM': RTTM,
                    'HTTPRedirect': HTTPRedirect}
    fqdn = self.name + '.'
    if name:
        fqdn = name + '.' + fqdn
    service_cls = constructors[service_type]
    # DNSSEC is zone-wide and does not take an fqdn argument.
    if service_type == 'DNSSEC':
        # noinspection PyCallingNonCallable
        service = service_cls(self.name, *args, **kwargs)
    else:
        # noinspection PyCallingNonCallable
        service = service_cls(self.name, fqdn, *args, **kwargs)
    self.services.setdefault(service_type, []).append(service)
    return service
def get_all_nodes(self):
    """Returns a list of Node Objects for all subnodes in Zone (Excluding
    the Zone itself.)
    """
    node_list_uri = '/NodeList/{}/'.format(self._name)
    response = DynectSession.get_session().execute(node_list_uri, 'GET', {})
    # The NodeList endpoint includes the zone apex; filter it out.
    return [Node(self._name, node_fqdn)
            for node_fqdn in response['data']
            if node_fqdn != self._name]
def get_node(self, node=None):
    """Returns all DNS Records for that particular node

    :param node: The name of the Node you wish to access, or `None` to get
        the root :class:`Node` of this :class:`Zone`
    """
    fqdn = '{}.{}.'.format(node, self.name) if node else self.name + '.'
    return Node(self.name, fqdn)
def get_all_records(self):
    """Retrieve a list of all record resources for the specified node and
    zone combination as well as all records from any Base_Record below that
    point on the zone hierarchy

    :return: A :class:`List` of all the :class:`DNSRecord`'s under this
        :class:`Zone`
    """
    self.records = {}
    uri = '/AllRecord/{}/'.format(self._name)
    if self.fqdn is not None:
        uri += '{}/'.format(self.fqdn)
    api_args = {'detail': 'Y'}
    response = DynectSession.get_session().execute(uri, 'GET', api_args)
    # Strip out empty record_type lists
    record_lists = {label: rec_list for label, rec_list in
                    response['data'].items() if rec_list != []}
    records = {}
    for key, record_list in record_lists.items():
        # Keys look like 'a_records'; recover the mnemonic ('A') to pick
        # the right constructor, falling back to UNKNOWNRecord.
        search = key.split('_')[0].upper()
        try:
            constructor = RECS[search]
        except KeyError:
            constructor = RECS['UNKNOWN']
        list_records = []
        for record in record_list:
            # Drop fields the constructors take positionally, and
            # flatten the nested rdata dict into keyword arguments.
            del record['zone']
            fqdn = record['fqdn']
            del record['fqdn']
            # Unpack rdata
            for r_key, r_val in record['rdata'].items():
                record[r_key] = r_val
            # create=False: wrap existing data, do not POST a new record.
            record['create'] = False
            list_records.append(constructor(self._name, fqdn, **record))
        records[key] = list_records
    return records
    def get_all_records_by_type(self, record_type):
        """Get a list of all :class:`DNSRecord` of type ``record_type`` which
        are owned by this node.

        :param record_type: The type of :class:`DNSRecord` you wish returned.
            Valid record_type arguments are: 'A', 'AAAA', 'ALIAS', 'CAA',
            'CDS', 'CDNSKEY', 'CERT', 'CSYNC', 'CNAME', 'DHCID', 'DNAME',
            'DNSKEY', 'DS', 'KEY', 'KX', 'LOC', 'IPSECKEY', 'MX', 'NAPTR',
            'PTR', 'PX', 'NSAP', 'RP', 'NS', 'SOA', 'SPF', 'SRV', 'TLSA',
            'TXT', and 'SSHFP'.
        :return: A :class:`List` of :class:`DNSRecord`'s
        """
        # Maps the short record type to the API resource name used in the URI
        names = {'A': 'ARecord', 'AAAA': 'AAAARecord', 'ALIAS': 'ALIASRecord',
                 'CAA': 'CAARecord', 'CDS': 'CDSRecord', 'CDNSKEY':
                 'CDNSKEYRecord', 'CERT': 'CERTRecord', 'CSYNC': 'CSYNCRecord',
                 'CNAME': 'CNAMERecord', 'DHCID': 'DHCIDRecord', 'DNAME':
                 'DNAMERecord', 'DNSKEY': 'DNSKEYRecord', 'DS': 'DSRecord',
                 'KEY': 'KEYRecord', 'KX': 'KXRecord', 'LOC': 'LOCRecord',
                 'IPSECKEY': 'IPSECKEYRecord', 'MX': 'MXRecord', 'NAPTR':
                 'NAPTRRecord', 'PTR': 'PTRRecord', 'PX': 'PXRecord', 'NSAP':
                 'NSAPRecord', 'RP': 'RPRecord', 'NS': 'NSRecord', 'SOA':
                 'SOARecord', 'SPF': 'SPFRecord', 'SRV': 'SRVRecord', 'TLSA':
                 'TLSARecord', 'TXT': 'TXTRecord', 'SSHFP': 'SSHFPRecord'}
        constructor = RECS[record_type]
        uri = '/{}/{}/{}/'.format(names[record_type], self._name, self.fqdn)
        api_args = {'detail': 'Y'}
        response = DynectSession.get_session().execute(uri, 'GET', api_args)
        records = []
        for record in response['data']:
            # zone/fqdn go to the constructor positionally
            fqdn = record['fqdn']
            del record['fqdn']
            del record['zone']
            # Unpack rdata
            for key, val in record['rdata'].items():
                record[key] = val
            del record['rdata']
            record['create'] = False
            records.append(constructor(self._name, fqdn, **record))
        return records
    def get_any_records(self):
        """Retrieve a list of all :class:`DNSRecord`'s associated with this
        :class:`Zone`

        :return: A dict mapping record-type labels to lists of record
            objects, or ``None`` (implicitly) when this Zone has no fqdn set
        """
        # Without an fqdn there is no node to query against
        if self.fqdn is None:
            return
        api_args = {'detail': 'Y'}
        uri = '/ANYRecord/{}/{}/'.format(self._name, self.fqdn)
        response = DynectSession.get_session().execute(uri, 'GET', api_args)
        # Strip out empty record_type lists
        record_lists = {label: rec_list for label, rec_list in
                        response['data'].items() if rec_list != []}
        records = {}
        for key, record_list in record_lists.items():
            # The token before the first '_' selects the record constructor
            search = key.split('_')[0].upper()
            try:
                constructor = RECS[search]
            except KeyError:
                constructor = RECS['UNKNOWN']
            list_records = []
            for record in record_list:
                # zone/fqdn go to the constructor positionally
                del record['zone']
                del record['fqdn']
                # Unpack rdata
                for r_key, r_val in record['rdata'].items():
                    record[r_key] = r_val
                record['create'] = False
                list_records.append(constructor(self._name, self.fqdn,
                                                **record))
            records[key] = list_records
        return records
def get_all_active_failovers(self):
"""Retrieve a list of all :class:`ActiveFailover` services associated
with this :class:`Zone`
:return: A :class:`List` of :class:`ActiveFailover` Services
"""
uri = '/Failover/{}/'.format(self._name)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
afos = []
for failover in response['data']:
del failover['zone']
del failover['fqdn']
afos.append(ActiveFailover(self._name, self._fqdn, api=False,
**failover))
return afos
def get_all_ddns(self):
"""Retrieve a list of all :class:`DDNS` services associated with this
:class:`Zone`
:return: A :class:`List` of :class:`DDNS` Services
"""
uri = '/DDNS/{}/'.format(self._name)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
ddnses = []
for svc in response['data']:
del svc['zone']
del svc['fqdn']
ddnses.append(
DynamicDNS(self._name, self._fqdn, api=False, **svc))
return ddnses
def get_all_httpredirect(self):
"""Retrieve a list of all :class:`HTTPRedirect` services associated
with this :class:`Zone`
:return: A :class:`List` of :class:`HTTPRedirect` Services
"""
uri = '/HTTPRedirect/{}/'.format(self._name)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
httpredirs = []
for httpredir in response['data']:
fqdn = httpredir['fqdn']
del httpredir['zone']
del httpredir['fqdn']
httpredirs.append(
HTTPRedirect(self._name, fqdn, api=False, **httpredir))
return httpredirs
def get_all_advanced_redirect(self):
"""Retrieve a list of all :class:`AdvancedRedirect` services associated
with this :class:`Zone`
:return: A :class:`List` of :class:`AdvancedRedirect` Services
"""
uri = '/AdvRedirect/{}/'.format(self._name)
api_args = {'rules': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
advredirs = []
for advredir in response['data']:
del advredir['zone']
del advredir['fqdn']
advredirs.append(
AdvancedRedirect(self._name, self._fqdn,
api=False, **advredir))
return advredirs
def get_all_gslb(self):
"""Retrieve a list of all :class:`GSLB` services associated with this
:class:`Zone`
:return: A :class:`List` of :class:`GSLB` Services
"""
uri = '/GSLB/{}/'.format(self._name)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
gslbs = []
for gslb_svc in response['data']:
del gslb_svc['zone']
del gslb_svc['fqdn']
gslbs.append(GSLB(self._name, self._fqdn, api=False, **gslb_svc))
return gslbs
def get_all_rdns(self):
"""Retrieve a list of all :class:`ReverseDNS` services associated with
this :class:`Zone`
:return: A :class:`List` of :class:`ReverseDNS` Services
"""
uri = '/IPTrack/{}/'.format(self._name)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
rdnses = []
for rdns in response['data']:
del rdns['zone']
del rdns['fqdn']
rdnses.append(
ReverseDNS(self._name, self._fqdn, api=False, **rdns))
return rdnses
def get_all_rttm(self):
"""Retrieve a list of all :class:`RTTM` services associated with this
:class:`Zone`
:return: A :class:`List` of :class:`RTTM` Services
"""
uri = '/RTTM/{}/'.format(self._name)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(uri, 'GET', api_args)
rttms = []
for rttm_svc in response['data']:
del rttm_svc['zone']
del rttm_svc['fqdn']
rttms.append(RTTM(self._name, self._fqdn, api=False, **rttm_svc))
return rttms
def get_qps(self, start_ts, end_ts=None, breakdown=None, hosts=None,
rrecs=None):
"""Generates a report with information about Queries Per Second (QPS)
for this zone
:param start_ts: datetime.datetime instance identifying point in time
for the QPS report
:param end_ts: datetime.datetime instance indicating the end of the
data range for the report. Defaults to datetime.datetime.now()
:param breakdown: By default, most data is aggregated together.
Valid values ('hosts', 'rrecs', 'zones').
:param hosts: List of hosts to include in the report.
:param rrecs: List of record types to include in report.
:return: A :class:`str` with CSV data
"""
end_ts = end_ts or datetime.now()
api_args = {'start_ts': unix_date(start_ts),
'end_ts': unix_date(end_ts),
'zones': [self.name]}
if breakdown is not None:
api_args['breakdown'] = breakdown
if hosts is not None:
api_args['hosts'] = hosts
if rrecs is not None:
api_args['rrecs'] = rrecs
response = DynectSession.get_session().execute('/QPSReport/',
'POST', api_args)
return response['data']
def delete(self):
"""Delete this :class:`Zone` and perform nessecary cleanups"""
api_args = {}
DynectSession.get_session().execute(self.uri, 'DELETE', api_args)
def __eq__(self, other):
"""Equivalence operations for easily pulling a :class:`Zone` out of a
list of :class:`Zone` objects
"""
if isinstance(other, str):
return other == self._name
elif isinstance(other, Zone):
return other.name == self._name
return False
def __ne__(self, other):
"""Non-Equivalence operator"""
return not self.__eq__(other)
    def __str__(self):
        """str override"""
        return force_unicode('<Zone>: {}').format(self._name)
    # Python 2 compatibility: repr() and unicode() reuse the str form
    __repr__ = __unicode__ = __str__
    def __bytes__(self):
        """bytes override"""
        # NOTE(review): on Python 3, bytes() over a str without an encoding
        # raises TypeError; this looks like a Python 2 idiom -- confirm the
        # intended behavior on Python 3.
        return bytes(self.__str__())
class SecondaryZone(object):
    """A class representing DynECT Secondary zones"""
    def __init__(self, zone, *args, **kwargs):
        """Create a :class:`SecondaryZone` object

        :param zone: The name of this secondary zone
        :param masters: A list of IPv4 or IPv6 addresses of the master
            nameserver(s) for this zone.
        :param contact_nickname: Name of the :class:`Contact` that will
            receive notifications for this zone
        :param tsig_key_name: Name of the TSIG key that will be used to sign
            transfer requests to this zone's master
        """
        super(SecondaryZone, self).__init__()
        self._zone = self._name = zone
        self.uri = '/Secondary/{}/'.format(self._zone)
        self._masters = self._contact_nickname = self._tsig_key_name = None
        self._task_id = None
        if 'api' in kwargs:
            # Built from an API response: adopt the returned fields directly
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) == 0 and len(kwargs) == 0:
            # No creation arguments: fetch the existing zone from the API
            self._get()
        else:
            self._post(*args, **kwargs)
    def _get(self):
        """Get a :class:`SecondaryZone` object from the DynECT System"""
        api_args = {}
        response = DynectSession.get_session().execute(self.uri, 'GET',
                                                       api_args)
        self._build(response['data'])
    def _post(self, masters, contact_nickname=None, tsig_key_name=None):
        """Create a new :class:`SecondaryZone` object on the DynECT System"""
        self._masters = masters
        self._contact_nickname = contact_nickname
        self._tsig_key_name = tsig_key_name
        api_args = {'masters': self._masters}
        if contact_nickname:
            api_args['contact_nickname'] = self._contact_nickname
        if tsig_key_name:
            api_args['tsig_key_name'] = self._tsig_key_name
        response = DynectSession.get_session().execute(self.uri, 'POST',
                                                       api_args)
        self._build(response['data'])
    def _update(self, api_args, uri=None):
        """Update this :class:`SecondaryZone`, via the API, with the args in
        api_args

        :param uri: optional override of the endpoint to PUT against
        """
        if not uri:
            uri = self.uri
        response = DynectSession.get_session().execute(uri, 'PUT',
                                                       api_args)
        self._build(response['data'])
    def _build(self, data):
        """Build the variables in this object by pulling out the data from
        data
        """
        for key, val in data.items():
            if key == "task_id" and not val:
                self._task_id = None
            elif key == "task_id":
                # Wrap the raw id in a Task helper so it can be refreshed
                self._task_id = Task(val)
            else:
                setattr(self, '_' + key, val)
    @property
    def task(self):
        """:class:`Task` for most recent system action
        on this :class:`SecondaryZone`.
        """
        if self._task_id:
            self._task_id.refresh()
        return self._task_id
    @property
    def zone(self):
        """The name of this :class:`SecondaryZone` (read-only)"""
        return self._zone
    @zone.setter
    def zone(self, value):
        # Intentionally a no-op: the zone name may not be changed
        pass
    @property
    def masters(self):
        """A list of IPv4 or IPv6 addresses of the master nameserver(s) for
        this zone. Reading refreshes from the API first.
        """
        self._get()
        return self._masters
    @masters.setter
    def masters(self, value):
        self._masters = value
        api_args = {'masters': self._masters}
        self._update(api_args)
    @property
    def contact_nickname(self):
        """Name of the :class:`Contact` that will receive notifications for
        this zone. Reading refreshes from the API first.
        """
        self._get()
        return self._contact_nickname
    @contact_nickname.setter
    def contact_nickname(self, value):
        self._contact_nickname = value
        api_args = {'contact_nickname': self._contact_nickname}
        self._update(api_args)
    @property
    def tsig_key_name(self):
        """Name of the TSIG key that will be used to sign transfer requests
        to this zone's master. Reading refreshes from the API first.
        """
        self._get()
        return self._tsig_key_name
    @tsig_key_name.setter
    def tsig_key_name(self, value):
        self._tsig_key_name = value
        api_args = {'tsig_key_name': self._tsig_key_name}
        self._update(api_args)
    def activate(self):
        """Activates this secondary zone"""
        api_args = {'activate': True}
        self._update(api_args)
    def deactivate(self):
        """Deactivates this secondary zone"""
        api_args = {'deactivate': True}
        self._update(api_args)
    def retransfer(self):
        """Retransfers this secondary zone from its original provider into
        Dyn's Managed DNS
        """
        api_args = {'retransfer': True}
        self._update(api_args)
    def delete(self):
        """Delete this :class:`SecondaryZone`"""
        api_args = {}
        # Deletion goes through the primary /Zone/ endpoint, not /Secondary/
        uri = '/Zone/{}/'.format(self._zone)
        DynectSession.get_session().execute(uri, 'DELETE', api_args)
    @property
    def active(self):
        """Reports the status of :class:`SecondaryZone` Y, L or N"""
        self._get()
        return self._active
    @property
    def serial(self):
        """Reports the serial of :class:`SecondaryZone`"""
        api_args = {}
        # NOTE(review): this refreshes via a PUT with empty args against the
        # /Zone/ endpoint before reading -- confirm a GET was not intended.
        uri = '/Zone/{}/'.format(self._zone)
        self._update(api_args, uri)
        return self._serial
    def __str__(self):
        """str override"""
        return force_unicode('<SecondaryZone>: {}').format(self._zone)
    # Python 2 compatibility aliases
    __repr__ = __unicode__ = __str__
    def __bytes__(self):
        """bytes override"""
        return bytes(self.__str__())
class Node(object):
    """Node object. Represents a valid fqdn node within a zone. It should be
    noted that simply creating a :class:`Node` object does not actually create
    anything on the DynECT System. The only way to actively create a
    :class:`Node` on the DynECT System is by attaching either a record or a
    service to it.
    """
    def __init__(self, zone, fqdn=None):
        """Create a :class:`Node` object

        :param zone: name of the zone that this Node belongs to
        :param fqdn: the fully qualified domain name of this zone
        """
        super(Node, self).__init__()
        self.zone = zone
        self.fqdn = fqdn or self.zone + '.'
        # NOTE(review): records and my_records alias the SAME dict object;
        # mutating one mutates the other -- confirm that is intentional.
        self.records = self.my_records = {}
        self.services = []
    def add_record(self, record_type='A', *args, **kwargs):
        """Adds a record with the provided data to this :class:`Node`

        :param record_type: The type of record you would like to add.
            Valid record_type arguments are: 'A', 'AAAA', 'CERT', 'CNAME',
            'DHCID', 'DNAME', 'DNSKEY', 'DS', 'KEY', 'KX', 'LOC', 'IPSECKEY',
            'MX', 'NAPTR', 'PTR', 'PX', 'NSAP', 'RP', 'NS', 'SOA', 'SPF',
            'SRV', and 'TXT'.
        :param args: Non-keyword arguments to pass to the Record constructor
        :param kwargs: Keyword arguments to pass to the Record constructor
        :return: the newly created record object
        """
        # noinspection PyCallingNonCallable
        rec = RECS[record_type](self.zone, self.fqdn, *args, **kwargs)
        # Group the new record with any previously-added records of its type
        if record_type in self.records:
            self.records[record_type].append(rec)
        else:
            self.records[record_type] = [rec]
        return rec
    def add_service(self, service_type=None, *args, **kwargs):
        """Add the specified service type to this :class:`Node`

        :param service_type: The type of the service you would like to create.
            Valid service_type arguments are: 'ActiveFailover', 'DDNS',
            'DNSSEC', 'DSF', 'GSLB', 'RDNS', 'RTTM', 'HTTPRedirect'
        :param args: Non-keyword arguments to pass to the Record constructor
        :param kwargs: Keyword arguments to pass to the Record constructor
        :return: the newly created service object
        """
        # Dispatch table from service_type label to service constructor
        constructors = {'ActiveFailover': ActiveFailover,
                        'DDNS': DynamicDNS,
                        'DNSSEC': DNSSEC,
                        'DSF': TrafficDirector,
                        'GSLB': GSLB,
                        'RDNS': ReverseDNS,
                        'RTTM': RTTM,
                        'HTTPRedirect': HTTPRedirect}
        # noinspection PyCallingNonCallable
        service = constructors[service_type](self.zone, self.fqdn, *args,
                                             **kwargs)
        self.services.append(service)
        return service
    def get_all_records(self):
        """Retrieve a list of all record resources for the specified node and
        zone combination as well as all records from any Base_Record below
        that point on the zone hierarchy

        :return: A dict mapping record-type labels to lists of record objects
        """
        # NOTE(review): self.records is cleared but never repopulated; a
        # separate local dict is returned instead.
        self.records = {}
        uri = '/AllRecord/{}/'.format(self.zone)
        if self.fqdn is not None:
            uri += '{}/'.format(self.fqdn)
        api_args = {'detail': 'Y'}
        response = DynectSession.get_session().execute(uri, 'GET', api_args)
        # Strip out empty record_type lists
        record_lists = {label: rec_list for label, rec_list in
                        response['data'].items() if rec_list != []}
        records = {}
        for key, record_list in record_lists.items():
            # The token before the first '_' selects the record constructor
            search = key.split('_')[0].upper()
            try:
                constructor = RECS[search]
            except KeyError:
                constructor = RECS['UNKNOWN']
            list_records = []
            for record in record_list:
                # zone/fqdn go to the constructor positionally
                del record['zone']
                fqdn = record['fqdn']
                del record['fqdn']
                # Unpack rdata
                for r_key, r_val in record['rdata'].items():
                    record[r_key] = r_val
                record['create'] = False
                list_records.append(constructor(self.zone, fqdn, **record))
            records[key] = list_records
        return records
    def get_all_records_by_type(self, record_type):
        """Get a list of all :class:`DNSRecord` of type ``record_type`` which
        are owned by this node.

        :param record_type: The type of :class:`DNSRecord` you wish returned.
            Valid record_type arguments are: 'A', 'AAAA', 'ALIAS', 'CAA',
            'CERT', 'CNAME', 'DHCID', 'DNAME', 'DNSKEY', 'DS', 'KEY', 'KX',
            'LOC', 'IPSECKEY', 'MX', 'NAPTR', 'PTR', 'PX', 'NSAP', 'RP',
            'NS', 'SOA', 'SPF', 'SRV', 'TLSA', 'TXT', and 'SSHFP'.
        :return: A list of :class:`DNSRecord`'s
        """
        # Maps the short record type to the API resource name in the URI
        names = {'A': 'ARecord', 'AAAA': 'AAAARecord', 'CAA': 'CAARecord',
                 'CERT': 'CERTRecord', 'CNAME': 'CNAMERecord',
                 'DHCID': 'DHCIDRecord', 'DNAME': 'DNAMERecord',
                 'DNSKEY': 'DNSKEYRecord', 'DS': 'DSRecord',
                 'KEY': 'KEYRecord', 'KX': 'KXRecord', 'LOC': 'LOCRecord',
                 'IPSECKEY': 'IPSECKEYRecord', 'MX': 'MXRecord',
                 'NAPTR': 'NAPTRRecord', 'PTR': 'PTRRecord',
                 'PX': 'PXRecord', 'NSAP': 'NSAPRecord', 'RP': 'RPRecord',
                 'NS': 'NSRecord', 'SOA': 'SOARecord', 'SPF': 'SPFRecord',
                 'SRV': 'SRVRecord', 'TLSA': 'TLSARecord',
                 'TXT': 'TXTRecord', 'SSHFP': 'SSHFPRecord',
                 'ALIAS': 'ALIASRecord'}
        constructor = RECS[record_type]
        uri = '/{}/{}/{}/'.format(names[record_type], self.zone,
                                  self.fqdn)
        api_args = {'detail': 'Y'}
        response = DynectSession.get_session().execute(uri, 'GET', api_args)
        records = []
        for record in response['data']:
            # zone/fqdn go to the constructor positionally
            fqdn = record['fqdn']
            del record['fqdn']
            del record['zone']
            # Unpack rdata
            for key, val in record['rdata'].items():
                record[key] = val
            del record['rdata']
            record['create'] = False
            records.append(constructor(self.zone, fqdn, **record))
        return records
    def get_any_records(self):
        """Retrieve a list of all recs

        :return: A dict mapping record-type labels to lists of record
            objects, or ``None`` (implicitly) when this Node has no fqdn
        """
        if self.fqdn is None:
            return
        api_args = {'detail': 'Y'}
        uri = '/ANYRecord/{}/{}/'.format(self.zone, self.fqdn)
        response = DynectSession.get_session().execute(uri, 'GET', api_args)
        # Strip out empty record_type lists
        record_lists = {label: rec_list for label, rec_list in
                        response['data'].items() if rec_list != []}
        records = {}
        for key, record_list in record_lists.items():
            # The token before the first '_' selects the record constructor
            search = key.split('_')[0].upper()
            try:
                constructor = RECS[search]
            except KeyError:
                constructor = RECS['UNKNOWN']
            list_records = []
            for record in record_list:
                # zone/fqdn go to the constructor positionally
                del record['zone']
                del record['fqdn']
                # Unpack rdata
                for r_key, r_val in record['rdata'].items():
                    record[r_key] = r_val
                record['create'] = False
                list_records.append(
                    constructor(self.zone, self.fqdn, **record))
            records[key] = list_records
        return records
    def delete(self):
        """Delete this node, any records within this node, and any nodes
        underneath this node
        """
        uri = '/Node/{}/{}'.format(self.zone, self.fqdn)
        DynectSession.get_session().execute(uri, 'DELETE', {})
    def __str__(self):
        """str override"""
        return force_unicode('<Node>: {}').format(self.fqdn)
    # Python 2 compatibility aliases
    __repr__ = __unicode__ = __str__
    def __bytes__(self):
        """bytes override"""
        return bytes(self.__str__())
class TSIG(object):
    """A class representing DynECT TSIG Records"""

    def __init__(self, name, *args, **kwargs):
        """Create a :class:`TSIG` object

        :param name: The name of the TSIG key for :class:`TSIG` object
        :param algorithm: Algorithm used for :class:`TSIG` object. Valid
            options: hmac-sha1, hmac-md5, hmac-sha224, hmac-sha256,
            hmac-sha384, hmac-sha512
        :param secret: Secret key used by :class:`TSIG` object
        """
        self._name = name
        self.uri = '/TSIGKey/{}/'.format(self._name)
        self._secret = None
        self._algorithm = None
        if not args and not kwargs:
            # No creation arguments: fetch the existing key from the API
            self._get()
        else:
            self._post(*args, **kwargs)

    def _refresh(self, data):
        """Copy each returned field onto a '_'-prefixed attribute."""
        for field, value in data.items():
            setattr(self, '_' + field, value)

    def _get(self):
        """Get a :class:`TSIG` object from the DynECT System"""
        response = DynectSession.get_session().execute(
            self.uri, 'GET', {'name': self._name})
        self._refresh(response['data'])

    def _post(self, *args, **kwargs):
        """Create a new :class:`TSIG` object on the DynECT System"""
        api_args = {'name': self._name, 'secret': kwargs['secret'],
                    'algorithm': kwargs['algorithm']}
        response = DynectSession.get_session().execute(
            self.uri, 'POST', api_args)
        self._refresh(response['data'])

    @property
    def secret(self):
        """The secret key of this :class:`TSIG` object, refreshed from the
        API on every read
        """
        self._get()
        return self._secret

    @secret.setter
    def secret(self, secret):
        """Set the secret key of this :class:`TSIG` object

        :param secret: key
        """
        response = DynectSession.get_session().execute(
            self.uri, 'PUT', {'name': self._name, 'secret': secret})
        self._refresh(response['data'])

    @property
    def algorithm(self):
        """The algorithm of this :class:`TSIG` object, refreshed from the
        API on every read
        """
        self._get()
        return self._algorithm

    @algorithm.setter
    def algorithm(self, algorithm):
        """Set the algorithm of this :class:`TSIG` object

        :param algorithm: hmac-sha1, hmac-md5, hmac-sha224, hmac-sha256,
            hmac-sha384, hmac-sha512
        """
        response = DynectSession.get_session().execute(
            self.uri, 'PUT', {'name': self._name, 'algorithm': algorithm})
        self._refresh(response['data'])

    @property
    def name(self):
        """The name of the TSIG key in this :class:`TSIG`"""
        return self._name

    def delete(self):
        """Remove this :class:`TSIG` key from the DynECT System"""
        DynectSession.get_session().execute(self.uri, 'DELETE', {})
class ExternalNameserver(object):
    """A class representing DynECT External Nameserver """
    def __init__(self, zone, *args, **kwargs):
        """Create a :class:`ExternalNameserver` object

        :param zone: The name of the zone for this :class:`ExternalNameserver`
        :param deny: does this block requests or add them
        :param hosts: list of :class:`ExternalNameserverEntry`
        :param active: active? Y/N
        :param tsig_key_name: Name of TSIG to associate with this
            :class:`ExternalNameserver`
        """
        self._zone = zone
        self.uri = '/ExtNameserver/{}/'.format(self._zone)
        self._deny = None
        self._hosts = None
        self._active = None
        self._tsig_key_name = None
        if len(args) == 0 and len(kwargs) == 0:
            # No creation arguments: fetch the existing service from the API
            self._get()
        else:
            self._post(*args, **kwargs)
    def _get(self):
        """Get a :class:`ExternalNameserver` object from the DynECT System"""
        api_args = {'zone': self._zone}
        response = DynectSession.get_session().execute(self.uri, 'GET',
                                                       api_args)
        self._build(response['data'])
    def _post(self, *args, **kwargs):
        """Create a new :class:`ExternalNameserver`
        object on the DynECT System"""
        api_args = {'zone': self._zone}
        # Each optional field is only sent when the caller supplied a truthy
        # value for it
        self._deny = kwargs.get('deny', None)
        if self._deny:
            api_args['deny'] = self._deny
        self._tsig_key_name = kwargs.get('tsig_key_name', None)
        if self._tsig_key_name:
            api_args['tsig_key_name'] = self._tsig_key_name
        self._active = kwargs.get('active', None)
        if self._active:
            api_args['active'] = self._active
        self._hosts = kwargs.get('hosts', None)
        if self._hosts:
            # Serialize entry objects; pass through pre-built dicts untouched
            api_args['hosts'] = list()
            for host in self._hosts:
                if isinstance(host, ExternalNameserverEntry):
                    api_args['hosts'].append(host._json)
                else:
                    api_args['hosts'].append(host)
        response = DynectSession.get_session().execute(self.uri, 'POST',
                                                       api_args)
        self._build(response['data'])
    def _update(self, api_args=None):
        """Update an existing :class:`ExternalNameserver` Service
        on the DynECT System"""
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])
    def _build(self, data):
        # Host payloads are re-wrapped as ExternalNameserverEntry objects;
        # every other field lands on a '_'-prefixed attribute
        self._hosts = []
        for key, val in data.items():
            if key == 'hosts':
                for host in val:
                    # NOTE(review): the 'api' flag set here is not passed to
                    # ExternalNameserverEntry below -- confirm it is needed.
                    host['api'] = 'Y'
                    self._hosts.append(ExternalNameserverEntry(
                        host['address'], notifies=host['notifies']))
                continue
            setattr(self, '_' + key, val)
    @property
    def deny(self):
        """Gets deny value :class:`ExternalNameserver` object"""
        self._get()
        return self._deny
    @deny.setter
    def deny(self, deny):
        """
        Sets deny value of :class:`ExternalNameserver` object

        :param deny: Y/N
        """
        api_args = {'zone': self._zone, 'deny': deny}
        self._update(api_args=api_args)
    @property
    def tsig_key_name(self):
        """Gets tsig_key_name value :class:`ExternalNameserver` object"""
        self._get()
        return self._tsig_key_name
    @tsig_key_name.setter
    def tsig_key_name(self, tsig_key_name):
        """
        Sets tsig_key_name value of :class:`ExternalNameserver` object

        :param tsig_key_name: name of the TSIG key to associate
        """
        api_args = {'zone': self._zone, 'tsig_key_name': tsig_key_name}
        self._update(api_args=api_args)
    @property
    def hosts(self):
        """
        :class:`ExternalNameserver` hosts. list of ExternalNameserverEntries
        """
        self._get()
        return self._hosts
    @hosts.setter
    def hosts(self, value):
        api_args = dict()
        api_args['hosts'] = list()
        # Serialize entry objects; pass through pre-built dicts untouched
        for host in value:
            if isinstance(host, ExternalNameserverEntry):
                api_args['hosts'].append(host._json)
            else:
                api_args['hosts'].append(host)
        self._update(api_args)
    @property
    def active(self):
        """Gets active status of :class:`ExternalNameserver` object. """
        self._get()
        return self._active
    @active.setter
    def active(self, active):
        """
        Sets active status of :class:`ExternalNameserver` object.

        :param active: Y/N
        """
        api_args = {'zone': self._zone, 'active': active}
        self._update(api_args=api_args)
    @property
    def zone(self):
        """Gets name of zone in :class:`ExternalNameserver` (read-only)"""
        return self._zone
    def delete(self):
        """Remove this :class:`ExternalNameserver` from the DynECT System"""
        api_args = {}
        DynectSession.get_session().execute(self.uri, 'DELETE',
                                            api_args)
class ExternalNameserverEntry(object):
    """A class representing a DynECT :class:`ExternalNameserverEntry`"""

    def __init__(self, address, *args, **kwargs):
        """Create a :class:`ExternalNameserverEntry` object

        :param address: address or CIDR of this nameserver Entry
        :param notifies: Y/N Do we send notifies to this host?
        """
        self._address = address
        self._notifies = kwargs.get('notifies')

    @property
    def _json(self):
        """The JSON-serializable representation of this
        :class:`ExternalNameserverEntry`, omitting unset fields
        """
        blob = {'address': self._address, 'notifies': self._notifies}
        return {field: value for field, value in blob.items()
                if value is not None}

    @property
    def address(self):
        """The address value of this :class:`ExternalNameserverEntry`"""
        return self._address

    @address.setter
    def address(self, address):
        """Set the address of this :class:`ExternalNameserverEntry`

        :param address: address or CIDR
        """
        self._address = address

    @property
    def notifies(self):
        """The notifies value of this :class:`ExternalNameserverEntry`"""
        return self._notifies

    @notifies.setter
    def notifies(self, notifies):
        """Set notifies for this :class:`ExternalNameserverEntry`

        :param notifies: send notifies to this server. Y/N
        """
        self._notifies = notifies

    def __str__(self):
        """str override"""
        template = force_unicode(
            '<ExternalNameserverEntry>: {}, Notifies: {}')
        return template.format(self._address, self._notifies)

    __repr__ = __unicode__ = __str__

    def __bytes__(self):
        """bytes override"""
        return bytes(self.__str__())
| bsd-3-clause | bacc668766d30481c0bdedcb293106fa | 36.754018 | 79 | 0.538426 | 4.025183 | false | false | false | false |
dyninc/dyn-python | dyn/tm/services/advanced_redirect.py | 2 | 14363 | # -*- coding: utf-8 -*-
"""This module contains API Wrapper implementations of the Advanced Redirect
service
"""
from dyn.compat import force_unicode
from dyn.tm.session import DynectSession
__author__ = 'mhowes'
__all__ = ['AdvancedRedirect',
'AdvancedRedirectRule',
'get_all_advanced_redirect_rules']
def get_all_advanced_redirect_rules(zone, fqdn):
    """Return every :class:`AdvancedRedirectRule` for the service attached to
    ``zone``/``fqdn``.

    :param zone: zone for query
    :param fqdn: fqdn for query
    """
    uri = '/AdvRedirectRule/{}/{}/'.format(zone, fqdn)
    response = DynectSession.get_session().execute(uri, 'GET', {})
    return [
        AdvancedRedirectRule(zone, fqdn, api=True, **rule)
        for rule in response['data']
    ]
class AdvancedRedirect(object):
    """:class:`AdvancedRedirect` is a service which sets up a
    redirect which utilizes an ordered list of rules to match
    requests to and then forward them.
    """
    def __init__(self, zone, fqdn, *args, **kwargs):
        """Create a new :class:`AdvancedRedirect` service object

        :param zone: The zone to attach this :class:`AdvancedRedirect`
            Service to
        :param fqdn: The FQDN of the node where this service will be attached
        :param rules: list of :class:`AdvancedRedirectRule` In the order which
            they are to be implemented.
        :param active: Y/N Whether this service is active or not.
        """
        self._zone = zone
        self._fqdn = fqdn
        self._active = kwargs.get("active", None)
        self._rules = kwargs.get("rules", None)
        if 'api' in kwargs:
            # Built from an API response: adopt the returned fields directly
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) + len(kwargs) == 0:
            # No service arguments at all: fetch the existing service
            self._get()
        else:
            # active/rules were captured above; create the service
            self._post()
    def _get(self):
        """Build an object around an existing DynECT :class:`AdvancedRedirect`
        Service"""
        self.uri = '/AdvRedirect/{}/{}/'.format(self._zone, self._fqdn)
        api_args = {'detail': 'Y'}
        response = DynectSession.get_session().execute(self.uri, 'GET',
                                                       api_args)
        for key, val in response['data'].items():
            setattr(self, '_' + key, val)
    def _post(self):
        """Create a new :class:`AdvancedRedirect` Service on
        the DynECT System"""
        self.uri = '/AdvRedirect/{}/{}/'.format(self._zone, self._fqdn)
        api_args = {'active': self._active}
        if self._rules:
            # Serialize rule objects; pass through pre-built dicts untouched
            api_args['rules'] = list()
            for rule in self._rules:
                if isinstance(rule, AdvancedRedirectRule):
                    api_args['rules'].append(rule._json)
                else:
                    api_args['rules'].append(rule)
        response = DynectSession.get_session().execute(self.uri, 'POST',
                                                       api_args)
        self._build(response['data'])
    def _update(self, api_args=None):
        """Update an existing :class:`AdvancedRedirect` Service
        on the DynECT System"""
        self.uri = '/AdvRedirect/{}/{}/'.format(self._zone, self._fqdn)
        response = DynectSession.get_session().execute(self.uri, 'PUT',
                                                       api_args)
        self._build(response['data'])
    def _build(self, data):
        # Rule payloads are re-wrapped as AdvancedRedirectRule objects;
        # every other field lands on a '_'-prefixed attribute
        self._rules = []
        for key, val in data.items():
            if key == 'rules':
                for rule in val:
                    rule['api'] = 'Y'
                    self._rules.append(AdvancedRedirectRule(self._zone,
                                                            self._fqdn,
                                                            **rule))
                continue
            setattr(self, '_' + key, val)
    @property
    def zone(self):
        """The zone that this :class:`AdvancedRedirect` Service is
        attached to is a read-only attribute
        """
        return self._zone
    @zone.setter
    def zone(self, value):
        # Intentionally a no-op: the zone may not be changed
        pass
    @property
    def fqdn(self):
        """The fqdn that this :class:`AdvancedRedirect` Service is
        attached to is a read-only attribute
        """
        return self._fqdn
    @fqdn.setter
    def fqdn(self, value):
        # Intentionally a no-op: the fqdn may not be changed
        pass
    @property
    def active(self):
        """
        :class:`AdvancedRedirect` active Y/N

        Reading refreshes from the API first.
        """
        self._get()
        return self._active
    @active.setter
    def active(self, value):
        api_args = {'active': value}
        self._update(api_args)
    @property
    def rules(self):
        """
        :class:`AdvancedRedirect` rules. An ordered list of
        :class:`AdvancedRedirectRules`
        """
        # _get() refreshes _rules with raw dicts from the API; wrap them here
        self._get()
        return [AdvancedRedirectRule(self.zone, self.fqdn, api=True, **rule)
                for rule in self._rules]
    @rules.setter
    def rules(self, value):
        api_args = dict()
        api_args['rules'] = list()
        # Serialize rule objects; pass through pre-built dicts untouched
        for rule in value:
            if isinstance(rule, AdvancedRedirectRule):
                api_args['rules'].append(rule._json)
            else:
                api_args['rules'].append(rule)
        self._update(api_args)
    def delete(self):
        """Delete this :class:`AdvancedRedirect` service from the DynECT
        System """
        self.uri = '/AdvRedirect/{}/{}'.format(self._zone, self._fqdn)
        DynectSession.get_session().execute(self.uri, 'DELETE', {})
    def __str__(self):
        """str override"""
        return force_unicode('<AdvRedirect>: {}').format(self._fqdn)
    # Python 2 compatibility aliases
    __repr__ = __unicode__ = __str__
    def __bytes__(self):
        """bytes override"""
        return bytes(self.__str__())
class AdvancedRedirectRule(object):
""":class:`AdvancedRedirectRule` handles Rules for the
:class:`AdvancedRedirect` service """
    def __init__(self, *args, **kwargs):
        """Create a new :class:`AdvancedRedirectRule` service object

        AdvancedRedirectRule(zone, fqdn, option1=..., option2=...)

        :param zone: The zone to attach this :class:`AdvancedRedirectRule`
            Service to
        :param fqdn: The FQDN of the node where this service will be attached.
        :param code: HTTP response code to return for redirection.
        :param url_pattern: The target URL pattern for matching this rule.
        :param active: Y/N whether this Rule should be active or not.
        :param host_prefix: host prefix for rule to match.
        :param path: path for rule to match.
        :param next_public_id: public_id of the next rule to match in the
            chain.
        :param public_id: public_id of this rule.
        """
        # zone and fqdn arrive positionally; both default to None so the
        # object can also be built from kwargs alone
        self._zone = None
        self._fqdn = None
        if len(args) >= 1:
            self._zone = args[0]
        if len(args) >= 2:
            self._fqdn = args[1]
        self._code = kwargs.get("code", None)
        self._host_prefix = kwargs.get("host_prefix", None)
        self._path = kwargs.get("path", None)
        # self._query = kwargs.get("query",None)
        self._url_pattern = kwargs.get("url_pattern", None)
        self._active = kwargs.get("active", None)
        self._next_public_id = kwargs.get("next_public_id", None)
        self._public_id = kwargs.get("public_id", None)
        if 'api' in kwargs:
            # Built from an API response: adopt the provided fields directly
            del kwargs['api']
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) == 0 and len(kwargs) > 0:
            # kwargs-only construction: purely local, no API call is made
            for key, val in kwargs.items():
                setattr(self, '_' + key, val)
        elif len(args) == 2 and self._public_id:
            # zone + fqdn + a known public_id: fetch the existing rule
            self._get()
        else:
            self._post()
def _get(self):
"""Build an object around an existing DynECT
:class:`AdvancedRedirectRule` Service"""
self.uri = '/AdvRedirectRule/{}/{}/{}'.format(self._zone,
self._fqdn,
self._public_id)
api_args = {'detail': 'Y'}
response = DynectSession.get_session().execute(self.uri, 'GET',
api_args)
for key, val in response['data'].items():
setattr(self, '_' + key, val)
def _post(self):
"""Create a new :class:`AdvancedRedirectRule` Service
on the DynECT System"""
api_args = dict()
self.uri = '/AdvRedirectRule/{}/{}/'.format(self._zone,
self._fqdn)
if self._code:
api_args['code'] = self._code
if self._host_prefix:
api_args['host_prefix'] = self._host_prefix
if self._path:
api_args['path'] = self._path
# if self._query:
# api_args['query'] = self._query
if self._url_pattern:
api_args['url_pattern'] = self._url_pattern
if self._active:
api_args['active'] = self._active
if self._next_public_id:
api_args['next_public_id'] = self._next_public_id
response = DynectSession.get_session().execute(self.uri, 'POST',
api_args)
self._build(response['data'])
def _update(self, api_args=None):
"""Update an existing :class:`AdvancedRedirectRule` Service
on the DynECT System"""
self.uri = '/AdvRedirectRule/{}/{}/{}'.format(self._zone,
self._fqdn,
self._public_id)
response = DynectSession.get_session().execute(self.uri, 'PUT',
api_args)
self._build(response['data'])
def _build(self, data):
for key, val in data.items():
setattr(self, '_' + key, val)
@property
def _json(self):
"""Get the JSON representation of this :class:`AdvancedRedirectRule`
object
"""
json_blob = {'code': self._code,
'host_prefix': self._host_prefix,
'active': self._active,
'path': self._path,
# 'query': self._query,
'next_public_id': self._next_public_id,
'url_pattern': self._url_pattern,
}
return {x: json_blob[x] for x in json_blob if json_blob[x] is not None}
@property
def zone(self):
"""The zone that this :class:`AdvancedRedirectRule` Service is
attached to is a read-only attribute
"""
return self._zone
@zone.setter
def zone(self, value):
pass
@property
def fqdn(self):
"""The fqdn that this :class:`AdvancedRedirectRule` Service is
attached to is a read-only attribute
"""
return self._fqdn
@fqdn.setter
def fqdn(self, value):
pass
@property
def active(self):
"""
:class:`AdvancedRedirectRule` active Y/N
"""
self._get()
return self._active
@active.setter
def active(self, value):
api_args = {'active': value}
self._update(api_args)
@property
def code(self):
"""
:class:`AdvancedRedirectRule` Code: 301, 302, 404
"""
self._get()
return self._code
@code.setter
def code(self, value):
api_args = {'code': value}
self._update(api_args)
@property
def public_id(self):
"""
:class:`AdvancedRedirectRule` public_id
"""
self._get()
return self._public_id
@public_id.setter
def public_id(self, value):
api_args = {'public_id': value}
self._update(api_args)
@property
def next_public_id(self):
"""
:class:`AdvancedRedirectRule` next_public_id. That is, public_id of
the next :class:`AdvancedRedirectRule` to be acted upon
"""
self._get()
return self._next_public_id
@next_public_id.setter
def next_public_id(self, value):
api_args = {'next_public_id': value}
self._update(api_args)
@property
def host_prefix(self):
"""
:class:`AdvancedRedirectRule` host_prefix. the `help`
in `http://help.dyn.com`
"""
self._get()
return self._host_prefix
@host_prefix.setter
def host_prefix(self, value):
api_args = {'host_prefix': value}
self._update(api_args)
# @property
# def query(self):
# """
# query of Rule
# """
# self._get()
# return self._query
#
# @query.setter
# def query(self, value):
# api_args = {'query': value}
# self._update(api_args)
@property
def path(self):
"""
:class:`AdvancedRedirectRule` path. the `help` in
`http://www.dyn.com/help`
"""
self._get()
return self._path
@path.setter
def path(self, value):
api_args = {'path': value}
self._update(api_args)
@property
def url_pattern(self):
"""
:class:`AdvancedRedirectRule` url pattern.
used to implement how the redirect is written.
"""
self._get()
return self._url_pattern
@url_pattern.setter
def url_pattern(self, value):
api_args = {'url_pattern': value}
self._update(api_args)
def delete(self):
"""Delete this :class:`AdvancedRedirectRule` service
from the DynECT System
"""
self.uri = '/AdvRedirectRule/{}/{}/{}'.format(self._zone,
self._fqdn,
self._public_id)
DynectSession.get_session().execute(self.uri, 'DELETE', {})
def __str__(self):
"""str override"""
return force_unicode(
'<AdvRedirectRule>: {}, {}, Active: {}, Public_Id: {}').format(
self._fqdn,
self._url_pattern,
self._active,
self._public_id)
__repr__ = __unicode__ = __str__
def __bytes__(self):
"""bytes override"""
return bytes(self.__str__())
| bsd-3-clause | d5d33f8872c404ffd287f7413b6e5b26 | 31.942661 | 79 | 0.516744 | 4.205857 | false | false | false | false |
qutip/qutip | qutip/qip/device/modelprocessor.py | 1 | 5755 | from collections.abc import Iterable
import numbers
import numpy as np
from qutip.qobj import Qobj
from qutip.qobjevo import QobjEvo
from qutip.qip.operations.gates import globalphase
from qutip.tensor import tensor
from qutip.mesolve import mesolve
from qutip.qip.circuit import QubitCircuit
from qutip.qip.device.processor import Processor
__all__ = ['ModelProcessor']
class ModelProcessor(Processor):
    """
    The base class for a circuit processor simulating a physical device,
    e.g cavityQED, spinchain.
    The available Hamiltonian of the system is predefined.
    The processor can simulate the evolution under the given
    control pulses either numerically or analytically.
    It cannot be used alone, please refer to the sub-classes.
    (Only additional attributes are documented here, for others please
    refer to the parent class :class:`.Processor`)
    Parameters
    ----------
    N: int
        The number of component systems.
    correct_global_phase: boolean, optional
        If true, the analytical solution will track the global phase. It
        has no effect on the numerical solution.
    t1: list or float
        Characterize the decoherence of amplitude damping for
        each qubit. A list of size `N` or a float for all qubits.
    t2: list of float
        Characterize the decoherence of dephasing for
        each qubit. A list of size `N` or a float for all qubits.
    Attributes
    ----------
    params: dict
        A Python dictionary contains the name and the value of the parameters
        in the physical realization, such as laser frequency, detuning etc.
    correct_global_phase: float
        Save the global phase, the analytical solution
        will track the global phase.
        It has no effect on the numerical solution.
    """
    def __init__(self, N, correct_global_phase=True, t1=None, t2=None):
        super(ModelProcessor, self).__init__(N, t1=t1, t2=t2)
        # Whether the analytical propagator should track the global phase.
        self.correct_global_phase = correct_global_phase
        # Accumulated global phase (used only by the analytical solution).
        self.global_phase = 0.
        # Hardware parameters; filled in by subclasses via ``set_up_params``.
        self._params = {}
    def to_array(self, params, N):
        """
        Transfer a parameter to an array.
        """
        # A scalar is broadcast to all N subsystems; an iterable is used
        # as-is.  NOTE(review): any other input type falls through and
        # returns None silently.
        if isinstance(params, numbers.Real):
            return np.asarray([params] * N)
        elif isinstance(params, Iterable):
            return np.asarray(params)
    def set_up_params(self):
        """
        Save the parameters in the attribute `params` and check the validity.
        (Defined in subclasses)
        Notes
        -----
        All parameters will be multiplied by 2*pi for simplicity
        """
        raise NotImplementedError("Parameters should be defined in subclass.")
    @property
    def params(self):
        return self._params
    @params.setter
    def params(self, par):
        self._params = par
    def run_state(self, init_state=None, analytical=False, qc=None,
                  states=None, **kwargs):
        """
        If `analytical` is False, use :func:`qutip.mesolve` to
        calculate the time of the state evolution
        and return the result. Other arguments of mesolve can be
        given as keyword arguments.
        If `analytical` is True, calculate the propagator
        with matrix exponentiation and return a list of matrices.
        Parameters
        ----------
        init_state: Qobj
            Initial density matrix or state vector (ket).
        analytical: boolean
            If True, calculate the evolution with matrices exponentiation.
        qc: :class:`.QubitCircuit`, optional
            A quantum circuit. If given, it first calls the ``load_circuit``
            and then calculate the evolution.
        states: :class:`qutip.Qobj`, optional
            Old API, same as init_state.
        **kwargs
            Keyword arguments for the qutip solver.
        Returns
        -------
        evo_result: :class:`qutip.solver.Result`
            If ``analytical`` is False, an instance of the class
            :class:`qutip.solver.Result` will be returned.
            If ``analytical`` is True, a list of matrices representation
            is returned.
        """
        # Loading the circuit (if given) generates the control pulses,
        # after which the parent class performs the actual evolution.
        if qc is not None:
            self.load_circuit(qc)
        return super(ModelProcessor, self).run_state(
            init_state=init_state, analytical=analytical,
            states=states, **kwargs)
    def get_ops_and_u(self):
        """
        Get the labels for each Hamiltonian.
        Returns
        -------
        ctrls: list
            The list of Hamiltonians
        coeffs: array_like
            The transposed pulse matrix
        """
        return (self.ctrls, self.get_full_coeffs().T)
    def pulse_matrix(self, dt=0.01):
        """
        Generates the pulse matrix for the desired physical system.
        Returns
        -------
        t, u, labels:
            Returns the total time and label for every operation.
        """
        ctrls = self.ctrls
        coeffs = self.get_full_coeffs().T
        # FIXME This might becomes a problem if new tlist other than
        # int the default pulses are added.
        tlist = self.get_full_tlist()
        dt_list = tlist[1:] - tlist[:-1]
        t_tot = tlist[-1]
        num_step = int(np.ceil(t_tot / dt))
        t = np.linspace(0, t_tot, num_step)
        u = np.zeros((len(ctrls), num_step))
        # Resample each piecewise-constant pulse segment onto the uniform
        # time grid ``t`` so all controls share one matrix ``u``.
        t_start = 0
        for n in range(len(dt_list)):
            t_idx_len = int(np.floor(dt_list[n] / dt))
            mm = 0
            for m in range(len(ctrls)):
                u[mm, t_start:(t_start + t_idx_len)] = (np.ones(t_idx_len) *
                                                        coeffs[n, m])
                mm += 1
            t_start += t_idx_len
        return t, u, self.get_operators_labels()
| bsd-3-clause | 70f4f1489bfec716e55092cd042a2289 | 31.150838 | 78 | 0.601911 | 4.10778 | false | false | false | false |
qutip/qutip | qutip/visualization.py | 1 | 57029 | """
Functions for visualizing results of quantum dynamics simulations,
visualizations of quantum states and processes.
"""
# Public names exported by this module.  NOTE: the previous version listed
# 'sphereplot' twice; the duplicate entry has been removed (harmless for
# ``import *`` but misleading to readers).
__all__ = ['hinton', 'sphereplot', 'energy_level_diagram',
           'plot_energy_levels', 'fock_distribution',
           'plot_fock_distribution', 'wigner_fock_distribution',
           'plot_wigner_fock_distribution', 'plot_wigner',
           'plot_expectation_values', 'plot_spin_distribution_2d',
           'plot_spin_distribution_3d', 'plot_qubism', 'plot_schmidt',
           'complex_array_to_rgb', 'matrix_histogram',
           'matrix_histogram_complex', 'plot_wigner_sphere']
import warnings
import itertools as it
import numpy as np
from numpy import pi, array, sin, cos, angle, log2
from packaging.version import parse as parse_version
from qutip.qobj import Qobj, isket
from qutip.states import ket2dm
from qutip.wigner import wigner
from qutip.tensor import tensor
from qutip.matplotlib_utilities import complex_phase_cmap
from qutip.superoperator import vector_to_operator
from qutip.superop_reps import _super_to_superpauli, _isqubitdims
from qutip import settings
# matplotlib is an optional dependency: if it (or its 3D toolkit) cannot be
# imported, the module still loads and the plotting functions simply fail at
# call time with a NameError on ``plt``/``mpl``.
try:
    import matplotlib.pyplot as plt
    import matplotlib as mpl
    from matplotlib import cm
    from mpl_toolkits.mplot3d import Axes3D
    # Define a custom _axes3D function based on the matplotlib version.
    # The auto_add_to_figure keyword is new for matplotlib>=3.4.
    if parse_version(mpl.__version__) >= parse_version('3.4'):
        def _axes3D(fig, *args, **kwargs):
            ax = Axes3D(fig, *args, auto_add_to_figure=False, **kwargs)
            return fig.add_axes(ax)
    else:
        def _axes3D(*args, **kwargs):
            return Axes3D(*args, **kwargs)
# BUGFIX: was a bare ``except:`` which also swallowed KeyboardInterrupt and
# SystemExit; narrowed to Exception so only genuine import/setup failures
# are suppressed.
except Exception:
    pass
def plot_wigner_sphere(fig, ax, wigner, reflections):
    """Plots a coloured Bloch sphere.
    Parameters
    ----------
    fig : :obj:`matplotlib.figure.Figure`
        An instance of :obj:`~matplotlib.figure.Figure`.
    ax : :obj:`matplotlib.axes.Axes`
        An axes instance in the given figure.
    wigner : list of float
        The wigner transformation at `steps` different theta and phi.
    reflections : bool
        If the reflections of the sphere should be plotted as well.
    Notes
    ------
    Special thanks to Russell P Rundle for writing this function.
    """
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("z")
    # Spherical grid: one sample per entry of ``wigner`` in each direction.
    steps = len(wigner)
    theta = np.linspace(0, np.pi, steps)
    phi = np.linspace(0, 2 * np.pi, steps)
    x = np.outer(np.sin(theta), np.cos(phi))
    y = np.outer(np.sin(theta), np.sin(phi))
    z = np.outer(np.cos(theta), np.ones(steps))
    wigner = np.real(wigner)
    # Symmetric colour scale about zero so positive/negative quasi-
    # probability map to opposite ends of the diverging colormap.
    wigner_max = np.real(np.amax(np.abs(wigner)))
    wigner_c1 = cm.seismic_r((wigner + wigner_max) / (2 * wigner_max))
    # Plot coloured Bloch sphere:
    ax.plot_surface(x, y, z, facecolors=wigner_c1, vmin=-wigner_max,
                    vmax=wigner_max, rcount=steps, ccount=steps, linewidth=0,
                    zorder=0.5, antialiased=None)
    if reflections:
        # Flat projections of the sphere onto the bounding planes.
        wigner_c2 = cm.seismic_r((wigner[0:steps, 0:steps]+wigner_max) /
                                 (2*wigner_max))  # bottom
        wigner_c3 = cm.seismic_r((wigner[0:steps, 0:steps]+wigner_max) /
                                 (2*wigner_max))  # side
        wigner_c4 = cm.seismic_r((wigner[0:steps, 0:steps]+wigner_max) /
                                 (2*wigner_max))  # back
        # Plot bottom reflection:
        ax.plot_surface(x[0:steps, 0:steps], y[0:steps, 0:steps],
                        -1.5*np.ones((steps, steps)), facecolors=wigner_c2,
                        vmin=-wigner_max, vmax=wigner_max, rcount=steps/2,
                        ccount=steps/2, linewidth=0, zorder=0.5,
                        antialiased=False)
        # Plot side reflection:
        ax.plot_surface(-1.5*np.ones((steps, steps)), y[0:steps, 0:steps],
                        z[0:steps, 0:steps], facecolors=wigner_c3,
                        vmin=-wigner_max, vmax=wigner_max, rcount=steps/2,
                        ccount=steps/2, linewidth=0, zorder=0.5,
                        antialiased=False)
        # Plot back reflection:
        ax.plot_surface(x[0:steps, 0:steps], 1.5*np.ones((steps, steps)),
                        z[0:steps, 0:steps], facecolors=wigner_c4,
                        vmin=-wigner_max, vmax=wigner_max, rcount=steps/2,
                        ccount=steps/2, linewidth=0, zorder=0.5,
                        antialiased=False)
    # Create colourbar:
    # NOTE(review): ``plt.colorbar(m, ...)`` without an explicit ``ax=``
    # relies on a current axes existing — confirm against newer matplotlib.
    m = cm.ScalarMappable(cmap=cm.seismic_r)
    m.set_array([-wigner_max, wigner_max])
    plt.colorbar(m, shrink=0.5, aspect=10)
    plt.show()
# Adopted from the SciPy Cookbook.
def _blob(x, y, w, w_max, area, color_fn, ax=None):
"""
Draws a square-shaped blob with the given area (< 1) at
the given coordinates.
"""
hs = np.sqrt(area) / 2
xcorners = array([x - hs, x + hs, x + hs, x - hs])
ycorners = array([y - hs, y - hs, y + hs, y + hs])
if ax is not None:
handle = ax
else:
handle = plt
handle.fill(xcorners, ycorners,
color=color_fn(w))
def _cb_labels(left_dims):
"""Creates plot labels for matrix elements in the computational basis.
Parameters
----------
left_dims : flat list of ints
Dimensions of the left index of a density operator. E. g.
[2, 3] for a qubit tensored with a qutrit.
Returns
-------
left_labels, right_labels : lists of strings
Labels for the left and right indices of a density operator
(kets and bras, respectively).
"""
# FIXME: assumes dims, such that we only need left_dims == dims[0].
basis_labels = list(map(",".join, it.product(*[
map(str, range(dim))
for dim in left_dims
])))
return [
map(fmt.format, basis_labels) for fmt in
(
r"$\langle{}|$",
r"$|{}\rangle$",
)
]
# Adopted from the SciPy Cookbook.
def hinton(rho, xlabels=None, ylabels=None, title=None, ax=None, cmap=None,
           label_top=True, color_style="scaled"):
    """Draws a Hinton diagram for visualizing a density matrix or superoperator.
    Parameters
    ----------
    rho : qobj
        Input density matrix or superoperator.
    xlabels : list of strings or False
        list of x labels
    ylabels : list of strings or False
        list of y labels
    title : string
        title of the plot (optional)
    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.
    cmap : a matplotlib colormap instance
        Color map to use when plotting.
    label_top : bool
        If True, x-axis labels will be placed on top, otherwise
        they will appear below the plot.
    color_style : string
        Determines how colors are assigned to each square:
        -  If set to ``"scaled"`` (default), each color is chosen by
           passing the absolute value of the corresponding matrix
           element into `cmap` with the sign of the real part.
        -  If set to ``"threshold"``, each square is plotted as
           the maximum of `cmap` for the positive real part and as
           the minimum for the negative part of the matrix element;
           note that this generalizes `"threshold"` to complex numbers.
        -  If set to ``"phase"``, each color is chosen according to
           the angle of the corresponding matrix element.
    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.  Note that ``fig`` is ``None`` when an existing ``ax``
        was passed in.
    Raises
    ------
    ValueError
        Input argument is not a quantum object.
    Examples
    --------
    >>> import qutip
    >>>
    >>> dm = qutip.rand_dm(4)
    >>> fig, ax = qutip.hinton(dm)
    >>> fig.show()
    >>>
    >>> qutip.settings.colorblind_safe = True
    >>> fig, ax = qutip.hinton(dm, color_style="threshold")
    >>> fig.show()
    >>> qutip.settings.colorblind_safe = False
    >>>
    >>> fig, ax = qutip.hinton(dm, color_style="phase")
    >>> fig.show()
    """
    # Apply default colormaps.
    # TODO: abstract this away into something that makes default
    # colormaps.
    cmap = (
        (cm.Greys_r if settings.colorblind_safe else cm.RdBu)
        if cmap is None else cmap
    )
    # Extract plotting data W from the input.
    if isinstance(rho, Qobj):
        if rho.isoper:
            W = rho.full()
            # Create default labels if none are given.
            if xlabels is None or ylabels is None:
                labels = _cb_labels(rho.dims[0])
                xlabels = xlabels if xlabels is not None else list(labels[0])
                ylabels = ylabels if ylabels is not None else list(labels[1])
        elif rho.isoperket:
            W = vector_to_operator(rho).full()
        elif rho.isoperbra:
            W = vector_to_operator(rho.dag()).full()
        elif rho.issuper:
            if not _isqubitdims(rho.dims):
                raise ValueError("Hinton plots of superoperators are "
                                 "currently only supported for qubits.")
            # Convert to a superoperator in the Pauli basis,
            # so that all the elements are real.
            sqobj = _super_to_superpauli(rho)
            nq = int(log2(sqobj.shape[0]) / 2)
            W = sqobj.full().T
            # Create default labels, too.
            if (xlabels is None) or (ylabels is None):
                labels = list(map("".join, it.product("IXYZ", repeat=nq)))
                xlabels = xlabels if xlabels is not None else labels
                ylabels = ylabels if ylabels is not None else labels
        else:
            raise ValueError(
                "Input quantum object must be an operator or superoperator."
            )
    else:
        # Plain array input: plotted as-is, no default labels generated.
        W = rho
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(8, 6))
    else:
        fig = None
    if not (xlabels or ylabels):
        ax.axis('off')
    if title:
        ax.set_title(title)
    ax.axis('equal')
    ax.set_frame_on(False)
    height, width = W.shape
    # 1.25 head-room so even the largest element leaves a visible gap.
    w_max = 1.25 * max(abs(np.array(W)).flatten())
    if w_max <= 0.0:
        w_max = 1.0
    # Set color_fn here.
    if color_style == "scaled":
        def color_fn(w):
            w = np.abs(w) * np.sign(np.real(w))
            return cmap(int((w + w_max) * 256 / (2 * w_max)))
    elif color_style == "threshold":
        def color_fn(w):
            w = np.real(w)
            return cmap(255 if w > 0 else 0)
    elif color_style == "phase":
        def color_fn(w):
            return cmap(int(255 * (np.angle(w) / 2 / np.pi + 0.5)))
    else:
        raise ValueError(
            "Unknown color style {} for Hinton diagrams.".format(color_style)
        )
    # Neutral mid-colormap background behind all blobs.
    ax.fill(array([0, width, width, 0]), array([0, 0, height, height]),
            color=cmap(128))
    for x in range(width):
        for y in range(height):
            _x = x + 1
            _y = y + 1
            # Blob area is proportional to |W[y, x]| (capped at 1).
            _blob(
                _x - 0.5, height - _y + 0.5, W[y, x], w_max,
                min(1, abs(W[y, x]) / w_max), color_fn=color_fn, ax=ax)
    # color axis
    vmax = np.pi if color_style == "phase" else abs(W).max()
    norm = mpl.colors.Normalize(-vmax, vmax)
    cax, kw = mpl.colorbar.make_axes(ax, shrink=0.75, pad=.1)
    mpl.colorbar.ColorbarBase(cax, norm=norm, cmap=cmap)
    xtics = 0.5 + np.arange(width)
    # x axis
    ax.xaxis.set_major_locator(plt.FixedLocator(xtics))
    if xlabels:
        nxlabels = len(xlabels)
        if nxlabels != len(xtics):
            raise ValueError(f"got {nxlabels} xlabels but needed {len(xtics)}")
        ax.set_xticklabels(xlabels)
        if label_top:
            ax.xaxis.tick_top()
    ax.tick_params(axis='x', labelsize=14)
    # y axis
    ytics = 0.5 + np.arange(height)
    ax.yaxis.set_major_locator(plt.FixedLocator(ytics))
    if ylabels:
        nylabels = len(ylabels)
        if nylabels != len(ytics):
            raise ValueError(f"got {nylabels} ylabels but needed {len(ytics)}")
        # Row 0 of W is drawn at the top, so labels are reversed.
        ax.set_yticklabels(list(reversed(ylabels)))
    ax.tick_params(axis='y', labelsize=14)
    return fig, ax
def sphereplot(theta, phi, values, fig=None, ax=None, save=False):
    """Plots a matrix of values on a sphere
    Parameters
    ----------
    theta : float
        Angle with respect to z-axis
    phi : float
        Angle in x-y plane
    values : array
        Data set to be plotted
    fig : a matplotlib Figure instance
        The Figure canvas in which the plot will be drawn.
    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.
    save : bool {False , True}
        Whether to save the figure or not
    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    # NOTE(review): a fresh figure AND axes are created if either is missing,
    # so passing only ``fig`` discards it.
    if fig is None or ax is None:
        fig = plt.figure()
        ax = _axes3D(fig)
    thetam, phim = np.meshgrid(theta, phi)
    # Unit-sphere coordinates; scaled by |values| below.
    xx = sin(thetam) * cos(phim)
    yy = sin(thetam) * sin(phim)
    zz = cos(thetam)
    r = array(abs(values))
    ph = angle(values)
    # normalize color range based on phase angles in list ph
    nrm = mpl.colors.Normalize(ph.min(), ph.max())
    # plot with facecolors set to cm.jet colormap normalized to nrm
    ax.plot_surface(r * xx, r * yy, r * zz, rstride=1, cstride=1,
                    facecolors=cm.jet(nrm(ph)), linewidth=0)
    # create new axes on plot for colorbar and shrink it a bit.
    # pad shifts location of bar with respect to the main plot
    cax, kw = mpl.colorbar.make_axes(ax, shrink=.66, pad=.02)
    # create new colorbar in axes cax with cm jet and normalized to nrm like
    # our facecolors
    cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cm.jet, norm=nrm)
    # add our colorbar label
    cb1.set_label('Angle')
    if save:
        plt.savefig("sphereplot.png")
    return fig, ax
def _remove_margins(axis):
"""
removes margins about z = 0 and improves the style
by monkey patching
"""
def _get_coord_info_new(renderer):
mins, maxs, centers, deltas, tc, highs = \
_get_coord_info_old(renderer)
mins += deltas / 4
maxs -= deltas / 4
return mins, maxs, centers, deltas, tc, highs
_get_coord_info_old = axis._get_coord_info
axis._get_coord_info = _get_coord_info_new
def _truncate_colormap(cmap, minval=0.0, maxval=1.0, n=100):
    """Return a copy of ``cmap`` restricted to the range [minval, maxval].

    ``cmap`` may be a colormap instance or the name of a registered
    colormap; ``n`` is the number of samples taken from the original map.
    """
    if isinstance(cmap, str):
        cmap = plt.get_cmap(cmap)
    truncated_name = 'trunc({n},{a:.2f},{b:.2f})'.format(
        n=cmap.name, a=minval, b=maxval)
    samples = cmap(np.linspace(minval, maxval, n))
    return mpl.colors.LinearSegmentedColormap.from_list(
        truncated_name, samples)
def _stick_to_planes(stick, azim, ax, M, spacing):
"""adjusts xlim and ylim in way that bars will
Stick to xz and yz planes
"""
if stick is True:
azim = azim % 360
if 0 <= azim <= 90:
ax.set_ylim(1 - .5,)
ax.set_xlim(1 - .5,)
elif 90 < azim <= 180:
ax.set_ylim(1 - .5,)
ax.set_xlim(0, M.shape[0] + (.5 - spacing))
elif 180 < azim <= 270:
ax.set_ylim(0, M.shape[1] + (.5 - spacing))
ax.set_xlim(0, M.shape[0] + (.5 - spacing))
elif 270 < azim < 360:
ax.set_ylim(0, M.shape[1] + (.5 - spacing))
ax.set_xlim(1 - .5,)
def _update_yaxis(spacing, M, ax, ylabels):
    """
    Place y ticks at the bar centres of a 3D histogram and label them.

    Parameters
    ----------
    spacing : float
        Gap between neighbouring bars; ticks sit at the bar centres.
    M : array-like
        Matrix being plotted; ``M.shape[1]`` gives the number of ticks.
    ax : a matplotlib 3D axes instance
        The axes to update.
    ylabels : list of str or None
        Custom tick labels; when falsy, zero-based indices are used.

    Raises
    ------
    ValueError
        If the number of ``ylabels`` does not match the number of ticks.
    """
    ytics = [x + (1 - (spacing / 2)) for x in range(M.shape[1])]
    ax.axes.w_yaxis.set_major_locator(plt.FixedLocator(ytics))
    if ylabels:
        nylabels = len(ylabels)
        if nylabels != len(ytics):
            raise ValueError(f"got {nylabels} ylabels but needed {len(ytics)}")
        ax.set_yticklabels(ylabels)
    else:
        # Default labels are zero-based indices.  BUGFIX: the original code
        # first set one-based labels and immediately overwrote them with
        # zero-based ones; the dead first call has been removed.
        ax.set_yticklabels([str(i) for i in range(M.shape[1])])
    ax.tick_params(axis='y', labelsize=14)
    ax.set_yticks([y + (1 - (spacing / 2)) for y in range(M.shape[1])])
def _update_xaxis(spacing, M, ax, xlabels):
    """
    Place x ticks at the bar centres of a 3D histogram and label them.

    Parameters
    ----------
    spacing : float
        Gap between neighbouring bars; ticks sit at the bar centres.
    M : array-like
        Matrix being plotted; ``M.shape[0]`` gives the number of ticks.
    ax : a matplotlib 3D axes instance
        The axes to update.
    xlabels : list of str or None
        Custom tick labels; when falsy, zero-based indices are used.

    Raises
    ------
    ValueError
        If the number of ``xlabels`` does not match the number of ticks.
    """
    xtics = [x + (1 - (spacing / 2)) for x in range(M.shape[1])]
    ax.axes.w_xaxis.set_major_locator(plt.FixedLocator(xtics))
    if xlabels:
        nxlabels = len(xlabels)
        if nxlabels != len(xtics):
            raise ValueError(f"got {nxlabels} xlabels but needed {len(xtics)}")
        ax.set_xticklabels(xlabels)
    else:
        # Default labels are zero-based indices.  BUGFIX: the original code
        # first set one-based labels and immediately overwrote them with
        # zero-based ones; the dead first call has been removed.
        ax.set_xticklabels([str(i) for i in range(M.shape[0])])
    ax.tick_params(axis='x', labelsize=14)
    ax.set_xticks([x + (1 - (spacing / 2)) for x in range(M.shape[0])])
def _update_zaxis(ax, z_min, z_max, zticks):
    """Configure the z-axis locator, optional explicit ticks and limits.

    The lower limit is extended down to zero when ``z_min`` is positive so
    the bars always start from the z = 0 plane.
    """
    ax.axes.w_zaxis.set_major_locator(plt.IndexLocator(1, 0.5))
    if isinstance(zticks, list):
        ax.set_zticks(zticks)
    lower = min(z_min, 0)
    ax.set_zlim3d([lower, z_max])
def matrix_histogram(M, xlabels=None, ylabels=None, title=None, limits=None,
                     colorbar=True, fig=None, ax=None, options=None):
    """
    Draw a histogram for the matrix M, with the given x and y labels and title.
    Parameters
    ----------
    M : Matrix of Qobj
        The matrix to visualize
    xlabels : list of strings
        list of x labels
    ylabels : list of strings
        list of y labels
    title : string
        title of the plot (optional)
    limits : list/array with two float numbers
        The z-axis limits [min, max] (optional)
    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.
    colorbar : bool (default: True)
        show colorbar
    options : dict
        A dictionary containing extra options for the plot.
        The names (keys) and values of the options are
        described below:
        'zticks' : list of numbers
            A list of z-axis tick locations.
        'cmap' : string (default: 'jet')
            The name of the color map to use.
        'cmap_min' : float (default: 0.0)
            The lower bound to truncate the color map at.
            A value in range 0 - 1. The default, 0, leaves the lower
            bound of the map unchanged.
        'cmap_max' : float (default: 1.0)
            The upper bound to truncate the color map at.
            A value in range 0 - 1. The default, 1, leaves the upper
            bound of the map unchanged.
        'bars_spacing' : float (default: 0.1)
            spacing between bars.
        'bars_alpha' : float (default: 1.)
            transparency of bars, should be in range 0 - 1
        'bars_lw' : float (default: 0.5)
            linewidth of bars' edges.
        'bars_edgecolor' : color (default: 'k')
            The colors of the bars' edges.
            Examples: 'k', (0.1, 0.2, 0.5) or '#0f0f0f80'.
        'shade' : bool (default: True)
            Whether to shade the dark sides of the bars (True) or not (False).
            The shading is relative to plot's source of light.
        'azim' : float
            The azimuthal viewing angle.
        'elev' : float
            The elevation viewing angle.
        'proj_type' : string (default: 'ortho' if ax is not passed)
            The type of projection ('ortho' or 'persp')
        'stick' : bool (default: False)
            Changes xlim and ylim in such a way that bars next to
            XZ and YZ planes will stick to those planes.
            This option has no effect if ``ax`` is passed as a parameter.
        'cbar_pad' : float (default: 0.04)
            The fraction of the original axes between the colorbar
            and the new image axes.
            (i.e. the padding between the 3D figure and the colorbar).
        'cbar_to_z' : bool (default: False)
            Whether to set the color of maximum and minimum z-values to the
            maximum and minimum colors in the colorbar (True) or not (False).
        'figsize' : tuple of two numbers
            The size of the figure.
    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    Raises
    ------
    ValueError
        Input argument is not valid.
    """
    # default options
    default_opts = {'figsize': None, 'cmap': 'jet', 'cmap_min': 0.,
                    'cmap_max': 1., 'zticks': None, 'bars_spacing': 0.2,
                    'bars_alpha': 1., 'bars_lw': 0.5, 'bars_edgecolor': 'k',
                    'shade': False, 'azim': -35, 'elev': 35,
                    'proj_type': 'ortho', 'stick': False,
                    'cbar_pad': 0.04, 'cbar_to_z': False}
    # update default_opts from input options
    if options is None:
        pass
    elif isinstance(options, dict):
        # check if keys in options dict are valid
        if set(options) - set(default_opts):
            raise ValueError("invalid key(s) found in options: "
                             f"{', '.join(set(options) - set(default_opts))}")
        else:
            # updating default options
            default_opts.update(options)
    else:
        raise ValueError("options must be a dictionary")
    if isinstance(M, Qobj):
        # extract matrix data from Qobj
        M = M.full()
    # One bar per matrix element, positioned on a (row, column) grid.
    n = np.size(M)
    xpos, ypos = np.meshgrid(range(M.shape[0]), range(M.shape[1]))
    xpos = xpos.T.flatten() + 0.5
    ypos = ypos.T.flatten() + 0.5
    zpos = np.zeros(n)
    dx = dy = (1 - default_opts['bars_spacing']) * np.ones(n)
    # Bar heights are the real parts of the matrix elements.
    dz = np.real(M.flatten())
    if isinstance(limits, list) and len(limits) == 2:
        z_min = limits[0]
        z_max = limits[1]
    else:
        z_min = min(dz)
        z_max = max(dz)
        # Avoid a degenerate (zero-height) z range for constant matrices.
        if z_min == z_max:
            z_min -= 0.1
            z_max += 0.1
    if default_opts['cbar_to_z']:
        norm = mpl.colors.Normalize(min(dz), max(dz))
    else:
        norm = mpl.colors.Normalize(z_min, z_max)
    cmap = _truncate_colormap(default_opts['cmap'],
                              default_opts['cmap_min'],
                              default_opts['cmap_max'])
    colors = cmap(norm(dz))
    if ax is None:
        fig = plt.figure(figsize=default_opts['figsize'])
        ax = _axes3D(fig,
                     azim=default_opts['azim'] % 360,
                     elev=default_opts['elev'] % 360)
        ax.set_proj_type(default_opts['proj_type'])
    ax.bar3d(xpos, ypos, zpos, dx, dy, dz, color=colors,
             edgecolors=default_opts['bars_edgecolor'],
             linewidths=default_opts['bars_lw'],
             alpha=default_opts['bars_alpha'],
             shade=default_opts['shade'])
    # remove vertical lines on xz and yz plane
    ax.yaxis._axinfo["grid"]['linewidth'] = 0
    ax.xaxis._axinfo["grid"]['linewidth'] = 0
    if title:
        ax.set_title(title)
    # x axis
    _update_xaxis(default_opts['bars_spacing'], M, ax, xlabels)
    # y axis
    _update_yaxis(default_opts['bars_spacing'], M, ax, ylabels)
    # z axis
    _update_zaxis(ax, z_min, z_max, default_opts['zticks'])
    # stick to xz and yz plane
    _stick_to_planes(default_opts['stick'],
                     default_opts['azim'], ax, M,
                     default_opts['bars_spacing'])
    # color axis
    if colorbar:
        cax, kw = mpl.colorbar.make_axes(ax, shrink=.75,
                                         pad=default_opts['cbar_pad'])
        mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
    # removing margins
    _remove_margins(ax.xaxis)
    _remove_margins(ax.yaxis)
    _remove_margins(ax.zaxis)
    return fig, ax
def matrix_histogram_complex(M, xlabels=None, ylabels=None,
                             title=None, limits=None, phase_limits=None,
                             colorbar=True, fig=None, ax=None,
                             threshold=None):
    """
    Draw a histogram for the amplitudes of matrix M, using the argument
    of each element for coloring the bars, with the given x and y labels
    and title.
    Parameters
    ----------
    M : Matrix of Qobj
        The matrix to visualize
    xlabels : list of strings
        list of x labels
    ylabels : list of strings
        list of y labels
    title : string
        title of the plot (optional)
    limits : list/array with two float numbers
        The z-axis limits [min, max] (optional)
    phase_limits : list/array with two float numbers
        The phase-axis (colorbar) limits [min, max] (optional)
    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.
    threshold: float (None)
        Threshold for when bars of smaller height should be transparent. If
        not set, all bars are colored according to the color map.
    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    Raises
    ------
    ValueError
        Input argument is not valid.
    """
    if isinstance(M, Qobj):
        # extract matrix data from Qobj
        M = M.full()
    # One bar per matrix element; height = |element|, colour = arg(element).
    n = np.size(M)
    xpos, ypos = np.meshgrid(range(M.shape[0]), range(M.shape[1]))
    xpos = xpos.T.flatten() - 0.5
    ypos = ypos.T.flatten() - 0.5
    zpos = np.zeros(n)
    dx = dy = 0.8 * np.ones(n)
    Mvec = M.flatten()
    dz = abs(Mvec)
    # make small numbers real, to avoid random colors
    idx, = np.where(abs(Mvec) < 0.001)
    Mvec[idx] = abs(Mvec[idx])
    if phase_limits:  # check that limits is a list type
        phase_min = phase_limits[0]
        phase_max = phase_limits[1]
    else:
        phase_min = -pi
        phase_max = pi
    norm = mpl.colors.Normalize(phase_min, phase_max)
    cmap = complex_phase_cmap()
    colors = cmap(norm(angle(Mvec)))
    if threshold is not None:
        # Set the alpha channel: bars below the threshold become invisible.
        colors[:, 3] = 1 * (dz > threshold)
    if ax is None:
        fig = plt.figure()
        ax = _axes3D(fig, azim=-35, elev=35)
    ax.bar3d(xpos, ypos, zpos, dx, dy, dz, color=colors)
    if title:
        ax.set_title(title)
    # x axis
    xtics = -0.5 + np.arange(M.shape[0])
    ax.axes.w_xaxis.set_major_locator(plt.FixedLocator(xtics))
    if xlabels:
        nxlabels = len(xlabels)
        if nxlabels != len(xtics):
            raise ValueError(f"got {nxlabels} xlabels but needed {len(xtics)}")
        ax.set_xticklabels(xlabels)
    ax.tick_params(axis='x', labelsize=12)
    # y axis
    ytics = -0.5 + np.arange(M.shape[1])
    ax.axes.w_yaxis.set_major_locator(plt.FixedLocator(ytics))
    if ylabels:
        nylabels = len(ylabels)
        if nylabels != len(ytics):
            raise ValueError(f"got {nylabels} ylabels but needed {len(ytics)}")
        ax.set_yticklabels(ylabels)
    ax.tick_params(axis='y', labelsize=12)
    # z axis
    if limits and isinstance(limits, list):
        ax.set_zlim3d(limits)
    else:
        ax.set_zlim3d([0, 1])  # use min/max
    # ax.set_zlabel('abs')
    # color axis
    if colorbar:
        cax, kw = mpl.colorbar.make_axes(ax, shrink=.75, pad=.0)
        cb = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
        # Tick the phase colorbar at multiples of pi/2.
        cb.set_ticks([-pi, -pi / 2, 0, pi / 2, pi])
        cb.set_ticklabels(
            (r'$-\pi$', r'$-\pi/2$', r'$0$', r'$\pi/2$', r'$\pi$'))
        cb.set_label('arg')
    return fig, ax
def plot_energy_levels(H_list, N=0, labels=None, show_ylabels=False,
                       figsize=(8, 12), fig=None, ax=None):
    """
    Plot the energy level diagrams for a list of Hamiltonians. Include
    up to N energy levels. For each element in H_list, the energy
    levels diagram for the cummulative Hamiltonian sum(H_list[0:n]) is plotted,
    where n is the index of an element in H_list.

    Parameters
    ----------
    H_list : List of Qobj
        A list of Hamiltonians.

    labels : List of string
        A list of labels for each Hamiltonian

    show_ylabels : Bool (default False)
        Show y labels to the left of energy levels of the initial
        Hamiltonian.

    N : int
        The number of energy levels to plot

    figsize : tuple (int,int)
        The size of the figure (width, height).

    fig : a matplotlib Figure instance
        The Figure canvas in which the plot will be drawn.

    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.

    Raises
    ------
    ValueError
        Input argument is not valid.
    """
    if not isinstance(H_list, list):
        raise ValueError("H_list must be a list of Qobj instances")

    if not fig and not ax:
        fig, ax = plt.subplots(1, 1, figsize=figsize)

    H = H_list[0]
    # N == 0 means "plot every level"; otherwise cap at the Hilbert-space size.
    N = H.shape[0] if N == 0 else min(H.shape[0], N)

    xticks = []
    yticks = []

    # Levels of the first Hamiltonian: horizontal blue segments of width 2.
    x = 0
    evals0 = H.eigenenergies(eigvals=N)
    for e_idx, e in enumerate(evals0[:N]):
        ax.plot([x, x + 2], np.array([1, 1]) * e, 'b', linewidth=2)
        yticks.append(e)
        xticks.append(x + 1)
        x += 2

    # For each additional term: dotted black connectors from the previous
    # cumulative spectrum to the new one, then the new levels themselves.
    for H1 in H_list[1:]:
        H = H + H1
        evals1 = H.eigenenergies()
        for e_idx, e in enumerate(evals1[:N]):
            ax.plot([x, x + 1], np.array([evals0[e_idx], e]), 'k:')
        x += 1
        for e_idx, e in enumerate(evals1[:N]):
            ax.plot([x, x + 2], np.array([1, 1]) * e, 'b', linewidth=2)
            xticks.append(x + 1)
            x += 2
        evals0 = evals1

    ax.set_frame_on(False)

    if show_ylabels:
        # Round and deduplicate so (near-)degenerate levels share one label.
        yticks = np.unique(np.around(yticks, 1))
        ax.set_yticks(yticks)
    else:
        ax.axes.get_yaxis().set_visible(False)

    if labels:
        ax.get_xaxis().tick_bottom()
        ax.set_xticks(xticks)
        ax.set_xticklabels(labels, fontsize=16)
    else:
        ax.axes.get_xaxis().set_visible(False)

    return fig, ax
def energy_level_diagram(H_list, N=0, labels=None, show_ylabels=False,
                         figsize=(8, 12), fig=None, ax=None):
    """Deprecated alias for :func:`plot_energy_levels`."""
    warnings.warn("Deprecated: Use plot_energy_levels")
    return plot_energy_levels(
        H_list,
        N=N,
        labels=labels,
        show_ylabels=show_ylabels,
        figsize=figsize,
        fig=fig,
        ax=ax,
    )
def plot_fock_distribution(rho, offset=0, fig=None, ax=None,
                           figsize=(8, 6), title=None, unit_y_range=True):
    """
    Plot the Fock distribution for a density matrix (or ket) that describes
    an oscillator mode.

    Parameters
    ----------
    rho : :class:`qutip.Qobj`
        The density matrix (or ket) of the state to visualize.

    offset : int (default 0)
        Offset applied to the Fock-number axis (the first bar is drawn at
        ``offset``).

    fig : a matplotlib Figure instance
        The Figure canvas in which the plot will be drawn.

    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.

    title : string
        An optional title for the figure.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    unit_y_range : bool (default True)
        Fix the y-axis to the probability range [0, 1].

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    # Create a figure/axes pair only when the caller supplied neither.
    if not fig and not ax:
        fig, ax = plt.subplots(1, 1, figsize=figsize)

    # Work on a density matrix; a ket is converted to its projector.
    if isket(rho):
        rho = ket2dm(rho)

    N = rho.shape[0]

    # The diagonal of the density matrix holds the Fock occupations; take
    # the real part to drop numerical noise in the imaginary component.
    ax.bar(np.arange(offset, offset + N), np.real(rho.diag()),
           color="green", alpha=0.6, width=0.8)

    if unit_y_range:
        ax.set_ylim(0, 1)

    ax.set_xlim(-.5 + offset, N + offset)
    ax.set_xlabel('Fock number', fontsize=12)
    ax.set_ylabel('Occupation probability', fontsize=12)

    if title:
        ax.set_title(title)

    return fig, ax
def fock_distribution(rho, offset=0, fig=None, ax=None,
                      figsize=(8, 6), title=None, unit_y_range=True):
    """Deprecated alias for :func:`plot_fock_distribution`."""
    warnings.warn("Deprecated: Use plot_fock_distribution")
    return plot_fock_distribution(
        rho,
        offset=offset,
        fig=fig,
        ax=ax,
        figsize=figsize,
        title=title,
        unit_y_range=unit_y_range,
    )
def plot_wigner(rho, fig=None, ax=None, figsize=(6, 6),
                cmap=None, alpha_max=7.5, colorbar=False,
                method='clenshaw', projection='2d'):
    """
    Plot the the Wigner function for a density matrix (or ket) that describes
    an oscillator mode.

    Parameters
    ----------
    rho : :class:`qutip.Qobj`
        The density matrix (or ket) of the state to visualize.

    fig : a matplotlib Figure instance
        The Figure canvas in which the plot will be drawn.

    ax : a matplotlib axes instance
        The axes context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    cmap : a matplotlib cmap instance
        The colormap.

    alpha_max : float
        The span of the x and y coordinates (both [-alpha_max, alpha_max]).

    colorbar : bool
        Whether (True) or not (False) a colorbar should be attached to the
        Wigner function graph.

    method : string {'clenshaw', 'iterative', 'laguerre', 'fft'}
        The method used for calculating the wigner function. See the
        documentation for qutip.wigner for details.

    projection: string {'2d', '3d'}
        Specify whether the Wigner function is to be plotted as a
        contour graph ('2d') or surface plot ('3d').

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    if not fig and not ax:
        if projection == '2d':
            fig, ax = plt.subplots(1, 1, figsize=figsize)
        elif projection == '3d':
            fig = plt.figure(figsize=figsize)
            ax = fig.add_subplot(1, 1, 1, projection='3d')
        else:
            raise ValueError('Unexpected value of projection keyword argument')

    if isket(rho):
        rho = ket2dm(rho)

    # Evaluate the Wigner function on a square phase-space grid.
    xvec = np.linspace(-alpha_max, alpha_max, 200)
    W0 = wigner(rho, xvec, xvec, method=method)

    # Some wigner() methods return (W, yvec) instead of just W; unpack both.
    W, yvec = W0 if isinstance(W0, tuple) else (W0, xvec)

    # Symmetric normalization about 0 so the diverging colormap is centered.
    wlim = abs(W).max()

    if cmap is None:
        cmap = cm.get_cmap('RdBu')

    if projection == '2d':
        cf = ax.contourf(xvec, yvec, W, 100,
                         norm=mpl.colors.Normalize(-wlim, wlim), cmap=cmap)
    elif projection == '3d':
        X, Y = np.meshgrid(xvec, xvec)
        cf = ax.plot_surface(X, Y, W0, rstride=5, cstride=5, linewidth=0.5,
                             norm=mpl.colors.Normalize(-wlim, wlim), cmap=cmap)
    else:
        raise ValueError('Unexpected value of projection keyword argument.')

    # If wigner() produced its own y grid, match the y limits to the x grid.
    if xvec is not yvec:
        ax.set_ylim(xvec.min(), xvec.max())

    ax.set_xlabel(r'$\rm{Re}(\alpha)$', fontsize=12)
    ax.set_ylabel(r'$\rm{Im}(\alpha)$', fontsize=12)

    if colorbar:
        fig.colorbar(cf, ax=ax)

    ax.set_title("Wigner function", fontsize=12)

    return fig, ax
def plot_wigner_fock_distribution(rho, fig=None, axes=None, figsize=(8, 4),
                                  cmap=None, alpha_max=7.5, colorbar=False,
                                  method='iterative', projection='2d'):
    """
    Plot the Fock distribution and the Wigner function for a density matrix
    (or ket) that describes an oscillator mode.

    Parameters
    ----------
    rho : :class:`qutip.Qobj`
        The density matrix (or ket) of the state to visualize.

    fig : a matplotlib Figure instance
        The Figure canvas in which the plot will be drawn.

    axes : a list of two matplotlib axes instances
        The axes context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    cmap : a matplotlib cmap instance
        The colormap.

    alpha_max : float
        The span of the x and y coordinates (both [-alpha_max, alpha_max]).

    colorbar : bool
        Whether (True) or not (False) a colorbar should be attached to the
        Wigner function graph.

    method : string {'iterative', 'laguerre', 'fft'}
        The method used for calculating the wigner function. See the
        documentation for qutip.wigner for details.

    projection: string {'2d', '3d'}
        Specify whether the Wigner function is to be plotted as a
        contour graph ('2d') or surface plot ('3d').

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    if not fig and not axes:
        if projection == '2d':
            # Two side-by-side 2D axes: Fock distribution and Wigner contour.
            fig, axes = plt.subplots(1, 2, figsize=figsize)
        elif projection == '3d':
            # Second axes is a 3D surface plot for the Wigner function.
            fig = plt.figure(figsize=figsize)
            axes = [fig.add_subplot(1, 2, 1),
                    fig.add_subplot(1, 2, 2, projection='3d')]
        else:
            raise ValueError('Unexpected value of projection keyword argument')

    if isket(rho):
        rho = ket2dm(rho)

    # Delegate each panel to the dedicated plotting helper.
    plot_fock_distribution(rho, fig=fig, ax=axes[0])
    plot_wigner(rho, fig=fig, ax=axes[1], figsize=figsize, cmap=cmap,
                alpha_max=alpha_max, colorbar=colorbar, method=method,
                projection=projection)

    return fig, axes
def wigner_fock_distribution(rho, fig=None, axes=None, figsize=(8, 4),
                             cmap=None, alpha_max=7.5, colorbar=False,
                             method='iterative'):
    """Deprecated alias for :func:`plot_wigner_fock_distribution`."""
    warnings.warn("Deprecated: Use plot_wigner_fock_distribution")
    return plot_wigner_fock_distribution(
        rho,
        fig=fig,
        axes=axes,
        figsize=figsize,
        cmap=cmap,
        alpha_max=alpha_max,
        colorbar=colorbar,
        method=method,
    )
def plot_expectation_values(results, ylabels=None, title=None, show_legend=False,
                            fig=None, axes=None, figsize=(8, 4)):
    """
    Visualize the results (expectation values) for an evolution solver.
    `results` is assumed to be an instance of Result, or a list of Result
    instances.

    Parameters
    ----------
    results : (list of) :class:`qutip.solver.Result`
        List of results objects returned by any of the QuTiP evolution solvers.

    ylabels : list of strings
        The y-axis labels. List should be of the same length as `results`.
        Defaults to no labels.

    title : string
        The title of the figure.

    show_legend : bool
        Whether or not to show the legend.

    fig : a matplotlib Figure instance
        The Figure canvas in which the plot will be drawn.

    axes : a matplotlib axes instance
        The axes context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    # Bug fix: the default was previously the mutable literal ``[]``, which
    # is shared between calls.  A ``None`` sentinel is backward-compatible
    # because only the truthiness of ``ylabels`` is ever used below.
    if ylabels is None:
        ylabels = []

    # Accept a single Result instance as well as a list of them.
    if not isinstance(results, list):
        results = [results]

    n_e_ops = max(len(result.expect) for result in results)

    if not fig or not axes:
        if not figsize:
            figsize = (12, 3 * n_e_ops)
        fig, axes = plt.subplots(n_e_ops, 1, sharex=True,
                                 figsize=figsize, squeeze=False)

    # One subplot per expectation operator; overlay all solvers' curves.
    for r_idx, result in enumerate(results):
        for e_idx, e in enumerate(result.expect):
            axes[e_idx, 0].plot(result.times, e,
                                label="%s [%d]" % (result.solver, e_idx))

    if title:
        fig.suptitle(title)

    axes[n_e_ops - 1, 0].set_xlabel("time", fontsize=12)
    for n in range(n_e_ops):
        if show_legend:
            axes[n, 0].legend()
        if ylabels:
            axes[n, 0].set_ylabel(ylabels[n], fontsize=12)

    return fig, axes
def plot_spin_distribution_2d(P, THETA, PHI,
                              fig=None, ax=None, figsize=(8, 8)):
    """
    Plot a spin distribution function (given as meshgrid data) with a 2D
    projection where the surface of the unit sphere is mapped on the unit disk.

    Parameters
    ----------
    P : matrix
        Distribution values as a meshgrid matrix.

    THETA : matrix
        Meshgrid matrix for the theta coordinate.

    PHI : matrix
        Meshgrid matrix for the phi coordinate.

    fig : a matplotlib figure instance
        The figure canvas on which the plot will be drawn.

    ax : a matplotlib axis instance
        The axis context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    if not fig or not ax:
        if not figsize:
            figsize = (8, 8)
        fig, ax = plt.subplots(1, 1, figsize=figsize)

    # Flatten the sphere: theta maps to the vertical axis, phi to the
    # horizontal one, compressed towards the poles by sqrt(cos(theta - pi/2)).
    Y = (THETA - pi / 2) / (pi / 2)
    X = (pi - PHI) / pi * np.sqrt(cos(THETA - pi / 2))

    # NOTE(review): this threshold looks suspicious -- presumably it is meant
    # to select a diverging colormap when the distribution has negative
    # values, which would suggest -1e-12 rather than -1e12.  Confirm before
    # changing, since plot_spin_distribution_3d uses the same constant.
    if P.min() < -1e12:
        cmap = cm.RdBu
    else:
        cmap = cm.RdYlBu

    ax.pcolor(X, Y, P.real, cmap=cmap)
    ax.set_xlabel(r'$\varphi$', fontsize=18)
    ax.set_ylabel(r'$\theta$', fontsize=18)

    ax.set_xticks([-1, 0, 1])
    ax.set_xticklabels([r'$0$', r'$\pi$', r'$2\pi$'], fontsize=18)
    ax.set_yticks([-1, 0, 1])
    ax.set_yticklabels([r'$\pi$', r'$\pi/2$', r'$0$'], fontsize=18)

    return fig, ax
def plot_spin_distribution_3d(P, THETA, PHI,
                              fig=None, ax=None, figsize=(8, 6)):
    """Plots a matrix of values on a sphere

    Parameters
    ----------
    P : matrix
        Distribution values as a meshgrid matrix.

    THETA : matrix
        Meshgrid matrix for the theta coordinate.

    PHI : matrix
        Meshgrid matrix for the phi coordinate.

    fig : a matplotlib figure instance
        The figure canvas on which the plot will be drawn.

    ax : a matplotlib axis instance
        The axis context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    if fig is None or ax is None:
        fig = plt.figure(figsize=figsize)
        ax = _axes3D(fig, azim=-35, elev=35)

    # Spherical -> Cartesian coordinates for the unit sphere surface.
    xx = sin(THETA) * cos(PHI)
    yy = sin(THETA) * sin(PHI)
    zz = cos(THETA)

    # NOTE(review): same suspicious threshold as in plot_spin_distribution_2d
    # (-1e12; possibly -1e-12 was intended) -- confirm before changing.
    if P.min() < -1e12:
        cmap = cm.RdBu
        # Symmetric normalization so the diverging map is centered on zero.
        norm = mpl.colors.Normalize(-P.max(), P.max())
    else:
        cmap = cm.RdYlBu
        norm = mpl.colors.Normalize(P.min(), P.max())

    # Color the sphere surface by the distribution values.
    ax.plot_surface(xx, yy, zz, rstride=1, cstride=1,
                    facecolors=cmap(norm(P)), linewidth=0)

    # A colorbar is always attached for this plot.
    cax, kw = mpl.colorbar.make_axes(ax, shrink=.66, pad=.02)
    cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
    cb1.set_label('magnitude')

    return fig, ax
#
# Qubism and other qubistic visualizations
#
def complex_array_to_rgb(X, theme='light', rmax=None):
    """
    Makes an array of complex number and converts it to an array of [r, g, b],
    where phase gives hue and saturation/value are given by the absolute value.
    Especially for use with imshow for complex plots.

    For more info on coloring, see:
        Emilia Petrisor,
        Visualizing complex-valued functions with Matplotlib and Mayavi
        https://nbviewer.ipython.org/github/empet/Math/blob/master/DomainColoring.ipynb

    Parameters
    ----------
    X : array
        Array (of any dimension) of complex numbers.

    theme : 'light' (default) or 'dark'
        Set coloring theme for mapping complex values into colors.

    rmax : float
        Maximal abs value for color normalization.
        If None (default), uses np.abs(X).max().

    Returns
    -------
    Y : array
        Array of colors (of shape X.shape + (3,)).
    """
    # Normalization scale for the magnitude; guard against an all-zero array.
    scale = rmax or np.abs(X).max()
    if scale == 0.:
        scale = 1.

    # Phase -> hue in [0, 1); magnitude -> saturation or value (theme choice).
    hue = np.angle(X) / (2 * pi) % 1
    magnitude = np.clip(np.abs(X) / scale, 0, 1)

    hsv = np.zeros(X.shape + (3,), dtype='float')
    hsv[..., 0] = hue
    if theme == 'light':
        hsv[..., 1] = magnitude
        hsv[..., 2] = 1
    elif theme == 'dark':
        hsv[..., 1] = 1
        hsv[..., 2] = magnitude

    return mpl.colors.hsv_to_rgb(hsv)
def _index_to_sequence(i, dim_list):
"""
For a matrix entry with index i it returns state it corresponds to.
In particular, for dim_list=[2]*n it returns i written as a binary number.
Parameters
----------
i : int
Index in a matrix.
dim_list : list of int
List of dimensions of consecutive particles.
Returns
-------
seq : list
List of coordinates for each particle.
"""
res = []
j = i
for d in reversed(dim_list):
j, s = divmod(j, d)
res.append(s)
return list(reversed(res))
def _sequence_to_index(seq, dim_list):
"""
Inverse of _index_to_sequence.
Parameters
----------
seq : list of ints
List of coordinates for each particle.
dim_list : list of int
List of dimensions of consecutive particles.
Returns
-------
i : list
Index in a matrix.
"""
i = 0
for s, d in zip(seq, dim_list):
i *= d
i += s
return i
def _to_qubism_index_pair(i, dim_list, how='pairs'):
    """
    For a matrix entry with index i
    it returns x, y coordinates in qubism mapping.

    Parameters
    ----------
    i : int
        Index in a matrix.

    dim_list : list of int
        List of dimensions of consecutive particles.

    how : 'pairs' (default), 'pairs_skewed' or 'before_after'
        Type of qubistic plot.

    Returns
    -------
    x, y : tuple of ints
        List of coordinates for each particle.

    Raises
    ------
    ValueError
        If `how` is not one of the recognized mappings.
    """
    seq = _index_to_sequence(i, dim_list)

    if how == 'pairs':
        # y built from even-numbered particles, x from odd-numbered ones.
        y = _sequence_to_index(seq[::2], dim_list[::2])
        x = _sequence_to_index(seq[1::2], dim_list[1::2])
    elif how == 'pairs_skewed':
        # x is based on differences of consecutive pairs (mod dimension),
        # which highlights ferromagnetic/antiferromagnetic structure.
        dim_list2 = dim_list[::2]
        y = _sequence_to_index(seq[::2], dim_list2)
        seq2 = [(b - a) % d for a, b, d in zip(seq[::2], seq[1::2], dim_list2)]
        x = _sequence_to_index(seq2, dim_list2)
    elif how == 'before_after':
        # y from the first half of the chain (reversed), x from the second.
        # https://en.wikipedia.org/wiki/File:Ising-tartan.png
        n = len(dim_list)
        y = _sequence_to_index(reversed(seq[:(n // 2)]),
                               reversed(dim_list[:(n // 2)]))
        x = _sequence_to_index(seq[(n // 2):], dim_list[(n // 2):])
    else:
        # Raise ValueError (was a bare Exception) so callers can catch the
        # bad-option case precisely; ValueError subclasses Exception, so any
        # existing ``except Exception`` handler still works.
        raise ValueError("No such 'how'.")

    return x, y
def _sequence_to_latex(seq, style='ket'):
"""
For a sequence of particle states generate LaTeX code.
Parameters
----------
seq : list of ints
List of coordinates for each particle.
style : 'ket' (default), 'bra' or 'bare'
Style of LaTeX (i.e. |01> or <01| or 01, respectively).
Returns
-------
latex : str
LaTeX output.
"""
if style == 'ket':
latex = "$\\left|{0}\\right\\rangle$"
elif style == 'bra':
latex = "$\\left\\langle{0}\\right|$"
elif style == 'bare':
latex = "${0}$"
else:
raise Exception("No such style.")
return latex.format("".join(map(str, seq)))
def plot_qubism(ket, theme='light', how='pairs',
                grid_iteration=1, legend_iteration=0,
                fig=None, ax=None, figsize=(6, 6)):
    """
    Qubism plot for pure states of many qudits. Works best for spin chains,
    especially with even number of particles of the same dimension. Allows to
    see entanglement between first 2k particles and the rest.

    Parameters
    ----------
    ket : Qobj
        Pure state for plotting.

    theme : 'light' (default) or 'dark'
        Set coloring theme for mapping complex values into colors.
        See: complex_array_to_rgb.

    how : 'pairs' (default), 'pairs_skewed' or 'before_after'
        Type of Qubism plotting. Options:

        - 'pairs' - typical coordinates,
        - 'pairs_skewed' - for ferromagnetic/antriferromagnetic plots,
        - 'before_after' - related to Schmidt plot (see also: plot_schmidt).

    grid_iteration : int (default 1)
        Helper lines to be drawn on plot.
        Show tiles for 2*grid_iteration particles vs all others.

    legend_iteration : int (default 0) or 'grid_iteration' or 'all'
        Show labels for first ``2*legend_iteration`` particles. Option
        'grid_iteration' sets the same number of particles as for
        grid_iteration. Option 'all' makes label for all particles. Typically
        it should be 0, 1, 2 or perhaps 3.

    fig : a matplotlib figure instance
        The figure canvas on which the plot will be drawn.

    ax : a matplotlib axis instance
        The axis context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.

    Notes
    -----
    See also [1]_.

    References
    ----------
    .. [1] J. Rodriguez-Laguna, P. Migdal, M. Ibanez Berganza, M. Lewenstein
       and G. Sierra, *Qubism: self-similar visualization of many-body
       wavefunctions*, `New J. Phys. 14 053028
       <https://dx.doi.org/10.1088/1367-2630/14/5/053028>`_, arXiv:1112.3560
       (2012), open access.
    """
    if not isket(ket):
        raise Exception("Qubism works only for pure states, i.e. kets.")
        # add for dm? (perhaps a separate function, plot_qubism_dm)

    if not fig and not ax:
        fig, ax = plt.subplots(1, 1, figsize=figsize)

    dim_list = ket.dims[0]
    n = len(dim_list)

    # for odd number of particles - pixels are rectangular; pad with a
    # trivial extra particle so pairs can be formed.
    if n % 2 == 1:
        ket = tensor(ket, Qobj([1] * dim_list[-1]))
        dim_list = ket.dims[0]
        n += 1

    ketdata = ket.full()

    if how == 'pairs':
        dim_list_y = dim_list[::2]
        dim_list_x = dim_list[1::2]
    elif how == 'pairs_skewed':
        dim_list_y = dim_list[::2]
        dim_list_x = dim_list[1::2]
        if dim_list_x != dim_list_y:
            raise Exception("For 'pairs_skewed' pairs " +
                            "of dimensions need to be the same.")
    elif how == 'before_after':
        dim_list_y = list(reversed(dim_list[:(n // 2)]))
        dim_list_x = dim_list[(n // 2):]
    else:
        raise Exception("No such 'how'.")

    size_x = np.prod(dim_list_x)
    size_y = np.prod(dim_list_y)

    # Scatter the state amplitudes into the 2D qubism grid.
    qub = np.zeros([size_x, size_y], dtype=complex)
    for i in range(ketdata.size):
        qub[_to_qubism_index_pair(i, dim_list, how=how)] = ketdata[i, 0]
    qub = qub.transpose()

    # Grid lines separating the top-level quadrants.
    quadrants_x = np.prod(dim_list_x[:grid_iteration])
    quadrants_y = np.prod(dim_list_y[:grid_iteration])

    ticks_x = [size_x // quadrants_x * i for i in range(1, quadrants_x)]
    ticks_y = [size_y // quadrants_y * i for i in range(1, quadrants_y)]

    ax.set_xticks(ticks_x)
    ax.set_xticklabels([""] * (quadrants_x - 1))
    ax.set_yticks(ticks_y)
    ax.set_yticklabels([""] * (quadrants_y - 1))
    theme2color_of_lines = {'light': '#000000',
                            'dark': '#FFFFFF'}
    ax.grid(True, color=theme2color_of_lines[theme])
    ax.imshow(complex_array_to_rgb(qub, theme=theme),
              interpolation="none",
              extent=(0, size_x, 0, size_y))

    if legend_iteration == 'all':
        label_n = n // 2
    elif legend_iteration == 'grid_iteration':
        label_n = grid_iteration
    else:
        try:
            label_n = int(legend_iteration)
        # Bug fix: this was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit.  Only conversion failures should be
        # translated into the option error.
        except (TypeError, ValueError):
            raise Exception("No such option for legend_iteration keyword " +
                            "argument. Use 'all', 'grid_iteration' or an " +
                            "integer.")

    if label_n:
        if how == 'before_after':
            dim_list_small = list(reversed(dim_list_y[-label_n:])) \
                + dim_list_x[:label_n]
        else:
            dim_list_small = []
            for j in range(label_n):
                dim_list_small.append(dim_list_y[j])
                dim_list_small.append(dim_list_x[j])

        scale_x = float(size_x) / np.prod(dim_list_x[:label_n])
        shift_x = 0.5 * scale_x
        scale_y = float(size_y) / np.prod(dim_list_y[:label_n])
        shift_y = 0.5 * scale_y

        # Font size shrinks with the number of labelled tiles so labels fit.
        bbox = ax.get_window_extent().transformed(
            fig.dpi_scale_trans.inverted())
        fontsize = 35 * bbox.width / np.prod(dim_list_x[:label_n]) / label_n
        opts = {'fontsize': fontsize,
                'color': theme2color_of_lines[theme],
                'horizontalalignment': 'center',
                'verticalalignment': 'center'}
        for i in range(np.prod(dim_list_small)):
            x, y = _to_qubism_index_pair(i, dim_list_small, how=how)
            seq = _index_to_sequence(i, dim_list=dim_list_small)
            ax.text(scale_x * x + shift_x,
                    size_y - (scale_y * y + shift_y),
                    _sequence_to_latex(seq),
                    **opts)
    return fig, ax
def plot_schmidt(ket, splitting=None,
                 labels_iteration=(3, 2),
                 theme='light',
                 fig=None, ax=None, figsize=(6, 6)):
    """
    Plotting scheme related to Schmidt decomposition.
    Converts a state into a matrix (A_ij -> A_i^j),
    where rows are first particles and columns - last.

    See also: plot_qubism with how='before_after' for a similar plot.

    Parameters
    ----------
    ket : Qobj
        Pure state for plotting.

    splitting : int
        Plot for a number of first particles versus the rest.
        If not given, it is (number of particles + 1) // 2.

    theme : 'light' (default) or 'dark'
        Set coloring theme for mapping complex values into colors.
        See: complex_array_to_rgb.

    labels_iteration : int or pair of ints (default (3,2))
        Number of particles to be shown as tick labels,
        for first (vertical) and last (horizontal) particles, respectively.

    fig : a matplotlib figure instance
        The figure canvas on which the plot will be drawn.

    ax : a matplotlib axis instance
        The axis context in which the plot will be drawn.

    figsize : (width, height)
        The size of the matplotlib figure (in inches) if it is to be created
        (that is, if no 'fig' and 'ax' arguments are passed).

    Returns
    -------
    fig, ax : tuple
        A tuple of the matplotlib figure and axes instances used to produce
        the figure.
    """
    if not isket(ket):
        raise Exception("Schmidt plot works only for pure states, i.e. kets.")

    if not fig and not ax:
        fig, ax = plt.subplots(1, 1, figsize=figsize)

    dim_list = ket.dims[0]

    if splitting is None:
        splitting = (len(dim_list) + 1) // 2

    # A single int means the same label depth for both axes.
    if isinstance(labels_iteration, int):
        labels_iteration = labels_iteration, labels_iteration

    ketdata = ket.full()

    # Reshape the state vector into a matrix: rows index the first
    # `splitting` particles, columns the remaining ones.
    dim_list_y = dim_list[:splitting]
    dim_list_x = dim_list[splitting:]

    size_x = np.prod(dim_list_x)
    size_y = np.prod(dim_list_y)

    ketdata = ketdata.reshape((size_y, size_x))

    # Tick labels only for the first few particles on each axis.
    dim_list_small_x = dim_list_x[:labels_iteration[1]]
    dim_list_small_y = dim_list_y[:labels_iteration[0]]

    quadrants_x = np.prod(dim_list_small_x)
    quadrants_y = np.prod(dim_list_small_y)

    # Center each tick in its quadrant (y runs top-to-bottom).
    ticks_x = [size_x / quadrants_x * (i + 0.5)
               for i in range(quadrants_x)]
    ticks_y = [size_y / quadrants_y * (quadrants_y - i - 0.5)
               for i in range(quadrants_y)]

    labels_x = [_sequence_to_latex(_index_to_sequence(i*size_x // quadrants_x,
                                                      dim_list=dim_list_x))
                for i in range(quadrants_x)]
    labels_y = [_sequence_to_latex(_index_to_sequence(i*size_y // quadrants_y,
                                                      dim_list=dim_list_y))
                for i in range(quadrants_y)]

    ax.set_xticks(ticks_x)
    ax.set_xticklabels(labels_x)
    ax.set_yticks(ticks_y)
    ax.set_yticklabels(labels_y)
    ax.set_xlabel("last particles")
    ax.set_ylabel("first particles")

    ax.imshow(complex_array_to_rgb(ketdata, theme=theme),
              interpolation="none",
              extent=(0, size_x, 0, size_y))

    return fig, ax
| bsd-3-clause | 4c824e7da833e523e56cca5e02949d98 | 30.317408 | 87 | 0.572937 | 3.509045 | false | false | false | false |
qutip/qutip | qutip/_mkl/spmv.py | 2 | 1196 | import numpy as np
from ctypes import POINTER, c_int, c_char, byref
from numpy.ctypeslib import ndpointer
import qutip.settings as qset
zcsrgemv = qset.mkl_lib.mkl_cspblas_zcsrgemv
def mkl_spmv(A, x):
    """
    sparse csr_spmv using MKL

    Computes ``y = A @ x`` for a complex CSR matrix ``A`` and a dense
    (1D, or 2D single-column) vector ``x`` via MKL's
    ``mkl_cspblas_zcsrgemv`` routine.
    """
    m, _ = A.shape

    # Pointers to data of the matrix
    data = A.data.ctypes.data_as(ndpointer(np.complex128, ndim=1, flags='C'))
    indptr = A.indptr.ctypes.data_as(POINTER(c_int))
    indices = A.indices.ctypes.data_as(POINTER(c_int))

    # Allocate output, using same conventions as input
    if x.ndim == 1:
        y = np.empty(m, dtype=np.complex128, order='C')
    elif x.ndim == 2 and x.shape[1] == 1:
        y = np.empty((m, 1), dtype=np.complex128, order='C')
    else:
        raise Exception('Input vector must be 1D row or 2D column vector')

    # Now call MKL. This returns the answer in the last argument, which shares
    # memory with y.  The leading 'N' selects the non-transposed product.
    zcsrgemv(
        byref(c_char(bytes(b'N'))),
        byref(c_int(m)),
        data,
        indptr,
        indices,
        x.ctypes.data_as(ndpointer(np.complex128, ndim=1, flags='C')),
        y.ctypes.data_as(ndpointer(np.complex128, ndim=1, flags='C')),
    )
    return y
| bsd-3-clause | ebc7f225f2d11431cbd5d46e301c0977 | 30.473684 | 78 | 0.624582 | 3.07455 | false | false | false | false |
qutip/qutip | qutip/tests/test_mkl.py | 2 | 3447 | import pytest
import numpy as np
import scipy.linalg
import scipy.sparse
import qutip
if qutip.settings.has_mkl:
from qutip._mkl.spsolve import mkl_splu, mkl_spsolve
pytestmark = [
pytest.mark.skipif(not qutip.settings.has_mkl,
reason='MKL extensions not found.'),
]
class Test_spsolve:
    """Tests for ``mkl_spsolve``: real/complex data, single and multiple
    right-hand sides, sparse right-hand sides, and the Hermitian solver."""

    def test_single_rhs_vector_real(self):
        # Solve A x = b for a known x and check it is recovered.
        Adense = np.array([[0, 1, 1],
                           [1, 0, 1],
                           [0, 0, 1]])
        As = scipy.sparse.csr_matrix(Adense)
        np.random.seed(1234)
        x = np.random.randn(3)
        b = As * x
        x2 = mkl_spsolve(As, b, verbose=True)
        np.testing.assert_allclose(x, x2)

    def test_single_rhs_vector_complex(self):
        # Same round-trip check with a random Hermitian complex matrix.
        A = qutip.rand_herm(10)
        x = qutip.rand_ket(10).full()
        b = A.full() @ x
        y = mkl_spsolve(A.data, b, verbose=True)
        np.testing.assert_allclose(x, y)

    @pytest.mark.parametrize('dtype', [np.float64, np.complex128])
    def test_multi_rhs_vector(self, dtype):
        # Multiple right-hand sides passed as the columns of a dense matrix;
        # compare against scipy's dense solver.
        M = np.array([
            [1, 0, 2],
            [0, 0, 3],
            [-4, 5, 6],
        ], dtype=dtype)
        sM = scipy.sparse.csr_matrix(M)
        N = np.array([
            [3, 0, 1],
            [0, 2, 0],
            [0, 0, 0],
        ], dtype=dtype)
        sX = mkl_spsolve(sM, N, verbose=True)
        X = scipy.linalg.solve(M, N)
        np.testing.assert_allclose(X, sX)

    def test_rhs_shape_is_maintained(self):
        # The solution must keep the shape of b: 1D stays 1D, (3,1) stays (3,1).
        A = scipy.sparse.csr_matrix(np.array([
            [1, 0, 2],
            [0, 0, 3],
            [-4, 5, 6],
        ], dtype=np.complex128))
        b = np.array([0, 2, 0], dtype=np.complex128)
        out = mkl_spsolve(A, b, verbose=True)
        assert b.shape == out.shape

        b = np.array([0, 2, 0], dtype=np.complex128).reshape((3, 1))
        out = mkl_spsolve(A, b, verbose=True)
        assert b.shape == out.shape

    def test_sparse_rhs(self):
        # A sparse right-hand side should yield a sparse solution.
        A = scipy.sparse.csr_matrix([
            [1, 2, 0],
            [0, 3, 0],
            [0, 0, 5],
        ])
        b = scipy.sparse.csr_matrix([
            [0, 1],
            [1, 0],
            [0, 0],
        ])
        x = mkl_spsolve(A, b, verbose=True)
        ans = np.array([[-0.66666667, 1],
                        [0.33333333, 0],
                        [0, 0]])
        np.testing.assert_allclose(x.toarray(), ans)

    @pytest.mark.parametrize('dtype', [np.float64, np.complex128])
    def test_symmetric_solver(self, dtype):
        # hermitian=1 selects the symmetric/Hermitian MKL code path.
        A = qutip.rand_herm(np.arange(1, 11)).data
        if dtype == np.float64:
            A = A.real
        x = np.ones(10, dtype=dtype)
        b = A.dot(x)
        y = mkl_spsolve(A, b, hermitian=1, verbose=True)
        np.testing.assert_allclose(x, y)
class Test_splu:
    """Tests for the reusable LU factorization object returned by
    ``mkl_splu``."""

    @pytest.mark.parametrize('dtype', [np.float64, np.complex128])
    def test_repeated_rhs_solve(self, dtype):
        # Factorize once, then solve against each column of N separately.
        M = np.array([
            [1, 0, 2],
            [0, 0, 3],
            [-4, 5, 6],
        ], dtype=dtype)
        sM = scipy.sparse.csr_matrix(M)
        N = np.array([
            [3, 0, 1],
            [0, 2, 0],
            [0, 0, 0],
        ], dtype=dtype)
        test_X = np.zeros((3, 3), dtype=dtype)
        lu = mkl_splu(sM, verbose=True)
        for k in range(3):
            test_X[:, k] = lu.solve(N[:, k])
        # Release the factorization's MKL resources explicitly.
        lu.delete()

        expected_X = scipy.linalg.solve(M, N)
        np.testing.assert_allclose(test_X, expected_X)
| bsd-3-clause | 061943ce7ee14dfc5880c858c8ea5610 | 29.236842 | 68 | 0.487961 | 3.085944 | false | true | false | false |
qutip/qutip | qutip/fastsparse.py | 1 | 16420 | from warnings import warn
import operator
import numpy as np
from scipy.sparse import (
csr_matrix, dia_matrix, isspmatrix, SparseEfficiencyWarning,
)
# fast_csr_matrix extends the internals of csr_matrix, and we need to
# import parts of the internals of scipy.sparse to do that:
import scipy.sparse
import scipy.sparse._sparsetools as _sparsetools
if hasattr(scipy.sparse, "_sputils"):
# SciPy 1.8.0 deprecated the public scipy.sparse.sputils interface and
# moved it to _sputils
from scipy.sparse._sputils import (
isdense, isscalarlike, upcast, get_index_dtype,
)
else:
from scipy.sparse.sputils import (
isdense, isscalarlike, upcast, get_index_dtype,
)
class fast_csr_matrix(csr_matrix):
"""
A subclass of scipy.sparse.csr_matrix that skips the data format
checks that are run everytime a new csr_matrix is created.
"""
def __init__(self, args=None, shape=None, dtype=None, copy=False):
if args is None: #Build zero matrix
if shape is None:
raise Exception('Shape must be given when building zero matrix.')
self.data = np.array([], dtype=complex)
self.indices = np.array([], dtype=np.int32)
self.indptr = np.zeros(shape[0]+1, dtype=np.int32)
self._shape = tuple(int(s) for s in shape)
else:
if args[0].shape[0] and args[0].dtype != complex:
raise TypeError('fast_csr_matrix allows only complex data.')
if args[1].shape[0] and args[1].dtype != np.int32:
raise TypeError('fast_csr_matrix allows only int32 indices.')
if args[2].shape[0] and args[1].dtype != np.int32:
raise TypeError('fast_csr_matrix allows only int32 indptr.')
self.data = np.array(args[0], dtype=complex, copy=copy)
self.indices = np.array(args[1], dtype=np.int32, copy=copy)
self.indptr = np.array(args[2], dtype=np.int32, copy=copy)
if shape is None:
self._shape = tuple([len(self.indptr)-1]*2)
else:
self._shape = tuple(int(s) for s in shape)
self.dtype = complex
self.maxprint = 50
self.format = 'csr'
    def _binopt(self, other, op):
        """
        Do the binary operation fn to two sparse matrices using
        fast_csr_matrix only when other is also a fast_csr_matrix.
        """
        # e.g. csr_plus_csr, csr_minus_csr, etc.
        if not isinstance(other, fast_csr_matrix):
            other = csr_matrix(other)
        # e.g. csr_plus_csr, csr_minus_csr, etc.
        fn = getattr(_sparsetools, self.format + op + self.format)

        # Upper bound on the result's nnz; trimmed to the actual size below.
        maxnnz = self.nnz + other.nnz
        idx_dtype = get_index_dtype((self.indptr, self.indices,
                                     other.indptr, other.indices),
                                    maxval=maxnnz)
        indptr = np.empty(self.indptr.shape, dtype=idx_dtype)
        indices = np.empty(maxnnz, dtype=idx_dtype)

        # Comparison operators yield boolean data instead of complex.
        bool_ops = ['_ne_', '_lt_', '_gt_', '_le_', '_ge_']
        if op in bool_ops:
            data = np.empty(maxnnz, dtype=np.bool_)
        else:
            data = np.empty(maxnnz, dtype=upcast(self.dtype, other.dtype))

        # The C routine fills indptr/indices/data in place.
        fn(self.shape[0], self.shape[1],
           np.asarray(self.indptr, dtype=idx_dtype),
           np.asarray(self.indices, dtype=idx_dtype),
           self.data,
           np.asarray(other.indptr, dtype=idx_dtype),
           np.asarray(other.indices, dtype=idx_dtype),
           other.data,
           indptr, indices, data)

        actual_nnz = indptr[-1]
        indices = indices[:actual_nnz]
        data = data[:actual_nnz]
        if actual_nnz < maxnnz // 2:
            # too much waste, trim arrays
            indices = indices.copy()
            data = data.copy()
        # Only stay in the fast format when both operands are fast and the
        # result is complex; boolean results fall back to plain csr_matrix.
        if isinstance(other, fast_csr_matrix) and (not op in bool_ops):
            A = fast_csr_matrix((data, indices, indptr), dtype=data.dtype, shape=self.shape)
        else:
            A = csr_matrix((data, indices, indptr), dtype=data.dtype, shape=self.shape)
        return A
def multiply(self, other):
"""Point-wise multiplication by another matrix, vector, or
scalar.
"""
# Scalar multiplication.
if isscalarlike(other):
return self._mul_scalar(other)
# Sparse matrix or vector.
if isspmatrix(other):
if self.shape == other.shape:
if not isinstance(other, fast_csr_matrix):
other = csr_matrix(other)
return self._binopt(other, '_elmul_')
# Single element.
elif other.shape == (1,1):
return self._mul_scalar(other.toarray()[0, 0])
elif self.shape == (1,1):
return other._mul_scalar(self.toarray()[0, 0])
# A row times a column.
elif self.shape[1] == other.shape[0] and self.shape[1] == 1:
return self._mul_sparse_matrix(other.tocsc())
elif self.shape[0] == other.shape[1] and self.shape[0] == 1:
return other._mul_sparse_matrix(self.tocsc())
# Row vector times matrix. other is a row.
elif other.shape[0] == 1 and self.shape[1] == other.shape[1]:
other = dia_matrix((other.toarray().ravel(), [0]),
shape=(other.shape[1], other.shape[1]))
return self._mul_sparse_matrix(other)
# self is a row.
elif self.shape[0] == 1 and self.shape[1] == other.shape[1]:
copy = dia_matrix((self.toarray().ravel(), [0]),
shape=(self.shape[1], self.shape[1]))
return other._mul_sparse_matrix(copy)
# Column vector times matrix. other is a column.
elif other.shape[1] == 1 and self.shape[0] == other.shape[0]:
other = dia_matrix((other.toarray().ravel(), [0]),
shape=(other.shape[0], other.shape[0]))
return other._mul_sparse_matrix(self)
# self is a column.
elif self.shape[1] == 1 and self.shape[0] == other.shape[0]:
copy = dia_matrix((self.toarray().ravel(), [0]),
shape=(self.shape[0], self.shape[0]))
return copy._mul_sparse_matrix(other)
else:
raise ValueError("inconsistent shapes")
# Dense matrix.
if isdense(other):
if self.shape == other.shape:
ret = self.tocoo()
ret.data = np.multiply(ret.data, other[ret.row, ret.col]
).view(np.ndarray).ravel()
return ret
# Single element.
elif other.size == 1:
return self._mul_scalar(other.flat[0])
# Anything else.
return np.multiply(self.toarray(), other)
def _mul_sparse_matrix(self, other):
"""
Do the sparse matrix mult returning fast_csr_matrix only
when other is also fast_csr_matrix.
"""
M, _ = self.shape
_, N = other.shape
major_axis = self._swap((M, N))[0]
if isinstance(other, fast_csr_matrix):
A = zcsr_mult(self, other, sorted=1)
return A
other = csr_matrix(other) # convert to this format
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=M*N)
# scipy 1.5 renamed the older csr_matmat_pass1 to the much more
# descriptive csr_matmat_maxnnz, but also changed the call and logic
# structure of constructing the indices.
try:
fn = getattr(_sparsetools, self.format + '_matmat_maxnnz')
nnz = fn(M, N,
np.asarray(self.indptr, dtype=idx_dtype),
np.asarray(self.indices, dtype=idx_dtype),
np.asarray(other.indptr, dtype=idx_dtype),
np.asarray(other.indices, dtype=idx_dtype))
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=nnz)
indptr = np.empty(major_axis + 1, dtype=idx_dtype)
except AttributeError:
indptr = np.empty(major_axis + 1, dtype=idx_dtype)
fn = getattr(_sparsetools, self.format + '_matmat_pass1')
fn(M, N,
np.asarray(self.indptr, dtype=idx_dtype),
np.asarray(self.indices, dtype=idx_dtype),
np.asarray(other.indptr, dtype=idx_dtype),
np.asarray(other.indices, dtype=idx_dtype),
indptr)
nnz = indptr[-1]
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=nnz)
indices = np.empty(nnz, dtype=idx_dtype)
data = np.empty(nnz, dtype=upcast(self.dtype, other.dtype))
try:
fn = getattr(_sparsetools, self.format + '_matmat')
except AttributeError:
fn = getattr(_sparsetools, self.format + '_matmat_pass2')
fn(M, N, np.asarray(self.indptr, dtype=idx_dtype),
np.asarray(self.indices, dtype=idx_dtype),
self.data,
np.asarray(other.indptr, dtype=idx_dtype),
np.asarray(other.indices, dtype=idx_dtype),
other.data,
indptr, indices, data)
A = csr_matrix((data, indices, indptr), shape=(M, N))
return A
def _scalar_binopt(self, other, op):
"""Scalar version of self._binopt, for cases in which no new nonzeros
are added. Produces a new spmatrix in canonical form.
"""
self.sum_duplicates()
res = self._with_data(op(self.data, other), copy=True)
res.eliminate_zeros()
return res
def __eq__(self, other):
# Scalar other.
if isscalarlike(other):
if np.isnan(other):
return csr_matrix(self.shape, dtype=np.bool_)
if other == 0:
warn("Comparing a sparse matrix with 0 using == is inefficient"
", try using != instead.", SparseEfficiencyWarning)
all_true = _all_true(self.shape)
inv = self._scalar_binopt(other, operator.ne)
return all_true - inv
else:
return self._scalar_binopt(other, operator.eq)
# Dense other.
elif isdense(other):
return self.toarray() == other
# Sparse other.
elif isspmatrix(other):
warn("Comparing sparse matrices using == is inefficient, try using"
" != instead.", SparseEfficiencyWarning)
#TODO sparse broadcasting
if self.shape != other.shape:
return False
elif self.format != other.format:
other = other.asformat(self.format)
res = self._binopt(other,'_ne_')
all_true = _all_true(self.shape)
return all_true - res
else:
return False
def __ne__(self, other):
# Scalar other.
if isscalarlike(other):
if np.isnan(other):
warn("Comparing a sparse matrix with nan using != is inefficient",
SparseEfficiencyWarning)
all_true = _all_true(self.shape)
return all_true
elif other != 0:
warn("Comparing a sparse matrix with a nonzero scalar using !="
" is inefficient, try using == instead.", SparseEfficiencyWarning)
all_true = _all_true(self.shape)
inv = self._scalar_binopt(other, operator.eq)
return all_true - inv
else:
return self._scalar_binopt(other, operator.ne)
# Dense other.
elif isdense(other):
return self.toarray() != other
# Sparse other.
elif isspmatrix(other):
#TODO sparse broadcasting
if self.shape != other.shape:
return True
elif self.format != other.format:
other = other.asformat(self.format)
return self._binopt(other,'_ne_')
else:
return True
def _inequality(self, other, op, op_name, bad_scalar_msg):
# Scalar other.
if isscalarlike(other):
if 0 == other and op_name in ('_le_', '_ge_'):
raise NotImplementedError(" >= and <= don't work with 0.")
elif op(0, other):
warn(bad_scalar_msg, SparseEfficiencyWarning)
other_arr = np.empty(self.shape, dtype=np.result_type(other))
other_arr.fill(other)
other_arr = csr_matrix(other_arr)
return self._binopt(other_arr, op_name)
else:
return self._scalar_binopt(other, op)
# Dense other.
elif isdense(other):
return op(self.toarray(), other)
# Sparse other.
elif isspmatrix(other):
#TODO sparse broadcasting
if self.shape != other.shape:
raise ValueError("inconsistent shapes")
elif self.format != other.format:
other = other.asformat(self.format)
if op_name not in ('_ge_', '_le_'):
return self._binopt(other, op_name)
warn("Comparing sparse matrices using >= and <= is inefficient, "
"using <, >, or !=, instead.", SparseEfficiencyWarning)
all_true = _all_true(self.shape)
res = self._binopt(other, '_gt_' if op_name == '_le_' else '_lt_')
return all_true - res
else:
raise ValueError("Operands could not be compared.")
def _with_data(self,data,copy=True):
"""Returns a matrix with the same sparsity structure as self,
but with different data. By default the structure arrays
(i.e. .indptr and .indices) are copied.
"""
# We need this just in case something like abs(data) gets called
# does nothing if data.dtype is complex.
data = np.asarray(data, dtype=complex)
if copy:
return fast_csr_matrix((data,self.indices.copy(),self.indptr.copy()),
shape=self.shape,dtype=data.dtype)
else:
return fast_csr_matrix((data,self.indices,self.indptr),
shape=self.shape,dtype=data.dtype)
def transpose(self):
"""
Returns the transpose of the matrix, keeping
it in fast_csr format.
"""
return zcsr_transpose(self)
def trans(self):
"""
Same as transpose
"""
return zcsr_transpose(self)
def getH(self):
"""
Returns the conjugate-transpose of the matrix, keeping
it in fast_csr format.
"""
return zcsr_adjoint(self)
def adjoint(self):
"""
Same as getH
"""
return zcsr_adjoint(self)
def csr2fast(A, copy=False):
if (not isinstance(A, fast_csr_matrix)) or copy:
# Do not need to do any type checking here
# since fast_csr_matrix does that.
return fast_csr_matrix((A.data,A.indices,A.indptr),
shape=A.shape,copy=copy)
else:
return A
def fast_identity(N):
"""Generates a sparse identity matrix in
fast_csr format.
"""
data = np.ones(N, dtype=complex)
ind = np.arange(N, dtype=np.int32)
ptr = np.arange(N+1, dtype=np.int32)
ptr[-1] = N
return fast_csr_matrix((data,ind,ptr),shape=(N,N))
#Convenience functions
#--------------------
def _all_true(shape):
A = csr_matrix((np.ones(np.prod(shape), dtype=np.bool_),
np.tile(np.arange(shape[1],dtype=np.int32),shape[0]),
np.arange(0,np.prod(shape)+1,shape[1],dtype=np.int32)),
shape=shape)
return A
#Need to do some trailing imports here
#-------------------------------------
from qutip.cy.spmath import (zcsr_transpose, zcsr_adjoint, zcsr_mult)
| bsd-3-clause | 46daa5718abf725593cc15dbd2569eb7 | 39.44335 | 92 | 0.538916 | 4.013689 | false | false | false | false |
qutip/qutip | qutip/orbital.py | 1 | 2571 | __all__ = ['orbital']
import numpy as np
from scipy.special import sph_harm
def orbital(theta, phi, *args):
r"""Calculates an angular wave function on a sphere.
``psi = orbital(theta,phi,ket1,ket2,...)`` calculates
the angular wave function on a sphere at the mesh of points
defined by theta and phi which is
:math:`\sum_{lm} c_{lm} Y_{lm}(theta,phi)` where :math:`C_{lm}` are the
coefficients specified by the list of kets. Each ket has 2l+1 components
for some integer l. The first entry of the ket defines the coefficient
c_{l,-l}, while the last entry of the ket defines the
coefficient c_{l, l}.
Parameters
----------
theta : int/float/list/array
Polar angles in [0, pi]
phi : int/float/list/array
Azimuthal angles in [0, 2*pi]
args : list/array
``list`` of ket vectors.
Returns
-------
``array`` for angular wave function evaluated at all
possible combinations of theta and phi
"""
if isinstance(args[0], list):
# use the list in args[0]
args = args[0]
# convert to numpy array
theta = np.atleast_1d(theta)
phi = np.atleast_1d(phi)
# check that arrays are only 1D
if len(theta.shape) != 1:
raise ValueError('Polar angles theta must be 1D list')
if len(phi.shape) != 1:
raise ValueError('Azimuthal angles phi must be 1D list')
# make meshgrid
phi_mesh, theta_mesh = np.meshgrid(phi, theta)
# setup empty wavefunction
psi = np.zeros([theta.shape[0], phi.shape[0]], dtype=complex)
# iterate through provided kets
for k in range(len(args)):
ket = args[k]
if ket.type == 'bra':
ket = ket.conj()
elif not ket.type == 'ket':
raise TypeError('Invalid type for input ket in orbital')
# Extract l value from the state
l = (ket.shape[0] - 1) / 2.0
if l != np.floor(l):
raise ValueError(
'Kets must have odd number of components in orbital')
l = int(l)
# get factors from ket
factors = ket.full()
# iterate through the possible m
for i in range(len(factors)):
# set correct m
m = i - l
# calculate spherical harmonics
# note that theta and phi are interchanged in scipy implementation
res = sph_harm(m, l, phi_mesh, theta_mesh)
psi += factors[i] * res
# flatten output if only one row
if psi.shape[1] == 1:
psi = psi.flatten()
return psi
| bsd-3-clause | 05faf13a46fd4eedc3a80c8dd46a6355 | 30.740741 | 78 | 0.588876 | 3.667618 | false | false | false | false |
qutip/qutip | qutip/partial_transpose.py | 2 | 3537 | __all__ = ['partial_transpose']
import numpy as np
import scipy.sparse as sp
from qutip.qobj import Qobj
from qutip.states import (state_index_number, state_number_index,
state_number_enumerate)
def partial_transpose(rho, mask, method='dense'):
"""
Return the partial transpose of a Qobj instance `rho`,
where `mask` is an array/list with length that equals
the number of components of `rho` (that is, the length of
`rho.dims[0]`), and the values in `mask` indicates whether
or not the corresponding subsystem is to be transposed.
The elements in `mask` can be boolean or integers `0` or `1`,
where `True`/`1` indicates that the corresponding subsystem
should be tranposed.
Parameters
----------
rho : :class:`qutip.qobj`
A density matrix.
mask : *list* / *array*
A mask that selects which subsystems should be transposed.
method : str
choice of method, `dense` or `sparse`. The default method
is `dense`. The `sparse` implementation can be faster for
large and sparse systems (hundreds of quantum states).
Returns
-------
rho_pr: :class:`qutip.qobj`
A density matrix with the selected subsystems transposed.
"""
mask = [int(i) for i in mask]
if method == 'sparse':
return _partial_transpose_sparse(rho, mask)
else:
return _partial_transpose_dense(rho, mask)
def _partial_transpose_dense(rho, mask):
"""
Based on Jonas' implementation using numpy.
Very fast for dense problems.
"""
nsys = len(mask)
pt_dims = np.arange(2 * nsys).reshape(2, nsys).T
pt_idx = np.concatenate([[pt_dims[n, mask[n]] for n in range(nsys)],
[pt_dims[n, 1 - mask[n]] for n in range(nsys)]])
data = rho.data.toarray().reshape(
np.array(rho.dims).flatten()).transpose(pt_idx).reshape(rho.shape)
return Qobj(data, dims=rho.dims)
def _partial_transpose_sparse(rho, mask):
"""
Implement the partial transpose using the CSR sparse matrix.
"""
data = sp.lil_matrix((rho.shape[0], rho.shape[1]), dtype=complex)
for m in range(len(rho.data.indptr) - 1):
n1 = rho.data.indptr[m]
n2 = rho.data.indptr[m + 1]
psi_A = state_index_number(rho.dims[0], m)
for idx, n in enumerate(rho.data.indices[n1:n2]):
psi_B = state_index_number(rho.dims[1], n)
m_pt = state_number_index(
rho.dims[1], np.choose(mask, [psi_A, psi_B]))
n_pt = state_number_index(
rho.dims[0], np.choose(mask, [psi_B, psi_A]))
data[m_pt, n_pt] = rho.data.data[n1 + idx]
return Qobj(data.tocsr(), dims=rho.dims)
def _partial_transpose_reference(rho, mask):
"""
This is a reference implementation that explicitly loops over
all states and performs the transpose. It's slow but easy to
understand and useful for testing.
"""
A_pt = np.zeros(rho.shape, dtype=complex)
for psi_A in state_number_enumerate(rho.dims[0]):
m = state_number_index(rho.dims[0], psi_A)
for psi_B in state_number_enumerate(rho.dims[1]):
n = state_number_index(rho.dims[1], psi_B)
m_pt = state_number_index(
rho.dims[1], np.choose(mask, [psi_A, psi_B]))
n_pt = state_number_index(
rho.dims[0], np.choose(mask, [psi_B, psi_A]))
A_pt[m_pt, n_pt] = rho.data[m, n]
return Qobj(A_pt, dims=rho.dims)
| bsd-3-clause | e4ce7d52b8ec95c5ae3d4b9e007850a0 | 28.722689 | 76 | 0.604467 | 3.427326 | false | false | false | false |
qutip/qutip | qutip/tests/test_fileio.py | 1 | 2229 | import pytest
import numpy as np
import uuid
import qutip
from pathlib import Path
# qsave _always_ appends a suffix to the file name at the time of writing, but
# in case this changes in the future, to ensure that we never leak a temporary
# file into the user's folders, we simply apply these tests in a temporary
# directory. Windows also does not allow temporary files to be opened multiple
# times, so using a temporary directory is best.
pytestmark = [pytest.mark.usefixtures("in_temporary_directory")]
_dimension = 10
def _random_file_name():
return "_" + str(uuid.uuid4())
class Test_file_data_store_file_data_read:
# Tests parametrised seprately to give nicer descriptions in verbose mode.
def case(self, filename, kwargs):
data = 1 - 2*np.random.rand(_dimension, _dimension)
if kwargs.get('numtype', 'complex') == 'complex':
data = data * (0.5*0.5j)
qutip.file_data_store(filename, data, **kwargs)
out = qutip.file_data_read(filename)
np.testing.assert_allclose(data, out, atol=1e-8)
def test_defaults(self):
return self.case(_random_file_name(), {})
@pytest.mark.parametrize("type_", ["real", "complex"])
@pytest.mark.parametrize("format_", ["decimal", "exp"])
def test_type_format(self, type_, format_):
kwargs = {'numtype': type_, 'numformat': format_}
return self.case(_random_file_name(), kwargs)
@pytest.mark.parametrize("separator", [",", ";", "\t", " ", " \t "],
ids=lambda x: "'" + x + "'")
def test_separator_detection(self, separator):
kwargs = {'numtype': 'complex', 'numformat': 'exp', 'sep': separator}
return self.case(_random_file_name(), kwargs)
@pytest.mark.parametrize('use_path', [True, False], ids=['Path', 'str'])
@pytest.mark.parametrize('suffix', ['', '.qu', '.dat'])
def test_qsave_qload(use_path, suffix):
ops_in = [qutip.sigmax(),
qutip.num(_dimension),
qutip.coherent_dm(_dimension, 1j)]
filename = _random_file_name() + suffix
if use_path:
filename = Path.cwd() / filename
qutip.qsave(ops_in, filename)
ops_out = qutip.qload(filename)
assert ops_in == ops_out
| bsd-3-clause | 7d2c8b27671a4a901d6dd7175c84cf25 | 36.779661 | 79 | 0.635262 | 3.526899 | false | true | false | false |
qutip/qutip | qutip/countstat.py | 2 | 9138 | """
This module contains functions for calculating current and current noise using
the counting statistics formalism.
"""
__all__ = ['countstat_current', 'countstat_current_noise']
import numpy as np
import scipy.sparse as sp
from qutip.expect import expect_rho_vec
from qutip.steadystate import pseudo_inverse, steadystate
from qutip.superoperator import mat2vec, sprepost, spre
from qutip import operator_to_vector, identity, tensor
import qutip.settings as settings
from qutip.qobj import Qobj, issuper, isoper
# Load MKL spsolve if avaiable
if settings.has_mkl:
from qutip._mkl.spsolve import (mkl_splu, mkl_spsolve)
def countstat_current(L, c_ops=None, rhoss=None, J_ops=None):
"""
Calculate the current corresponding a system Liouvillian `L` and a list of
current collapse operators `c_ops` or current superoperators `J_ops`
(either must be specified). Optionally the steadystate density matrix
`rhoss` and a list of current superoperators `J_ops` can be specified. If
either of these are omitted they are computed internally.
Parameters
----------
L : :class:`qutip.Qobj`
Qobj representing the system Liouvillian.
c_ops : array / list (optional)
List of current collapse operators.
rhoss : :class:`qutip.Qobj` (optional)
The steadystate density matrix corresponding the system Liouvillian
`L`.
J_ops : array / list (optional)
List of current superoperators.
Returns
--------
I : array
The currents `I` corresponding to each current collapse operator
`c_ops` (or, equivalently, each current superopeator `J_ops`).
"""
if J_ops is None:
if c_ops is None:
raise ValueError("c_ops must be given if J_ops is not")
J_ops = [sprepost(c, c.dag()) for c in c_ops]
if rhoss is None:
if c_ops is None:
raise ValueError("c_ops must be given if rhoss is not")
rhoss = steadystate(L, c_ops)
rhoss_vec = mat2vec(rhoss.full()).ravel()
N = len(J_ops)
I = np.zeros(N)
for i, Ji in enumerate(J_ops):
I[i] = expect_rho_vec(Ji.data, rhoss_vec, 1)
return I
def countstat_current_noise(L, c_ops, wlist=None, rhoss=None, J_ops=None,
sparse=True, method='direct'):
"""
Compute the cross-current noise spectrum for a list of collapse operators
`c_ops` corresponding to monitored currents, given the system
Liouvillian `L`. The current collapse operators `c_ops` should be part
of the dissipative processes in `L`, but the `c_ops` given here does not
necessarily need to be all collapse operators contributing to dissipation
in the Liouvillian. Optionally, the steadystate density matrix `rhoss`
and the current operators `J_ops` correpsonding to the current collapse
operators `c_ops` can also be specified. If either of
`rhoss` and `J_ops` are omitted, they will be computed internally.
'wlist' is an optional list of frequencies at which to evaluate the noise
spectrum.
Note:
The default method is a direct solution using dense matrices, as sparse
matrix methods fail for some examples of small systems.
For larger systems it is reccomended to use the sparse solver
with the direct method, as it avoids explicit calculation of the
pseudo-inverse, as described in page 67 of "Electrons in nanostructures"
C. Flindt, PhD Thesis, available online:
https://orbit.dtu.dk/fedora/objects/orbit:82314/datastreams/file_4732600/content
Parameters
----------
L : :class:`qutip.Qobj`
Qobj representing the system Liouvillian.
c_ops : array / list
List of current collapse operators.
rhoss : :class:`qutip.Qobj` (optional)
The steadystate density matrix corresponding the system Liouvillian
`L`.
wlist : array / list (optional)
List of frequencies at which to evaluate (if none are given, evaluates
at zero frequency)
J_ops : array / list (optional)
List of current superoperators.
sparse : bool
Flag that indicates whether to use sparse or dense matrix methods when
computing the pseudo inverse. Default is false, as sparse solvers
can fail for small systems. For larger systems the sparse solvers
are reccomended.
Returns
--------
I, S : tuple of arrays
The currents `I` corresponding to each current collapse operator
`c_ops` (or, equivalently, each current superopeator `J_ops`) and the
zero-frequency cross-current correlation `S`.
"""
if rhoss is None:
rhoss = steadystate(L, c_ops)
if J_ops is None:
J_ops = [sprepost(c, c.dag()) for c in c_ops]
N = len(J_ops)
I = np.zeros(N)
if wlist is None:
S = np.zeros((N, N,1))
wlist=[0.]
else:
S = np.zeros((N, N,len(wlist)))
if sparse == False:
rhoss_vec = mat2vec(rhoss.full()).ravel()
for k,w in enumerate(wlist):
R = pseudo_inverse(L, rhoss=rhoss, w= w, sparse = sparse, method=method)
for i, Ji in enumerate(J_ops):
for j, Jj in enumerate(J_ops):
if i == j:
I[i] = expect_rho_vec(Ji.data, rhoss_vec, 1)
S[i, j,k] = I[i]
S[i, j,k] -= expect_rho_vec((Ji * R * Jj
+ Jj * R * Ji).data,
rhoss_vec, 1)
else:
if method == "direct":
N = np.prod(L.dims[0][0])
rhoss_vec = operator_to_vector(rhoss)
tr_op = tensor([identity(n) for n in L.dims[0][0]])
tr_op_vec = operator_to_vector(tr_op)
Pop = sp.kron(rhoss_vec.data, tr_op_vec.data.T, format='csr')
Iop = sp.eye(N*N, N*N, format='csr')
Q = Iop - Pop
for k,w in enumerate(wlist):
if w != 0.0:
L_temp = 1.0j*w*spre(tr_op) + L
else: #At zero frequency some solvers fail for small systems.
#Adding a small finite frequency of order 1e-15
#helps prevent the solvers from throwing an exception.
L_temp = 1.0j*(1e-15)*spre(tr_op) + L
if not settings.has_mkl:
A = L_temp.data.tocsc()
else:
A = L_temp.data.tocsr()
A.sort_indices()
rhoss_vec = mat2vec(rhoss.full()).ravel()
for j, Jj in enumerate(J_ops):
Qj = Q.dot( Jj.data.dot( rhoss_vec))
try:
if settings.has_mkl:
X_rho_vec_j = mkl_spsolve(A,Qj)
else:
X_rho_vec_j = sp.linalg.splu(A, permc_spec
='COLAMD').solve(Qj)
except:
X_rho_vec_j = sp.linalg.lsqr(A,Qj)[0]
for i, Ji in enumerate(J_ops):
Qi = Q.dot( Ji.data.dot(rhoss_vec))
try:
if settings.has_mkl:
X_rho_vec_i = mkl_spsolve(A,Qi)
else:
X_rho_vec_i = sp.linalg.splu(A, permc_spec
='COLAMD').solve(Qi)
except:
X_rho_vec_i = sp.linalg.lsqr(A,Qi)[0]
if i == j:
I[i] = expect_rho_vec(Ji.data,
rhoss_vec, 1)
S[j, i, k] = I[i]
S[j, i, k] -= (expect_rho_vec(Jj.data * Q,
X_rho_vec_i, 1)
+ expect_rho_vec(Ji.data * Q,
X_rho_vec_j, 1))
else:
rhoss_vec = mat2vec(rhoss.full()).ravel()
for k,w in enumerate(wlist):
R = pseudo_inverse(L,rhoss=rhoss, w= w, sparse = sparse,
method=method)
for i, Ji in enumerate(J_ops):
for j, Jj in enumerate(J_ops):
if i == j:
I[i] = expect_rho_vec(Ji.data, rhoss_vec, 1)
S[i, j, k] = I[i]
S[i, j, k] -= expect_rho_vec((Ji * R * Jj
+ Jj * R * Ji).data,
rhoss_vec, 1)
return I, S
| bsd-3-clause | 9ca1c18b45b229fa35a37651b77b38d6 | 37.720339 | 87 | 0.510943 | 3.885204 | false | false | false | false |
qutip/qutip | qutip/control/pulsegen.py | 2 | 41865 | # -*- coding: utf-8 -*-
# @author: Alexander Pitchford
# @email1: agp1@aber.ac.uk
# @email2: alex.pitchford@gmail.com
# @organization: Aberystwyth University
# @supervisor: Daniel Burgarth
"""
Pulse generator - Generate pulses for the timeslots
Each class defines a gen_pulse function that produces a float array of
size num_tslots. Each class produces a different type of pulse.
See the class and gen_pulse function descriptions for details
"""
import numpy as np
import qutip.logging_utils as logging
logger = logging.get_logger()
import qutip.control.dynamics as dynamics
import qutip.control.errors as errors
def create_pulse_gen(pulse_type='RND', dyn=None, pulse_params=None):
    """
    Create and return a pulse generator object matching the given type.
    The pulse generators each produce a different type of pulse,
    see the gen_pulse function description for details.

    These are the random pulse options:

        RND - Independent random value in each timeslot
        RNDFOURIER - Fourier series with random coefficients
        RNDWAVES - Summation of random waves
        RNDWALK1 - Random change in amplitude each timeslot
        RNDWALK2 - Random change in amp gradient each timeslot

    These are the other non-periodic options:

        LIN - Linear, i.e. constant gradient over the time
        ZERO - special case of the LIN pulse, where the gradient is 0

    These are the periodic options:

        SINE - Sine wave
        SQUARE - Square wave
        SAW - Saw tooth wave
        TRIANGLE - Triangular wave

    These are the smooth / parameterised options:

        GAUSSIAN - Gaussian shaped pulse
        GAUSSIAN_EDGE - Flat pulse with Gaussian ramps at each edge
        CRAB_FOURIER - Fourier basis pulse for use with the CRAB algorithm

    If a Dynamics object is passed in then this is used to instantiate
    the PulseGen, meaning that some timeslot and amplitude properties
    are copied over.

    Parameters
    ----------
    pulse_type : str
        One of the pulse type identifiers listed above.
    dyn : Dynamics (optional)
        Dynamics object from which timeslot and amplitude scaling /
        offset attributes are copied.
    pulse_params : dict (optional)
        Key-value pairs set as attributes on the generator
        (see PulseGen.apply_params).

    Returns
    -------
    PulseGen
        An instance of the PulseGen subclass matching pulse_type.

    Raises
    ------
    ValueError
        If pulse_type does not match any known generator type.
    """
    # Single dispatch table keeps the identifier -> class mapping in one
    # place, instead of a long if / elif chain.
    pulse_gen_map = {
        'RND': PulseGenRandom,
        'RNDFOURIER': PulseGenRndFourier,
        'RNDWAVES': PulseGenRndWaves,
        'RNDWALK1': PulseGenRndWalk1,
        'RNDWALK2': PulseGenRndWalk2,
        'LIN': PulseGenLinear,
        'ZERO': PulseGenZero,
        'SINE': PulseGenSine,
        'SQUARE': PulseGenSquare,
        'SAW': PulseGenSaw,
        'TRIANGLE': PulseGenTriangle,
        'GAUSSIAN': PulseGenGaussian,
        'CRAB_FOURIER': PulseGenCrabFourier,
        'GAUSSIAN_EDGE': PulseGenGaussianEdge,
    }
    try:
        gen_class = pulse_gen_map[pulse_type]
    except KeyError:
        raise ValueError("No option for pulse_type '{}'".format(pulse_type))
    return gen_class(dyn, params=pulse_params)
class PulseGen(object):
    """
    Pulse generator
    Base class for all Pulse generators
    The object can optionally be instantiated with a Dynamics object,
    in which case the timeslots and amplitude scaling and offset
    are copied from that.
    Otherwise the class can be used independently by setting:
    tau (array of timeslot durations)
    or
    num_tslots and pulse_time for equally spaced timeslots

    Attributes
    ----------
    num_tslots : integer
        Number of timeslots, aka timeslices
        (copied from Dynamics if given)

    pulse_time : float
        total duration of the pulse
        (copied from Dynamics.evo_time if given)

    scaling : float
        linear scaling applied to the pulse
        (copied from Dynamics.initial_ctrl_scaling if given)

    offset : float
        linear offset applied to the pulse
        (copied from Dynamics.initial_ctrl_offset if given)

    tau : array[num_tslots] of float
        Duration of each timeslot
        (copied from Dynamics if given)

    lbound : float
        Lower boundary for the pulse amplitudes
        Note that the scaling and offset attributes can be used to fully
        bound the pulse for all generators except some of the random ones
        This bound (if set) may result in additional shifting / scaling
        Default is -Inf

    ubound : float
        Upper boundary for the pulse amplitudes
        Note that the scaling and offset attributes can be used to fully
        bound the pulse for all generators except some of the random ones
        This bound (if set) may result in additional shifting / scaling
        Default is Inf

    periodic : boolean
        True if the pulse generator produces periodic pulses

    random : boolean
        True if the pulse generator produces random pulses

    log_level : integer
        level of messaging output from the logger.
        Options are attributes of qutip.logging_utils,
        in decreasing levels of messaging, are:
        DEBUG_INTENSE, DEBUG_VERBOSE, DEBUG, INFO, WARN, ERROR, CRITICAL
        Anything WARN or above is effectively 'quiet' execution,
        assuming everything runs as expected.
        The default NOTSET implies that the level will be taken from
        the QuTiP settings file, which by default is WARN
    """
    def __init__(self, dyn=None, params=None):
        # parent is the (optional) Dynamics object whose timeslot and
        # amplitude attributes are copied during reset
        self.parent = dyn
        self.params = params
        self.reset()

    def reset(self):
        """
        reset attributes to default values
        """
        if isinstance(self.parent, dynamics.Dynamics):
            # Copy the timeslot and amplitude scaling / offset
            # attributes from the parent Dynamics object
            dyn = self.parent
            self.num_tslots = dyn.num_tslots
            self.pulse_time = dyn.evo_time
            self.scaling = dyn.initial_ctrl_scaling
            self.offset = dyn.initial_ctrl_offset
            self.tau = dyn.tau
            self.log_level = dyn.log_level
        else:
            # Standalone defaults
            self.num_tslots = 100
            self.pulse_time = 1.0
            self.scaling = 1.0
            self.tau = None
            self.offset = 0.0

        self._uses_time = False
        self.time = None
        self._pulse_initialised = False
        self.periodic = False
        self.random = False
        self.lbound = None
        self.ubound = None
        self.ramping_pulse = None

        self.apply_params()

    def apply_params(self, params=None):
        """
        Set object attributes based on the dictionary (if any) passed in the
        instantiation, or passed as a parameter
        This is called during the instantiation automatically.
        The key value pairs are the attribute name and value
        """
        if not params:
            params = self.params

        if isinstance(params, dict):
            self.params = params
            for key in params:
                setattr(self, key, params[key])

    @property
    def log_level(self):
        return logger.level

    @log_level.setter
    def log_level(self, lvl):
        """
        Set the log_level attribute and set the level of the logger
        that is, call logger.setLevel(lvl)
        """
        logger.setLevel(lvl)

    def gen_pulse(self):
        """
        returns the pulse as an array of values for each timeslot
        Must be implemented by subclass
        """
        # must be implemented by subclass
        raise errors.UsageError(
            "No method defined for generating a pulse. "
            " Suspect base class was used where sub class should have been")

    def init_pulse(self):
        """
        Initialise the pulse parameters
        """
        if self.tau is None:
            # Default to equal duration timeslots over the pulse time
            self.tau = np.ones(self.num_tslots, dtype='f') * \
                self.pulse_time/self.num_tslots

        if self._uses_time:
            # Pre-compute the cumulative start time of each timeslot
            self.time = np.zeros(self.num_tslots, dtype=float)
            for k in range(self.num_tslots-1):
                self.time[k+1] = self.time[k] + self.tau[k]

        self._pulse_initialised = True

        # Infinite bounds are equivalent to no bound at all
        if self.lbound is not None:
            if np.isinf(self.lbound):
                self.lbound = None
        if self.ubound is not None:
            if np.isinf(self.ubound):
                self.ubound = None

        if self.ubound is not None and self.lbound is not None:
            if self.ubound < self.lbound:
                raise ValueError("ubound cannot be less than the lbound")

    def _apply_bounds_and_offset(self, pulse):
        """
        Ensure that the randomly generated pulse fits within the bounds
        (after applying the offset)
        Assumes that pulses passed are centered around zero (on average)
        """
        if self.lbound is None and self.ubound is None:
            # Unbounded: just apply the linear offset
            return pulse + self.offset

        max_amp = max(pulse)
        min_amp = min(pulse)
        if ((self.ubound is None or max_amp + self.offset <= self.ubound) and
                (self.lbound is None or min_amp + self.offset >= self.lbound)):
            # Already fits within the bounds after the offset
            return pulse + self.offset

        # Some shifting / scaling is required.
        if self.ubound is None or self.lbound is None:
            # One of the bounds is inf, so just shift the pulse
            if self.lbound is None:
                # shift the pulse down so its maximum sits on the ubound
                return pulse + self.ubound - max_amp
            else:
                # shift the pulse up so its minimum sits on the lbound
                return pulse + self.lbound - min_amp
        else:
            bound_range = self.ubound - self.lbound
            amp_range = max_amp - min_amp
            if max_amp - min_amp > bound_range:
                # pulse range is too high, it must be scaled
                pulse = pulse * bound_range / amp_range

            # the pulse is now guaranteed to fit the bound range,
            # so shift it up so its minimum sits on the lbound
            return pulse + self.lbound - min(pulse)

    def _apply_ramping_pulse(self, pulse, ramping_pulse=None):
        # Element-wise modulation by a ramping envelope (if one is set)
        if ramping_pulse is None:
            ramping_pulse = self.ramping_pulse
        if ramping_pulse is not None:
            pulse = pulse*ramping_pulse

        return pulse
class PulseGenZero(PulseGen):
    """
    Generates a flat pulse
    """
    def gen_pulse(self):
        """
        Generate a pulse that takes a single constant value in every
        timeslot. That value is zero, unless a non-zero offset is set,
        in which case every timeslot takes the offset value.
        """
        amps = np.zeros(self.num_tslots)
        return self._apply_bounds_and_offset(amps)
class PulseGenRandom(PulseGen):
    """
    Generates random pulses as simply random values for each timeslot
    """
    def reset(self):
        # Base-class defaults, then flag this generator as random
        PulseGen.reset(self)
        self.random = True
        self.apply_params()

    def gen_pulse(self):
        """
        Generate a pulse of uniformly distributed random values in the
        range [-1, 1), multiplied by the scaling attribute and shifted
        by the offset attribute (subject to any amplitude bounds).
        Returns the pulse as an array of values, one per timeslot.
        """
        uniform = np.random.random(self.num_tslots)
        pulse = (2*uniform - 1) * self.scaling
        return self._apply_bounds_and_offset(pulse)
class PulseGenRndFourier(PulseGen):
    """
    Generates pulses by summing sine waves as a Fourier series
    with random coefficients

    Attributes
    ----------
    scaling : float
        The pulses should fit approximately within -/+scaling
        (before the offset is applied)
        as it is used to set a maximum for each component wave
        Use bounds to be sure
        (copied from Dynamics.initial_ctrl_scaling if given)

    min_wavelen : float
        Minimum wavelength of any component wave
        Set by default to 1/10th of the pulse time
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self.random = True
        self._uses_time = True
        # pulse_time may be unset or None at this point; fall back to a
        # fixed default.  (Previously a bare 'except:' swallowed every
        # exception here, including KeyboardInterrupt.)
        try:
            self.min_wavelen = self.pulse_time / 10.0
        except (AttributeError, TypeError):
            self.min_wavelen = 0.1
        self.apply_params()

    def gen_pulse(self, min_wavelen=None):
        """
        Generate a random pulse based on a Fourier series with a minimum
        wavelength

        Parameters
        ----------
        min_wavelen : float, optional
            Minimum wavelength of any component wave. If given it
            overrides (and updates) the attribute of the same name.

        Raises
        ------
        ValueError
            If the minimum wavelength exceeds the pulse time.
        """
        if min_wavelen is not None:
            self.min_wavelen = min_wavelen
        min_wavelen = self.min_wavelen

        if min_wavelen > self.pulse_time:
            raise ValueError("Minimum wavelength cannot be greater than "
                             "the pulse time")
        if not self._pulse_initialised:
            self.init_pulse()

        # use some phase to avoid the first pulse being always 0
        sum_wave = np.zeros(self.tau.shape)
        wavelen = 2.0*self.pulse_time
        t = self.time

        # Component wavelengths: successive halvings of twice the pulse
        # time, down to (but not below) the minimum wavelength.
        wl = []
        while wavelen > min_wavelen:
            wl.append(wavelen)
            wavelen = wavelen/2.0

        num_comp_waves = len(wl)
        # Normalise so the summed wave fits approximately within +/-scaling
        amp_scale = np.sqrt(8)*self.scaling / float(num_comp_waves)

        for wavelen in wl:
            amp = amp_scale*(np.random.rand()*2 - 1)
            phase_off = np.random.rand()*np.pi/2.0
            curr_wave = amp*np.sin(2*np.pi*t/wavelen + phase_off)
            sum_wave += curr_wave

        return self._apply_bounds_and_offset(sum_wave)
class PulseGenRndWaves(PulseGen):
    """
    Generates pulses by summing sine waves with random frequencies
    amplitudes and phase offset

    Attributes
    ----------
    scaling : float
        The pulses should fit approximately within -/+scaling
        (before the offset is applied)
        as it is used to set a maximum for each component wave
        Use bounds to be sure
        (copied from Dynamics.initial_ctrl_scaling if given)

    num_comp_waves : integer
        Number of component waves. That is the number of waves that
        are summed to make the pulse signal
        Set to 20 by default.

    min_wavelen : float
        Minimum wavelength of any component wave
        Set by default to 1/10th of the pulse time

    max_wavelen : float
        Maximum wavelength of any component wave
        Set by default to twice the pulse time
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self.random = True
        self._uses_time = True
        self.num_comp_waves = 20
        # pulse_time may be unset or None at this point; fall back to
        # fixed defaults.  (Previously bare 'except:' clauses swallowed
        # every exception here, including KeyboardInterrupt.)
        try:
            self.min_wavelen = self.pulse_time / 10.0
        except (AttributeError, TypeError):
            self.min_wavelen = 0.1
        try:
            self.max_wavelen = 2*self.pulse_time
        except (AttributeError, TypeError):
            self.max_wavelen = 10.0
        self.apply_params()

    def gen_pulse(self, num_comp_waves=None,
                  min_wavelen=None, max_wavelen=None):
        """
        Generate a random pulse by summing sine waves with random freq,
        amplitude and phase offset

        Parameters
        ----------
        num_comp_waves : integer, optional
            Number of component waves to sum.
        min_wavelen : float, optional
            Minimum wavelength of any component wave.
        max_wavelen : float, optional
            Maximum wavelength of any component wave.

        Any parameter that is given overrides (and updates) the attribute
        of the same name.

        Raises
        ------
        ValueError
            If min_wavelen exceeds the pulse time, or max_wavelen does
            not exceed min_wavelen.
        """
        if num_comp_waves is not None:
            self.num_comp_waves = num_comp_waves
        if min_wavelen is not None:
            self.min_wavelen = min_wavelen
        if max_wavelen is not None:
            self.max_wavelen = max_wavelen

        num_comp_waves = self.num_comp_waves
        min_wavelen = self.min_wavelen
        max_wavelen = self.max_wavelen

        if min_wavelen > self.pulse_time:
            raise ValueError("Minimum wavelength cannot be greater than "
                             "the pulse time")
        if max_wavelen <= min_wavelen:
            raise ValueError("Maximum wavelength must be greater than "
                             "the minimum wavelength")

        if not self._pulse_initialised:
            self.init_pulse()

        # use some phase to avoid the first pulse being always 0
        sum_wave = np.zeros(self.tau.shape)
        t = self.time
        wl_range = max_wavelen - min_wavelen
        # Normalise so the summed wave fits approximately within +/-scaling
        amp_scale = np.sqrt(8)*self.scaling / float(num_comp_waves)
        for n in range(num_comp_waves):
            amp = amp_scale*(np.random.rand()*2 - 1)
            phase_off = np.random.rand()*np.pi/2.0
            wavelen = min_wavelen + np.random.rand()*wl_range
            curr_wave = amp*np.sin(2*np.pi*t/wavelen + phase_off)
            sum_wave += curr_wave

        return self._apply_bounds_and_offset(sum_wave)
class PulseGenRndWalk1(PulseGen):
    """
    Generates pulses by using a random walk algorithm

    Attributes
    ----------
    scaling : float
        Used as the range for the starting amplitude
        Note must used bounds if values must be restricted.
        Also scales the max_d_amp value
        (copied from Dynamics.initial_ctrl_scaling if given)

    max_d_amp : float
        Maximum amount amplitude will change between timeslots
        Note this is also factored by the scaling attribute
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self.random = True
        self.max_d_amp = 0.1
        self.apply_params()

    def gen_pulse(self, max_d_amp=None):
        """
        Generate a pulse by changing the amplitude a random amount between
        -max_d_amp and +max_d_amp at each timeslot. The walk will start at
        a random amplitude between -/+scaling.
        """
        if max_d_amp is not None:
            self.max_d_amp = max_d_amp
        step_limit = self.max_d_amp*self.scaling

        if not self._pulse_initialised:
            self.init_pulse()

        walk = np.zeros(self.tau.shape)
        # Random starting level within +/-scaling
        level = self.scaling*(np.random.rand()*2 - 1)
        for idx in range(len(walk)):
            walk[idx] = level
            # Step the level by a random amount within +/-step_limit
            level += (np.random.rand()*2 - 1)*step_limit

        return self._apply_bounds_and_offset(walk)
class PulseGenRndWalk2(PulseGen):
    """
    Generates pulses by using a random walk algorithm
    Note this is best used with bounds as the walks tend to wander far

    Attributes
    ----------
    scaling : float
        Used as the range for the starting amplitude
        Note must used bounds if values must be restricted.
        Also scales the max_d2_amp value
        (copied from Dynamics.initial_ctrl_scaling if given)

    max_d2_amp : float
        Maximum amount amplitude gradient will change between timeslots
        Note this is also factored by the scaling attribute
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self.random = True
        self.max_d2_amp = 0.01
        self.apply_params()

    def gen_pulse(self, init_grad_range=None, max_d2_amp=None):
        """
        Generate a pulse by changing the amplitude gradient a random amount
        between -max_d2_amp and +max_d2_amp at each timeslot.
        The walk will start at a random amplitude between -/+scaling.
        The gradient will start at 0

        Note ``init_grad_range`` is accepted for interface compatibility
        but is currently unused.
        """
        if max_d2_amp is not None:
            self.max_d2_amp = max_d2_amp

        max_d2_amp = self.max_d2_amp
        if not self._pulse_initialised:
            self.init_pulse()

        walk = np.zeros(self.tau.shape)
        # Random starting amplitude within +/-scaling; gradient starts at 0.
        # (Debug print statements that wrote to stdout on every call have
        # been removed.)
        amp = self.scaling*(np.random.rand()*2 - 1)
        grad = 0.0
        for k in range(len(walk)):
            walk[k] = amp
            grad += (np.random.rand()*2 - 1)*max_d2_amp
            amp += grad

        return self._apply_bounds_and_offset(walk)
class PulseGenLinear(PulseGen):
    """
    Generates linear pulses

    Attributes
    ----------
    gradient : float
        Gradient of the line.
        Note this is calculated from the start_val and end_val if these
        are given

    start_val : float
        Start point of the line. That is the starting amplitude

    end_val : float
        End point of the line.
        That is the amplitude at the start of the last timeslot
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self.gradient = None
        self.start_val = -1.0
        self.end_val = 1.0
        self.apply_params()

    def init_pulse(self, gradient=None, start_val=None, end_val=None):
        """
        Calculate the gradient if pulse is defined by start and
        end point values

        An explicitly passed *gradient* now takes precedence (previously
        this parameter was accepted but silently ignored, and the gradient
        was always recomputed from the start/end values).
        """
        PulseGen.init_pulse(self)
        if gradient is not None:
            self.gradient = gradient
        if start_val is not None and end_val is not None:
            self.start_val = start_val
            self.end_val = end_val

        # Only derive the gradient from the end points when no explicit
        # gradient was supplied for this call.
        if gradient is None and \
                self.start_val is not None and self.end_val is not None:
            self.gradient = float(self.end_val - self.start_val) / \
                (self.pulse_time - self.tau[-1])

    def gen_pulse(self, gradient=None, start_val=None, end_val=None):
        """
        Generate a linear pulse using either the gradient and start value
        or using the end point to calulate the gradient
        Note that the scaling and offset parameters are still applied,
        so unless these values are the default 1.0 and 0.0, then the
        actual gradient etc will be different
        Returns the pulse as an array of vales for each timeslot
        """
        if (gradient is not None or
                start_val is not None or end_val is not None):
            self.init_pulse(gradient, start_val, end_val)
        if not self._pulse_initialised:
            self.init_pulse()

        pulse = np.empty(self.num_tslots)
        t = 0.0
        for k in range(self.num_tslots):
            # amplitude at the start of timeslot k
            y = self.gradient*t + self.start_val
            pulse[k] = self.scaling*y
            t = t + self.tau[k]

        return self._apply_bounds_and_offset(pulse)
class PulseGenPeriodic(PulseGen):
    """
    Intermediate class for all periodic pulse generators
    All of the periodic pulses range from -1 to 1
    All have a start phase that can be set between 0 and 2pi

    Attributes
    ----------
    num_waves : float
        Number of complete waves (cycles) that occur in the pulse.
        wavelen and freq calculated from this if it is given

    wavelen : float
        Wavelength of the pulse (assuming the speed is 1)
        freq is calculated from this if it is given

    freq : float
        Frequency of the pulse

    start_phase : float
        Phase of the pulse signal when t=0
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self.periodic = True
        self.num_waves = None
        self.freq = 1.0
        self.wavelen = None
        self.start_phase = 0.0
        self.apply_params()

    def init_pulse(self, num_waves=None, wavelen=None,
                   freq=None, start_phase=None):
        """
        Calculate the wavelength, frequency, number of waves etc
        from the each other and the other parameters
        If num_waves is given then the other parameters are worked from this
        Otherwise if the wavelength is given then it is the driver
        Otherwise the frequency is used to calculate wavelength and num_waves
        """
        PulseGen.init_pulse(self)

        if start_phase is not None:
            self.start_phase = start_phase

        if num_waves is not None or wavelen is not None or freq is not None:
            self.num_waves = num_waves
            self.wavelen = wavelen
            self.freq = freq

        if self.num_waves is not None:
            self.freq = float(self.num_waves) / self.pulse_time
            self.wavelen = 1.0/self.freq
        elif self.wavelen is not None:
            self.freq = 1.0/self.wavelen
            # num_waves = pulse_time / wavelen (the inverse of the relation
            # used above; the previous code multiplied instead of dividing)
            self.num_waves = self.pulse_time/self.wavelen
        else:
            self.wavelen = 1.0/self.freq
            self.num_waves = self.pulse_time/self.wavelen
class PulseGenSine(PulseGenPeriodic):
    """Pulse generator producing sine wave pulses."""

    def gen_pulse(self, num_waves=None, wavelen=None,
                  freq=None, start_phase=None):
        """
        Generate a sine wave pulse.

        When no parameters are supplied the current object attributes are
        used; any supplied parameter reinitialises the pulse attributes.
        Returns the pulse as an array with one value per timeslot.
        """
        if start_phase is not None:
            self.start_phase = start_phase

        if num_waves is not None or wavelen is not None or freq is not None:
            self.init_pulse(num_waves, wavelen, freq, start_phase)

        if not self._pulse_initialised:
            self.init_pulse()

        amps = []
        t = 0.0
        for dt in self.tau:
            # phase at the start of this timeslot
            phase = 2*np.pi*self.freq*t + self.start_phase
            amps.append(self.scaling*np.sin(phase))
            t += dt
        return self._apply_bounds_and_offset(np.array(amps))
class PulseGenSquare(PulseGenPeriodic):
    """Pulse generator producing square wave pulses."""

    def gen_pulse(self, num_waves=None, wavelen=None,
                  freq=None, start_phase=None):
        """
        Generate a square wave pulse.

        When no parameters are supplied the current object attributes are
        used; any supplied parameter reinitialises the pulse attributes.
        Returns the pulse as an array with one value per timeslot.
        """
        if start_phase is not None:
            self.start_phase = start_phase

        if num_waves is not None or wavelen is not None or freq is not None:
            self.init_pulse(num_waves, wavelen, freq, start_phase)

        if not self._pulse_initialised:
            self.init_pulse()

        amps = []
        t = 0.0
        for dt in self.tau:
            phase = 2*np.pi*self.freq*t + self.start_phase
            # fraction of a full cycle; floor arithmetic yields +/-1 levels
            cycles = phase/(2*np.pi)
            level = 4*np.floor(cycles) - 2*np.floor(2*cycles) + 1
            amps.append(self.scaling*level)
            t += dt
        return self._apply_bounds_and_offset(np.array(amps))
class PulseGenSaw(PulseGenPeriodic):
    """Pulse generator producing saw tooth wave pulses."""

    def gen_pulse(self, num_waves=None, wavelen=None,
                  freq=None, start_phase=None):
        """
        Generate a saw tooth wave pulse.

        When no parameters are supplied the current object attributes are
        used; any supplied parameter reinitialises the pulse attributes.
        Returns the pulse as an array with one value per timeslot.
        """
        if start_phase is not None:
            self.start_phase = start_phase

        if num_waves is not None or wavelen is not None or freq is not None:
            self.init_pulse(num_waves, wavelen, freq, start_phase)

        if not self._pulse_initialised:
            self.init_pulse()

        amps = []
        t = 0.0
        for dt in self.tau:
            phase = 2*np.pi*self.freq*t + self.start_phase
            # fraction of a full cycle; sawtooth ramps from -1 to 1
            cycles = phase/(2*np.pi)
            level = 2*(cycles - np.floor(0.5 + cycles))
            amps.append(self.scaling*level)
            t += dt
        return self._apply_bounds_and_offset(np.array(amps))
class PulseGenTriangle(PulseGenPeriodic):
    """Pulse generator producing triangular wave pulses."""

    def gen_pulse(self, num_waves=None, wavelen=None,
                  freq=None, start_phase=None):
        """
        Generate a triangular wave pulse.

        When no parameters are supplied the current object attributes are
        used; any supplied parameter reinitialises the pulse attributes.
        Returns the pulse as an array with one value per timeslot.
        """
        if start_phase is not None:
            self.start_phase = start_phase

        if num_waves is not None or wavelen is not None or freq is not None:
            self.init_pulse(num_waves, wavelen, freq, start_phase)

        if not self._pulse_initialised:
            self.init_pulse()

        amps = []
        t = 0.0
        for dt in self.tau:
            # quarter-cycle shift so the wave starts at its midpoint
            phase = 2*np.pi*self.freq*t + self.start_phase + np.pi/2.0
            cycles = phase/(2*np.pi)
            level = 2*np.abs(2*(cycles - np.floor(0.5 + cycles))) - 1
            amps.append(self.scaling*level)
            t += dt
        return self._apply_bounds_and_offset(np.array(amps))
class PulseGenGaussian(PulseGen):
    """
    Generates pulses with a Gaussian profile
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self._uses_time = True
        self.mean = 0.5*self.pulse_time
        self.variance = 0.5*self.pulse_time
        self.apply_params()

    def gen_pulse(self, mean=None, variance=None):
        """
        Generate a pulse with Gaussian shape. The peak is centre around the
        mean and the variance determines the breadth
        The scaling and offset attributes are applied as an amplitude
        and fixed linear offset. Note that the maximum amplitude will be
        scaling + offset.

        Parameters
        ----------
        mean : float, optional
            Centre of the peak. Defaults to the ``mean`` attribute.
        variance : float, optional
            Variance (breadth) of the peak. Defaults to the ``variance``
            attribute.
        """
        if not self._pulse_initialised:
            self.init_pulse()

        # Use 'is not None' so an explicit mean of 0.0 is honoured
        # (a plain truth test silently ignored it).
        Tm = mean if mean is not None else self.mean
        Tv = variance if variance is not None else self.variance
        t = self.time
        pulse = self.scaling*np.exp(-(t-Tm)**2/(2*Tv))
        return self._apply_bounds_and_offset(pulse)
class PulseGenGaussianEdge(PulseGen):
    """
    Generate pulses with inverted Gaussian ramping in and out
    It's intended use for a ramping modulation, which is often required in
    experimental setups.

    Attributes
    ----------
    decay_time : float
        Determines the ramping rate. It is approximately the time
        required to bring the pulse to full amplitude
        It is set to 1/10 of the pulse time by default
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)
        self._uses_time = True
        self.decay_time = self.pulse_time / 10.0
        self.apply_params()

    def gen_pulse(self, decay_time=None):
        """
        Generate a pulse that starts and ends at zero and 1.0 in between
        then apply scaling and offset
        The tailing in and out is an inverted Gaussian shape

        Parameters
        ----------
        decay_time : float, optional
            Ramping rate. Defaults to the ``decay_time`` attribute.
        """
        if not self._pulse_initialised:
            self.init_pulse()

        t = self.time
        # Use 'is not None' so an explicitly passed value is always
        # honoured (a plain truth test would ignore 0, which is invalid
        # here anyway as it would divide by zero below).
        Td = decay_time if decay_time is not None else self.decay_time
        T = self.pulse_time
        # Inverted Gaussians at each end ramp the amplitude from/to zero
        pulse = 1.0 - np.exp(-t**2/Td) - np.exp(-(t-T)**2/Td)
        pulse = pulse*self.scaling

        return self._apply_bounds_and_offset(pulse)
### The following are pulse generators for the CRAB algorithm ###
# AJGP 2015-05-14:
# The intention is to have a more general base class that allows
# setting of general basis functions
class PulseGenCrab(PulseGen):
    """
    Base class for all CRAB pulse generators
    Note these are more involved in the optimisation process as they are
    used to produce piecewise control amplitudes each time new optimisation
    parameters are tried

    Attributes
    ----------
    num_coeffs : integer
        Number of coefficients used for each basis function

    num_basis_funcs : integer
        Number of basis functions
        In this case set at 2 and should not be changed

    coeffs : float array[num_coeffs, num_basis_funcs]
        The basis coefficient values

    randomize_coeffs : bool
        If True (default) then the coefficients are set to some random values
        when initialised, otherwise they will all be equal to self.scaling
    """
    def __init__(self, dyn=None, num_coeffs=None, params=None):
        # dyn is expected to be a Dynamics object (checked in init_coeffs)
        self.parent = dyn
        self.num_coeffs = num_coeffs
        self.params = params
        self.reset()

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGen.reset(self)

        # Level above which a warning about the coefficient count is
        # logged, and the fallback default count.
        self.NUM_COEFFS_WARN_LVL = 20
        self.DEF_NUM_COEFFS = 4
        # Flags selecting how bounds are applied (see _apply_bounds)
        self._BSC_ALL = 1
        self._BSC_GT_MEAN = 2
        self._BSC_LT_MEAN = 3

        self._uses_time = True
        self.time = None
        self.num_basis_funcs = 2
        self.num_optim_vars = 0
        self.coeffs = None
        self.randomize_coeffs = True
        self._num_coeffs_estimated = False
        self.guess_pulse_action = 'MODULATE'
        self.guess_pulse = None
        self.guess_pulse_func = None
        self.apply_params()

    def init_pulse(self, num_coeffs=None):
        """
        Set the initial freq and coefficient values
        """
        PulseGen.init_pulse(self)
        self.init_coeffs(num_coeffs=num_coeffs)

        if self.guess_pulse is not None:
            self.init_guess_pulse()
        self._init_bounds()

        if self.log_level <= logging.DEBUG and not self._num_coeffs_estimated:
            logger.debug(
                "CRAB pulse initialised with {} coefficients per basis "
                "function, which means a total of {} "
                "optimisation variables for this pulse".format(
                    self.num_coeffs, self.num_optim_vars))

    def init_coeffs(self, num_coeffs=None):
        """
        Generate the initial ceofficent values.

        Parameters
        ----------
        num_coeffs : integer
            Number of coefficients used for each basis function
            If given this overides the default and sets the attribute
            of the same name.
        """
        if num_coeffs:
            self.num_coeffs = num_coeffs

        self._num_coeffs_estimated = False
        if not self.num_coeffs:
            # No count supplied: estimate it from the system dimension
            # when a Dynamics parent is available, else use the default.
            if isinstance(self.parent, dynamics.Dynamics):
                dim = self.parent.get_drift_dim()
                self.num_coeffs = self.estimate_num_coeffs(dim)
                self._num_coeffs_estimated = True
            else:
                self.num_coeffs = self.DEF_NUM_COEFFS
        self.num_optim_vars = self.num_coeffs*self.num_basis_funcs

        if self._num_coeffs_estimated:
            if self.log_level <= logging.INFO:
                logger.info(
                    "The number of CRAB coefficients per basis function "
                    "has been estimated as {}, which means a total of {} "
                    "optimisation variables for this pulse. Based on the "
                    "dimension ({}) of the system".format(
                        self.num_coeffs, self.num_optim_vars, dim))
            # Issue warning if beyond the recommended level
            if self.log_level <= logging.WARN:
                if self.num_coeffs > self.NUM_COEFFS_WARN_LVL:
                    # logger.warn is deprecated; use logger.warning
                    logger.warning(
                        "The estimated number of coefficients {} exceeds "
                        "the amount ({}) recommended for efficient "
                        "optimisation. You can set this level explicitly "
                        "to suppress this message.".format(
                            self.num_coeffs, self.NUM_COEFFS_WARN_LVL))

        if self.randomize_coeffs:
            # Uniform random coefficients in (-scaling, scaling)
            r = np.random.random([self.num_coeffs, self.num_basis_funcs])
            self.coeffs = (2*r - 1.0) * self.scaling
        else:
            self.coeffs = np.ones([self.num_coeffs,
                                   self.num_basis_funcs])*self.scaling

    def estimate_num_coeffs(self, dim):
        """
        Estimate the number coefficients based on the dimensionality of the
        system.

        Returns
        -------
        num_coeffs : int
            estimated number of coefficients
        """
        num_coeffs = max(2, dim - 1)
        return num_coeffs

    def get_optim_var_vals(self):
        """
        Get the parameter values to be optimised

        Returns
        -------
        list (or 1d array) of floats
        """
        return self.coeffs.ravel().tolist()

    def set_optim_var_vals(self, param_vals):
        """
        Set the values of the any of the pulse generation parameters
        based on new values from the optimisation method
        Typically this will be the basis coefficients
        """
        # Type and size checking avoided here as this is in the
        # main optmisation call sequence
        self.set_coeffs(param_vals)

    def set_coeffs(self, param_vals):
        """Reshape a flat array of optimisation variable values into the
        [num_coeffs, num_basis_funcs] coefficient matrix."""
        self.coeffs = param_vals.reshape(
            [self.num_coeffs, self.num_basis_funcs])

    def init_guess_pulse(self):
        """Select the function used to combine the guess pulse with the
        generated pulse, based on the guess_pulse_action attribute."""
        self.guess_pulse_func = None
        if not self.guess_pulse_action:
            # Logger objects have no WARN method; the previous
            # logger.WARN(...) call raised AttributeError at runtime.
            logger.warning("No guess pulse action given, hence ignored.")
        elif self.guess_pulse_action.upper() == 'MODULATE':
            self.guess_pulse_func = self.guess_pulse_modulate
        elif self.guess_pulse_action.upper() == 'ADD':
            self.guess_pulse_func = self.guess_pulse_add
        else:
            logger.warning("No option for guess pulse action '{}' "
                           ", hence ignored.".format(self.guess_pulse_action))

    def guess_pulse_add(self, pulse):
        """Combine by adding the guess pulse to the generated pulse."""
        pulse = pulse + self.guess_pulse
        return pulse

    def guess_pulse_modulate(self, pulse):
        """Combine by modulating the guess pulse with (1 + pulse)."""
        pulse = (1.0 + pulse)*self.guess_pulse
        return pulse

    def _init_bounds(self):
        """Precompute the mean, scale and condition flag used by
        _apply_bounds to squash amplitudes into the allowed range."""
        if self.lbound is None and self.ubound is None:
            # no bounds to apply
            self._bound_scale_cond = None
        elif self.lbound is None:
            # only upper bound
            if self.ubound > 0:
                self._bound_mean = 0.0
                self._bound_scale = self.ubound
            else:
                self._bound_scale = self.scaling*self.num_coeffs + \
                    self.get_guess_pulse_scale()
                self._bound_mean = -abs(self._bound_scale) + self.ubound
            self._bound_scale_cond = self._BSC_GT_MEAN

        elif self.ubound is None:
            # only lower bound
            if self.lbound < 0:
                self._bound_mean = 0.0
                self._bound_scale = abs(self.lbound)
            else:
                self._bound_scale = self.scaling*self.num_coeffs + \
                    self.get_guess_pulse_scale()
                self._bound_mean = abs(self._bound_scale) + self.lbound
            self._bound_scale_cond = self._BSC_LT_MEAN

        else:
            # lower and upper bounds
            self._bound_mean = 0.5*(self.ubound + self.lbound)
            self._bound_scale = 0.5*(self.ubound - self.lbound)
            self._bound_scale_cond = self._BSC_ALL

    def get_guess_pulse_scale(self):
        """Return a scale derived from the guess pulse amplitude range,
        or 0.0 when no guess pulse is set."""
        scale = 0.0
        if self.guess_pulse is not None:
            scale = max(np.amax(self.guess_pulse) - np.amin(self.guess_pulse),
                        np.amax(self.guess_pulse))
        return scale

    def _apply_bounds(self, pulse):
        """
        Scaling the amplitudes using the tanh function if there are bounds
        """
        if self._bound_scale_cond == self._BSC_ALL:
            pulse = np.tanh(pulse)*self._bound_scale + self._bound_mean
            return pulse
        elif self._bound_scale_cond == self._BSC_GT_MEAN:
            # only squash amplitudes above the mean (one-sided upper bound)
            scale_where = pulse > self._bound_mean
            pulse[scale_where] = (np.tanh(pulse[scale_where])*self._bound_scale
                                  + self._bound_mean)
            return pulse
        elif self._bound_scale_cond == self._BSC_LT_MEAN:
            # only squash amplitudes below the mean (one-sided lower bound)
            scale_where = pulse < self._bound_mean
            pulse[scale_where] = (np.tanh(pulse[scale_where])*self._bound_scale
                                  + self._bound_mean)
            return pulse
        else:
            return pulse
class PulseGenCrabFourier(PulseGenCrab):
    """
    Generates a pulse using the Fourier basis functions, i.e. sin and cos

    Attributes
    ----------
    freqs : float array[num_coeffs]
        Frequencies for the basis functions
    randomize_freqs : bool
        If True (default) the some random offset is applied to the frequencies
    """

    def reset(self):
        """
        reset attributes to default values
        """
        PulseGenCrab.reset(self)
        self.freqs = None
        self.randomize_freqs = True

    def init_pulse(self, num_coeffs=None):
        """
        Set the initial freq and coefficient values
        """
        # Forward num_coeffs to the base class (previously it was
        # accepted here but dropped).
        PulseGenCrab.init_pulse(self, num_coeffs)

        self.init_freqs()

    def init_freqs(self):
        """
        Generate the frequencies
        These are the Fourier harmonics with a uniformly distributed
        random offset
        """
        self.freqs = np.empty(self.num_coeffs)
        ff = 2*np.pi / self.pulse_time
        for i in range(self.num_coeffs):
            self.freqs[i] = ff*(i + 1)

        if self.randomize_freqs:
            self.freqs += np.random.random(self.num_coeffs) - 0.5

    def gen_pulse(self, coeffs=None):
        """
        Generate a pulse using the Fourier basis with the freqs and
        coeffs attributes.

        Parameters
        ----------
        coeffs : float array[num_coeffs, num_basis_funcs]
            The basis coefficient values
            If given this overides the default and sets the attribute
            of the same name.
        """
        # 'is not None' is required here: truth-testing a multi-element
        # numpy array raises ValueError, and a valid all-zero coefficient
        # array would otherwise be silently ignored.
        if coeffs is not None:
            self.coeffs = coeffs

        if not self._pulse_initialised:
            self.init_pulse()

        pulse = np.zeros(self.num_tslots)

        # Sum the sin/cos components for each harmonic frequency
        for i in range(self.num_coeffs):
            phase = self.freqs[i]*self.time
            pulse += self.coeffs[i, 0]*np.sin(phase) + \
                self.coeffs[i, 1]*np.cos(phase)

        if self.guess_pulse_func:
            pulse = self.guess_pulse_func(pulse)
        if self.ramping_pulse is not None:
            pulse = self._apply_ramping_pulse(pulse)

        return self._apply_bounds(pulse)
| bsd-3-clause | d36a3b1ba9120842c7de29368cf4a469 | 32.492 | 79 | 0.588941 | 3.999331 | false | false | false | false |
qutip/qutip | qutip/legacy/bloch_redfield.py | 2 | 4918 | __all__ = ['bloch_redfield_tensor']
import numpy as np
import os
import time
import types
import warnings
from functools import partial
import scipy.sparse as sp
from qutip.qobj import Qobj, isket
from qutip.states import ket2dm
from qutip.operators import qdiags
from qutip.superoperator import spre, spost, vec2mat, mat2vec, vec2mat_index
from qutip.cy.spconvert import dense2D_to_fastcsr_fmode
from qutip.superoperator import liouvillian
from qutip.cy.spconvert import arr_coo2fast
import qutip.settings as qset
def bloch_redfield_tensor(H, a_ops, spectra_cb=None, c_ops=None,
                          use_secular=True, sec_cutoff=0.1):
    """
    Calculate the Bloch-Redfield tensor for a system given a set of operators
    and corresponding spectral functions that describes the system's coupling
    to its environment.

    .. note::

        This tensor generation requires a time-independent Hamiltonian.

    Parameters
    ----------

    H : :class:`qutip.qobj`
        System Hamiltonian.

    a_ops : list of :class:`qutip.qobj`
        List of system operators that couple to the environment.

    spectra_cb : list of callback functions
        List of callback functions that evaluate the noise power spectrum
        at a given frequency. Deprecated; supply ``[op, spectrum]`` pairs
        in ``a_ops`` instead.

    c_ops : list of :class:`qutip.qobj`
        List of system collapse operators. Defaults to an empty list.

    use_secular : bool
        Flag (True or False) that indicates if the secular approximation
        should be used.

    sec_cutoff : float {0.1}
        Threshold for secular approximation.

    Returns
    -------

    R, kets: :class:`qutip.Qobj`, list of :class:`qutip.Qobj`

        R is the Bloch-Redfield tensor and kets is a list eigenstates of the
        Hamiltonian.

    """
    if spectra_cb is not None:
        warnings.warn("The use of spectra_cb is deprecated.",
                      DeprecationWarning)
        _a_ops = []
        for kk, a in enumerate(a_ops):
            _a_ops.append([a, spectra_cb[kk]])
        a_ops = _a_ops

    # Sanity checks for input parameters
    if not isinstance(H, Qobj):
        raise TypeError("H must be an instance of Qobj")

    for a in a_ops:
        if not isinstance(a[0], Qobj) or not a[0].isherm:
            raise TypeError("Operators in a_ops must be Hermitian Qobj.")

    # Avoid the shared mutable default argument ([]) by defaulting to None
    # and substituting a fresh list here.
    if c_ops is None:
        c_ops = []

    # use the eigenbasis
    evals, ekets = H.eigenstates()

    N = len(evals)
    K = len(a_ops)

    # only Lindblad collapse terms
    if K == 0:
        Heb = qdiags(evals, 0, dims=H.dims)
        L = liouvillian(Heb, c_ops=[c_op.transform(ekets) for c_op in c_ops])
        return L, ekets

    A = np.array([a_ops[k][0].transform(ekets).full() for k in range(K)])
    Jw = np.zeros((K, N, N), dtype=complex)

    # pre-calculate matrix elements and spectral densities
    # W[m,n] = real(evals[m] - evals[n])
    W = np.real(evals[:, np.newaxis] - evals[np.newaxis, :])

    for k in range(K):
        # do explicit loops here in case spectra_cb[k] can not deal with
        # array arguments
        for n in range(N):
            for m in range(N):
                Jw[k, n, m] = a_ops[k][1](W[n, m])

    dw_min = np.abs(W[W.nonzero()]).min()

    # pre-calculate mapping between global index I and system indices a,b
    Iabs = np.empty((N*N, 3), dtype=int)
    for I, Iab in enumerate(Iabs):
        # important: use [:] to change array values, instead of creating
        # new variable Iab
        Iab[0] = I
        Iab[1:] = vec2mat_index(N, I)

    # unitary part + dissipation from c_ops (if given):
    Heb = qdiags(evals, 0, dims=H.dims)
    L = liouvillian(Heb, c_ops=[c_op.transform(ekets) for c_op in c_ops])

    # dissipative part:
    rows = []
    cols = []
    data = []
    for I, a, b in Iabs:
        # only check use_secular once per I
        if use_secular:
            # only loop over those indices J which actually contribute
            Jcds = Iabs[np.where(np.abs(W[a, b] - W[Iabs[:, 1], Iabs[:, 2]])
                                 < dw_min * sec_cutoff)]
        else:
            Jcds = Iabs
        for J, c, d in Jcds:
            elem = 0+0j
            # summed over k, i.e., each operator coupling the system to
            # the environment
            elem += 0.5 * np.sum(A[:, a, c] * A[:, d, b] *
                                 (Jw[:, c, a] + Jw[:, d, b]))
            if b == d:
                #                  sum_{k,n} A[k, a, n] * A[k, n, c] * Jw[k, c, n])
                elem -= 0.5 * np.sum(A[:, a, :] * A[:, :, c] * Jw[:, c, :])
            if a == c:
                #                  sum_{k,n} A[k, d, n] * A[k, n, b] * Jw[k, d, n])
                elem -= 0.5 * np.sum(A[:, d, :] * A[:, :, b] * Jw[:, d, :])
            if elem != 0:
                rows.append(I)
                cols.append(J)
                data.append(elem)

    R = arr_coo2fast(np.array(data, dtype=complex),
                     np.array(rows, dtype=np.int32),
                     np.array(cols, dtype=np.int32), N**2, N**2)

    L.data = L.data + R

    return L, ekets
| bsd-3-clause | ab25ba446fa7c3bf6a311981b0345a88 | 31.143791 | 98 | 0.57259 | 3.203909 | false | false | false | false |
qutip/qutip | qutip/tests/test_partial_transpose.py | 2 | 2627 | """
Unit tests for QuTiP partial transpose functions.
"""
import numpy as np
from numpy.testing import assert_, run_module_suite
from qutip import Qobj, partial_transpose, tensor, rand_dm
from qutip.partial_transpose import _partial_transpose_reference
def test_partial_transpose_bipartite():
    """partial transpose of bipartite systems"""
    rho = Qobj(np.arange(16).reshape(4, 4), dims=[[2, 2], [2, 2]])

    # no transpose: result must equal the input
    rho_pt = partial_transpose(rho, [0, 0])
    assert_(np.abs(np.max(rho_pt.full() - rho.full())) < 1e-12)

    # partial transpose of subsystem 1 only
    rho_pt = partial_transpose(rho, [1, 0])
    expected = np.array([[0, 1, 8, 9],
                         [4, 5, 12, 13],
                         [2, 3, 10, 11],
                         [6, 7, 14, 15]])
    assert_(np.abs(np.max(rho_pt.full() - expected)) < 1e-12)

    # partial transpose of subsystem 2 only
    rho_pt = partial_transpose(rho, [0, 1])
    expected = np.array([[0, 4, 2, 6],
                         [1, 5, 3, 7],
                         [8, 12, 10, 14],
                         [9, 13, 11, 15]])
    assert_(np.abs(np.max(rho_pt.full() - expected)) < 1e-12)

    # full transpose: result must equal the ordinary transpose
    rho_pt = partial_transpose(rho, [1, 1])
    assert_(np.abs(np.max(rho_pt.full() - rho.trans().full())) < 1e-12)
def test_partial_transpose_comparison():
    """partial transpose: comparing sparse and dense implementations"""
    N = 10
    rho = tensor(rand_dm(N, density=0.5), rand_dm(N, density=0.5))

    # partial transpose of system 1
    # (previously this compared rho_pt1 with itself and the comparison
    # expressions were not wrapped in assert_, so nothing was checked)
    rho_pt1 = partial_transpose(rho, [1, 0], method="dense")
    rho_pt2 = partial_transpose(rho, [1, 0], method="sparse")
    assert_(np.abs(np.max(rho_pt1.full() - rho_pt2.full())) < 1e-12)

    # partial transpose of system 2
    rho_pt1 = partial_transpose(rho, [0, 1], method="dense")
    rho_pt2 = partial_transpose(rho, [0, 1], method="sparse")
    assert_(np.abs(np.max(rho_pt1.full() - rho_pt2.full())) < 1e-12)
def test_partial_transpose_randomized():
    """partial transpose: randomized tests on tripartite system"""
    rho = tensor(rand_dm(2, density=1),
                 rand_dm(2, density=1),
                 rand_dm(2, density=1))

    mask = np.random.randint(2, size=3)

    rho_pt_ref = _partial_transpose_reference(rho, mask)

    # Both implementations must match the reference implementation.
    # (Previously these comparison expressions were bare statements with
    # no assert_, so the test could never fail.)
    rho_pt1 = partial_transpose(rho, mask, method="dense")
    assert_(np.abs(np.max(rho_pt1.full() - rho_pt_ref.full())) < 1e-12)

    rho_pt2 = partial_transpose(rho, mask, method="sparse")
    assert_(np.abs(np.max(rho_pt2.full() - rho_pt_ref.full())) < 1e-12)
# Run this module's tests when executed directly as a script.
if __name__ == "__main__":
    run_module_suite()
| bsd-3-clause | 09f2523b85e41bfc07f9aecb93dc96e9 | 32.679487 | 71 | 0.574039 | 3.127381 | false | true | false | false |
qutip/qutip | qutip/continuous_variables.py | 1 | 8376 | """
This module contains a collection functions for calculating continuous variable
quantities from fock-basis representation of the state of multi-mode fields.
"""
__all__ = ['correlation_matrix', 'covariance_matrix',
'correlation_matrix_field', 'correlation_matrix_quadrature',
'wigner_covariance_matrix', 'logarithmic_negativity']
from qutip.expect import expect
import numpy as np
def correlation_matrix(basis, rho=None):
    r"""
    Given a basis set of operators :math:`\{a\}_n`, calculate the correlation
    matrix:

    .. math::

        C_{mn} = \langle a_m a_n \rangle

    Parameters
    ----------
    basis : list
        List of operators that defines the basis for the correlation matrix.
    rho : Qobj
        Density matrix for which to calculate the correlation matrix. If
        `rho` is `None`, then a matrix of correlation matrix operators is
        returned instead of expectation values of those operators.

    Returns
    -------
    corr_mat : ndarray
        A 2-dimensional *array* of correlation values or operators.
    """
    n_ops = len(basis)
    if rho is None:
        # No state supplied: return the matrix of operator products.
        ops = np.empty((n_ops, n_ops), dtype=object)
        for row, op_m in enumerate(basis):
            for col, op_n in enumerate(basis):
                ops[row, col] = op_n * op_m
        return ops
    # State supplied: evaluate the expectation value of each product.
    return np.array([[expect(op_n * op_m, rho)
                      for op_n in basis] for op_m in basis])
def covariance_matrix(basis, rho, symmetrized=True):
    r"""
    Build the covariance matrix of the operators in ``basis`` for the
    state ``rho``.

    With ``symmetrized=True`` (the default),

    .. math::
        V_{mn} = \frac{1}{2}\langle a_m a_n + a_n a_m \rangle -
                 \langle a_m \rangle \langle a_n \rangle

    otherwise the non-symmetrized form
    :math:`\langle a_m a_n \rangle - \langle a_m \rangle \langle a_n \rangle`
    is used.

    Parameters
    ----------
    basis : list
        Operators that define the basis for the covariance matrix.
    rho : Qobj
        Density matrix for which to calculate the covariances.
    symmetrized : bool
        Whether to symmetrize the second moment (default ``True``).

    Returns
    -------
    corr_mat : ndarray
        2D array of covariance values.
    """
    def element(op_a, op_b):
        # <op_a op_b> (optionally symmetrized) minus the product of means.
        if symmetrized:
            second_moment = 0.5 * expect(op_a * op_b + op_b * op_a, rho)
        else:
            second_moment = expect(op_a * op_b, rho)
        return second_moment - expect(op_a, rho) * expect(op_b, rho)

    return np.array([[element(op1, op2) for op1 in basis] for op2 in basis])
def correlation_matrix_field(a1, a2, rho=None):
    """
    Correlation matrix for the two field operators ``a1`` and ``a2`` and
    their adjoints. With a state ``rho`` the expectation values are
    evaluated; without it a matrix of operators is returned.

    Parameters
    ----------
    a1 : Qobj
        Field operator for mode 1.
    a2 : Qobj
        Field operator for mode 2.
    rho : Qobj, optional
        Density matrix for which to calculate the correlations.

    Returns
    -------
    cov_mat : ndarray
        2D array of correlation values (or operators when ``rho`` is None).
    """
    field_basis = [a1, a1.dag(), a2, a2.dag()]
    return correlation_matrix(field_basis, rho)
def correlation_matrix_quadrature(a1, a2, rho=None, g=np.sqrt(2)):
    """
    Quadrature correlation matrix for the field operators ``a1`` and
    ``a2``. With a state ``rho`` the expectation values are evaluated;
    without it a matrix of operators is returned.

    Parameters
    ----------
    a1 : Qobj
        Field operator for mode 1.
    a2 : Qobj
        Field operator for mode 2.
    rho : Qobj, optional
        Density matrix for which to calculate the correlations.
    g : float
        Scaling factor in ``a = 0.5 * g * (x + iy)``; the default
        ``g = sqrt(2)`` corresponds to ``hbar = 1`` via ``hbar = 2 / g**2``.

    Returns
    -------
    corr_mat : ndarray
        2D array of correlation values for the quadratures
        (or operators when ``rho`` is None).
    """
    # Quadrature operators (x1, p1, x2, p2) built from the mode operators.
    quadratures = [
        (a1 + a1.dag()) / g,
        -1j * (a1 - a1.dag()) / g,
        (a2 + a2.dag()) / g,
        -1j * (a2 - a2.dag()) / g,
    ]
    return correlation_matrix(quadratures, rho)
def wigner_covariance_matrix(a1=None, a2=None, R=None, rho=None, g=np.sqrt(2)):
    r"""
    Wigner covariance matrix
    :math:`V_{ij} = \frac{1}{2}(R_{ij} + R_{ji})` for the quadrature vector
    :math:`R = (q_1, p_1, q_2, p_2)^T` of a two-mode field.

    Either supply a precomputed quadrature correlation matrix ``R``
    (containing numbers, or operators together with ``rho``), or supply the
    annihilation operators ``a1`` and ``a2`` together with ``rho``, in which
    case the quadrature covariances are computed from scratch.

    Parameters
    ----------
    a1 : Qobj, optional
        Field operator for mode 1.
    a2 : Qobj, optional
        Field operator for mode 2.
    R : ndarray, optional
        Quadrature correlation matrix.
    rho : Qobj, optional
        Density matrix for which to calculate the covariance matrix.
    g : float
        Scaling factor in ``a = 0.5 * g * (x + iy)``; the default
        ``g = sqrt(2)`` corresponds to ``hbar = 1``.

    Returns
    -------
    cov_mat : ndarray
        2D array of covariance values.
    """
    if R is not None:
        if rho is None:
            # R already contains numerical correlations.
            def entry(i, j):
                return 0.5 * np.real(R[i, j] + R[j, i])
        else:
            # R contains operators: take expectation values first.
            def entry(i, j):
                return 0.5 * np.real(expect(R[i, j] + R[j, i], rho))
        return np.array(
            [[entry(i, j) for i in range(4)] for j in range(4)],
            dtype=np.float64,
        )

    if a1 is None or a2 is None:
        raise ValueError("Must give either field operators (a1 and a2) " +
                         "or a precomputed correlation matrix (R)")
    if rho is None:
        raise ValueError("Must give rho if using field operators " +
                         "(a1 and a2)")

    x1 = (a1 + a1.dag()) / g
    p1 = -1j * (a1 - a1.dag()) / g
    x2 = (a2 + a2.dag()) / g
    p2 = -1j * (a2 - a2.dag()) / g
    return covariance_matrix([x1, p1, x2, p2], rho)
def logarithmic_negativity(V, g=np.sqrt(2)):
    """
    Logarithmic negativity of a two-mode Gaussian state described by the
    symmetrized Wigner covariance matrix ``V``
    (see :func:`qutip.continuous_variables.covariance_matrix`).
    The state must be Gaussian for the result to be meaningful.

    Parameters
    ----------
    V : 2d array
        The 4x4 covariance matrix.
    g : float
        Scaling factor in ``a = 0.5 * g * (x + iy)``; the default
        ``g = sqrt(2)`` corresponds to ``hbar = 1`` via ``hbar = 2 / g**2``.

    Returns
    -------
    N : float
        The logarithmic negativity of the two-mode Gaussian state.
    """
    # Rescale the 2x2 blocks of V back to the hbar = 1 convention.
    A = 0.5 * V[0:2, 0:2] * g ** 2
    B = 0.5 * V[2:4, 2:4] * g ** 2
    C = 0.5 * V[0:2, 2:4] * g ** 2

    sigma = np.linalg.det(A) + np.linalg.det(B) - 2 * np.linalg.det(C)
    # Squared smaller symplectic eigenvalue of the partial transpose.
    nu_sq = sigma / 2 - np.sqrt(sigma ** 2 - 4 * np.linalg.det(V)) / 2
    if nu_sq < 0.0:
        return 0.0
    nu_min = np.sqrt(nu_sq)
    # Negativity is only nonzero when 2 * nu_min < 1.
    return max(0, -np.log(2 * nu_min))
| bsd-3-clause | 66bdd86728d3ed142bb91eeff2c35198 | 31.339768 | 79 | 0.575096 | 3.584082 | false | false | false | false |
qutip/qutip | qutip/krylovsolve.py | 1 | 21299 | __all__ = ["krylovsolve"]
"""
This module provides approximations of the time evolution operator
using small dimensional Krylov subspaces.
"""
from scipy.optimize import root_scalar
from math import ceil
import numpy as np
import warnings
from qutip.expect import expect
from qutip.qobj import Qobj
from qutip.solver import Result, Options
from qutip.ui.progressbar import BaseProgressBar, TextProgressBar
from qutip.sparse import eigh
def krylovsolve(
    H: Qobj,
    psi0: Qobj,
    tlist: np.array,
    krylov_dim: int,
    e_ops=None,
    options=None,
    progress_bar: bool = None,
    sparse: bool = False,
):
    """
    Time evolution of state vectors for time independent Hamiltonians.
    Evolve the state vector ("psi0") finding an approximation for the time
    evolution operator of Hamiltonian ("H") by obtaining the projection of
    the time evolution operator on a set of small dimensional Krylov
    subspaces (m << dim(H)).
    The output is either the state vector or the expectation values of
    supplied operators ("e_ops") at arbitrary points at ("tlist").

    **Additional options**

    Additional options to krylovsolve can be set with the following:
    * "store_states": stores states even though expectation values are
      requested via the "e_ops" argument.
    * "store_final_state": store final state even though expectation values are
      requested via the "e_ops" argument.

    Parameters
    ----------
    H : :class:`qutip.Qobj`
        System Hamiltonian.
    psi0 : :class: `qutip.Qobj`
        Initial state vector (ket).
    tlist : None / *list* / *array*
        List of times on which to evolve the initial state. If None, nothing
        happens but the code won't break.
    krylov_dim: int
        Dimension of Krylov approximation subspaces used for the time
        evolution approximation.
    e_ops : None / list of :class:`qutip.Qobj`
        Single operator or list of operators for which to evaluate
        expectation values.
    options : Options
        Instance of ODE solver options, as well as krylov parameters.
        atol: controls (approximately) the error desired for the final
        solution. (Defaults to 1e-8)
        nsteps: maximum number of krylov's internal number of Lanczos
        iterations. (Defaults to 10000)
    progress_bar : None / BaseProgressBar
        Optional instance of BaseProgressBar, or a subclass thereof, for
        showing the progress of the simulation.
    sparse : bool (default False)
        Use np.array to represent system Hamiltonians. If True, scipy sparse
        arrays are used instead.

    Returns
    -------
    result: :class:`qutip.solver.Result`
        An instance of the class :class:`qutip.solver.Result`, which contains
        either an *array* `result.expect` of expectation values for the times
        `tlist`, or an *array* `result.states` of state vectors corresponding
        to the times `tlist` [if `e_ops` is an empty list].
    """
    # check the physics
    _check_inputs(H, psi0, krylov_dim)

    # check extra inputs
    e_ops, e_ops_dict = _check_e_ops(e_ops)
    pbar = _check_progress_bar(progress_bar)

    # transform inputs type from Qobj to np.ndarray/csr_matrix
    if sparse:
        _H = H.get_data()  # (fast_) csr_matrix
    else:
        _H = H.full().copy()  # np.ndarray

    # work on a normalized copy of the initial state
    _psi = psi0.full().copy()
    _psi = _psi / np.linalg.norm(_psi)

    # create internal variable and output containers
    if options is None:
        options = Options(nsteps=10000)
    krylov_results = Result()
    krylov_results.solver = "krylovsolve"

    # handle particular cases of an empty tlist or single element
    n_tlist_steps = len(tlist)
    if n_tlist_steps < 1:
        return krylov_results

    if n_tlist_steps == 1:  # if tlist has only one element, return it
        krylov_results = particular_tlist_or_happy_breakdown(
            tlist, n_tlist_steps, options, psi0, e_ops, krylov_results, pbar
        )  # this will also raise a warning
        return krylov_results

    tf = tlist[-1]
    t0 = tlist[0]

    # optimization step using Lanczos, then reuse it for the first partition
    dim_m = krylov_dim
    krylov_basis, T_m = lanczos_algorithm(
        _H, _psi, krylov_dim=dim_m, sparse=sparse
    )

    # check if a happy breakdown occurred
    if T_m.shape[0] < krylov_dim + 1:
        if T_m.shape[0] == 1:
            # this means that the state does not evolve in time, it lies in a
            # symmetry of H subspace. Thus, theres no work to be done.
            krylov_results = particular_tlist_or_happy_breakdown(
                tlist,
                n_tlist_steps,
                options,
                psi0,
                e_ops,
                krylov_results,
                pbar,
                happy_breakdown=True,
            )
            return krylov_results
        else:
            # no optimization is required, convergence is guaranteed.
            delta_t = tf - t0
            n_timesteps = 1
    else:
        # calculate optimal number of internal timesteps.
        delta_t = _optimize_lanczos_timestep_size(
            T_m, krylov_basis=krylov_basis, tlist=tlist, options=options
        )
        n_timesteps = int(ceil((tf - t0) / delta_t))

        if n_timesteps >= options.nsteps:
            raise Exception(
                f"Optimization requires a number {n_timesteps} of lanczos iterations, "
                f"which exceeds the defined allowed number {options.nsteps}. This can "
                "be increased via the 'Options.nsteps' property."
            )

    # split tlist into contiguous partitions, one Lanczos restart per partition
    partitions = _make_partitions(tlist=tlist, n_timesteps=n_timesteps)

    if progress_bar:
        pbar.start(len(partitions))

    # update parameters regarding e_ops
    krylov_results, expt_callback, options, n_expt_op = _e_ops_outputs(
        krylov_results, e_ops, n_tlist_steps, options
    )

    # parameters for the lazy iteration evolve tlist
    psi_norm = np.linalg.norm(_psi)
    last_t = t0

    for idx, partition in enumerate(partitions):
        # evolve the current partition starting from the last evolved state;
        # on the first pass the Lanczos decomposition computed above is reused
        evolved_states = _evolve_krylov_tlist(
            H=_H,
            psi0=_psi,
            krylov_dim=dim_m,
            tlist=partition,
            t0=last_t,
            psi_norm=psi_norm,
            krylov_basis=krylov_basis,
            T_m=T_m,
            sparse=sparse,
        )

        if idx == 0:
            # force a fresh Lanczos iteration for subsequent partitions
            krylov_basis = None
            T_m = None
            t_idx = 0

        _psi = evolved_states[-1]
        psi_norm = np.linalg.norm(_psi)
        last_t = partition[-1]

        # apply qobj to each evolved state, remove repeated tail elements
        qobj_evolved_states = [
            Qobj(state, dims=psi0.dims) for state in evolved_states[1:-1]
        ]

        krylov_results = _expectation_values(
            e_ops,
            n_expt_op,
            expt_callback,
            krylov_results,
            qobj_evolved_states,
            partitions,
            idx,
            t_idx,
            options,
        )

        t_idx += len(partition[1:-1])

        pbar.update(idx)

    pbar.finished()

    # when e_ops was given as a dict, key the expectation values by name
    if e_ops_dict:
        krylov_results.expect = {
            e: krylov_results.expect[n]
            for n, e in enumerate(e_ops_dict.keys())
        }

    return krylov_results
def _expectation_values(
    e_ops,
    n_expt_op,
    expt_callback,
    res,
    evolved_states,
    partitions,
    idx,
    t_idx,
    options,
):
    """
    Store the evolved states and/or their expectation values for a single
    partition of the time list.

    ``idx`` is the index of the current partition and ``t_idx`` the global
    index (into the original tlist) of the first state in
    ``evolved_states``. NOTE(review): assumes ``evolved_states`` has the
    same length as ``partitions[idx][1:-1]`` -- confirm against the caller.
    """
    if options.store_states:
        res.states += evolved_states

    for t, state in zip(
        range(t_idx, t_idx + len(partitions[idx][1:-1])), evolved_states
    ):
        if expt_callback:
            # use callback method
            res.expect.append(e_ops(t, state))

        for m in range(n_expt_op):
            op = e_ops[m]
            # callables in the e_ops list are invoked as op(t, state)
            if not isinstance(op, Qobj) and callable(op):
                res.expect[m][t] = op(t, state)
                continue
            res.expect[m][t] = expect(op, state)

    # keep only the final state when that alone was requested
    if (
        idx == len(partitions) - 1
        and options.store_final_state
        and not options.store_states
    ):
        res.states = [evolved_states[-1]]

    return res
def lanczos_algorithm(
    H,
    psi: np.ndarray,
    krylov_dim: int,
    sparse: bool = False,
):
    """
    Build an orthonormal basis of the Krylov subspace spanned by
    {psi, H psi, ..., H^(krylov_dim) psi} via the Lanczos iteration,
    together with the projected tridiagonal matrix.

    Parameters
    ----------
    H : np.ndarray or csr_matrix
        System Hamiltonian. Dense matrices should be np.ndarray; sparse
        ones should be scipy csr_matrix.
    psi : np.ndarray
        State from which the Krylov subspace is generated.
    krylov_dim : int
        The subspace has dimension (krylov_dim + 1) unless a happy
        breakdown truncates it earlier.
    sparse : bool, optional
        Whether sparse matrix products are used (default False).

    Returns
    -------
    basis : np.ndarray
        Rows are the orthonormal Lanczos basis vectors.
    tridiag : np.ndarray
        Tridiagonal projection of H onto the Krylov subspace.
    """
    hilbert_dim = psi.shape[0]
    basis = np.zeros((krylov_dim + 1, hilbert_dim), dtype=complex)
    tridiag = np.zeros((krylov_dim + 1, krylov_dim + 1), dtype=complex)

    # First basis vector is the (flattened) input state.
    basis[0, :] = psi.squeeze()
    candidate = H.dot(basis[0, :])
    diag = np.vdot(candidate, basis[0, :])
    residual = candidate - diag * basis[0, :]
    tridiag[0, 0] = diag

    for col in range(1, krylov_dim + 1):
        offdiag = np.linalg.norm(residual)
        if offdiag < 1e-7:
            # Happy breakdown: the Krylov subspace closed early; return the
            # truncated basis and projection.
            return basis[0:col, :], tridiag[0:col, 0:col]
        basis[col, :] = residual / offdiag
        candidate = H.dot(basis[col, :])
        diag = np.vdot(candidate, basis[col, :])
        # Three-term recurrence: orthogonalize against the two previous
        # basis vectors.
        residual = (
            candidate - diag * basis[col, :] - offdiag * basis[col - 1, :]
        )
        tridiag[col, col] = diag
        tridiag[col, col - 1] = offdiag
        tridiag[col - 1, col] = offdiag

    return basis, tridiag
def _evolve(t0: float, krylov_basis: np.ndarray, T_m: np.ndarray):
    """
    Return a function t -> U(t - t0) psi0_k, where psi0_k is the first
    Krylov basis vector, built from the eigendecomposition of the
    projected tridiagonal matrix.

    Parameters
    ----------
    t0 : float
        Initial time of the evolution.
    krylov_basis : np.ndarray
        Rows are the Krylov basis vectors.
    T_m : np.ndarray
        Tridiagonal projection of the Hamiltonian (from Lanczos).

    Returns
    -------
    callable
        Maps a time ``t`` to the evolved state vector.
    """
    evals, evecs = eigh(T_m)
    # Map from eigenbasis of T_m back to the full Hilbert space.
    back_projector = np.matmul(krylov_basis.T, evecs)
    # Coefficients of the initial Krylov vector in the eigenbasis.
    initial_coeffs = evecs.conj().T[:, 0]

    def time_evolution(t):
        phases = np.exp(-1j * (t - t0) * evals)
        return np.matmul(back_projector, np.multiply(phases, initial_coeffs))

    return time_evolution
def _evolve_krylov_tlist(
    H,
    psi0: np.ndarray,
    krylov_dim: int,
    tlist: list,
    t0: float,
    psi_norm: float = None,
    krylov_basis: np.array = None,
    T_m: np.array = None,
    sparse: bool = False,
):
    """
    Krylov-approximated evolution of ``psi0`` under ``H`` evaluated at each
    time in ``tlist``.

    Parameters
    ----------
    H : np.ndarray or csr_matrix
        System Hamiltonian.
    psi0 : np.ndarray
        Initial state vector.
    krylov_dim : int
        Dimension of the Krylov approximation subspace.
    tlist : list
        Times at which to evaluate the evolution.
    t0 : float
        Initial time of the evolution.
    psi_norm : float, optional
        Norm-2 of ``psi0``; computed if not supplied.
    krylov_basis, T_m : np.ndarray, optional
        Precomputed Lanczos decomposition; when either is missing a fresh
        Lanczos iteration is performed.
    sparse : bool, optional
        Whether sparse matrix products are used.

    Returns
    -------
    list of np.ndarray
        Evolved states, one per entry of ``tlist``.
    """
    norm = np.linalg.norm(psi0) if psi_norm is None else psi_norm
    # Avoid an unnecessary division when the state is already normalized.
    psi = psi0 if norm == 1 else psi0 / norm

    if krylov_basis is None or T_m is None:
        krylov_basis, T_m = lanczos_algorithm(
            H=H, psi=psi, krylov_dim=krylov_dim, sparse=sparse
        )

    propagate = _evolve(t0, krylov_basis, T_m)
    return [propagate(t) for t in tlist]
# ----------------------------------------------------------------------
# Auxiliar functions
def _check_inputs(H, psi0, krylov_dim):
    """Check that the inputs 'H' and 'psi0' have the correct structures.

    Raises
    ------
    TypeError
        If ``H`` is not a Hermitian Qobj operator or ``psi0`` is not a
        ket Qobj.
    ValueError
        If the shapes/dimensions are inconsistent, or ``krylov_dim``
        exceeds the Hilbert space dimension.
    """
    if not isinstance(H, Qobj):
        raise TypeError(
            "krylovsolve currently supports Hamiltonian Qobj operators only"
        )
    if not H.isherm:
        # BUG FIX: corrected the misspelled error message
        # ("hermician" -> "hermitian").
        raise TypeError("Hamiltonian 'H' must be hermitian.")
    if not isinstance(psi0, Qobj):
        raise TypeError("'psi0' must be a Qobj.")
    if not psi0.isket:
        raise TypeError("Initial state must be a ket Qobj.")
    if not ((len(H.shape) == 2) and (H.shape[0] == H.shape[1])):
        raise ValueError("the Hamiltonian must be 2-dimensional square Qobj.")
    if not (psi0.dims[0] == H.dims[0]):
        raise ValueError(
            "'psi0' and the Hamiltonian must share the same dimension."
        )
    if not (H.shape[0] >= krylov_dim):
        raise ValueError(
            "the Hamiltonian dimension must be greater or equal to the maximum"
            " allowed krylov dimension 'krylov_dim'."
        )
def _check_e_ops(e_ops):
    """
    Normalize ``e_ops`` into a list of operators/callables plus an optional
    name -> operator dict (only populated when ``e_ops`` was a dict).
    """
    if e_ops is None:
        return [], None
    if isinstance(e_ops, Qobj):
        # A single operator becomes a one-element list.
        return [e_ops], None
    if isinstance(e_ops, dict):
        # Keep the original dict so results can later be keyed by name.
        return list(e_ops.values()), e_ops
    return e_ops, None
def _check_progress_bar(progress_bar):
    """
    Normalize the ``progress_bar`` argument into a progress bar instance.

    BUG FIX: the original only assigned ``pbar`` for ``None`` and ``True``,
    so passing an actual ``BaseProgressBar`` instance (as documented in
    ``krylovsolve``) -- or ``False`` -- raised ``UnboundLocalError``.
    """
    if progress_bar is None or progress_bar is False:
        # No-op progress bar.
        return BaseProgressBar()
    if progress_bar is True:
        return TextProgressBar()
    # Assume the caller supplied a BaseProgressBar (sub)instance.
    return progress_bar
def particular_tlist_or_happy_breakdown(
    tlist,
    n_tlist_steps,
    options,
    psi0,
    e_ops,
    res,
    progress_bar,
    happy_breakdown=False,
):
    """Deals with the problem when 'tlist' contains a single element, where
    that same ket is returned and evaluated at 'e_ops', if provided.

    Also used when a happy breakdown leaves the state in a one-dimensional
    invariant subspace (``happy_breakdown=True``): the initial state and its
    t=0 expectation values are then replicated over the whole ``tlist``.
    """
    if len(tlist) == 1:
        # BUG FIX: this guard used to be ``len(tlist) == 0`` and therefore
        # never fired; the caller invokes this helper precisely when tlist
        # has a single element and documents that a warning is raised.
        warnings.warn(
            "Input 'tlist' contains a single element. If 'e_ops' were provided"
            ", return its corresponding expectation values at 'psi0', else "
            "return 'psi0'."
        )
    progress_bar.start(1)

    res, expt_callback, options, n_expt_op = _e_ops_outputs(
        res, e_ops, n_tlist_steps, options
    )

    if options.store_states:
        res.states = [psi0]

    # expectation values at t = 0 (reused by _happy_breakdown below)
    e_0 = None
    if expt_callback:
        # use callback method
        e_0 = e_ops(0, psi0)
        res.expect.append(e_0)

    e_m_0 = []
    for m in range(n_expt_op):
        op = e_ops[m]
        if not isinstance(op, Qobj) and callable(op):
            e_m_0.append(op(0, psi0))
            res.expect[m][0] = e_m_0[m]
            continue
        e_m_0.append(expect(op, psi0))
        res.expect[m][0] = e_m_0[m]

    if happy_breakdown:
        res = _happy_breakdown(
            tlist,
            options,
            res,
            psi0,
            expt_callback,
            e_0,
            n_expt_op,
            e_ops,
            e_m_0,
        )

    if (options.store_final_state) and (not options.store_states):
        res.states = [psi0]

    progress_bar.update(1)
    progress_bar.finished()
    return res
def _happy_breakdown(
tlist, options, res, psi0, expt_callback, e_0, n_expt_op, e_ops, e_m_0
):
"""
Dummy evolves the system if a happy breakdown of an eigenstate occurs.
"""
for i in range(1, len(tlist)):
if options.store_states:
res.states.append(psi0)
if expt_callback:
res.expect.append(e_0)
for m in range(n_expt_op):
op = e_ops[m]
res.expect[m][i] = e_m_0[m]
return res
def _optimize_lanczos_timestep_size(T, krylov_basis, tlist, options):
    """
    Solves the equation defined to optimize the number of Lanczos
    iterations to be performed inside Krylov's algorithm.

    Returns the internal step size ``delta_t`` for which the estimated
    Krylov truncation error over [tlist[0], tlist[-1]] meets
    ``options.atol``.
    """
    # f(t) is (log error + log #steps - log tolerance); roots of f give the
    # largest admissible step size.
    f = _lanczos_error_equation_to_optimize_delta_t(
        T,
        krylov_basis=krylov_basis,
        t0=tlist[0],
        tf=tlist[-1],
        target_tolerance=options.atol,
    )

    # To avoid the singularity at t0, we add a small epsilon value
    t_min = (tlist[-1] - tlist[0]) / options.nsteps + tlist[0]
    bracket = [t_min, tlist[-1]]

    # Negative at both bracket ends: even a single step over the whole
    # interval meets the tolerance.
    if (np.sign(f(bracket[0])) == -1) and (np.sign(f(bracket[-1])) == -1):
        delta_t = tlist[-1] - tlist[0]
        return delta_t
    # Positive at both ends: no admissible step exists within the allowed
    # number of iterations.
    elif (np.sign(f(bracket[0])) == 1) and (np.sign(f(bracket[-1])) == 1):
        raise ValueError(
            "No solution exists with the given combination of parameters 'krylov_dim', "
            "tolerance = 'options.atol', maximum number allowed of krylov internal "
            "partitions = 'options.nsteps' and 'tlist'. Try reducing the tolerance, or "
            "increasing 'krylov_dim'. If nothing works, then a deeper analysis of the "
            "problem is recommended."
        )
    else:
        # Sign change inside the bracket: locate the root with Brent's method.
        sol = root_scalar(f=f, bracket=bracket, method="brentq", xtol=options.atol)
        if sol.converged:
            delta_t = sol.root
            return delta_t
        else:
            raise Exception(
                "Method did not converge, try increasing 'krylov_dim', "
                "taking a lesser final time 'tlist[-1]' or decreasing the "
                "tolerance via Options().atol. "
                "If nothing works, this problem might not be suitable for "
                "Krylov or a deeper analysis might be required."
            )
def _lanczos_error_equation_to_optimize_delta_t(
    T, krylov_basis, t0, tf, target_tolerance
):
    """
    Function to optimize in order to obtain the optimal number of
    Lanczos algorithm iterations, governed by the optimal timestep size
    between Lanczos iterations.

    The returned ``f(t)`` compares the evolution computed in the full
    m-dimensional Krylov space against the (m-1)-dimensional one; their
    difference estimates the local truncation error of one step of size
    ``t - t0``.
    """
    # Eigendecomposition in the full Krylov space.
    eigenvalues1, eigenvectors1 = eigh(T[0:, 0:])
    U1 = np.matmul(krylov_basis[0:, 0:].T, eigenvectors1)
    e01 = eigenvectors1.conj().T[:, 0]

    # Same decomposition with one basis vector removed.
    eigenvalues2, eigenvectors2 = eigh(T[0:-1, 0: T.shape[1] - 1])
    U2 = np.matmul(krylov_basis[0:-1, :].T, eigenvectors2)
    e02 = eigenvectors2.conj().T[:, 0]

    def f(t):
        delta_t = -1j * (t - t0)

        aux1 = np.multiply(np.exp(delta_t * eigenvalues1), e01)
        psi1 = np.matmul(U1, aux1)

        aux2 = np.multiply(np.exp(delta_t * eigenvalues2), e02)
        psi2 = np.matmul(U2, aux2)

        error = np.linalg.norm(psi1 - psi2)

        # Scale the single-step error by the number of steps of size
        # (t - t0) needed to span [t0, tf] (at least one).
        steps = max(1, (tf - t0) // (t - t0))
        return np.log10(error) + np.log10(steps) - np.log10(target_tolerance)

    return f
def _make_partitions(tlist, n_timesteps):
"""Generates an internal 'partitions' list of np.arrays to iterate Lanczos
algorithms on each of them, based on 'tlist' and the optimized number of
iterations 'n_timesteps'.
"""
_tlist = np.copy(tlist)
if n_timesteps == 1:
_tlist = np.insert(_tlist, 0, tlist[0])
_tlist = np.append(_tlist, tlist[-1])
partitions = [_tlist]
return partitions
n_timesteps += 1
krylov_tlist = np.linspace(tlist[0], tlist[-1], n_timesteps)
krylov_partitions = [
np.array(krylov_tlist[i: i + 2]) for i in range(n_timesteps - 1)
]
partitions = []
for krylov_partition in krylov_partitions:
start = krylov_partition[0]
end = krylov_partition[-1]
condition = _tlist <= end
partitions.append([start] + _tlist[condition].tolist() + [end])
_tlist = _tlist[~condition]
return partitions
def _e_ops_outputs(krylov_results, e_ops, n_tlist_steps, opt):
krylov_results.expect = []
if callable(e_ops):
n_expt_op = 0
expt_callback = True
krylov_results.num_expect = 1
elif isinstance(e_ops, list):
n_expt_op = len(e_ops)
expt_callback = False
krylov_results.num_expect = n_expt_op
if n_expt_op == 0:
# fall back on storing states
opt.store_states = True
else:
for op in e_ops:
if not isinstance(op, Qobj) and callable(op):
krylov_results.expect.append(
np.zeros(n_tlist_steps, dtype=complex)
)
continue
if op.isherm:
krylov_results.expect.append(np.zeros(n_tlist_steps))
else:
krylov_results.expect.append(
np.zeros(n_tlist_steps, dtype=complex)
)
else:
raise TypeError("Expectation parameter must be a list or a function")
return krylov_results, expt_callback, opt, n_expt_op
| bsd-3-clause | 2d64a0984234abb40b528b56e82920a2 | 28.664345 | 88 | 0.581624 | 3.428686 | false | false | false | false |
qutip/qutip | qutip/matplotlib_utilities.py | 2 | 5460 | """
This module contains utility functions that enhance Matplotlib
in one way or another.
"""
__all__ = ['wigner_cmap', 'MidpointNorm', 'complex_phase_cmap']
import numpy as np
try:
    import matplotlib as mpl
    from matplotlib import cm
    from matplotlib.colors import (Normalize, ColorConverter)
except ImportError:
    # BUG FIX: this was a bare ``except:``, which would also swallow
    # KeyboardInterrupt/SystemExit; only a failed import should be
    # tolerated. matplotlib is optional -- provide a minimal Normalize
    # stand-in so MidpointNorm below can still be defined (it becomes
    # usable once matplotlib is installed).
    class Normalize(object):
        def __init__(self, vmin=None, vmax=None, clip=False):
            pass
def wigner_cmap(W, levels=1024, shift=0, max_color='#09224F',
                mid_color='#FFFFFF', min_color='#530017',
                neg_color='#FF97D4', invert=False):
    """A custom colormap that emphasizes negative values by creating a
    nonlinear colormap.

    Parameters
    ----------
    W : array
        Wigner function array, or any array.
    levels : int
        Number of color levels to create.
    shift : float
        Shifts the value at which Wigner elements are emphasized.
        This parameter should typically be negative and small (i.e -1e-5).
    max_color : str
        String for color corresponding to maximum value of data. Accepts
        any string format compatible with the Matplotlib.colors.ColorConverter.
    mid_color : str
        Color corresponding to zero values. Accepts any string format
        compatible with the Matplotlib.colors.ColorConverter.
    min_color : str
        Color corresponding to minimum data values. Accepts any string format
        compatible with the Matplotlib.colors.ColorConverter.
    neg_color : str
        Color that starts highlighting negative values. Accepts any string
        format compatible with the Matplotlib.colors.ColorConverter.
    invert : bool
        Invert the color scheme for negative values so that smaller negative
        values have darker color.

    Returns
    -------
    Returns a Matplotlib colormap instance for use in plotting.

    Notes
    -----
    The 'shift' parameter allows you to vary where the colormap begins
    to highlight negative colors. This is beneficial in cases where there
    are small negative Wigner elements due to numerical round-off and/or
    truncation.
    """
    cc = ColorConverter()
    max_color = np.array(cc.to_rgba(max_color), dtype=float)
    mid_color = np.array(cc.to_rgba(mid_color), dtype=float)
    if invert:
        # BUG FIX: the original converted ``min_color`` *after* overwriting
        # it with the neg_color RGBA, so both ended up identical and the
        # documented swap never happened. Convert both from the original
        # string arguments, then swap their roles.
        min_color, neg_color = (
            np.array(cc.to_rgba(neg_color), dtype=float),
            np.array(cc.to_rgba(min_color), dtype=float),
        )
    else:
        min_color = np.array(cc.to_rgba(min_color), dtype=float)
        neg_color = np.array(cc.to_rgba(neg_color), dtype=float)
    # get min and max values from Wigner function
    bounds = [W.min(), W.max()]
    # create empty array for RGBA colors
    adjust_RGBA = np.hstack((np.zeros((levels, 3)), np.ones((levels, 1))))
    # index of the color level at which the (shifted) data crosses zero
    zero_pos = int(np.round(levels * np.abs(shift - bounds[0])
                            / (bounds[1] - bounds[0])))
    num_pos = levels - zero_pos
    num_neg = zero_pos - 1
    # set zero values to mid_color
    adjust_RGBA[zero_pos] = mid_color
    # interpolate colors: min -> neg below zero_pos, mid -> max above it
    for k in range(0, levels):
        if k < zero_pos:
            interp = k / (num_neg + 1.0)
            adjust_RGBA[k][0:3] = (1.0 - interp) * \
                min_color[0:3] + interp * neg_color[0:3]
        elif k > zero_pos:
            interp = (k - zero_pos) / (num_pos + 1.0)
            adjust_RGBA[k][0:3] = (1.0 - interp) * \
                mid_color[0:3] + interp * max_color[0:3]
    # create colormap
    wig_cmap = mpl.colors.LinearSegmentedColormap.from_list('wigner_cmap',
                                                            adjust_RGBA,
                                                            N=levels)
    return wig_cmap
def complex_phase_cmap():
    """
    Create a cyclic colormap for representing the phase of complex variables

    Returns
    -------
    cmap :
        A matplotlib linear segmented colormap.
    """
    # Each channel is a tuple of (x, y0, y1) control points. The ramps are
    # chosen so the map is cyclic: the color at x=0 equals the color at x=1.
    cdict = {'blue': ((0.00, 0.0, 0.0),
                      (0.25, 0.0, 0.0),
                      (0.50, 1.0, 1.0),
                      (0.75, 1.0, 1.0),
                      (1.00, 0.0, 0.0)),
             'green': ((0.00, 0.0, 0.0),
                       (0.25, 1.0, 1.0),
                       (0.50, 0.0, 0.0),
                       (0.75, 1.0, 1.0),
                       (1.00, 0.0, 0.0)),
             'red': ((0.00, 1.0, 1.0),
                     (0.25, 0.5, 0.5),
                     (0.50, 0.0, 0.0),
                     (0.75, 0.0, 0.0),
                     (1.00, 1.0, 1.0))}

    cmap = mpl.colors.LinearSegmentedColormap('phase_colormap', cdict, 256)
    return cmap
class MidpointNorm(Normalize):
    """Colormap normalization centered about a configurable midpoint.

    Parameters
    ----------
    midpoint : float, optional
        Value mapped to the center (0.5) of the colormap. Default 0.
    vmin : float, optional
        Minimal value for the colormap; derived from data by default.
    vmax : float, optional
        Maximal value for the colormap; derived from data by default.

    Returns
    -------
    A Matplotlib colormap normalization usable with any colormap.
    """

    def __init__(self, midpoint=0, vmin=None, vmax=None, clip=False):
        self.midpoint = midpoint
        super().__init__(vmin, vmax, clip)

    def __call__(self, value, clip=None):
        # Piecewise-linear map: vmin -> 0, midpoint -> 0.5, vmax -> 1.
        anchors = [self.vmin, self.midpoint, self.vmax]
        targets = [0, 0.5, 1]
        return np.ma.masked_array(np.interp(value, anchors, targets))
| bsd-3-clause | 735cdb8419d763ae5da8bb71e0e4a9a4 | 34.454545 | 79 | 0.574725 | 3.721881 | false | false | false | false |
mitodl/open-discussions | interactions/migrations/0001_add_content_type_interactions.py | 1 | 1276 | # Generated by Django 2.2.9 on 2020-02-05 16:56
from django.db import migrations, models
import django.db.models.deletion
import open_discussions.utils
class Migration(migrations.Migration):
    """Auto-generated initial migration: create ContentTypeInteraction."""

    initial = True

    dependencies = [("contenttypes", "0002_remove_content_type_name")]

    operations = [
        migrations.CreateModel(
            name="ContentTypeInteraction",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                # free-form interaction label; allowed values not visible
                # here -- presumably defined by the application layer
                ("interaction_type", models.CharField(max_length=30)),
                # together with content_type this forms a generic
                # (content_type, content_id) reference to any model instance
                ("content_id", models.PositiveIntegerField()),
                (
                    "recorded_on",
                    models.DateTimeField(default=open_discussions.utils.now_in_utc),
                ),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="contenttypes.ContentType",
                    ),
                ),
            ],
        )
    ]
| bsd-3-clause | 1ae64a0736adc70a5dd94cf18a966ee2 | 29.380952 | 84 | 0.454545 | 5.338912 | false | false | false | false |
mitodl/open-discussions | channels/management/commands/backpopulate_posts.py | 1 | 1955 | """Management command for populating posts and comments from reddit"""
import sys
import base36
from django.core.management import BaseCommand
from channels import tasks
class Command(BaseCommand):
    """Populate posts and comments from reddit"""

    help = "Populate posts and comments from reddit"

    def add_arguments(self, parser):
        """Register the repeatable ``--post`` option (base36 post ids)."""
        parser.add_argument("--post", nargs="?", action="append")

    def handle(self, *args, **options):
        """Populate posts and comments from reddit"""
        if options["post"]:
            # Only the requested posts; ids arrive base36-encoded.
            task = tasks.populate_posts_and_comments.delay(
                [base36.loads(post_id) for post_id in options["post"]]
            )
        else:
            task = tasks.populate_all_posts_and_comments.delay()

        self.stdout.write(
            "Started celery task {task} to backpopulate posts and comments".format(
                task=task
            )
        )
        self.stdout.write("Waiting on task...")
        results = task.get()
        # Results will look like this:
        # results = {
        #     'posts': 1734,
        #     'comments': 3547,
        #     "failures": [
        #         {"thing_type": "comment", "thing_id": "c4", "reason": "errors happen"},
        #         {"thing_type": "post", "thing_id": "b9i", "reason": "more than you want them to"}
        #     ]
        # }
        self.stdout.write("Successes:")
        self.stdout.write(f"Posts: {results['posts']}")
        self.stdout.write(f"Comments: {results['comments']}")

        failures = results["failures"]
        if failures:
            self.stdout.write("")
            self.stdout.write("Failures:")
            # FIX: pad the header to the same widths as the data rows so
            # the columns actually line up.
            self.stdout.write(f"{'thing_type':<12} {'thing_id':<10} reason")
            # FIX: iterate the ``failures`` local bound above instead of
            # re-reading results["failures"].
            for failure in failures:
                self.stdout.write(
                    f"{failure['thing_type']:<12} {failure['thing_id']:<10} {failure['reason']}"
                )
            # Non-zero exit so callers/CI can detect partial failure.
            sys.exit(1)
| bsd-3-clause | 4ff59240b5028b88b757b20b20c1baae | 33.910714 | 99 | 0.546292 | 4.089958 | false | false | false | false |
mitodl/open-discussions | open_discussions/models.py | 1 | 1660 | """
Classes related to models for open_discussions
"""
from django.db.models import DateTimeField, Model
from django.db.models.query import QuerySet
from open_discussions.utils import now_in_utc
class TimestampedModelQuerySet(QuerySet):
    """
    QuerySet that keeps ``updated_on`` accurate for bulk updates.
    """

    def update(self, **kwargs):
        """
        Set ``updated_on`` on bulk ``.update()`` calls. Necessary because
        ``.update()`` runs at the database level without calling ``.save()``
        on each instance, so ``auto_now`` never fires.
        """
        if "updated_on" not in kwargs:
            # Respect an explicitly supplied timestamp; otherwise stamp now.
            kwargs = dict(kwargs, updated_on=now_in_utc())
        return super().update(**kwargs)
class TimestampedModel(Model):
    """
    Base model for create/update timestamps
    """

    # Manager whose .update() keeps updated_on accurate (see queryset above)
    objects = TimestampedModelQuerySet.as_manager()
    # Both timestamps are maintained by Django on save(); stored in UTC
    created_on = DateTimeField(auto_now_add=True)  # UTC
    updated_on = DateTimeField(auto_now=True)  # UTC

    class Meta:
        abstract = True
class NoDefaultTimestampedModel(TimestampedModel):
    """
    This model is an alternative for TimestampedModel with one
    important difference: it doesn't specify `auto_now` and `auto_now_add`.
    This allows us to pass in our own values without django overriding them.

    You'd typically use this model when backpopulating data from a source that
    already has values for these fields and then switch to TimestampedModel
    after existing data has been backpopulated.
    """

    # plain defaults (not auto_now*) so explicit values are preserved
    created_on = DateTimeField(default=now_in_utc)
    updated_on = DateTimeField(default=now_in_utc)

    class Meta:
        abstract = True
| bsd-3-clause | 39117d2147e8986fd92ebeb6e7cd1f8b | 30.320755 | 91 | 0.695181 | 4.300518 | false | false | false | false |
mitodl/open-discussions | course_catalog/migrations/0082_enrollment_updates.py | 1 | 1547 | # Generated by Django 2.2.13 on 2021-02-28 00:55
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """
    Auto-generated migration: links Enrollment to Course/LearningResourceRun
    and adds the external run id column used for enrollment matching.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("course_catalog", "0081_enrollment"),
    ]

    operations = [
        # Nullable FK from Enrollment to Course
        migrations.AddField(
            model_name="enrollment",
            name="course",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="enrollments",
                to="course_catalog.Course",
            ),
        ),
        # Run id as it appears in the external enrollments table
        # (presumably an upstream data source — confirm against the loader)
        migrations.AddField(
            model_name="enrollment",
            name="enrollments_table_run_id",
            field=models.CharField(blank=True, max_length=256, null=True),
        ),
        # Relax enrollment_timestamp to allow NULL
        migrations.AlterField(
            model_name="enrollment",
            name="enrollment_timestamp",
            field=models.DateTimeField(null=True),
        ),
        # Relax the run FK to allow NULL
        migrations.AlterField(
            model_name="enrollment",
            name="run",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="enrollments",
                to="course_catalog.LearningResourceRun",
            ),
        ),
        # One enrollment per (user, external run id)
        migrations.AlterUniqueTogether(
            name="enrollment", unique_together={("user", "enrollments_table_run_id")}
        ),
    ]
mitodl/open-discussions | fixtures/aws.py | 1 | 2903 | """Fixtures for AWS"""
# pylint: disable=redefined-outer-name
import logging
from types import SimpleNamespace
import boto3
import pytest
from moto import mock_s3
@pytest.fixture(autouse=True)
def silence_s3_logging():
    """Raise the botocore log level so only S3 errors are shown"""
    botocore_logger = logging.getLogger("botocore")
    botocore_logger.setLevel(logging.ERROR)
@pytest.fixture
def mock_s3_fixture():
    """Mock the S3 fixture for the duration of the test"""
    # moto intercepts boto3 calls so no real AWS requests are made
    with mock_s3():
        yield
@pytest.fixture
def aws_settings(settings):
    """Default AWS test settings"""
    # Fake credentials: tests must never reach real AWS
    for attr_name, attr_value in (
        ("AWS_ACCESS_KEY_ID", "aws_id"),
        ("AWS_SECRET_ACCESS_KEY", "aws_secret"),
    ):
        setattr(settings, attr_name, attr_value)
    return settings
@pytest.fixture(autouse=True)
def ocw_aws_settings(aws_settings):
    """Default OCW test settings"""
    # impossible bucket name so accidental real-AWS access fails loudly
    aws_settings.OCW_LEARNING_COURSE_BUCKET_NAME = "test-bucket"
    return aws_settings
@pytest.fixture(autouse=True)
def mock_ocw_learning_bucket(
    ocw_aws_settings, mock_s3_fixture
):  # pylint: disable=unused-argument
    """Mock OCW learning bucket"""
    s3_resource = boto3.resource(
        "s3",
        aws_access_key_id=ocw_aws_settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=ocw_aws_settings.AWS_SECRET_ACCESS_KEY,
    )
    created_bucket = s3_resource.create_bucket(
        Bucket=ocw_aws_settings.OCW_LEARNING_COURSE_BUCKET_NAME
    )
    yield SimpleNamespace(s3=s3_resource, bucket=created_bucket)
@pytest.fixture(autouse=True)
def xpro_aws_settings(aws_settings):
    """Default xPRO test settings"""
    # impossible bucket name so accidental real-AWS access fails loudly
    aws_settings.XPRO_LEARNING_COURSE_BUCKET_NAME = "test-xpro-bucket"
    return aws_settings
@pytest.fixture(autouse=True)
def mitxonline_aws_settings(aws_settings):
    """Default MITx Online test settings"""
    # impossible bucket name so accidental real-AWS access fails loudly
    aws_settings.MITX_ONLINE_LEARNING_COURSE_BUCKET_NAME = "test-mitxonline-bucket"
    return aws_settings
@pytest.fixture(autouse=True)
def mock_xpro_learning_bucket(
    xpro_aws_settings, mock_s3_fixture
):  # pylint: disable=unused-argument
    """Mock xPRO learning bucket"""
    s3_resource = boto3.resource(
        "s3",
        aws_access_key_id=xpro_aws_settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=xpro_aws_settings.AWS_SECRET_ACCESS_KEY,
    )
    created_bucket = s3_resource.create_bucket(
        Bucket=xpro_aws_settings.XPRO_LEARNING_COURSE_BUCKET_NAME
    )
    yield SimpleNamespace(s3=s3_resource, bucket=created_bucket)
@pytest.fixture(autouse=True)
def mock_mitxonline_learning_bucket(
    mitxonline_aws_settings, mock_s3_fixture
):  # pylint: disable=unused-argument
    """Mock MITx Online learning bucket"""
    s3_resource = boto3.resource(
        "s3",
        aws_access_key_id=mitxonline_aws_settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=mitxonline_aws_settings.AWS_SECRET_ACCESS_KEY,
    )
    created_bucket = s3_resource.create_bucket(
        Bucket=mitxonline_aws_settings.MITX_ONLINE_LEARNING_COURSE_BUCKET_NAME
    )
    yield SimpleNamespace(s3=s3_resource, bucket=created_bucket)
mitodl/open-discussions | discussions/factories.py | 1 | 1919 | """Discussions factories"""
import factory
from factory.django import DjangoModelFactory
from factory.fuzzy import FuzzyChoice
from channels.factories.utils import channel_name
from discussions import api
from discussions.constants import ChannelTypes
from discussions.models import Channel
class ChannelFactory(DjangoModelFactory):
    """Factory for a channels.models.Channel object"""

    name = factory.LazyAttributeSequence(channel_name)
    title = factory.Faker("text", max_nb_chars=50)
    channel_type = FuzzyChoice(ChannelTypes.values())
    # Three faux "text" nodes for the channel's about widget list
    about = factory.List(
        [
            factory.Dict({"node": "text", "value": factory.Faker("text")}),
            factory.Dict({"node": "text", "value": factory.Faker("text")}),
            factory.Dict({"node": "text", "value": factory.Faker("text")}),
        ]
    )
    # Auth groups named after the channel; string path avoids a circular import
    moderator_group = factory.SubFactory(
        "open_discussions.factories.GroupFactory",
        name=factory.LazyAttribute(
            lambda obj: f"Moderators: {obj.factory_parent.name}"
        ),
    )
    contributor_group = factory.SubFactory(
        "open_discussions.factories.GroupFactory",
        name=factory.LazyAttribute(
            lambda obj: f"Contributors: {obj.factory_parent.name}"
        ),
    )

    @factory.post_generation
    def create_permissions(
        self, create, extracted, **kwargs
    ):  # pylint: disable=unused-argument
        """Create the channel groups and roles after the channel is created"""
        # build() strategy: nothing is persisted, so skip the side effects
        if not create:
            return

        api.channels.create_channel_groups(self.name)
        api.channels.set_channel_permissions(self)

    class Meta:
        model = Channel

    class Params:
        # Traits to pin the channel_type instead of the fuzzy default
        is_public = factory.Trait(channel_type=ChannelTypes.PUBLIC.value)
        is_restricted = factory.Trait(channel_type=ChannelTypes.RESTRICTED.value)
        is_private = factory.Trait(channel_type=ChannelTypes.PRIVATE.value)
mitodl/open-discussions | notifications/notifiers/moderator_posts.py | 1 | 1830 | """Notifier for new posts for moderators"""
from django.db import transaction
from notifications.notifiers.email import EmailNotifier
from notifications.models import NOTIFICATION_TYPE_MODERATOR, PostEvent
from notifications.utils import praw_error_to_cancelled
from channels import api
from channels.serializers.posts import PostSerializer
class ModeratorPostsNotifier(EmailNotifier):
    """Email notifier that alerts moderators about new posts"""

    def __init__(self, notification_settings):
        super().__init__(NOTIFICATION_TYPE_MODERATOR, notification_settings)

    @praw_error_to_cancelled()
    def _get_notification_data(
        self, current_notification, last_notification
    ):  # pylint: disable=unused-argument
        """
        Gets the data for this notification

        Args:
            current_notification (NotificationBase): current notification we're sending for
            last_notification (NotificationBase): last notification that was triggered for this NotificationSettings
        """
        serializer_context = {"current_user": self.user}
        post_event = PostEvent.objects.get(email_notification=current_notification)
        post = api.Api(self.user).get_post(post_event.post_id)
        return {
            "post": PostSerializer(post, context=serializer_context).data,
            "moderator_email": True,
        }

    def create_moderator_post_event(self, user, post_id):
        """
        Creates a new PostEvent

        Args:
            user (User): the moderator who should be notified
            post_id (str): the base36 id of the new post

        Returns:
            CommentEvent: the created event
        """
        # Create the notification and the event together or not at all
        with transaction.atomic():
            notification = self._create_notification()
            return PostEvent.objects.create(
                user=user, post_id=post_id, email_notification=notification
            )
mitodl/open-discussions | open_discussions/features.py | 1 | 1870 | """Discussions feature flags"""
from functools import wraps
from django.conf import settings
# Feature flag names; looked up in settings.FEATURES via is_enabled() below
EMAIL_NOTIFICATIONS = "EMAIL_NOTIFICATIONS"
FRONTPAGE_EMAIL_DIGESTS = "FRONTPAGE_EMAIL_DIGESTS"
COMMENT_NOTIFICATIONS = "COMMENT_NOTIFICATIONS"
INDEX_UPDATES = "INDEX_UPDATES"
SAML_AUTH = "SAML_AUTH"
PROFILE_UI = "PROFILE_UI"
ARTICLE_UI = "ARTICLE_UI"
COURSE_UI = "COURSE_UI"
COURSE_FILE_SEARCH = "COURSE_FILE_SEARCH"
RELATED_POSTS_UI = "RELATED_POSTS_UI"
LIVESTREAM_UI = "LIVESTREAM_UI"
HOT_POST_REPAIR = "HOT_POST_REPAIR"
MOIRA = "MOIRA"
PODCAST_APIS = "PODCAST_APIS"
PODCAST_SEARCH = "PODCAST_SEARCH"
SPAM_EXEMPTIONS = "SPAM_EXEMPTIONS"
def is_enabled(name, default=None):
    """
    Returns True if the feature flag is enabled

    Args:
        name (str): feature flag name
        default (bool): default value if the flag is not set in settings

    Returns:
        bool: True if the feature flag is enabled
    """
    if default is None:
        # Only fall back to the global default when no explicit default was
        # given; the previous `default or ...` expression silently discarded
        # an explicit default of False.
        default = settings.OPEN_DISCUSSIONS_FEATURES_DEFAULT
    return settings.FEATURES.get(name, default)
def if_feature_enabled(name, default=None):
    """
    Decorator factory: the wrapped function becomes a no-op unless the named
    feature flag is enabled; otherwise it runs as normal.

    Args:
        name (str): Feature flag name
        default (bool): default value if not set in settings
    """

    def if_feature_enabled_inner(func):  # pylint: disable=missing-docstring
        @wraps(func)
        def wrapped_func(*args, **kwargs):  # pylint: disable=missing-docstring
            if is_enabled(name, default):
                # Feature enabled: delegate to the wrapped function.
                return func(*args, **kwargs)
            # Feature disabled: swallow the call and return None.
            return None

        return wrapped_func

    return if_feature_enabled_inner
| bsd-3-clause | dca4eee02fed48457d9263e2a74b99fc | 29.655738 | 95 | 0.667914 | 3.555133 | false | false | false | false |
mitodl/open-discussions | channels/views/subscribers.py | 1 | 2679 | """Views for REST APIs for channels"""
from django.conf import settings
from rest_framework import status
from rest_framework.exceptions import NotFound
from rest_framework.generics import ListCreateAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from channels.api import Api
from channels.models import Channel, ChannelSubscription
from channels.serializers.subscribers import SubscriberSerializer
from open_discussions.permissions import IsOwnSubscriptionOrAdminPermission
class SubscriberListView(ListCreateAPIView):
    """
    View to add subscribers in channels
    """

    permission_classes = (IsAuthenticated, IsOwnSubscriptionOrAdminPermission)
    serializer_class = SubscriberSerializer

    def get_serializer_context(self):
        """Context for the request and view"""
        return {"channel_api": self.request.channel_api, "view": self}

    def get_queryset(self):
        """Get generator for subscribers in channel"""
        # Evaluate the channel lookup eagerly; only iteration is lazy
        channel_subscribers = list(
            Channel.objects.get(name=self.kwargs["channel_name"]).subscribers
        )
        return (
            subscriber
            for subscriber in channel_subscribers
            if subscriber.username != settings.INDEXING_API_USERNAME
        )
class SubscriberDetailView(APIView):
    """
    View to retrieve and remove subscribers in channels
    """

    permission_classes = (IsAuthenticated, IsOwnSubscriptionOrAdminPermission)

    def get_serializer_context(self):
        """Context for the request and view"""
        return {"channel_api": self.request.channel_api, "view": self}

    def get(self, request, *args, **kwargs):
        """Get subscriber for the channel"""
        channel_name = self.kwargs["channel_name"]
        subscriber_name = self.kwargs["subscriber_name"]
        subscription = ChannelSubscription.objects.filter(
            channel__name=channel_name, user__username=subscriber_name
        ).first()
        if subscription is None:
            raise NotFound(
                "User {} is not a subscriber of {}".format(
                    subscriber_name, channel_name
                )
            )
        return Response(SubscriberSerializer(subscription.user).data)

    def delete(self, request, *args, **kwargs):  # pylint: disable=unused-argument
        """
        Removes a subscriber from a channel
        """
        Api(user=request.user).remove_subscriber(
            self.kwargs["subscriber_name"], self.kwargs["channel_name"]
        )
        return Response(status=status.HTTP_204_NO_CONTENT)
mitodl/open-discussions | course_catalog/api.py | 1 | 24175 | """
course_catalog api functions
"""
import logging
from datetime import datetime
from urllib.parse import urljoin
import boto3
import pytz
import rapidjson
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
from ocw_data_parser import OCWParser
from course_catalog.constants import (
NON_COURSE_DIRECTORIES,
AvailabilityType,
OfferedBy,
PlatformType,
)
from course_catalog.etl.loaders import load_content_files, load_offered_bys
from course_catalog.etl.ocw import (
get_ocw_learning_course_bucket,
transform_content_files,
)
from course_catalog.etl.ocw_next import transform_ocw_next_content_files
from course_catalog.models import Course, LearningResourceRun
from course_catalog.serializers import (
LearningResourceRunSerializer,
OCWNextSerializer,
OCWSerializer,
)
from course_catalog.utils import get_course_url, get_s3_object_and_read, safe_load_json
from search.task_helpers import delete_course, upsert_course
log = logging.getLogger(__name__)
def digest_ocw_course(
    master_json,
    last_modified,
    is_published,
    course_prefix="",
    keep_existing_image_src=False,
):
    """
    Takes in OCW course master json to store it in DB

    Args:
        master_json (dict): course master JSON object as an output from ocw-data-parser
        last_modified (datetime): timestamp of latest modification of all course files
        is_published (bool): Flags OCW course as published or not
        course_prefix (str): (Optional) String used to query S3 bucket for course raw JSONs
        keep_existing_image_src (boolean): (Optional) Avoid overwriting image_src if image_src is
            blank because the backpopulate is run without uploading to s3

    Returns:
        tuple of (Course, LearningResourceRun), or None (implicitly) if the
        input or either serializer is invalid
    """
    if "course_id" not in master_json:
        log.error("Course %s is missing 'course_id'", master_json.get("uid"))
        return

    existing_course_instance = Course.objects.filter(
        platform=PlatformType.ocw.value,
        course_id=f"{master_json.get('uid')}+{master_json.get('course_id')}",
    ).first()

    data = {
        **master_json,
        "last_modified": last_modified,
        "is_published": True,  # This will be updated after all course runs are serialized
        "course_prefix": course_prefix,
    }
    if existing_course_instance and keep_existing_image_src:
        data["image_src"] = existing_course_instance.image_src

    ocw_serializer = OCWSerializer(data=data, instance=existing_course_instance)
    if not ocw_serializer.is_valid():
        log.error(
            "Course %s is not valid: %s %s",
            master_json.get("uid"),
            ocw_serializer.errors,
            master_json.get("image_src"),
        )
        return

    # Make changes atomically so we don't end up with partially saved/deleted data
    with transaction.atomic():
        course = ocw_serializer.save()
        load_offered_bys(course, [{"name": OfferedBy.ocw.value}])

        # Try and get the run instance.
        courserun_instance = course.runs.filter(
            platform=PlatformType.ocw.value, run_id=master_json.get("uid")
        ).first()
        run_serializer = LearningResourceRunSerializer(
            data={
                **master_json,
                "platform": PlatformType.ocw.value,
                "key": master_json.get("uid"),
                "is_published": is_published,
                "staff": master_json.get("instructors"),
                "seats": [{"price": "0.00", "mode": "audit", "upgrade_deadline": None}],
                "content_language": master_json.get("language"),
                "short_description": master_json.get("description"),
                "level_type": master_json.get("course_level"),
                "year": master_json.get("from_year"),
                "semester": master_json.get("from_semester"),
                "availability": AvailabilityType.current.value,
                "image": {
                    "src": master_json.get("image_src"),
                    "description": master_json.get("image_description"),
                },
                "max_modified": last_modified,
                "content_type": ContentType.objects.get(model="course").id,
                "object_id": course.id,
                "url": get_course_url(
                    master_json.get("uid"), master_json, PlatformType.ocw.value
                ),
                "slug": master_json.get("short_url"),
                "raw_json": master_json,
            },
            instance=courserun_instance,
        )
        if not run_serializer.is_valid():
            log.error(
                "OCW LearningResourceRun %s is not valid: %s",
                master_json.get("uid"),
                run_serializer.errors,
            )
            # NOTE: returning here commits the course save but skips the run
            return

        run = run_serializer.save()
        load_offered_bys(run, [{"name": OfferedBy.ocw.value}])
    return course, run
def digest_ocw_next_course(course_json, last_modified, uid, url_path):
    """
    Takes in OCW next course data.json to store it in DB

    Args:
        course_json (dict): course data JSON object from s3
        last_modified (datetime): timestamp of latest modification of all course files
        uid (str): Course uid
        url_path (str): String used to query S3 bucket for course data JSONs

    Returns:
        tuple of (Course, LearningResourceRun), or None (implicitly) if either
        serializer rejects the data
    """
    courserun_instance = LearningResourceRun.objects.filter(
        platform=PlatformType.ocw.value, run_id=uid
    ).first()

    if courserun_instance:
        existing_course_instance = courserun_instance.content_object
    else:
        existing_course_instance = None

    data = {
        **course_json,
        "uid": uid,
        "last_modified": last_modified,
        "is_published": True,
        "course_prefix": url_path,
    }

    ocw_serializer = OCWNextSerializer(data=data, instance=existing_course_instance)
    if not ocw_serializer.is_valid():
        log.error(
            "Course %s is not valid: %s",
            course_json.get("primary_course_number"),
            ocw_serializer.errors,
        )
        return

    # Make changes atomically so we don't end up with partially saved/deleted data
    with transaction.atomic():
        course = ocw_serializer.save()
        load_offered_bys(course, [{"name": OfferedBy.ocw.value}])

        # Try and get the run instance.
        courserun_instance = course.runs.filter(
            platform=PlatformType.ocw.value, run_id=uid
        ).first()

        run_slug = url_path.strip("/")

        run_serializer = LearningResourceRunSerializer(
            data={
                "platform": PlatformType.ocw.value,
                "key": uid,
                "is_published": True,
                "staff": course_json.get("instructors"),
                "seats": [{"price": "0.00", "mode": "audit", "upgrade_deadline": None}],
                "short_description": course_json.get("course_description"),
                "level_type": ", ".join(course_json.get("level", [])),
                "year": course_json.get("year"),
                "semester": course_json.get("term"),
                "availability": AvailabilityType.current.value,
                "image": {
                    "src": course_json.get("image_src"),
                    "description": course_json.get("course_image_metadata", {}).get(
                        "description"
                    ),
                },
                "max_modified": last_modified,
                "content_type": ContentType.objects.get(model="course").id,
                "object_id": course.id,
                "raw_json": course_json,
                "title": course_json.get("course_title"),
                "slug": run_slug,
                "url": urljoin(settings.OCW_NEXT_BASE_URL, run_slug),
            },
            instance=courserun_instance,
        )
        if not run_serializer.is_valid():
            log.error(
                "OCW LearningResourceRun %s is not valid: %s",
                uid,
                run_serializer.errors,
            )
            # BUGFIX: previously fell through to .save() on an invalid
            # serializer, which raises an AssertionError in DRF. Returning
            # None here matches digest_ocw_course and lets the caller's
            # `except TypeError` treat it as a graceful skip.
            return

        run = run_serializer.save()
        load_offered_bys(run, [{"name": OfferedBy.ocw.value}])
    return course, run
def format_date(date_str):
    """
    Converts a date from "2016/02/02 20:28:06 US/Eastern" form to a UTC-aware
    datetime, e.g. 2016-02-03 01:28:06+00:00.

    Args:
        date_str (String): Datetime as a string in the format
            "2016/02/02 20:28:06 US/Eastern"

    Returns:
        Datetime object if the passed date is valid, otherwise None
    """
    if not date_str or date_str == "None":
        return None
    day_part, time_part, zone_name = date_str.split(" ")
    day_part = day_part.replace("/", "-")
    if "." in time_part:
        # Drop a trailing ".mmm" milliseconds suffix
        time_part = time_part[:-4]
    if "GMT" in zone_name:
        zone = pytz.timezone("Etc/" + zone_name)
    else:
        zone = pytz.timezone(zone_name)
    naive = datetime.strptime(
        "{} {}".format(day_part, time_part), "%Y-%m-%d %H:%M:%S"
    )
    return zone.localize(naive).astimezone(pytz.utc)
def generate_course_prefix_list(bucket, course_urls=None):
    """
    Assembles a list of OCW course prefixes from an S3 Bucket that contains all the raw jsons files

    Args:
        bucket (s3.Bucket): Instantiated S3 Bucket object
        course_urls (List[str] or None): List of site urls to return

    Returns:
        List of course prefixes
    """
    ocw_courses = set()
    log.info("Assembling list of courses...")
    for bucket_file in bucket.objects.all():
        # retrieve courses, skipping non-courses (bootcamps, department topics, etc)
        if ocw_parent_folder(bucket_file.key) not in NON_COURSE_DIRECTORIES:
            key_pieces = bucket_file.key.split("/")
            # everything up to the last two path components, with trailing slash
            course_prefix = f'{"/".join(key_pieces[:-2])}/'
            if (
                course_prefix != "/"
                and course_prefix not in ocw_courses
                and (not course_urls or key_pieces[0:-2][-1].lower() in course_urls)
            ):
                ocw_courses.add(course_prefix)
            # stop early once every requested course url has been matched
            if course_urls and len(ocw_courses) == len(course_urls):
                break
    log.info("Done assembling list of courses...")
    return list(ocw_courses)
def get_course_availability(course):
    """
    Gets the attribute `availability` for a course if any

    Args:
        course (Course): Course model instance

    Returns:
        str or None: The availability value for the course, if any
    """
    if course.platform == PlatformType.ocw.value:
        # OCW courses are always considered currently available
        return AvailabilityType.current.value
    if course.platform != PlatformType.mitx.value:
        return None

    course_json = course.raw_json
    if course_json is None:
        return None
    runs = course_json.get("course_runs")
    if runs is None:
        return None
    # find the run matching this course's id, if any
    matching_run = next(
        (run for run in runs if run.get("key") == course.course_id), None
    )
    return matching_run.get("availability") if matching_run is not None else None
def sync_ocw_course_files(ids=None):
    """
    Sync all OCW course run files for a list of course ids to database

    Args:
        ids(list of int or None): list of course ids to process, all if None
    """
    bucket = get_ocw_learning_course_bucket()
    # only published OCW courses are considered
    courses = Course.objects.filter(platform="ocw").filter(published=True)
    if ids:
        courses = courses.filter(id__in=ids)
    for course in courses.iterator():
        # skip runs without a url or that are unpublished
        runs = course.runs.exclude(url="").exclude(published=False)
        for run in runs.iterator():
            try:
                s3_parsed_json = rapidjson.loads(
                    bucket.Object(f"{run.slug}/{run.slug}_parsed.json")
                    .get()["Body"]
                    .read()
                )
                load_content_files(run, transform_content_files(s3_parsed_json))
            except:  # pylint: disable=bare-except
                # best-effort: log and continue with the remaining runs
                log.exception("Error syncing files for course run %d", run.id)
# pylint: disable=too-many-locals, too-many-branches, too-many-statements
def sync_ocw_course(
    *,
    course_prefix,
    raw_data_bucket,
    force_overwrite,
    upload_to_s3,
    blocklist,
    start_timestamp=None,
    force_s3_upload=False,
):
    """
    Sync an OCW course run

    Args:
        course_prefix (str): The course prefix
        raw_data_bucket (boto3.resource): The S3 bucket containing the OCW information
        force_overwrite (bool): A boolean value to force the incoming course data to overwrite existing data
        upload_to_s3 (bool): If True, upload course media to S3
        blocklist (list of str): list of course ids that should not be published
        start_timestamp (timestamp): start timestamp of backpoplate. If the updated_on is after this the update already happened
        force_s3_upload (bool): If True, upload parsed JSON even if course imported from OCW-Next

    Returns:
        str:
            The UID, or None if the run_id is not found, or if it was found but not synced
    """
    loaded_raw_jsons_for_course = []
    last_modified_dates = []
    uid = None
    is_published = True

    if ocw_parent_folder(course_prefix) in NON_COURSE_DIRECTORIES:
        log.info("Non-course folder, skipping: %s ...", course_prefix)
        return

    log.info("Syncing: %s ...", course_prefix)
    # Collect last modified timestamps for all course files of the course
    for obj in raw_data_bucket.objects.filter(Prefix=course_prefix):
        # the "1.json" metadata file contains a course's uid
        if obj.key == course_prefix + "0/1.json":
            try:
                first_json = safe_load_json(get_s3_object_and_read(obj), obj.key)
                uid = first_json.get("_uid")
                last_published_to_production = format_date(
                    first_json.get("last_published_to_production", None)
                )
                last_unpublishing_date = format_date(
                    first_json.get("last_unpublishing_date", None)
                )
                # unpublished if never published, or unpublished more recently
                # than the last production publish
                if last_published_to_production is None or (
                    last_unpublishing_date
                    and (last_unpublishing_date > last_published_to_production)
                ):
                    is_published = False
            except:  # pylint: disable=bare-except
                log.exception("Error encountered reading 1.json for %s", course_prefix)

        # accessing last_modified from s3 object summary is fast (does not download file contents)
        last_modified_dates.append(obj.last_modified)

    if not uid:
        # skip if we're unable to fetch course's uid
        log.info("Skipping %s, no course_id", course_prefix)
        return None

    # get the latest modified timestamp of any file in the course
    last_modified = max(last_modified_dates)

    # if course run synced before, check if modified since then
    courserun_instance = LearningResourceRun.objects.filter(
        platform=PlatformType.ocw.value, run_id=uid
    ).first()

    is_ocw_next_course = (
        courserun_instance is not None
        and courserun_instance.content_object.ocw_next_course
    )
    if is_ocw_next_course and not force_s3_upload:
        log.info(
            "%s is imported into OCW Studio. Skipping sync and s3 json upload from Plone",
            course_prefix,
        )
        return None

    # Make sure that the data we are syncing is newer than what we already have
    if (  # pylint: disable=too-many-boolean-expressions
        courserun_instance
        and last_modified <= courserun_instance.last_modified
        and not force_overwrite
    ) or (
        start_timestamp
        and courserun_instance
        and start_timestamp <= courserun_instance.updated_on
    ):
        log.info("Already synced. No changes found for %s", course_prefix)
        return None

    # fetch JSON contents for each course file in memory (slow)
    log.info("Loading JSON for %s...", course_prefix)
    # files are numbered; sort numerically by the basename before the extension
    for obj in sorted(
        raw_data_bucket.objects.filter(Prefix=course_prefix),
        key=lambda x: int(x.key.split("/")[-1].split(".")[0]),
    ):
        loaded_raw_jsons_for_course.append(
            safe_load_json(get_s3_object_and_read(obj), obj.key)
        )

    log.info("Parsing for %s...", course_prefix)
    # pass course contents into parser
    parser = OCWParser(
        loaded_jsons=loaded_raw_jsons_for_course,
        s3_bucket_name=settings.OCW_LEARNING_COURSE_BUCKET_NAME,
        create_vtt_files=True,
    )
    course_json = parser.get_parsed_json()
    course_json["uid"] = uid
    course_json["course_id"] = "{}.{}".format(
        course_json.get("department_number"), course_json.get("master_course_number")
    )

    if course_json["course_id"] in blocklist:
        is_published = False

    if upload_to_s3 or force_s3_upload:
        parser.setup_s3_uploading(
            settings.OCW_LEARNING_COURSE_BUCKET_NAME,
            settings.AWS_ACCESS_KEY_ID,
            settings.AWS_SECRET_ACCESS_KEY,
            # course_prefix now has trailing slash so [-2] below is the last
            # actual element and [-1] is an empty string
            course_prefix.split("/")[-2],
        )
        if is_published:
            try:
                if settings.OCW_UPLOAD_IMAGE_ONLY:
                    parser.upload_course_image()
                else:
                    parser.upload_all_media_to_s3(upload_parsed_json=True)
            except:  # pylint: disable=bare-except
                # log with traceback, then propagate to the caller
                log.exception(
                    "Error encountered uploading OCW files for %s", course_prefix
                )
                raise
        else:
            parser.get_s3_base_url()
            parser.upload_parsed_json_to_s3(
                boto3.resource("s3").Bucket(settings.OCW_LEARNING_COURSE_BUCKET_NAME)
            )

    if is_ocw_next_course:
        # force_s3_upload path: the JSON upload above was the only goal
        return None

    log.info("Digesting %s...", course_prefix)
    keep_existing_image_src = not upload_to_s3

    try:
        course, run = digest_ocw_course(
            course_json,
            last_modified,
            is_published,
            course_prefix,
            keep_existing_image_src,
        )
    except TypeError:
        # digest_ocw_course returned None (invalid data); unpacking raised
        log.info("Course and run not returned, skipping")
        return None

    if upload_to_s3 and is_published:
        load_content_files(run, transform_content_files(course_json))

    # a course stays published if any of its runs is still published
    course.published = is_published or (
        Course.objects.get(id=course.id).runs.filter(published=True).exists()
    )
    course.save()

    if course.published:
        upsert_course(course.id)
    else:
        delete_course(course)
def sync_ocw_next_course(
    *, url_path, s3_resource, force_overwrite, start_timestamp=None
):
    """
    Sync an OCW course run

    Args:
        url_path (str): The course url path
        s3_resource (boto3.resource): Boto3 s3 resource
        force_overwrite (bool): A boolean value to force the incoming course data to overwrite existing data
        start_timestamp (timestamp): start timestamp of backpoplate. If the updated_on is after this the update already happened

    Returns:
        str:
            The UID, or None if the run_id is not found, or if it was found but not synced
    """
    course_json = {}
    uid = None

    log.info("Syncing: %s ...", url_path)

    s3_data_object = s3_resource.Object(
        settings.OCW_NEXT_LIVE_BUCKET, url_path + "data.json"
    )

    try:
        course_json = safe_load_json(
            get_s3_object_and_read(s3_data_object), s3_data_object.key
        )
        last_modified = s3_data_object.last_modified
    except:  # pylint: disable=bare-except
        # if the read fails, course_json stays empty and we bail below on uid
        log.exception("Error encountered reading data.json for %s", url_path)

    # prefer the legacy uid, falling back to the OCW Studio site uid
    uid = course_json.get("legacy_uid")
    if not uid:
        uid = course_json.get("site_uid")

    if not uid:
        log.info("Skipping %s, both site_uid and legacy_uid missing", url_path)
        return None
    else:
        # normalize uid by stripping dashes
        uid = uid.replace("-", "")

    # if course run synced before, check if modified since then
    courserun_instance = LearningResourceRun.objects.filter(
        platform=PlatformType.ocw.value, run_id=uid
    ).first()

    # Make sure that the data we are syncing is newer than what we already have
    if (  # pylint: disable=too-many-boolean-expressions
        courserun_instance
        and last_modified <= courserun_instance.last_modified
        and not force_overwrite
    ) or (
        start_timestamp
        and courserun_instance
        and start_timestamp <= courserun_instance.updated_on
    ):
        log.info("Already synced. No changes found for %s", url_path)
        return None

    log.info("Digesting %s...", url_path)

    try:
        course, run = digest_ocw_next_course(  # pylint: disable=unused-variable
            course_json, last_modified, uid, url_path
        )
    except TypeError:
        # digest_ocw_next_course returned None (invalid data); unpacking raised
        log.info("Course and run not returned, skipping")
        return None

    upsert_course(course.id)
    load_content_files(
        run, transform_ocw_next_content_files(s3_resource, url_path, force_overwrite)
    )
def sync_ocw_courses(
    *,
    course_prefixes,
    blocklist,
    force_overwrite,
    upload_to_s3,
    start_timestamp=None,
    force_s3_upload=False,
):
    """
    Sync OCW courses to the database

    Args:
        course_prefixes (list of str): The course prefixes to process
        blocklist (list of str): list of course ids to skip
        force_overwrite (bool): A boolean value to force the incoming course data to overwrite existing data
        upload_to_s3 (bool): If True, upload course media to S3
        start_timestamp (datetime or None): backpopulate start time
        force_s3_upload (bool): If True, upload parsed JSON even if course imported from OCW-Next

    Returns:
        set[str]: All LearningResourceRun.run_id values for course runs which were synced
    """
    raw_data_bucket = boto3.resource(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    ).Bucket(name=settings.OCW_CONTENT_BUCKET_NAME)

    for course_prefix in course_prefixes:
        try:
            sync_ocw_course(
                course_prefix=course_prefix,
                raw_data_bucket=raw_data_bucket,
                force_overwrite=force_overwrite,
                upload_to_s3=upload_to_s3,
                blocklist=blocklist,
                start_timestamp=start_timestamp,
                force_s3_upload=force_s3_upload,
            )
        except:  # pylint: disable=bare-except
            # best-effort: one bad course shouldn't stop the whole sync
            log.exception("Error encountered parsing OCW json for %s", course_prefix)
def sync_ocw_next_courses(*, url_paths, force_overwrite, start_timestamp=None):
    """
    Sync OCW courses to the database

    Args:
        url_paths (list of str): The course url paths to process
        force_overwrite (bool): A boolean value to force the incoming course data to overwrite existing data
        start_timestamp (datetime or None): backpopulate start time

    Returns:
        set[str]: All LearningResourceRun.run_id values for course runs which were synced
    """
    s3_resource = boto3.resource(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )

    for url_path in url_paths:
        try:
            sync_ocw_next_course(
                url_path=url_path,
                s3_resource=s3_resource,
                force_overwrite=force_overwrite,
                start_timestamp=start_timestamp,
            )
        except:  # pylint: disable=bare-except
            # best-effort: one bad course shouldn't stop the whole sync
            log.exception("Error encountered parsing OCW json for %s", url_path)
def ocw_parent_folder(prefix):
    """
    Get the S3 parent folder of an OCW course

    Args:
        prefix(str): The course prefix

    Returns:
        str: The parent folder for the course prefix
    """
    parts = prefix.split("/")
    if parts[0] == "PROD":
        # production courses live one level deeper, e.g. "PROD/<folder>"
        return "/".join(parts[:2])
    return parts[0]
| bsd-3-clause | 91a46d0651b18f4ae6542793b465cb65 | 35.028316 | 128 | 0.602316 | 3.895424 | false | false | false | false |
mitodl/open-discussions | open_discussions/views.py | 1 | 5714 | """
open_discussions views
"""
from django.conf import settings
from django.http import (
Http404,
HttpResponse,
HttpResponsePermanentRedirect,
HttpResponseNotFound,
HttpResponseForbidden,
HttpResponseBadRequest,
)
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from social_django.utils import load_strategy, load_backend
from channels.models import Post
from moira_lists.moira_api import is_list_staff
from open_discussions import features
from profiles.models import SOCIAL_SITE_NAME_MAP
def _render_app(request, initial_state=None):  # pylint:disable=unused-argument
    """Render the React app shell with the JS settings the frontend needs.

    Args:
        request: The incoming HttpRequest.
        initial_state (dict or None): accepted for the error handlers'
            convenience but currently unused by the template rendering.

    Returns:
        HttpResponse: the rendered ``react.html`` (or ``social.html`` for
        the Facebook crawler).
    """
    # Facebook's link-preview crawler gets a lightweight page with just
    # Open Graph style metadata instead of the full JS app.
    if request.META.get("HTTP_USER_AGENT", "").startswith("facebookexternalhit"):
        return render(
            request,
            "social.html",
            context={
                "url": request.build_absolute_uri(),
                "description_value": "MIT Open Learning's discussion platform",
            },
        )

    # Defaults for anonymous visitors; overwritten below when logged in.
    username = None
    user_full_name = None
    user_email = None
    user_is_superuser = False
    user_id = None
    user_list_staff = False

    if request.user.is_authenticated:
        user = request.user
        username = user.username
        user_full_name = user.profile.name
        user_email = user.email
        user_is_superuser = user.is_superuser
        user_id = user.id
        user_list_staff = is_list_staff(user)

    # Article editing requires a fully configured CKEditor; otherwise the
    # feature flag is ignored.
    article_ui_enabled = (
        features.is_enabled(features.ARTICLE_UI)
        if settings.CKEDITOR_ENVIRONMENT_ID
        and settings.CKEDITOR_SECRET_KEY
        and settings.CKEDITOR_UPLOAD_URL
        else False
    )

    # Livestream UI likewise requires account credentials to be configured.
    livestream_ui_enabled = (
        features.is_enabled(features.LIVESTREAM_UI)
        if settings.LIVESTREAM_ACCOUNT_ID and settings.LIVESTREAM_SECRET_KEY
        else False
    )

    # Serialized into the page for the JS frontend to consume.
    js_settings = {
        "gaTrackingID": settings.GA_TRACKING_ID,
        "gaGTrackingID": settings.GA_G_TRACKING_ID,
        "environment": settings.ENVIRONMENT,
        "sentry_dsn": settings.SENTRY_DSN,
        "release_version": settings.VERSION,
        "site_url": settings.SITE_BASE_URL,
        "max_comment_depth": settings.OPEN_DISCUSSIONS_MAX_COMMENT_DEPTH,
        "username": username,
        "user_full_name": user_full_name,
        "user_email": user_email,
        "user_id": user_id,
        "is_admin": user_is_superuser,
        "is_list_staff": user_list_staff,
        "authenticated_site": {
            "title": settings.OPEN_DISCUSSIONS_TITLE,
            "base_url": settings.SITE_BASE_URL,
            "tos_url": settings.OPEN_DISCUSSIONS_TOS_URL,
        },
        "support_email": settings.EMAIL_SUPPORT,
        "is_authenticated": bool(request.user.is_authenticated),
        "profile_ui_enabled": features.is_enabled(features.PROFILE_UI),
        "allow_saml_auth": features.is_enabled(features.SAML_AUTH),
        "allow_related_posts_ui": features.is_enabled(features.RELATED_POSTS_UI),
        "embedlyKey": settings.EMBEDLY_KEY,
        "recaptchaKey": settings.RECAPTCHA_SITE_KEY,
        "search_page_size": settings.ELASTICSEARCH_DEFAULT_PAGE_SIZE,
        "search_min_length": settings.ELASTICSEARCH_MIN_QUERY_SIZE,
        "accepted_social_sites": list(SOCIAL_SITE_NAME_MAP.values()),
        "article_ui_enabled": article_ui_enabled,
        "ckeditor_upload_url": settings.CKEDITOR_UPLOAD_URL,
        "course_ui_enabled": features.is_enabled(features.COURSE_UI),
        "file_search_enabled": features.is_enabled(features.COURSE_FILE_SEARCH),
        "livestream_ui_enabled": livestream_ui_enabled,
        "ocw_next_base_url": settings.OCW_NEXT_BASE_URL,
    }

    return render(request, "react.html", context=dict(js_settings=js_settings))
def index(request, **kwargs):  # pylint: disable=unused-argument
    """Render the react app.

    Extra URL kwargs from the route are accepted but unused; all routing is
    handled client-side by the React app.
    """
    return _render_app(request)
def channel_post(request, **kwargs):
    """Render a channel post's page as long as it isn't removed"""
    post_id = kwargs.get("post_id", None)
    if not post_id:
        raise Http404("No post specified")

    post = get_object_or_404(Post, post_id=post_id)

    if post.removed:
        # Removed posts remain visible to channel moderators only.
        user = request.user
        can_view = (not user.is_anonymous) and request.channel_api.is_moderator(
            post.channel.name, user.username
        )
        if not can_view:
            raise Http404("Post doesn't exist")

    return _render_app(request)
def handle_400(request, exception=None):  # pylint:disable=unused-argument
    """400 error handler"""
    rendered = _render_app(request, initial_state={"server": {"statusCode": 400}})
    return HttpResponseBadRequest(rendered)
def handle_403(request, exception=None):  # pylint:disable=unused-argument
    """403 error handler"""
    rendered = _render_app(request, initial_state={"server": {"statusCode": 403}})
    return HttpResponseForbidden(rendered)
def handle_404(request, exception=None):  # pylint:disable=unused-argument
    """404 error handler"""
    rendered = _render_app(request, initial_state={"server": {"statusCode": 404}})
    return HttpResponseNotFound(rendered)
def saml_metadata(request):
    """Display SAML configuration metadata as XML"""
    if not features.is_enabled(features.SAML_AUTH):
        raise Http404("Page not found")

    saml_backend = load_backend(
        load_strategy(request),
        "saml",
        redirect_uri=reverse("social:complete", args=("saml",)),
    )
    metadata, _ = saml_backend.generate_metadata_xml()
    return HttpResponse(content=metadata, content_type="text/xml")
def channel_redirect(request):
    """Redirect all URL's starting with `channel/` to `c/`"""
    new_path = request.path.replace("channel/", "c/", 1)
    return HttpResponsePermanentRedirect(new_path)
| bsd-3-clause | 261202b60da16cbf64dba0495c887908 | 33.841463 | 86 | 0.655933 | 3.693601 | false | false | false | false |
theislab/scanpy | scanpy/external/pp/_scrublet.py | 1 | 21492 | from anndata import AnnData
from typing import Optional
import numpy as np
import pandas as pd
from scipy import sparse
from ... import logging as logg
from ... import preprocessing as pp
from ...get import _get_obs_rep
def scrublet(
    adata: AnnData,
    adata_sim: Optional[AnnData] = None,
    batch_key: Optional[str] = None,
    sim_doublet_ratio: float = 2.0,
    expected_doublet_rate: float = 0.05,
    stdev_doublet_rate: float = 0.02,
    synthetic_doublet_umi_subsampling: float = 1.0,
    knn_dist_metric: str = 'euclidean',
    normalize_variance: bool = True,
    log_transform: bool = False,
    mean_center: bool = True,
    n_prin_comps: int = 30,
    use_approx_neighbors: bool = True,
    get_doublet_neighbor_parents: bool = False,
    n_neighbors: Optional[int] = None,
    threshold: Optional[float] = None,
    verbose: bool = True,
    copy: bool = False,
    random_state: int = 0,
) -> Optional[AnnData]:
    """\
    Predict doublets using Scrublet [Wolock19]_.

    Predict cell doublets using a nearest-neighbor classifier of observed
    transcriptomes and simulated doublets. Works best if the input is a raw
    (unnormalized) counts matrix from a single sample or a collection of
    similar samples from the same experiment.

    This function is a wrapper around functions that pre-process using Scanpy
    and directly call functions of Scrublet(). You may also undertake your own
    preprocessing, simulate doublets with
    scanpy.external.pp.scrublet_simulate_doublets(), and run the core scrublet
    function scanpy.external.pp.scrublet.scrublet().

    .. note::
        More information and bug reports `here
        <https://github.com/swolock/scrublet>`__.

    Parameters
    ----------
    adata
        The annotated data matrix of shape ``n_obs`` × ``n_vars``. Rows
        correspond to cells and columns to genes. Expected to be un-normalised
        where adata_sim is not supplied, in which case doublets will be
        simulated and pre-processing applied to both objects. If adata_sim is
        supplied, this should be the observed transcriptomes processed
        consistently (filtering, transform, normalisaton, hvg) with adata_sim.
    adata_sim
        (Advanced use case) Optional annData object generated by
        sc.external.pp.scrublet_simulate_doublets(), with same number of vars
        as adata. This should have been built from adata_obs after
        filtering genes and cells and selecting highly-variable genes.
    batch_key
        Optional `adata.obs` column name discriminating between batches.
    sim_doublet_ratio
        Number of doublets to simulate relative to the number of observed
        transcriptomes.
    expected_doublet_rate
        Where adata_sim not supplied, the estimated doublet rate for the
        experiment.
    stdev_doublet_rate
        Where adata_sim not supplied, uncertainty in the expected doublet rate.
    synthetic_doublet_umi_subsampling
        Where adata_sim not supplied, rate for sampling UMIs when creating
        synthetic doublets. If 1.0, each doublet is created by simply adding
        the UMI counts from two randomly sampled observed transcriptomes. For
        values less than 1, the UMI counts are added and then randomly sampled
        at the specified rate.
    knn_dist_metric
        Distance metric used when finding nearest neighbors. For list of
        valid values, see the documentation for annoy (if `use_approx_neighbors`
        is True) or sklearn.neighbors.NearestNeighbors (if `use_approx_neighbors`
        is False).
    normalize_variance
        If True, normalize the data such that each gene has a variance of 1.
        `sklearn.decomposition.TruncatedSVD` will be used for dimensionality
        reduction, unless `mean_center` is True.
    log_transform
        Whether to use :func:``~scanpy.pp.log1p`` to log-transform the data
        prior to PCA.
    mean_center
        If True, center the data such that each gene has a mean of 0.
        `sklearn.decomposition.PCA` will be used for dimensionality
        reduction.
    n_prin_comps
        Number of principal components used to embed the transcriptomes prior
        to k-nearest-neighbor graph construction.
    use_approx_neighbors
        Use approximate nearest neighbor method (annoy) for the KNN
        classifier.
    get_doublet_neighbor_parents
        If True, return (in .uns) the parent transcriptomes that generated the
        doublet neighbors of each observed transcriptome. This information can
        be used to infer the cell states that generated a given doublet state.
    n_neighbors
        Number of neighbors used to construct the KNN graph of observed
        transcriptomes and simulated doublets. If ``None``, this is
        automatically set to ``np.round(0.5 * np.sqrt(n_obs))``.
    threshold
        Doublet score threshold for calling a transcriptome a doublet. If
        `None`, this is set automatically by looking for the minimum between
        the two modes of the `doublet_scores_sim_` histogram. It is best
        practice to check the threshold visually using the
        `doublet_scores_sim_` histogram and/or based on co-localization of
        predicted doublets in a 2-D embedding.
    verbose
        If True, print progress updates.
    copy
        If ``True``, return a copy of the input ``adata`` with Scrublet results
        added. Otherwise, Scrublet results are added in place.
    random_state
        Initial state for doublet simulation and nearest neighbors.

    Returns
    -------
    adata : anndata.AnnData
        if ``copy=True`` it returns or else adds fields to ``adata``. Those fields:

        ``.obs['doublet_score']``
            Doublet scores for each observed transcriptome
        ``.obs['predicted_doublet']``
            Boolean indicating predicted doublet status
        ``.uns['scrublet']['doublet_scores_sim']``
            Doublet scores for each simulated doublet transcriptome
        ``.uns['scrublet']['doublet_parents']``
            Pairs of ``.obs_names`` used to generate each simulated doublet
            transcriptome
        ``.uns['scrublet']['parameters']``
            Dictionary of Scrublet parameters

        When ``batch_key`` is supplied, the per-batch ``.uns`` results are
        stored under ``.uns['scrublet']['batches']`` instead.

    See also
    --------
    :func:`~scanpy.external.pp.scrublet_simulate_doublets`: Run Scrublet's doublet
        simulation separately for advanced usage.
    :func:`~scanpy.external.pl.scrublet_score_distribution`: Plot histogram of doublet
        scores for observed transcriptomes and simulated doublets.
    """
    # The scrublet package is an optional dependency; fail early with a
    # helpful message if it's missing.
    try:
        import scrublet as sl
    except ImportError:
        raise ImportError(
            'Please install scrublet: `pip install scrublet` or `conda install scrublet`.'
        )

    if copy:
        adata = adata.copy()

    start = logg.info('Running Scrublet')

    # Work on a copy so pre-processing never mutates the caller's object.
    adata_obs = adata.copy()

    def _run_scrublet(ad_obs, ad_sim=None):
        # With no adata_sim we assume the regular use case, starting with raw
        # counts and simulating doublets
        if ad_sim is None:
            pp.filter_genes(ad_obs, min_cells=3)
            pp.filter_cells(ad_obs, min_genes=3)

            # Doublet simulation will be based on the un-normalised counts, but on the
            # selection of genes following normalisation and variability filtering. So
            # we need to save the raw and subset at the same time.
            ad_obs.layers['raw'] = ad_obs.X.copy()
            pp.normalize_total(ad_obs)

            # HVG process needs log'd data.
            logged = pp.log1p(ad_obs, copy=True)
            pp.highly_variable_genes(logged)
            ad_obs = ad_obs[:, logged.var['highly_variable']]

            # Simulate the doublets based on the raw expressions from the normalised
            # and filtered object.
            ad_sim = scrublet_simulate_doublets(
                ad_obs,
                layer='raw',
                sim_doublet_ratio=sim_doublet_ratio,
                synthetic_doublet_umi_subsampling=synthetic_doublet_umi_subsampling,
            )

            if log_transform:
                pp.log1p(ad_obs)
                pp.log1p(ad_sim)

            # Now normalise simulated and observed in the same way
            pp.normalize_total(ad_obs, target_sum=1e6)
            pp.normalize_total(ad_sim, target_sum=1e6)

        ad_obs = _scrublet_call_doublets(
            adata_obs=ad_obs,
            adata_sim=ad_sim,
            n_neighbors=n_neighbors,
            expected_doublet_rate=expected_doublet_rate,
            stdev_doublet_rate=stdev_doublet_rate,
            mean_center=mean_center,
            normalize_variance=normalize_variance,
            n_prin_comps=n_prin_comps,
            use_approx_neighbors=use_approx_neighbors,
            knn_dist_metric=knn_dist_metric,
            get_doublet_neighbor_parents=get_doublet_neighbor_parents,
            threshold=threshold,
            random_state=random_state,
            verbose=verbose,
        )

        return {'obs': ad_obs.obs, 'uns': ad_obs.uns['scrublet']}

    if batch_key is not None:
        if batch_key not in adata.obs.keys():
            raise ValueError(
                '`batch_key` must be a column of .obs in the input annData object.'
            )

        # Run Scrublet independently on batches and return just the
        # scrublet-relevant parts of the objects to add to the input object
        batches = np.unique(adata.obs[batch_key])
        scrubbed = [
            _run_scrublet(
                adata_obs[
                    adata_obs.obs[batch_key] == batch,
                ],
                adata_sim,
            )
            for batch in batches
        ]
        scrubbed_obs = pd.concat([scrub['obs'] for scrub in scrubbed])

        # Now reset the obs to get the scrublet scores
        adata.obs = scrubbed_obs.loc[adata.obs_names.values]

        # Save the .uns from each batch separately
        adata.uns['scrublet'] = {}
        adata.uns['scrublet']['batches'] = dict(
            zip(batches, [scrub['uns'] for scrub in scrubbed])
        )

        # Record that we've done batched analysis, so e.g. the plotting
        # function knows what to do.
        adata.uns['scrublet']['batched_by'] = batch_key

    else:
        scrubbed = _run_scrublet(adata_obs, adata_sim)

        # Copy outcomes to input object from our processed version
        adata.obs['doublet_score'] = scrubbed['obs']['doublet_score']
        adata.obs['predicted_doublet'] = scrubbed['obs']['predicted_doublet']
        adata.uns['scrublet'] = scrubbed['uns']

    logg.info('    Scrublet finished', time=start)

    if copy:
        return adata
    else:
        return None
def _scrublet_call_doublets(
    adata_obs: AnnData,
    adata_sim: AnnData,
    n_neighbors: Optional[int] = None,
    expected_doublet_rate: float = 0.05,
    stdev_doublet_rate: float = 0.02,
    mean_center: bool = True,
    normalize_variance: bool = True,
    n_prin_comps: int = 30,
    use_approx_neighbors: bool = True,
    knn_dist_metric: str = 'euclidean',
    get_doublet_neighbor_parents: bool = False,
    threshold: Optional[float] = None,
    random_state: int = 0,
    verbose: bool = True,
) -> AnnData:
    """\
    Core function for predicting doublets using Scrublet [Wolock19]_.

    Predict cell doublets using a nearest-neighbor classifier of observed
    transcriptomes and simulated doublets. This is a wrapper around the core
    functions of `Scrublet <https://github.com/swolock/scrublet>`__ to allow
    for flexibility in applying Scanpy filtering operations upstream. Unless
    you know what you're doing you should use the main scrublet() function.

    .. note::
        More information and bug reports `here
        <https://github.com/swolock/scrublet>`__.

    Parameters
    ----------
    adata_obs
        The annotated data matrix of shape ``n_obs`` × ``n_vars``. Rows
        correspond to cells and columns to genes. Should be normalised with
        scanpy.pp.normalize_total() and filtered to include only highly
        variable genes.
    adata_sim
        Anndata object generated by
        sc.external.pp.scrublet_simulate_doublets(), with same number of vars
        as adata_obs. This should have been built from adata_obs after
        filtering genes and cells and selecting highly-variable genes.
    n_neighbors
        Number of neighbors used to construct the KNN graph of observed
        transcriptomes and simulated doublets. If ``None``, this is
        automatically set to ``np.round(0.5 * np.sqrt(n_obs))``.
    expected_doublet_rate
        The estimated doublet rate for the experiment.
    stdev_doublet_rate
        Uncertainty in the expected doublet rate.
    mean_center
        If True, center the data such that each gene has a mean of 0.
        `sklearn.decomposition.PCA` will be used for dimensionality
        reduction.
    normalize_variance
        If True, normalize the data such that each gene has a variance of 1.
        `sklearn.decomposition.TruncatedSVD` will be used for dimensionality
        reduction, unless `mean_center` is True.
    n_prin_comps
        Number of principal components used to embed the transcriptomes prior
        to k-nearest-neighbor graph construction.
    use_approx_neighbors
        Use approximate nearest neighbor method (annoy) for the KNN
        classifier.
    knn_dist_metric
        Distance metric used when finding nearest neighbors. For list of
        valid values, see the documentation for annoy (if `use_approx_neighbors`
        is True) or sklearn.neighbors.NearestNeighbors (if `use_approx_neighbors`
        is False).
    get_doublet_neighbor_parents
        If True, return the parent transcriptomes that generated the
        doublet neighbors of each observed transcriptome. This information can
        be used to infer the cell states that generated a given
        doublet state.
    threshold
        Doublet score threshold for calling a transcriptome a doublet. If
        `None`, this is set automatically by looking for the minimum between
        the two modes of the `doublet_scores_sim_` histogram. It is best
        practice to check the threshold visually using the
        `doublet_scores_sim_` histogram and/or based on co-localization of
        predicted doublets in a 2-D embedding.
    random_state
        Initial state for doublet simulation and nearest neighbors.
    verbose
        If True, print progress updates.

    Returns
    -------
    adata_obs : anndata.AnnData
        ``adata_obs`` with the following fields added:

        ``.obs['doublet_score']``
            Doublet scores for each observed transcriptome
        ``.obs['predicted_doublet']``
            Boolean indicating predicted doublet status
        ``.uns['scrublet']['doublet_scores_sim']``
            Doublet scores for each simulated doublet transcriptome
        ``.uns['scrublet']['doublet_parents']``
            Pairs of ``.obs_names`` used to generate each simulated doublet transcriptome
        ``.uns['scrublet']['parameters']``
            Dictionary of Scrublet parameters
    """
    # The scrublet package is an optional dependency; fail early with a
    # helpful message if it's missing.
    try:
        import scrublet as sl
    except ImportError:
        raise ImportError(
            'Please install scrublet: `pip install scrublet` or `conda install scrublet`.'
        )

    # Estimate n_neighbors if not provided, and create scrublet object.
    if n_neighbors is None:
        n_neighbors = int(round(0.5 * np.sqrt(adata_obs.shape[0])))

    # Note: Scrublet() will sparse adata_obs.X if it's not already, but this
    # matrix won't get used if we pre-set the normalised slots.
    scrub = sl.Scrublet(
        adata_obs.X,
        n_neighbors=n_neighbors,
        expected_doublet_rate=expected_doublet_rate,
        stdev_doublet_rate=stdev_doublet_rate,
        random_state=random_state,
    )

    # Ensure normalised matrix sparseness as Scrublet does
    # https://github.com/swolock/scrublet/blob/67f8ecbad14e8e1aa9c89b43dac6638cebe38640/src/scrublet/scrublet.py#L100
    scrub._E_obs_norm = sparse.csc_matrix(adata_obs.X)
    scrub._E_sim_norm = sparse.csc_matrix(adata_sim.X)

    scrub.doublet_parents_ = adata_sim.obsm['doublet_parents']

    # Call scrublet-specific preprocessing where specified
    if mean_center and normalize_variance:
        sl.pipeline_zscore(scrub)
    elif mean_center:
        sl.pipeline_mean_center(scrub)
    elif normalize_variance:
        sl.pipeline_normalize_variance(scrub)

    # Do PCA. Scrublet fits to the observed matrix and decomposes both observed
    # and simulated based on that fit, so we'll just let it do its thing rather
    # than trying to use Scanpy's PCA wrapper of the same functions.
    if mean_center:
        logg.info('Embedding transcriptomes using PCA...')
        sl.pipeline_pca(
            scrub, n_prin_comps=n_prin_comps, random_state=scrub.random_state
        )
    else:
        logg.info('Embedding transcriptomes using Truncated SVD...')
        sl.pipeline_truncated_svd(
            scrub, n_prin_comps=n_prin_comps, random_state=scrub.random_state
        )

    # Score the doublets
    scrub.calculate_doublet_scores(
        use_approx_neighbors=use_approx_neighbors,
        distance_metric=knn_dist_metric,
        get_doublet_neighbor_parents=get_doublet_neighbor_parents,
    )

    # Actually call doublets
    scrub.call_doublets(threshold=threshold, verbose=verbose)

    # Store results in AnnData for return
    adata_obs.obs['doublet_score'] = scrub.doublet_scores_obs_

    # Store doublet Scrublet metadata
    adata_obs.uns['scrublet'] = {
        'doublet_scores_sim': scrub.doublet_scores_sim_,
        'doublet_parents': adata_sim.obsm['doublet_parents'],
        'parameters': {
            'expected_doublet_rate': expected_doublet_rate,
            'sim_doublet_ratio': (
                adata_sim.uns.get('scrublet', {})
                .get('parameters', {})
                .get('sim_doublet_ratio', None)
            ),
            'n_neighbors': n_neighbors,
            'random_state': random_state,
        },
    }

    # If threshold hasn't been located successfully then we couldn't make any
    # predictions. The user will get a warning from Scrublet, but we need to
    # set the boolean so that any downstream filtering on
    # predicted_doublet=False doesn't incorrectly filter cells. The user can
    # still use this object to generate the plot and derive a threshold
    # manually.
    if hasattr(scrub, 'threshold_'):
        adata_obs.uns['scrublet']['threshold'] = scrub.threshold_
        adata_obs.obs['predicted_doublet'] = scrub.predicted_doublets_
    else:
        adata_obs.obs['predicted_doublet'] = False

    if get_doublet_neighbor_parents:
        adata_obs.uns['scrublet'][
            'doublet_neighbor_parents'
        ] = scrub.doublet_neighbor_parents_

    return adata_obs
def scrublet_simulate_doublets(
    adata: AnnData,
    layer=None,
    sim_doublet_ratio: float = 2.0,
    synthetic_doublet_umi_subsampling: float = 1.0,
    random_seed: int = 0,
) -> AnnData:
    """\
    Simulate doublets by adding the counts of random observed transcriptome pairs.

    Parameters
    ----------
    adata
        The annotated data matrix of shape ``n_obs`` × ``n_vars``. Rows
        correspond to cells and columns to genes. Genes should have been
        filtered for expression and variability, and the object should contain
        raw expression of the same dimensions.
    layer
        Layer of adata where raw values are stored, or 'X' if values are in .X.
    sim_doublet_ratio
        Number of doublets to simulate relative to the number of observed
        transcriptomes. If `None`, self.sim_doublet_ratio is used.
    synthetic_doublet_umi_subsampling
        Rate for sampling UMIs when creating synthetic doublets. If 1.0,
        each doublet is created by simply adding the UMIs from two randomly
        sampled observed transcriptomes. For values less than 1, the
        UMI counts are added and then randomly sampled at the specified
        rate.
    random_seed
        Seed for Scrublet's random sampling of transcriptome pairs, making
        the simulation reproducible.

    Returns
    -------
    adata : anndata.AnnData with simulated doublets in .X
        Adds fields to ``adata``:

        ``.obsm['doublet_parents']``
            Pairs of ``.obs_names`` used to generate each simulated doublet transcriptome
        ``.uns['scrublet']['parameters']``
            Dictionary of Scrublet parameters

    See also
    --------
    :func:`~scanpy.external.pp.scrublet`: Main way of running Scrublet, runs
        preprocessing, doublet simulation (this function) and calling.
    :func:`~scanpy.external.pl.scrublet_score_distribution`: Plot histogram of doublet
        scores for observed transcriptomes and simulated doublets.
    """
    # The scrublet package is an optional dependency; fail early with a
    # helpful message if it's missing.
    try:
        import scrublet as sl
    except ImportError:
        raise ImportError(
            'Please install scrublet: `pip install scrublet` or `conda install scrublet`.'
        )

    X = _get_obs_rep(adata, layer=layer)

    # Pass the seed through so the simulation is reproducible; previously
    # `random_seed` was accepted but silently ignored.
    scrub = sl.Scrublet(X, random_state=random_seed)

    scrub.simulate_doublets(
        sim_doublet_ratio=sim_doublet_ratio,
        synthetic_doublet_umi_subsampling=synthetic_doublet_umi_subsampling,
    )

    adata_sim = AnnData(scrub._E_sim)
    adata_sim.obs['n_counts'] = scrub._total_counts_sim
    adata_sim.obsm['doublet_parents'] = scrub.doublet_parents_
    adata_sim.uns['scrublet'] = {'parameters': {'sim_doublet_ratio': sim_doublet_ratio}}
    return adata_sim
| bsd-3-clause | a438944fe646a8b068603e22a5eb8003 | 37.168739 | 117 | 0.65238 | 3.871194 | false | false | false | false |
foauth/foauth.org | services/reddit.py | 2 | 1871 | from oauthlib.oauth2.draft25 import utils
import foauth.providers
class Reddit(foauth.providers.OAuth2):
    # General info about the provider
    provider_url = 'http://www.reddit.com/'
    docs_url = 'http://www.reddit.com/dev/api'
    category = 'News'

    # URLs to interact with the API
    authorize_url = 'https://ssl.reddit.com/api/v1/authorize'
    access_token_url = 'https://ssl.reddit.com/api/v1/access_token'
    api_domain = 'oauth.reddit.com'

    available_permissions = [
        (None, 'access your identity information'),
        ('read', 'read information about articles'),
        ('vote', 'vote on articles'),
        ('submit', 'submit new articles and comments'),
        ('edit', 'edit your posts and comments'),
        ('mysubreddits', 'manage your subreddits'),
        ('subscribe', 'manage your subscriptions'),
        ('modlog', 'view your moderation logs'),
        ('modposts', 'moderate posts in your subreddits'),
        ('modflair', 'manage and assign flair in your subreddits'),
        ('modconfig', 'manage the configuration of your subreddits'),
        ('privatemessages', 'read and write to your private messages'),
    ]

    def __init__(self, *args, **kwargs):
        super(Reddit, self).__init__(*args, **kwargs)
        # Reddit's token endpoint requires HTTP Basic auth with the client
        # credentials.
        self.auth = (self.client_id, self.client_secret)

    def get_authorize_params(self, redirect_uri, scopes):
        # Always request account info, in order to get the user ID. Work on a
        # copy so the caller's scope list isn't mutated, and avoid adding a
        # duplicate 'identity' entry when it was already requested.
        if 'identity' not in scopes:
            scopes = scopes + ['identity']
        params = super(Reddit, self).get_authorize_params(redirect_uri, scopes)
        # Make sure we get refresh tokens
        params['duration'] = 'permanent'
        return params

    def get_scope_string(self, scopes):
        # Reddit expects comma-separated scopes rather than space-separated.
        return ','.join(scopes)

    def get_user_id(self, key):
        r = self.api(key, self.api_domain, u'/api/v1/me')
        return unicode(r.json()[u'id'])
| bsd-3-clause | 5772034fa398f1f266b5e1c254ab9b9e | 37.979167 | 79 | 0.630144 | 3.810591 | false | false | false | false |
foauth/foauth.org | services/elance.py | 1 | 1088 | import foauth.providers
from foauth import OAuthDenied
class Elance(foauth.providers.OAuth2):
    # General info about the provider
    provider_url = 'https://www.elance.com/'
    docs_url = 'https://www.elance.com/q/api2'
    category = 'Career'

    # URLs to interact with the API
    authorize_url = 'https://api.elance.com/api2/oauth/authorize'
    access_token_url = 'https://api.elance.com/api2/oauth/token'
    api_domain = 'api.elance.com'

    available_permissions = [
        (None, 'access and manage your Elance account'),
    ]

    # Send the bearer token in the request URI rather than a header.
    bearer_type = foauth.providers.BEARER_URI

    def parse_token(self, content):
        # Elance wraps the standard token payload in a 'data' envelope.
        return super(Elance, self).parse_token(content)[u'data']

    def callback(self, data, *args, **kwargs):
        # Surface a user-denied authorization as an OAuthDenied error instead
        # of attempting the token exchange.
        if data.get('error') == 'access_denied':
            raise OAuthDenied('Denied access to Elance')

        return super(Elance, self).callback(data, *args, **kwargs)

    def get_user_id(self, key):
        # Fetch the authenticated user's profile to obtain a stable ID.
        r = self.api(key, self.api_domain, u'/api2/profiles/my')
        return unicode(r.json()[u'data'][u'providerProfile'][u'userId'])
| bsd-3-clause | 5490f39ea2137a5c2b81a26ac88fdcd3 | 31.969697 | 72 | 0.652574 | 3.337423 | false | false | false | false |
foauth/foauth.org | services/familysearch.py | 1 | 1342 | import requests
import foauth.providers
class FamilySearch(foauth.providers.OAuth2):
    # General info about the provider
    provider_url = 'http://www.familysearch.com/'
    docs_url = 'https://familysearch.org/developers/docs/api/resources'
    category = 'Genealogy'
    favicon_url = 'https://familysearch.org/favicon.ico'

    # URLs to interact with the API
    # NOTE(review): these point at FamilySearch's sandbox environment, not
    # production — confirm whether that is intentional.
    authorize_url = 'https://sandbox.familysearch.org/cis-web/oauth2/v3/authorization'
    access_token_url = 'https://sandbox.familysearch.org/cis-web/oauth2/v3/token'
    api_domain = 'sandbox.familysearch.org'

    available_permissions = [
        (None, 'read and write to your family tree'),
    ]

    def get_access_token_response(self, redirect_uri, data):
        # Sending the (basically empty) client secret will fail,
        # so this must send its own custom request.
        return requests.post(self.get_access_token_url(), {
            'client_id': self.client_id,
            'grant_type': 'authorization_code',
            'code': data['code'],
            'redirect_uri': redirect_uri,
        }, verify=self.verify, auth=self.auth)

    def get_user_id(self, key):
        # Fetch the current user record; the API returns a list of users.
        r = self.api(key, self.api_domain, u'/platform/users/current',
                     headers={'Accept': 'application/json'})
        return unicode(r.json()[u'users'][0][u'id'])
| bsd-3-clause | c0967ea508d80c8d2446e7f339170aff | 37.342857 | 86 | 0.643815 | 3.597855 | false | false | false | false |
foauth/foauth.org | foauth/providers.py | 1 | 9528 | import json
from os import urandom
import urllib
import urlparse
import flask
import requests
from requests_oauthlib import OAuth1 as OAuth1Manager
from oauthlib.oauth1.rfc5849 import SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER
from oauthlib.oauth2.draft25 import tokens
from werkzeug.urls import url_decode
from foauth import OAuthError
# Supported OAuth2 bearer-token placements; consumed by the Bearer class.
BEARER = 'BEARER'
BEARER_HEADER = 'HEADER'
BEARER_BODY = 'BODY'
BEARER_URI = 'URI'
BEARER_TYPES = (BEARER_HEADER, BEARER_BODY, BEARER_URI)
class Bearer(object):
    """Requests auth hook that injects an OAuth2 bearer token into a request.

    The token can be placed in the Authorization header, the request body,
    the URI, or via a custom callable taking (token, request).
    """

    def __init__(self, token, bearer_type=BEARER_HEADER):
        self.token = token

        if not (bearer_type in BEARER_TYPES or callable(bearer_type)):
            raise ValueError('Unknown bearer type %s' % bearer_type)
        self.bearer_type = bearer_type

    def __call__(self, r):
        placement = self.bearer_type
        # A custom callable handles the request itself; string constants
        # are never callable, so checking this first is equivalent.
        if callable(placement):
            return placement(self.token, r)

        if placement == BEARER_HEADER:
            r.headers = tokens.prepare_bearer_headers(self.token, r.headers)
        elif placement == BEARER_BODY:
            r.data = tokens.prepare_bearer_body(self.token, r.data)
        elif placement == BEARER_URI:
            r.url = tokens.prepare_bearer_uri(self.token, r.url)

        return r
class OAuthMeta(type):
    """Metaclass that derives default attributes for provider subclasses:
    ``alias``, ``api_domains``, ``favicon_url`` and ``name`` are filled in
    from other attributes when a subclass doesn't define them explicitly."""

    def __init__(cls, name, bases, attrs):
        # Default alias is the lowercased class name.
        if 'alias' not in attrs:
            cls.alias = cls.__name__.lower()

        # Allow a single api_domain to stand in for the api_domains list.
        if 'api_domain' in attrs and 'api_domains' not in attrs:
            cls.api_domains = [cls.api_domain]

        if 'provider_url' in attrs and 'favicon_url' not in attrs:
            # Use a favicon service when no favicon is supplied
            domain = urlparse.urlparse(cls.provider_url).netloc
            cls.favicon_url = 'https://www.google.com/s2/favicons?domain=%s' % domain

        # Default display name is the class name itself.
        if 'name' not in attrs:
            cls.name = cls.__name__
class OAuth(object):
    """Base class for OAuth providers; defaults shared by OAuth1 and OAuth2."""

    __metaclass__ = OAuthMeta

    # Transport/signature defaults; subclasses override where needed.
    https = True
    verify = True
    signature_method = SIGNATURE_HMAC
    signature_type = SIGNATURE_TYPE_AUTH_HEADER
    permissions_widget = 'checkbox'
    description = ''
    disclaimer = ''

    def __init__(self, client_id, client_secret):
        self.client_id = client_id
        self.client_secret = client_secret

    def get_request_token_url(self):
        return self.request_token_url

    def get_access_token_url(self):
        return self.access_token_url

    def get_scope_string(self, scopes):
        """Serialize a scope list; the base implementation requests none."""
        return ''

    def _prepare_authorize_url(self, redirect_uri, scopes):
        """Build the provider's authorization URL for the given scopes.

        Shared by get_authorize_url() and get_login_uri(), which previously
        duplicated this logic.
        """
        params = self.get_authorize_params(redirect_uri=redirect_uri,
                                           scopes=scopes)
        req = requests.Request(url=self.authorize_url, params=params)
        return req.prepare().url

    def get_authorize_url(self, redirect_uri, scopes):
        return self._prepare_authorize_url(redirect_uri, scopes)

    def get_login_uri(self, redirect_uri):
        # Login-only flow requests no scopes.
        return self._prepare_authorize_url(redirect_uri, [])

    # The remainder of the API must be implemented for each flavor of OAuth

    def callback(self, data, redirect_uri):
        """
        Receives the full callback from the service and returns a 2-tuple
        containing the user token and user secret (if applicable).
        """
        raise NotImplementedError("callback() must be defined in a subclass")

    def api(self, key, domain, path, method='GET', params=None, data=None):
        """
        Passes along an API request to the service and returns the response.
        """
        raise NotImplementedError("api() must be defined in a subclass")
class OAuth1(OAuth):
    """OAuth 1.0a flow: request token -> user authorization -> access token."""

    # Whether the provider echoes the oauth_token back on the callback URL.
    returns_token = True

    def parse_token(self, content):
        # Token responses are form-encoded; normalize to a dict with the
        # keys the rest of the app expects.
        content = url_decode(content)
        return {
            'access_token': content['oauth_token'],
            'secret': content['oauth_token_secret'],
        }

    def get_request_token_params(self, redirect_uri, scopes):
        # Extra params for the request-token call; subclasses may override.
        return {}

    def get_request_token_response(self, redirect_uri, scopes):
        auth = OAuth1Manager(client_key=self.client_id,
                             client_secret=self.client_secret,
                             callback_uri=redirect_uri,
                             signature_method=self.signature_method,
                             signature_type=self.signature_type)
        return requests.post(self.get_request_token_url(), auth=auth,
                             params=self.get_request_token_params(redirect_uri, scopes),
                             verify=self.verify)

    def get_authorize_params(self, redirect_uri, scopes):
        resp = self.get_request_token_response(redirect_uri, scopes)
        try:
            data = self.parse_token(resp.content)
        except Exception:
            raise OAuthError('Unable to parse access token')
        # Stash the temporary secret in the session; callback() needs it to
        # sign the access-token exchange.
        flask.session['%s_temp_secret' % self.alias] = data['secret']
        if not self.returns_token:
            # Provider won't echo the token back, so carry it on the
            # redirect URI ourselves.
            redirect_uri += ('?oauth_token=%s' % data['access_token'])
        return {
            'oauth_token': data['access_token'],
            'oauth_callback': redirect_uri,
        }

    def get_access_token_response(self, token, secret, verifier=None):
        auth = OAuth1Manager(client_key=self.client_id,
                             client_secret=self.client_secret,
                             resource_owner_key=token,
                             resource_owner_secret=secret,
                             verifier=verifier,
                             signature_method=self.signature_method,
                             signature_type=self.signature_type)
        return requests.post(self.get_access_token_url(), auth=auth,
                             verify=self.verify)

    def callback(self, data, redirect_uri):
        token = data['oauth_token']
        verifier = data.get('oauth_verifier', None)
        # Retrieve (and discard) the temp secret saved in
        # get_authorize_params().
        secret = flask.session['%s_temp_secret' % self.alias]
        del flask.session['%s_temp_secret' % self.alias]
        resp = self.get_access_token_response(token, secret, verifier)
        try:
            return self.parse_token(resp.content)
        except Exception:
            raise OAuthError('Unable to parse access token')

    def api(self, key, domain, path, method='GET', params=None, data=None,
            headers=None):
        """Proxy a signed API request for the stored user key."""
        protocol = self.https and 'https' or 'http'
        url = '%s://%s%s' % (protocol, domain, path)
        auth = OAuth1Manager(client_key=self.client_id,
                             client_secret=self.client_secret,
                             resource_owner_key=key.access_token,
                             resource_owner_secret=key.secret,
                             signature_method=self.signature_method,
                             signature_type=self.signature_type)
        return requests.request(method, url, auth=auth, params=params or {},
                                data=data or {}, headers=headers or {},
                                verify=self.verify, stream=True)
class OAuth2(OAuth):
    """Implementation of the OAuth 2 authorization-code flow.

    Subclasses typically set ``token_type``/``bearer_type`` and the endpoint
    URLs; this class handles the authorize redirect, the CSRF ``state``
    check, the code-for-token exchange, and token refresh.
    """
    token_type = BEARER
    bearer_type = BEARER_HEADER
    supports_state = True
    auth = None

    def parse_token(self, content):
        """Parse a JSON access-token response body into a dict."""
        return json.loads(content)

    def get_scope_string(self, scopes):
        """Join the requested scopes in the format the provider expects."""
        return ' '.join(scopes)

    def get_authorize_params(self, redirect_uri, scopes):
        """Build the query parameters for the provider's authorize URL.

        A random ``state`` value is stored in the session for CSRF
        protection; providers that do not echo ``state`` back
        (``supports_state = False``) get it smuggled through the redirect URI.
        """
        state = ''.join('%02x' % ord(x) for x in urandom(16))
        flask.session['%s_state' % self.alias] = state
        if not self.supports_state:
            redirect_uri += ('?state=%s' % state)
        params = {
            'client_id': self.client_id,
            'response_type': 'code',
            'redirect_uri': redirect_uri,
            'state': state,
        }
        if any(scopes):
            params['scope'] = self.get_scope_string(scopes)
        return params

    def get_access_token_response(self, redirect_uri, data):
        """Exchange the authorization code for an access token."""
        return requests.post(self.get_access_token_url(), {
            'client_id': self.client_id,
            'client_secret': self.client_secret,
            'grant_type': 'authorization_code',
            'code': data['code'],
            'redirect_uri': redirect_uri
        }, verify=self.verify, auth=self.auth)

    def callback(self, data, redirect_uri):
        """Validate the ``state`` value and complete the token exchange."""
        state = flask.session['%s_state' % self.alias]
        # NOTE(review): if the provider omits 'state' entirely, no check is
        # performed at all -- consider rejecting such callbacks outright.
        if 'state' in data and state != data['state']:
            flask.abort(403)
        del flask.session['%s_state' % self.alias]
        if not self.supports_state:
            redirect_uri += ('?state=%s' % state)
        resp = self.get_access_token_response(redirect_uri, data)
        return self.parse_token(resp.content)

    def refresh_token(self, token):
        """Obtain a fresh access token using a refresh token."""
        resp = requests.post(self.get_access_token_url(), {
            'client_id': self.client_id,
            'client_secret': self.client_secret,
            'grant_type': 'refresh_token',
            'refresh_token': token
        }, verify=self.verify, auth=self.auth)
        return self.parse_token(resp.content)

    def api(self, key, domain, path, method='GET', params=None, data=None,
            headers=None):
        """Issue an authenticated API request on behalf of ``key``'s user."""
        protocol = 'https' if self.https else 'http'
        url = '%s://%s%s' % (protocol, domain, path)
        # Fix: ``auth`` was previously only bound when token_type == BEARER,
        # raising UnboundLocalError for any other token type. Default to no
        # auth instead.
        auth = None
        if self.token_type == BEARER:
            auth = Bearer(key.access_token, bearer_type=self.bearer_type)
        return requests.request(method, url, auth=auth, params=params or {},
                                data=data or {}, headers=headers or {},
                                verify=self.verify, stream=True)
| bsd-3-clause | 6e6b8a86070170a2220404f13bc586e7 | 37.112 | 88 | 0.583963 | 3.968347 | false | false | false | false |
foauth/foauth.org | models.py | 2 | 3290 | import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext import login
import config
db = SQLAlchemy(config.app)
class User(db.Model):
    """An account holder, identified by email, with a salted password hash."""

    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String, unique=True)
    password = db.Column(db.String)

    def __init__(self, email, password):
        self.email = email
        self.set_password(password)

    def __repr__(self):
        return '<User: %s>' % self.email

    # -- password handling -------------------------------------------------

    def hash_password(self, password):
        """Return a salted hash of the given plaintext password."""
        return generate_password_hash(password)

    def set_password(self, password):
        """Replace the stored hash with one derived from ``password``."""
        self.password = self.hash_password(password)

    def check_password(self, password):
        """Return True if ``password`` matches the stored hash."""
        return check_password_hash(self.password, password)

    # -- flask-login interface ---------------------------------------------

    def is_authenticated(self):
        return self.id is not None

    def is_active(self):
        return self.is_authenticated()

    def is_anonymous(self):
        return False

    def get_id(self):
        return unicode(self.id)

    # -- convenience ---------------------------------------------------------

    def key_for_service(self, alias):
        """Return this user's stored Key for the given service alias, if any."""
        return self.keys.filter_by(service_alias=alias).first()
class Key(db.Model):
    """Stored OAuth credentials for one (user, service) pair."""

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    service_alias = db.Column(db.String)
    access_token = db.Column(db.String)
    secret = db.Column(db.String)
    expires = db.Column(db.DateTime)
    refresh_token = db.Column(db.String)
    service_user_id = db.Column(db.String)

    user = db.relationship('User', backref=db.backref('keys', lazy='dynamic'))

    @property
    def service(self):
        """The service object this key belongs to, looked up by alias.

        Raises AttributeError when no alias is set or the alias is unknown.
        """
        if not self.service_alias:
            raise AttributeError('No service specified.')
        try:
            return config.alias_map[self.service_alias]
        except KeyError:
            raise AttributeError('%r is not a valid service.' % self.service_alias)

    def update(self, data):
        """Copy token fields out of a parsed token-response dict."""
        self.access_token = data['access_token']
        self.secret = data.get('secret', None)
        lifetime = data.get('expires_in')
        if lifetime:
            # Convert the relative lifetime into an absolute expiry datetime.
            self.expires = datetime.datetime.now() + datetime.timedelta(
                seconds=int(lifetime))
        else:
            self.expires = None
        self.refresh_token = data.get('refresh_token', None)
        self.service_user_id = data.get('service_user_id', None)

    def is_expired(self):
        """Truthy if the key has already expired."""
        return self.will_expire(days=0)

    def will_expire(self, days=7):
        """Truthy if the key will expire within ``days`` days of now."""
        cutoff = datetime.datetime.now() + datetime.timedelta(days=days)
        return self.expires and self.expires < cutoff

    def fill_user_id(self):
        """Best-effort lookup of the user's id on the remote service."""
        try:
            self.service_user_id = self.service.get_user_id(self)
        except Exception:
            # Normally `except Exception` would be a tremendously terrible
            # idea, but in this case a lot of things can go wrong, and the
            # end result is simply that the key couldn't be retrieved. In
            # that case, we can still handle it gracefully and return None.
            self.service_user_id = None
# Wire flask-login into the application: session cookies are resolved back
# into User rows via the loader below.
login_manager = login.LoginManager()
login_manager.setup_app(config.app)


@login_manager.user_loader
def load_user(user_id):
    # flask-login stores the stringified primary key in the session; look the
    # User back up (returns None if the row has since been deleted).
    return User.query.get(user_id)
| bsd-3-clause | bee96d05169a0fc314f99e3f774b909d | 30.333333 | 83 | 0.643769 | 3.755708 | false | false | false | false |
wagtail/wagtail | wagtail/migrations/0048_add_default_workflows.py | 4 | 3725 | # -*- coding: utf-8 -*-
from django.db import migrations
from django.db.models import Count, Q
from wagtail.models import Page as RealPage
def ancestor_of_q(page):
    """Return a Q matching ``page`` and all of its ancestors.

    Treebeard materialised paths are fixed-width segments of ``steplen``
    characters, so every ancestor's path is a prefix of ``page.path`` whose
    length is a multiple of ``steplen``. Starting the range at ``steplen``
    (rather than slicing off a leading empty prefix with ``[1:]``) yields
    exactly those prefixes, including the page's own full path.
    """
    paths = [
        page.path[:pos]
        for pos in range(page.steplen, len(page.path) + 1, page.steplen)
    ]
    return Q(path__in=paths)
def create_default_workflows(apps, schema_editor):
    """Data migration: recreate the old publish-permission based moderation
    setup in the new workflow system by creating equivalent workflows.

    For every group publish permission, a GroupApprovalTask covering exactly
    the groups with publish rights over that page (directly or via ancestors)
    is found or created, wrapped in a single-task Workflow, and linked to the
    page through a WorkflowPage.
    """
    # Get historical models from the migration state, not the live code.
    ContentType = apps.get_model("contenttypes.ContentType")
    Workflow = apps.get_model("wagtailcore.Workflow")
    GroupApprovalTask = apps.get_model("wagtailcore.GroupApprovalTask")
    GroupPagePermission = apps.get_model("wagtailcore.GroupPagePermission")
    WorkflowPage = apps.get_model("wagtailcore.WorkflowPage")
    WorkflowTask = apps.get_model("wagtailcore.WorkflowTask")
    Page = apps.get_model("wagtailcore.Page")
    Group = apps.get_model("auth.Group")

    # Get this from real page model just in case it has been overridden
    # (historical models don't carry treebeard's steplen attribute).
    Page.steplen = RealPage.steplen

    # Create content type for GroupApprovalTask model
    group_approval_content_type, __ = ContentType.objects.get_or_create(
        model="groupapprovaltask", app_label="wagtailcore"
    )
    publish_permissions = GroupPagePermission.objects.filter(permission_type="publish")

    for permission in publish_permissions:
        # find groups with publish permission over this page or its ancestors
        # (and therefore this page by descent)
        page = permission.page
        # Re-fetch as the historical Page model so ancestor_of_q can use it.
        page = Page.objects.get(pk=page.pk)
        ancestors = Page.objects.filter(ancestor_of_q(page))
        ancestor_permissions = publish_permissions.filter(page__in=ancestors)
        groups = Group.objects.filter(
            Q(page_permissions__in=ancestor_permissions)
            | Q(page_permissions__pk=permission.pk)
        ).distinct()

        # get a GroupApprovalTask with groups matching these publish permission
        # groups (and no others) -- the count annotation enforces "no others"
        task = (
            GroupApprovalTask.objects.filter(groups__id__in=groups.all())
            .annotate(count=Count("groups"))
            .filter(count=groups.count())
            .filter(active=True)
            .first()
        )
        if not task:
            # if no such task exists, create it
            group_names = " ".join([group.name for group in groups])
            task = GroupApprovalTask.objects.create(
                name=group_names + " approval",
                content_type=group_approval_content_type,
                active=True,
            )
            task.groups.set(groups)

        # get a Workflow containing only this task if it exists,
        # otherwise create it
        workflow = (
            Workflow.objects.annotate(task_number=Count("workflow_tasks"))
            .filter(task_number=1)
            .filter(workflow_tasks__task=task)
            .filter(active=True)
            .first()
        )
        if not workflow:
            workflow = Workflow.objects.create(name=task.name, active=True)
            WorkflowTask.objects.create(
                workflow=workflow,
                task=task,
                sort_order=0,
            )

        # if the workflow is not linked by a WorkflowPage to the permission's
        # linked page, link it by creating a new WorkflowPage now
        if not WorkflowPage.objects.filter(workflow=workflow, page=page).exists():
            WorkflowPage.objects.create(workflow=workflow, page=page)
class Migration(migrations.Migration):
    # Requires the migration that introduced the Workflow/Task models.
    dependencies = [
        ("wagtailcore", "0047_add_workflow_models"),
    ]

    operations = [
        # Data-only migration; reversing is a no-op (created workflows are
        # left in place rather than deleted).
        migrations.RunPython(create_default_workflows, migrations.RunPython.noop),
    ]
| bsd-3-clause | fb45335b85b8b927bb4b43564b3623a8 | 37.802083 | 133 | 0.648859 | 4.242597 | false | false | false | false |
wagtail/wagtail | wagtail/admin/api/views.py | 4 | 5160 | from collections import OrderedDict
from django.conf import settings
from django.http import Http404
from django.urls import path
from rest_framework.authentication import SessionAuthentication
from rest_framework.response import Response
from wagtail.api.v2.views import PagesAPIViewSet
from wagtail.models import Page
from .actions.convert_alias import ConvertAliasPageAPIAction
from .actions.copy import CopyPageAPIAction
from .actions.copy_for_translation import CopyForTranslationAPIAction
from .actions.create_alias import CreatePageAliasAPIAction
from .actions.delete import DeletePageAPIAction
from .actions.move import MovePageAPIAction
from .actions.publish import PublishPageAPIAction
from .actions.revert_to_page_revision import RevertToPageRevisionAPIAction
from .actions.unpublish import UnpublishPageAPIAction
from .filters import ForExplorerFilter, HasChildrenFilter
from .serializers import AdminPageSerializer
class PagesAdminAPIViewSet(PagesAPIViewSet):
    """Admin flavour of the pages API: session-authenticated, exposes extra
    metadata fields, admin-only filters, and per-page POST actions."""

    base_serializer_class = AdminPageSerializer
    authentication_classes = [SessionAuthentication]

    # Map of action name (as it appears in the URL) to the action class that
    # handles it in action_view.
    actions = {
        "convert_alias": ConvertAliasPageAPIAction,
        "copy": CopyPageAPIAction,
        "delete": DeletePageAPIAction,
        "publish": PublishPageAPIAction,
        "unpublish": UnpublishPageAPIAction,
        "move": MovePageAPIAction,
        "copy_for_translation": CopyForTranslationAPIAction,
        "create_alias": CreatePageAliasAPIAction,
        "revert_to_page_revision": RevertToPageRevisionAPIAction,
    }

    # Add has_children and for_explorer filters
    filter_backends = PagesAPIViewSet.filter_backends + [
        HasChildrenFilter,
        ForExplorerFilter,
    ]

    meta_fields = PagesAPIViewSet.meta_fields + [
        "latest_revision_created_at",
        "status",
        "children",
        "descendants",
        "parent",
        "ancestors",
        "translations",
    ]

    body_fields = PagesAPIViewSet.body_fields + [
        "admin_display_title",
    ]

    listing_default_fields = PagesAPIViewSet.listing_default_fields + [
        "latest_revision_created_at",
        "status",
        "children",
        "admin_display_title",
    ]

    # Allow the parent field to appear on listings
    detail_only_fields = []

    known_query_parameters = PagesAPIViewSet.known_query_parameters.union(
        ["for_explorer", "has_children"]
    )

    @classmethod
    def get_detail_default_fields(cls, model):
        fields = super().get_detail_default_fields(model)
        # "translations" is only meaningful with internationalisation enabled.
        if not getattr(settings, "WAGTAIL_I18N_ENABLED", False):
            fields.remove("translations")
        return fields

    def get_root_page(self):
        """
        Returns the page that is used when the `&child_of=root` filter is used.
        """
        return Page.get_first_root_node()

    def get_base_queryset(self):
        """
        Returns a queryset containing all pages that can be seen by this user.

        This is used as the base for get_queryset and is also used to find the
        parent pages when using the child_of and descendant_of filters as well.
        """
        return Page.objects.all()

    def get_queryset(self):
        # Hide root page
        # TODO: Add "include_root" flag
        return (
            super().get_queryset().exclude(depth=1).defer_streamfields().specific()
        )

    def get_type_info(self):
        """Summarise every page type seen while serialising this response."""
        return OrderedDict(
            (
                name,
                OrderedDict(
                    [
                        ("verbose_name", model._meta.verbose_name),
                        ("verbose_name_plural", model._meta.verbose_name_plural),
                    ]
                ),
            )
            for name, model in self.seen_types.items()
        )

    def listing_view(self, request):
        listing_response = super().listing_view(request)
        listing_response.data["__types"] = self.get_type_info()
        return listing_response

    def detail_view(self, request, pk):
        detail_response = super().detail_view(request, pk)
        detail_response.data["__types"] = self.get_type_info()
        return detail_response

    def action_view(self, request, pk, action_name):
        """Dispatch a POSTed action (publish, move, ...) against one page."""
        page = self.get_object()

        try:
            action_class = self.actions[action_name]
        except KeyError:
            raise Http404(f"unrecognised action '{action_name}'")

        action = action_class(self, request)
        serializer = action.serializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

        return action.execute(page, serializer.data)

    @classmethod
    def get_urlpatterns(cls):
        """
        This returns a list of URL patterns for the endpoint
        """
        return super().get_urlpatterns() + [
            path(
                "<int:pk>/action/<str:action_name>/",
                cls.as_view({"post": "action_view"}),
                name="action",
            ),
        ]
| bsd-3-clause | e7eb75bd99be15de4070342354e6fc47 | 31.049689 | 79 | 0.646899 | 4.271523 | false | false | false | false |
wagtail/wagtail | wagtail/images/management/commands/wagtail_update_image_renditions.py | 4 | 2118 | from django.core.management.base import BaseCommand
from wagtail.images import get_image_model
class Command(BaseCommand):
    """Command to create missing image renditions with the option to remove (purge) any existing ones."""

    help = "This command will generate all image renditions, with an option to purge existing renditions first."

    def add_arguments(self, parser):
        parser.add_argument(
            "--purge-only",
            action="store_true",
            help="Purge all image renditions without regenerating them",
        )

    def handle(self, *args, **options):
        renditions = get_image_model().get_rendition_model().objects.all()

        if len(renditions) == 0:
            self.stdout.write("No image renditions found.")
            return

        purge_only = options["purge_only"]
        # Words used in the per-rendition error and final success messages.
        verb, done = ("purge", "purged") if purge_only else ("regenerate", "regenerated")

        success_count = 0
        for rendition in renditions:
            try:
                if purge_only:
                    rendition.delete()
                else:
                    # Capture the filter and source image before deleting the
                    # rendition row, then rebuild the file from the source.
                    rendition_filter = rendition.filter
                    rendition_image = rendition.image
                    rendition.delete()
                    rendition_image.get_rendition(rendition_filter)
                success_count += 1
            except Exception:
                # Bug fix: the previous code referenced ``rendition_image``
                # here, which is unbound if ``rendition.image`` itself raised.
                # Use the rendition's pk, which is always available.
                self.stderr.write(f"Could not {verb} rendition {rendition.pk}")

        self.stdout.write(
            self.style.SUCCESS(
                f"Successfully {done} {success_count} image rendition(s)"
            )
        )
| bsd-3-clause | 5466b347e9b8ddf3adb3d7d9b39fbec5 | 36.821429 | 112 | 0.532578 | 4.983529 | false | false | false | false |
wagtail/wagtail | wagtail/test/modeladmintest/migrations/0009_relatedlink.py | 4 | 1073 | # Generated by Django 3.1.1 on 2020-10-01 18:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Schema migration: adds the RelatedLink model used by the modeladmin
    # test app.

    dependencies = [
        ("wagtailcore", "0057_page_locale_fields_notnull"),
        ("modeladmintest", "0008_solobook"),
    ]

    operations = [
        migrations.CreateModel(
            name="RelatedLink",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("title", models.CharField(max_length=255)),
                (
                    "link",
                    # Deleting the target page cascades to the RelatedLink row;
                    # related_name="+" disables the reverse accessor on Page.
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="+",
                        to="wagtailcore.page",
                    ),
                ),
            ],
        ),
    ]
| bsd-3-clause | 818f71de46f24c7201f247680cf0bde7 | 27.236842 | 68 | 0.416589 | 5.234146 | false | false | false | false |
wagtail/wagtail | wagtail/contrib/simple_translation/tests/test_forms.py | 4 | 5490 | from django.forms import CheckboxInput, HiddenInput
from django.test import TestCase, override_settings
from wagtail.contrib.simple_translation.forms import SubmitTranslationForm
from wagtail.models import Locale, Page
from wagtail.test.i18n.models import TestPage
from wagtail.test.utils import WagtailTestUtils
@override_settings(
    LANGUAGES=[
        ("en", "English"),
        ("fr", "French"),
        ("de", "German"),
    ],
    WAGTAIL_CONTENT_LANGUAGES=[
        ("en", "English"),
        ("fr", "French"),
        ("de", "German"),
    ],
)
class TestSubmitPageTranslation(WagtailTestUtils, TestCase):
    """Tests for SubmitTranslationForm behaviour across the locale tree.

    Fixture layout built in setUp:
      - homepage (en), already translated to fr and de
      - blog index (en only), child of the homepage
      - blog post (en only), child of the blog index
    """

    def setUp(self):
        self.en_locale = Locale.objects.first()
        self.fr_locale = Locale.objects.create(language_code="fr")
        self.de_locale = Locale.objects.create(language_code="de")

        self.en_homepage = Page.objects.get(depth=2)
        self.fr_homepage = self.en_homepage.copy_for_translation(self.fr_locale)
        self.de_homepage = self.en_homepage.copy_for_translation(self.de_locale)

        self.en_blog_index = TestPage(title="Blog", slug="blog")
        self.en_homepage.add_child(instance=self.en_blog_index)

        self.en_blog_post = TestPage(title="Blog post", slug="blog-post")
        self.en_blog_index.add_child(instance=self.en_blog_post)

    def test_include_subtree(self):
        # Leaf page: no subtree to include, so the checkbox is hidden.
        form = SubmitTranslationForm(instance=self.en_blog_post)
        self.assertIsInstance(form.fields["include_subtree"].widget, HiddenInput)

        # Pages with descendants get a visible checkbox with a page count.
        form = SubmitTranslationForm(instance=self.en_blog_index)
        self.assertIsInstance(form.fields["include_subtree"].widget, CheckboxInput)
        self.assertEqual(
            form.fields["include_subtree"].label, "Include subtree (1 page)"
        )

        form = SubmitTranslationForm(instance=self.en_homepage)
        self.assertEqual(
            form.fields["include_subtree"].label, "Include subtree (2 pages)"
        )

    def test_locales_queryset(self):
        # Homepage is translated to all locales.
        form = SubmitTranslationForm(instance=self.en_homepage)
        self.assertEqual(
            list(
                form.fields["locales"].queryset.values_list("language_code", flat=True)
            ),
            [],
        )

        # Blog index can be translated to `de` and `fr`.
        form = SubmitTranslationForm(instance=self.en_blog_index)
        self.assertEqual(
            list(
                form.fields["locales"].queryset.values_list("language_code", flat=True)
            ),
            ["de", "fr"],
        )

        # Blog post can be translated to `de` and `fr`.
        form = SubmitTranslationForm(instance=self.en_blog_post)
        self.assertEqual(
            list(
                form.fields["locales"].queryset.values_list("language_code", flat=True)
            ),
            ["de", "fr"],
        )

    def test_select_all(self):
        # Homepage is translated to all locales.
        form = SubmitTranslationForm(instance=self.en_homepage)
        self.assertIsInstance(form.fields["select_all"].widget, HiddenInput)

        # Blog post can be translated to `de` and `fr`.
        form = SubmitTranslationForm(instance=self.en_blog_index)
        self.assertIsInstance(form.fields["select_all"].widget, CheckboxInput)

    def test_locale_disabled(self):
        form = SubmitTranslationForm(instance=self.en_blog_post)

        # The parent (blog_index) is translated to English.
        # German and French are disabled.
        self.assertEqual(
            list(form.fields["locales"].widget.disabled_values),
            [self.de_locale.id, self.fr_locale.id],
        )
        label = f"""
        <label class="disabled">
            <input type="checkbox" name="None" value="{self.de_locale.id}" disabled>
            German
        </label>
        """
        self.assertInHTML(label, form.fields["locales"].widget.render(None, None))

    def test_locale_help_text(self):
        # German and French are disabled.
        # The help_text is plural
        form = SubmitTranslationForm(instance=self.en_blog_post)
        help_text = f"""
        Some locales are disabled because some parent pages are not translated.
        <br>
        <a href="/admin/translation/submit/page/{self.en_blog_index.id}/">
            Translate the parent pages.
        </a>
        """
        self.assertHTMLEqual(form.fields["locales"].help_text, help_text)

        # Add German translation
        self.en_blog_index.copy_for_translation(self.de_locale)

        # French is disabled.
        # The help_text is singular.
        form = SubmitTranslationForm(instance=self.en_blog_post)
        help_text = f"""
        A locale is disabled because a parent page is not translated.
        <br>
        <a href="/admin/translation/submit/page/{self.en_blog_index.id}/">
            Translate the parent page.
        </a>
        """
        self.assertHTMLEqual(form.fields["locales"].help_text, help_text)

    def test_hide_submit(self):
        # German and French are disabled.
        # There are no other pages to be translated.
        # Submit is hidden.
        form = SubmitTranslationForm(instance=self.en_blog_post)
        self.assertFalse(form.show_submit)

        # A parent is translated
        self.en_blog_index.copy_for_translation(self.de_locale)
        form = SubmitTranslationForm(instance=self.en_blog_post)
        self.assertTrue(form.show_submit)
| bsd-3-clause | 5c76fb081b90d09a86af5d043fa4fc28 | 38.214286 | 87 | 0.620765 | 3.986928 | false | true | false | false |
wagtail/wagtail | wagtail/users/views/bulk_actions/assign_role.py | 4 | 1569 | from django import forms
from django.contrib.auth.models import Group
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext
from wagtail.users.views.bulk_actions.user_bulk_action import UserBulkAction
from wagtail.users.views.users import change_user_perm
class RoleForm(forms.Form):
    # The group ("role") that the selected users will be added to.
    role = forms.ModelChoiceField(queryset=Group.objects.all())
class AssignRoleBulkAction(UserBulkAction):
    """Bulk action that adds the selected users to a chosen group (role)."""

    display_name = _("Assign role")
    action_type = "assign_role"
    aria_label = _("Assign role to selected users")
    template_name = "wagtailusers/bulk_actions/confirm_bulk_assign_role.html"
    action_priority = 30
    form_class = RoleForm

    def check_perm(self, obj):
        # Only users allowed to change users may hand out roles.
        return self.request.user.has_perm(change_user_perm)

    def get_execution_context(self):
        """Pass the chosen role from the confirmation form to execute_action."""
        return {"role": self.cleaned_form.cleaned_data["role"]}

    @classmethod
    def execute_action(cls, objects, role=None, **kwargs):
        """Add all selected users to ``role``; returns (parents, children) counts."""
        if role is None:
            return
        role.user_set.add(*objects)
        return len(objects), 0

    def get_success_message(self, num_parent_objects, num_child_objects):
        message = ngettext(
            "%(num_parent_objects)d user has been assigned as %(role)s",
            "%(num_parent_objects)d users have been assigned as %(role)s",
            num_parent_objects,
        )
        return message % {
            "num_parent_objects": num_parent_objects,
            "role": self.cleaned_form.cleaned_data["role"].name,
        }
| bsd-3-clause | a16d6717f051cfad92b0270d2d67a33e | 33.108696 | 77 | 0.664117 | 3.845588 | false | false | false | false |
wagtail/wagtail | wagtail/contrib/styleguide/views.py | 4 | 3918 | from django import forms
from django.core.paginator import Paginator
from django.template.response import TemplateResponse
from django.utils.translation import gettext as _
from wagtail.admin import messages
from wagtail.admin.forms.search import SearchForm
from wagtail.admin.rich_text import get_rich_text_editor_widget
from wagtail.admin.widgets import (
AdminAutoHeightTextInput,
AdminDateInput,
AdminDateTimeInput,
AdminPageChooser,
AdminTimeInput,
SwitchInput,
)
from wagtail.documents.widgets import AdminDocumentChooser
from wagtail.images.widgets import AdminImageChooser
from wagtail.models import Page
from wagtail.snippets.widgets import AdminSnippetChooser
class FakeAdminSnippetChooser(AdminSnippetChooser):
    """
    AdminSnippetChooser can't be used on non-snippet models (because it fails when constructing the
    URL to the chooser modal), and we can't guarantee that any given Wagtail installation using
    this style guide will have any snippet models registered. We therefore override the
    get_chooser_modal_url method so that we can use it with Page as a stand-in for a real snippet.
    """

    def get_chooser_modal_url(self):
        # Any URL will do: the styleguide only needs the widget to render,
        # never to open a working chooser.
        return "/"
class ExampleForm(forms.Form):
    """Kitchen-sink form exercising every admin widget for the styleguide."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Widgets that must be constructed at runtime are swapped in here,
        # replacing the placeholder widgets on the declarations below.
        widget_overrides = {
            "page_chooser": AdminPageChooser(),
            "image_chooser": AdminImageChooser(),
            "document_chooser": AdminDocumentChooser(),
            "snippet_chooser": FakeAdminSnippetChooser(Page),
            "date": AdminDateInput(),
            "time": AdminTimeInput(),
            "datetime": AdminDateTimeInput(),
            "auto_height_text": AdminAutoHeightTextInput(),
            "default_rich_text": get_rich_text_editor_widget("default"),
            "switch": SwitchInput(),
            "disabled_switch": SwitchInput(attrs={"disabled": True}),
        }
        for field_name, widget in widget_overrides.items():
            self.fields[field_name].widget = widget

    CHOICES = (
        ("choice1", "choice 1"),
        ("choice2", "choice 2"),
    )

    text = forms.CharField(required=True, help_text="help text")
    auto_height_text = forms.CharField(required=True)
    default_rich_text = forms.CharField(required=True)
    url = forms.URLField(required=True)
    email = forms.EmailField(max_length=254)
    date = forms.DateField()
    time = forms.TimeField()
    datetime = forms.DateTimeField()
    select = forms.ChoiceField(choices=CHOICES)
    radio_select = forms.ChoiceField(choices=CHOICES, widget=forms.RadioSelect)
    boolean = forms.BooleanField(required=False)
    switch = forms.BooleanField(required=False)
    disabled_switch = forms.BooleanField(required=False)
    page_chooser = forms.BooleanField(required=True)
    image_chooser = forms.BooleanField(required=True)
    document_chooser = forms.BooleanField(required=True)
    snippet_chooser = forms.BooleanField(required=True)
def index(request):
    """Render the styleguide page with sample widgets, messages and pagination."""
    search_form = SearchForm(placeholder=_("Search something"))
    example_form = ExampleForm()

    # Fire one message at each level so all three styles are demonstrated.
    for notify, text in (
        (messages.success, _("Success message")),
        (messages.warning, _("Warning message")),
        (messages.error, _("Error message")),
    ):
        notify(
            request,
            text,
            buttons=[
                messages.button("", _("View live")),
                messages.button("", _("Edit")),
            ],
        )

    # A middle page of dummy results, to exercise the pagination controls.
    demo_page = Paginator(list(range(100)), 10).page(2)

    return TemplateResponse(
        request,
        "wagtailstyleguide/base.html",
        {
            "search_form": search_form,
            "example_form": example_form,
            "example_page": demo_page,
        },
    )
| bsd-3-clause | 1c08400d9ee24987bd4f88dac4757a77 | 35.616822 | 99 | 0.682746 | 4.258696 | false | false | false | false |
wagtail/wagtail | wagtail/admin/tests/pages/test_page_search.py | 4 | 8003 | from django.contrib.auth.models import Permission
from django.test import TestCase
from django.urls import reverse
from wagtail.models import Page
from wagtail.search.index import SearchField
from wagtail.test.testapp.models import SimplePage, SingleEventPage
from wagtail.test.utils import WagtailTestUtils
from wagtail.test.utils.timestamps import local_datetime
class TestPageSearch(WagtailTestUtils, TestCase):
    """Tests for the admin page search view (wagtailadmin_pages:search).

    Consistency fix: WagtailTestUtils is listed before TestCase, matching the
    base-class order used elsewhere (e.g. TestSubmitPageTranslation) so the
    mixin's helpers take MRO precedence.
    """

    def setUp(self):
        self.user = self.login()

    def get(self, params=None, **extra):
        """Shorthand for GETting the search view with the given query params."""
        return self.client.get(
            reverse("wagtailadmin_pages:search"), params or {}, **extra
        )

    def test_view(self):
        response = self.get()
        self.assertTemplateUsed(response, "wagtailadmin/pages/search.html")
        self.assertEqual(response.status_code, 200)

    def test_search(self):
        response = self.get({"q": "Hello"})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "wagtailadmin/pages/search.html")
        self.assertEqual(response.context["query_string"], "Hello")

    def test_search_searchable_fields(self):
        # Find root page
        root_page = Page.objects.get(id=2)

        # Create a page
        root_page.add_child(
            instance=SimplePage(
                title="Hi there!",
                slug="hello-world",
                content="good morning",
                live=True,
                has_unpublished_changes=False,
            )
        )

        # Confirm the slug is not being searched
        response = self.get({"q": "hello"})
        self.assertNotContains(response, "There is one matching page")

        # Robustness fix: restore the class-level Page.search_fields even if
        # an assertion below fails, so the mutation cannot leak into other
        # tests (previously it was only restored on the success path).
        search_fields = Page.search_fields
        self.addCleanup(setattr, Page, "search_fields", search_fields)

        # Add slug to the search_fields
        Page.search_fields = Page.search_fields + [
            SearchField("slug", partial_match=True)
        ]

        # Confirm the slug is being searched
        response = self.get({"q": "hello"})
        self.assertContains(response, "There is one matching page")

    def test_ajax(self):
        # AJAX requests get the bare results partial, not the full page.
        response = self.get({"q": "Hello"}, HTTP_X_REQUESTED_WITH="XMLHttpRequest")
        self.assertEqual(response.status_code, 200)
        self.assertTemplateNotUsed(response, "wagtailadmin/pages/search.html")
        self.assertTemplateUsed(response, "wagtailadmin/pages/search_results.html")
        self.assertEqual(response.context["query_string"], "Hello")

    def test_pagination(self):
        # Out-of-range and junk page numbers must not produce a server error.
        pages = ["0", "1", "-1", "9999", "Not a page"]
        for page in pages:
            response = self.get({"q": "Hello", "p": page})
            self.assertEqual(response.status_code, 200)
            self.assertTemplateUsed(response, "wagtailadmin/pages/search.html")

    def test_root_can_appear_in_search_results(self):
        response = self.get({"q": "roo"})
        self.assertEqual(response.status_code, 200)
        # 'pages' list in the response should contain root
        results = response.context["pages"]
        self.assertTrue(any([r.slug == "root" for r in results]))

    def test_search_uses_admin_display_title_from_specific_class(self):
        # SingleEventPage has a custom get_admin_display_title method; explorer should
        # show the custom title rather than the basic database one
        root_page = Page.objects.get(id=2)
        new_event = SingleEventPage(
            title="Lunar event",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2016, 1, 1),
        )
        root_page.add_child(instance=new_event)
        response = self.get({"q": "lunar"})
        self.assertContains(response, "Lunar event (single event)")

    def test_search_no_perms(self):
        # A user with admin access but no page permissions is bounced home.
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        self.assertRedirects(self.get(), "/admin/")

    def test_search_order_by_title(self):
        root_page = Page.objects.get(id=2)
        new_event = SingleEventPage(
            title="Lunar event",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2016, 1, 1),
        )
        root_page.add_child(instance=new_event)

        new_event_2 = SingleEventPage(
            title="A Lunar event",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2016, 1, 1),
        )
        root_page.add_child(instance=new_event_2)

        response = self.get({"q": "Lunar", "ordering": "title"})
        page_ids = [page.id for page in response.context["pages"]]
        self.assertEqual(page_ids, [new_event_2.id, new_event.id])

        response = self.get({"q": "Lunar", "ordering": "-title"})
        page_ids = [page.id for page in response.context["pages"]]
        self.assertEqual(page_ids, [new_event.id, new_event_2.id])

    def test_search_order_by_updated(self):
        root_page = Page.objects.get(id=2)
        new_event = SingleEventPage(
            title="Lunar event",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2016, 1, 1),
        )
        root_page.add_child(instance=new_event)

        new_event_2 = SingleEventPage(
            title="Lunar event 2",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2015, 1, 1),
        )
        root_page.add_child(instance=new_event_2)

        response = self.get({"q": "Lunar", "ordering": "latest_revision_created_at"})
        page_ids = [page.id for page in response.context["pages"]]
        self.assertEqual(page_ids, [new_event_2.id, new_event.id])

        response = self.get({"q": "Lunar", "ordering": "-latest_revision_created_at"})
        page_ids = [page.id for page in response.context["pages"]]
        self.assertEqual(page_ids, [new_event.id, new_event_2.id])

    def test_search_order_by_status(self):
        root_page = Page.objects.get(id=2)
        live_event = SingleEventPage(
            title="Lunar event",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2016, 1, 1),
            live=True,
        )
        root_page.add_child(instance=live_event)

        draft_event = SingleEventPage(
            title="Lunar event",
            location="the moon",
            audience="public",
            cost="free",
            date_from="2001-01-01",
            latest_revision_created_at=local_datetime(2016, 1, 1),
            live=False,
        )
        root_page.add_child(instance=draft_event)

        response = self.get({"q": "Lunar", "ordering": "live"})
        page_ids = [page.id for page in response.context["pages"]]
        self.assertEqual(page_ids, [draft_event.id, live_event.id])

        response = self.get({"q": "Lunar", "ordering": "-live"})
        page_ids = [page.id for page in response.context["pages"]]
        self.assertEqual(page_ids, [live_event.id, draft_event.id])

    def test_search_filter_content_type(self):
        # Correct content_type
        response = self.get({"content_type": "demosite.standardpage"})
        self.assertEqual(response.status_code, 200)

        # Incorrect content_type
        response = self.get({"content_type": "demosite.standardpage.error"})
        self.assertEqual(response.status_code, 404)
| bsd-3-clause | 98bc54ef8e7157221e95eb98c7513270 | 37.109524 | 86 | 0.595652 | 3.845747 | false | true | false | false |
wagtail/wagtail | wagtail/admin/site_summary.py | 4 | 2552 | from django.forms import Media
from wagtail import hooks
from wagtail.admin.auth import user_has_any_page_permission
from wagtail.admin.navigation import get_site_for_user
from wagtail.admin.ui.components import Component
from wagtail.models import Page, Site
class SummaryItem(Component):
    """Base class for the panels aggregated by ``SiteSummaryPanel``.

    ``SiteSummaryPanel`` sorts registered items by ``order`` and drops any
    whose ``is_shown()`` returns False.
    """

    # Lower values sort first within the summary panel.
    order = 100

    def __init__(self, request):
        # Keep the current request so subclasses can inspect the user.
        self.request = request

    def is_shown(self):
        """Whether this item should be displayed; shown by default."""
        return True
class PagesSummaryItem(SummaryItem):
    """Dashboard summary panel showing the page count for the user's section."""

    order = 100
    template_name = "wagtailadmin/home/site_summary_pages.html"

    def get_context_data(self, parent_context):
        """Return the user's root page, its descendant page count and site name.

        ``total_pages`` is 0 when the user has no explorable root page.
        """
        site_details = get_site_for_user(self.request.user)
        root_page = site_details["root_page"]
        site_name = site_details["site_name"]

        if root_page:
            # Count the root itself plus all descendants.
            page_count = Page.objects.descendant_of(root_page, inclusive=True).count()

            if root_page.is_root():
                # If the root page the user has access to is the Wagtail root,
                # subtract one from this count because the root is not a real page.
                page_count -= 1

                # If precisely one site exists, link to its homepage rather than the
                # tree root, to discourage people from trying to create pages as siblings
                # of the homepage (#1883)
                try:
                    root_page = Site.objects.get().root_page
                except (Site.DoesNotExist, Site.MultipleObjectsReturned):
                    # Zero or several sites: keep linking to the tree root.
                    pass
        else:
            page_count = 0

        return {
            "root_page": root_page,
            "total_pages": page_count,
            "site_name": site_name,
        }

    def is_shown(self):
        """Only show the panel to users holding some page permission."""
        return user_has_any_page_permission(self.request.user)
class SiteSummaryPanel(Component):
    """Homepage dashboard panel aggregating the registered summary items."""

    name = "site_summary"
    template_name = "wagtailadmin/home/site_summary.html"
    order = 100

    def __init__(self, request):
        self.request = request
        # Let each registered hook append its items, then keep only the
        # visible ones, sorted by their `order` attribute.
        collected = []
        for hook in hooks.get_hooks("construct_homepage_summary_items"):
            hook(request, collected)
        self.summary_items = sorted(
            (item for item in collected if item.is_shown()),
            key=lambda item: item.order,
        )

    def get_context_data(self, parent_context):
        """Expose the visible summary items to the panel template."""
        context = super().get_context_data(parent_context)
        context["summary_items"] = self.summary_items
        return context

    @property
    def media(self):
        """Combined form media of every visible summary item."""
        combined = Media()
        for item in self.summary_items:
            combined = combined + item.media
        return combined
| bsd-3-clause | 49d5672c3ec3117947e4c385b48f64bb | 30.9 | 89 | 0.612069 | 4.116129 | false | false | false | false |
wagtail/wagtail | wagtail/search/tests/test_index_functions.py | 4 | 6914 | from datetime import date
from unittest import mock
from django.test import TestCase, override_settings
from wagtail.models import Page
from wagtail.search import index
from wagtail.test.search import models
from wagtail.test.testapp.models import SimplePage
from wagtail.test.utils import WagtailTestUtils
class TestGetIndexedInstance(TestCase):
    """Behaviour of index.get_indexed_instance for generic and specific models."""

    fixtures = ["search"]

    def test_gets_instance(self):
        """Passing an already-specific object returns it unchanged."""
        obj = models.Author.objects.get(id=1)

        # Should just return the object
        indexed_instance = index.get_indexed_instance(obj)
        self.assertEqual(indexed_instance, obj)

    def test_gets_specific_class(self):
        """Passing the parent-class row resolves back to the specific subclass."""
        obj = models.Novel.objects.get(id=1)

        # Running the command with the parent class should find the specific class again
        indexed_instance = index.get_indexed_instance(obj.book_ptr)
        self.assertEqual(indexed_instance, obj)

    def test_blocks_not_in_indexed_objects(self):
        """Objects excluded from indexing resolve to None."""
        obj = models.Novel(
            title="Don't index me!",
            publication_date=date(2017, 10, 18),
            number_of_pages=100,
        )
        obj.save()

        # We've told it not to index anything with the title "Don't index me"
        # get_indexed_instance should return None
        indexed_instance = index.get_indexed_instance(obj.book_ptr)
        self.assertIsNone(indexed_instance)
# The configured search backend is replaced by a mock; each test receives it
# as the `backend` argument injected by @mock.patch.
@mock.patch("wagtail.search.tests.DummySearchBackend", create=True)
@override_settings(
    WAGTAILSEARCH_BACKENDS={
        "default": {"BACKEND": "wagtail.search.tests.DummySearchBackend"}
    }
)
class TestInsertOrUpdateObject(TestCase, WagtailTestUtils):
    """index.insert_or_update_object forwards objects to the search backend."""

    def test_inserts_object(self, backend):
        """A saved object is passed to the backend's add()."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )
        # Drop the add() call recorded during create() so only the explicit
        # insert below is asserted on.
        backend().reset_mock()

        index.insert_or_update_object(obj)
        backend().add.assert_called_with(obj)

    def test_doesnt_insert_unsaved_object(self, backend):
        """An object without a primary key must not reach the backend."""
        obj = models.Book(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )
        backend().reset_mock()

        index.insert_or_update_object(obj)
        self.assertFalse(backend().add.mock_calls)

    def test_converts_to_specific_page(self, backend):
        """A generic Page is converted to its specific subclass before indexing."""
        root_page = Page.objects.get(id=1)
        page = root_page.add_child(
            instance=SimplePage(title="test", slug="test", content="test")
        )

        # Convert page into a generic "Page" object and add it into the index
        unspecific_page = page.page_ptr

        backend().reset_mock()
        index.insert_or_update_object(unspecific_page)

        # It should be automatically converted back to the specific version
        backend().add.assert_called_with(page)

    def test_catches_index_error(self, backend):
        """A backend failure is logged, not propagated to the caller."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )

        # reset_mock() does not clear side_effect, so the error below survives it.
        backend().add.side_effect = ValueError("Test")
        backend().reset_mock()

        with self.assertLogs("wagtail.search.index", level="ERROR") as cm:
            index.insert_or_update_object(obj)

        self.assertEqual(len(cm.output), 1)
        self.assertIn(
            "Exception raised while adding <Book: Test> into the 'default' search backend",
            cm.output[0],
        )
        self.assertIn("Traceback (most recent call last):", cm.output[0])
        self.assertIn("ValueError: Test", cm.output[0])
# The configured search backend is replaced by a mock; each test receives it
# as the `backend` argument injected by @mock.patch.
@mock.patch("wagtail.search.tests.DummySearchBackend", create=True)
@override_settings(
    WAGTAILSEARCH_BACKENDS={
        "default": {"BACKEND": "wagtail.search.tests.DummySearchBackend"}
    }
)
class TestRemoveObject(TestCase, WagtailTestUtils):
    """index.remove_object forwards deletions to the search backend."""

    def test_removes_object(self, backend):
        """A saved object is passed to the backend's delete()."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )
        backend().reset_mock()

        index.remove_object(obj)
        backend().delete.assert_called_with(obj)

    def test_removes_unsaved_object(self, backend):
        """Unlike inserts, removal is attempted even for unsaved objects."""
        obj = models.Book(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )
        backend().reset_mock()

        index.remove_object(obj)
        backend().delete.assert_called_with(obj)

    def test_catches_index_error(self, backend):
        """A backend failure is logged, not propagated to the caller."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )

        backend().reset_mock()
        backend().delete.side_effect = ValueError("Test")

        with self.assertLogs("wagtail.search.index", level="ERROR") as cm:
            index.remove_object(obj)

        self.assertEqual(len(cm.output), 1)
        self.assertIn(
            "Exception raised while deleting <Book: Test> from the 'default' search backend",
            cm.output[0],
        )
        self.assertIn("Traceback (most recent call last):", cm.output[0])
        self.assertIn("ValueError: Test", cm.output[0])
# The configured search backend is replaced by a mock; each test receives it
# as the `backend` argument injected by @mock.patch.
@mock.patch("wagtail.search.tests.DummySearchBackend", create=True)
@override_settings(
    WAGTAILSEARCH_BACKENDS={
        "default": {"BACKEND": "wagtail.search.tests.DummySearchBackend"}
    }
)
class TestSignalHandlers(TestCase, WagtailTestUtils):
    """Model save/delete signals keep the search index up to date."""

    def test_index_on_create(self, backend):
        """Creating an object triggers backend.add()."""
        backend().reset_mock()
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )
        backend().add.assert_called_with(obj)

    def test_index_on_update(self, backend):
        """Saving an existing object re-indexes it exactly once, with new data."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )

        backend().reset_mock()
        obj.title = "Updated test"
        obj.save()

        self.assertEqual(backend().add.call_count, 1)
        indexed_object = backend().add.call_args[0][0]
        self.assertEqual(indexed_object.title, "Updated test")

    def test_index_on_delete(self, backend):
        """Deleting an object triggers backend.delete()."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )

        backend().reset_mock()
        obj.delete()
        backend().delete.assert_called_with(obj)

    def test_do_not_index_fields_omitted_from_update_fields(self, backend):
        """save(update_fields=...) only indexes the listed fields' new values."""
        obj = models.Book.objects.create(
            title="Test", publication_date=date(2017, 10, 18), number_of_pages=100
        )

        backend().reset_mock()
        obj.title = "Updated test"
        obj.publication_date = date(2001, 10, 19)
        # publication_date is omitted from update_fields, so the indexed copy
        # must keep the original date.
        obj.save(update_fields=["title"])

        self.assertEqual(backend().add.call_count, 1)
        indexed_object = backend().add.call_args[0][0]
        self.assertEqual(indexed_object.title, "Updated test")
        self.assertEqual(indexed_object.publication_date, date(2017, 10, 18))
| bsd-3-clause | 825a92562fa9441ecc175aaf6310d0bf | 33.059113 | 93 | 0.638849 | 3.813569 | false | true | false | false |
wagtail/wagtail | wagtail/api/v2/router.py | 4 | 2899 | import functools
from django.urls import include, re_path
from wagtail.utils.urlpatterns import decorate_urlpatterns
class WagtailAPIRouter:
    """
    Routes requests to a collection of API endpoints and provides
    cross-linking between them.
    """

    def __init__(self, url_namespace):
        self.url_namespace = url_namespace
        self._endpoints = {}

    def register_endpoint(self, name, class_):
        """Register ``class_`` as the endpoint served under ``name``."""
        self._endpoints[name] = class_

    def get_model_endpoint(self, model):
        """
        Find the endpoint in the API that represents ``model``.

        Returns a ``(name, endpoint_class)`` tuple, or ``None`` when no
        registered endpoint covers the model.
        """
        for endpoint_name, endpoint_class in self._endpoints.items():
            if issubclass(model, endpoint_class.model):
                return endpoint_name, endpoint_class
        return None

    def get_model_listing_urlpath(self, model):
        """
        Return a URL path (excluding scheme and hostname) to the listing
        page of ``model``, or ``None`` if no endpoint represents it.
        """
        endpoint = self.get_model_endpoint(model)
        if endpoint is None:
            return None
        endpoint_name, endpoint_class = endpoint
        namespace = "{}:{}".format(self.url_namespace, endpoint_name)
        return endpoint_class.get_model_listing_urlpath(model, namespace=namespace)

    def get_object_detail_urlpath(self, model, pk):
        """
        Return a URL path (excluding scheme and hostname) to the detail
        page of an object, or ``None`` if no endpoint represents its model.
        """
        endpoint = self.get_model_endpoint(model)
        if endpoint is None:
            return None
        endpoint_name, endpoint_class = endpoint
        namespace = "{}:{}".format(self.url_namespace, endpoint_name)
        return endpoint_class.get_object_detail_urlpath(model, pk, namespace=namespace)

    def wrap_view(self, func):
        """Wrap a view so the router is reachable via ``request.wagtailapi_router``."""

        @functools.wraps(func)
        def wrapped(request, *args, **kwargs):
            request.wagtailapi_router = self
            return func(request, *args, **kwargs)

        return wrapped

    def get_urlpatterns(self):
        """Build one namespaced URL pattern per registered endpoint."""
        urlpatterns = [
            re_path(
                r"^{}/".format(endpoint_name),
                include((endpoint_class.get_urlpatterns(), endpoint_name), namespace=endpoint_name),
            )
            for endpoint_name, endpoint_class in self._endpoints.items()
        ]
        decorate_urlpatterns(urlpatterns, self.wrap_view)
        return urlpatterns

    @property
    def urls(self):
        """
        A shortcut to allow quick registration of the API in a URLconf.
        Use with Django's include() function:
        path('api/', include(myapi.urls)),
        """
        return self.get_urlpatterns(), self.url_namespace, self.url_namespace
| bsd-3-clause | 67227bee20a5023cd0fbcdcde9a95d28 | 29.515789 | 77 | 0.595378 | 4.48068 | false | false | false | false |
wagtail/wagtail | wagtail/contrib/table_block/tests.py | 4 | 18755 | import json
import unittest
from django.test import SimpleTestCase, TestCase
from django.urls import reverse
from django.utils import translation
from wagtail.blocks.field_block import FieldBlockAdapter
from wagtail.contrib.table_block.blocks import DEFAULT_TABLE_OPTIONS, TableBlock
from wagtail.models import Page
from wagtail.test.testapp.models import TableBlockStreamPage
from wagtail.test.utils import WagtailTestUtils
from .blocks import TableInput
class TestTableBlock(TestCase):
    """Rendering and (de)serialisation behaviour of TableBlock values."""

    def setUp(self):
        # Baseline Handsontable options; tests copy and tweak this dict.
        self.default_table_options = {
            "minSpareRows": 0,
            "startRows": 3,
            "startCols": 3,
            "colHeaders": False,
            "rowHeaders": False,
            "contextMenu": True,
            "editor": "text",
            "stretchH": "all",
            "height": 108,
            "language": "en",
            "renderer": "text",
            "autoColumnSize": False,
        }

    def test_table_block_render(self):
        """
        Test a generic render.
        """
        value = {
            "first_row_is_table_header": False,
            "first_col_is_header": False,
            "data": [
                ["Test 1", "Test 2", "Test 3"],
                [None, None, None],
                [None, None, None],
            ],
        }
        block = TableBlock()
        result = block.render(value)
        # None cells render as empty <td> elements.
        expected = """
            <table>
                <tbody>
                    <tr><td>Test 1</td><td>Test 2</td><td>Test 3</td></tr>
                    <tr><td></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                </tbody>
            </table>
        """
        self.assertHTMLEqual(result, expected)
        self.assertIn("Test 2", result)

    def test_table_block_alignment_render(self):
        """
        Test a generic render with some cells aligned.
        """
        value = {
            "first_row_is_table_header": True,
            "first_col_is_header": False,
            # Per-cell alignment classes are carried in the "cell" list.
            "cell": [
                {"row": 0, "col": 1, "className": "htLeft"},
                {"row": 1, "col": 1, "className": "htRight"},
            ],
            "data": [
                ["Test 1", "Test 2", "Test 3"],
                [None, None, None],
                [None, None, None],
            ],
        }
        block = TableBlock()
        result = block.render(value)
        expected = """
            <table>
                <thead>
                    <tr><th scope="col">Test 1</th><th scope="col" class="htLeft">Test 2</th><th scope="col">Test 3</th></tr>
                </thead>
                <tbody>
                    <tr><td></td><td class="htRight"></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                </tbody>
            </table>
        """
        self.assertHTMLEqual(result, expected)
        self.assertIn("Test 2", result)

    def test_render_empty_table(self):
        """
        An empty table should render okay.
        """
        block = TableBlock()
        result = block.render(
            {
                "first_row_is_table_header": False,
                "first_col_is_header": False,
                "data": [[None, None, None], [None, None, None], [None, None, None]],
            }
        )
        expected = """
            <table>
                <tbody>
                    <tr><td></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                </tbody>
            </table>
        """
        self.assertHTMLEqual(result, expected)

    def test_do_not_render_html(self):
        """
        Ensure that raw html doesn't render
        by default.
        """
        value = {
            "first_row_is_table_header": False,
            "first_col_is_header": False,
            "data": [
                ["<p><strong>Test</strong></p>", None, None],
                [None, None, None],
                [None, None, None],
            ],
        }
        # NOTE: assertHTMLEqual parses both sides as HTML, so this expected
        # markup matches the escaped output of the default text renderer.
        expected = """
            <table>
                <tbody>
                    <tr><td><p><strong>Test</strong></p></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                </tbody>
            </table>
        """
        block = TableBlock()
        result = block.render(value)
        self.assertHTMLEqual(result, expected)

    def test_row_headers(self):
        """
        Ensure that row headers are properly rendered.
        """
        value = {
            "first_row_is_table_header": True,
            "first_col_is_header": False,
            "data": [["Foo", "Bar", "Baz"], [None, None, None], [None, None, None]],
        }
        expected = """
            <table>
                <thead>
                    <tr><th scope="col">Foo</th><th scope="col">Bar</th><th scope="col">Baz</th></tr>
                </thead>
                <tbody>
                    <tr><td></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                </tbody>
            </table>
        """
        block = TableBlock()
        result = block.render(value)
        self.assertHTMLEqual(result, expected)

    def test_column_headers(self):
        """
        Ensure that column headers are properly rendered.
        """
        value = {
            "first_row_is_table_header": False,
            "first_col_is_header": True,
            "data": [
                ["Foo", "Bar", "Baz"],
                ["one", "two", "three"],
                ["four", "five", "six"],
            ],
        }
        expected = """
            <table>
                <tbody>
                    <tr><th scope="row">Foo</th><td>Bar</td><td>Baz</td></tr>
                    <tr><th scope="row">one</th><td>two</td><td>three</td></tr>
                    <tr><th scope="row">four</th><td>five</td><td>six</td></tr>
                </tbody>
            </table>
        """
        block = TableBlock()
        result = block.render(value)
        self.assertHTMLEqual(result, expected)

    def test_row_and_column_headers(self):
        """
        Test row and column headers at the same time.
        """
        value = {
            "first_row_is_table_header": True,
            "first_col_is_header": True,
            "data": [
                ["Foo", "Bar", "Baz"],
                ["one", "two", "three"],
                ["four", "five", "six"],
            ],
        }
        expected = """
            <table>
                <thead>
                    <tr><th scope="col">Foo</th><th scope="col">Bar</th><th scope="col">Baz</th></tr>
                </thead>
                <tbody>
                    <tr><th scope="row">one</th><td>two</td><td>three</td></tr>
                    <tr><th scope="row">four</th><td>five</td><td>six</td></tr>
                </tbody>
            </table>
        """
        block = TableBlock()
        result = block.render(value)
        self.assertHTMLEqual(result, expected)

    def test_value_for_and_from_form(self):
        """
        Make sure we get back good json and make
        sure it translates back to python.
        """
        value = {
            "first_row_is_table_header": False,
            "first_col_is_header": False,
            "data": [["Foo", 1, None], [3.5, "Bar", "Baz"]],
        }
        block = TableBlock()
        expected_json = '{"first_row_is_table_header": false, "first_col_is_header": false, "data": [["Foo", 1, null], [3.5, "Bar", "Baz"]]}'
        returned_json = block.value_for_form(value)
        self.assertJSONEqual(expected_json, returned_json)
        # Round trip: the JSON deserialises back to the original value.
        self.assertEqual(block.value_from_form(returned_json), value)

    def test_is_html_renderer(self):
        """
        Test that settings flow through correctly to
        the is_html_renderer method.
        """
        # TableBlock with default table_options
        block1 = TableBlock()
        self.assertIs(block1.is_html_renderer(), False)

        # TableBlock with altered table_options
        new_options = self.default_table_options.copy()
        new_options["renderer"] = "html"
        block2 = TableBlock(table_options=new_options)
        self.assertIs(block2.is_html_renderer(), True)

    def test_searchable_content(self):
        """Non-empty cells contribute to the searchable content, in row order."""
        value = {
            "first_row_is_table_header": False,
            "first_col_is_header": False,
            "data": [
                ["Test 1", "Test 2", "Test 3"],
                [None, "Bar", None],
                [None, "Foo", None],
            ],
        }
        block = TableBlock()
        content = block.get_searchable_content(value)
        self.assertEqual(
            content,
            [
                "Test 1",
                "Test 2",
                "Test 3",
                "Bar",
                "Foo",
            ],
        )

    def test_searchable_content_for_null_block(self):
        """A null value yields no searchable content instead of crashing."""
        value = None
        block = TableBlock()
        content = block.get_searchable_content(value)
        self.assertEqual(content, [])

    def test_render_with_extra_context(self):
        """
        Test that extra context variables passed in block.render are passed through
        to the template.
        """
        block = TableBlock(template="tests/blocks/table_block_with_caption.html")

        value = {
            "first_row_is_table_header": False,
            "first_col_is_header": False,
            "data": [
                ["Test 1", "Test 2", "Test 3"],
                [None, None, None],
                [None, None, None],
            ],
        }
        result = block.render(value, context={"caption": "A fascinating table."})
        self.assertIn("Test 1", result)
        self.assertIn("<div>A fascinating table.</div>", result)

    def test_table_block_caption_render(self):
        """
        Test a generic render with caption.
        """
        value = {
            "table_caption": "caption",
            "first_row_is_table_header": False,
            "first_col_is_header": False,
            "data": [
                ["Test 1", "Test 2", "Test 3"],
                [None, None, None],
                [None, None, None],
            ],
        }
        block = TableBlock()
        result = block.render(value)
        expected = """
            <table>
                <caption>caption</caption>
                <tbody>
                    <tr><td>Test 1</td><td>Test 2</td><td>Test 3</td></tr>
                    <tr><td></td><td></td><td></td></tr>
                    <tr><td></td><td></td><td></td></tr>
                </tbody>
            </table>
        """
        self.assertHTMLEqual(result, expected)
        self.assertIn("Test 2", result)

    def test_empty_table_block_is_not_rendered(self):
        """
        Test an empty table is not rendered.
        """
        value = None
        block = TableBlock()
        result = block.render(value)
        expected = ""

        self.assertHTMLEqual(result, expected)
        self.assertNotIn("None", result)
class TestTableBlockForm(WagtailTestUtils, SimpleTestCase):
    """Form-side behaviour: table_options resolution, adapter and search content."""

    def setUp(self):
        # test value for table data
        self.value = {
            "first_row_is_table_header": True,
            "first_col_is_header": True,
            "data": [
                ["Ship", "Type", "Status"],
                ["Galactica", "Battlestar", "Active"],
                ["Valkyrie", "Battlestar", "Destroyed"],
                ["Cylon Basestar", "Basestar", "Active"],
                ["Brenik", "Small Military Vessel", "Destroyed"],
            ],
        }
        # set language from testing environment
        language = translation.get_language()

        self.default_table_options = DEFAULT_TABLE_OPTIONS.copy()
        self.default_table_options["language"] = language

    def test_default_table_options(self):
        """
        Test options without any custom table_options provided.
        """
        block = TableBlock()
        # check that default_table_options created correctly
        self.assertEqual(block.table_options, block.get_table_options())
        # check that default_table_options used on self
        self.assertEqual(self.default_table_options, block.table_options)
        # check a few individual keys from DEFAULT_TABLE_OPTIONS
        self.assertEqual(
            DEFAULT_TABLE_OPTIONS["startRows"], block.table_options["startRows"]
        )
        self.assertEqual(
            DEFAULT_TABLE_OPTIONS["colHeaders"], block.table_options["colHeaders"]
        )
        self.assertEqual(
            DEFAULT_TABLE_OPTIONS["contextMenu"], block.table_options["contextMenu"]
        )
        self.assertEqual(DEFAULT_TABLE_OPTIONS["editor"], block.table_options["editor"])
        self.assertEqual(
            DEFAULT_TABLE_OPTIONS["stretchH"], block.table_options["stretchH"]
        )

    def test_table_options_language(self):
        """
        Test that the environment's language is used if no language provided.
        """
        # default must always contain a language value
        block = TableBlock()
        self.assertIn("language", block.table_options)
        # French
        translation.activate("fr-fr")
        block_fr = TableBlock()
        self.assertEqual("fr-fr", block_fr.table_options["language"])
        translation.activate("it")
        # Italian
        block_it = TableBlock()
        self.assertEqual("it", block_it.table_options["language"])
        # table_options with language provided, different to environment
        block_with_lang = TableBlock(table_options={"language": "ja"})
        self.assertNotEqual("it", block_with_lang.table_options["language"])
        self.assertEqual("ja", block_with_lang.table_options["language"])
        # NOTE(review): restores a hard-coded "en" instead of the pre-test
        # language (and without addCleanup) — test-isolation smell.
        translation.activate("en")

    def test_table_options_context_menu(self):
        """
        Test how contextMenu is set to default.
        """
        default_context_menu = list(DEFAULT_TABLE_OPTIONS["contextMenu"])  # create copy
        # confirm the default is correct
        table_options = TableBlock().table_options
        self.assertEqual(table_options["contextMenu"], default_context_menu)
        # confirm that when custom option is True, default is still used
        table_options_menu_true = TableBlock(
            table_options={"contextMenu": True}
        ).table_options
        self.assertEqual(table_options_menu_true["contextMenu"], default_context_menu)
        # confirm menu is removed if False is passed in
        table_options_menu_false = TableBlock(
            table_options={"contextMenu": False}
        ).table_options
        self.assertIs(table_options_menu_false["contextMenu"], False)
        # confirm if list passed in, it is used
        table_options_menu_list = TableBlock(
            table_options={"contextMenu": ["undo", "redo"]}
        ).table_options
        self.assertEqual(table_options_menu_list["contextMenu"], ["undo", "redo"])
        # test if empty array passed in
        table_options_menu_list = TableBlock(
            table_options={"contextMenu": []}
        ).table_options
        self.assertEqual(table_options_menu_list["contextMenu"], [])

    def test_table_options_others(self):
        """
        Test simple options overrides get passed correctly.
        """
        block_1_opts = TableBlock(
            table_options={"startRows": 5, "startCols": 2}
        ).table_options
        self.assertEqual(block_1_opts["startRows"], 5)
        self.assertEqual(block_1_opts["startCols"], 2)

        block_2_opts = TableBlock(table_options={"stretchH": "none"}).table_options
        self.assertEqual(block_2_opts["stretchH"], "none")

        # check value that is not part of the defaults
        block_3_opts = TableBlock(table_options={"allowEmpty": False}).table_options
        self.assertIs(block_3_opts["allowEmpty"], False)

    def test_adapt(self):
        """The telepath adapter exposes the expected widget and metadata."""
        block = TableBlock()
        block.set_name("test_tableblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_tableblock")
        self.assertIsInstance(js_args[1], TableInput)
        self.assertEqual(
            js_args[2],
            {
                "label": "Test tableblock",
                "required": True,
                "icon": "table",
                "classname": "w-field w-field--char_field w-field--table_input",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_searchable_content(self):
        """
        Test searchable content is created correctly.
        """
        block = TableBlock()
        search_content = block.get_searchable_content(value=self.value)
        self.assertIn("Galactica", search_content)
        self.assertIn("Brenik", search_content)
# TODO(telepath) replace this with a functional test
class TestTableBlockPageEdit(TestCase, WagtailTestUtils):
    """Page-edit view integration for a StreamField containing a TableBlock."""

    def setUp(self):
        # Table value saved into the page's stream field.
        self.value = {
            "first_row_is_table_header": True,
            "first_col_is_header": True,
            "data": [
                ["Ship", "Type", "Status"],
                ["Galactica", "Battlestar", "Active"],
                ["Valkyrie", "Battlestar", "Destroyed"],
                ["Cylon Basestar", "Basestar", "Active"],
                ["Brenik", "Small Military Vessel", "Destroyed"],
            ],
        }
        self.root_page = Page.objects.get(id=2)
        table_block_page_instance = TableBlockStreamPage(
            title="Ships", table=json.dumps([{"type": "table", "value": self.value}])
        )
        self.table_block_page = self.root_page.add_child(
            instance=table_block_page_instance
        )
        self.user = self.login()

    # Marked expectedFailure: the assertions below pre-date the telepath-based
    # admin rendering (see the TODO above the class).
    @unittest.expectedFailure
    def test_page_edit_page_view(self):
        """
        Test that edit page loads with saved table data and correct init function.
        """
        response = self.client.get(
            reverse("wagtailadmin_pages:edit", args=(self.table_block_page.id,))
        )
        # check page + field renders
        self.assertContains(
            response,
            '<div data-contentpath="table" class="w-field w-field--char_field w-field--table_input">',
        )
        # check data
        self.assertContains(response, "Battlestar")
        self.assertContains(response, "Galactica")
        # check init
        self.assertContains(response, 'initTable("table\\u002D0\\u002Dvalue"')
        self.assertContains(response, "minSpareRows")
        self.assertContains(response, "startRows")
| bsd-3-clause | db3b9bf763e9a078cd3c5fac50b88427 | 34.056075 | 141 | 0.511864 | 4.087838 | false | true | false | false |
wagtail/wagtail | wagtail/admin/navigation.py | 4 | 1452 | from django.conf import settings
from wagtail.models import Page
def get_pages_with_direct_explore_permission(user):
    """Pages on which ``user`` directly holds add/edit/publish/lock permission.

    Superusers are treated as holding implicit permission on the root node.
    """
    if user.is_superuser:
        return Page.objects.filter(depth=1)
    return Page.objects.filter(
        group_permissions__group__in=user.groups.all(),
        group_permissions__permission_type__in=["add", "edit", "publish", "lock"],
    )
def get_explorable_root_page(user):
    """Return the highest common explorable ancestor for ``user``.

    Returns ``None`` when the user has no permission over any page.
    """
    explorable_pages = get_pages_with_direct_explore_permission(user)
    try:
        return explorable_pages.first_common_ancestor(include_self=True, strict=True)
    except Page.DoesNotExist:
        return None
def get_site_for_user(user):
    """Describe the site section the given user should see in the admin.

    Returns a dict with the user's explorable ``root_page``, the ``root_site``
    it belongs to (either may be ``None``), and a display ``site_name`` that
    falls back to ``settings.WAGTAIL_SITE_NAME``.
    """
    root_page = get_explorable_root_page(user)
    root_site = root_page.get_site() if root_page else None

    site_name = None
    if root_site:
        # Prefer the configured site name; fall back to the hostname.
        site_name = root_site.site_name or root_site.hostname

    return {
        "root_page": root_page,
        "root_site": root_site,
        "site_name": site_name or settings.WAGTAIL_SITE_NAME,
    }
| bsd-3-clause | 743f46250dc9172e655df389dfc38be4 | 31.266667 | 86 | 0.652204 | 3.657431 | false | false | false | false |
wagtail/wagtail | wagtail/admin/tests/api/test_pages.py | 4 | 70767 | import collections
import datetime
import json
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.test.utils import override_settings
from django.urls import reverse
from django.utils import timezone
from wagtail import hooks
from wagtail.api.v2.tests.test_pages import TestPageDetail, TestPageListing
from wagtail.models import GroupPagePermission, Locale, Page, PageLogEntry
from wagtail.test.demosite import models
from wagtail.test.testapp.models import (
EventIndex,
EventPage,
PageWithExcludedCopyField,
SimplePage,
StreamPage,
)
from wagtail.users.models import UserProfile
from .utils import AdminAPITestCase
def get_total_page_count():
    """Count the pages visible over the admin API (excludes the tree root)."""
    # Need to take away 1 as the root page is invisible over the API by default
    return Page.objects.count() - 1
class TestAdminPageListing(AdminAPITestCase, TestPageListing):
fixtures = ["demosite.json"]
def get_response(self, **params):
return self.client.get(reverse("wagtailadmin_api:pages:listing"), params)
def get_page_id_list(self, content):
return [page["id"] for page in content["items"]]
    def get_homepage(self):
        # The demo site fixture's homepage.
        return Page.objects.get(slug="home-page")
# BASIC TESTS
    def test_basic(self):
        """Smoke-test the listing: shape of meta, items and the __types section."""
        response = self.get_response()

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response["Content-type"], "application/json")

        # Will crash if the JSON is invalid
        content = json.loads(response.content.decode("UTF-8"))

        # Check that the meta section is there
        self.assertIn("meta", content)
        self.assertIsInstance(content["meta"], dict)

        # Check that the total count is there and correct
        self.assertIn("total_count", content["meta"])
        self.assertIsInstance(content["meta"]["total_count"], int)
        self.assertEqual(content["meta"]["total_count"], get_total_page_count())

        # Check that the items section is there
        self.assertIn("items", content)
        self.assertIsInstance(content["items"], list)

        # Check that each page has a meta section with type, detail_url, html_url, status and children attributes
        for page in content["items"]:
            self.assertIn("meta", page)
            self.assertIsInstance(page["meta"], dict)
            self.assertEqual(
                set(page["meta"].keys()),
                {
                    "type",
                    "detail_url",
                    "html_url",
                    "status",
                    "children",
                    "slug",
                    "first_published_at",
                    "latest_revision_created_at",
                },
            )

        # Check the type info
        self.assertIsInstance(content["__types"], dict)
        self.assertEqual(
            set(content["__types"].keys()),
            {
                "demosite.EventPage",
                "demosite.StandardIndexPage",
                "demosite.PersonPage",
                "demosite.HomePage",
                "demosite.StandardPage",
                "demosite.EventIndexPage",
                "demosite.ContactPage",
                "demosite.BlogEntryPage",
                "demosite.BlogIndexPage",
            },
        )
        self.assertEqual(
            set(content["__types"]["demosite.EventPage"].keys()),
            {"verbose_name", "verbose_name_plural"},
        )
        self.assertEqual(
            content["__types"]["demosite.EventPage"]["verbose_name"], "event page"
        )
        self.assertEqual(
            content["__types"]["demosite.EventPage"]["verbose_name_plural"],
            "event pages",
        )
# Not applicable to the admin API
test_unpublished_pages_dont_appear_in_list = None
test_private_pages_dont_appear_in_list = None
def test_unpublished_pages_appear_in_list(self):
total_count = get_total_page_count()
page = models.BlogEntryPage.objects.get(id=16)
page.unpublish()
response = self.get_response()
content = json.loads(response.content.decode("UTF-8"))
self.assertEqual(content["meta"]["total_count"], total_count)
def test_private_pages_appear_in_list(self):
total_count = get_total_page_count()
page = models.BlogIndexPage.objects.get(id=5)
page.view_restrictions.create(password="test")
new_total_count = get_total_page_count()
self.assertEqual(total_count, total_count)
response = self.get_response()
content = json.loads(response.content.decode("UTF-8"))
self.assertEqual(content["meta"]["total_count"], new_total_count)
    def test_get_in_non_content_language(self):
        """The listing still works when the admin UI locale differs from content."""
        # Set the logged-in user's admin UI language to a non-default locale.
        # (Original comment said Swedish, but "se" is strictly the code for
        # Northern Sami — the test only needs any non-English locale.)
        user = get_user_model().objects.get(email="test@email.com")
        UserProfile.objects.update_or_create(
            user=user, defaults={"preferred_language": "se"}
        )

        response = self.get_response()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response["Content-type"], "application/json")

        # Will crash if the JSON is invalid
        content = json.loads(response.content.decode("UTF-8"))

        self.assertIn("meta", content)
# FIELDS
# Not applicable to the admin API
test_parent_field_gives_error = None
def test_fields(self):
response = self.get_response(
type="demosite.BlogEntryPage", fields="title,date,feed_image"
)
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(
set(page.keys()),
{"id", "meta", "title", "admin_display_title", "date", "feed_image"},
)
def test_fields_default(self):
response = self.get_response(type="demosite.BlogEntryPage")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(
set(page.keys()), {"id", "meta", "title", "admin_display_title"}
)
self.assertEqual(
set(page["meta"].keys()),
{
"type",
"detail_url",
"html_url",
"children",
"status",
"slug",
"first_published_at",
"latest_revision_created_at",
},
)
def test_remove_meta_fields(self):
response = self.get_response(fields="-html_url")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(
set(page.keys()), {"id", "meta", "title", "admin_display_title"}
)
self.assertEqual(
set(page["meta"].keys()),
{
"type",
"detail_url",
"slug",
"first_published_at",
"latest_revision_created_at",
"status",
"children",
},
)
def test_remove_all_meta_fields(self):
response = self.get_response(
fields="-type,-detail_url,-slug,-first_published_at,-html_url,-latest_revision_created_at,-status,-children"
)
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(set(page.keys()), {"id", "title", "admin_display_title"})
def test_remove_fields(self):
response = self.get_response(fields="-title,-admin_display_title")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(set(page.keys()), {"id", "meta"})
def test_remove_id_field(self):
response = self.get_response(fields="-id")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(set(page.keys()), {"meta", "title", "admin_display_title"})
    def test_all_fields(self):
        """fields="*" expands every top-level and meta field of the type."""
        response = self.get_response(type="demosite.BlogEntryPage", fields="*")
        content = json.loads(response.content.decode("UTF-8"))
        for page in content["items"]:
            self.assertEqual(
                set(page.keys()),
                {
                    "id",
                    "meta",
                    "title",
                    "admin_display_title",
                    "date",
                    "related_links",
                    "tags",
                    "carousel_items",
                    "body",
                    "feed_image",
                    "feed_image_thumbnail",
                },
            )
            self.assertEqual(
                set(page["meta"].keys()),
                {
                    "type",
                    "detail_url",
                    "show_in_menus",
                    "first_published_at",
                    "seo_title",
                    "slug",
                    "parent",
                    "html_url",
                    "search_description",
                    "locale",
                    "alias_of",
                    "children",
                    "descendants",
                    "ancestors",
                    "translations",
                    "status",
                    "latest_revision_created_at",
                },
            )
    def test_all_fields_then_remove_something(self):
        """"*" can be combined with "-field" removals in one parameter."""
        response = self.get_response(
            type="demosite.BlogEntryPage",
            fields="*,-title,-admin_display_title,-date,-seo_title,-status",
        )
        content = json.loads(response.content.decode("UTF-8"))
        for page in content["items"]:
            # Removed top-level fields (title, admin_display_title, date) are gone.
            self.assertEqual(
                set(page.keys()),
                {
                    "id",
                    "meta",
                    "related_links",
                    "tags",
                    "carousel_items",
                    "body",
                    "feed_image",
                    "feed_image_thumbnail",
                },
            )
            # Removed meta fields (seo_title, status) are gone too.
            self.assertEqual(
                set(page["meta"].keys()),
                {
                    "type",
                    "detail_url",
                    "show_in_menus",
                    "first_published_at",
                    "slug",
                    "parent",
                    "html_url",
                    "search_description",
                    "locale",
                    "alias_of",
                    "children",
                    "descendants",
                    "ancestors",
                    "translations",
                    "latest_revision_created_at",
                },
            )
def test_all_nested_fields(self):
response = self.get_response(
type="demosite.BlogEntryPage", fields="feed_image(*)"
)
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(
set(page["feed_image"].keys()),
{"id", "meta", "title", "width", "height", "thumbnail"},
)
    def test_fields_foreign_key(self):
        """Foreign keys (feed_image) serialise as a nested summary object."""
        # Only the detail_url differs here from the public API
        # (it points at the admin API's image endpoint).
        response = self.get_response(
            type="demosite.BlogEntryPage", fields="title,date,feed_image"
        )
        content = json.loads(response.content.decode("UTF-8"))
        for page in content["items"]:
            feed_image = page["feed_image"]
            if feed_image is not None:
                self.assertIsInstance(feed_image, dict)
                self.assertEqual(set(feed_image.keys()), {"id", "meta", "title"})
                self.assertIsInstance(feed_image["id"], int)
                self.assertIsInstance(feed_image["meta"], dict)
                self.assertEqual(
                    set(feed_image["meta"].keys()),
                    {"type", "detail_url", "download_url"},
                )
                self.assertEqual(feed_image["meta"]["type"], "wagtailimages.Image")
                self.assertEqual(
                    feed_image["meta"]["detail_url"],
                    "http://localhost/admin/api/main/images/%d/" % feed_image["id"],
                )
    def test_fields_parent(self):
        """The parent meta field serialises as a nested page summary."""
        response = self.get_response(type="demosite.BlogEntryPage", fields="parent")
        content = json.loads(response.content.decode("UTF-8"))
        for page in content["items"]:
            parent = page["meta"]["parent"]
            # All blog entry pages have the same parent (the blog index, id 5)
            self.assertDictEqual(
                parent,
                {
                    "id": 5,
                    "meta": {
                        "type": "demosite.BlogIndexPage",
                        "detail_url": "http://localhost/admin/api/main/pages/5/",
                        "html_url": "http://localhost/blog-index/",
                    },
                    "title": "Blog index",
                },
            )
def test_fields_descendants(self):
response = self.get_response(fields="descendants")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
descendants = page["meta"]["descendants"]
self.assertEqual(set(descendants.keys()), {"count", "listing_url"})
self.assertIsInstance(descendants["count"], int)
self.assertEqual(
descendants["listing_url"],
"http://localhost/admin/api/main/pages/?descendant_of=%d" % page["id"],
)
def test_fields_child_relation(self):
response = self.get_response(
type="demosite.BlogEntryPage", fields="title,related_links"
)
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(
set(page.keys()),
{"id", "meta", "title", "admin_display_title", "related_links"},
)
self.assertIsInstance(page["related_links"], list)
def test_fields_ordering(self):
response = self.get_response(
type="demosite.BlogEntryPage", fields="date,title,feed_image,related_links"
)
# Will crash if the JSON is invalid
content = json.loads(response.content.decode("UTF-8"))
# Test field order
content = json.JSONDecoder(object_pairs_hook=collections.OrderedDict).decode(
response.content.decode("UTF-8")
)
field_order = [
"id",
"meta",
"title",
"admin_display_title",
"date",
"feed_image",
"related_links",
]
self.assertEqual(list(content["items"][0].keys()), field_order)
def test_fields_tags(self):
response = self.get_response(type="demosite.BlogEntryPage", fields="tags")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(
set(page.keys()), {"id", "meta", "tags", "title", "admin_display_title"}
)
self.assertIsInstance(page["tags"], list)
    def test_fields_translations(self):
        """Translated pages list each other in the translations meta field."""
        # Add a translation of the homepage
        french = Locale.objects.create(language_code="fr")
        homepage = self.get_homepage()
        french_homepage = homepage.copy_for_translation(french)
        response = self.get_response(fields="translations")
        content = json.loads(response.content.decode("UTF-8"))
        for page in content["items"]:
            translations = page["meta"]["translations"]
            if page["id"] == homepage.id:
                # The English homepage lists its French counterpart ...
                self.assertEqual(len(translations), 1)
                self.assertEqual(translations[0]["id"], french_homepage.id)
                self.assertEqual(translations[0]["meta"]["locale"], "fr")
            elif page["id"] == french_homepage.id:
                # ... and vice versa.
                self.assertEqual(len(translations), 1)
                self.assertEqual(translations[0]["id"], homepage.id)
                self.assertEqual(translations[0]["meta"]["locale"], "en")
            else:
                # Untranslated pages report an empty list.
                self.assertEqual(translations, [])
# CHILD OF FILTER
# Not applicable to the admin API
test_child_of_page_thats_not_in_same_site_gives_error = None
    def test_child_of_root(self):
        """child_of="root" lists the direct children of the true root node."""
        # The admin API exposes the real tree root, unlike the public API,
        # which pretends the homepage of the current site is the root page.
        # In this fixture the root node has two children: pages 2 and 24.
        response = self.get_response(child_of="root")
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        self.assertEqual(page_id_list, [2, 24])
def test_child_of_page_1(self):
# Public API doesn't allow this, as it's the root page
response = self.get_response(child_of=1)
json.loads(response.content.decode("UTF-8"))
self.assertEqual(response.status_code, 200)
# DESCENDANT OF FILTER
# Not applicable to the admin API
test_descendant_of_page_thats_not_in_same_site_gives_error = None
    def test_descendant_of_root(self):
        """descendant_of="root" lists every page in the tree, in tree order."""
        response = self.get_response(descendant_of="root")
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        # Expected IDs come straight from the demosite fixture tree.
        self.assertEqual(
            page_id_list,
            [2, 4, 8, 9, 5, 16, 18, 19, 6, 10, 15, 17, 21, 22, 23, 20, 13, 14, 12, 24],
        )
def test_descendant_of_root_doesnt_give_error(self):
# Public API doesn't allow this
response = self.get_response(descendant_of=1)
json.loads(response.content.decode("UTF-8"))
self.assertEqual(response.status_code, 200)
# FOR EXPLORER FILTER
    def make_simple_page(self, parent, title):
        """Create and return a SimplePage with fixed content under *parent*."""
        return parent.add_child(instance=SimplePage(title=title, content="Simple page"))
    def test_for_explorer_filter(self):
        """for_explorer=1 applies the admin explorer hooks to the listing.

        With the filter, pages hidden from the explorer are omitted; without
        it, all children are returned.
        """
        # NOTE(review): the mechanism hiding the "Hidden ..." movie pages is
        # presumably a construct_explorer_page_queryset hook registered
        # elsewhere in this test module — confirm against the surrounding file.
        movies = self.make_simple_page(Page.objects.get(pk=1), "Movies")
        visible_movies = [
            self.make_simple_page(movies, "The Way of the Dragon"),
            self.make_simple_page(movies, "Enter the Dragon"),
            self.make_simple_page(movies, "Dragons Forever"),
        ]
        hidden_movies = [
            self.make_simple_page(movies, "The Hidden Fortress"),
            self.make_simple_page(movies, "Crouching Tiger, Hidden Dragon"),
            self.make_simple_page(
                movies, "Crouching Tiger, Hidden Dragon: Sword of Destiny"
            ),
        ]
        # With for_explorer=1, hidden pages are filtered out.
        response = self.get_response(child_of=movies.pk, for_explorer=1)
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        self.assertEqual(page_id_list, [page.pk for page in visible_movies])
        # Without it, every child is listed.
        response = self.get_response(child_of=movies.pk)
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        self.assertEqual(
            page_id_list, [page.pk for page in visible_movies + hidden_movies]
        )
def test_for_explorer_no_child_of(self):
response = self.get_response(for_explorer=1)
self.assertEqual(response.status_code, 400)
content = json.loads(response.content.decode("UTF-8"))
self.assertEqual(
content,
{
"message": "filtering by for_explorer without child_of is not supported",
},
)
    def test_for_explorer_construct_explorer_page_queryset_ordering(self):
        """A construct_explorer_page_queryset hook can reorder the listing."""
        def set_custom_ordering(parent_page, pages, request):
            # Hook: sort the explorer listing by title, descending.
            return pages.order_by("-title")
        with hooks.register_temporarily(
            "construct_explorer_page_queryset", set_custom_ordering
        ):
            response = self.get_response(for_explorer=True, child_of=2)
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        # IDs of page 2's children in reverse-title order (fixture data).
        self.assertEqual(page_id_list, [6, 20, 4, 12, 5])
# HAS CHILDREN FILTER
def test_has_children_filter(self):
response = self.get_response(has_children="true")
content = json.loads(response.content.decode("UTF-8"))
page_id_list = self.get_page_id_list(content)
self.assertEqual(page_id_list, [2, 4, 5, 6, 21, 20, 24])
    def test_has_children_filter_off(self):
        """has_children="false" returns only leaf pages."""
        response = self.get_response(has_children="false")
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        # Leaf-page IDs from the demosite fixture.
        self.assertEqual(
            page_id_list, [8, 9, 16, 18, 19, 10, 15, 17, 22, 23, 13, 14, 12, 25]
        )
def test_has_children_filter_int(self):
response = self.get_response(has_children=1)
content = json.loads(response.content.decode("UTF-8"))
page_id_list = self.get_page_id_list(content)
self.assertEqual(page_id_list, [2, 4, 5, 6, 21, 20, 24])
    def test_has_children_filter_int_off(self):
        """has_children=0 behaves the same as has_children="false"."""
        response = self.get_response(has_children=0)
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        # Same leaf-page IDs as the string "false" variant.
        self.assertEqual(
            page_id_list, [8, 9, 16, 18, 19, 10, 15, 17, 22, 23, 13, 14, 12, 25]
        )
def test_has_children_filter_invalid_integer(self):
response = self.get_response(has_children=3)
content = json.loads(response.content.decode("UTF-8"))
self.assertEqual(response.status_code, 400)
self.assertEqual(content, {"message": "has_children must be 'true' or 'false'"})
def test_has_children_filter_invalid_value(self):
response = self.get_response(has_children="yes")
content = json.loads(response.content.decode("UTF-8"))
self.assertEqual(response.status_code, 400)
self.assertEqual(content, {"message": "has_children must be 'true' or 'false'"})
# TYPE FILTER
def test_type_filter_items_are_all_blog_entries(self):
response = self.get_response(type="demosite.BlogEntryPage")
content = json.loads(response.content.decode("UTF-8"))
for page in content["items"]:
self.assertEqual(page["meta"]["type"], "demosite.BlogEntryPage")
# No specific fields available by default
self.assertEqual(
set(page.keys()), {"id", "meta", "title", "admin_display_title"}
)
def test_type_filter_multiple(self):
response = self.get_response(type="demosite.BlogEntryPage,demosite.EventPage")
content = json.loads(response.content.decode("UTF-8"))
blog_page_seen = False
event_page_seen = False
for page in content["items"]:
self.assertIn(
page["meta"]["type"], ["demosite.BlogEntryPage", "demosite.EventPage"]
)
if page["meta"]["type"] == "demosite.BlogEntryPage":
blog_page_seen = True
elif page["meta"]["type"] == "demosite.EventPage":
event_page_seen = True
# Only generic fields available
self.assertEqual(
set(page.keys()), {"id", "meta", "title", "admin_display_title"}
)
self.assertTrue(blog_page_seen, "No blog pages were found in the items")
self.assertTrue(event_page_seen, "No event pages were found in the items")
# Not applicable to the admin API
test_site_filter_same_hostname_returns_error = None
test_site_filter = None
    def test_ordering_default(self):
        """Default ordering is tree order across all sites."""
        # overridden because the admin API lists all pages, regardless of sites
        response = self.get_response()
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        self.assertEqual(
            page_id_list,
            [2, 4, 8, 9, 5, 16, 18, 19, 6, 10, 15, 17, 21, 22, 23, 20, 13, 14, 12, 24],
        )
    def test_ordering_by_title(self):
        """order=title sorts the full cross-site listing by title."""
        # overridden because the admin API lists all pages, regardless of sites
        response = self.get_response(order="title")
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        self.assertEqual(
            page_id_list,
            [21, 22, 19, 23, 5, 16, 18, 12, 14, 8, 9, 4, 25, 2, 24, 13, 20, 17, 6, 10],
        )
    def test_ordering_by_title_backwards(self):
        """order=-title reverses the title sort."""
        # overridden because the admin API lists all pages, regardless of sites
        response = self.get_response(order="-title")
        content = json.loads(response.content.decode("UTF-8"))
        page_id_list = self.get_page_id_list(content)
        self.assertEqual(
            page_id_list,
            [15, 10, 6, 17, 20, 13, 24, 2, 25, 4, 9, 8, 14, 12, 18, 16, 5, 23, 19, 22],
        )
def test_limit_total_count(self):
# overridden because the admin API lists all pages, regardless of sites
# the function is actually unchanged, but uses a different total page count helper
response = self.get_response(limit=2)
content = json.loads(response.content.decode("UTF-8"))
# The total count must not be affected by "limit"
self.assertEqual(content["meta"]["total_count"], get_total_page_count())
def test_offset_total_count(self):
# overridden because the admin API lists all pages, regardless of sites
# the function is actually unchanged, but uses a different total page count helper
response = self.get_response(offset=10)
content = json.loads(response.content.decode("UTF-8"))
# The total count must not be affected by "offset"
self.assertEqual(content["meta"]["total_count"], get_total_page_count())
    @override_settings(WAGTAILAPI_LIMIT_MAX=None)
    def test_limit_max_none_gives_no_errors(self):
        """With no configured max limit, a huge limit returns everything."""
        # overridden because the admin API lists all pages, regardless of sites
        # the function is actually unchanged, but uses a different total page count helper
        response = self.get_response(limit=1000000)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(content["items"]), get_total_page_count())
class TestAdminPageDetail(AdminAPITestCase, TestPageDetail):
    """Detail-endpoint tests for the admin pages API.

    Inherits the public-API detail tests and overrides/extends the cases
    where the admin API behaves differently (extra __types section, root
    page visibility, admin-specific URLs, alias handling).
    """

    fixtures = ["demosite.json"]
    def get_response(self, page_id, **params):
        # Fetch the admin API detail view for the given page id.
        return self.client.get(
            reverse("wagtailadmin_api:pages:detail", args=(page_id,)), params
        )
    def test_basic(self):
        """Full walk through the detail response for blog entry page 16."""
        response = self.get_response(16)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response["Content-type"], "application/json")
        # Will crash if the JSON is invalid
        content = json.loads(response.content.decode("UTF-8"))
        # Check the id field
        self.assertIn("id", content)
        self.assertEqual(content["id"], 16)
        # Check that the meta section is there
        self.assertIn("meta", content)
        self.assertIsInstance(content["meta"], dict)
        # Check the meta type
        self.assertIn("type", content["meta"])
        self.assertEqual(content["meta"]["type"], "demosite.BlogEntryPage")
        # Check the meta detail_url
        self.assertIn("detail_url", content["meta"])
        self.assertEqual(
            content["meta"]["detail_url"], "http://localhost/admin/api/main/pages/16/"
        )
        # Check the meta html_url
        self.assertIn("html_url", content["meta"])
        self.assertEqual(
            content["meta"]["html_url"], "http://localhost/blog-index/blog-post/"
        )
        # Check the meta status
        self.assertIn("status", content["meta"])
        self.assertEqual(
            content["meta"]["status"],
            {"status": "live", "live": True, "has_unpublished_changes": False},
        )
        # Check the meta children
        self.assertIn("children", content["meta"])
        self.assertEqual(
            content["meta"]["children"],
            {
                "count": 0,
                "listing_url": "http://localhost/admin/api/main/pages/?child_of=16",
            },
        )
        # Check the parent field
        self.assertIn("parent", content["meta"])
        self.assertIsInstance(content["meta"]["parent"], dict)
        self.assertEqual(set(content["meta"]["parent"].keys()), {"id", "meta", "title"})
        self.assertEqual(content["meta"]["parent"]["id"], 5)
        self.assertIsInstance(content["meta"]["parent"]["meta"], dict)
        self.assertEqual(
            set(content["meta"]["parent"]["meta"].keys()),
            {"type", "detail_url", "html_url"},
        )
        self.assertEqual(
            content["meta"]["parent"]["meta"]["type"], "demosite.BlogIndexPage"
        )
        self.assertEqual(
            content["meta"]["parent"]["meta"]["detail_url"],
            "http://localhost/admin/api/main/pages/5/",
        )
        self.assertEqual(
            content["meta"]["parent"]["meta"]["html_url"],
            "http://localhost/blog-index/",
        )
        # Check the alias_of field
        # See test_alias_page for a test on an alias page
        self.assertIn("alias_of", content["meta"])
        self.assertIsNone(content["meta"]["alias_of"])
        # Check that the custom fields are included
        self.assertIn("date", content)
        self.assertIn("body", content)
        self.assertIn("tags", content)
        self.assertIn("feed_image", content)
        self.assertIn("related_links", content)
        self.assertIn("carousel_items", content)
        # Check that the date was serialised properly
        self.assertEqual(content["date"], "2013-12-02")
        # Check that the tags were serialised properly
        self.assertEqual(content["tags"], ["bird", "wagtail"])
        # Check that the feed image was serialised properly
        self.assertIsInstance(content["feed_image"], dict)
        self.assertEqual(set(content["feed_image"].keys()), {"id", "meta", "title"})
        self.assertEqual(content["feed_image"]["id"], 7)
        self.assertIsInstance(content["feed_image"]["meta"], dict)
        self.assertEqual(
            set(content["feed_image"]["meta"].keys()),
            {"type", "detail_url", "download_url"},
        )
        self.assertEqual(content["feed_image"]["meta"]["type"], "wagtailimages.Image")
        self.assertEqual(
            content["feed_image"]["meta"]["detail_url"],
            "http://localhost/admin/api/main/images/7/",
        )
        # Check that the child relations were serialised properly
        self.assertEqual(content["related_links"], [])
        for carousel_item in content["carousel_items"]:
            self.assertEqual(
                set(carousel_item.keys()),
                {"id", "meta", "embed_url", "link", "caption", "image"},
            )
            self.assertEqual(set(carousel_item["meta"].keys()), {"type"})
        # Check the type info (admin API only: maps model labels to names)
        self.assertIsInstance(content["__types"], dict)
        self.assertEqual(
            set(content["__types"].keys()),
            {
                "wagtailcore.Page",
                "demosite.HomePage",
                "demosite.BlogIndexPage",
                "demosite.BlogEntryPageCarouselItem",
                "demosite.BlogEntryPage",
                "wagtailimages.Image",
            },
        )
        self.assertEqual(
            set(content["__types"]["demosite.BlogIndexPage"].keys()),
            {"verbose_name", "verbose_name_plural"},
        )
        self.assertEqual(
            content["__types"]["demosite.BlogIndexPage"]["verbose_name"],
            "blog index page",
        )
        self.assertEqual(
            content["__types"]["demosite.BlogIndexPage"]["verbose_name_plural"],
            "blog index pages",
        )
    # overridden from public API tests
    def test_meta_parent_id_doesnt_show_root_page(self):
        # Root page is visible in the admin API
        response = self.get_response(2)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIsNotNone(content["meta"]["parent"])
    def test_field_ordering(self):
        """Detail fields keep a stable order; the admin API adds __types last."""
        # Need to override this as the admin API has a __types field
        response = self.get_response(16)
        # Will crash if the JSON is invalid
        content = json.loads(response.content.decode("UTF-8"))
        # Test field order
        # NOTE(review): the json.loads above is redundant — the ordered decode
        # below re-parses the same payload and would also raise on bad JSON.
        content = json.JSONDecoder(object_pairs_hook=collections.OrderedDict).decode(
            response.content.decode("UTF-8")
        )
        field_order = [
            "id",
            "meta",
            "title",
            "admin_display_title",
            "body",
            "tags",
            "date",
            "feed_image",
            "feed_image_thumbnail",
            "carousel_items",
            "related_links",
            "__types",
        ]
        self.assertEqual(list(content.keys()), field_order)
    def test_meta_status_draft(self):
        """An unpublished page reports the "draft" status."""
        # Unpublish the page
        Page.objects.get(id=16).unpublish()
        response = self.get_response(16)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("status", content["meta"])
        self.assertEqual(
            content["meta"]["status"],
            {"status": "draft", "live": False, "has_unpublished_changes": True},
        )
    def test_meta_status_live_draft(self):
        """A live page with a newer unpublished revision is "live + draft"."""
        # Save revision without republish
        Page.objects.get(id=16).specific.save_revision()
        response = self.get_response(16)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("status", content["meta"])
        self.assertEqual(
            content["meta"]["status"],
            {"status": "live + draft", "live": True, "has_unpublished_changes": True},
        )
    def test_meta_status_scheduled(self):
        """A revision approved for future go-live reports "scheduled"."""
        # Unpublish and save revision with go live date in the future
        Page.objects.get(id=16).unpublish()
        tomorrow = timezone.now() + datetime.timedelta(days=1)
        Page.objects.get(id=16).specific.save_revision(approved_go_live_at=tomorrow)
        response = self.get_response(16)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("status", content["meta"])
        self.assertEqual(
            content["meta"]["status"],
            {"status": "scheduled", "live": False, "has_unpublished_changes": True},
        )
    def test_meta_status_expired(self):
        """An unpublished page flagged expired reports "expired"."""
        # Unpublish and set expired flag
        Page.objects.get(id=16).unpublish()
        Page.objects.filter(id=16).update(expired=True)
        response = self.get_response(16)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("status", content["meta"])
        self.assertEqual(
            content["meta"]["status"],
            {"status": "expired", "live": False, "has_unpublished_changes": True},
        )
    def test_meta_children_for_parent(self):
        # Homepage should have children
        response = self.get_response(2)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("children", content["meta"])
        self.assertEqual(
            content["meta"]["children"],
            {
                "count": 5,
                "listing_url": "http://localhost/admin/api/main/pages/?child_of=2",
            },
        )
    def test_meta_descendants(self):
        # Homepage should have descendants
        response = self.get_response(2)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("descendants", content["meta"])
        self.assertEqual(
            content["meta"]["descendants"],
            {
                "count": 18,
                "listing_url": "http://localhost/admin/api/main/pages/?descendant_of=2",
            },
        )
    def test_meta_ancestors(self):
        # Page 16 is nested three levels deep, so it has three ancestors
        response = self.get_response(16)
        content = json.loads(response.content.decode("UTF-8"))
        self.assertIn("ancestors", content["meta"])
        self.assertIsInstance(content["meta"]["ancestors"], list)
        self.assertEqual(len(content["meta"]["ancestors"]), 3)
        # dict_keys compares equal to a set of the same keys
        self.assertEqual(
            content["meta"]["ancestors"][0].keys(),
            {"id", "meta", "title", "admin_display_title"},
        )
        self.assertEqual(content["meta"]["ancestors"][0]["title"], "Root")
        self.assertEqual(content["meta"]["ancestors"][1]["title"], "Home page")
        self.assertEqual(content["meta"]["ancestors"][2]["title"], "Blog index")
    def test_alias_page(self):
        """An alias page links back to its source via meta.alias_of."""
        original = Page.objects.get(id=16).specific
        alias = original.create_alias(update_slug="new-slug")
        response = self.get_response(alias.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response["Content-type"], "application/json")
        # Will crash if the JSON is invalid
        content = json.loads(response.content.decode("UTF-8"))
        self.assertEqual(content["meta"]["type"], "demosite.BlogEntryPage")
        self.assertEqual(
            content["meta"]["html_url"], "http://localhost/blog-index/new-slug/"
        )
        # Check alias_of field
        self.assertIn("alias_of", content["meta"])
        self.assertIsInstance(content["meta"]["alias_of"], dict)
        self.assertEqual(
            set(content["meta"]["alias_of"].keys()), {"id", "meta", "title"}
        )
        self.assertEqual(content["meta"]["alias_of"]["id"], 16)
        self.assertIsInstance(content["meta"]["alias_of"]["meta"], dict)
        self.assertEqual(
            set(content["meta"]["alias_of"]["meta"].keys()),
            {"type", "detail_url", "html_url"},
        )
        self.assertEqual(
            content["meta"]["alias_of"]["meta"]["type"], "demosite.BlogEntryPage"
        )
        self.assertEqual(
            content["meta"]["alias_of"]["meta"]["detail_url"],
            "http://localhost/admin/api/main/pages/16/",
        )
        self.assertEqual(
            content["meta"]["alias_of"]["meta"]["html_url"],
            "http://localhost/blog-index/blog-post/",
        )
    # FIELDS
    def test_remove_all_meta_fields(self):
        """Removing every meta field drops "meta" but keeps "id"."""
        response = self.get_response(
            16,
            fields="-type,-detail_url,-slug,-first_published_at,-html_url,-descendants,-latest_revision_created_at,-alias_of,-children,-ancestors,-show_in_menus,-seo_title,-parent,-status,-search_description",
        )
        content = json.loads(response.content.decode("UTF-8"))
        self.assertNotIn("meta", set(content.keys()))
        self.assertIn("id", set(content.keys()))
    def test_remove_all_fields(self):
        """The "_" reset token keeps only the listed fields (plus __types)."""
        response = self.get_response(16, fields="_,id,type")
        content = json.loads(response.content.decode("UTF-8"))
        self.assertEqual(set(content.keys()), {"id", "meta", "__types"})
        self.assertEqual(set(content["meta"].keys()), {"type"})
    def test_all_nested_fields(self):
        """feed_image(*) expands every field of the nested image."""
        response = self.get_response(16, fields="feed_image(*)")
        content = json.loads(response.content.decode("UTF-8"))
        self.assertEqual(
            set(content["feed_image"].keys()),
            {"id", "meta", "title", "width", "height", "thumbnail"},
        )
    def test_fields_foreign_key(self):
        """feed_image serialises as a nested summary with admin detail_url."""
        response = self.get_response(16)
        content = json.loads(response.content.decode("UTF-8"))
        feed_image = content["feed_image"]
        self.assertIsInstance(feed_image, dict)
        self.assertEqual(set(feed_image.keys()), {"id", "meta", "title"})
        self.assertIsInstance(feed_image["id"], int)
        self.assertIsInstance(feed_image["meta"], dict)
        self.assertEqual(
            set(feed_image["meta"].keys()), {"type", "detail_url", "download_url"}
        )
        self.assertEqual(feed_image["meta"]["type"], "wagtailimages.Image")
        self.assertEqual(
            feed_image["meta"]["detail_url"],
            "http://localhost/admin/api/main/images/%d/" % feed_image["id"],
        )
class TestAdminPageDetailWithStreamField(AdminAPITestCase):
    """Detail-endpoint behaviour for pages containing a StreamField body."""

    fixtures = ["test.json"]

    def setUp(self):
        super().setUp()
        self.homepage = Page.objects.get(url_path="/home/")

    def make_stream_page(self, body):
        # Helper: create a StreamPage under the homepage from a raw JSON body.
        page = StreamPage(title="stream page", slug="stream-page", body=body)
        return self.homepage.add_child(instance=page)

    def test_can_fetch_streamfield_content(self):
        """StreamField blocks round-trip through the detail endpoint."""
        stream_page = self.make_stream_page('[{"type": "text", "value": "foo"}]')
        detail_url = reverse("wagtailadmin_api:pages:detail", args=(stream_page.id,))
        response = self.client.get(detail_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response["content-type"], "application/json")
        payload = json.loads(response.content.decode("utf-8"))
        self.assertIn("id", payload)
        self.assertEqual(payload["id"], stream_page.id)
        self.assertIn("body", payload)
        blocks = payload["body"]
        self.assertEqual(len(blocks), 1)
        self.assertEqual(blocks[0]["type"], "text")
        self.assertEqual(blocks[0]["value"], "foo")
        self.assertTrue(blocks[0]["id"])

    def test_image_block(self):
        """Image chooser blocks serialise their raw foreign-key id."""
        stream_page = self.make_stream_page('[{"type": "image", "value": 1}]')
        detail_url = reverse("wagtailadmin_api:pages:detail", args=(stream_page.id,))
        payload = json.loads(self.client.get(detail_url).content.decode("utf-8"))
        # ForeignKeys in a StreamField shouldn't be translated into dictionary representation
        self.assertEqual(payload["body"][0]["type"], "image")
        self.assertEqual(payload["body"][0]["value"], 1)
class TestCustomAdminDisplayTitle(AdminAPITestCase):
    """Custom get_admin_display_title values surface in the admin API."""

    fixtures = ["test.json"]

    def setUp(self):
        super().setUp()
        self.event_page = Page.objects.get(url_path="/home/events/saint-patrick/")

    def test_custom_admin_display_title_shown_on_detail_page(self):
        """The detail view exposes both title and admin_display_title."""
        detail_url = reverse(
            "wagtailadmin_api:pages:detail", args=(self.event_page.id,)
        )
        payload = json.loads(self.client.get(detail_url).content.decode("utf-8"))
        self.assertEqual(payload["title"], "Saint Patrick")
        self.assertEqual(payload["admin_display_title"], "Saint Patrick (single event)")

    def test_custom_admin_display_title_shown_on_listing(self):
        """The listing view exposes admin_display_title for each item."""
        listing_url = reverse("wagtailadmin_api:pages:listing")
        payload = json.loads(self.client.get(listing_url).content.decode("utf-8"))
        matches = [
            item for item in payload["items"] if item["id"] == self.event_page.id
        ]
        self.assertEqual(1, len(matches))
        self.assertEqual(matches[0]["title"], "Saint Patrick")
        self.assertEqual(
            matches[0]["admin_display_title"], "Saint Patrick (single event)"
        )
class TestCopyPageAction(AdminAPITestCase):
fixtures = ["test.json"]
def get_response(self, page_id, data):
return self.client.post(
reverse("wagtailadmin_api:pages:action", args=[page_id, "copy"]), data
)
def test_copy_page(self):
response = self.get_response(3, {})
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
new_page = Page.objects.get(id=content["id"])
self.assertEqual(new_page.title, "Events")
self.assertEqual(new_page.slug, "events-1")
self.assertTrue(new_page.live)
self.assertFalse(new_page.get_children().exists())
def test_copy_page_change_title(self):
response = self.get_response(3, {"title": "New title"})
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
new_page = Page.objects.get(id=content["id"])
self.assertEqual(new_page.title, "New title")
self.assertEqual(new_page.slug, "events-1")
def test_copy_page_change_slug(self):
response = self.get_response(3, {"slug": "new-slug"})
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
new_page = Page.objects.get(id=content["id"])
self.assertEqual(new_page.slug, "new-slug")
def test_copy_page_with_exclude_fields_in_copy(self):
response = self.get_response(21, {})
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
original_page = PageWithExcludedCopyField.objects.get(pk=21)
new_page = PageWithExcludedCopyField.objects.get(id=content["id"])
self.assertEqual(new_page.content, original_page.content)
self.assertNotEqual(new_page.special_field, original_page.special_field)
self.assertEqual(
new_page.special_field, new_page._meta.get_field("special_field").default
)
def test_copy_page_destination(self):
response = self.get_response(3, {"destination_page_id": 3})
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
new_page = Page.objects.get(id=content["id"])
self.assertEqual(new_page.title, "Events")
self.assertTrue(new_page.live)
self.assertFalse(new_page.get_children().exists())
def test_copy_page_recursive(self):
response = self.get_response(
3,
{
"recursive": True,
},
)
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
new_page = Page.objects.get(id=content["id"])
self.assertEqual(new_page.title, "Events")
self.assertTrue(new_page.get_children().exists())
def test_copy_page_in_draft(self):
response = self.get_response(
3,
{
"keep_live": False,
},
)
self.assertEqual(response.status_code, 201)
content = json.loads(response.content.decode("utf-8"))
new_page = Page.objects.get(id=content["id"])
self.assertEqual(new_page.title, "Events")
self.assertFalse(new_page.live)
# Check errors
def test_without_publish_permissions_at_destination_with_keep_live_false(self):
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(
content_type__app_label="wagtailadmin", codename="access_admin"
)
)
self.user.groups.add(Group.objects.get(name="Editors"))
self.user.save()
response = self.get_response(
3,
{
"destination_page_id": 1,
"keep_live": False,
},
)
self.assertEqual(response.status_code, 403)
content = json.loads(response.content.decode("utf-8"))
self.assertEqual(
content, {"detail": "You do not have permission to perform this action."}
)
def test_recursively_copy_into_self(self):
response = self.get_response(
3,
{
"destination_page_id": 3,
"recursive": True,
},
)
self.assertEqual(response.status_code, 400)
content = json.loads(response.content.decode("utf-8"))
self.assertEqual(
content,
{"message": "You cannot copy a tree branch recursively into itself"},
)
    def test_without_create_permissions_at_destination(self):
        """A user who cannot create pages at the destination gets a 403."""
        # Strip superuser status; keep only bare admin access
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        response = self.get_response(
            3,
            {
                "destination_page_id": 2,
            },
        )
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
    def test_without_publish_permissions_at_destination_with_keep_live(self):
        """Add-only permission at the destination is insufficient to copy live."""
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.groups.add(Group.objects.get(name="Editors"))
        self.user.save()
        # Grant only "add" on the destination page, not "publish"
        GroupPagePermission.objects.create(
            group=Group.objects.get(name="Editors"), page_id=2, permission_type="add"
        )
        response = self.get_response(
            3,
            {
                "destination_page_id": 2,
            },
        )
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
    def test_respects_page_creation_rules(self):
        """Copying must honour model-level page creation constraints."""
        # Only one homepage may exist
        response = self.get_response(2, {})
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
    def test_copy_page_slug_in_use(self):
        """Requesting a slug that already exists at the destination is a 400."""
        response = self.get_response(
            3,
            {
                "slug": "events",
            },
        )
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"slug": ["This slug is already in use"]})
class TestConvertAliasPageAction(AdminAPITestCase):
    """Tests for the ``convert_alias`` admin API page action."""

    fixtures = ["test.json"]
    def setUp(self):
        super().setUp()
        # Find root page
        self.root_page = Page.objects.get(id=2)
        # Add child page
        self.child_page = SimplePage(
            title="Hello world!", slug="hello-world", content="hello"
        )
        self.root_page.add_child(instance=self.child_page)
        # Add alias page
        self.alias_page = self.child_page.create_alias(update_slug="alias-page")
    def get_response(self, page_id):
        # POST to the convert_alias action endpoint for the given page
        return self.client.post(
            reverse("wagtailadmin_api:pages:action", args=[page_id, "convert_alias"])
        )
    def test_convert_alias(self):
        """Converting detaches the alias, creates a revision and logs the action."""
        response = self.get_response(self.alias_page.id)
        self.assertEqual(response.status_code, 200)
        # Check the page was converted
        self.alias_page.refresh_from_db()
        self.assertIsNone(self.alias_page.alias_of)
        # Check that a revision was created
        revision = self.alias_page.revisions.get()
        self.assertEqual(revision.user, self.user)
        self.assertEqual(self.alias_page.live_revision, revision)
        # Check audit log
        log = PageLogEntry.objects.get(action="wagtail.convert_alias")
        self.assertFalse(log.content_changed)
        self.assertEqual(
            log.data,
            {
                "page": {
                    "id": self.alias_page.id,
                    "title": self.alias_page.get_admin_display_title(),
                }
            },
        )
        self.assertEqual(log.page, self.alias_page.page_ptr)
        self.assertEqual(log.revision, revision)
        self.assertEqual(log.user, self.user)
    def test_convert_alias_not_alias(self):
        """Converting a non-alias page is rejected with a 400."""
        response = self.get_response(self.child_page.id)
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"message": "Page must be an alias to be converted."})
    def test_convert_alias_bad_permission(self):
        """A user without edit permission on the alias gets a 403."""
        # Remove privileges from user
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        response = self.get_response(self.alias_page.id)
        self.assertEqual(response.status_code, 403)
class TestDeletePageAction(AdminAPITestCase):
    """Tests for the ``delete`` admin API page action."""

    fixtures = ["test.json"]
    def get_response(self, page_id):
        # POST to the delete action endpoint for the given page
        return self.client.post(
            reverse("wagtailadmin_api:pages:action", args=[page_id, "delete"])
        )
    def test_delete_page(self):
        """A permitted delete returns 204 and removes the page."""
        response = self.get_response(4)
        # Page is deleted
        self.assertEqual(response.status_code, 204)
        self.assertFalse(Page.objects.filter(id=4).exists())
    def test_delete_page_bad_permissions(self):
        """An unpermitted delete returns 403 and leaves the page intact."""
        # Remove privileges from user
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        # delete
        response = self.get_response(4)
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
        # Page is still there
        self.assertTrue(Page.objects.filter(id=4).exists())
class TestPublishPageAction(AdminAPITestCase):
    """Tests for the ``publish`` admin API page action."""

    fixtures = ["test.json"]
    def get_response(self, page_id):
        # POST to the publish action endpoint for the given page
        return self.client.post(
            reverse("wagtailadmin_api:pages:action", args=[page_id, "publish"])
        )
    def test_publish_page(self):
        """Publishing sets live=True and stamps first/last published timestamps."""
        unpublished_page = Page.objects.get(slug="tentative-unpublished-event")
        self.assertIsNone(unpublished_page.first_published_at)
        self.assertEqual(
            unpublished_page.first_published_at, unpublished_page.last_published_at
        )
        self.assertIs(unpublished_page.live, False)
        response = self.get_response(unpublished_page.id)
        self.assertEqual(response.status_code, 200)
        unpublished_page.refresh_from_db()
        self.assertIsNotNone(unpublished_page.first_published_at)
        self.assertEqual(
            unpublished_page.first_published_at, unpublished_page.last_published_at
        )
        self.assertIs(unpublished_page.live, True)
    def test_publish_insufficient_permissions(self):
        """An editor without publish permission gets a 403."""
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.groups.add(Group.objects.get(name="Editors"))
        self.user.save()
        response = self.get_response(4)
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
    def test_publish_alias_page(self):
        """Publishing an alias page is rejected: aliases have no revisions."""
        home = Page.objects.get(slug="home")
        alias_page = home.create_alias(update_slug="new-home-page")
        response = self.get_response(alias_page.id)
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content,
            {
                "message": (
                    "page.save_revision() was called on an alias page. "
                    "Revisions are not required for alias pages as they are an exact copy of another page."
                )
            },
        )
class TestUnpublishPageAction(AdminAPITestCase):
    """Tests for the ``unpublish`` admin API page action."""

    fixtures = ["test.json"]
    def get_response(self, page_id, data):
        # POST to the unpublish action endpoint with the given payload
        return self.client.post(
            reverse("wagtailadmin_api:pages:action", args=[page_id, "unpublish"]), data
        )
    def test_unpublish_page(self):
        """A plain unpublish takes the target page offline."""
        self.assertTrue(Page.objects.get(id=3).live)
        response = self.get_response(3, {})
        self.assertEqual(response.status_code, 200)
        # Check that the page was unpublished
        self.assertFalse(Page.objects.get(id=3).live)
    def test_unpublish_page_include_descendants(self):
        """``recursive: True`` also unpublishes unlocked descendants."""
        page = Page.objects.get(slug="home")
        # Check that the page has live descendants that aren't locked.
        self.assertTrue(page.get_descendants().live().filter(locked=False).exists())
        response = self.get_response(page.id, {"recursive": True})
        self.assertEqual(response.status_code, 200)
        # Check that the page is unpublished
        page.refresh_from_db()
        self.assertFalse(page.live)
        # Check that the descendant pages that weren't locked are unpublished as well
        descendant_pages = page.get_descendants().filter(locked=False)
        self.assertTrue(descendant_pages.exists())
        for descendant_page in descendant_pages:
            self.assertFalse(descendant_page.live)
    def test_unpublish_page_without_including_descendants(self):
        """``recursive: False`` leaves descendants live."""
        page = Page.objects.get(slug="secret-plans")
        # Check that the page has live descendants that aren't locked.
        self.assertTrue(page.get_descendants().live().filter(locked=False).exists())
        response = self.get_response(page.id, {"recursive": False})
        self.assertEqual(response.status_code, 200)
        # Check that the page is unpublished
        page.refresh_from_db()
        self.assertFalse(page.live)
        # Check that the descendant pages that weren't locked aren't unpublished.
        self.assertTrue(page.get_descendants().live().filter(locked=False).exists())
    def test_unpublish_invalid_page_id(self):
        """Unknown page ids give a 404."""
        response = self.get_response(12345, {})
        self.assertEqual(response.status_code, 404)
    def test_unpublish_page_insufficient_permission(self):
        """A user without unpublish permission gets a 403."""
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        response = self.get_response(3, {})
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
class TestMovePageAction(AdminAPITestCase):
    """Tests for the ``move`` admin API page action."""

    fixtures = ["test.json"]
    def get_response(self, page_id, data):
        # POST to the move action endpoint with the given payload
        return self.client.post(
            reverse("wagtailadmin_api:pages:action", args=[page_id, "move"]), data
        )
    def test_move_page(self):
        """A permitted move to an explicit destination succeeds."""
        response = self.get_response(4, {"destination_page_id": 3})
        self.assertEqual(response.status_code, 200)
    def test_move_page_bad_permissions(self):
        """A user without move permission gets a 403."""
        # Remove privileges from user
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        # Move
        response = self.get_response(4, {"destination_page_id": 3})
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
    def test_move_page_without_destination_page_id(self):
        """``destination_page_id`` is a required field."""
        response = self.get_response(4, {})
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"destination_page_id": ["This field is required."]})
class TestCopyForTranslationAction(AdminAPITestCase):
    """Tests for the ``copy_for_translation`` admin API page action."""

    fixtures = ["test.json"]
    def get_response(self, page_id, data):
        # POST to the copy_for_translation action endpoint with the payload
        return self.client.post(
            reverse(
                "wagtailadmin_api:pages:action", args=[page_id, "copy_for_translation"]
            ),
            data,
        )
    def setUp(self):
        super().setUp()
        self.en_homepage = Page.objects.get(url_path="/home/").specific
        self.en_eventindex = EventIndex.objects.get(url_path="/home/events/")
        self.en_eventpage = EventPage.objects.get(url_path="/home/events/christmas/")
        self.root_page = self.en_homepage.get_parent()
        self.fr_locale = Locale.objects.create(language_code="fr")
    def test_copy_homepage_for_translation(self):
        """A translation copy shares the translation key and starts as a draft."""
        response = self.get_response(self.en_homepage.id, {"locale": "fr"})
        self.assertEqual(response.status_code, 201)
        content = json.loads(response.content.decode("utf-8"))
        fr_homepage = Page.objects.get(id=content["id"])
        self.assertNotEqual(self.en_homepage.id, fr_homepage.id)
        self.assertEqual(fr_homepage.locale, self.fr_locale)
        self.assertEqual(fr_homepage.translation_key, self.en_homepage.translation_key)
        # At the top level, the language code should be appended to the slug
        self.assertEqual(fr_homepage.slug, "home-fr")
        # Translation must be in draft
        self.assertFalse(fr_homepage.live)
        self.assertTrue(fr_homepage.has_unpublished_changes)
    def test_copy_childpage_without_parent(self):
        """Copying a child fails when its parent has no translation yet."""
        response = self.get_response(self.en_eventindex.id, {"locale": "fr"})
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"message": "Parent page is not translated."})
    def test_copy_childpage_with_copy_parents(self):
        """``copy_parents: True`` creates the missing translated ancestors too."""
        response = self.get_response(
            self.en_eventindex.id, {"locale": "fr", "copy_parents": True}
        )
        self.assertEqual(response.status_code, 201)
        content = json.loads(response.content.decode("utf-8"))
        fr_eventindex = Page.objects.get(id=content["id"])
        self.assertNotEqual(self.en_eventindex.id, fr_eventindex.id)
        self.assertEqual(fr_eventindex.locale, self.fr_locale)
        self.assertEqual(
            fr_eventindex.translation_key, self.en_eventindex.translation_key
        )
        self.assertEqual(self.en_eventindex.slug, fr_eventindex.slug)
        # This should create the homepage as well
        fr_homepage = fr_eventindex.get_parent()
        self.assertNotEqual(self.en_homepage.id, fr_homepage.id)
        self.assertEqual(fr_homepage.locale, self.fr_locale)
        self.assertEqual(fr_homepage.translation_key, self.en_homepage.translation_key)
        self.assertEqual(fr_homepage.slug, "home-fr")
    def test_copy_for_translation_no_locale(self):
        """``locale`` is a required field."""
        response = self.get_response(self.en_homepage.id, {})
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"locale": ["This field is required."]})
    def test_copy_for_translation_unknown_locale(self):
        """An unconfigured locale code gives a 404."""
        response = self.get_response(self.en_homepage.id, {"locale": "de"})
        self.assertEqual(response.status_code, 404)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"message": "No Locale matches the given query."})
class TestCreatePageAliasAction(AdminAPITestCase):
    """Tests for the ``create_alias`` admin API page action."""

    fixtures = ["test.json"]
    def setUp(self):
        super().setUp()
        self.events_index = EventIndex.objects.get(url_path="/home/events/")
        self.about_us = SimplePage.objects.get(url_path="/home/about-us/")
    def get_response(self, page_id, data):
        # POST to the create_alias action endpoint with the given payload
        return self.client.post(
            reverse("wagtailadmin_api:pages:action", args=[page_id, "create_alias"]),
            data,
        )
    def test_create_alias(self):
        """Creating an alias copies the page, links alias_of and fixes titles."""
        # Set a different draft title, aliases are not supposed to
        # have a different draft_title because they don't have revisions.
        # This should be corrected when copying
        self.about_us.draft_title = "Draft title"
        self.about_us.save(update_fields=["draft_title"])
        response = self.get_response(
            self.about_us.id, data={"update_slug": "new-about-us"}
        )
        self.assertEqual(response.status_code, 201)
        content = json.loads(response.content.decode("utf-8"))
        new_about_us = Page.objects.get(id=content["id"])
        # Check that new_about_us is correct
        self.assertIsInstance(new_about_us.specific, SimplePage)
        self.assertEqual(new_about_us.slug, "new-about-us")
        # Draft title should be changed to match the live title
        self.assertEqual(new_about_us.draft_title, "About us")
        # Check that new_about_us is a different page
        self.assertNotEqual(self.about_us.id, new_about_us.id)
        # Check that the url path was updated
        self.assertEqual(new_about_us.url_path, "/home/new-about-us/")
        # Check that the alias_of field was filled in
        self.assertEqual(new_about_us.alias_of.specific, self.about_us)
    def test_create_alias_recursive(self):
        """A recursive alias also aliases child pages under the new tree."""
        response = self.get_response(
            self.events_index.id,
            data={"recursive": True, "update_slug": "new-events-index"},
        )
        self.assertEqual(response.status_code, 201)
        content = json.loads(response.content.decode("utf-8"))
        new_events_index = Page.objects.get(id=content["id"])
        # Get christmas event
        old_christmas_event = (
            self.events_index.get_children().filter(slug="christmas").first()
        )
        new_christmas_event = (
            new_events_index.get_children().filter(slug="christmas").first()
        )
        # Check that the event exists in both places
        self.assertIsNotNone(new_christmas_event, "Child pages weren't copied")
        self.assertIsNotNone(
            old_christmas_event, "Child pages were removed from original page"
        )
        # Check that the url path was updated
        self.assertEqual(
            new_christmas_event.url_path, "/home/new-events-index/christmas/"
        )
        # Check that the children were also created as aliases
        self.assertEqual(new_christmas_event.alias_of, old_christmas_event)
    def test_create_alias_doesnt_copy_recursively_to_the_same_tree(self):
        """Recursively aliasing a branch into itself is rejected."""
        response = self.get_response(
            self.events_index.id,
            data={"recursive": True, "destination_page_id": self.events_index.id},
        )
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content,
            {"message": "You cannot copy a tree branch recursively into itself"},
        )
    def test_create_alias_without_publish_permissions(self):
        """A user without publish permission gets a 403."""
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        response = self.get_response(
            self.events_index.id,
            data={"recursive": True, "update_slug": "new-events-index"},
        )
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
class TestRevertToPageRevisionAction(AdminAPITestCase):
    """Tests for the ``revert_to_page_revision`` admin API page action."""

    fixtures = ["test.json"]
    def setUp(self):
        super().setUp()
        self.events_page = Page.objects.get(id=3)
        # Create revision to revert back to
        self.first_revision = self.events_page.specific.save_revision()
        # Change page title
        self.events_page.title = "Evenements"
        self.events_page.specific.save_revision().publish()
    def get_response(self, page_id, data):
        # POST to the revert_to_page_revision action endpoint with the payload
        return self.client.post(
            reverse(
                "wagtailadmin_api:pages:action",
                args=[page_id, "revert_to_page_revision"],
            ),
            data,
        )
    def test_revert_to_page_revision(self):
        """Reverting restores the content of the chosen revision."""
        self.assertEqual(self.events_page.title, "Evenements")
        response = self.get_response(
            self.events_page.id, {"revision_id": self.first_revision.id}
        )
        self.assertEqual(response.status_code, 200)
        self.events_page.specific.get_latest_revision().publish()
        self.events_page.refresh_from_db()
        self.assertEqual(self.events_page.title, "Events")
    def test_revert_to_page_revision_bad_permissions(self):
        """A user without edit permission gets a 403."""
        # Remove privileges from user
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(
                content_type__app_label="wagtailadmin", codename="access_admin"
            )
        )
        self.user.save()
        response = self.get_response(
            self.events_page.id, {"revision_id": self.first_revision.id}
        )
        self.assertEqual(response.status_code, 403)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(
            content, {"detail": "You do not have permission to perform this action."}
        )
    def test_revert_to_page_revision_without_revision_id(self):
        """``revision_id`` is a required field."""
        response = self.get_response(self.events_page.id, {})
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"revision_id": ["This field is required."]})
    def test_revert_to_page_revision_bad_revision_id(self):
        """An unknown revision id gives a 404."""
        self.assertEqual(self.events_page.title, "Evenements")
        response = self.get_response(self.events_page.id, {"revision_id": 999})
        self.assertEqual(response.status_code, 404)
        content = json.loads(response.content.decode("utf-8"))
        self.assertEqual(content, {"message": "No Revision matches the given query."})
# Overwrite imported test cases so Django doesn't run them
TestPageDetail = None
TestPageListing = None
| bsd-3-clause | 781e3c1f352f0d75f2ec0d9c0377ad04 | 35.459042 | 209 | 0.585951 | 4.0307 | false | true | false | false |
wagtail/wagtail | wagtail/actions/publish_revision.py | 4 | 7722 | import logging
from django.core.exceptions import PermissionDenied
from django.utils import timezone
from wagtail.log_actions import log
from wagtail.permission_policies.base import ModelPermissionPolicy
from wagtail.signals import published
# Module-level logger shared by the publish actions in this module.
logger = logging.getLogger("wagtail")
class PublishPermissionError(PermissionDenied):
    """Raised when the acting user lacks the permission required to publish."""

    pass
class PublishRevisionAction:
    """
    Publish or schedule revision for publishing.

    :param revision: revision to publish
    :param user: the publishing user
    :param changed: indicates whether content has changed
    :param log_action:
        flag for the logging action. Pass False to skip logging. Cannot pass an action string as the method
        performs several actions: "publish", "revert" (and publish the reverted revision),
        "schedule publishing with a live revision", "schedule revision reversal publishing, with a live revision",
        "schedule publishing", "schedule revision reversal publishing"
    :param previous_revision: indicates a revision reversal. Should be set to the previous revision instance
    """

    def __init__(
        self, revision, user=None, changed=True, log_action=True, previous_revision=None
    ):
        self.revision = revision
        # Materialise the revision content once; all checks operate on it
        self.object = self.revision.as_object()
        self.permission_policy = ModelPermissionPolicy(type(self.object))
        self.user = user
        self.changed = changed
        self.log_action = log_action
        self.previous_revision = previous_revision

    def check(self, skip_permission_checks=False):
        """Raise ``PublishPermissionError`` if ``user`` may not publish.

        A ``None`` user (system action) always passes; ``skip_permission_checks``
        bypasses the policy check entirely.
        """
        if (
            self.user
            and not skip_permission_checks
            and not self.permission_policy.user_has_permission(self.user, "publish")
        ):
            raise PublishPermissionError(
                "You do not have permission to publish this object"
            )

    def log_scheduling_action(self):
        """Record a ``wagtail.publish.schedule`` audit-log entry for this revision."""
        log(
            instance=self.object,
            action="wagtail.publish.schedule",
            user=self.user,
            data={
                "revision": {
                    "id": self.revision.id,
                    "created": self.revision.created_at.strftime("%d %b %Y %H:%M"),
                    "go_live_at": self.object.go_live_at.strftime("%d %b %Y %H:%M"),
                    "has_live_version": self.object.live,
                }
            },
            revision=self.revision,
            content_changed=self.changed,
        )

    def _after_publish(self):
        """Emit the ``published`` signal once the object has been published."""
        published.send(
            sender=type(self.object),
            instance=self.object,
            revision=self.revision,
        )

    def _publish_revision(
        self, revision, object, user, changed, log_action, previous_revision
    ):
        """Core publish/schedule logic.

        Either publishes ``revision`` immediately, or (when ``go_live_at`` is in
        the future) marks it as approved for scheduled publishing. Also updates
        the audit log and emits the ``published`` signal on immediate publish.
        """
        from wagtail.models import Revision
        if object.go_live_at and object.go_live_at > timezone.now():
            object.has_unpublished_changes = True
            # Instead set the approved_go_live_at of this revision
            revision.approved_go_live_at = object.go_live_at
            revision.save()
            # And clear the approved_go_live_at of any other revisions
            object.revisions.exclude(id=revision.id).update(approved_go_live_at=None)
            # if we are updating a currently live object skip the rest
            if object.live_revision:
                # Log scheduled publishing
                if log_action:
                    self.log_scheduling_action()
                return
            # if we have a go_live in the future don't make the object live
            object.live = False
        else:
            object.live = True
            # at this point, the object has unpublished changes if and only if there are newer revisions than this one
            object.has_unpublished_changes = not revision.is_latest_revision()
            # If object goes live clear the approved_go_live_at of all revisions
            object.revisions.update(approved_go_live_at=None)
        object.expired = False  # a published object can't be expired
        # Set first_published_at, last_published_at and live_revision
        # if the object is being published now
        if object.live:
            now = timezone.now()
            object.last_published_at = now
            object.live_revision = revision
            if object.first_published_at is None:
                object.first_published_at = now
            # Work out the previous title (if any) so a rename can be logged
            if previous_revision:
                previous_revision_object = previous_revision.as_object()
                old_object_title = (
                    str(previous_revision_object)
                    if str(object) != str(previous_revision_object)
                    else None
                )
            else:
                try:
                    previous = revision.get_previous()
                except Revision.DoesNotExist:
                    previous = None
                old_object_title = (
                    str(previous.content_object)
                    if previous and str(object) != str(previous.content_object)
                    else None
                )
        else:
            # Unset live_revision if the object is going live in the future
            object.live_revision = None
        object.save()
        revision.submitted_for_moderation = False
        object.revisions.update(submitted_for_moderation=False)
        self._after_publish()
        if object.live:
            if log_action:
                data = None
                if previous_revision:
                    data = {
                        "revision": {
                            "id": previous_revision.id,
                            "created": previous_revision.created_at.strftime(
                                "%d %b %Y %H:%M"
                            ),
                        }
                    }
                if old_object_title:
                    data = data or {}
                    data["title"] = {
                        "old": old_object_title,
                        "new": str(object),
                    }
                    # Log the rename separately from the publish itself
                    log(
                        instance=object,
                        action="wagtail.rename",
                        user=user,
                        data=data,
                        revision=revision,
                    )
                log(
                    instance=object,
                    action=log_action
                    if isinstance(log_action, str)
                    else "wagtail.publish",
                    user=user,
                    data=data,
                    revision=revision,
                    content_changed=changed,
                )
            logger.info(
                'Published: "%s" pk=%s revision_id=%d',
                str(object),
                str(object.pk),
                revision.id,
            )
        elif object.go_live_at:
            logger.info(
                'Scheduled for publish: "%s" pk=%s revision_id=%d go_live_at=%s',
                str(object),
                str(object.pk),
                revision.id,
                object.go_live_at.isoformat(),
            )
            if log_action:
                self.log_scheduling_action()

    def execute(self, skip_permission_checks=False):
        """Run the permission check, then publish or schedule the revision."""
        self.check(skip_permission_checks=skip_permission_checks)
        return self._publish_revision(
            self.revision,
            self.object,
            user=self.user,
            changed=self.changed,
            log_action=self.log_action,
            previous_revision=self.previous_revision,
        )
| bsd-3-clause | ff9b821fa9f908e41d27d6fb1b7fd34e | 34.916279 | 118 | 0.535354 | 4.823235 | false | false | false | false |
wagtail/wagtail | wagtail/admin/search.py | 4 | 3772 | from django.forms import Media, MediaDefiningClass
from django.forms.utils import flatatt
from django.template.loader import render_to_string
from django.utils.functional import cached_property, total_ordering
from django.utils.safestring import mark_safe
from django.utils.text import slugify
from wagtail import hooks
from wagtail.admin.forms.search import SearchForm
@total_ordering
class SearchArea(metaclass=MediaDefiningClass):
    """A single entry in the Wagtail admin search interface.

    Instances order by ``(order, label)``; ``total_ordering`` fills in the
    remaining comparisons from ``__lt__`` and ``__eq__``.
    NOTE(review): defining ``__eq__`` without ``__hash__`` makes instances
    unhashable -- confirm nothing stores SearchArea objects in sets/dict keys.
    """

    template = "wagtailadmin/shared/search_area.html"
    def __init__(
        self, label, url, name=None, classnames="", icon_name="", attrs=None, order=1000
    ):
        self.label = label
        self.url = url
        self.classnames = classnames
        self.icon_name = icon_name
        # Fall back to a slug of the label when no explicit name is given
        self.name = name or slugify(str(label))
        self.order = order
        if attrs:
            # Pre-render extra HTML attributes once, at construction time
            self.attr_string = flatatt(attrs)
        else:
            self.attr_string = ""
    def __lt__(self, other):
        return (self.order, self.label) < (other.order, other.label)
    def __eq__(self, other):
        return (self.order, self.label) == (other.order, other.label)
    def is_shown(self, request):
        """
        Whether this search area should be shown for the given request; permission
        checks etc should go here. By default, search areas are shown all the time
        """
        return True
    def is_active(self, request, current=None):
        # Without an explicit current area name, fall back to a URL-prefix match
        if current is None:
            return request.path.startswith(self.url)
        else:
            return self.name == current
    def render_html(self, request, query, current=None):
        """Render this search area as an HTML fragment using ``self.template``."""
        return render_to_string(
            self.template,
            {
                "name": self.name,
                "url": self.url,
                "classnames": self.classnames,
                "icon_name": self.icon_name,
                "attr_string": self.attr_string,
                "label": self.label,
                "active": self.is_active(request, current),
                "query_string": query,
            },
            request=request,
        )
class Search:
    """Aggregates the search areas registered through a hook and renders them.

    ``register_hook_name`` is the hook that contributes SearchArea factories;
    ``construct_hook_name`` (optional) lets hooks mutate the area list before
    rendering.
    """

    def __init__(self, register_hook_name, construct_hook_name=None):
        self.register_hook_name = register_hook_name
        self.construct_hook_name = construct_hook_name
    @cached_property
    def registered_search_areas(self):
        # Evaluated once per instance; hooks return SearchArea objects which
        # sort by their (order, label)
        return sorted([fn() for fn in hooks.get_hooks(self.register_hook_name)])
    def search_items_for_request(self, request):
        # Only areas whose is_shown() accepts this request
        return [item for item in self.registered_search_areas if item.is_shown(request)]
    def active_search(self, request, current=None):
        """Return the visible search areas that are active for this request."""
        return [
            item
            for item in self.search_items_for_request(request)
            if item.is_active(request, current)
        ]
    @property
    def media(self):
        # Combined form media of every registered search area
        media = Media()
        for item in self.registered_search_areas:
            media += item.media
        return media
    def render_html(self, request, current=None):
        """Render all visible search areas for ``request`` as one HTML string."""
        search_areas = self.search_items_for_request(request)
        # Get query parameter
        form = SearchForm(request.GET)
        query = ""
        if form.is_valid():
            query = form.cleaned_data["q"]
        # provide a hook for modifying the search area, if construct_hook_name has been set
        if self.construct_hook_name:
            for fn in hooks.get_hooks(self.construct_hook_name):
                fn(request, search_areas)
        rendered_search_areas = []
        for item in search_areas:
            rendered_search_areas.append(item.render_html(request, query, current))
        return mark_safe("".join(rendered_search_areas))
# Module-level Search instance used by the admin; areas come from the
# "register_admin_search_area" hook and can be adjusted via "construct_search".
admin_search_areas = Search(
    register_hook_name="register_admin_search_area",
    construct_hook_name="construct_search",
)
| bsd-3-clause | 2377013de7ab8d505f950b8f0f9012ce | 31.239316 | 91 | 0.613998 | 4.06028 | false | false | false | false |
wagtail/wagtail | wagtail/admin/views/pages/revisions.py | 2 | 5408 | from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404, redirect
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from wagtail.admin import messages
from wagtail.admin.action_menu import PageActionMenu
from wagtail.admin.auth import user_has_any_page_permission, user_passes_test
from wagtail.admin.ui.side_panels import PageSidePanels
from wagtail.admin.views.generic.models import (
RevisionsCompareView,
RevisionsUnscheduleView,
)
from wagtail.admin.views.generic.preview import PreviewRevision
from wagtail.models import Page, UserPagePermissionsProxy
def revisions_index(request, page_id):
    """Redirect to the page history view, which now lists revisions."""
    return redirect("wagtailadmin_pages:history", page_id)
def revisions_revert(request, page_id, revision_id):
    """Render the page editor pre-populated with the content of an old revision.

    Requires edit permission on the page; 404s if the revision does not belong
    to the page. A warning banner tells the editor they are viewing an old
    version.
    """
    page = get_object_or_404(Page, id=page_id).specific
    page_perms = page.permissions_for_user(request.user)
    if not page_perms.can_edit():
        raise PermissionDenied
    revision = get_object_or_404(page.revisions, id=revision_id)
    revision_page = revision.as_object()
    content_type = ContentType.objects.get_for_model(page)
    page_class = content_type.model_class()
    # Build the edit form bound to the revision's content, not the live page
    edit_handler = page_class.get_edit_handler()
    form_class = edit_handler.get_form_class()
    form = form_class(instance=revision_page, for_user=request.user)
    edit_handler = edit_handler.get_bound_panel(
        instance=revision_page, request=request, form=form
    )
    lock = page.get_lock()
    action_menu = PageActionMenu(
        request,
        view="revisions_revert",
        page=page,
        lock=lock,
        locked_for_user=lock is not None and lock.for_user(request.user),
    )
    side_panels = PageSidePanels(
        request,
        page,
        preview_enabled=True,
        comments_enabled=form.show_comments_toggle,
        show_schedule_publishing_toggle=form.show_schedule_publishing_toggle,
    )
    user_avatar = render_to_string(
        "wagtailadmin/shared/user_avatar.html", {"user": revision.user}
    )
    # Banner warning that an old revision is being shown
    messages.warning(
        request,
        mark_safe(
            _(
                "You are viewing a previous version of this page from <b>%(created_at)s</b> by %(user)s"
            )
            % {
                "created_at": revision.created_at.strftime("%d %b %Y %H:%M"),
                "user": user_avatar,
            }
        ),
    )
    return TemplateResponse(
        request,
        "wagtailadmin/pages/edit.html",
        {
            "page": page,
            "revision": revision,
            "is_revision": True,
            "content_type": content_type,
            "edit_handler": edit_handler,
            "errors_debug": None,
            "action_menu": action_menu,
            "side_panels": side_panels,
            "form": form,  # Used in unit tests
            "media": edit_handler.media
            + form.media
            + action_menu.media
            + side_panels.media,
        },
    )
@method_decorator(user_passes_test(user_has_any_page_permission), name="dispatch")
class RevisionsView(PreviewRevision):
    """Preview a specific page revision; needs publish or edit permission."""

    model = Page
    def setup(self, request, page_id, revision_id, *args, **kwargs):
        # Rename path kwargs from pk to page_id
        return super().setup(request, page_id, revision_id, *args, **kwargs)
    def get_object(self):
        page = get_object_or_404(Page, id=self.pk).specific
        perms = page.permissions_for_user(self.request.user)
        if not (perms.can_publish() or perms.can_edit()):
            raise PermissionDenied
        return page
class RevisionsCompare(RevisionsCompareView):
    """Page-specific configuration of the generic revision comparison view."""

    history_label = gettext_lazy("Page history")
    edit_label = gettext_lazy("Edit this page")
    history_url_name = "wagtailadmin_pages:history"
    edit_url_name = "wagtailadmin_pages:edit"
    header_icon = "doc-empty-inverse"
    @method_decorator(user_passes_test(user_has_any_page_permission))
    def dispatch(self, request, *args, **kwargs):
        return super().dispatch(request, *args, **kwargs)
    def get_object(self, queryset=None):
        # Always operate on the specific page subclass
        return get_object_or_404(Page, id=self.pk).specific
    def get_edit_handler(self):
        return self.object.get_edit_handler()
    def get_page_subtitle(self):
        return self.object.get_admin_display_title()
class RevisionsUnschedule(RevisionsUnscheduleView):
    """Cancel a scheduled publish; requires the unschedule page permission."""

    model = Page
    edit_url_name = "wagtailadmin_pages:edit"
    history_url_name = "wagtailadmin_pages:history"
    revisions_unschedule_url_name = "wagtailadmin_pages:revisions_unschedule"
    def setup(self, request, page_id, revision_id, *args, **kwargs):
        # Rename path kwargs from pk to page_id
        return super().setup(request, page_id, revision_id, *args, **kwargs)
    def get_object(self, queryset=None):
        page = get_object_or_404(Page, id=self.pk).specific
        user_perms = UserPagePermissionsProxy(self.request.user)
        if not user_perms.for_page(page).can_unschedule():
            raise PermissionDenied
        return page
    def get_object_display_title(self):
        return self.object.get_admin_display_title()
| bsd-3-clause | da1e73246f9c4969fbba9d801e342b79 | 32.8 | 104 | 0.663462 | 3.824611 | false | false | false | false |
wagtail/wagtail | wagtail/images/migrations/0016_deprecate_rendition_filter_relation.py | 4 | 2721 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-11 17:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailimages", "0015_fill_filter_spec_field"),
]
operations = [
migrations.AlterField(
model_name="rendition",
name="filter_spec",
field=models.CharField(db_index=True, max_length=255),
),
# New step introduced in Wagtail 1.8.1:
#
# Reduce max_length of rendition.focal_point_key to 16, from the previous value of 255
# which existed on Wagtail <= 1.8. MySQL has a limit of 767 (on InnoDB) or 1000 (on MyISAM)
# bytes; depending on the character encoding used, this limit may be reached by the
# original index on ['image', 'filter', 'focal_point_key'] (= 1 varchar and two FKs)
# or the new index on ['image', 'filter_spec', 'focal_point_key'] (= 2 varchars and one FK).
#
# To mitigate this, we reduce focal_point_key in the following places:
# * Retrospectively in the original migration, so that configurations that previously
# failed on wagtailimages/0001_initial can now run (see #2925 / #2953);
# * Here, so that previously-working Wagtail <=1.7 installations that failed on the
# AlterUniqueTogether below when upgrading to 1.8 can now succeed;
# * In the newly-added migration wagtailimages/0017, so that existing Wagtail 1.8 installations
# that successfully applied the old 1.8 version of this migration are consistent with
# other setups.
#
# Since Django will optimise away any AlterField operations that appear to match
# the current state (according to earlier migrations) - which would cause them to be
# skipped on installations that ran the earlier (max_length=255) versions of the
# migrations - we need to make them superficially different; we do this by stepping
# max_length down from 18 to 17 then 16.
#
# Projects with a custom image model don't have to worry about this - they'll have an existing
# migration with the max_length=255, and will get a new migration reducing it to max_length=16
# the next time they run makemigrations.
migrations.AlterField(
model_name="rendition",
name="focal_point_key",
field=models.CharField(
blank=True, default="", max_length=17, editable=False
),
),
migrations.AlterUniqueTogether(
name="rendition",
unique_together={("image", "filter_spec", "focal_point_key")},
),
]
| bsd-3-clause | 679dcaa5de0bdf014c41ad3b81669269 | 48.472727 | 103 | 0.636531 | 4.043091 | false | false | false | false |
wagtail/wagtail | wagtail/search/views/queries.py | 4 | 1540 | from django.core.paginator import Paginator
from django.template.response import TemplateResponse
from wagtail.admin.forms.search import SearchForm
from wagtail.admin.modal_workflow import render_modal_workflow
from wagtail.search import models
from wagtail.search.utils import normalise_query_string
def chooser(request, get_results=False):
    """Render the popular-search-terms chooser modal.

    When ``get_results`` is True, only the results partial is rendered
    (used for AJAX refreshes of the result list); otherwise the full
    modal workflow response is returned.
    """
    # Get most popular queries
    queries = models.Query.get_most_popular()

    # If searching, filter results by query string
    query_string = None
    if "q" in request.GET:
        searchform = SearchForm(request.GET)
        if searchform.is_valid():
            query_string = searchform.cleaned_data["q"]
            # Match against the normalised form, consistent with how
            # query strings are stored.
            queries = queries.filter(
                query_string__icontains=normalise_query_string(query_string)
            )
    else:
        searchform = SearchForm()

    # Paginate, 10 queries per page; "p" is the page number parameter.
    paginator = Paginator(queries, per_page=10)
    queries = paginator.get_page(request.GET.get("p"))

    # Render
    if get_results:
        return TemplateResponse(
            request,
            "wagtailsearch/queries/chooser/results.html",
            {
                "queries": queries,
            },
        )
    else:
        return render_modal_workflow(
            request,
            "wagtailsearch/queries/chooser/chooser.html",
            None,
            {
                "queries": queries,
                "searchform": searchform,
            },
            json_data={"step": "chooser"},
        )
def chooserresults(request):
    """AJAX endpoint: render only the results partial of the chooser."""
    return chooser(request, get_results=True)
| bsd-3-clause | 83397aa2cf49380112fa8de262021495 | 28.615385 | 76 | 0.615584 | 4.350282 | false | false | false | false |
wagtail/wagtail | wagtail/utils/setup.py | 4 | 2739 | import io
import json
import os
import subprocess
from setuptools import Command
from setuptools.command.bdist_egg import bdist_egg
from setuptools.command.sdist import sdist as base_sdist
from wagtail import __semver__
class assets_mixin:
    """Mixin providing npm build/publish steps and package.json version
    syncing, shared by the custom setup.py commands below."""

    def compile_assets(self):
        """Build the front-end assets via ``npm run build``; exit with
        status 1 on any failure (npm missing or build error)."""
        try:
            subprocess.check_call(["npm", "run", "build"])
        except (OSError, subprocess.CalledProcessError) as e:
            print("Error compiling assets: " + str(e))  # noqa
            raise SystemExit(1)

    def publish_assets(self):
        """Publish the ``client`` package to npm; exit with status 1 on failure."""
        try:
            subprocess.check_call(["npm", "publish", "client"])
        except (OSError, subprocess.CalledProcessError) as e:
            print("Error publishing front-end assets: " + str(e))  # noqa
            raise SystemExit(1)

    def bump_client_version(self):
        """
        Writes the current Wagtail version number into package.json
        """
        path = os.path.join(".", "client", "package.json")
        try:
            # Open with an explicit encoding and a context manager: the
            # previous code leaked the file handle and then called .decode()
            # on the already-decoded str returned by a text-mode read(),
            # which raises AttributeError on Python 3.
            with io.open(path, "r", encoding="utf-8") as input_file:
                package = json.load(input_file)
        except (OSError, ValueError) as e:
            # str(e) is required here: concatenating the exception object
            # itself would raise TypeError and mask the real error.
            print("Unable to read " + path + " " + str(e))  # noqa
            raise SystemExit(1)

        package["version"] = __semver__

        try:
            with io.open(path, "w", encoding="utf-8") as f:
                f.write(str(json.dumps(package, indent=2, ensure_ascii=False)))
        except (IOError) as e:
            print("Error setting the version for front-end assets: " + str(e))  # noqa
            raise SystemExit(1)
class assets(Command, assets_mixin):
    """setup.py command: sync the Wagtail version into client/package.json,
    then build and publish the front-end assets."""

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        self.bump_client_version()
        self.compile_assets()
        self.publish_assets()
class sdist(base_sdist, assets_mixin):
    """sdist variant that compiles the front-end assets before packaging."""

    def run(self):
        self.compile_assets()
        base_sdist.run(self)
class check_bdist_egg(bdist_egg):
    """bdist_egg variant that warns when compiled front-end assets are absent."""

    # If this file does not exist, warn the user to compile the assets
    sentinel_dir = "wagtail/wagtailadmin/static/"

    def run(self):
        bdist_egg.run(self)
        if not os.path.isdir(self.sentinel_dir):
            print(  # noqa
                "\n".join(
                    [  # noqa
                        "************************************************************",
                        "The front end assets for Wagtail are missing.",
                        "To generate the assets, please refer to the documentation in",
                        "docs/contributing/developing.md",
                        "************************************************************",
                    ]
                )
            )
wagtail/wagtail | wagtail/admin/widgets/button_select.py | 4 | 1278 | from django import forms
from django.utils.translation import gettext_lazy as _
class ButtonSelect(forms.Select):
    """
    A select widget for fields with choices. Displays as a list of buttons.
    """

    # The real value travels in a hidden input; the buttons are rendered
    # by the templates below.
    input_type = "hidden"
    template_name = "wagtailadmin/widgets/button_select.html"
    option_template_name = "wagtailadmin/widgets/button_select_option.html"
class BooleanButtonSelect(ButtonSelect):
    """
    A select widget for boolean fields. Displays as three buttons. "All", "Yes" and "No".
    """

    def __init__(self, attrs=None):
        super().__init__(attrs, (
            ("", _("All")),
            ("true", _("Yes")),
            ("false", _("No")),
        ))

    def format_value(self, value):
        # Normalise Python booleans and their lowercase string forms to the
        # single-element list the template expects; anything else renders as
        # "All" (empty string).
        return {
            True: ["true"],
            False: ["false"],
            "true": ["true"],
            "false": ["false"],
        }.get(value, "")

    def value_from_datadict(self, data, files, name):
        # Accept Python booleans and both capitalisations of their string
        # forms; unknown/missing values map to None ("All").
        return {
            True: True,
            "True": True,
            "False": False,
            False: False,
            "true": True,
            "false": False,
        }.get(data.get(name))
| bsd-3-clause | 7b8fa695d4bcd5882379c851c3336d5c | 25.625 | 89 | 0.507825 | 4.30303 | false | false | false | false |
wagtail/wagtail | wagtail/utils/deprecation.py | 4 | 2966 | import warnings
from importlib import import_module
class RemovedInWagtail50Warning(DeprecationWarning):
pass
removed_in_next_version_warning = RemovedInWagtail50Warning
class RemovedInWagtail60Warning(PendingDeprecationWarning):
pass
class MovedDefinitionHandler:
    """
    Wraps a module object so that definitions relocated to another module can
    still be imported from their old location, emitting a deprecation warning
    on access (the import still completes successfully).

    To use, place the following code in the old module:

        import sys
        from wagtail.utils.deprecation import MovedDefinitionHandler, RemovedInWagtailXWarning

        MOVED_DEFINITIONS = {
            'SomeClassOrVariableName': 'path.to.new.module',
        }

        sys.modules[__name__] = MovedDefinitionHandler(sys.modules[__name__], MOVED_DEFINITIONS, RemovedInWagtailXWarning)

    If the definition has also been renamed, give its new name alongside the
    new module path as a tuple:

        MOVED_DEFINITIONS = {
            'SomeClassOrVariableName': ('path.to.new.module', 'NewClassOrVariableName'),
        }
    """

    def __init__(self, real_module, moved_definitions, warning_class):
        # Module being wrapped; genuine attributes are served straight from it.
        self.real_module = real_module
        # Mapping: old name -> new module path, or (new module path, new name).
        self.moved_definitions = moved_definitions
        # Warning category emitted when an old location is accessed.
        self.warning_class = warning_class

    def __getattr__(self, name):
        try:
            return getattr(self.real_module, name)
        except AttributeError as e:
            if name not in self.moved_definitions:
                # Not one of our moved names: re-raise the original error
                # without this handler appearing in the stack trace.
                raise e from None

            destination = self.moved_definitions[name]
            if isinstance(destination, tuple):
                target_module_name, target_name = destination
            else:
                target_module_name, target_name = destination, name

            if target_name == name:
                message = "%s has been moved from %s to %s" % (
                    name, self.real_module.__name__, target_module_name)
            else:
                message = "%s has been moved from %s to %s and renamed to %s" % (
                    name, self.real_module.__name__, target_module_name, target_name)
            warnings.warn(message, category=self.warning_class, stacklevel=2)

            # Load the requested definition from its new home...
            definition = getattr(import_module(target_module_name), target_name)
            # ...and cache it on the wrapped module so subsequent accesses
            # skip this slow path entirely.
            setattr(self.real_module, name, definition)
            return definition
| bsd-3-clause | cfad856b77a2f01df5d1fb3ab89adc65 | 33.091954 | 118 | 0.63149 | 4.493939 | false | false | false | false |
wagtail/wagtail | wagtail/search/utils.py | 4 | 5988 | import operator
import re
from functools import partial
from django.apps import apps
from django.db import connections
from wagtail.search.index import RelatedFields, SearchField
from .query import MATCH_NONE, Phrase, PlainText
NOT_SET = object()  # sentinel: lets None be a legitimate explicit initializer


def balanced_reduce(operator, seq, initializer=NOT_SET):
    """
    Produce the same result as Python's reduce(), but combine elements in a
    balanced (divide-and-conquer) order instead of strictly left-to-right.

    This matters when the operator builds nested data structures such as
    search query objects: left-to-right reduction yields a structure whose
    depth grows linearly with the input, which can crash with recursion
    errors when traversed. Halving the sequence at each step keeps the
    depth logarithmic:

        reduce(add, [1..8])          -> (1 + (2 + (3 + (4 + ...))))  depth 7
        balanced_reduce(add, [1..8]) -> ((1+2)+(3+4)) + ((5+6)+(7+8)) depth 2
    """
    # Working on a concrete list keeps the slicing logic simple.
    items = seq if isinstance(seq, list) else list(seq)

    # An explicit initializer of None must be honoured, hence the sentinel.
    if initializer is not NOT_SET:
        if not items:
            return initializer
        return operator(initializer, balanced_reduce(operator, items))

    if not items:
        raise TypeError("reduce() of empty sequence with no initial value")
    if len(items) == 1:
        return items[0]

    midpoint = len(items) // 2
    left = balanced_reduce(operator, items[:midpoint])
    right = balanced_reduce(operator, items[midpoint:])
    return operator(left, right)
# Reduce any iterable to a single value using a logical OR e.g. (a | b | ...)
OR = partial(balanced_reduce, operator.or_)
# Reduce any iterable to a single value using a logical AND e.g. (a & b & ...)
AND = partial(balanced_reduce, operator.and_)
# Reduce any iterable to a single value using an addition
ADD = partial(balanced_reduce, operator.add)
# Reduce any iterable to a single value using a multiplication
MUL = partial(balanced_reduce, operator.mul)
MAX_QUERY_STRING_LENGTH = 255  # longest query string we will keep


def normalise_query_string(query_string):
    """
    Canonicalise a user-supplied query string: cap its length at
    MAX_QUERY_STRING_LENGTH, lowercase it, collapse runs of spaces to a
    single space and trim leading/trailing whitespace.
    """
    # Slicing is a no-op for short strings, so no length check is needed.
    normalised = query_string[:MAX_QUERY_STRING_LENGTH].lower()
    return re.sub(" +", " ", normalised).strip()
def separate_filters_from_query(query_string):
    """
    Split ``key:value`` / ``key:"quoted value"`` filter expressions out of a
    query string.

    Returns a (filters, remaining_query) pair: ``filters`` is a dict of the
    extracted expressions (surrounding quotes stripped from values, later
    duplicates winning) and ``remaining_query`` is the text left over once
    the expressions are removed.
    """
    filters_regexp = r'(\w+):(\w+|".+")'

    filters = {
        match.group(1): match.group(2).strip('"')
        for match in re.finditer(filters_regexp, query_string)
    }
    remaining = re.sub(filters_regexp, "", query_string).strip()
    return filters, remaining
def parse_query_string(query_string, operator=None, zero_terms=MATCH_NONE):
    """
    Parse a query string typed in by a user, extracting:

    - Quoted terms (for phrase search)
    - Filters

    For example, ``hello "this is a phrase" live:true`` is parsed into::

        filters: {'live': 'true'}
        tokens: And([PlainText('hello'), Phrase('this is a phrase')])

    ``zero_terms`` is returned as the query when no search terms remain.
    """
    filters, remainder = separate_filters_from_query(query_string)

    # Splitting on '"' alternates between unquoted and quoted segments:
    # even-indexed chunks are plain text, odd-indexed chunks are phrases.
    tokens = []
    for index, chunk in enumerate(remainder.split('"')):
        chunk = chunk.strip()
        if not chunk:
            continue
        if index % 2:
            tokens.append(Phrase(chunk))
        else:
            tokens.append(
                PlainText(chunk, operator=operator or PlainText.DEFAULT_OPERATOR)
            )

    if not tokens:
        return filters, zero_terms
    combine = OR if operator == "or" else AND
    return filters, combine(tokens)
def get_descendant_models(model):
    """
    Returns all descendants of a model, including the model itself.
    """
    descendants = set()
    for candidate in apps.get_models():
        if issubclass(candidate, model):
            descendants.add(candidate)
    # The model itself always belongs in the result, even if it is not
    # registered in the app registry's model list.
    descendants.add(model)
    return descendants
def get_content_type_pk(model):
# We import it locally because this file is loaded before apps are ready.
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(model).pk
def get_ancestors_content_types_pks(model):
"""
Returns content types ids for the ancestors of this model, excluding it.
"""
from django.contrib.contenttypes.models import ContentType
return [
ct.pk
for ct in ContentType.objects.get_for_models(
*model._meta.get_parent_list()
).values()
]
def get_descendants_content_types_pks(model):
"""
Returns content types ids for the descendants of this model, including it.
"""
from django.contrib.contenttypes.models import ContentType
return [
ct.pk
for ct in ContentType.objects.get_for_models(
*get_descendant_models(model)
).values()
]
def get_search_fields(search_fields):
    """Yield the SearchField leaves of a search-fields definition, expanding
    any RelatedFields entries recursively."""
    for field in search_fields:
        if isinstance(field, SearchField):
            yield field
        elif isinstance(field, RelatedFields):
            yield from get_search_fields(field.fields)
def get_postgresql_connections():
return [
connection
for connection in connections.all()
if connection.vendor == "postgresql"
]
| bsd-3-clause | 7c71d0fe81c43581d8bf1ff1732e9d66 | 28.643564 | 104 | 0.647128 | 3.978738 | false | false | false | false |
wagtail/wagtail | wagtail/images/migrations/0012_copy_image_permissions_to_collections.py | 4 | 1769 | # -*- coding: utf-8 -*-
from django.db import migrations
def get_image_permissions(apps):
# return a queryset of the 'add_image' and 'change_image' permissions
Permission = apps.get_model("auth.Permission")
ContentType = apps.get_model("contenttypes.ContentType")
image_content_type, _created = ContentType.objects.get_or_create(
model="image",
app_label="wagtailimages",
)
return Permission.objects.filter(
content_type=image_content_type, codename__in=["add_image", "change_image"]
)
def copy_image_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model("wagtailcore.Collection")
Group = apps.get_model("auth.Group")
GroupCollectionPermission = apps.get_model("wagtailcore.GroupCollectionPermission")
root_collection = Collection.objects.get(depth=1)
for permission in get_image_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group, collection=root_collection, permission=permission
)
def remove_image_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model("wagtailcore.GroupCollectionPermission")
image_permissions = get_image_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=image_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
("wagtailcore", "0026_group_collection_permission"),
("wagtailimages", "0011_image_collection"),
]
operations = [
migrations.RunPython(
copy_image_permissions_to_collections,
remove_image_permissions_from_collections,
),
]
| bsd-3-clause | 151482c35b4a6cc96f12329ea8cfc733 | 33.019231 | 87 | 0.6987 | 4.221957 | false | false | false | false |
wagtail/wagtail | wagtail/admin/views/bulk_action/registry.py | 4 | 1640 | from wagtail import hooks
from wagtail.admin.views.bulk_action import BulkAction
class BulkActionRegistry:
    """Lazily collects bulk-action classes registered through the
    'register_bulk_action' hook, indexed as
    {app_label: {model_name: {action_type: action_class}}}."""

    def __init__(self):
        self.actions = {}  # {app_label: {model_name: {action_type: action_class}}}
        self.has_scanned_for_bulk_actions = False

    def _scan_for_bulk_actions(self):
        # Populate self.actions exactly once, on first lookup.
        if self.has_scanned_for_bulk_actions:
            return
        for action_class in hooks.get_hooks("register_bulk_action"):
            if not issubclass(action_class, BulkAction):
                raise Exception(
                    "{} is not a subclass of {}".format(
                        action_class.__name__, BulkAction.__name__
                    )
                )
            for model in action_class.models:
                app_actions = self.actions.setdefault(model._meta.app_label, {})
                model_actions = app_actions.setdefault(model._meta.model_name, {})
                model_actions[action_class.action_type] = action_class
        self.has_scanned_for_bulk_actions = True

    def get_bulk_actions_for_model(self, app_label, model_name):
        """Return the registered action classes for one model (may be empty)."""
        self._scan_for_bulk_actions()
        return self.actions.get(app_label, {}).get(model_name, {}).values()

    def get_bulk_action_class(self, app_label, model_name, action_type):
        """Return the action class for (model, action_type), or None."""
        self._scan_for_bulk_actions()
        return (
            self.actions.get(app_label, {}).get(model_name, {}).get(action_type, None)
        )
bulk_action_registry = BulkActionRegistry()
| bsd-3-clause | 0f9c0578fd2662bacf0d53fa7f887f34 | 40 | 86 | 0.55 | 4.029484 | false | false | false | false |
wagtail/wagtail | wagtail/rich_text/__init__.py | 4 | 5190 | import re
from functools import lru_cache
from html import unescape
from django.core.validators import MaxLengthValidator
from django.db.models import Model
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.utils.safestring import mark_safe
from wagtail.rich_text.feature_registry import FeatureRegistry
from wagtail.rich_text.rewriters import EmbedRewriter, LinkRewriter, MultiRuleRewriter
features = FeatureRegistry()
# Rewriter function to be built up on first call to expand_db_html, using the utility classes
# from wagtail.rich_text.rewriters along with the embed handlers / link handlers registered
# with the feature registry
@lru_cache(maxsize=1)
def get_rewriter():
    """Build (once, cached) the combined rewriter that converts
    database-format rich text into front-end HTML, from the link and embed
    handlers currently registered with the feature registry."""
    embed_rules = features.get_embed_types()
    link_rules = features.get_link_types()
    return MultiRuleRewriter(
        [
            LinkRewriter(
                # linktype -> function expanding stored attrs to real HTML
                {
                    linktype: handler.expand_db_attributes
                    for linktype, handler in link_rules.items()
                },
                # linktype -> function yielding referenced-object tuples
                {
                    linktype: handler.extract_references
                    for linktype, handler in link_rules.items()
                },
            ),
            EmbedRewriter(
                # embedtype -> function expanding stored attrs to real HTML
                {
                    embedtype: handler.expand_db_attributes
                    for embedtype, handler in embed_rules.items()
                },
                # embedtype -> reference extractor (loop variable is named
                # "linktype" here but iterates embed_rules).
                {
                    linktype: handler.extract_references
                    for linktype, handler in embed_rules.items()
                },
            ),
        ]
    )
def expand_db_html(html):
    """
    Expand database-representation HTML into proper HTML usable on front-end templates
    """
    rewriter = get_rewriter()
    return rewriter(html)
def extract_references_from_rich_text(html):
    """Yield a reference tuple for every link/embed entity found in ``html``."""
    rewriter = get_rewriter()
    yield from rewriter.extract_references(html)
def get_text_for_indexing(richtext):
    """
    Return a plain text version of a rich text string, suitable for search
    indexing; like Django's strip_tags, but ensures that whitespace is left
    between block elements so that <p>hello</p><p>world</p> gives
    "hello world", not "helloworld".
    """
    # Pad closing block-level tags (</p>, </h1>-</h6>, </li>, </blockquote>)
    # with a trailing space before the tags are stripped.
    spaced = re.sub(
        r"(</(p|h\d|li|blockquote)>)", r"\1 ", richtext, flags=re.IGNORECASE
    )
    # Self-closing <br /> and <hr /> also act as separators.
    spaced = re.sub(r"(<(br|hr)\s*/>)", r"\1 ", spaced, flags=re.IGNORECASE)
    return unescape(strip_tags(spaced).strip())
class RichText:
    """
    A custom object used to represent a renderable rich text value.
    Provides a 'source' property to access the original source code,
    and renders to the front-end HTML rendering.
    Used as the native value of a wagtailcore.blocks.field_block.RichTextBlock.
    """

    def __init__(self, source):
        # Normalise None (and other falsy values) to "" so source is always str.
        self.source = source or ""

    def __html__(self):
        # Expand DB-format entities to real HTML, then wrap in the shared template.
        return render_to_string(
            "wagtailcore/shared/richtext.html", {"html": expand_db_html(self.source)}
        )

    def __str__(self):
        return mark_safe(self.__html__())

    def __bool__(self):
        # Truthy iff there is any source content.
        return bool(self.source)
class EntityHandler:
    """
    An 'entity' is a placeholder tag within the saved rich text, which needs to be rewritten
    into real HTML at the point of rendering. Typically (but not necessarily) the entity will
    be a reference to a model to be fetched to have its data output into the rich text content
    (so that we aren't storing potentially changeable data within the saved rich text).
    An EntityHandler defines how this rewriting is performed.

    Currently Wagtail supports two kinds of entity: links (represented as <a linktype="...">...</a>)
    and embeds (represented as <embed embedtype="..." />).
    """

    @staticmethod
    def get_model():
        """
        If supported, returns the type of model able to be handled by this handler, e.g. Page.
        """
        raise NotImplementedError

    @classmethod
    def get_instance(cls, attrs: dict) -> Model:
        """Fetch the model instance referenced by the entity's ``id`` attribute."""
        model = cls.get_model()
        return model._default_manager.get(id=attrs["id"])

    @staticmethod
    def expand_db_attributes(attrs: dict) -> str:
        """
        Given a dict of attributes from the entity tag
        stored in the database, returns the real HTML representation.
        """
        raise NotImplementedError

    @classmethod
    def extract_references(cls, attrs):
        """
        Yields a sequence of (content_type_id, object_id, model_path, content_path) tuples for the
        database objects referenced by this entity, as per
        wagtail.models.ReferenceIndex._extract_references_from_object

        Base implementation: no references.
        """
        return []
class LinkHandler(EntityHandler):
pass
class EmbedHandler(EntityHandler):
pass
class RichTextMaxLengthValidator(MaxLengthValidator):
    """
    A variant of MaxLengthValidator that only counts text (not HTML tags) towards the limit.
    Un-escapes entities for consistency with client-side character count.
    """

    def clean(self, x):
        # Length is measured on the tag-stripped, entity-unescaped text.
        return len(unescape(strip_tags(x)))
| bsd-3-clause | cc1cd7e5b43e02cf7e833fda08b5210e | 31.236025 | 100 | 0.642967 | 4.247136 | false | false | false | false |
wagtail/wagtail | wagtail/migrations/0068_log_entry_empty_object.py | 4 | 1040 | # Generated by Django 4.0.2 on 2022-02-22 04:27
from django.db import migrations
def replace_empty_string_with_empty_object(apps, schema_editor):
    """Forwards step: normalise log entries whose data_json is '""' to '{}'."""
    ModelLogEntry = apps.get_model("wagtailcore.ModelLogEntry")
    PageLogEntry = apps.get_model("wagtailcore.PageLogEntry")
    ModelLogEntry.objects.filter(data_json='""').update(data_json="{}")
    PageLogEntry.objects.filter(data_json='""').update(data_json="{}")
def revert_empty_object_to_empty_string(apps, schema_editor):
    """Backwards step: restore '{}' data_json values to the old '""' form."""
    ModelLogEntry = apps.get_model("wagtailcore.ModelLogEntry")
    PageLogEntry = apps.get_model("wagtailcore.PageLogEntry")
    ModelLogEntry.objects.filter(data_json="{}").update(data_json='""')
    PageLogEntry.objects.filter(data_json="{}").update(data_json='""')
class Migration(migrations.Migration):
    # Data migration only: rewrites stored log-entry JSON, reversible.

    dependencies = [
        ("wagtailcore", "0067_alter_pagerevision_content_json"),
    ]

    operations = [
        migrations.RunPython(
            replace_empty_string_with_empty_object, revert_empty_object_to_empty_string
        ),
    ]
| bsd-3-clause | 21b324ae5e905bb3644412e57475b98d | 33.666667 | 87 | 0.697115 | 3.573883 | false | false | false | false |
python-control/python-control | examples/vehicle.py | 2 | 3257 | # vehicle.py - planar vehicle model (with flatness)
# RMM, 16 Jan 2022
import numpy as np
import matplotlib.pyplot as plt
import control as ct
import control.flatsys as fs
#
# Vehicle dynamics
#
# Function to take states, inputs and return the flat flag
def _vehicle_flat_forward(x, u, params={}):
# Get the parameter values
b = params.get('wheelbase', 3.)
# Create a list of arrays to store the flat output and its derivatives
zflag = [np.zeros(3), np.zeros(3)]
# Flat output is the x, y position of the rear wheels
zflag[0][0] = x[0]
zflag[1][0] = x[1]
# First derivatives of the flat output
zflag[0][1] = u[0] * np.cos(x[2]) # dx/dt
zflag[1][1] = u[0] * np.sin(x[2]) # dy/dt
# First derivative of the angle
thdot = (u[0]/b) * np.tan(u[1])
# Second derivatives of the flat output (setting vdot = 0)
zflag[0][2] = -u[0] * thdot * np.sin(x[2])
zflag[1][2] = u[0] * thdot * np.cos(x[2])
return zflag
# Function to take the flat flag and return states, inputs
def _vehicle_flat_reverse(zflag, params={}):
# Get the parameter values
b = params.get('wheelbase', 3.)
dir = params.get('dir', 'f')
# Create a vector to store the state and inputs
x = np.zeros(3)
u = np.zeros(2)
# Given the flat variables, solve for the state
x[0] = zflag[0][0] # x position
x[1] = zflag[1][0] # y position
if dir == 'f':
x[2] = np.arctan2(zflag[1][1], zflag[0][1]) # tan(theta) = ydot/xdot
elif dir == 'r':
# Angle is flipped by 180 degrees (since v < 0)
x[2] = np.arctan2(-zflag[1][1], -zflag[0][1])
else:
raise ValueError("unknown direction:", dir)
# And next solve for the inputs
u[0] = zflag[0][1] * np.cos(x[2]) + zflag[1][1] * np.sin(x[2])
thdot_v = zflag[1][2] * np.cos(x[2]) - zflag[0][2] * np.sin(x[2])
u[1] = np.arctan2(thdot_v, u[0]**2 / b)
return x, u
# Function to compute the RHS of the system dynamics
def _vehicle_update(t, x, u, params):
b = params.get('wheelbase', 3.) # get parameter values
dx = np.array([
np.cos(x[2]) * u[0],
np.sin(x[2]) * u[0],
(u[0]/b) * np.tan(u[1])
])
return dx
def _vehicle_output(t, x, u, params):
    """Full-state output map: the measured output is (x, y, theta) itself."""
    return x  # return x, y, theta (full state)
# Create differentially flat input/output system
vehicle = fs.FlatSystem(
_vehicle_flat_forward, _vehicle_flat_reverse, name="vehicle",
updfcn=_vehicle_update, outfcn=_vehicle_output,
inputs=('v', 'delta'), outputs=('x', 'y', 'theta'),
states=('x', 'y', 'theta'))
#
# Utility function to plot lane change manuever
#
def plot_lanechange(t, y, u, figure=None, yf=None):
# Plot the xy trajectory
plt.subplot(3, 1, 1, label='xy')
plt.plot(y[0], y[1])
plt.xlabel("x [m]")
plt.ylabel("y [m]")
if yf:
plt.plot(yf[0], yf[1], 'ro')
# Plot the inputs as a function of time
plt.subplot(3, 1, 2, label='v')
plt.plot(t, u[0])
plt.xlabel("t [sec]")
plt.ylabel("velocity [m/s]")
plt.subplot(3, 1, 3, label='delta')
plt.plot(t, u[1])
plt.xlabel("t [sec]")
plt.ylabel("steering [rad/s]")
plt.suptitle("Lane change manuever")
plt.tight_layout()
| bsd-3-clause | 7461265c9078a5a55e146fe0837ca7cd | 28.342342 | 77 | 0.581517 | 2.837108 | false | false | false | false |
python-control/python-control | control/config.py | 2 | 11065 | # config.py - package defaults
# RMM, 4 Nov 2012
#
# This file contains default values and utility functions for setting
# variables that control the behavior of the control package.
# Eventually it will be possible to read and write configuration
# files. For now, you can just choose between MATLAB and FBS default
# values + tweak a few other things.
import collections
import warnings
__all__ = ['defaults', 'set_defaults', 'reset_defaults',
'use_matlab_defaults', 'use_fbs_defaults',
'use_legacy_defaults', 'use_numpy_matrix']
# Package level default values
_control_defaults = {
'control.default_dt': 0,
'control.squeeze_frequency_response': None,
'control.squeeze_time_response': None,
'forced_response.return_x': False,
}
class DefaultDict(collections.UserDict):
    """Map names for settings from older versions to their renamed ones.

    Entries of the form ``'deprecated.<old>': '<new>'`` declare a rename.
    Writing to an old setting issues a FutureWarning and stores the value
    under the new name; reading an old setting returns the value stored
    under the new name.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        # Redirect writes aimed at a renamed setting to its current name.
        super().__setitem__(self._check_deprecation(key), value)

    def __missing__(self, key):
        # A legitimately-set old key would never be stored, so a miss may
        # mean the caller used the old name: resolve it and retry.
        renamed = self._check_deprecation(key)
        if renamed in self:
            return self[renamed]
        raise KeyError(key)

    def _check_deprecation(self, key):
        """Return the current name for ``key``, warning if it was renamed."""
        marker = f"deprecated.{key}"
        if marker not in self:
            return key
        repl = self[marker]
        warnings.warn(f"config.defaults['{key}'] has been renamed to "
                      f"config.defaults['{repl}'].",
                      FutureWarning, stacklevel=3)
        return repl
defaults = DefaultDict(_control_defaults)
def set_defaults(module, **keywords):
"""Set default values of parameters for a module.
The set_defaults() function can be used to modify multiple parameter
values for a module at the same time, using keyword arguments:
control.set_defaults('module', param1=val, param2=val)
"""
if not isinstance(module, str):
raise ValueError("module must be a string")
for key, val in keywords.items():
keyname = module + '.' + key
if keyname not in defaults and f"deprecated.{keyname}" not in defaults:
raise TypeError(f"unrecognized keyword: {key}")
defaults[module + '.' + key] = val
def reset_defaults():
"""Reset configuration values to their default (initial) values."""
# System level defaults
defaults.update(_control_defaults)
from .freqplot import _freqplot_defaults, _nyquist_defaults
defaults.update(_freqplot_defaults)
defaults.update(_nyquist_defaults)
from .nichols import _nichols_defaults
defaults.update(_nichols_defaults)
from .pzmap import _pzmap_defaults
defaults.update(_pzmap_defaults)
from .rlocus import _rlocus_defaults
defaults.update(_rlocus_defaults)
from .xferfcn import _xferfcn_defaults
defaults.update(_xferfcn_defaults)
from .statesp import _statesp_defaults
defaults.update(_statesp_defaults)
from .iosys import _iosys_defaults
defaults.update(_iosys_defaults)
from .optimal import _optimal_defaults
defaults.update(_optimal_defaults)
def _get_param(module, param, argval=None, defval=None, pop=False, last=False):
    """Return the default value for a configuration option.

    The _get_param() function is a utility function used to get the value of a
    parameter for a module based on the default parameter settings and any
    arguments passed to the function.  The precedence order for parameters is
    the value passed to the function (as a keyword), the value from the
    config.defaults dictionary, and the default value `defval`.

    Parameters
    ----------
    module : str
        Name of the module whose parameters are being requested.
    param : str
        Name of the parameter value to be determined.
    argval : object or dict
        Value of the parameter as passed to the function.  This can either be
        an object or a dictionary (i.e. the keyword list from the function
        call).  Defaults to None.
    defval : object
        Default value of the parameter to use, if it is not located in the
        `config.defaults` dictionary.  If a dictionary is provided, then
        `module.param` is used to determine the default value.  Defaults to
        None.
    pop : bool, optional
        If True and if argval is a dict, then pop (remove) the parameter
        entry from the argval dict after retrieving it.  This allows the use
        of a keyword argument list to be passed through to other functions
        internal to the function being called.
    last : bool, optional
        If True, check to make sure dictionary is empty after processing.

    Returns
    -------
    object
        The resolved parameter value (argval > config.defaults > defval).

    """
    # Make sure that we were passed sensible arguments
    if not isinstance(module, str) or not isinstance(param, str):
        raise ValueError("module and param must be strings")

    # Construct the name of the key, for later use
    key = module + '.' + param

    # If we were passed a dict for the argval, get the param value from there
    if isinstance(argval, dict):
        # pop=True consumes the entry so leftover kwargs can be detected
        val = argval.pop(param, None) if pop else argval.get(param, None)
        if last and argval:
            raise TypeError("unrecognized keywords: " + str(argval))
        argval = val

    # If we were passed a dict for the defval, get the param value from there
    if isinstance(defval, dict):
        defval = defval.get(key, None)

    # Return the parameter value to use (argval > defaults > defval)
    return argval if argval is not None else defaults.get(key, defval)
# Set defaults to match MATLAB
def use_matlab_defaults():
    """Use MATLAB compatible configuration settings.

    The following conventions are used:
        * Bode plots plot gain in dB, phase in degrees, frequency in
          rad/sec, with grids
        * State space class and functions use Numpy matrix objects
    """
    # MATLAB-style Bode plots: dB magnitude, phase in degrees, rad/sec, grid on
    set_defaults('freqplot', dB=True, deg=True, grid=True, Hz=False)
    # MATLAB represents state-space matrices with numpy.matrix semantics
    set_defaults('statesp', use_numpy_matrix=True)
# Set defaults to match FBS (Astrom and Murray)
def use_fbs_defaults():
    """Use `Feedback Systems <http://fbsbook.org>`_ (FBS) compatible settings.

    The following conventions are used:
        * Bode plots plot gain in powers of ten, phase in degrees,
          frequency in rad/sec, no grid
        * Nyquist plots use dashed lines for mirror image of Nyquist curve
    """
    # FBS-style Bode plots: absolute magnitude, degrees, rad/sec, no grid
    set_defaults('freqplot', dB=False, deg=True, grid=False, Hz=False)
    # Mirror image of the Nyquist curve is drawn dashed
    set_defaults('nyquist', mirror_style='--')
# Decide whether to use numpy.matrix for state space operations
def use_numpy_matrix(flag=True, warn=True):
    """Turn on/off use of Numpy `matrix` class for state space operations.

    Parameters
    ----------
    flag : bool
        If flag is `True` (default), use the deprecated Numpy `matrix`
        class to represent matrices in the `~control.StateSpace` class
        and functions.  If flag is `False`, then matrices are
        represented by a 2D `ndarray` object.
    warn : bool
        If flag is `True` (default), issue a warning when turning on the
        use of the Numpy `matrix` class.  Set `warn` to false to omit
        display of the warning message.

    Notes
    -----
    Prior to release 0.9.x, the default type for 2D arrays is the Numpy
    `matrix` class.  Starting in release 0.9.0, the default type for state
    space operations is a 2D array.
    """
    if warn and flag:
        # numpy.matrix is deprecated upstream; let the caller know
        warnings.warn("Return type numpy.matrix is deprecated.",
                      stacklevel=2, category=DeprecationWarning)
    set_defaults('statesp', use_numpy_matrix=flag)
def use_legacy_defaults(version):
    """Sets the defaults to whatever they were in a given release.

    Parameters
    ----------
    version : string
        Version number of the defaults desired. Ranges from '0.1' to '0.8.4'.

    Returns
    -------
    (major, minor, patch) : tuple of int
        The version number that was matched.

    Raises
    ------
    ValueError
        If the version string cannot be parsed.
    """
    import re
    (major, minor, patch) = (None, None, None)  # default values

    # Early release tag format: REL-0.N
    match = re.match(r"REL-0\.([12])", version)
    if match: (major, minor, patch) = (0, int(match.group(1)), 0)

    # Early release tag format: control-0.Np
    match = re.match(r"control-0\.([3-6])([a-d])", version)
    if match: (major, minor, patch) = \
        (0, int(match.group(1)), ord(match.group(2)) - ord('a') + 1)

    # Early release tag format: v0.Np
    match = re.match(r"[vV]?0\.([3-6])([a-d])", version)
    if match: (major, minor, patch) = \
        (0, int(match.group(1)), ord(match.group(2)) - ord('a') + 1)

    # Abbreviated version format: vM.N or M.N
    # NOTE: the optional 'v' prefix must stay *outside* the capture group;
    # capturing it (as in "([vV]?[0-9])") made int() crash on inputs like
    # "v0.9" with "invalid literal for int()".
    match = re.match(r"[vV]?([0-9])\.([0-9])", version)
    if match: (major, minor, patch) = \
        (int(match.group(1)), int(match.group(2)), 0)

    # Standard version format: vM.N.P or M.N.P
    match = re.match(r"[vV]?([0-9])\.([0-9])\.([0-9])", version)
    if match: (major, minor, patch) = \
        (int(match.group(1)), int(match.group(2)), int(match.group(3)))

    # Make sure we found a match
    if major is None or minor is None:
        raise ValueError("Version number not recognized. Try M.N.P format.")

    #
    # Go backwards through releases and reset defaults
    #
    reset_defaults()            # start from a clean slate

    # Version 0.9.2: changed Nyquist plot defaults
    # (parenthesized so that the patch-level test only applies to 0.9.x;
    # the unparenthesized original also matched, e.g., version 1.9.1)
    if major == 0 and (minor < 9 or (minor == 9 and patch < 2)):
        from math import inf

        # Reset Nyquist defaults
        set_defaults('nyquist', indent_radius=0.1, max_curve_magnitude=inf,
                     max_curve_offset=0, primary_style=['-', '-'],
                     mirror_style=['--', '--'], start_marker_size=0)

    # Version 0.9.0:
    if major == 0 and minor < 9:
        # switched to 'array' as default for state space objects
        set_defaults('statesp', use_numpy_matrix=True)

        # switched to 0 (=continuous) as default timestep
        set_defaults('control', default_dt=None)

        # changed iosys naming conventions
        set_defaults('iosys', state_name_delim='.',
                     duplicate_system_name_prefix='copy of ',
                     duplicate_system_name_suffix='',
                     linearized_system_name_prefix='',
                     linearized_system_name_suffix='_linearized')

        # turned off _remove_useless_states
        set_defaults('statesp', remove_useless_states=True)

        # forced_response no longer returns x by default
        set_defaults('forced_response', return_x=True)

        # time responses are only squeezed if SISO
        set_defaults('control', squeeze_time_response=True)

        # switched mirror_style of nyquist from '-' to '--'
        set_defaults('nyquist', mirror_style='-')

    return (major, minor, patch)
| bsd-3-clause | d2ac5d7d3eb82ddf8e8d97a40da2e6ef | 35.160131 | 79 | 0.642476 | 3.920978 | false | false | false | false |
python-control/python-control | control/canonical.py | 2 | 14023 | # canonical.py - functions for converting systems to canonical forms
# RMM, 10 Nov 2012
from .exception import ControlNotImplemented, ControlSlycot
from .namedio import issiso
from .statesp import StateSpace, _convert_to_statespace
from .statefbk import ctrb, obsv
import numpy as np
from numpy import zeros, zeros_like, shape, poly, iscomplex, vstack, hstack, \
transpose, empty, finfo, float64
from numpy.linalg import solve, matrix_rank, eig
from scipy.linalg import schur
__all__ = ['canonical_form', 'reachable_form', 'observable_form', 'modal_form',
'similarity_transform', 'bdschur']
def canonical_form(xsys, form='reachable'):
    """Convert a system into canonical form.

    Parameters
    ----------
    xsys : StateSpace object
        System to be transformed, with state 'x'
    form : str
        Canonical form for transformation.  Chosen from:
          * 'reachable' - reachable canonical form
          * 'observable' - observable canonical form
          * 'modal' - modal canonical form

    Returns
    -------
    zsys : StateSpace object
        System in desired canonical form, with state 'z'
    T : (M, M) real ndarray
        Coordinate transformation matrix, z = T * x
    """
    # Dispatch to the routine implementing the requested form
    if form == 'reachable':
        return reachable_form(xsys)
    if form == 'observable':
        return observable_form(xsys)
    if form == 'modal':
        return modal_form(xsys)

    # Unrecognized (or not yet supported) canonical form
    raise ControlNotImplemented(
        "Canonical form '%s' not yet implemented" % form)
# Reachable canonical form
def reachable_form(xsys):
    """Convert a system into reachable canonical form.

    Parameters
    ----------
    xsys : StateSpace object
        System to be transformed, with state `x`

    Returns
    -------
    zsys : StateSpace object
        System in reachable canonical form, with state `z`
    T : (M, M) real ndarray
        Coordinate transformation: z = T * x
    """
    # Only SISO systems are handled at the moment
    if not issiso(xsys):
        raise ControlNotImplemented(
            "Canonical forms for MIMO systems not yet supported")

    # Start from a copy of the original system
    zsys = StateSpace(xsys)
    nstates = xsys.nstates

    # Input matrix of the canonical form is the first unit vector
    zsys.B = zeros_like(xsys.B)
    zsys.B[0, 0] = 1.0

    # Companion-form dynamics built from the characteristic polynomial
    charpoly = poly(xsys.A)
    zsys.A = zeros_like(xsys.A)
    for col in range(nstates):
        # First row holds the (normalized) polynomial coefficients
        zsys.A[0, col] = -charpoly[col + 1] / charpoly[0]
        # Ones on the subdiagonal
        if col + 1 < nstates:
            zsys.A[col + 1, col] = 1.0

    # Reachability matrices for the original and canonical coordinates
    Wrx = ctrb(xsys.A, xsys.B)
    Wrz = ctrb(zsys.A, zsys.B)

    if matrix_rank(Wrx) != nstates:
        raise ValueError("System not controllable to working precision.")

    # Tzx = Wrz @ inv(Wrx), computed as a matrix right division
    Tzx = solve(Wrx.T, Wrz.T).T

    # Since Wrx was inverted after its rank was verified, this check should
    # never fire; it is kept as a safety net.
    if matrix_rank(Tzx) != nstates:  # pragma: no cover
        raise ValueError("Transformation matrix singular to working precision.")

    # Output matrix: zsys.C = xsys.C @ inv(Tzx) (matrix right division)
    zsys.C = solve(Tzx.T, xsys.C.T).T

    return zsys, Tzx
def observable_form(xsys):
    """Convert a system into observable canonical form.

    Parameters
    ----------
    xsys : StateSpace object
        System to be transformed, with state `x`

    Returns
    -------
    zsys : StateSpace object
        System in observable canonical form, with state `z`
    T : (M, M) real ndarray
        Coordinate transformation: z = T * x

    Raises
    ------
    ValueError
        If the system is not observable to working precision.
    """
    # Check to make sure we have a SISO system
    if not issiso(xsys):
        raise ControlNotImplemented(
            "Canonical forms for MIMO systems not yet supported")

    # Create a new system, starting with a copy of the old one
    zsys = StateSpace(xsys)

    # Output matrix of the canonical form is the first unit (row) vector
    zsys.C = zeros_like(xsys.C)
    zsys.C[0, 0] = 1

    # Companion-form (observer) dynamics from the characteristic polynomial
    zsys.A = zeros_like(xsys.A)
    Apoly = poly(xsys.A)        # characteristic polynomial
    for i in range(0, xsys.nstates):
        # First column holds the (normalized) polynomial coefficients
        zsys.A[i, 0] = -Apoly[i+1] / Apoly[0]
        # Ones on the superdiagonal
        if (i+1 < xsys.nstates):
            zsys.A[i, i+1] = 1

    # Compute the observability matrices for each set of states
    Wrx = obsv(xsys.A, xsys.C)
    Wrz = obsv(zsys.A, zsys.C)

    # Consistency fix: mirror the controllability check in reachable_form()
    # so that an unobservable system fails with a clear message instead of
    # surfacing as a singular-matrix error further down.
    if matrix_rank(Wrx) != xsys.nstates:
        raise ValueError("System not observable to working precision.")

    # Transformation from one form to another: Tzx = inv(Wrz) @ Wrx
    Tzx = solve(Wrz, Wrx)       # matrix left division

    if matrix_rank(Tzx) != xsys.nstates:
        raise ValueError("Transformation matrix singular to working precision.")

    # Finally, compute the input matrix (transforms directly)
    zsys.B = Tzx @ xsys.B

    return zsys, Tzx
def similarity_transform(xsys, T, timescale=1, inverse=False):
    """Perform a similarity transformation, with optional time rescaling.

    Transform a linear state space system to a new state space
    representation z = T x, or x = T z, where T is an invertible matrix.

    Parameters
    ----------
    xsys : StateSpace object
        System to transform
    T : (M, M) array_like
        The matrix `T` defines the new set of coordinates z = T x.
    timescale : float, optional
        If present, also rescale the time unit to tau = timescale * t
    inverse : boolean, optional
        If False (default), transform so z = T x.  If True, transform
        so x = T z.

    Returns
    -------
    zsys : StateSpace object
        System in transformed coordinates, with state 'z'
    """
    # Start from a copy of the original system
    zsys = StateSpace(xsys)
    T = np.atleast_2d(T)

    def _rdiv(Y, M):
        # Right matrix division: solve X M = Y for X (i.e. Y @ inv(M))
        return solve(M.T, Y.T).T

    if inverse:
        # x = T z:  zdot = inv(T) A T z + inv(T) B u,  y = C T z
        zsys.A = solve(T, zsys.A) @ T / timescale
        zsys.B = solve(T, zsys.B) / timescale
        zsys.C = zsys.C @ T
    else:
        # z = T x:  zdot = T A inv(T) z + T B u,  y = C inv(T) z
        zsys.A = _rdiv(T @ zsys.A, T) / timescale
        zsys.B = T @ zsys.B / timescale
        zsys.C = _rdiv(zsys.C, T)

    return zsys
_IM_ZERO_TOL = np.finfo(np.float64).eps ** 0.5
_PMAX_SEARCH_TOL = 1.001
def _bdschur_defective(blksizes, eigvals):
"""Check for defective modal decomposition
Parameters
----------
blksizes: (N,) int ndarray
size of Schur blocks
eigvals: (M,) real or complex ndarray
Eigenvalues
Returns
-------
True iff Schur blocks are defective.
blksizes, eigvals are the 3rd and 4th results returned by mb03rd.
"""
if any(blksizes > 2):
return True
if all(blksizes == 1):
return False
# check eigenvalues associated with blocks of size 2
init_idxs = np.cumsum(np.hstack([0, blksizes[:-1]]))
blk_idx2 = blksizes == 2
im = eigvals[init_idxs[blk_idx2]].imag
re = eigvals[init_idxs[blk_idx2]].real
if any(abs(im) < _IM_ZERO_TOL * abs(re)):
return True
return False
def _bdschur_condmax_search(aschur, tschur, condmax):
    """Block-diagonal Schur decomposition search up to condmax

    Iterates mb03rd with different pmax values until:
      - result is non-defective;
      - or condition number of similarity transform is unchanging
        despite large pmax;
      - or condition number of similarity transform is close to condmax.

    Parameters
    ----------
    aschur: (N, N) real ndarray
        Real Schur-form matrix
    tschur: (N, N) real ndarray
        Orthogonal transformation giving aschur from some initial matrix a
    condmax: float
        Maximum condition number of final transformation. Must be >= 1.

    Returns
    -------
    amodal: (N, N) real ndarray
        block diagonal Schur form
    tmodal: (N, N) real ndarray
        similarity transformation give amodal from aschur
    blksizes: (M,) int ndarray
        Array of Schur block sizes
    eigvals: (N,) real or complex ndarray
        Eigenvalues of amodal (and a, etc.)

    Notes
    -----
    Outputs as for slycot.mb03rd

    aschur, tschur are as returned by scipy.linalg.schur.
    """
    # mb03rd is the SLICOT routine that merges/splits Schur blocks subject
    # to a bound (pmax) on the norm of the off-diagonal elimination factors
    try:
        from slycot import mb03rd
    except ImportError:
        raise ControlSlycot("can't find slycot module 'mb03rd'")

    # see notes on RuntimeError below
    # NOTE(review): this None assignment is immediately overwritten below
    # (dead store); kept for byte-compatibility with the original.
    pmaxlower = None

    # get lower bound; try condmax ** 0.5 first
    pmaxlower = condmax ** 0.5
    amodal, tmodal, blksizes, eigvals = mb03rd(aschur.shape[0], aschur, tschur, pmax=pmaxlower)
    if np.linalg.cond(tmodal) <= condmax:
        # condmax**0.5 already satisfies the bound; remember this result
        reslower = amodal, tmodal, blksizes, eigvals
    else:
        # fall back to the most conservative pmax; if even that violates
        # condmax, no acceptable decomposition exists
        pmaxlower = 1.0
        amodal, tmodal, blksizes, eigvals = mb03rd(aschur.shape[0], aschur, tschur, pmax=pmaxlower)
        cond = np.linalg.cond(tmodal)
        if cond > condmax:
            msg = 'minimum cond={} > condmax={}; try increasing condmax'.format(cond, condmax)
            raise RuntimeError(msg)

    pmax = pmaxlower

    # phase 1: search for upper bound on pmax
    # (double pmax until either the bound is violated -- giving an upper
    # bracket for the bisection -- or the result becomes non-defective)
    for i in range(50):
        amodal, tmodal, blksizes, eigvals = mb03rd(aschur.shape[0], aschur, tschur, pmax=pmax)
        cond = np.linalg.cond(tmodal)
        if cond < condmax:
            # still within the bound: this becomes the best known result
            pmaxlower = pmax
            reslower = amodal, tmodal, blksizes, eigvals
        else:
            # upper bound found; go to phase 2
            pmaxupper = pmax
            break

        if _bdschur_defective(blksizes, eigvals):
            pmax *= 2
        else:
            # non-defective within the bound: done
            return amodal, tmodal, blksizes, eigvals
    else:
        # no upper bound found; return current result
        return reslower

    # phase 2: bisection search
    # (geometric bisection between pmaxlower and pmaxupper)
    for i in range(50):
        pmax = (pmaxlower * pmaxupper) ** 0.5
        amodal, tmodal, blksizes, eigvals = mb03rd(aschur.shape[0], aschur, tschur, pmax=pmax)
        cond = np.linalg.cond(tmodal)

        if cond < condmax:
            if not _bdschur_defective(blksizes, eigvals):
                return amodal, tmodal, blksizes, eigvals
            # defective but within the bound: tighten the lower bracket
            pmaxlower = pmax
            reslower = amodal, tmodal, blksizes, eigvals
        else:
            # bound violated: tighten the upper bracket
            pmaxupper = pmax

        if pmaxupper / pmaxlower < _PMAX_SEARCH_TOL:
            # hit search limit
            return reslower
    else:
        raise ValueError('bisection failed to converge; pmaxlower={}, pmaxupper={}'.format(pmaxlower, pmaxupper))
def bdschur(a, condmax=None, sort=None):
    """Block-diagonal Schur decomposition

    Parameters
    ----------
    a : (M, M) array_like
        Real matrix to decompose
    condmax : None or float, optional
        If None (default), use 1/sqrt(eps), which is approximately 1e8
    sort : {None, 'continuous', 'discrete'}
        Block sorting; see below.

    Returns
    -------
    amodal : (M, M) real ndarray
        Block-diagonal Schur decomposition of `a`
    tmodal : (M, M) real ndarray
        Similarity transform relating `a` and `amodal`
    blksizes : (N,) int ndarray
        Array of Schur block sizes

    Notes
    -----
    If `sort` is None, the blocks are not sorted.

    If `sort` is 'continuous', the blocks are sorted according to
    associated eigenvalues.  The ordering is first by real part of
    eigenvalue, in descending order, then by absolute value of
    imaginary part of eigenvalue, also in decreasing order.

    If `sort` is 'discrete', the blocks are sorted as for
    'continuous', but applied to log of eigenvalues
    (i.e., continuous-equivalent eigenvalues).
    """
    # Default bound on the condition number of the similarity transform
    if condmax is None:
        condmax = np.finfo(np.float64).eps ** -0.5

    if not (np.isscalar(condmax) and condmax >= 1.0):
        raise ValueError('condmax="{}" must be a scalar >= 1.0'.format(condmax))

    a = np.atleast_2d(a)
    # Degenerate empty matrix: nothing to decompose
    if a.shape[0] == 0 or a.shape[1] == 0:
        return a.copy(), np.eye(a.shape[1], a.shape[0]), np.array([])

    # Real Schur form, then search for the best block-diagonalization
    # that keeps cond(tmodal) below condmax
    aschur, tschur = schur(a)
    amodal, tmodal, blksizes, eigvals = _bdschur_condmax_search(aschur, tschur, condmax)

    if sort in ('continuous', 'discrete'):
        # index of the first eigenvalue of each block
        idxs = np.cumsum(np.hstack([0, blksizes[:-1]]))

        # sort key per block: real part, then |imag| (complex-conjugate
        # pairs share a block, so the leading eigenvalue represents it)
        ev_per_blk = [complex(eigvals[i].real, abs(eigvals[i].imag))
                      for i in idxs]

        if sort == 'discrete':
            # map discrete-time eigenvalues to continuous equivalents
            ev_per_blk = np.log(ev_per_blk)

        # put most unstable first
        # (np.argsort on complex data sorts lexicographically by real
        # part, then imaginary part; reversing gives descending order)
        sortidx = np.argsort(ev_per_blk)[::-1]

        # block indices
        blkidxs = [np.arange(i0, i0+ilen)
                   for i0, ilen in zip(idxs, blksizes)]

        # reordered
        permidx = np.hstack([blkidxs[i] for i in sortidx])
        rperm = np.eye(amodal.shape[0])[permidx]

        # apply the (orthogonal) block permutation to both outputs
        tmodal = tmodal @ rperm
        amodal = rperm @ amodal @ rperm.T
        blksizes = blksizes[sortidx]

    elif sort is None:
        pass

    else:
        raise ValueError('unknown sort value "{}"'.format(sort))

    return amodal, tmodal, blksizes
def modal_form(xsys, condmax=None, sort=False):
    """Convert a system into modal canonical form.

    Parameters
    ----------
    xsys : StateSpace object
        System to be transformed, with state `x`
    condmax : None or float, optional
        An upper bound on individual transformations.  If None, use
        `bdschur` default.
    sort : bool, optional
        If False (default), Schur blocks will not be sorted.  See
        `bdschur` for sort order.

    Returns
    -------
    zsys : StateSpace object
        System in modal canonical form, with state `z`
    T : (M, M) ndarray
        Coordinate transformation: z = T * x
    """
    # Choose the block sort order based on the system time base
    schur_sort = None
    if sort:
        is_dtime = xsys.dt is not None and xsys.dt > 0
        schur_sort = 'discrete' if is_dtime else 'continuous'

    # Block-diagonalize the dynamics matrix and transform the system
    xsys = _convert_to_statespace(xsys)
    amodal, tmodal, _ = bdschur(xsys.A, condmax=condmax, sort=schur_sort)
    return similarity_transform(xsys, tmodal, inverse=True), tmodal
| bsd-3-clause | c0da72be678635f4ed6fe36bc43e50dd | 30.162222 | 113 | 0.619839 | 3.58278 | false | false | false | false |
python-control/python-control | examples/steering-optimal.py | 2 | 8749 | # steering-optimal.py - optimal control for vehicle steering
# RMM, 18 Feb 2021
#
# This file works through an optimal control example for the vehicle
# steering system. It is intended to demonstrate the functionality for
# optimal control module (control.optimal) in the python-control package.
import numpy as np
import math
import control as ct
import control.optimal as opt
import matplotlib.pyplot as plt
import logging
import time
import os
#
# Vehicle steering dynamics
#
# The vehicle dynamics are given by a simple bicycle model. We take the state
# of the system as (x, y, theta) where (x, y) is the position of the vehicle
# in the plane and theta is the angle of the vehicle with respect to
# horizontal. The vehicle input is given by (v, phi) where v is the forward
# velocity of the vehicle and phi is the angle of the steering wheel. The
# model includes saturation of the vehicle steering angle.
#
# System state: x, y, theta
# System input: v, phi
# System output: x, y
# System parameters: wheelbase, maxsteer
#
def vehicle_update(t, x, u, params):
    """Bicycle-model state derivative for the steered vehicle.

    State x = (x, y, theta); input u = (v, phi).  The steering angle is
    saturated at the 'maxsteer' parameter before being applied.
    """
    # Model parameters (with defaults)
    wheelbase = params.get('wheelbase', 3.)         # vehicle wheelbase
    maxsteer = params.get('maxsteer', 0.5)          # max steering angle (rad)

    # Saturate the steering input (min/max is faster than clip here)
    steer = min(max(u[1], -maxsteer), maxsteer)

    speed, heading = u[0], x[2]

    # Kinematic bicycle model
    return np.array([
        math.cos(heading) * speed,                  # xdot = cos(theta) v
        math.sin(heading) * speed,                  # ydot = sin(theta) v
        (speed / wheelbase) * math.tan(steer)       # thdot = v/l tan(phi)
    ])
def vehicle_output(t, x, u, params):
    """Full-state output map: return (x, y, theta) unchanged."""
    return x
# Define the vehicle steering dynamics as an input/output system
# Inputs are forward velocity 'v' and steering angle 'phi'; outputs are
# the full state (x, y, theta).
vehicle = ct.NonlinearIOSystem(
    vehicle_update, vehicle_output, states=3, name='vehicle',
    inputs=('v', 'phi'),
    outputs=('x', 'y', 'theta'))
#
# Utility function to plot the results
#
def plot_results(t, y, u, figure=None, yf=None):
    """Plot an xy trajectory plus the velocity and steering inputs.

    Parameters
    ----------
    t : time vector
    y : output trajectory; y[0] is x position, y[1] is y position
    u : input trajectory; u[0] is velocity, u[1] is steering
    figure : figure number/label to plot in (optional)
    yf : final (x, y) point, marked with a red dot if given (optional)
    """
    plt.figure(figure)

    # Plot the xy trajectory
    plt.subplot(3, 1, 1)
    plt.plot(y[0], y[1])
    plt.xlabel("x [m]")
    plt.ylabel("y [m]")
    # NOTE(review): truthiness test — fine for a list/tuple, but would be
    # ambiguous if yf were ever passed as a numpy array; confirm callers.
    if yf:
        plt.plot(yf[0], yf[1], 'ro')

    # Plot the inputs as a function of time
    plt.subplot(3, 1, 2)
    plt.plot(t, u[0])
    plt.xlabel("t [sec]")
    plt.ylabel("velocity [m/s]")

    plt.subplot(3, 1, 3)
    plt.plot(t, u[1])
    plt.xlabel("t [sec]")
    plt.ylabel("steering [rad/s]")

    plt.suptitle("Lane change manuever")
    plt.tight_layout()
    plt.show(block=False)
#
# Optimal control problem
#
# Perform a "lane change" manuever over the course of 10 seconds.
#
# Initial and final conditions
# (state is [x, y, theta]; input is [v, phi])
x0 = [0., -2., 0.]; u0 = [10., 0.]
xf = [100., 2., 0.]; uf = [10., 0.]
Tf = 10

#
# Approach 1: standard quadratic cost
#
# We can set up the optimal control problem as trying to minimize the
# distance from the desired final point while at the same time as not
# exerting too much control effort to achieve our goal.
#
print("Approach 1: standard quadratic cost")

# Set up the cost functions
Q = np.diag([.1, 10, .1])       # keep lateral error low
R = np.diag([.1, 1])            # minimize applied inputs
quad_cost = opt.quadratic_cost(vehicle, Q, R, x0=xf, u0=uf)

# Define the time horizon (and spacing) for the optimization
horizon = np.linspace(0, Tf, 10, endpoint=True)

# Provide an initial guess (will be extended to entire horizon)
bend_left = [10, 0.01]          # slight left veer

# Turn on debug level logging so that we can see what the optimizer is doing
logging.basicConfig(
    level=logging.DEBUG, filename="steering-integral_cost.log",
    filemode='w', force=True)

# Compute the optimal control, setting step size for gradient calculation (eps)
start_time = time.process_time()
result1 = opt.solve_ocp(
    vehicle, horizon, x0, quad_cost, initial_guess=bend_left, log=True,
    minimize_method='trust-constr',
    minimize_options={'finite_diff_rel_step': 0.01},
)
print("* Total time = %5g seconds\n" % (time.process_time() - start_time))

# If we are running CI tests, make sure we succeeded
if 'PYCONTROL_TEST_EXAMPLES' in os.environ:
    assert result1.success

# Extract and plot the results (+ state trajectory)
t1, u1 = result1.time, result1.inputs
t1, y1 = ct.input_output_response(vehicle, horizon, u1, x0)
plot_results(t1, y1, u1, figure=1, yf=xf[0:2])
#
# Approach 2: input cost, input constraints, terminal cost
#
# The previous solution integrates the position error for the entire
# horizon, and so the car changes lanes very quickly (at the cost of larger
# inputs).  Instead, we can penalize the final state and impose a higher
# cost on the inputs, resulting in a more gradual lane change.
#
# We also set the solver explicitly.
#
print("Approach 2: input cost and constraints plus terminal cost")

# Add input constraint, input cost, terminal cost
# (velocity constrained to [8, 12], steering to [-0.1, 0.1])
constraints = [ opt.input_range_constraint(vehicle, [8, -0.1], [12, 0.1]) ]
traj_cost = opt.quadratic_cost(vehicle, None, np.diag([0.1, 1]), u0=uf)
term_cost = opt.quadratic_cost(vehicle, np.diag([1, 10, 10]), None, x0=xf)

# Change logging to keep less information
logging.basicConfig(
    level=logging.INFO, filename="./steering-terminal_cost.log",
    filemode='w', force=True)

# Compute the optimal control
start_time = time.process_time()
result2 = opt.solve_ocp(
    vehicle, horizon, x0, traj_cost, constraints, terminal_cost=term_cost,
    initial_guess=bend_left, log=True,
    minimize_method='SLSQP', minimize_options={'eps': 0.01})
print("* Total time = %5g seconds\n" % (time.process_time() - start_time))

# If we are running CI tests, make sure we succeeded
if 'PYCONTROL_TEST_EXAMPLES' in os.environ:
    assert result2.success

# Extract and plot the results (+ state trajectory)
t2, u2 = result2.time, result2.inputs
t2, y2 = ct.input_output_response(vehicle, horizon, u2, x0)
plot_results(t2, y2, u2, figure=2, yf=xf[0:2])
#
# Approach 3: terminal constraints
#
# We can also remove the cost function on the state and replace it
# with a terminal *constraint* on the state.  If a solution is found,
# it guarantees we get to exactly the final state.
#
print("Approach 3: terminal constraints")

# Input cost and terminal constraints
R = np.diag([1, 1])                 # minimize applied inputs
cost3 = opt.quadratic_cost(vehicle, np.zeros((3,3)), R, u0=uf)
constraints = [
    opt.input_range_constraint(vehicle, [8, -0.1], [12, 0.1]) ]
# Equality constraint: pin the final state exactly at xf
terminal = [ opt.state_range_constraint(vehicle, xf, xf) ]

# Reset logging to its default values
logging.basicConfig(
    level=logging.DEBUG, filename="./steering-terminal_constraint.log",
    filemode='w', force=True)

# Compute the optimal control
start_time = time.process_time()
result3 = opt.solve_ocp(
    vehicle, horizon, x0, cost3, constraints,
    terminal_constraints=terminal, initial_guess=bend_left, log=False,
    solve_ivp_kwargs={'atol': 1e-3, 'rtol': 1e-2},
    minimize_method='trust-constr',
)
print("* Total time = %5g seconds\n" % (time.process_time() - start_time))

# If we are running CI tests, make sure we succeeded
if 'PYCONTROL_TEST_EXAMPLES' in os.environ:
    assert result3.success

# Extract and plot the results (+ state trajectory)
t3, u3 = result3.time, result3.inputs
t3, y3 = ct.input_output_response(vehicle, horizon, u3, x0)
plot_results(t3, y3, u3, figure=3, yf=xf[0:2])
#
# Approach 4: terminal constraints w/ basis functions
#
# As a final example, we can use a basis function to reduce the size
# of the problem and get faster answers with more temporal resolution.
# Here we parameterize the input by a set of 4 Bezier curves but solve
# for a much more time resolved set of inputs.

print("Approach 4: Bezier basis")
# NOTE(review): mid-file import, kept here so the example only depends on
# flatsys when this approach is actually run.
import control.flatsys as flat

# Compute the optimal control
start_time = time.process_time()
result4 = opt.solve_ocp(
    vehicle, horizon, x0, quad_cost,
    constraints,
    terminal_constraints=terminal,
    initial_guess=bend_left,
    basis=flat.BezierFamily(4, T=Tf),
    # solve_ivp_kwargs={'method': 'RK45', 'atol': 1e-2, 'rtol': 1e-2},
    solve_ivp_kwargs={'atol': 1e-3, 'rtol': 1e-2},
    minimize_method='trust-constr', minimize_options={'disp': True},
    log=False
)
print("* Total time = %5g seconds\n" % (time.process_time() - start_time))

# If we are running CI tests, make sure we succeeded
if 'PYCONTROL_TEST_EXAMPLES' in os.environ:
    assert result4.success

# Extract and plot the results (+ state trajectory)
t4, u4 = result4.time, result4.inputs
t4, y4 = ct.input_output_response(vehicle, horizon, u4, x0)
plot_results(t4, y4, u4, figure=4, yf=xf[0:2])

# If we are not running CI tests, display the results
if 'PYCONTROL_TEST_EXAMPLES' not in os.environ:
    plt.show()
| bsd-3-clause | 64b09fe1f252d168240ed6293721993d | 33.175781 | 79 | 0.688079 | 3.151657 | false | false | false | false |
python-control/python-control | control/tests/statefbk_test.py | 2 | 30709 | """statefbk_test.py - test state feedback functions
RMM, 30 Mar 2011 (based on TestStatefbk from v0.4a)
"""
import numpy as np
import pytest
import control as ct
from control import lqe, dlqe, poles, rss, ss, tf
from control.exception import ControlDimension, ControlSlycot, \
ControlArgument, slycot_check
from control.mateqn import care, dare
from control.statefbk import (ctrb, obsv, place, place_varga, lqr, dlqr,
gram, acker)
from control.tests.conftest import (slycotonly, check_deprecated_matrix,
ismatarrayout, asmatarrayout)
@pytest.fixture
def fixedseed():
    """Get consistent test results by seeding numpy's global RNG."""
    # Tests that draw random systems (e.g. via rss) become reproducible
    np.random.seed(0)
class TestStatefbk:
"""Test state feedback functions"""
# Maximum number of states to test + 1
maxStates = 5
# Maximum number of inputs and outputs to test + 1
maxTries = 4
# Set to True to print systems to the output.
debug = False
def testCtrbSISO(self, matarrayin, matarrayout):
A = matarrayin([[1., 2.], [3., 4.]])
B = matarrayin([[5.], [7.]])
Wctrue = np.array([[5., 19.], [7., 43.]])
with check_deprecated_matrix():
Wc = ctrb(A, B)
assert ismatarrayout(Wc)
np.testing.assert_array_almost_equal(Wc, Wctrue)
def testCtrbMIMO(self, matarrayin):
A = matarrayin([[1., 2.], [3., 4.]])
B = matarrayin([[5., 6.], [7., 8.]])
Wctrue = np.array([[5., 6., 19., 22.], [7., 8., 43., 50.]])
Wc = ctrb(A, B)
np.testing.assert_array_almost_equal(Wc, Wctrue)
# Make sure default type values are correct
assert ismatarrayout(Wc)
def testObsvSISO(self, matarrayin):
A = matarrayin([[1., 2.], [3., 4.]])
C = matarrayin([[5., 7.]])
Wotrue = np.array([[5., 7.], [26., 38.]])
Wo = obsv(A, C)
np.testing.assert_array_almost_equal(Wo, Wotrue)
# Make sure default type values are correct
assert ismatarrayout(Wo)
def testObsvMIMO(self, matarrayin):
A = matarrayin([[1., 2.], [3., 4.]])
C = matarrayin([[5., 6.], [7., 8.]])
Wotrue = np.array([[5., 6.], [7., 8.], [23., 34.], [31., 46.]])
Wo = obsv(A, C)
np.testing.assert_array_almost_equal(Wo, Wotrue)
def testCtrbObsvDuality(self, matarrayin):
A = matarrayin([[1.2, -2.3], [3.4, -4.5]])
B = matarrayin([[5.8, 6.9], [8., 9.1]])
Wc = ctrb(A, B)
A = np.transpose(A)
C = np.transpose(B)
Wo = np.transpose(obsv(A, C))
np.testing.assert_array_almost_equal(Wc,Wo)
    @slycotonly
    def testGramWc(self, matarrayin, matarrayout):
        """Controllability Gramian of a 2x2 MIMO system."""
        A = matarrayin([[1., -2.], [3., -4.]])
        B = matarrayin([[5., 6.], [7., 8.]])
        C = matarrayin([[4., 5.], [6., 7.]])
        D = matarrayin([[13., 14.], [15., 16.]])
        sys = ss(A, B, C, D)
        # Precomputed reference value
        Wctrue = np.array([[18.5, 24.5], [24.5, 32.5]])
        with check_deprecated_matrix():
            Wc = gram(sys, 'c')
            assert ismatarrayout(Wc)
        np.testing.assert_array_almost_equal(Wc, Wctrue)
@slycotonly
def testGramRc(self, matarrayin):
A = matarrayin([[1., -2.], [3., -4.]])
B = matarrayin([[5., 6.], [7., 8.]])
C = matarrayin([[4., 5.], [6., 7.]])
D = matarrayin([[13., 14.], [15., 16.]])
sys = ss(A, B, C, D)
Rctrue = np.array([[4.30116263, 5.6961343], [0., 0.23249528]])
Rc = gram(sys, 'cf')
np.testing.assert_array_almost_equal(Rc, Rctrue)
@slycotonly
def testGramWo(self, matarrayin):
A = matarrayin([[1., -2.], [3., -4.]])
B = matarrayin([[5., 6.], [7., 8.]])
C = matarrayin([[4., 5.], [6., 7.]])
D = matarrayin([[13., 14.], [15., 16.]])
sys = ss(A, B, C, D)
Wotrue = np.array([[257.5, -94.5], [-94.5, 56.5]])
Wo = gram(sys, 'o')
np.testing.assert_array_almost_equal(Wo, Wotrue)
@slycotonly
def testGramWo2(self, matarrayin):
A = matarrayin([[1., -2.], [3., -4.]])
B = matarrayin([[5.], [7.]])
C = matarrayin([[6., 8.]])
D = matarrayin([[9.]])
sys = ss(A,B,C,D)
Wotrue = np.array([[198., -72.], [-72., 44.]])
Wo = gram(sys, 'o')
np.testing.assert_array_almost_equal(Wo, Wotrue)
@slycotonly
def testGramRo(self, matarrayin):
A = matarrayin([[1., -2.], [3., -4.]])
B = matarrayin([[5., 6.], [7., 8.]])
C = matarrayin([[4., 5.], [6., 7.]])
D = matarrayin([[13., 14.], [15., 16.]])
sys = ss(A, B, C, D)
Rotrue = np.array([[16.04680654, -5.8890222], [0., 4.67112593]])
Ro = gram(sys, 'of')
np.testing.assert_array_almost_equal(Ro, Rotrue)
def testGramsys(self):
num =[1.]
den = [1., 1., 1.]
sys = tf(num,den)
with pytest.raises(ValueError):
gram(sys, 'o')
with pytest.raises(ValueError):
gram(sys, 'c')
def testAcker(self, fixedseed):
for states in range(1, self.maxStates):
for i in range(self.maxTries):
# start with a random SS system and transform to TF then
# back to SS, check that the matrices are the same.
sys = rss(states, 1, 1)
if (self.debug):
print(sys)
# Make sure the system is not degenerate
Cmat = ctrb(sys.A, sys.B)
if np.linalg.matrix_rank(Cmat) != states:
if (self.debug):
print(" skipping (not reachable or ill conditioned)")
continue
# Place the poles at random locations
des = rss(states, 1, 1)
desired = poles(des)
# Now place the poles using acker
K = acker(sys.A, sys.B, desired)
new = ss(sys.A - sys.B * K, sys.B, sys.C, sys.D)
placed = poles(new)
# Debugging code
# diff = np.sort(poles) - np.sort(placed)
# if not all(diff < 0.001):
# print("Found a problem:")
# print(sys)
# print("desired = ", poles)
np.testing.assert_array_almost_equal(
np.sort(desired), np.sort(placed), decimal=4)
def checkPlaced(self, P_expected, P_placed):
"""Check that placed poles are correct"""
# No guarantee of the ordering, so sort them
P_expected = np.squeeze(np.asarray(P_expected))
P_expected.sort()
P_placed.sort()
np.testing.assert_array_almost_equal(P_expected, P_placed)
def testPlace(self, matarrayin):
# Matrices shamelessly stolen from scipy example code.
A = matarrayin([[1.380, -0.2077, 6.715, -5.676],
[-0.5814, -4.290, 0, 0.6750],
[1.067, 4.273, -6.654, 5.893],
[0.0480, 4.273, 1.343, -2.104]])
B = matarrayin([[0, 5.679],
[1.136, 1.136],
[0, 0],
[-3.146, 0]])
P = matarrayin([-0.5 + 1j, -0.5 - 1j, -5.0566, -8.6659])
K = place(A, B, P)
assert ismatarrayout(K)
P_placed = np.linalg.eigvals(A - B @ K)
self.checkPlaced(P, P_placed)
# Test that the dimension checks work.
with pytest.raises(ControlDimension):
place(A[1:, :], B, P)
with pytest.raises(ControlDimension):
place(A, B[1:, :], P)
# Check that we get an error if we ask for too many poles in the same
# location. Here, rank(B) = 2, so lets place three at the same spot.
P_repeated = matarrayin([-0.5, -0.5, -0.5, -8.6659])
with pytest.raises(ValueError):
place(A, B, P_repeated)
    @slycotonly
    def testPlace_varga_continuous(self, matarrayin):
        """
        Check that we can place eigenvalues for dtime=False
        """
        A = matarrayin([[1., -2.], [3., -4.]])
        B = matarrayin([[5.], [7.]])

        P = [-2., -2.]
        K = place_varga(A, B, P)
        P_placed = np.linalg.eigvals(A - B @ K)
        self.checkPlaced(P, P_placed)

        # Test that the dimension checks work.
        # NOTE(review): these checks exercise `place`, not `place_varga` —
        # likely a copy/paste from testPlace; confirm whether intentional.
        np.testing.assert_raises(ControlDimension, place, A[1:, :], B, P)
        np.testing.assert_raises(ControlDimension, place, A, B[1:, :], P)

        # Regression test against bug #177
        # https://github.com/python-control/python-control/issues/177
        A = matarrayin([[0, 1], [100, 0]])
        B = matarrayin([[0], [1]])
        P = matarrayin([-20 + 10*1j, -20 - 10*1j])
        K = place_varga(A, B, P)
        P_placed = np.linalg.eigvals(A - B @ K)
        self.checkPlaced(P, P_placed)
    @slycotonly
    def testPlace_varga_continuous_partial_eigs(self, matarrayin):
        """
        Check that we are able to use the alpha parameter to only place
        a subset of the eigenvalues, for the continuous time case.
        """
        # A matrix has eigenvalues at s=-1, and s=-2. Choose alpha = -1.5
        # and check that eigenvalue at s=-2 stays put.
        A = matarrayin([[1., -2.], [3., -4.]])
        B = matarrayin([[5.], [7.]])
        P = matarrayin([-3.])
        P_expected = np.array([-2.0, -3.0])
        alpha = -1.5
        K = place_varga(A, B, P, alpha=alpha)
        P_placed = np.linalg.eigvals(A - B @ K)
        # No guarantee of the ordering, so sort them
        self.checkPlaced(P_expected, P_placed)
    @slycotonly
    def testPlace_varga_discrete(self, matarrayin):
        """
        Check that we can place poles using dtime=True (discrete time)
        """
        A = matarrayin([[1., 0], [0, 0.5]])
        B = matarrayin([[5.], [7.]])
        P = matarrayin([0.5, 0.5])
        K = place_varga(A, B, P, dtime=True)
        # Closed-loop poles are the eigenvalues of A - B K
        P_placed = np.linalg.eigvals(A - B @ K)
        # No guarantee of the ordering, so sort them
        self.checkPlaced(P, P_placed)
    @slycotonly
    def testPlace_varga_discrete_partial_eigs(self, matarrayin):
        """
        Check that we can only assign a single eigenvalue in the discrete
        time case.
        """
        # A matrix has eigenvalues at 1.0 and 0.5. Set alpha = 0.51, and
        # check that the eigenvalue at 0.5 is not moved.
        A = matarrayin([[1., 0], [0, 0.5]])
        B = matarrayin([[5.], [7.]])
        P = matarrayin([0.2, 0.6])
        P_expected = np.array([0.5, 0.6])
        alpha = 0.51
        K = place_varga(A, B, P, dtime=True, alpha=alpha)
        P_placed = np.linalg.eigvals(A - B @ K)
        self.checkPlaced(P_expected, P_placed)
def check_LQR(self, K, S, poles, Q, R):
S_expected = asmatarrayout(np.sqrt(Q @ R))
K_expected = asmatarrayout(S_expected / R)
poles_expected = -np.squeeze(np.asarray(K_expected))
np.testing.assert_array_almost_equal(S, S_expected)
np.testing.assert_array_almost_equal(K, K_expected)
np.testing.assert_array_almost_equal(poles, poles_expected)
def check_DLQR(self, K, S, poles, Q, R):
S_expected = asmatarrayout(Q)
K_expected = asmatarrayout(0)
poles_expected = -np.squeeze(np.asarray(K_expected))
np.testing.assert_array_almost_equal(S, S_expected)
np.testing.assert_array_almost_equal(K, K_expected)
np.testing.assert_array_almost_equal(poles, poles_expected)
@pytest.mark.parametrize("method", [None, 'slycot', 'scipy'])
def test_LQR_integrator(self, matarrayin, matarrayout, method):
if method == 'slycot' and not slycot_check():
return
A, B, Q, R = (matarrayin([[X]]) for X in [0., 1., 10., 2.])
K, S, poles = lqr(A, B, Q, R, method=method)
self.check_LQR(K, S, poles, Q, R)
@pytest.mark.parametrize("method", [None, 'slycot', 'scipy'])
def test_LQR_3args(self, matarrayin, matarrayout, method):
if method == 'slycot' and not slycot_check():
return
sys = ss(0., 1., 1., 0.)
Q, R = (matarrayin([[X]]) for X in [10., 2.])
K, S, poles = lqr(sys, Q, R, method=method)
self.check_LQR(K, S, poles, Q, R)
@pytest.mark.parametrize("method", [None, 'slycot', 'scipy'])
def test_DLQR_3args(self, matarrayin, matarrayout, method):
if method == 'slycot' and not slycot_check():
return
dsys = ss(0., 1., 1., 0., .1)
Q, R = (matarrayin([[X]]) for X in [10., 2.])
K, S, poles = dlqr(dsys, Q, R, method=method)
self.check_DLQR(K, S, poles, Q, R)
def test_DLQR_4args(self, matarrayin, matarrayout):
A, B, Q, R = (matarrayin([[X]]) for X in [0., 1., 10., 2.])
K, S, poles = dlqr(A, B, Q, R)
self.check_DLQR(K, S, poles, Q, R)
@pytest.mark.parametrize("cdlqr", [lqr, dlqr])
def test_lqr_badmethod(self, cdlqr):
A, B, Q, R = 0, 1, 10, 2
with pytest.raises(ControlArgument, match="Unknown method"):
K, S, poles = cdlqr(A, B, Q, R, method='nosuchmethod')
    @pytest.mark.parametrize("cdlqr", [lqr, dlqr])
    def test_lqr_slycot_not_installed(self, cdlqr):
        """Requesting method='slycot' without slycot raises ControlSlycot."""
        A, B, Q, R = 0, 1, 10, 2
        if not slycot_check():
            with pytest.raises(ControlSlycot, match="Can't find slycot"):
                K, S, poles = cdlqr(A, B, Q, R, method='slycot')
    @pytest.mark.xfail(reason="warning not implemented")
    def testLQR_warning(self):
        """Test lqr()
        Make sure we get a warning if [Q N;N' R] is not positive semi-definite
        """
        # from matlab_test siso.ss2 (testLQR); probably not referenced before
        # not yet implemented check
        A = np.array([[-2, 3, 1],
                      [-1, 0, 0],
                      [0, 1, 0]])
        B = np.array([[-1, 0, 0]]).T
        Q = np.eye(3)
        R = np.eye(1)
        # cross-weighting term chosen so the block matrix is indefinite
        N = np.array([[1, 1, 2]]).T
        # assert any(np.linalg.eigvals(np.block([[Q, N], [N.T, R]])) < 0)
        with pytest.warns(UserWarning):
            (K, S, E) = lqr(A, B, Q, R, N)
    @pytest.mark.parametrize("cdlqr", [lqr, dlqr])
    def test_lqr_call_format(self, cdlqr):
        """Check calling conventions and error messages for lqr()/dlqr()."""
        # Create a random state space system for testing
        sys = rss(2, 3, 2)
        sys.dt = None           # treat as either continuous or discrete time
        # Weighting matrices
        Q = np.eye(sys.nstates)
        R = np.eye(sys.ninputs)
        N = np.zeros((sys.nstates, sys.ninputs))
        # Standard calling format
        Kref, Sref, Eref = cdlqr(sys.A, sys.B, Q, R)
        # Call with system instead of matrices
        K, S, E = cdlqr(sys, Q, R)
        np.testing.assert_array_almost_equal(Kref, K)
        np.testing.assert_array_almost_equal(Sref, S)
        np.testing.assert_array_almost_equal(Eref, E)
        # Pass a cross-weighting matrix
        K, S, E = cdlqr(sys, Q, R, N)
        np.testing.assert_array_almost_equal(Kref, K)
        np.testing.assert_array_almost_equal(Sref, S)
        np.testing.assert_array_almost_equal(Eref, E)
        # Inconsistent system dimensions
        with pytest.raises(ct.ControlDimension, match="Incompatible dimen"):
            K, S, E = cdlqr(sys.A, sys.C, Q, R)
        # Incorrect covariance matrix dimensions
        with pytest.raises(ct.ControlDimension, match="Q must be a square"):
            K, S, E = cdlqr(sys.A, sys.B, sys.C, R, Q)
        # Too few input arguments
        with pytest.raises(ct.ControlArgument, match="not enough input"):
            K, S, E = cdlqr(sys.A, sys.B)
        # First argument is the wrong type (use SISO for non-slycot tests)
        sys_tf = tf(rss(3, 1, 1))
        sys_tf.dt = None        # treat as either continuous or discrete time
        with pytest.raises(ct.ControlArgument, match="LTI system must be"):
            K, S, E = cdlqr(sys_tf, Q, R)
    @pytest.mark.xfail(reason="warning not implemented")
    def testDLQR_warning(self):
        """Test dlqr()
        Make sure we get a warning if [Q N;N' R] is not positive semi-definite
        """
        # from matlab_test siso.ss2 (testLQR); probably not referenced before
        # not yet implemented check
        A = np.array([[-2, 3, 1],
                      [-1, 0, 0],
                      [0, 1, 0]])
        B = np.array([[-1, 0, 0]]).T
        Q = np.eye(3)
        R = np.eye(1)
        # cross-weighting term chosen so the block matrix is indefinite
        N = np.array([[1, 1, 2]]).T
        # assert any(np.linalg.eigvals(np.block([[Q, N], [N.T, R]])) < 0)
        with pytest.warns(UserWarning):
            (K, S, E) = dlqr(A, B, Q, R, N)
    def test_care(self, matarrayin):
        """Test stabilizing and anti-stabilizing feedback, continuous"""
        A = matarrayin(np.diag([1, -1]))
        B = matarrayin(np.identity(2))
        Q = matarrayin(np.identity(2))
        R = matarrayin(np.identity(2))
        S = matarrayin(np.zeros((2, 2)))
        E = matarrayin(np.identity(2))
        # stabilizing solution: all closed-loop poles in the left half plane
        X, L, G = care(A, B, Q, R, S, E, stabilizing=True)
        assert np.all(np.real(L) < 0)
        # anti-stabilizing solution requires slycot; without it the scipy
        # backend should refuse with an explicit error
        if slycot_check():
            X, L, G = care(A, B, Q, R, S, E, stabilizing=False)
            assert np.all(np.real(L) > 0)
        else:
            with pytest.raises(ControlArgument, match="'scipy' not valid"):
                X, L, G = care(A, B, Q, R, S, E, stabilizing=False)
    @pytest.mark.parametrize(
        "stabilizing",
        [True, pytest.param(False, marks=slycotonly)])
    def test_dare(self, matarrayin, stabilizing):
        """Test stabilizing and anti-stabilizing feedback, discrete"""
        A = matarrayin(np.diag([0.5, 2]))
        B = matarrayin(np.identity(2))
        Q = matarrayin(np.identity(2))
        R = matarrayin(np.identity(2))
        S = matarrayin(np.zeros((2, 2)))
        E = matarrayin(np.identity(2))
        X, L, G = dare(A, B, Q, R, S, E, stabilizing=stabilizing)
        # stabilizing => poles inside the unit circle; anti-stabilizing
        # => poles outside; sgn flips the inequality accordingly
        sgn = {True: -1, False: 1}[stabilizing]
        assert np.all(sgn * (np.abs(L) - 1) > 0)
    def test_lqr_discrete(self):
        """Test overloading of lqr operator for discrete time systems"""
        csys = ct.rss(2, 1, 1)
        dsys = ct.drss(2, 1, 1)
        Q = np.eye(2)
        R = np.eye(1)
        # Calling with a system versus explicit A, B should be the same
        K_csys, S_csys, E_csys = ct.lqr(csys, Q, R)
        K_expl, S_expl, E_expl = ct.lqr(csys.A, csys.B, Q, R)
        np.testing.assert_almost_equal(K_csys, K_expl)
        np.testing.assert_almost_equal(S_csys, S_expl)
        np.testing.assert_almost_equal(E_csys, E_expl)
        # Calling lqr() with a discrete time system should call dlqr()
        K_lqr, S_lqr, E_lqr = ct.lqr(dsys, Q, R)
        K_dlqr, S_dlqr, E_dlqr = ct.dlqr(dsys, Q, R)
        np.testing.assert_almost_equal(K_lqr, K_dlqr)
        np.testing.assert_almost_equal(S_lqr, S_dlqr)
        np.testing.assert_almost_equal(E_lqr, E_dlqr)
        # Calling lqr() with no timebase should call lqr()
        asys = ct.ss(csys.A, csys.B, csys.C, csys.D, dt=None)
        K_asys, S_asys, E_asys = ct.lqr(asys, Q, R)
        K_expl, S_expl, E_expl = ct.lqr(csys.A, csys.B, Q, R)
        np.testing.assert_almost_equal(K_asys, K_expl)
        np.testing.assert_almost_equal(S_asys, S_expl)
        np.testing.assert_almost_equal(E_asys, E_expl)
        # Calling dlqr() with a continuous time system should raise an error
        with pytest.raises(ControlArgument, match="dsys must be discrete"):
            K, S, E = ct.dlqr(csys, Q, R)
    @pytest.mark.parametrize(
        'nstates, noutputs, ninputs, nintegrators, type',
        [(2, 0, 1, 0, None),
         (2, 1, 1, 0, None),
         (4, 0, 2, 0, None),
         (4, 3, 2, 0, None),
         (2, 0, 1, 1, None),
         (4, 0, 2, 2, None),
         (4, 3, 2, 2, None),
         (2, 0, 1, 0, 'nonlinear'),
         (4, 0, 2, 2, 'nonlinear'),
         (4, 3, 2, 2, 'nonlinear'),
         ])
    def test_statefbk_iosys(
            self, nstates, ninputs, noutputs, nintegrators, type):
        """Check create_statefbk_iosystem() against hand-built state space
        matrices, with and without an estimator and integral action.
        (noutputs == 0 means full state output with no estimator.)"""
        # Create the system to be controlled (and estimator)
        # TODO: make sure it is controllable?
        if noutputs == 0:
            # Create a system with full state output
            sys = ct.rss(nstates, nstates, ninputs, strictly_proper=True)
            sys.C = np.eye(nstates)
            est = None
        else:
            # Create a system with of the desired size
            sys = ct.rss(nstates, noutputs, ninputs, strictly_proper=True)
            # Create an estimator with different signal names
            L, _, _ = ct.lqe(
                sys.A, sys.B, sys.C, np.eye(ninputs), np.eye(noutputs))
            est = ss(
                sys.A - L @ sys.C, np.hstack([L, sys.B]), np.eye(nstates), 0,
                inputs=sys.output_labels + sys.input_labels,
                outputs=[f'xhat[{i}]' for i in range(nstates)])
        # Decide whether to include integral action
        if nintegrators:
            # Choose the first 'n' outputs as integral terms
            C_int = np.eye(nintegrators, nstates)
            # Set up an augmented system for LQR computation
            # TODO: move this computation into LQR
            A_aug = np.block([
                [sys.A, np.zeros((sys.nstates, nintegrators))],
                [C_int, np.zeros((nintegrators, nintegrators))]
            ])
            B_aug = np.vstack([sys.B, np.zeros((nintegrators, ninputs))])
            C_aug = np.hstack([sys.C, np.zeros((sys.C.shape[0], nintegrators))])
            aug = ss(A_aug, B_aug, C_aug, 0)
        else:
            C_int = np.zeros((0, nstates))
            aug = sys
        # Design an LQR controller
        K, _, _ = ct.lqr(aug, np.eye(nstates + nintegrators), np.eye(ninputs))
        Kp, Ki = K[:, :nstates], K[:, nstates:]
        # Create an I/O system for the controller
        ctrl, clsys = ct.create_statefbk_iosystem(
            sys, K, integral_action=C_int, estimator=est, type=type)
        # If we used a nonlinear controller, linearize it for testing
        if type == 'nonlinear':
            clsys = clsys.linearize(0, 0)
        # Make sure the linear system elements are correct
        if noutputs == 0:
            # No estimator
            Ac = np.block([
                [sys.A - sys.B @ Kp, -sys.B @ Ki],
                [C_int, np.zeros((nintegrators, nintegrators))]
            ])
            Bc = np.block([
                [sys.B @ Kp, sys.B],
                [-C_int, np.zeros((nintegrators, ninputs))]
            ])
            Cc = np.block([
                [np.eye(nstates), np.zeros((nstates, nintegrators))],
                [-Kp, -Ki]
            ])
            Dc = np.block([
                [np.zeros((nstates, nstates + ninputs))],
                [Kp, np.eye(ninputs)]
            ])
        else:
            # Estimator
            Be1, Be2 = est.B[:, :noutputs], est.B[:, noutputs:]
            Ac = np.block([
                [sys.A, -sys.B @ Ki, -sys.B @ Kp],
                [np.zeros((nintegrators, nstates + nintegrators)), C_int],
                [Be1 @ sys.C, -Be2 @ Ki, est.A - Be2 @ Kp]
            ])
            Bc = np.block([
                [sys.B @ Kp, sys.B],
                [-C_int, np.zeros((nintegrators, ninputs))],
                [Be2 @ Kp, Be2]
            ])
            Cc = np.block([
                [sys.C, np.zeros((noutputs, nintegrators + nstates))],
                [np.zeros_like(Kp), -Ki, -Kp]
            ])
            Dc = np.block([
                [np.zeros((noutputs, nstates + ninputs))],
                [Kp, np.eye(ninputs)]
            ])
        # Check to make sure everything matches
        np.testing.assert_array_almost_equal(clsys.A, Ac)
        np.testing.assert_array_almost_equal(clsys.B, Bc)
        np.testing.assert_array_almost_equal(clsys.C, Cc)
        np.testing.assert_array_almost_equal(clsys.D, Dc)
    def test_lqr_integral_continuous(self):
        """Integral action in lqr() and create_statefbk_iosystem(),
        continuous-time case, checked against hand-built matrices."""
        # Generate a continuous time system for testing
        sys = ct.rss(4, 4, 2, strictly_proper=True)
        sys.C = np.eye(4)       # reset output to be full state
        C_int = np.eye(2, 4)    # integrate outputs for first two states
        nintegrators = C_int.shape[0]
        # Generate a controller with integral action
        K, _, _ = ct.lqr(
            sys, np.eye(sys.nstates + nintegrators), np.eye(sys.ninputs),
            integral_action=C_int)
        Kp, Ki = K[:, :sys.nstates], K[:, sys.nstates:]
        # Create an I/O system for the controller
        ctrl, clsys = ct.create_statefbk_iosystem(
            sys, K, integral_action=C_int)
        # Construct the state space matrices for the controller
        # Controller inputs = xd, ud, x
        # Controller state = z (integral of x-xd)
        # Controller output = ud - Kp(x - xd) - Ki z
        A_ctrl = np.zeros((nintegrators, nintegrators))
        B_ctrl = np.block([
            [-C_int, np.zeros((nintegrators, sys.ninputs)), C_int]
        ])
        C_ctrl = -K[:, sys.nstates:]
        D_ctrl = np.block([[Kp, np.eye(nintegrators), -Kp]])
        # Check to make sure everything matches
        np.testing.assert_array_almost_equal(ctrl.A, A_ctrl)
        np.testing.assert_array_almost_equal(ctrl.B, B_ctrl)
        np.testing.assert_array_almost_equal(ctrl.C, C_ctrl)
        np.testing.assert_array_almost_equal(ctrl.D, D_ctrl)
        # Construct the state space matrices for the closed loop system
        A_clsys = np.block([
            [sys.A - sys.B @ Kp, -sys.B @ Ki],
            [C_int, np.zeros((nintegrators, nintegrators))]
        ])
        B_clsys = np.block([
            [sys.B @ Kp, sys.B],
            [-C_int, np.zeros((nintegrators, sys.ninputs))]
        ])
        C_clsys = np.block([
            [np.eye(sys.nstates), np.zeros((sys.nstates, nintegrators))],
            [-Kp, -Ki]
        ])
        D_clsys = np.block([
            [np.zeros((sys.nstates, sys.nstates + sys.ninputs))],
            [Kp, np.eye(sys.ninputs)]
        ])
        # Check to make sure closed loop matches
        np.testing.assert_array_almost_equal(clsys.A, A_clsys)
        np.testing.assert_array_almost_equal(clsys.B, B_clsys)
        np.testing.assert_array_almost_equal(clsys.C, C_clsys)
        np.testing.assert_array_almost_equal(clsys.D, D_clsys)
        # Check the poles of the closed loop system
        assert all(np.real(clsys.poles()) < 0)
        # Make sure controller infinite zero frequency gain
        if slycot_check():
            ctrl_tf = tf(ctrl)
            assert abs(ctrl_tf(1e-9)[0][0]) > 1e6
            assert abs(ctrl_tf(1e-9)[1][1]) > 1e6
    def test_lqr_integral_discrete(self):
        """Integral action in lqr() and create_statefbk_iosystem(),
        discrete-time case (integrator state matrix is identity)."""
        # Generate a discrete time system for testing
        sys = ct.drss(4, 4, 2, strictly_proper=True)
        sys.C = np.eye(4)       # reset output to be full state
        C_int = np.eye(2, 4)    # integrate outputs for first two states
        nintegrators = C_int.shape[0]
        # Generate a controller with integral action
        K, _, _ = ct.lqr(
            sys, np.eye(sys.nstates + nintegrators), np.eye(sys.ninputs),
            integral_action=C_int)
        Kp, Ki = K[:, :sys.nstates], K[:, sys.nstates:]
        # Create an I/O system for the controller
        ctrl, clsys = ct.create_statefbk_iosystem(
            sys, K, integral_action=C_int)
        # Construct the state space matrices by hand
        A_ctrl = np.eye(nintegrators)
        B_ctrl = np.block([
            [-C_int, np.zeros((nintegrators, sys.ninputs)), C_int]
        ])
        C_ctrl = -K[:, sys.nstates:]
        D_ctrl = np.block([[Kp, np.eye(nintegrators), -Kp]])
        # Check to make sure everything matches
        assert ct.isdtime(clsys)
        np.testing.assert_array_almost_equal(ctrl.A, A_ctrl)
        np.testing.assert_array_almost_equal(ctrl.B, B_ctrl)
        np.testing.assert_array_almost_equal(ctrl.C, C_ctrl)
        np.testing.assert_array_almost_equal(ctrl.D, D_ctrl)
    @pytest.mark.parametrize(
        "rss_fun, lqr_fun",
        [(ct.rss, lqr), (ct.drss, dlqr)])
    def test_lqr_errors(self, rss_fun, lqr_fun):
        """Error handling for bad integral_action and unknown keywords."""
        # Generate a discrete time system for testing
        sys = rss_fun(4, 4, 2, strictly_proper=True)
        with pytest.raises(ControlArgument, match="must pass an array"):
            K, _, _ = lqr_fun(
                sys, np.eye(sys.nstates), np.eye(sys.ninputs),
                integral_action="invalid argument")
        with pytest.raises(ControlArgument, match="gain size must match"):
            C_int = np.eye(2, 3)
            K, _, _ = lqr_fun(
                sys, np.eye(sys.nstates), np.eye(sys.ninputs),
                integral_action=C_int)
        with pytest.raises(TypeError, match="unrecognized keywords"):
            K, _, _ = lqr_fun(
                sys, np.eye(sys.nstates), np.eye(sys.ninputs),
                integrator=None)
    def test_statefbk_errors(self):
        """Error handling in create_statefbk_iosystem()."""
        sys = ct.rss(4, 4, 2, strictly_proper=True)
        K, _, _ = ct.lqr(sys, np.eye(sys.nstates), np.eye(sys.ninputs))
        with pytest.raises(ControlArgument, match="must be I/O system"):
            sys_tf = ct.tf([1], [1, 1])
            ctrl, clsys = ct.create_statefbk_iosystem(sys_tf, K)
        with pytest.raises(ControlArgument, match="output size must match"):
            est = ct.rss(3, 3, 2)
            ctrl, clsys = ct.create_statefbk_iosystem(sys, K, estimator=est)
        with pytest.raises(ControlArgument, match="must be the full state"):
            sys_nf = ct.rss(4, 3, 2, strictly_proper=True)
            ctrl, clsys = ct.create_statefbk_iosystem(sys_nf, K)
        with pytest.raises(ControlArgument, match="gain must be an array"):
            ctrl, clsys = ct.create_statefbk_iosystem(sys, "bad argument")
        with pytest.raises(ControlArgument, match="unknown type"):
            ctrl, clsys = ct.create_statefbk_iosystem(sys, K, type=1)
        # Errors involving integral action
        C_int = np.eye(2, 4)
        K_int, _, _ = ct.lqr(
            sys, np.eye(sys.nstates + C_int.shape[0]), np.eye(sys.ninputs),
            integral_action=C_int)
        with pytest.raises(ControlArgument, match="must pass an array"):
            ctrl, clsys = ct.create_statefbk_iosystem(
                sys, K_int, integral_action="bad argument")
        with pytest.raises(ControlArgument, match="must be an array of size"):
            ctrl, clsys = ct.create_statefbk_iosystem(
                sys, K, integral_action=C_int)
| bsd-3-clause | b533e9709656de5039cde068ce5f9bf4 | 38.421053 | 80 | 0.532417 | 3.164245 | false | true | false | false |
python-control/python-control | control/tests/descfcn_test.py | 2 | 7175 | """descfcn_test.py - test describing functions and related capabilities
RMM, 23 Jan 2021
This set of unit tests covers the various operatons of the descfcn module, as
well as some of the support functions associated with static nonlinearities.
"""
import pytest
import numpy as np
import control as ct
import math
from control.descfcn import saturation_nonlinearity, \
friction_backlash_nonlinearity, relay_hysteresis_nonlinearity
# Static function via a class
class saturation_class:
    """Unit saturation nonlinearity implemented as a callable class."""

    def __call__(self, x, lb=-1, ub=1):
        """Clamp ``x`` to the interval ``[lb, ub]``."""
        return np.clip(x, lb, ub)

    def describing_function(self, a):
        """Analytical describing function for the unit saturation.

        Inside the linear region (|a| <= 1) the gain is exactly one;
        outside it is given by the standard closed-form expression.
        """
        if -1 <= a <= 1:
            return 1.
        b = 1 / a
        return 2 / math.pi * (math.asin(b) + b * math.sqrt(1 - b**2))
# Static function without a class
def saturation(x):
    """Unit saturation as a plain function: clamp ``x`` to [-1, 1]."""
    return np.minimum(1, np.maximum(-1, x))
# Static nonlinear system implementing saturation
# Static nonlinear system implementing saturation
@pytest.fixture
def satsys():
    """Fixture: static SISO I/O system wrapping the saturation function."""
    satfcn = saturation_class()

    def output_fn(t, x, u, params):
        # purely static: output depends only on the input u
        return satfcn(u)

    return ct.NonlinearIOSystem(None, outfcn=output_fn, input=1, output=1)
def test_static_nonlinear_call(satsys):
    """Evaluate static nonlinear I/O systems by calling them directly."""
    # Make sure that the saturation system is a static nonlinearity
    assert satsys._isstatic()

    # Make sure the saturation function is doing the right computation
    # (local renamed from `input`, which shadowed the builtin)
    input_list = [-2, -1, -0.5, 0, 0.5, 1, 2]
    desired = [-1, -1, -0.5, 0, 0.5, 1, 1]
    for x, y in zip(input_list, desired):
        np.testing.assert_allclose(satsys(x), y)

    # Test squeeze properties
    assert satsys(0.) == 0.
    assert satsys([0.], squeeze=True) == 0.
    np.testing.assert_allclose(satsys([0.]), [0.])

    # Test SIMO nonlinearity
    def _simofcn(t, x, u, params):
        return np.array([np.cos(u), np.sin(u)])
    simo_sys = ct.NonlinearIOSystem(None, outfcn=_simofcn, input=1, output=2)
    np.testing.assert_allclose(simo_sys([0.]), [1, 0])
    np.testing.assert_allclose(simo_sys([0.], squeeze=True), [1, 0])

    # Test MISO nonlinearity
    def _misofcn(t, x, u, params={}):
        return np.array([np.sin(u[0]) * np.cos(u[1])])
    miso_sys = ct.NonlinearIOSystem(None, outfcn=_misofcn, input=2, output=1)
    np.testing.assert_allclose(miso_sys([0, 0]), [0])
    np.testing.assert_allclose(miso_sys([0, 0], squeeze=True), [0])
# Test saturation describing function in multiple ways
def test_saturation_describing_function(satsys):
    """Compute the saturation describing function several equivalent ways
    (plain function, DF-aware object, static I/O system) and compare each
    against the analytical expression."""
    satfcn = saturation_class()
    # Store the analytic describing function for comparison
    amprange = np.linspace(0, 10, 100)
    df_anal = [satfcn.describing_function(a) for a in amprange]
    # Compute describing function for a static function
    df_fcn = ct.describing_function(saturation, amprange)
    np.testing.assert_almost_equal(df_fcn, df_anal, decimal=3)
    # Compute describing function for a describing function nonlinearity
    df_fcn = ct.describing_function(satfcn, amprange)
    np.testing.assert_almost_equal(df_fcn, df_anal, decimal=3)
    # Compute describing function for a static I/O system
    df_sys = ct.describing_function(satsys, amprange)
    np.testing.assert_almost_equal(df_sys, df_anal, decimal=3)
    # Compute describing function on an array of values
    df_arr = ct.describing_function(satsys, amprange)
    np.testing.assert_almost_equal(df_arr, df_anal, decimal=3)
    # Evaluate static function at a negative amplitude
    with pytest.raises(ValueError, match="cannot evaluate"):
        ct.describing_function(saturation, -1)
    # Create describing function nonlinearity w/out describing_function method
    # and make sure it drops through to the underlying computation
    class my_saturation(ct.DescribingFunctionNonlinearity):
        def __call__(self, x):
            return saturation(x)
    satfcn_nometh = my_saturation()
    df_nometh = ct.describing_function(satfcn_nometh, amprange)
    np.testing.assert_almost_equal(df_nometh, df_anal, decimal=3)
@pytest.mark.parametrize("fcn, amin, amax", [
    [saturation_nonlinearity(1), 0, 10],
    [friction_backlash_nonlinearity(2), 1, 10],
    [relay_hysteresis_nonlinearity(1, 1), 3, 10],
])
def test_describing_function(fcn, amin, amax):
    """Compare numerically computed describing functions against each
    nonlinearity's analytical describing_function() method."""
    # Store the analytic describing function for comparison
    amprange = np.linspace(amin, amax, 100)
    df_anal = [fcn.describing_function(a) for a in amprange]
    # Compute describing function on an array of values
    df_arr = ct.describing_function(
        fcn, amprange, zero_check=False, try_method=False)
    np.testing.assert_almost_equal(df_arr, df_anal, decimal=1)
    # Make sure the describing function method also works
    df_meth = ct.describing_function(fcn, amprange, zero_check=False)
    np.testing.assert_almost_equal(df_meth, df_anal)
    # Make sure that evaluation at negative amplitude generates an exception
    with pytest.raises(ValueError, match="cannot evaluate"):
        ct.describing_function(fcn, -1)
def test_describing_function_plot():
    """Check limit-cycle intersections returned by describing_function_plot().

    Each intersection (a, w) must satisfy H(jw) = -1/N(a)."""
    # Simple linear system with at most 1 intersection
    H_simple = ct.tf([1], [1, 2, 2, 1])
    omega = np.logspace(-1, 2, 100)
    # Saturation nonlinearity
    F_saturation = ct.descfcn.saturation_nonlinearity(1)
    amp = np.linspace(1, 4, 10)
    # No intersection
    xsects = ct.describing_function_plot(H_simple, F_saturation, amp, omega)
    assert xsects == []
    # One intersection
    H_larger = H_simple * 8
    xsects = ct.describing_function_plot(H_larger, F_saturation, amp, omega)
    for a, w in xsects:
        np.testing.assert_almost_equal(
            H_larger(1j*w),
            -1/ct.describing_function(F_saturation, a), decimal=5)
    # Multiple intersections
    H_multiple = H_simple * ct.tf(*ct.pade(5, 4)) * 4
    omega = np.logspace(-1, 3, 50)
    F_backlash = ct.descfcn.friction_backlash_nonlinearity(1)
    amp = np.linspace(0.6, 5, 50)
    xsects = ct.describing_function_plot(H_multiple, F_backlash, amp, omega)
    for a, w in xsects:
        np.testing.assert_almost_equal(
            -1/ct.describing_function(F_backlash, a),
            H_multiple(1j*w), decimal=5)
def test_describing_function_exceptions():
    """Exercise error and warning paths in the describing function code."""
    # Describing function with non-zero bias
    with pytest.warns(UserWarning, match="asymmetric"):
        saturation = ct.descfcn.saturation_nonlinearity(lb=-1, ub=2)
        assert saturation(-3) == -1
        assert saturation(3) == 2

    # Turn off the bias check
    bias = ct.describing_function(saturation, 0, zero_check=False)

    # Function should evaluate to zero at zero amplitude
    # (plain def instead of an assigned lambda, per PEP 8 / E731)
    def f(x):
        return x + 0.5
    with pytest.raises(ValueError, match="must evaluate to zero"):
        bias = ct.describing_function(f, 0, zero_check=True)

    # Evaluate at a negative amplitude
    with pytest.raises(ValueError, match="cannot evaluate"):
        ct.describing_function(saturation, -1)

    # Describing function with bad label
    H_simple = ct.tf([8], [1, 2, 2, 1])
    F_saturation = ct.descfcn.saturation_nonlinearity(1)
    amp = np.linspace(1, 4, 10)
    with pytest.raises(ValueError, match="formatting string"):
        ct.describing_function_plot(H_simple, F_saturation, amp, label=1)
| bsd-3-clause | 42657dc5fe7cd95b6a7be6c570ef68ac | 35.607143 | 78 | 0.676098 | 3.271774 | false | true | false | false |
python-control/python-control | control/tests/minreal_test.py | 2 | 4121 | """minreal_test.py - test state space class
Rvp, 13 Jun 2013
"""
import numpy as np
from scipy.linalg import eigvals
import pytest
from control import rss, ss, zeros
from control.statesp import StateSpace
from control.xferfcn import TransferFunction
from itertools import permutations
from control.tests.conftest import slycotonly
@pytest.fixture
def fixedseed():
    """Seed the RNG before each test so random systems are reproducible.

    NOTE(review): the original signature was ``fixedseed(scope="class")``;
    pytest ignores fixture parameters that have default values, so that
    ``scope`` argument was a silent no-op.  It is removed here rather than
    promoted to ``@pytest.fixture(scope="class")``, since changing the real
    scope would change the RNG stream that testMinrealBrute is calibrated to.
    """
    np.random.seed(5)
@slycotonly
@pytest.mark.usefixtures("fixedseed")
class TestMinreal:
    """Tests for the StateSpace class."""

    def assert_numden_almost_equal(self, n1, n2, d1, d2):
        """Compare two numerator/denominator pairs, zeroing out near-zero
        coefficients and stripping leading/trailing zeros first."""
        n1[np.abs(n1) < 1e-10] = 0.
        n1 = np.trim_zeros(n1)
        d1[np.abs(d1) < 1e-10] = 0.
        d1 = np.trim_zeros(d1)
        n2[np.abs(n2) < 1e-10] = 0.
        n2 = np.trim_zeros(n2)
        d2[np.abs(d2) < 1e-10] = 0.
        d2 = np.trim_zeros(d2)
        np.testing.assert_array_almost_equal(n1, n2)
        # BUG FIX: previously compared d2 against itself, which made the
        # denominator check a no-op
        np.testing.assert_array_almost_equal(d1, d2)

    def testMinrealBrute(self):
        """Brute-force check of minreal() over many random systems."""
        # depending on the seed and minreal performance, a number of
        # reductions is produced. If random gen or minreal change, this
        # will be likely to fail
        nreductions = 0
        for n, m, p in permutations(range(1, 6), 3):
            s = rss(n, p, m)
            sr = s.minreal()
            if s.nstates > sr.nstates:
                nreductions += 1
            else:
                # Check to make sure that poles and zeros match
                # For poles, just look at eigenvalues of A
                np.testing.assert_array_almost_equal(
                    np.sort(eigvals(s.A)), np.sort(eigvals(sr.A)))
                # For zeros, need to extract SISO systems
                for i in range(m):
                    for j in range(p):
                        # Extract SISO dynamixs from input i to output j
                        s1 = ss(s.A, s.B[:, i], s.C[j, :], s.D[j, i])
                        s2 = ss(sr.A, sr.B[:, i], sr.C[j, :], sr.D[j, i])
                        # Check that the zeros match
                        # Note: sorting doesn't work => have to do the hard way
                        z1 = zeros(s1)
                        z2 = zeros(s2)
                        # Start by making sure we have the same # of zeros
                        assert len(z1) == len(z2)
                        # Make sure all zeros in s1 are in s2
                        for z in z1:
                            # Find the closest zero TODO: find proper bounds
                            assert min(abs(z2 - z)) <= 1e-7
                        # Make sure all zeros in s2 are in s1
                        for z in z2:
                            # Find the closest zero
                            assert min(abs(z1 - z)) <= 1e-7
        # Make sure that the number of systems reduced is as expected
        # (Need to update this number if you change the seed at top of file)
        assert nreductions == 2

    def testMinrealSS(self):
        """Test a minreal model reduction"""
        # A = [-2, 0.5, 0; 0.5, -0.3, 0; 0, 0, -0.1]
        A = [[-2, 0.5, 0], [0.5, -0.3, 0], [0, 0, -0.1]]
        # B = [0.3, -1.3; 0.1, 0; 1, 0]
        B = [[0.3, -1.3], [0.1, 0.], [1.0, 0.0]]
        # C = [0, 0.1, 0; -0.3, -0.2, 0]
        C = [[0., 0.1, 0.0], [-0.3, -0.2, 0.0]]
        # D = [0 -0.8; -0.3 0]
        D = [[0., -0.8], [-0.3, 0.]]
        # sys = ss(A, B, C, D)
        sys = StateSpace(A, B, C, D)
        sysr = sys.minreal()
        assert sysr.nstates == 2
        assert sysr.ninputs == sys.ninputs
        assert sysr.noutputs == sys.noutputs
        np.testing.assert_array_almost_equal(
            eigvals(sysr.A), [-2.136154, -0.1638459])

    def testMinrealtf(self):
        """Try the minreal function, and also test easy entry by creation
        of a Laplace variable s"""
        s = TransferFunction([1, 0], [1])
        h = (s+1)*(s+2.00000000001)/(s+2)/(s**2+s+1)
        hm = h.minreal()
        hr = (s+1)/(s**2+s+1)
        np.testing.assert_array_almost_equal(hm.num[0][0], hr.num[0][0])
        np.testing.assert_array_almost_equal(hm.den[0][0], hr.den[0][0])
| bsd-3-clause | 65d75e3b31ee4fe3140a51b5928e930e | 34.525862 | 79 | 0.503033 | 3.179784 | false | true | false | false |
datafolklabs/cement | cement/core/handler.py | 1 | 13163 | """
Cement core handler module.
"""
import re
from abc import ABC
from ..core import exc, meta
from ..utils.misc import minimal_logger
# Module-level logger for framework debug output
LOG = minimal_logger(__name__)
class Handler(ABC, meta.MetaMixin):
    """Base handler class that all Cement Handlers should subclass from."""

    class Meta:
        """
        Handler meta-data (can also be passed as keyword arguments to the
        parent class).
        """

        label = NotImplemented
        """The string identifier of this handler."""

        interface = NotImplemented
        """The interface that this class implements."""

        config_section = None
        """
        A config section to merge config_defaults with.

        Note: Though ``App.Meta.config_section`` defaults to ``None``, Cement
        will set this to the value of ``<interface_label>.<handler_label>`` if
        no section is set by the user/developer.
        """

        config_defaults = None
        """
        A config dictionary that is merged into the applications config
        in the ``[<config_section>]`` block. These are defaults and do not
        override any existing defaults under that section.
        """

        overridable = False
        """
        Whether or not handler can be overridden by
        ``App.Meta.handler_override_options``. Will be listed as an
        available choice to override the specific handler (i.e.
        ``App.Meta.output_handler``, etc).
        """

    def __init__(self, **kw):
        super(Handler, self).__init__(**kw)

        # Validate meta-data with explicit checks instead of `assert`:
        # asserts are stripped when Python runs with -O, which would
        # silently skip this validation.  ``NotImplemented`` is also truthy,
        # so it must be tested for explicitly (the original assert did not
        # actually catch an undefined ``label``/``interface``).
        if self._meta.label is NotImplemented or not self._meta.label:
            raise exc.FrameworkError(
                "%s.Meta.label undefined." % self.__class__.__name__)
        if self._meta.interface is NotImplemented or not self._meta.interface:
            raise exc.FrameworkError(
                "%s.Meta.interface undefined." % self.__class__.__name__)

        # set by _setup() during application initialization
        self.app = None

    def _setup(self, app):
        """
        Called during application initialization and must ``setup`` the handler
        object making it ready for the framework or the application to make
        further calls to it.

        Args:
            app (instance): The application object.

        """
        self.app = app

        # default the config section to ``<interface>.<label>`` if the
        # handler did not specify one
        if self._meta.config_section is None:
            self._meta.config_section = "%s.%s" % \
                (self._meta.interface, self._meta.label)

        if self._meta.config_defaults is not None:
            LOG.debug("merging config defaults from '%s' " % self +
                      "into section '%s'" % self._meta.config_section)
            dict_obj = dict()
            dict_obj[self._meta.config_section] = self._meta.config_defaults
            # override=False: existing defaults in that section win
            self.app.config.merge(dict_obj, override=False)

        self._validate()

    def _validate(self):
        """
        Perform any validation to ensure proper data, meta-data, etc.
        """
        pass  # pragma: nocover
class HandlerManager(object):
"""
Manages the handler system to define, get, resolve, etc handlers with
the Cement Framework.
"""
    def __init__(self, app):
        # Back-reference to the application, plus the handler registry
        # keyed as __handlers__[interface][handler_label] -> Handler class.
        # Per-interface dicts are created lazily in register().
        self.app = app
        self.__handlers__ = {}
def get(self, interface, handler_label, fallback=None, **kwargs):
"""
Get a handler object.
Args:
interface (str): The interface of the handler (i.e. ``output``)
handler_label (str): The label of the handler (i.e. ``json``)
fallback (Handler): A fallback value to return if handler_label
doesn't exist.
Keyword Args:
setup (bool): Whether or not to call ``setup()`` on the handler
before returning. This will not be called on the ``fallback``
if no the handler given does not exist.
Returns:
Handler: An uninstantiated handler object
Raises:
cement.core.exc.InterfaceError: If the ``interface`` does not
exist, or if the handler itself does not exist.
Example:
.. code-block:: python
_handler = app.handler.get('output', 'json')
output = _handler()
output._setup(app)
output.render(dict(foo='bar'))
"""
setup = kwargs.get('setup', False)
if interface not in self.app.interface.list():
raise exc.InterfaceError("Interface '%s' does not exist!" %
interface)
if handler_label in self.__handlers__[interface]:
if setup is True:
han = self.__handlers__[interface][handler_label]
return self.setup(han)
else:
return self.__handlers__[interface][handler_label]
elif fallback is not None:
return fallback
else:
raise exc.InterfaceError("handlers['%s']['%s'] does not exist!" %
(interface, handler_label))
def list(self, interface):
"""
Return a list of handlers for a given ``interface``.
Args:
interface (str): The interface of the handler (i.e. ``output``)
Returns:
list: Handler labels (str) that match ``interface``.
Raises:
cement.core.exc.InterfaceError: If the ``interface`` does not
exist.
Example:
.. code-block:: python
app.handler.list('log')
"""
if not self.app.interface.defined(interface):
raise exc.InterfaceError("Interface '%s' does not exist!" %
interface)
res = []
for label in self.__handlers__[interface]:
res.append(self.__handlers__[interface][label])
return res
def register(self, handler_class, force=False):
    """
    Register a handler class to an interface.  If the same object is
    already registered then no exception is raised, however if a different
    object attempts to be registered to the same name a ``InterfaceError``
    is raised.

    Args:
        handler_class (Handler): The uninstantiated handler class to
            register.

    Keyword Arguments:
        force (bool): Whether to allow replacement if an existing
            handler of the same ``label`` is already registered.

    Raises:
        cement.core.exc.InterfaceError: If the ``handler_class`` does not
            implement :class:`Handler`, or if ``handler_class`` does not
            properly sub-class it's interface.
        cement.core.exc.InterfaceError: If the
            ``handler_class.Meta.interface`` does not exist

    Usage:

        .. code-block:: python

            class MyDatabaseHandler(object):
                class Meta:
                    interface = IDatabase
                    label = 'mysql'

                def connect(self):
                    # ...

            app.handler.register(MyDatabaseHandler)

    """
    # for checks
    if not issubclass(handler_class, Handler):
        raise exc.InterfaceError("Class %s " % handler_class +
                                 "does not implement Handler")

    # instantiate once so the handler's meta-data is fully resolved
    obj = handler_class()

    # translate dashes to underscores
    # NOTE: both the class-level Meta and the instance's resolved _meta
    # are normalized so later lookups by label are consistent
    handler_class.Meta.label = re.sub('-', '_', obj._meta.label)
    obj._meta.label = re.sub('-', '_', obj._meta.label)

    interface = obj._meta.interface
    LOG.debug("registering handler '%s' into handlers['%s']['%s']" %
              (handler_class, interface, obj._meta.label))

    if interface not in self.app.interface.list():
        raise exc.InterfaceError("Handler interface '%s' doesn't exist." %
                                 interface)
    elif interface not in self.__handlers__.keys():
        # lazily create the per-interface handler map on first register
        self.__handlers__[interface] = {}

    # re-registering the *same* class is a no-op; a *different* class
    # under the same label requires force=True
    if obj._meta.label in self.__handlers__[interface] and \
            self.__handlers__[interface][obj._meta.label] != handler_class:
        if force is True:
            LOG.debug(
                "handlers['%s']['%s'] already exists" %
                (interface, obj._meta.label) +
                ", but `force==True`"
            )
        else:
            raise exc.InterfaceError(
                "handlers['%s']['%s'] already exists" %
                (interface, obj._meta.label)
            )

    # the handler must also sub-class the interface's base class
    interface_class = self.app.interface.get(interface)
    if not issubclass(handler_class, interface_class):
        raise exc.InterfaceError("Handler %s " % handler_class.__name__ +
                                 "does not sub-class %s" %
                                 interface_class.__name__)

    self.__handlers__[interface][obj._meta.label] = handler_class
def registered(self, interface, handler_label):
    """
    Check if a handler is registered.

    Args:
        interface (str): The interface of the handler (interface label)
        handler_label (str): The label of the handler

    Returns:
        bool: ``True`` if the handler is registered, ``False`` otherwise

    Example:

        .. code-block:: python

            app.handler.registered('log', 'colorlog')

    """
    # a handler is registered when the interface exists, a handler map
    # has been created for it, and the label is present in that map
    handlers = self.__handlers__
    return (interface in self.app.interface.list()
            and interface in handlers
            and handler_label in handlers[interface])
def setup(self, handler_class):
"""
Setup a handler class so that it can be used.
Args:
handler_class (class): An uninstantiated handler class.
Returns: None
Example:
.. code-block:: python
for controller in app.handler.list('controller'):
ch = app.handler.setup(controller)
"""
h = handler_class()
h._setup(self.app)
return h
def resolve(self, interface, handler_def, **kwargs):
    """
    Resolves the actual handler, as it can be either a string identifying
    the handler to load from ``self.__handlers__``, or it can be an
    instantiated or non-instantiated handler class.

    Args:
        interface (str): The interface of the handler (ex: ``output``)
        handler_def(str,instance,Handler): The loose references of the
            handler, by label, instantiated object, or non-instantiated
            class.

    Keyword args:
        raise_error (bool): Whether or not to raise an exception if unable
            to resolve the handler.
        meta_defaults (dict): Optional meta-data dictionary used as
            defaults to pass when instantiating uninstantiated handlers.
            Use ``App.Meta.meta_defaults`` by default.
        setup (bool): Whether or not to call ``.setup()`` before return.
            Default: ``False``

    Returns:
        instance: The instantiated handler object.

    Example:

        .. code-block:: python

            # via label (str)
            log = app.handler.resolve('log', 'colorlog')

            # via uninstantiated handler class
            log = app.handler.resolve('log', ColorLogHandler)

            # via instantiated handler instance
            log = app.handler.resolve('log', ColorLogHandler())

    """
    raise_error = kwargs.get('raise_error', True)
    setup = kwargs.get('setup', False)
    meta_defaults = kwargs.get('meta_defaults', None)

    if meta_defaults is None:
        meta_defaults = {}

        # app-wide meta defaults are keyed as '<interface>.<label>'
        if isinstance(handler_def, str):
            label = handler_def
        elif hasattr(handler_def, 'Meta'):
            label = handler_def.Meta.label
        else:
            label = None

        if label is not None:
            _meta_label = "%s.%s" % (interface, label)
            meta_defaults = self.app._meta.meta_defaults.get(_meta_label,
                                                             {})

    han = None
    if isinstance(handler_def, str):
        # label reference: look the class up and instantiate it
        han = self.get(interface, handler_def)(**meta_defaults)
    elif hasattr(handler_def, '_meta'):
        # already an instantiated handler (only instances have _meta)
        if not self.registered(interface, handler_def._meta.label):
            self.register(handler_def.__class__)
        han = handler_def
    elif hasattr(handler_def, 'Meta'):
        # uninstantiated handler class
        han = handler_def(**meta_defaults)
        if not self.registered(interface, han._meta.label):
            self.register(handler_def)

    if han is not None:
        if setup is True:
            han._setup(self.app)
        return han

    msg = "Unable to resolve handler '%s' of interface '%s'" % \
          (handler_def, interface)
    if raise_error:
        raise exc.FrameworkError(msg)

    LOG.debug(msg)
    return None
| bsd-3-clause | 9225e41b5f0b1887fc336efe0558df4b | 32.239899 | 79 | 0.537719 | 4.741715 | false | false | false | false |
datafolklabs/cement | cement/ext/ext_tabulate.py | 1 | 3058 | """
Cement Tabulate extension module.
**Note** This extension has an external dependency on ``tabulate``. Cement
explicitly does **not** include external dependencies for optional
extensions.
* In Cement ``>=3.0.8`` you must include ``cement[tabulate]`` in your
applications dependencies.
* In Cement ``<3.0.8`` you must include ``tabulate`` in your applications
dependencies.
"""
from tabulate import tabulate
from ..core import output
from ..utils.misc import minimal_logger
LOG = minimal_logger(__name__)
class TabulateOutputHandler(output.OutputHandler):

    """
    This class implements the :ref:`Output <cement.core.output>` Handler
    interface.  It renders data as text tables via the
    `Tabulate <https://pypi.python.org/pypi/tabulate>`_ module.  Please
    see the developer documentation on
    :cement:`Output Handling <dev/output>`.

    """

    class Meta:

        """Handler meta-data."""

        label = 'tabulate'

        #: Whether or not to pad the output with an extra pre/post '\n'
        padding = True

        #: Default template format.  See the ``tabulate`` documentation for
        #: all supported template formats.
        format = 'orgtbl'

        #: Default headers to use.
        headers = []

        #: Default alignment for string columns.  See the ``tabulate``
        #: documentation for all supported ``stralign`` options.
        string_alignment = 'left'

        #: Default alignment for numeric columns.  See the ``tabulate``
        #: documentation for all supported ``numalign`` options.
        numeric_alignment = 'decimal'

        #: String format to use for float values.
        float_format = 'g'

        #: Default replacement for missing value.
        missing_value = ''

        #: Whether or not to include ``tabulate`` as an available to choice
        #: to override the ``output_handler`` via command line options.
        overridable = False

    def render(self, data, **kw):
        """
        Take a data dictionary and render it into a table.  Additional
        keyword arguments are passed directly to ``tabulate.tabulate``.

        Args:
            data (dict): The data to render as a table.

        Returns:
            str: The rendered table text

        """
        meta = self._meta

        # any keyword argument overrides its Meta default
        table = tabulate(
            data,
            kw.get('headers', meta.headers),
            tablefmt=kw.get('tablefmt', meta.format),
            stralign=kw.get('stralign', meta.string_alignment),
            numalign=kw.get('numalign', meta.numeric_alignment),
            missingval=kw.get('missingval', meta.missing_value),
            floatfmt=kw.get('floatfmt', meta.float_format),
        )

        table = table + '\n'
        if meta.padding is True:
            table = '\n' + table + '\n'
        return table
def load(app):
    """Extension hook: register this extension's handlers with the app."""
    app.handler.register(TabulateOutputHandler)
| bsd-3-clause | e7f7e930040bd7dde3036c9524402b64 | 30.204082 | 76 | 0.595814 | 4.419075 | false | false | false | false |
datafolklabs/cement | cement/cli/contrib/jinja2/ext.py | 4 | 31502 | """Extension API for adding custom tags and behavior."""
import pprint
import re
import typing as t
from markupsafe import Markup
from . import defaults
from . import nodes
from .environment import Environment
from .exceptions import TemplateAssertionError
from .exceptions import TemplateSyntaxError
from .runtime import concat # type: ignore
from .runtime import Context
from .runtime import Undefined
from .utils import import_string
from .utils import pass_context
# Imports and protocol definitions below exist only for static type
# checkers; nothing here is evaluated at runtime.
if t.TYPE_CHECKING:
    import typing_extensions as te

    from .lexer import Token
    from .lexer import TokenStream
    from .parser import Parser

    class _TranslationsBasic(te.Protocol):
        # minimal gettext-style translations object
        # (e.g. ``gettext.NullTranslations``)
        def gettext(self, message: str) -> str:
            ...

        def ngettext(self, singular: str, plural: str, n: int) -> str:
            pass

    class _TranslationsContext(_TranslationsBasic):
        # translations object that additionally supports message contexts
        def pgettext(self, context: str, message: str) -> str:
            ...

        def npgettext(self, context: str, singular: str, plural: str, n: int) -> str:
            ...

    _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext]
# I18N functions available in Jinja templates. If the I18N library
# provides ugettext, it will be assigned to gettext.
GETTEXT_FUNCTIONS: t.Tuple[str, ...] = (
    "_",
    "gettext",
    "ngettext",
    "pgettext",
    "npgettext",
)

# collapses a newline and its surrounding whitespace into a single space
# (used by ``{% trans trimmed %}`` blocks)
_ws_re = re.compile(r"\s*\n\s*")
class Extension:
    """Extensions can be used to add extra functionality to the Jinja template
    system at the parser level. Custom extensions are bound to an environment
    but may not store environment specific data on `self`. The reason for
    this is that an extension can be bound to another environment (for
    overlays) by creating a copy and reassigning the `environment` attribute.

    As extensions are created by the environment they cannot accept any
    arguments for configuration. One may want to work around that by using
    a factory function, but that is not possible as extensions are identified
    by their import name. The correct way to configure the extension is
    storing the configuration values on the environment. Because this way the
    environment ends up acting as central configuration storage the
    attributes may clash which is why extensions have to ensure that the names
    they choose for configuration are not too generic. ``prefix`` for example
    is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
    name as includes the name of the extension (fragment cache).
    """

    #: dotted import path of the extension, derived automatically from
    #: the subclass's module and class name in ``__init_subclass__``
    identifier: t.ClassVar[str]

    def __init_subclass__(cls) -> None:
        cls.identifier = f"{cls.__module__}.{cls.__name__}"

    #: if this extension parses this is the list of tags it's listening to.
    tags: t.Set[str] = set()

    #: the priority of that extension. This is especially useful for
    #: extensions that preprocess values. A lower value means higher
    #: priority.
    #:
    #: .. versionadded:: 2.4
    priority = 100

    def __init__(self, environment: Environment) -> None:
        self.environment = environment

    def bind(self, environment: Environment) -> "Extension":
        """Create a copy of this extension bound to another environment."""
        # bypass __init__ and copy state, then swap the environment
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        rv.environment = environment
        return rv

    def preprocess(
        self, source: str, name: t.Optional[str], filename: t.Optional[str] = None
    ) -> str:
        """This method is called before the actual lexing and can be used to
        preprocess the source. The `filename` is optional. The return value
        must be the preprocessed source.
        """
        return source

    def filter_stream(
        self, stream: "TokenStream"
    ) -> t.Union["TokenStream", t.Iterable["Token"]]:
        """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
        to filter tokens returned. This method has to return an iterable of
        :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
        :class:`~jinja2.lexer.TokenStream`.
        """
        return stream

    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
        """If any of the :attr:`tags` matched this method is called with the
        parser as first argument. The token the parser stream is pointing at
        is the name token that matched. This method has to return one or a
        list of multiple nodes.
        """
        raise NotImplementedError()

    def attr(
        self, name: str, lineno: t.Optional[int] = None
    ) -> nodes.ExtensionAttribute:
        """Return an attribute node for the current extension. This is useful
        to pass constants on extensions to generated template code.

        ::

            self.attr('_my_attribute', lineno=lineno)
        """
        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)

    def call_method(
        self,
        name: str,
        args: t.Optional[t.List[nodes.Expr]] = None,
        kwargs: t.Optional[t.List[nodes.Keyword]] = None,
        dyn_args: t.Optional[nodes.Expr] = None,
        dyn_kwargs: t.Optional[nodes.Expr] = None,
        lineno: t.Optional[int] = None,
    ) -> nodes.Call:
        """Call a method of the extension. This is a shortcut for
        :meth:`attr` + :class:`jinja2.nodes.Call`.
        """
        if args is None:
            args = []

        if kwargs is None:
            kwargs = []

        return nodes.Call(
            self.attr(name, lineno=lineno),
            args,
            kwargs,
            dyn_args,
            dyn_kwargs,
            lineno=lineno,
        )
@pass_context
def _gettext_alias(
    __context: Context, *args: t.Any, **kwargs: t.Any
) -> t.Union[t.Any, Undefined]:
    # template-global ``_``: defers to whatever ``gettext`` is in the
    # current template context, so translations can be (un)installed
    # at runtime without rebinding the alias
    return __context.call(__context.resolve("gettext"), *args, **kwargs)
def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]:
    """Wrap *func* as a newstyle ``gettext`` callable for templates."""

    @pass_context
    def gettext(__context: Context, __string: str, **variables: t.Any) -> str:
        translated = __context.call(func, __string)

        if __context.eval_ctx.autoescape:
            translated = Markup(translated)

        # The translated message is always used as a format string, even
        # when no variables are given.  This keeps translation strings
        # consistent and predictable (``%`` must always be escaped).
        return translated % variables  # type: ignore

    return gettext
def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]:
    """Wrap *func* as a newstyle ``ngettext`` callable for templates."""

    @pass_context
    def ngettext(
        __context: Context,
        __singular: str,
        __plural: str,
        __num: int,
        **variables: t.Any,
    ) -> str:
        # make the count available to the format string as ``num``
        variables.setdefault("num", __num)
        translated = __context.call(func, __singular, __plural, __num)

        if __context.eval_ctx.autoescape:
            translated = Markup(translated)

        # Always treat the message as a format string, see gettext above.
        return translated % variables  # type: ignore

    return ngettext
def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]:
    """Wrap *func* as a newstyle ``pgettext`` callable for templates."""

    @pass_context
    def pgettext(
        __context: Context, __string_ctx: str, __string: str, **variables: t.Any
    ) -> str:
        # make the message context available to the format string
        variables.setdefault("context", __string_ctx)
        translated = __context.call(func, __string_ctx, __string)

        if __context.eval_ctx.autoescape:
            translated = Markup(translated)

        # Always treat the message as a format string, see gettext above.
        return translated % variables  # type: ignore

    return pgettext
def _make_new_npgettext(
    func: t.Callable[[str, str, str, int], str]
) -> t.Callable[..., str]:
    """Wrap *func* as a newstyle ``npgettext`` callable for templates."""

    @pass_context
    def npgettext(
        __context: Context,
        __string_ctx: str,
        __singular: str,
        __plural: str,
        __num: int,
        **variables: t.Any,
    ) -> str:
        # expose both the message context and the count to the format string
        variables.setdefault("context", __string_ctx)
        variables.setdefault("num", __num)
        translated = __context.call(func, __string_ctx, __singular, __plural, __num)

        if __context.eval_ctx.autoescape:
            translated = Markup(translated)

        # Always treat the message as a format string, see gettext above.
        return translated % variables  # type: ignore

    return npgettext
class InternationalizationExtension(Extension):
    """This extension adds gettext support to Jinja."""

    tags = {"trans"}

    # TODO: the i18n extension is currently reevaluating values in a few
    # situations. Take this example:
    #   {% trans count=something() %}{{ count }} foo{% pluralize
    #     %}{{ count }} fooss{% endtrans %}
    # something is called twice here. One time for the gettext value and
    # the other time for the n-parameter of the ngettext function.

    def __init__(self, environment: Environment) -> None:
        super().__init__(environment)

        # ``_`` is a lazy alias so installing/uninstalling translations
        # at runtime takes effect immediately
        environment.globals["_"] = _gettext_alias
        environment.extend(
            install_gettext_translations=self._install,
            install_null_translations=self._install_null,
            install_gettext_callables=self._install_callables,
            uninstall_gettext_translations=self._uninstall,
            extract_translations=self._extract,
            newstyle_gettext=False,
        )

    def _install(
        self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None
    ) -> None:
        """Install a gettext-style translations object on the environment."""
        # ugettext and ungettext are preferred in case the I18N library
        # is providing compatibility with older Python versions.
        gettext = getattr(translations, "ugettext", None)

        if gettext is None:
            gettext = translations.gettext

        ngettext = getattr(translations, "ungettext", None)

        if ngettext is None:
            ngettext = translations.ngettext

        # context-aware variants are optional on the translations object
        pgettext = getattr(translations, "pgettext", None)
        npgettext = getattr(translations, "npgettext", None)
        self._install_callables(
            gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext
        )

    def _install_null(self, newstyle: t.Optional[bool] = None) -> None:
        """Install no-op translations (messages pass through unchanged)."""
        import gettext

        translations = gettext.NullTranslations()

        if hasattr(translations, "pgettext"):
            # NullTranslations provides pgettext natively here; otherwise
            # fall back to identity functions below
            pgettext = translations.pgettext  # type: ignore
        else:

            def pgettext(c: str, s: str) -> str:
                return s

        if hasattr(translations, "npgettext"):
            npgettext = translations.npgettext  # type: ignore
        else:

            def npgettext(c: str, s: str, p: str, n: int) -> str:
                return s if n == 1 else p

        self._install_callables(
            gettext=translations.gettext,
            ngettext=translations.ngettext,
            newstyle=newstyle,
            pgettext=pgettext,
            npgettext=npgettext,
        )

    def _install_callables(
        self,
        gettext: t.Callable[[str], str],
        ngettext: t.Callable[[str, str, int], str],
        newstyle: t.Optional[bool] = None,
        pgettext: t.Optional[t.Callable[[str, str], str]] = None,
        npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None,
    ) -> None:
        """Install the given callables as the environment's i18n globals."""
        if newstyle is not None:
            self.environment.newstyle_gettext = newstyle  # type: ignore

        if self.environment.newstyle_gettext:  # type: ignore
            # newstyle wrappers handle variable expansion and autoescaping
            gettext = _make_new_gettext(gettext)
            ngettext = _make_new_ngettext(ngettext)

            if pgettext is not None:
                pgettext = _make_new_pgettext(pgettext)

            if npgettext is not None:
                npgettext = _make_new_npgettext(npgettext)

        self.environment.globals.update(
            gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext
        )

    def _uninstall(self, translations: "_SupportedTranslations") -> None:
        """Remove the installed i18n callables from the environment."""
        for key in ("gettext", "ngettext", "pgettext", "npgettext"):
            self.environment.globals.pop(key, None)

    def _extract(
        self,
        source: t.Union[str, nodes.Template],
        gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
    ) -> t.Iterator[
        t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
    ]:
        """Extract translatable strings from a template source or AST."""
        if isinstance(source, str):
            source = self.environment.parse(source)

        return extract_from_ast(source, gettext_functions)

    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
        """Parse a translatable tag."""
        lineno = next(parser.stream).lineno

        context = None
        # an optional leading string literal is the message context
        context_token = parser.stream.next_if("string")

        if context_token is not None:
            context = context_token.value

        # find all the variables referenced. Additionally a variable can be
        # defined in the body of the trans block too, but this is checked at
        # a later state.
        plural_expr: t.Optional[nodes.Expr] = None
        plural_expr_assignment: t.Optional[nodes.Assign] = None
        num_called_num = False
        variables: t.Dict[str, nodes.Expr] = {}
        trimmed = None
        while parser.stream.current.type != "block_end":
            if variables:
                parser.stream.expect("comma")

            # skip colon for python compatibility
            if parser.stream.skip_if("colon"):
                break

            token = parser.stream.expect("name")

            if token.value in variables:
                parser.fail(
                    f"translatable variable {token.value!r} defined twice.",
                    token.lineno,
                    exc=TemplateAssertionError,
                )

            # expressions
            if parser.stream.current.type == "assign":
                next(parser.stream)
                variables[token.value] = var = parser.parse_expression()
            elif trimmed is None and token.value in ("trimmed", "notrimmed"):
                trimmed = token.value == "trimmed"
                continue
            else:
                variables[token.value] = var = nodes.Name(token.value, "load")

            if plural_expr is None:
                if isinstance(var, nodes.Call):
                    # a call expression must only be evaluated once; assign
                    # it to the internal ``_trans`` name and reference that
                    plural_expr = nodes.Name("_trans", "load")
                    variables[token.value] = plural_expr
                    plural_expr_assignment = nodes.Assign(
                        nodes.Name("_trans", "store"), var
                    )
                else:
                    plural_expr = var

                num_called_num = token.value == "num"

        parser.stream.expect("block_end")

        plural = None
        have_plural = False
        referenced = set()

        # now parse until endtrans or pluralize
        singular_names, singular = self._parse_block(parser, True)

        if singular_names:
            referenced.update(singular_names)

            if plural_expr is None:
                plural_expr = nodes.Name(singular_names[0], "load")
                num_called_num = singular_names[0] == "num"

        # if we have a pluralize block, we parse that too
        if parser.stream.current.test("name:pluralize"):
            have_plural = True
            next(parser.stream)

            if parser.stream.current.type != "block_end":
                token = parser.stream.expect("name")

                if token.value not in variables:
                    parser.fail(
                        f"unknown variable {token.value!r} for pluralization",
                        token.lineno,
                        exc=TemplateAssertionError,
                    )

                plural_expr = variables[token.value]
                num_called_num = token.value == "num"

            parser.stream.expect("block_end")
            plural_names, plural = self._parse_block(parser, False)
            next(parser.stream)
            referenced.update(plural_names)
        else:
            next(parser.stream)

        # register free names as simple name expressions
        for name in referenced:
            if name not in variables:
                variables[name] = nodes.Name(name, "load")

        if not have_plural:
            plural_expr = None
        elif plural_expr is None:
            parser.fail("pluralize without variables", lineno)

        if trimmed is None:
            trimmed = self.environment.policies["ext.i18n.trimmed"]

        if trimmed:
            singular = self._trim_whitespace(singular)

            if plural:
                plural = self._trim_whitespace(plural)

        node = self._make_node(
            singular,
            plural,
            context,
            variables,
            plural_expr,
            bool(referenced),
            num_called_num and have_plural,
        )
        node.set_lineno(lineno)

        if plural_expr_assignment is not None:
            return [plural_expr_assignment, node]
        else:
            return node

    def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str:
        # collapse each newline (and surrounding indentation) to one space
        return _ws_re.sub(" ", string.strip())

    def _parse_block(
        self, parser: "Parser", allow_pluralize: bool
    ) -> t.Tuple[t.List[str], str]:
        """Parse until the next block tag with a given name."""
        referenced = []
        buf = []

        while True:
            if parser.stream.current.type == "data":
                # literal text; escape '%' so the result is a safe
                # %-format string
                buf.append(parser.stream.current.value.replace("%", "%%"))
                next(parser.stream)
            elif parser.stream.current.type == "variable_begin":
                next(parser.stream)
                name = parser.stream.expect("name").value
                referenced.append(name)
                buf.append(f"%({name})s")
                parser.stream.expect("variable_end")
            elif parser.stream.current.type == "block_begin":
                next(parser.stream)

                if parser.stream.current.test("name:endtrans"):
                    break
                elif parser.stream.current.test("name:pluralize"):
                    if allow_pluralize:
                        break

                    parser.fail(
                        "a translatable section can have only one pluralize section"
                    )

                parser.fail(
                    "control structures in translatable sections are not allowed"
                )
            elif parser.stream.eos:
                parser.fail("unclosed translation block")
            else:
                raise RuntimeError("internal parser error")

        return referenced, concat(buf)

    def _make_node(
        self,
        singular: str,
        plural: t.Optional[str],
        context: t.Optional[str],
        variables: t.Dict[str, nodes.Expr],
        plural_expr: t.Optional[nodes.Expr],
        vars_referenced: bool,
        num_called_num: bool,
    ) -> nodes.Output:
        """Generates a useful node from the data provided."""
        newstyle = self.environment.newstyle_gettext  # type: ignore
        node: nodes.Expr

        # no variables referenced? no need to escape for old style
        # gettext invocations only if there are vars.
        if not vars_referenced and not newstyle:
            singular = singular.replace("%%", "%")

            if plural:
                plural = plural.replace("%%", "%")

        # select gettext/ngettext/pgettext/npgettext based on whether a
        # context and/or a plural form is present
        func_name = "gettext"
        func_args: t.List[nodes.Expr] = [nodes.Const(singular)]

        if context is not None:
            func_args.insert(0, nodes.Const(context))
            func_name = f"p{func_name}"

        if plural_expr is not None:
            func_name = f"n{func_name}"
            func_args.extend((nodes.Const(plural), plural_expr))

        node = nodes.Call(nodes.Name(func_name, "load"), func_args, [], None, None)

        # in case newstyle gettext is used, the method is powerful
        # enough to handle the variable expansion and autoescape
        # handling itself
        if newstyle:
            for key, value in variables.items():
                # the function adds that later anyways in case num was
                # called num, so just skip it.
                if num_called_num and key == "num":
                    continue

                node.kwargs.append(nodes.Keyword(key, value))

        # otherwise do that here
        else:
            # mark the return value as safe if we are in an
            # environment with autoescaping turned on
            node = nodes.MarkSafeIfAutoescape(node)

            if variables:
                node = nodes.Mod(
                    node,
                    nodes.Dict(
                        [
                            nodes.Pair(nodes.Const(key), value)
                            for key, value in variables.items()
                        ]
                    ),
                )

        return nodes.Output([node])
class ExprStmtExtension(Extension):
    """Implements the ``{% do %}`` tag: evaluates an expression the same
    way ``{{ ... }}`` would, but discards the result instead of printing it.
    """

    tags = {"do"}

    def parse(self, parser: "Parser") -> nodes.ExprStmt:
        # consume the ``do`` token, remembering its line number
        lineno = next(parser.stream).lineno
        stmt = nodes.ExprStmt(lineno=lineno)
        stmt.node = parser.parse_tuple()
        return stmt
class LoopControlExtension(Extension):
    """Adds ``{% break %}`` and ``{% continue %}`` support to template loops."""

    tags = {"break", "continue"}

    def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]:
        tok = next(parser.stream)
        # the tag name itself tells us which node to emit
        node_cls = nodes.Break if tok.value == "break" else nodes.Continue
        return node_cls(lineno=tok.lineno)
class DebugExtension(Extension):
    """A ``{% debug %}`` tag that dumps the available variables,
    filters, and tests.

    .. code-block:: html+jinja

        <pre>{% debug %}</pre>

    .. code-block:: text

        {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
                     ...,
                     'namespace': <class 'jinja2.utils.Namespace'>},
         'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
                     ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
         'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
                   ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']}

    .. versionadded:: 2.11.0
    """

    tags = {"debug"}

    def parse(self, parser: "Parser") -> nodes.Output:
        # ``{% debug %}`` compiles to a call of self._render with the
        # current template context
        lineno = parser.stream.expect("name:debug").lineno
        context = nodes.ContextReference()
        result = self.call_method("_render", [context], lineno=lineno)
        return nodes.Output([result], lineno=lineno)

    def _render(self, context: Context) -> str:
        """Format the context, filters, and tests as pretty-printed text."""
        result = {
            "context": context.get_all(),
            "filters": sorted(self.environment.filters.keys()),
            "tests": sorted(self.environment.tests.keys()),
        }

        # Set the depth since the intent is to show the top few names.
        return pprint.pformat(result, depth=3, compact=True)
def extract_from_ast(
    ast: nodes.Template,
    gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
    babel_style: bool = True,
) -> t.Iterator[
    t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
]:
    """Extract localizable strings from the given template node.  Per
    default this function returns matches in babel style that means non string
    parameters as well as keyword arguments are returned as `None`.  This
    allows Babel to figure out what you really meant if you are using
    gettext functions that allow keyword arguments for placeholder expansion.
    If you don't want that behavior set the `babel_style` parameter to `False`
    which causes only strings to be returned and parameters are always stored
    in tuples.  As a consequence invalid gettext calls (calls without a single
    string parameter or string parameters after non-string parameters) are
    skipped.

    This example explains the behavior:

    >>> from jinja2 import Environment
    >>> env = Environment()
    >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
    >>> list(extract_from_ast(node))
    [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
    >>> list(extract_from_ast(node, babel_style=False))
    [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]

    For every string found this function yields a ``(lineno, function,
    message)`` tuple, where:

    * ``lineno`` is the number of the line on which the string was found,
    * ``function`` is the name of the ``gettext`` function used (if the
      string was extracted from embedded Python code), and
    * ``message`` is the string, or a tuple of strings for functions
      with multiple string arguments.

    This extraction function operates on the AST and is because of that unable
    to extract any comments.  For comment support you have to use the babel
    extraction interface or extract comments yourself.
    """
    out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]

    for node in ast.find_all(nodes.Call):
        # only consider direct calls to one of the known gettext functions
        if (
            not isinstance(node.node, nodes.Name)
            or node.node.name not in gettext_functions
        ):
            continue

        strings: t.List[t.Optional[str]] = []

        # constant string positional args are kept; everything else
        # (expressions, keyword args, *args/**kwargs) becomes None
        for arg in node.args:
            if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
                strings.append(arg.value)
            else:
                strings.append(None)

        for _ in node.kwargs:
            strings.append(None)

        if node.dyn_args is not None:
            strings.append(None)

        if node.dyn_kwargs is not None:
            strings.append(None)

        if not babel_style:
            # non-babel style drops the None placeholders entirely and
            # skips calls without any string arguments
            out = tuple(x for x in strings if x is not None)

            if not out:
                continue
        else:
            if len(strings) == 1:
                out = strings[0]
            else:
                out = tuple(strings)

        yield node.lineno, node.node.name, out
class _CommentFinder:
    """Locate translator comments in a lexed token stream.

    Scanning is strictly forward-only: once the comment for a later
    gettext call has been consumed, comments for earlier lines can no
    longer be found.
    """

    def __init__(
        self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str]
    ) -> None:
        self.tokens = tokens
        self.comment_tags = comment_tags
        # index of the first not-yet-consumed token
        self.offset = 0
        self.last_lineno = 0

    def find_backwards(self, offset: int) -> t.List[str]:
        window = self.tokens[self.offset : offset]
        # the scanned window is consumed whether or not a comment is found
        self.offset = offset

        for _, tok_type, tok_value in reversed(window):
            if tok_type not in ("comment", "linecomment"):
                continue

            parts = tok_value.split(None, 1)

            if len(parts) == 2 and parts[0] in self.comment_tags:
                return [parts[1].rstrip()]

        return []

    def find_comments(self, lineno: int) -> t.List[str]:
        if not self.comment_tags or self.last_lineno > lineno:
            return []

        # scan forward to the first token past *lineno*, then search the
        # window before it backwards for a tagged comment
        for idx, (tok_lineno, _, _) in enumerate(self.tokens[self.offset :]):
            if tok_lineno > lineno:
                return self.find_backwards(self.offset + idx)

        return self.find_backwards(len(self.tokens))
def babel_extract(
    fileobj: t.BinaryIO,
    keywords: t.Sequence[str],
    comment_tags: t.Sequence[str],
    options: t.Dict[str, t.Any],
) -> t.Iterator[
    t.Tuple[
        int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str]
    ]
]:
    """Babel extraction method for Jinja templates.

    .. versionchanged:: 2.3
       Basic support for translation comments was added.  If `comment_tags`
       is now set to a list of keywords for extraction, the extractor will
       try to find the best preceding comment that begins with one of the
       keywords.  For best results, make sure to not have more than one
       gettext call in one line of code and the matching comment in the
       same line or the line before.

    .. versionchanged:: 2.5.1
       The `newstyle_gettext` flag can be set to `True` to enable newstyle
       gettext calls.

    .. versionchanged:: 2.7
       A `silent` option can now be provided.  If set to `False` template
       syntax errors are propagated instead of being ignored.

    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results.
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
             (comments will be empty currently)
    """
    # a dict is used as an ordered set of extension classes
    extensions: t.Dict[t.Type[Extension], None] = {}

    for extension_name in options.get("extensions", "").split(","):
        extension_name = extension_name.strip()

        if not extension_name:
            continue

        extensions[import_string(extension_name)] = None

    if InternationalizationExtension not in extensions:
        extensions[InternationalizationExtension] = None

    def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool:
        return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"}

    silent = getbool(options, "silent", True)
    # build a throwaway environment configured from the extraction options
    environment = Environment(
        options.get("block_start_string", defaults.BLOCK_START_STRING),
        options.get("block_end_string", defaults.BLOCK_END_STRING),
        options.get("variable_start_string", defaults.VARIABLE_START_STRING),
        options.get("variable_end_string", defaults.VARIABLE_END_STRING),
        options.get("comment_start_string", defaults.COMMENT_START_STRING),
        options.get("comment_end_string", defaults.COMMENT_END_STRING),
        options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX,
        options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX,
        getbool(options, "trim_blocks", defaults.TRIM_BLOCKS),
        getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS),
        defaults.NEWLINE_SEQUENCE,
        getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE),
        tuple(extensions),
        cache_size=0,
        auto_reload=False,
    )

    if getbool(options, "trimmed"):
        environment.policies["ext.i18n.trimmed"] = True

    if getbool(options, "newstyle_gettext"):
        environment.newstyle_gettext = True  # type: ignore

    source = fileobj.read().decode(options.get("encoding", "utf-8"))

    try:
        node = environment.parse(source)
        # the token stream is only needed for comment lookup
        tokens = list(environment.lex(environment.preprocess(source)))
    except TemplateSyntaxError:
        if not silent:
            raise

        # skip templates with syntax errors
        return

    finder = _CommentFinder(tokens, comment_tags)

    for lineno, func, message in extract_from_ast(node, keywords):
        yield lineno, func, message, finder.find_comments(lineno)
#: nicer import names
# Short public aliases so extensions can be referenced by dotted path,
# e.g. "jinja2.ext.i18n" instead of the full class name.
i18n = InternationalizationExtension
do = ExprStmtExtension
loopcontrols = LoopControlExtension
debug = DebugExtension
datafolklabs/cement | cement/cli/main.py | 1 | 1347 |
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), 'contrib'))
from cement import App, CaughtSignal # noqa: E402
from .controllers.base import Base # noqa: E402
class CementApp(App):
    """The cement command-line application (declarative configuration only)."""

    class Meta:
        # Application label; also used as the config/template namespace.
        label = 'cement'
        # Default (root) controller handler label.
        controller = 'base'
        # Python module that ships the bundled Jinja2 templates.
        template_module = 'cement.cli.templates'
        template_handler = 'jinja2'
        # YAML config files with a ".yml" suffix.
        config_handler = 'yaml'
        config_file_suffix = '.yml'
        # Framework extensions loaded at setup time.
        extensions = [
            'generate',
            'yaml',
            'jinja2',
        ]
        # Handler classes registered with the app.
        handlers = [
            Base,
        ]
class CementTestApp(CementApp):
    """Variant of :class:`CementApp` tuned for use inside the test suite."""

    class Meta:
        # Empty argv so tests are not affected by the real command line.
        argv = []
        # No config files are read during tests.
        config_files = []
        # Do not call sys.exit() on close, so tests keep running.
        exit_on_close = False
def main(argv=None):
    """Run the cement CLI application.

    :param argv: optional list of command-line arguments; when ``None``
        the framework falls back to ``sys.argv[1:]``.
    """
    # Bug fix: ``argv`` was previously accepted but silently ignored.
    # Forward it as a Meta override so programmatic callers can control
    # argument parsing; passing None preserves the old default behavior.
    with CementApp(argv=argv) as app:
        try:
            app.run()
        except AssertionError as e:  # pragma: nocover
            print('AssertionError > %s' % e.args[0])  # pragma: nocover
            app.exit_code = 1  # pragma: nocover
        except CaughtSignal as e:  # pragma: nocover
            print('\n%s' % e)  # pragma: nocover
            app.exit_code = 0  # pragma: nocover
# Script entry point; only exercised when invoked directly, hence nocover.
if __name__ == '__main__':
    main()  # pragma: nocover
| bsd-3-clause | 1efba3f1fa81cf153d92db2b7264835c | 25.94 | 73 | 0.47216 | 4.057229 | false | true | false | false |
from matplotlib import colors
from seaborn._compat import register_colormap
_rocket_lut = [
[ 0.01060815, 0.01808215, 0.10018654],
[ 0.01428972, 0.02048237, 0.10374486],
[ 0.01831941, 0.0229766 , 0.10738511],
[ 0.02275049, 0.02554464, 0.11108639],
[ 0.02759119, 0.02818316, 0.11483751],
[ 0.03285175, 0.03088792, 0.11863035],
[ 0.03853466, 0.03365771, 0.12245873],
[ 0.04447016, 0.03648425, 0.12631831],
[ 0.05032105, 0.03936808, 0.13020508],
[ 0.05611171, 0.04224835, 0.13411624],
[ 0.0618531 , 0.04504866, 0.13804929],
[ 0.06755457, 0.04778179, 0.14200206],
[ 0.0732236 , 0.05045047, 0.14597263],
[ 0.0788708 , 0.05305461, 0.14995981],
[ 0.08450105, 0.05559631, 0.15396203],
[ 0.09011319, 0.05808059, 0.15797687],
[ 0.09572396, 0.06050127, 0.16200507],
[ 0.10132312, 0.06286782, 0.16604287],
[ 0.10692823, 0.06517224, 0.17009175],
[ 0.1125315 , 0.06742194, 0.17414848],
[ 0.11813947, 0.06961499, 0.17821272],
[ 0.12375803, 0.07174938, 0.18228425],
[ 0.12938228, 0.07383015, 0.18636053],
[ 0.13501631, 0.07585609, 0.19044109],
[ 0.14066867, 0.0778224 , 0.19452676],
[ 0.14633406, 0.07973393, 0.1986151 ],
[ 0.15201338, 0.08159108, 0.20270523],
[ 0.15770877, 0.08339312, 0.20679668],
[ 0.16342174, 0.0851396 , 0.21088893],
[ 0.16915387, 0.08682996, 0.21498104],
[ 0.17489524, 0.08848235, 0.2190294 ],
[ 0.18065495, 0.09009031, 0.22303512],
[ 0.18643324, 0.09165431, 0.22699705],
[ 0.19223028, 0.09317479, 0.23091409],
[ 0.19804623, 0.09465217, 0.23478512],
[ 0.20388117, 0.09608689, 0.23860907],
[ 0.20973515, 0.09747934, 0.24238489],
[ 0.21560818, 0.09882993, 0.24611154],
[ 0.22150014, 0.10013944, 0.2497868 ],
[ 0.22741085, 0.10140876, 0.25340813],
[ 0.23334047, 0.10263737, 0.25697736],
[ 0.23928891, 0.10382562, 0.2604936 ],
[ 0.24525608, 0.10497384, 0.26395596],
[ 0.25124182, 0.10608236, 0.26736359],
[ 0.25724602, 0.10715148, 0.27071569],
[ 0.26326851, 0.1081815 , 0.27401148],
[ 0.26930915, 0.1091727 , 0.2772502 ],
[ 0.27536766, 0.11012568, 0.28043021],
[ 0.28144375, 0.11104133, 0.2835489 ],
[ 0.2875374 , 0.11191896, 0.28660853],
[ 0.29364846, 0.11275876, 0.2896085 ],
[ 0.29977678, 0.11356089, 0.29254823],
[ 0.30592213, 0.11432553, 0.29542718],
[ 0.31208435, 0.11505284, 0.29824485],
[ 0.31826327, 0.1157429 , 0.30100076],
[ 0.32445869, 0.11639585, 0.30369448],
[ 0.33067031, 0.11701189, 0.30632563],
[ 0.33689808, 0.11759095, 0.3088938 ],
[ 0.34314168, 0.11813362, 0.31139721],
[ 0.34940101, 0.11863987, 0.3138355 ],
[ 0.355676 , 0.11910909, 0.31620996],
[ 0.36196644, 0.1195413 , 0.31852037],
[ 0.36827206, 0.11993653, 0.32076656],
[ 0.37459292, 0.12029443, 0.32294825],
[ 0.38092887, 0.12061482, 0.32506528],
[ 0.38727975, 0.12089756, 0.3271175 ],
[ 0.39364518, 0.12114272, 0.32910494],
[ 0.40002537, 0.12134964, 0.33102734],
[ 0.40642019, 0.12151801, 0.33288464],
[ 0.41282936, 0.12164769, 0.33467689],
[ 0.41925278, 0.12173833, 0.33640407],
[ 0.42569057, 0.12178916, 0.33806605],
[ 0.43214263, 0.12179973, 0.33966284],
[ 0.43860848, 0.12177004, 0.34119475],
[ 0.44508855, 0.12169883, 0.34266151],
[ 0.45158266, 0.12158557, 0.34406324],
[ 0.45809049, 0.12142996, 0.34540024],
[ 0.46461238, 0.12123063, 0.34667231],
[ 0.47114798, 0.12098721, 0.34787978],
[ 0.47769736, 0.12069864, 0.34902273],
[ 0.48426077, 0.12036349, 0.35010104],
[ 0.49083761, 0.11998161, 0.35111537],
[ 0.49742847, 0.11955087, 0.35206533],
[ 0.50403286, 0.11907081, 0.35295152],
[ 0.51065109, 0.11853959, 0.35377385],
[ 0.51728314, 0.1179558 , 0.35453252],
[ 0.52392883, 0.11731817, 0.35522789],
[ 0.53058853, 0.11662445, 0.35585982],
[ 0.53726173, 0.11587369, 0.35642903],
[ 0.54394898, 0.11506307, 0.35693521],
[ 0.5506426 , 0.11420757, 0.35737863],
[ 0.55734473, 0.11330456, 0.35775059],
[ 0.56405586, 0.11235265, 0.35804813],
[ 0.57077365, 0.11135597, 0.35827146],
[ 0.5774991 , 0.11031233, 0.35841679],
[ 0.58422945, 0.10922707, 0.35848469],
[ 0.59096382, 0.10810205, 0.35847347],
[ 0.59770215, 0.10693774, 0.35838029],
[ 0.60444226, 0.10573912, 0.35820487],
[ 0.61118304, 0.10450943, 0.35794557],
[ 0.61792306, 0.10325288, 0.35760108],
[ 0.62466162, 0.10197244, 0.35716891],
[ 0.63139686, 0.10067417, 0.35664819],
[ 0.63812122, 0.09938212, 0.35603757],
[ 0.64483795, 0.0980891 , 0.35533555],
[ 0.65154562, 0.09680192, 0.35454107],
[ 0.65824241, 0.09552918, 0.3536529 ],
[ 0.66492652, 0.09428017, 0.3526697 ],
[ 0.67159578, 0.09306598, 0.35159077],
[ 0.67824099, 0.09192342, 0.3504148 ],
[ 0.684863 , 0.09085633, 0.34914061],
[ 0.69146268, 0.0898675 , 0.34776864],
[ 0.69803757, 0.08897226, 0.3462986 ],
[ 0.70457834, 0.0882129 , 0.34473046],
[ 0.71108138, 0.08761223, 0.3430635 ],
[ 0.7175507 , 0.08716212, 0.34129974],
[ 0.72398193, 0.08688725, 0.33943958],
[ 0.73035829, 0.0868623 , 0.33748452],
[ 0.73669146, 0.08704683, 0.33543669],
[ 0.74297501, 0.08747196, 0.33329799],
[ 0.74919318, 0.08820542, 0.33107204],
[ 0.75535825, 0.08919792, 0.32876184],
[ 0.76145589, 0.09050716, 0.32637117],
[ 0.76748424, 0.09213602, 0.32390525],
[ 0.77344838, 0.09405684, 0.32136808],
[ 0.77932641, 0.09634794, 0.31876642],
[ 0.78513609, 0.09892473, 0.31610488],
[ 0.79085854, 0.10184672, 0.313391 ],
[ 0.7965014 , 0.10506637, 0.31063031],
[ 0.80205987, 0.10858333, 0.30783 ],
[ 0.80752799, 0.11239964, 0.30499738],
[ 0.81291606, 0.11645784, 0.30213802],
[ 0.81820481, 0.12080606, 0.29926105],
[ 0.82341472, 0.12535343, 0.2963705 ],
[ 0.82852822, 0.13014118, 0.29347474],
[ 0.83355779, 0.13511035, 0.29057852],
[ 0.83850183, 0.14025098, 0.2876878 ],
[ 0.84335441, 0.14556683, 0.28480819],
[ 0.84813096, 0.15099892, 0.281943 ],
[ 0.85281737, 0.15657772, 0.27909826],
[ 0.85742602, 0.1622583 , 0.27627462],
[ 0.86196552, 0.16801239, 0.27346473],
[ 0.86641628, 0.17387796, 0.27070818],
[ 0.87079129, 0.17982114, 0.26797378],
[ 0.87507281, 0.18587368, 0.26529697],
[ 0.87925878, 0.19203259, 0.26268136],
[ 0.8833417 , 0.19830556, 0.26014181],
[ 0.88731387, 0.20469941, 0.25769539],
[ 0.89116859, 0.21121788, 0.2553592 ],
[ 0.89490337, 0.21785614, 0.25314362],
[ 0.8985026 , 0.22463251, 0.25108745],
[ 0.90197527, 0.23152063, 0.24918223],
[ 0.90530097, 0.23854541, 0.24748098],
[ 0.90848638, 0.24568473, 0.24598324],
[ 0.911533 , 0.25292623, 0.24470258],
[ 0.9144225 , 0.26028902, 0.24369359],
[ 0.91717106, 0.26773821, 0.24294137],
[ 0.91978131, 0.27526191, 0.24245973],
[ 0.92223947, 0.28287251, 0.24229568],
[ 0.92456587, 0.29053388, 0.24242622],
[ 0.92676657, 0.29823282, 0.24285536],
[ 0.92882964, 0.30598085, 0.24362274],
[ 0.93078135, 0.31373977, 0.24468803],
[ 0.93262051, 0.3215093 , 0.24606461],
[ 0.93435067, 0.32928362, 0.24775328],
[ 0.93599076, 0.33703942, 0.24972157],
[ 0.93752831, 0.34479177, 0.25199928],
[ 0.93899289, 0.35250734, 0.25452808],
[ 0.94036561, 0.36020899, 0.25734661],
[ 0.94167588, 0.36786594, 0.2603949 ],
[ 0.94291042, 0.37549479, 0.26369821],
[ 0.94408513, 0.3830811 , 0.26722004],
[ 0.94520419, 0.39062329, 0.27094924],
[ 0.94625977, 0.39813168, 0.27489742],
[ 0.94727016, 0.4055909 , 0.27902322],
[ 0.94823505, 0.41300424, 0.28332283],
[ 0.94914549, 0.42038251, 0.28780969],
[ 0.95001704, 0.42771398, 0.29244728],
[ 0.95085121, 0.43500005, 0.29722817],
[ 0.95165009, 0.44224144, 0.30214494],
[ 0.9524044 , 0.44944853, 0.3072105 ],
[ 0.95312556, 0.45661389, 0.31239776],
[ 0.95381595, 0.46373781, 0.31769923],
[ 0.95447591, 0.47082238, 0.32310953],
[ 0.95510255, 0.47787236, 0.32862553],
[ 0.95569679, 0.48489115, 0.33421404],
[ 0.95626788, 0.49187351, 0.33985601],
[ 0.95681685, 0.49882008, 0.34555431],
[ 0.9573439 , 0.50573243, 0.35130912],
[ 0.95784842, 0.51261283, 0.35711942],
[ 0.95833051, 0.51946267, 0.36298589],
[ 0.95879054, 0.52628305, 0.36890904],
[ 0.95922872, 0.53307513, 0.3748895 ],
[ 0.95964538, 0.53983991, 0.38092784],
[ 0.96004345, 0.54657593, 0.3870292 ],
[ 0.96042097, 0.55328624, 0.39319057],
[ 0.96077819, 0.55997184, 0.39941173],
[ 0.9611152 , 0.5666337 , 0.40569343],
[ 0.96143273, 0.57327231, 0.41203603],
[ 0.96173392, 0.57988594, 0.41844491],
[ 0.96201757, 0.58647675, 0.42491751],
[ 0.96228344, 0.59304598, 0.43145271],
[ 0.96253168, 0.5995944 , 0.43805131],
[ 0.96276513, 0.60612062, 0.44471698],
[ 0.96298491, 0.6126247 , 0.45145074],
[ 0.96318967, 0.61910879, 0.45824902],
[ 0.96337949, 0.6255736 , 0.46511271],
[ 0.96355923, 0.63201624, 0.47204746],
[ 0.96372785, 0.63843852, 0.47905028],
[ 0.96388426, 0.64484214, 0.4861196 ],
[ 0.96403203, 0.65122535, 0.4932578 ],
[ 0.96417332, 0.65758729, 0.50046894],
[ 0.9643063 , 0.66393045, 0.5077467 ],
[ 0.96443322, 0.67025402, 0.51509334],
[ 0.96455845, 0.67655564, 0.52251447],
[ 0.96467922, 0.68283846, 0.53000231],
[ 0.96479861, 0.68910113, 0.53756026],
[ 0.96492035, 0.69534192, 0.5451917 ],
[ 0.96504223, 0.7015636 , 0.5528892 ],
[ 0.96516917, 0.70776351, 0.5606593 ],
[ 0.96530224, 0.71394212, 0.56849894],
[ 0.96544032, 0.72010124, 0.57640375],
[ 0.96559206, 0.72623592, 0.58438387],
[ 0.96575293, 0.73235058, 0.59242739],
[ 0.96592829, 0.73844258, 0.60053991],
[ 0.96612013, 0.74451182, 0.60871954],
[ 0.96632832, 0.75055966, 0.61696136],
[ 0.96656022, 0.75658231, 0.62527295],
[ 0.96681185, 0.76258381, 0.63364277],
[ 0.96709183, 0.76855969, 0.64207921],
[ 0.96739773, 0.77451297, 0.65057302],
[ 0.96773482, 0.78044149, 0.65912731],
[ 0.96810471, 0.78634563, 0.66773889],
[ 0.96850919, 0.79222565, 0.6764046 ],
[ 0.96893132, 0.79809112, 0.68512266],
[ 0.96935926, 0.80395415, 0.69383201],
[ 0.9698028 , 0.80981139, 0.70252255],
[ 0.97025511, 0.81566605, 0.71120296],
[ 0.97071849, 0.82151775, 0.71987163],
[ 0.97120159, 0.82736371, 0.72851999],
[ 0.97169389, 0.83320847, 0.73716071],
[ 0.97220061, 0.83905052, 0.74578903],
[ 0.97272597, 0.84488881, 0.75440141],
[ 0.97327085, 0.85072354, 0.76299805],
[ 0.97383206, 0.85655639, 0.77158353],
[ 0.97441222, 0.86238689, 0.78015619],
[ 0.97501782, 0.86821321, 0.78871034],
[ 0.97564391, 0.87403763, 0.79725261],
[ 0.97628674, 0.87986189, 0.8057883 ],
[ 0.97696114, 0.88568129, 0.81430324],
[ 0.97765722, 0.89149971, 0.82280948],
[ 0.97837585, 0.89731727, 0.83130786],
[ 0.97912374, 0.90313207, 0.83979337],
[ 0.979891 , 0.90894778, 0.84827858],
[ 0.98067764, 0.91476465, 0.85676611],
[ 0.98137749, 0.92061729, 0.86536915]
]
_mako_lut = [
[ 0.04503935, 0.01482344, 0.02092227],
[ 0.04933018, 0.01709292, 0.02535719],
[ 0.05356262, 0.01950702, 0.03018802],
[ 0.05774337, 0.02205989, 0.03545515],
[ 0.06188095, 0.02474764, 0.04115287],
[ 0.06598247, 0.0275665 , 0.04691409],
[ 0.07005374, 0.03051278, 0.05264306],
[ 0.07409947, 0.03358324, 0.05834631],
[ 0.07812339, 0.03677446, 0.06403249],
[ 0.08212852, 0.0400833 , 0.06970862],
[ 0.08611731, 0.04339148, 0.07538208],
[ 0.09009161, 0.04664706, 0.08105568],
[ 0.09405308, 0.04985685, 0.08673591],
[ 0.09800301, 0.05302279, 0.09242646],
[ 0.10194255, 0.05614641, 0.09813162],
[ 0.10587261, 0.05922941, 0.103854 ],
[ 0.1097942 , 0.06227277, 0.10959847],
[ 0.11370826, 0.06527747, 0.11536893],
[ 0.11761516, 0.06824548, 0.12116393],
[ 0.12151575, 0.07117741, 0.12698763],
[ 0.12541095, 0.07407363, 0.1328442 ],
[ 0.12930083, 0.07693611, 0.13873064],
[ 0.13317849, 0.07976988, 0.14465095],
[ 0.13701138, 0.08259683, 0.15060265],
[ 0.14079223, 0.08542126, 0.15659379],
[ 0.14452486, 0.08824175, 0.16262484],
[ 0.14820351, 0.09106304, 0.16869476],
[ 0.15183185, 0.09388372, 0.17480366],
[ 0.15540398, 0.09670855, 0.18094993],
[ 0.15892417, 0.09953561, 0.18713384],
[ 0.16238588, 0.10236998, 0.19335329],
[ 0.16579435, 0.10520905, 0.19960847],
[ 0.16914226, 0.10805832, 0.20589698],
[ 0.17243586, 0.11091443, 0.21221911],
[ 0.17566717, 0.11378321, 0.21857219],
[ 0.17884322, 0.11666074, 0.2249565 ],
[ 0.18195582, 0.11955283, 0.23136943],
[ 0.18501213, 0.12245547, 0.23781116],
[ 0.18800459, 0.12537395, 0.24427914],
[ 0.19093944, 0.1283047 , 0.25077369],
[ 0.19381092, 0.13125179, 0.25729255],
[ 0.19662307, 0.13421303, 0.26383543],
[ 0.19937337, 0.13719028, 0.27040111],
[ 0.20206187, 0.14018372, 0.27698891],
[ 0.20469116, 0.14319196, 0.28359861],
[ 0.20725547, 0.14621882, 0.29022775],
[ 0.20976258, 0.14925954, 0.29687795],
[ 0.21220409, 0.15231929, 0.30354703],
[ 0.21458611, 0.15539445, 0.31023563],
[ 0.21690827, 0.15848519, 0.31694355],
[ 0.21916481, 0.16159489, 0.32366939],
[ 0.2213631 , 0.16471913, 0.33041431],
[ 0.22349947, 0.1678599 , 0.33717781],
[ 0.2255714 , 0.1710185 , 0.34395925],
[ 0.22758415, 0.17419169, 0.35075983],
[ 0.22953569, 0.17738041, 0.35757941],
[ 0.23142077, 0.18058733, 0.3644173 ],
[ 0.2332454 , 0.18380872, 0.37127514],
[ 0.2350092 , 0.18704459, 0.3781528 ],
[ 0.23670785, 0.190297 , 0.38504973],
[ 0.23834119, 0.19356547, 0.39196711],
[ 0.23991189, 0.19684817, 0.39890581],
[ 0.24141903, 0.20014508, 0.4058667 ],
[ 0.24286214, 0.20345642, 0.4128484 ],
[ 0.24423453, 0.20678459, 0.41985299],
[ 0.24554109, 0.21012669, 0.42688124],
[ 0.2467815 , 0.21348266, 0.43393244],
[ 0.24795393, 0.21685249, 0.4410088 ],
[ 0.24905614, 0.22023618, 0.448113 ],
[ 0.25007383, 0.22365053, 0.45519562],
[ 0.25098926, 0.22710664, 0.46223892],
[ 0.25179696, 0.23060342, 0.46925447],
[ 0.25249346, 0.23414353, 0.47623196],
[ 0.25307401, 0.23772973, 0.48316271],
[ 0.25353152, 0.24136961, 0.49001976],
[ 0.25386167, 0.24506548, 0.49679407],
[ 0.25406082, 0.2488164 , 0.50348932],
[ 0.25412435, 0.25262843, 0.51007843],
[ 0.25404842, 0.25650743, 0.51653282],
[ 0.25383134, 0.26044852, 0.52286845],
[ 0.2534705 , 0.26446165, 0.52903422],
[ 0.25296722, 0.2685428 , 0.53503572],
[ 0.2523226 , 0.27269346, 0.54085315],
[ 0.25153974, 0.27691629, 0.54645752],
[ 0.25062402, 0.28120467, 0.55185939],
[ 0.24958205, 0.28556371, 0.55701246],
[ 0.24842386, 0.28998148, 0.56194601],
[ 0.24715928, 0.29446327, 0.56660884],
[ 0.24580099, 0.29899398, 0.57104399],
[ 0.24436202, 0.30357852, 0.57519929],
[ 0.24285591, 0.30819938, 0.57913247],
[ 0.24129828, 0.31286235, 0.58278615],
[ 0.23970131, 0.3175495 , 0.5862272 ],
[ 0.23807973, 0.32226344, 0.58941872],
[ 0.23644557, 0.32699241, 0.59240198],
[ 0.2348113 , 0.33173196, 0.59518282],
[ 0.23318874, 0.33648036, 0.59775543],
[ 0.2315855 , 0.34122763, 0.60016456],
[ 0.23001121, 0.34597357, 0.60240251],
[ 0.2284748 , 0.35071512, 0.6044784 ],
[ 0.22698081, 0.35544612, 0.60642528],
[ 0.22553305, 0.36016515, 0.60825252],
[ 0.22413977, 0.36487341, 0.60994938],
[ 0.22280246, 0.36956728, 0.61154118],
[ 0.22152555, 0.37424409, 0.61304472],
[ 0.22030752, 0.37890437, 0.61446646],
[ 0.2191538 , 0.38354668, 0.61581561],
[ 0.21806257, 0.38817169, 0.61709794],
[ 0.21703799, 0.39277882, 0.61831922],
[ 0.21607792, 0.39736958, 0.61948028],
[ 0.21518463, 0.40194196, 0.62059763],
[ 0.21435467, 0.40649717, 0.62167507],
[ 0.21358663, 0.41103579, 0.62271724],
[ 0.21288172, 0.41555771, 0.62373011],
[ 0.21223835, 0.42006355, 0.62471794],
[ 0.21165312, 0.42455441, 0.62568371],
[ 0.21112526, 0.42903064, 0.6266318 ],
[ 0.21065161, 0.43349321, 0.62756504],
[ 0.21023306, 0.43794288, 0.62848279],
[ 0.20985996, 0.44238227, 0.62938329],
[ 0.20951045, 0.44680966, 0.63030696],
[ 0.20916709, 0.45122981, 0.63124483],
[ 0.20882976, 0.45564335, 0.63219599],
[ 0.20849798, 0.46005094, 0.63315928],
[ 0.20817199, 0.46445309, 0.63413391],
[ 0.20785149, 0.46885041, 0.63511876],
[ 0.20753716, 0.47324327, 0.63611321],
[ 0.20722876, 0.47763224, 0.63711608],
[ 0.20692679, 0.48201774, 0.63812656],
[ 0.20663156, 0.48640018, 0.63914367],
[ 0.20634336, 0.49078002, 0.64016638],
[ 0.20606303, 0.49515755, 0.6411939 ],
[ 0.20578999, 0.49953341, 0.64222457],
[ 0.20552612, 0.50390766, 0.64325811],
[ 0.20527189, 0.50828072, 0.64429331],
[ 0.20502868, 0.51265277, 0.64532947],
[ 0.20479718, 0.51702417, 0.64636539],
[ 0.20457804, 0.52139527, 0.64739979],
[ 0.20437304, 0.52576622, 0.64843198],
[ 0.20418396, 0.53013715, 0.64946117],
[ 0.20401238, 0.53450825, 0.65048638],
[ 0.20385896, 0.53887991, 0.65150606],
[ 0.20372653, 0.54325208, 0.65251978],
[ 0.20361709, 0.5476249 , 0.6535266 ],
[ 0.20353258, 0.55199854, 0.65452542],
[ 0.20347472, 0.55637318, 0.655515 ],
[ 0.20344718, 0.56074869, 0.65649508],
[ 0.20345161, 0.56512531, 0.65746419],
[ 0.20349089, 0.56950304, 0.65842151],
[ 0.20356842, 0.57388184, 0.65936642],
[ 0.20368663, 0.57826181, 0.66029768],
[ 0.20384884, 0.58264293, 0.6612145 ],
[ 0.20405904, 0.58702506, 0.66211645],
[ 0.20431921, 0.59140842, 0.66300179],
[ 0.20463464, 0.59579264, 0.66387079],
[ 0.20500731, 0.60017798, 0.66472159],
[ 0.20544449, 0.60456387, 0.66555409],
[ 0.20596097, 0.60894927, 0.66636568],
[ 0.20654832, 0.61333521, 0.66715744],
[ 0.20721003, 0.61772167, 0.66792838],
[ 0.20795035, 0.62210845, 0.66867802],
[ 0.20877302, 0.62649546, 0.66940555],
[ 0.20968223, 0.63088252, 0.6701105 ],
[ 0.21068163, 0.63526951, 0.67079211],
[ 0.21177544, 0.63965621, 0.67145005],
[ 0.21298582, 0.64404072, 0.67208182],
[ 0.21430361, 0.64842404, 0.67268861],
[ 0.21572716, 0.65280655, 0.67326978],
[ 0.21726052, 0.65718791, 0.6738255 ],
[ 0.21890636, 0.66156803, 0.67435491],
[ 0.220668 , 0.66594665, 0.67485792],
[ 0.22255447, 0.67032297, 0.67533374],
[ 0.22458372, 0.67469531, 0.67578061],
[ 0.22673713, 0.67906542, 0.67620044],
[ 0.22901625, 0.6834332 , 0.67659251],
[ 0.23142316, 0.68779836, 0.67695703],
[ 0.23395924, 0.69216072, 0.67729378],
[ 0.23663857, 0.69651881, 0.67760151],
[ 0.23946645, 0.70087194, 0.67788018],
[ 0.24242624, 0.70522162, 0.67813088],
[ 0.24549008, 0.70957083, 0.67835215],
[ 0.24863372, 0.71392166, 0.67854868],
[ 0.25187832, 0.71827158, 0.67872193],
[ 0.25524083, 0.72261873, 0.67887024],
[ 0.25870947, 0.72696469, 0.67898912],
[ 0.26229238, 0.73130855, 0.67907645],
[ 0.26604085, 0.73564353, 0.67914062],
[ 0.26993099, 0.73997282, 0.67917264],
[ 0.27397488, 0.74429484, 0.67917096],
[ 0.27822463, 0.74860229, 0.67914468],
[ 0.28264201, 0.75290034, 0.67907959],
[ 0.2873016 , 0.75717817, 0.67899164],
[ 0.29215894, 0.76144162, 0.67886578],
[ 0.29729823, 0.76567816, 0.67871894],
[ 0.30268199, 0.76989232, 0.67853896],
[ 0.30835665, 0.77407636, 0.67833512],
[ 0.31435139, 0.77822478, 0.67811118],
[ 0.3206671 , 0.78233575, 0.67786729],
[ 0.32733158, 0.78640315, 0.67761027],
[ 0.33437168, 0.79042043, 0.67734882],
[ 0.34182112, 0.79437948, 0.67709394],
[ 0.34968889, 0.79827511, 0.67685638],
[ 0.35799244, 0.80210037, 0.67664969],
[ 0.36675371, 0.80584651, 0.67649539],
[ 0.3759816 , 0.80950627, 0.67641393],
[ 0.38566792, 0.81307432, 0.67642947],
[ 0.39579804, 0.81654592, 0.67656899],
[ 0.40634556, 0.81991799, 0.67686215],
[ 0.41730243, 0.82318339, 0.67735255],
[ 0.4285828 , 0.82635051, 0.6780564 ],
[ 0.44012728, 0.82942353, 0.67900049],
[ 0.45189421, 0.83240398, 0.68021733],
[ 0.46378379, 0.83530763, 0.6817062 ],
[ 0.47573199, 0.83814472, 0.68347352],
[ 0.48769865, 0.84092197, 0.68552698],
[ 0.49962354, 0.84365379, 0.68783929],
[ 0.5114027 , 0.8463718 , 0.69029789],
[ 0.52301693, 0.84908401, 0.69288545],
[ 0.53447549, 0.85179048, 0.69561066],
[ 0.54578602, 0.8544913 , 0.69848331],
[ 0.55695565, 0.85718723, 0.70150427],
[ 0.56798832, 0.85987893, 0.70468261],
[ 0.57888639, 0.86256715, 0.70802931],
[ 0.5896541 , 0.8652532 , 0.71154204],
[ 0.60028928, 0.86793835, 0.71523675],
[ 0.61079441, 0.87062438, 0.71910895],
[ 0.62116633, 0.87331311, 0.72317003],
[ 0.63140509, 0.87600675, 0.72741689],
[ 0.64150735, 0.87870746, 0.73185717],
[ 0.65147219, 0.8814179 , 0.73648495],
[ 0.66129632, 0.8841403 , 0.74130658],
[ 0.67097934, 0.88687758, 0.74631123],
[ 0.68051833, 0.88963189, 0.75150483],
[ 0.68991419, 0.89240612, 0.75687187],
[ 0.69916533, 0.89520211, 0.76241714],
[ 0.70827373, 0.89802257, 0.76812286],
[ 0.71723995, 0.90086891, 0.77399039],
[ 0.72606665, 0.90374337, 0.7800041 ],
[ 0.73475675, 0.90664718, 0.78615802],
[ 0.74331358, 0.90958151, 0.79244474],
[ 0.75174143, 0.91254787, 0.79884925],
[ 0.76004473, 0.91554656, 0.80536823],
[ 0.76827704, 0.91856549, 0.81196513],
[ 0.77647029, 0.921603 , 0.81855729],
[ 0.78462009, 0.92466151, 0.82514119],
[ 0.79273542, 0.92773848, 0.83172131],
[ 0.8008109 , 0.93083672, 0.83829355],
[ 0.80885107, 0.93395528, 0.84485982],
[ 0.81685878, 0.9370938 , 0.85142101],
[ 0.82483206, 0.94025378, 0.8579751 ],
[ 0.83277661, 0.94343371, 0.86452477],
[ 0.84069127, 0.94663473, 0.87106853],
[ 0.84857662, 0.9498573 , 0.8776059 ],
[ 0.8564431 , 0.95309792, 0.88414253],
[ 0.86429066, 0.95635719, 0.89067759],
[ 0.87218969, 0.95960708, 0.89725384]
]
_vlag_lut = [
[ 0.13850039, 0.41331206, 0.74052025],
[ 0.15077609, 0.41762684, 0.73970427],
[ 0.16235219, 0.4219191 , 0.7389667 ],
[ 0.1733322 , 0.42619024, 0.73832537],
[ 0.18382538, 0.43044226, 0.73776764],
[ 0.19394034, 0.4346772 , 0.73725867],
[ 0.20367115, 0.43889576, 0.73685314],
[ 0.21313625, 0.44310003, 0.73648045],
[ 0.22231173, 0.44729079, 0.73619681],
[ 0.23125148, 0.45146945, 0.73597803],
[ 0.23998101, 0.45563715, 0.7358223 ],
[ 0.24853358, 0.45979489, 0.73571524],
[ 0.25691416, 0.4639437 , 0.73566943],
[ 0.26513894, 0.46808455, 0.73568319],
[ 0.27322194, 0.47221835, 0.73575497],
[ 0.28117543, 0.47634598, 0.73588332],
[ 0.28901021, 0.48046826, 0.73606686],
[ 0.2967358 , 0.48458597, 0.73630433],
[ 0.30436071, 0.48869986, 0.73659451],
[ 0.3118955 , 0.49281055, 0.73693255],
[ 0.31935389, 0.49691847, 0.73730851],
[ 0.32672701, 0.5010247 , 0.73774013],
[ 0.33402607, 0.50512971, 0.73821941],
[ 0.34125337, 0.50923419, 0.73874905],
[ 0.34840921, 0.51333892, 0.73933402],
[ 0.35551826, 0.51744353, 0.73994642],
[ 0.3625676 , 0.52154929, 0.74060763],
[ 0.36956356, 0.52565656, 0.74131327],
[ 0.37649902, 0.52976642, 0.74207698],
[ 0.38340273, 0.53387791, 0.74286286],
[ 0.39025859, 0.53799253, 0.7436962 ],
[ 0.39706821, 0.54211081, 0.744578 ],
[ 0.40384046, 0.54623277, 0.74549872],
[ 0.41058241, 0.55035849, 0.74645094],
[ 0.41728385, 0.55448919, 0.74745174],
[ 0.42395178, 0.55862494, 0.74849357],
[ 0.4305964 , 0.56276546, 0.74956387],
[ 0.4372044 , 0.56691228, 0.75068412],
[ 0.4437909 , 0.57106468, 0.75183427],
[ 0.45035117, 0.5752235 , 0.75302312],
[ 0.45687824, 0.57938983, 0.75426297],
[ 0.46339713, 0.58356191, 0.75551816],
[ 0.46988778, 0.58774195, 0.75682037],
[ 0.47635605, 0.59192986, 0.75816245],
[ 0.48281101, 0.5961252 , 0.75953212],
[ 0.4892374 , 0.60032986, 0.76095418],
[ 0.49566225, 0.60454154, 0.76238852],
[ 0.50206137, 0.60876307, 0.76387371],
[ 0.50845128, 0.61299312, 0.76538551],
[ 0.5148258 , 0.61723272, 0.76693475],
[ 0.52118385, 0.62148236, 0.76852436],
[ 0.52753571, 0.62574126, 0.77013939],
[ 0.53386831, 0.63001125, 0.77180152],
[ 0.54020159, 0.63429038, 0.7734803 ],
[ 0.54651272, 0.63858165, 0.77521306],
[ 0.55282975, 0.64288207, 0.77695608],
[ 0.55912585, 0.64719519, 0.77875327],
[ 0.56542599, 0.65151828, 0.78056551],
[ 0.57170924, 0.65585426, 0.78242747],
[ 0.57799572, 0.6602009 , 0.78430751],
[ 0.58426817, 0.66456073, 0.78623458],
[ 0.590544 , 0.66893178, 0.78818117],
[ 0.59680758, 0.67331643, 0.79017369],
[ 0.60307553, 0.67771273, 0.79218572],
[ 0.60934065, 0.68212194, 0.79422987],
[ 0.61559495, 0.68654548, 0.7963202 ],
[ 0.62185554, 0.69098125, 0.79842918],
[ 0.62810662, 0.69543176, 0.80058381],
[ 0.63436425, 0.69989499, 0.80275812],
[ 0.64061445, 0.70437326, 0.80497621],
[ 0.6468706 , 0.70886488, 0.80721641],
[ 0.65312213, 0.7133717 , 0.80949719],
[ 0.65937818, 0.71789261, 0.81180392],
[ 0.66563334, 0.72242871, 0.81414642],
[ 0.67189155, 0.72697967, 0.81651872],
[ 0.67815314, 0.73154569, 0.81892097],
[ 0.68441395, 0.73612771, 0.82136094],
[ 0.69068321, 0.74072452, 0.82382353],
[ 0.69694776, 0.7453385 , 0.82633199],
[ 0.70322431, 0.74996721, 0.8288583 ],
[ 0.70949595, 0.75461368, 0.83143221],
[ 0.7157774 , 0.75927574, 0.83402904],
[ 0.72206299, 0.76395461, 0.83665922],
[ 0.72835227, 0.76865061, 0.8393242 ],
[ 0.73465238, 0.7733628 , 0.84201224],
[ 0.74094862, 0.77809393, 0.84474951],
[ 0.74725683, 0.78284158, 0.84750915],
[ 0.75357103, 0.78760701, 0.85030217],
[ 0.75988961, 0.79239077, 0.85313207],
[ 0.76621987, 0.79719185, 0.85598668],
[ 0.77255045, 0.8020125 , 0.85888658],
[ 0.77889241, 0.80685102, 0.86181298],
[ 0.78524572, 0.81170768, 0.86476656],
[ 0.79159841, 0.81658489, 0.86776906],
[ 0.79796459, 0.82148036, 0.8707962 ],
[ 0.80434168, 0.82639479, 0.87385315],
[ 0.8107221 , 0.83132983, 0.87695392],
[ 0.81711301, 0.8362844 , 0.88008641],
[ 0.82351479, 0.84125863, 0.88325045],
[ 0.82992772, 0.84625263, 0.88644594],
[ 0.83634359, 0.85126806, 0.8896878 ],
[ 0.84277295, 0.85630293, 0.89295721],
[ 0.84921192, 0.86135782, 0.89626076],
[ 0.85566206, 0.866432 , 0.89959467],
[ 0.86211514, 0.87152627, 0.90297183],
[ 0.86857483, 0.87663856, 0.90638248],
[ 0.87504231, 0.88176648, 0.90981938],
[ 0.88151194, 0.88690782, 0.91328493],
[ 0.88797938, 0.89205857, 0.91677544],
[ 0.89443865, 0.89721298, 0.9202854 ],
[ 0.90088204, 0.90236294, 0.92380601],
[ 0.90729768, 0.90749778, 0.92732797],
[ 0.91367037, 0.91260329, 0.93083814],
[ 0.91998105, 0.91766106, 0.93431861],
[ 0.92620596, 0.92264789, 0.93774647],
[ 0.93231683, 0.9275351 , 0.94109192],
[ 0.93827772, 0.9322888 , 0.94432312],
[ 0.94404755, 0.93686925, 0.94740137],
[ 0.94958284, 0.94123072, 0.95027696],
[ 0.95482682, 0.9453245 , 0.95291103],
[ 0.9597248 , 0.94909728, 0.95525103],
[ 0.96422552, 0.95249273, 0.95723271],
[ 0.96826161, 0.95545812, 0.95882188],
[ 0.97178458, 0.95793984, 0.95995705],
[ 0.97474105, 0.95989142, 0.96059997],
[ 0.97708604, 0.96127366, 0.96071853],
[ 0.97877855, 0.96205832, 0.96030095],
[ 0.97978484, 0.96222949, 0.95935496],
[ 0.9805997 , 0.96155216, 0.95813083],
[ 0.98152619, 0.95993719, 0.95639322],
[ 0.9819726 , 0.95766608, 0.95399269],
[ 0.98191855, 0.9547873 , 0.95098107],
[ 0.98138514, 0.95134771, 0.94740644],
[ 0.98040845, 0.94739906, 0.94332125],
[ 0.97902107, 0.94300131, 0.93878672],
[ 0.97729348, 0.93820409, 0.93385135],
[ 0.9752533 , 0.933073 , 0.92858252],
[ 0.97297834, 0.92765261, 0.92302309],
[ 0.97049104, 0.92200317, 0.91723505],
[ 0.96784372, 0.91616744, 0.91126063],
[ 0.96507281, 0.91018664, 0.90514124],
[ 0.96222034, 0.90409203, 0.89890756],
[ 0.9593079 , 0.89791478, 0.89259122],
[ 0.95635626, 0.89167908, 0.88621654],
[ 0.95338303, 0.88540373, 0.87980238],
[ 0.95040174, 0.87910333, 0.87336339],
[ 0.94742246, 0.87278899, 0.86691076],
[ 0.94445249, 0.86646893, 0.86045277],
[ 0.94150476, 0.86014606, 0.85399191],
[ 0.93857394, 0.85382798, 0.84753642],
[ 0.93566206, 0.84751766, 0.84108935],
[ 0.93277194, 0.8412164 , 0.83465197],
[ 0.92990106, 0.83492672, 0.82822708],
[ 0.92704736, 0.82865028, 0.82181656],
[ 0.92422703, 0.82238092, 0.81541333],
[ 0.92142581, 0.81612448, 0.80902415],
[ 0.91864501, 0.80988032, 0.80264838],
[ 0.91587578, 0.80365187, 0.79629001],
[ 0.9131367 , 0.79743115, 0.78994 ],
[ 0.91041602, 0.79122265, 0.78360361],
[ 0.90771071, 0.78502727, 0.77728196],
[ 0.90501581, 0.77884674, 0.7709771 ],
[ 0.90235365, 0.77267117, 0.76467793],
[ 0.8997019 , 0.76650962, 0.75839484],
[ 0.89705346, 0.76036481, 0.752131 ],
[ 0.89444021, 0.75422253, 0.74587047],
[ 0.89183355, 0.74809474, 0.73962689],
[ 0.88923216, 0.74198168, 0.73340061],
[ 0.88665892, 0.73587283, 0.72717995],
[ 0.88408839, 0.72977904, 0.72097718],
[ 0.88153537, 0.72369332, 0.71478461],
[ 0.87899389, 0.7176179 , 0.70860487],
[ 0.87645157, 0.71155805, 0.7024439 ],
[ 0.8739399 , 0.70549893, 0.6962854 ],
[ 0.87142626, 0.6994551 , 0.69014561],
[ 0.8689268 , 0.69341868, 0.68401597],
[ 0.86643562, 0.687392 , 0.67789917],
[ 0.86394434, 0.68137863, 0.67179927],
[ 0.86147586, 0.67536728, 0.665704 ],
[ 0.85899928, 0.66937226, 0.6596292 ],
[ 0.85654668, 0.66337773, 0.6535577 ],
[ 0.85408818, 0.65739772, 0.64750494],
[ 0.85164413, 0.65142189, 0.64145983],
[ 0.84920091, 0.6454565 , 0.63542932],
[ 0.84676427, 0.63949827, 0.62941 ],
[ 0.84433231, 0.63354773, 0.62340261],
[ 0.84190106, 0.62760645, 0.61740899],
[ 0.83947935, 0.62166951, 0.61142404],
[ 0.8370538 , 0.61574332, 0.60545478],
[ 0.83463975, 0.60981951, 0.59949247],
[ 0.83221877, 0.60390724, 0.593547 ],
[ 0.82980985, 0.59799607, 0.58760751],
[ 0.82740268, 0.59209095, 0.58167944],
[ 0.82498638, 0.5861973 , 0.57576866],
[ 0.82258181, 0.5803034 , 0.56986307],
[ 0.82016611, 0.57442123, 0.56397539],
[ 0.81776305, 0.56853725, 0.55809173],
[ 0.81534551, 0.56266602, 0.55222741],
[ 0.81294293, 0.55679056, 0.5463651 ],
[ 0.81052113, 0.55092973, 0.54052443],
[ 0.80811509, 0.54506305, 0.53468464],
[ 0.80568952, 0.53921036, 0.52886622],
[ 0.80327506, 0.53335335, 0.52305077],
[ 0.80084727, 0.52750583, 0.51725256],
[ 0.79842217, 0.5216578 , 0.51146173],
[ 0.79599382, 0.51581223, 0.50568155],
[ 0.79355781, 0.50997127, 0.49991444],
[ 0.79112596, 0.50412707, 0.49415289],
[ 0.78867442, 0.49829386, 0.48841129],
[ 0.7862306 , 0.49245398, 0.48267247],
[ 0.7837687 , 0.48662309, 0.47695216],
[ 0.78130809, 0.4807883 , 0.47123805],
[ 0.77884467, 0.47495151, 0.46553236],
[ 0.77636283, 0.46912235, 0.45984473],
[ 0.77388383, 0.46328617, 0.45416141],
[ 0.77138912, 0.45745466, 0.44849398],
[ 0.76888874, 0.45162042, 0.44283573],
[ 0.76638802, 0.44577901, 0.43718292],
[ 0.76386116, 0.43994762, 0.43155211],
[ 0.76133542, 0.43410655, 0.42592523],
[ 0.75880631, 0.42825801, 0.42030488],
[ 0.75624913, 0.42241905, 0.41470727],
[ 0.7536919 , 0.41656866, 0.40911347],
[ 0.75112748, 0.41071104, 0.40352792],
[ 0.74854331, 0.40485474, 0.3979589 ],
[ 0.74594723, 0.39899309, 0.39240088],
[ 0.74334332, 0.39312199, 0.38685075],
[ 0.74073277, 0.38723941, 0.3813074 ],
[ 0.73809409, 0.38136133, 0.37578553],
[ 0.73544692, 0.37547129, 0.37027123],
[ 0.73278943, 0.36956954, 0.36476549],
[ 0.73011829, 0.36365761, 0.35927038],
[ 0.72743485, 0.35773314, 0.35378465],
[ 0.72472722, 0.35180504, 0.34831662],
[ 0.72200473, 0.34586421, 0.34285937],
[ 0.71927052, 0.33990649, 0.33741033],
[ 0.71652049, 0.33393396, 0.33197219],
[ 0.71375362, 0.32794602, 0.32654545],
[ 0.71096951, 0.32194148, 0.32113016],
[ 0.70816772, 0.31591904, 0.31572637],
[ 0.70534784, 0.30987734, 0.31033414],
[ 0.70250944, 0.30381489, 0.30495353],
[ 0.69965211, 0.2977301 , 0.2995846 ],
[ 0.6967754 , 0.29162126, 0.29422741],
[ 0.69388446, 0.28548074, 0.28887769],
[ 0.69097561, 0.2793096 , 0.28353795],
[ 0.68803513, 0.27311993, 0.27821876],
[ 0.6850794 , 0.26689144, 0.27290694],
[ 0.682108 , 0.26062114, 0.26760246],
[ 0.67911013, 0.2543177 , 0.26231367],
[ 0.67609393, 0.24796818, 0.25703372],
[ 0.67305921, 0.24156846, 0.25176238],
[ 0.67000176, 0.23511902, 0.24650278],
[ 0.66693423, 0.22859879, 0.24124404],
[ 0.6638441 , 0.22201742, 0.2359961 ],
[ 0.66080672, 0.21526712, 0.23069468]
]
_icefire_lut = [
[ 0.73936227, 0.90443867, 0.85757238],
[ 0.72888063, 0.89639109, 0.85488394],
[ 0.71834255, 0.88842162, 0.8521605 ],
[ 0.70773866, 0.88052939, 0.849422 ],
[ 0.69706215, 0.87271313, 0.84668315],
[ 0.68629021, 0.86497329, 0.84398721],
[ 0.67543654, 0.85730617, 0.84130969],
[ 0.66448539, 0.84971123, 0.83868005],
[ 0.65342679, 0.84218728, 0.83611512],
[ 0.64231804, 0.83471867, 0.83358584],
[ 0.63117745, 0.827294 , 0.83113431],
[ 0.62000484, 0.81991069, 0.82876741],
[ 0.60879435, 0.81256797, 0.82648905],
[ 0.59754118, 0.80526458, 0.82430414],
[ 0.58624247, 0.79799884, 0.82221573],
[ 0.57489525, 0.7907688 , 0.82022901],
[ 0.56349779, 0.78357215, 0.81834861],
[ 0.55204294, 0.77640827, 0.81657563],
[ 0.54052516, 0.76927562, 0.81491462],
[ 0.52894085, 0.76217215, 0.81336913],
[ 0.51728854, 0.75509528, 0.81194156],
[ 0.50555676, 0.74804469, 0.81063503],
[ 0.49373871, 0.7410187 , 0.80945242],
[ 0.48183174, 0.73401449, 0.80839675],
[ 0.46982587, 0.72703075, 0.80747097],
[ 0.45770893, 0.72006648, 0.80667756],
[ 0.44547249, 0.71311941, 0.80601991],
[ 0.43318643, 0.70617126, 0.80549278],
[ 0.42110294, 0.69916972, 0.80506683],
[ 0.40925101, 0.69211059, 0.80473246],
[ 0.3976693 , 0.68498786, 0.80448272],
[ 0.38632002, 0.67781125, 0.80431024],
[ 0.37523981, 0.67057537, 0.80420832],
[ 0.36442578, 0.66328229, 0.80417474],
[ 0.35385939, 0.65593699, 0.80420591],
[ 0.34358916, 0.64853177, 0.8043 ],
[ 0.33355526, 0.64107876, 0.80445484],
[ 0.32383062, 0.63356578, 0.80467091],
[ 0.31434372, 0.62600624, 0.8049475 ],
[ 0.30516161, 0.618389 , 0.80528692],
[ 0.29623491, 0.61072284, 0.80569021],
[ 0.28759072, 0.60300319, 0.80616055],
[ 0.27923924, 0.59522877, 0.80669803],
[ 0.27114651, 0.5874047 , 0.80730545],
[ 0.26337153, 0.57952055, 0.80799113],
[ 0.25588696, 0.57157984, 0.80875922],
[ 0.248686 , 0.56358255, 0.80961366],
[ 0.24180668, 0.55552289, 0.81055123],
[ 0.23526251, 0.54739477, 0.8115939 ],
[ 0.22921445, 0.53918506, 0.81267292],
[ 0.22397687, 0.53086094, 0.8137141 ],
[ 0.21977058, 0.52241482, 0.81457651],
[ 0.21658989, 0.51384321, 0.81528511],
[ 0.21452772, 0.50514155, 0.81577278],
[ 0.21372783, 0.49630865, 0.81589566],
[ 0.21409503, 0.48734861, 0.81566163],
[ 0.2157176 , 0.47827123, 0.81487615],
[ 0.21842857, 0.46909168, 0.81351614],
[ 0.22211705, 0.45983212, 0.81146983],
[ 0.22665681, 0.45052233, 0.80860217],
[ 0.23176013, 0.44119137, 0.80494325],
[ 0.23727775, 0.43187704, 0.80038017],
[ 0.24298285, 0.42261123, 0.79493267],
[ 0.24865068, 0.41341842, 0.78869164],
[ 0.25423116, 0.40433127, 0.78155831],
[ 0.25950239, 0.39535521, 0.77376848],
[ 0.2644736 , 0.38651212, 0.76524809],
[ 0.26901584, 0.37779582, 0.75621942],
[ 0.27318141, 0.36922056, 0.746605 ],
[ 0.27690355, 0.3607736 , 0.73659374],
[ 0.28023585, 0.35244234, 0.72622103],
[ 0.28306009, 0.34438449, 0.71500731],
[ 0.28535896, 0.33660243, 0.70303975],
[ 0.28708711, 0.32912157, 0.69034504],
[ 0.28816354, 0.32200604, 0.67684067],
[ 0.28862749, 0.31519824, 0.66278813],
[ 0.28847904, 0.30869064, 0.6482815 ],
[ 0.28770912, 0.30250126, 0.63331265],
[ 0.28640325, 0.29655509, 0.61811374],
[ 0.28458943, 0.29082155, 0.60280913],
[ 0.28233561, 0.28527482, 0.58742866],
[ 0.27967038, 0.2798938 , 0.57204225],
[ 0.27665361, 0.27465357, 0.55667809],
[ 0.27332564, 0.2695165 , 0.54145387],
[ 0.26973851, 0.26447054, 0.52634916],
[ 0.2659204 , 0.25949691, 0.511417 ],
[ 0.26190145, 0.25458123, 0.49668768],
[ 0.2577151 , 0.24971691, 0.48214874],
[ 0.25337618, 0.24490494, 0.46778758],
[ 0.24890842, 0.24013332, 0.45363816],
[ 0.24433654, 0.23539226, 0.4397245 ],
[ 0.23967922, 0.23067729, 0.4260591 ],
[ 0.23495608, 0.22598894, 0.41262952],
[ 0.23018113, 0.22132414, 0.39945577],
[ 0.22534609, 0.21670847, 0.38645794],
[ 0.22048761, 0.21211723, 0.37372555],
[ 0.2156198 , 0.20755389, 0.36125301],
[ 0.21074637, 0.20302717, 0.34903192],
[ 0.20586893, 0.19855368, 0.33701661],
[ 0.20101757, 0.19411573, 0.32529173],
[ 0.19619947, 0.18972425, 0.31383846],
[ 0.19140726, 0.18540157, 0.30260777],
[ 0.1866769 , 0.1811332 , 0.29166583],
[ 0.18201285, 0.17694992, 0.28088776],
[ 0.17745228, 0.17282141, 0.27044211],
[ 0.17300684, 0.16876921, 0.26024893],
[ 0.16868273, 0.16479861, 0.25034479],
[ 0.16448691, 0.16091728, 0.24075373],
[ 0.16043195, 0.15714351, 0.23141745],
[ 0.15652427, 0.15348248, 0.22238175],
[ 0.15277065, 0.14994111, 0.21368395],
[ 0.14918274, 0.14653431, 0.20529486],
[ 0.14577095, 0.14327403, 0.19720829],
[ 0.14254381, 0.14016944, 0.18944326],
[ 0.13951035, 0.13723063, 0.18201072],
[ 0.13667798, 0.13446606, 0.17493774],
[ 0.13405762, 0.13188822, 0.16820842],
[ 0.13165767, 0.12950667, 0.16183275],
[ 0.12948748, 0.12733187, 0.15580631],
[ 0.12755435, 0.1253723 , 0.15014098],
[ 0.12586516, 0.12363617, 0.1448459 ],
[ 0.12442647, 0.12213143, 0.13992571],
[ 0.12324241, 0.12086419, 0.13539995],
[ 0.12232067, 0.11984278, 0.13124644],
[ 0.12166209, 0.11907077, 0.12749671],
[ 0.12126982, 0.11855309, 0.12415079],
[ 0.12114244, 0.11829179, 0.1212385 ],
[ 0.12127766, 0.11828837, 0.11878534],
[ 0.12284806, 0.1179729 , 0.11772022],
[ 0.12619498, 0.11721796, 0.11770203],
[ 0.129968 , 0.11663788, 0.11792377],
[ 0.13410011, 0.11625146, 0.11839138],
[ 0.13855459, 0.11606618, 0.11910584],
[ 0.14333775, 0.11607038, 0.1200606 ],
[ 0.148417 , 0.11626929, 0.12125453],
[ 0.15377389, 0.11666192, 0.12268364],
[ 0.15941427, 0.11723486, 0.12433911],
[ 0.16533376, 0.11797856, 0.12621303],
[ 0.17152547, 0.11888403, 0.12829735],
[ 0.17797765, 0.11994436, 0.13058435],
[ 0.18468769, 0.12114722, 0.13306426],
[ 0.19165663, 0.12247737, 0.13572616],
[ 0.19884415, 0.12394381, 0.1385669 ],
[ 0.20627181, 0.12551883, 0.14157124],
[ 0.21394877, 0.12718055, 0.14472604],
[ 0.22184572, 0.12893119, 0.14802579],
[ 0.22994394, 0.13076731, 0.15146314],
[ 0.23823937, 0.13267611, 0.15502793],
[ 0.24676041, 0.13462172, 0.15870321],
[ 0.25546457, 0.13661751, 0.16248722],
[ 0.26433628, 0.13865956, 0.16637301],
[ 0.27341345, 0.14070412, 0.17034221],
[ 0.28264773, 0.14277192, 0.1743957 ],
[ 0.29202272, 0.14486161, 0.17852793],
[ 0.30159648, 0.14691224, 0.1827169 ],
[ 0.31129002, 0.14897583, 0.18695213],
[ 0.32111555, 0.15103351, 0.19119629],
[ 0.33107961, 0.1530674 , 0.19543758],
[ 0.34119892, 0.15504762, 0.1996803 ],
[ 0.35142388, 0.15701131, 0.20389086],
[ 0.36178937, 0.1589124 , 0.20807639],
[ 0.37229381, 0.16073993, 0.21223189],
[ 0.38288348, 0.16254006, 0.2163249 ],
[ 0.39359592, 0.16426336, 0.22036577],
[ 0.40444332, 0.16588767, 0.22434027],
[ 0.41537995, 0.16745325, 0.2282297 ],
[ 0.42640867, 0.16894939, 0.23202755],
[ 0.43754706, 0.17034847, 0.23572899],
[ 0.44878564, 0.1716535 , 0.23932344],
[ 0.4601126 , 0.17287365, 0.24278607],
[ 0.47151732, 0.17401641, 0.24610337],
[ 0.48300689, 0.17506676, 0.2492737 ],
[ 0.49458302, 0.17601892, 0.25227688],
[ 0.50623876, 0.17687777, 0.255096 ],
[ 0.5179623 , 0.17765528, 0.2577162 ],
[ 0.52975234, 0.17835232, 0.2601134 ],
[ 0.54159776, 0.17898292, 0.26226847],
[ 0.55348804, 0.17956232, 0.26416003],
[ 0.56541729, 0.18010175, 0.26575971],
[ 0.57736669, 0.180631 , 0.26704888],
[ 0.58932081, 0.18117827, 0.26800409],
[ 0.60127582, 0.18175888, 0.26858488],
[ 0.61319563, 0.1824336 , 0.2687872 ],
[ 0.62506376, 0.18324015, 0.26858301],
[ 0.63681202, 0.18430173, 0.26795276],
[ 0.64842603, 0.18565472, 0.26689463],
[ 0.65988195, 0.18734638, 0.26543435],
[ 0.67111966, 0.18948885, 0.26357955],
[ 0.68209194, 0.19216636, 0.26137175],
[ 0.69281185, 0.19535326, 0.25887063],
[ 0.70335022, 0.19891271, 0.25617971],
[ 0.71375229, 0.20276438, 0.25331365],
[ 0.72401436, 0.20691287, 0.25027366],
[ 0.73407638, 0.21145051, 0.24710661],
[ 0.74396983, 0.21631913, 0.24380715],
[ 0.75361506, 0.22163653, 0.24043996],
[ 0.7630579 , 0.22731637, 0.23700095],
[ 0.77222228, 0.23346231, 0.23356628],
[ 0.78115441, 0.23998404, 0.23013825],
[ 0.78979746, 0.24694858, 0.22678822],
[ 0.79819286, 0.25427223, 0.22352658],
[ 0.80630444, 0.26198807, 0.22040877],
[ 0.81417437, 0.27001406, 0.21744645],
[ 0.82177364, 0.27837336, 0.21468316],
[ 0.82915955, 0.28696963, 0.21210766],
[ 0.83628628, 0.2958499 , 0.20977813],
[ 0.84322168, 0.30491136, 0.20766435],
[ 0.84995458, 0.31415945, 0.2057863 ],
[ 0.85648867, 0.32358058, 0.20415327],
[ 0.86286243, 0.33312058, 0.20274969],
[ 0.86908321, 0.34276705, 0.20157271],
[ 0.87512876, 0.3525416 , 0.20064949],
[ 0.88100349, 0.36243385, 0.19999078],
[ 0.8866469 , 0.37249496, 0.1997976 ],
[ 0.89203964, 0.38273475, 0.20013431],
[ 0.89713496, 0.39318156, 0.20121514],
[ 0.90195099, 0.40380687, 0.20301555],
[ 0.90648379, 0.41460191, 0.20558847],
[ 0.9106967 , 0.42557857, 0.20918529],
[ 0.91463791, 0.43668557, 0.21367954],
[ 0.91830723, 0.44790913, 0.21916352],
[ 0.92171507, 0.45922856, 0.22568002],
[ 0.92491786, 0.4705936 , 0.23308207],
[ 0.92790792, 0.48200153, 0.24145932],
[ 0.93073701, 0.49341219, 0.25065486],
[ 0.93343918, 0.5048017 , 0.26056148],
[ 0.93602064, 0.51616486, 0.27118485],
[ 0.93850535, 0.52748892, 0.28242464],
[ 0.94092933, 0.53875462, 0.29416042],
[ 0.94330011, 0.5499628 , 0.30634189],
[ 0.94563159, 0.56110987, 0.31891624],
[ 0.94792955, 0.57219822, 0.33184256],
[ 0.95020929, 0.5832232 , 0.34508419],
[ 0.95247324, 0.59419035, 0.35859866],
[ 0.95471709, 0.60510869, 0.37236035],
[ 0.95698411, 0.61595766, 0.38629631],
[ 0.95923863, 0.62676473, 0.40043317],
[ 0.9615041 , 0.6375203 , 0.41474106],
[ 0.96371553, 0.64826619, 0.42928335],
[ 0.96591497, 0.65899621, 0.44380444],
[ 0.96809871, 0.66971662, 0.45830232],
[ 0.9702495 , 0.6804394 , 0.47280492],
[ 0.9723881 , 0.69115622, 0.48729272],
[ 0.97450723, 0.70187358, 0.50178034],
[ 0.9766108 , 0.712592 , 0.51626837],
[ 0.97871716, 0.72330511, 0.53074053],
[ 0.98082222, 0.73401769, 0.54520694],
[ 0.9829001 , 0.74474445, 0.5597019 ],
[ 0.98497466, 0.75547635, 0.57420239],
[ 0.98705581, 0.76621129, 0.58870185],
[ 0.98913325, 0.77695637, 0.60321626],
[ 0.99119918, 0.78771716, 0.61775821],
[ 0.9932672 , 0.79848979, 0.63231691],
[ 0.99535958, 0.80926704, 0.64687278],
[ 0.99740544, 0.82008078, 0.66150571],
[ 0.9992197 , 0.83100723, 0.6764127 ]
]
_flare_lut = [
[0.92907237, 0.68878959, 0.50411509],
[0.92891402, 0.68494686, 0.50173994],
[0.92864754, 0.68116207, 0.4993754],
[0.92836112, 0.67738527, 0.49701572],
[0.9280599, 0.67361354, 0.49466044],
[0.92775569, 0.66983999, 0.49230866],
[0.9274375, 0.66607098, 0.48996097],
[0.927111, 0.66230315, 0.48761688],
[0.92677996, 0.6585342, 0.485276],
[0.92644317, 0.65476476, 0.48293832],
[0.92609759, 0.65099658, 0.48060392],
[0.925747, 0.64722729, 0.47827244],
[0.92539502, 0.64345456, 0.47594352],
[0.92503106, 0.6396848, 0.47361782],
[0.92466877, 0.6359095, 0.47129427],
[0.92429828, 0.63213463, 0.46897349],
[0.92392172, 0.62835879, 0.46665526],
[0.92354597, 0.62457749, 0.46433898],
[0.9231622, 0.6207962, 0.46202524],
[0.92277222, 0.61701365, 0.45971384],
[0.92237978, 0.61322733, 0.45740444],
[0.92198615, 0.60943622, 0.45509686],
[0.92158735, 0.60564276, 0.45279137],
[0.92118373, 0.60184659, 0.45048789],
[0.92077582, 0.59804722, 0.44818634],
[0.92036413, 0.59424414, 0.44588663],
[0.91994924, 0.5904368, 0.44358868],
[0.91952943, 0.58662619, 0.4412926],
[0.91910675, 0.58281075, 0.43899817],
[0.91868096, 0.57899046, 0.4367054],
[0.91825103, 0.57516584, 0.43441436],
[0.91781857, 0.57133556, 0.43212486],
[0.9173814, 0.56750099, 0.4298371],
[0.91694139, 0.56366058, 0.42755089],
[0.91649756, 0.55981483, 0.42526631],
[0.91604942, 0.55596387, 0.42298339],
[0.9155979, 0.55210684, 0.42070204],
[0.9151409, 0.54824485, 0.4184247],
[0.91466138, 0.54438817, 0.41617858],
[0.91416896, 0.54052962, 0.41396347],
[0.91366559, 0.53666778, 0.41177769],
[0.91315173, 0.53280208, 0.40962196],
[0.91262605, 0.52893336, 0.40749715],
[0.91208866, 0.52506133, 0.40540404],
[0.91153952, 0.52118582, 0.40334346],
[0.91097732, 0.51730767, 0.4013163],
[0.910403, 0.51342591, 0.39932342],
[0.90981494, 0.50954168, 0.39736571],
[0.90921368, 0.5056543, 0.39544411],
[0.90859797, 0.50176463, 0.39355952],
[0.90796841, 0.49787195, 0.39171297],
[0.90732341, 0.4939774, 0.38990532],
[0.90666382, 0.49008006, 0.38813773],
[0.90598815, 0.486181, 0.38641107],
[0.90529624, 0.48228017, 0.38472641],
[0.90458808, 0.47837738, 0.38308489],
[0.90386248, 0.47447348, 0.38148746],
[0.90311921, 0.4705685, 0.37993524],
[0.90235809, 0.46666239, 0.37842943],
[0.90157824, 0.46275577, 0.37697105],
[0.90077904, 0.45884905, 0.37556121],
[0.89995995, 0.45494253, 0.37420106],
[0.89912041, 0.4510366, 0.37289175],
[0.8982602, 0.44713126, 0.37163458],
[0.89737819, 0.44322747, 0.37043052],
[0.89647387, 0.43932557, 0.36928078],
[0.89554477, 0.43542759, 0.36818855],
[0.89458871, 0.4315354, 0.36715654],
[0.89360794, 0.42764714, 0.36618273],
[0.89260152, 0.42376366, 0.36526813],
[0.8915687, 0.41988565, 0.36441384],
[0.89050882, 0.41601371, 0.36362102],
[0.8894159, 0.41215334, 0.36289639],
[0.888292, 0.40830288, 0.36223756],
[0.88713784, 0.40446193, 0.36164328],
[0.88595253, 0.40063149, 0.36111438],
[0.88473115, 0.39681635, 0.3606566],
[0.88347246, 0.39301805, 0.36027074],
[0.88217931, 0.38923439, 0.35995244],
[0.880851, 0.38546632, 0.35970244],
[0.87947728, 0.38172422, 0.35953127],
[0.87806542, 0.37800172, 0.35942941],
[0.87661509, 0.37429964, 0.35939659],
[0.87511668, 0.37062819, 0.35944178],
[0.87357554, 0.36698279, 0.35955811],
[0.87199254, 0.3633634, 0.35974223],
[0.87035691, 0.35978174, 0.36000516],
[0.86867647, 0.35623087, 0.36033559],
[0.86694949, 0.35271349, 0.36073358],
[0.86516775, 0.34923921, 0.36120624],
[0.86333996, 0.34580008, 0.36174113],
[0.86145909, 0.3424046, 0.36234402],
[0.85952586, 0.33905327, 0.36301129],
[0.85754536, 0.33574168, 0.36373567],
[0.855514, 0.33247568, 0.36451271],
[0.85344392, 0.32924217, 0.36533344],
[0.8513284, 0.32604977, 0.36620106],
[0.84916723, 0.32289973, 0.36711424],
[0.84696243, 0.31979068, 0.36806976],
[0.84470627, 0.31673295, 0.36907066],
[0.84240761, 0.31371695, 0.37010969],
[0.84005337, 0.31075974, 0.37119284],
[0.83765537, 0.30784814, 0.3723105],
[0.83520234, 0.30499724, 0.37346726],
[0.83270291, 0.30219766, 0.37465552],
[0.83014895, 0.29946081, 0.37587769],
[0.82754694, 0.29677989, 0.37712733],
[0.82489111, 0.29416352, 0.37840532],
[0.82218644, 0.29160665, 0.37970606],
[0.81942908, 0.28911553, 0.38102921],
[0.81662276, 0.28668665, 0.38236999],
[0.81376555, 0.28432371, 0.383727],
[0.81085964, 0.28202508, 0.38509649],
[0.8079055, 0.27979128, 0.38647583],
[0.80490309, 0.27762348, 0.3878626],
[0.80185613, 0.2755178, 0.38925253],
[0.79876118, 0.27347974, 0.39064559],
[0.79562644, 0.27149928, 0.39203532],
[0.79244362, 0.2695883, 0.39342447],
[0.78922456, 0.26773176, 0.3948046],
[0.78596161, 0.26594053, 0.39617873],
[0.7826624, 0.26420493, 0.39754146],
[0.77932717, 0.26252522, 0.39889102],
[0.77595363, 0.2609049, 0.4002279],
[0.77254999, 0.25933319, 0.40154704],
[0.76911107, 0.25781758, 0.40284959],
[0.76564158, 0.25635173, 0.40413341],
[0.76214598, 0.25492998, 0.40539471],
[0.75861834, 0.25356035, 0.40663694],
[0.75506533, 0.25223402, 0.40785559],
[0.75148963, 0.2509473, 0.40904966],
[0.74788835, 0.24970413, 0.41022028],
[0.74426345, 0.24850191, 0.41136599],
[0.74061927, 0.24733457, 0.41248516],
[0.73695678, 0.24620072, 0.41357737],
[0.73327278, 0.24510469, 0.41464364],
[0.72957096, 0.24404127, 0.4156828],
[0.72585394, 0.24300672, 0.41669383],
[0.7221226, 0.24199971, 0.41767651],
[0.71837612, 0.24102046, 0.41863486],
[0.71463236, 0.24004289, 0.41956983],
[0.7108932, 0.23906316, 0.42048681],
[0.70715842, 0.23808142, 0.42138647],
[0.70342811, 0.2370976, 0.42226844],
[0.69970218, 0.23611179, 0.42313282],
[0.69598055, 0.2351247, 0.42397678],
[0.69226314, 0.23413578, 0.42480327],
[0.68854988, 0.23314511, 0.42561234],
[0.68484064, 0.23215279, 0.42640419],
[0.68113541, 0.23115942, 0.42717615],
[0.67743412, 0.23016472, 0.42792989],
[0.67373662, 0.22916861, 0.42866642],
[0.67004287, 0.22817117, 0.42938576],
[0.66635279, 0.22717328, 0.43008427],
[0.66266621, 0.22617435, 0.43076552],
[0.65898313, 0.22517434, 0.43142956],
[0.65530349, 0.22417381, 0.43207427],
[0.65162696, 0.22317307, 0.4327001],
[0.64795375, 0.22217149, 0.43330852],
[0.64428351, 0.22116972, 0.43389854],
[0.64061624, 0.22016818, 0.43446845],
[0.63695183, 0.21916625, 0.43502123],
[0.63329016, 0.21816454, 0.43555493],
[0.62963102, 0.2171635, 0.43606881],
[0.62597451, 0.21616235, 0.43656529],
[0.62232019, 0.21516239, 0.43704153],
[0.61866821, 0.21416307, 0.43749868],
[0.61501835, 0.21316435, 0.43793808],
[0.61137029, 0.21216761, 0.4383556],
[0.60772426, 0.2111715, 0.43875552],
[0.60407977, 0.21017746, 0.43913439],
[0.60043678, 0.20918503, 0.43949412],
[0.59679524, 0.20819447, 0.43983393],
[0.59315487, 0.20720639, 0.44015254],
[0.58951566, 0.20622027, 0.44045213],
[0.58587715, 0.20523751, 0.44072926],
[0.5822395, 0.20425693, 0.44098758],
[0.57860222, 0.20328034, 0.44122241],
[0.57496549, 0.20230637, 0.44143805],
[0.57132875, 0.20133689, 0.4416298],
[0.56769215, 0.20037071, 0.44180142],
[0.5640552, 0.19940936, 0.44194923],
[0.56041794, 0.19845221, 0.44207535],
[0.55678004, 0.1975, 0.44217824],
[0.55314129, 0.19655316, 0.44225723],
[0.54950166, 0.19561118, 0.44231412],
[0.54585987, 0.19467771, 0.44234111],
[0.54221157, 0.19375869, 0.44233698],
[0.5385549, 0.19285696, 0.44229959],
[0.5348913, 0.19197036, 0.44222958],
[0.53122177, 0.1910974, 0.44212735],
[0.52754464, 0.19024042, 0.44199159],
[0.52386353, 0.18939409, 0.44182449],
[0.52017476, 0.18856368, 0.44162345],
[0.51648277, 0.18774266, 0.44139128],
[0.51278481, 0.18693492, 0.44112605],
[0.50908361, 0.18613639, 0.4408295],
[0.50537784, 0.18534893, 0.44050064],
[0.50166912, 0.18457008, 0.44014054],
[0.49795686, 0.18380056, 0.43974881],
[0.49424218, 0.18303865, 0.43932623],
[0.49052472, 0.18228477, 0.43887255],
[0.48680565, 0.1815371, 0.43838867],
[0.48308419, 0.18079663, 0.43787408],
[0.47936222, 0.18006056, 0.43733022],
[0.47563799, 0.17933127, 0.43675585],
[0.47191466, 0.17860416, 0.43615337],
[0.46818879, 0.17788392, 0.43552047],
[0.46446454, 0.17716458, 0.43486036],
[0.46073893, 0.17645017, 0.43417097],
[0.45701462, 0.17573691, 0.43345429],
[0.45329097, 0.17502549, 0.43271025],
[0.44956744, 0.17431649, 0.4319386],
[0.44584668, 0.17360625, 0.43114133],
[0.44212538, 0.17289906, 0.43031642],
[0.43840678, 0.17219041, 0.42946642],
[0.43469046, 0.17148074, 0.42859124],
[0.4309749, 0.17077192, 0.42769008],
[0.42726297, 0.17006003, 0.42676519],
[0.42355299, 0.16934709, 0.42581586],
[0.41984535, 0.16863258, 0.42484219],
[0.41614149, 0.16791429, 0.42384614],
[0.41244029, 0.16719372, 0.42282661],
[0.40874177, 0.16647061, 0.42178429],
[0.40504765, 0.16574261, 0.42072062],
[0.401357, 0.16501079, 0.41963528],
[0.397669, 0.16427607, 0.418528],
[0.39398585, 0.16353554, 0.41740053],
[0.39030735, 0.16278924, 0.41625344],
[0.3866314, 0.16203977, 0.41508517],
[0.38295904, 0.16128519, 0.41389849],
[0.37928736, 0.16052483, 0.41270599],
[0.37562649, 0.15974704, 0.41151182],
[0.37197803, 0.15895049, 0.41031532],
[0.36833779, 0.15813871, 0.40911916],
[0.36470944, 0.15730861, 0.40792149],
[0.36109117, 0.15646169, 0.40672362],
[0.35748213, 0.15559861, 0.40552633],
[0.353885, 0.15471714, 0.40432831],
[0.35029682, 0.15381967, 0.4031316],
[0.34671861, 0.1529053, 0.40193587],
[0.34315191, 0.15197275, 0.40074049],
[0.33959331, 0.15102466, 0.3995478],
[0.33604378, 0.15006017, 0.39835754],
[0.33250529, 0.14907766, 0.39716879],
[0.32897621, 0.14807831, 0.39598285],
[0.3254559, 0.14706248, 0.39480044],
[0.32194567, 0.14602909, 0.39362106],
[0.31844477, 0.14497857, 0.39244549],
[0.31494974, 0.14391333, 0.39127626],
[0.31146605, 0.14282918, 0.39011024],
[0.30798857, 0.1417297, 0.38895105],
[0.30451661, 0.14061515, 0.38779953],
[0.30105136, 0.13948445, 0.38665531],
[0.2975886, 0.1383403, 0.38552159],
[0.29408557, 0.13721193, 0.38442775]
]
_crest_lut = [
[0.6468274, 0.80289262, 0.56592265],
[0.64233318, 0.80081141, 0.56639461],
[0.63791969, 0.7987162, 0.56674976],
[0.6335316, 0.79661833, 0.56706128],
[0.62915226, 0.7945212, 0.56735066],
[0.62477862, 0.79242543, 0.56762143],
[0.62042003, 0.79032918, 0.56786129],
[0.61606327, 0.78823508, 0.56808666],
[0.61171322, 0.78614216, 0.56829092],
[0.60736933, 0.78405055, 0.56847436],
[0.60302658, 0.78196121, 0.56864272],
[0.59868708, 0.77987374, 0.56879289],
[0.59435366, 0.77778758, 0.56892099],
[0.59001953, 0.77570403, 0.56903477],
[0.58568753, 0.77362254, 0.56913028],
[0.58135593, 0.77154342, 0.56920908],
[0.57702623, 0.76946638, 0.56926895],
[0.57269165, 0.76739266, 0.5693172],
[0.56835934, 0.76532092, 0.56934507],
[0.56402533, 0.76325185, 0.56935664],
[0.55968429, 0.76118643, 0.56935732],
[0.55534159, 0.75912361, 0.56934052],
[0.55099572, 0.75706366, 0.56930743],
[0.54664626, 0.75500662, 0.56925799],
[0.54228969, 0.75295306, 0.56919546],
[0.53792417, 0.75090328, 0.56912118],
[0.53355172, 0.74885687, 0.5690324],
[0.52917169, 0.74681387, 0.56892926],
[0.52478243, 0.74477453, 0.56881287],
[0.52038338, 0.74273888, 0.56868323],
[0.5159739, 0.74070697, 0.56854039],
[0.51155269, 0.73867895, 0.56838507],
[0.50711872, 0.73665492, 0.56821764],
[0.50267118, 0.73463494, 0.56803826],
[0.49822926, 0.73261388, 0.56785146],
[0.49381422, 0.73058524, 0.56767484],
[0.48942421, 0.72854938, 0.56751036],
[0.48505993, 0.72650623, 0.56735752],
[0.48072207, 0.72445575, 0.56721583],
[0.4764113, 0.72239788, 0.56708475],
[0.47212827, 0.72033258, 0.56696376],
[0.46787361, 0.71825983, 0.56685231],
[0.46364792, 0.71617961, 0.56674986],
[0.45945271, 0.71409167, 0.56665625],
[0.45528878, 0.71199595, 0.56657103],
[0.45115557, 0.70989276, 0.5664931],
[0.44705356, 0.70778212, 0.56642189],
[0.44298321, 0.70566406, 0.56635683],
[0.43894492, 0.70353863, 0.56629734],
[0.43493911, 0.70140588, 0.56624286],
[0.43096612, 0.69926587, 0.5661928],
[0.42702625, 0.69711868, 0.56614659],
[0.42311977, 0.69496438, 0.56610368],
[0.41924689, 0.69280308, 0.56606355],
[0.41540778, 0.69063486, 0.56602564],
[0.41160259, 0.68845984, 0.56598944],
[0.40783143, 0.68627814, 0.56595436],
[0.40409434, 0.68408988, 0.56591994],
[0.40039134, 0.68189518, 0.56588564],
[0.39672238, 0.6796942, 0.56585103],
[0.39308781, 0.67748696, 0.56581581],
[0.38949137, 0.67527276, 0.56578084],
[0.38592889, 0.67305266, 0.56574422],
[0.38240013, 0.67082685, 0.56570561],
[0.37890483, 0.66859548, 0.56566462],
[0.37544276, 0.66635871, 0.56562081],
[0.37201365, 0.66411673, 0.56557372],
[0.36861709, 0.6618697, 0.5655231],
[0.36525264, 0.65961782, 0.56546873],
[0.36191986, 0.65736125, 0.56541032],
[0.35861935, 0.65509998, 0.56534768],
[0.35535621, 0.65283302, 0.56528211],
[0.35212361, 0.65056188, 0.56521171],
[0.34892097, 0.64828676, 0.56513633],
[0.34574785, 0.64600783, 0.56505539],
[0.34260357, 0.64372528, 0.5649689],
[0.33948744, 0.64143931, 0.56487679],
[0.33639887, 0.6391501, 0.56477869],
[0.33334501, 0.63685626, 0.56467661],
[0.33031952, 0.63455911, 0.564569],
[0.3273199, 0.63225924, 0.56445488],
[0.32434526, 0.62995682, 0.56433457],
[0.32139487, 0.62765201, 0.56420795],
[0.31846807, 0.62534504, 0.56407446],
[0.3155731, 0.62303426, 0.56393695],
[0.31270304, 0.62072111, 0.56379321],
[0.30985436, 0.61840624, 0.56364307],
[0.30702635, 0.61608984, 0.56348606],
[0.30421803, 0.61377205, 0.56332267],
[0.30143611, 0.61145167, 0.56315419],
[0.29867863, 0.60912907, 0.56298054],
[0.29593872, 0.60680554, 0.56280022],
[0.29321538, 0.60448121, 0.56261376],
[0.2905079, 0.60215628, 0.56242036],
[0.28782827, 0.5998285, 0.56222366],
[0.28516521, 0.59749996, 0.56202093],
[0.28251558, 0.59517119, 0.56181204],
[0.27987847, 0.59284232, 0.56159709],
[0.27726216, 0.59051189, 0.56137785],
[0.27466434, 0.58818027, 0.56115433],
[0.2720767, 0.58584893, 0.56092486],
[0.26949829, 0.58351797, 0.56068983],
[0.26693801, 0.58118582, 0.56045121],
[0.26439366, 0.57885288, 0.56020858],
[0.26185616, 0.57652063, 0.55996077],
[0.25932459, 0.57418919, 0.55970795],
[0.25681303, 0.57185614, 0.55945297],
[0.25431024, 0.56952337, 0.55919385],
[0.25180492, 0.56719255, 0.5589305],
[0.24929311, 0.56486397, 0.5586654],
[0.24678356, 0.56253666, 0.55839491],
[0.24426587, 0.56021153, 0.55812473],
[0.24174022, 0.55788852, 0.55785448],
[0.23921167, 0.55556705, 0.55758211],
[0.23668315, 0.55324675, 0.55730676],
[0.23414742, 0.55092825, 0.55703167],
[0.23160473, 0.54861143, 0.5567573],
[0.22905996, 0.54629572, 0.55648168],
[0.22651648, 0.54398082, 0.5562029],
[0.22396709, 0.54166721, 0.55592542],
[0.22141221, 0.53935481, 0.55564885],
[0.21885269, 0.53704347, 0.55537294],
[0.21629986, 0.53473208, 0.55509319],
[0.21374297, 0.53242154, 0.5548144],
[0.21118255, 0.53011166, 0.55453708],
[0.2086192, 0.52780237, 0.55426067],
[0.20605624, 0.52549322, 0.55398479],
[0.20350004, 0.5231837, 0.55370601],
[0.20094292, 0.52087429, 0.55342884],
[0.19838567, 0.51856489, 0.55315283],
[0.19582911, 0.51625531, 0.55287818],
[0.19327413, 0.51394542, 0.55260469],
[0.19072933, 0.51163448, 0.5523289],
[0.18819045, 0.50932268, 0.55205372],
[0.18565609, 0.50701014, 0.55177937],
[0.18312739, 0.50469666, 0.55150597],
[0.18060561, 0.50238204, 0.55123374],
[0.178092, 0.50006616, 0.55096224],
[0.17558808, 0.49774882, 0.55069118],
[0.17310341, 0.49542924, 0.5504176],
[0.17063111, 0.49310789, 0.55014445],
[0.1681728, 0.49078458, 0.54987159],
[0.1657302, 0.48845913, 0.54959882],
[0.16330517, 0.48613135, 0.54932605],
[0.16089963, 0.48380104, 0.54905306],
[0.15851561, 0.48146803, 0.54877953],
[0.15615526, 0.47913212, 0.54850526],
[0.15382083, 0.47679313, 0.54822991],
[0.15151471, 0.47445087, 0.54795318],
[0.14924112, 0.47210502, 0.54767411],
[0.1470032, 0.46975537, 0.54739226],
[0.14480101, 0.46740187, 0.54710832],
[0.14263736, 0.46504434, 0.54682188],
[0.14051521, 0.46268258, 0.54653253],
[0.13843761, 0.46031639, 0.54623985],
[0.13640774, 0.45794558, 0.5459434],
[0.13442887, 0.45556994, 0.54564272],
[0.1325044, 0.45318928, 0.54533736],
[0.13063777, 0.4508034, 0.54502674],
[0.12883252, 0.44841211, 0.5447104],
[0.12709242, 0.44601517, 0.54438795],
[0.1254209, 0.44361244, 0.54405855],
[0.12382162, 0.44120373, 0.54372156],
[0.12229818, 0.43878887, 0.54337634],
[0.12085453, 0.4363676, 0.54302253],
[0.11949938, 0.43393955, 0.54265715],
[0.11823166, 0.43150478, 0.54228104],
[0.11705496, 0.42906306, 0.54189388],
[0.115972, 0.42661431, 0.54149449],
[0.11498598, 0.42415835, 0.54108222],
[0.11409965, 0.42169502, 0.54065622],
[0.11331533, 0.41922424, 0.5402155],
[0.11263542, 0.41674582, 0.53975931],
[0.1120615, 0.4142597, 0.53928656],
[0.11159738, 0.41176567, 0.53879549],
[0.11125248, 0.40926325, 0.53828203],
[0.11101698, 0.40675289, 0.53774864],
[0.11089152, 0.40423445, 0.53719455],
[0.11085121, 0.4017095, 0.53662425],
[0.11087217, 0.39917938, 0.53604354],
[0.11095515, 0.39664394, 0.53545166],
[0.11110676, 0.39410282, 0.53484509],
[0.11131735, 0.39155635, 0.53422678],
[0.11158595, 0.38900446, 0.53359634],
[0.11191139, 0.38644711, 0.5329534],
[0.11229224, 0.38388426, 0.53229748],
[0.11273683, 0.38131546, 0.53162393],
[0.11323438, 0.37874109, 0.53093619],
[0.11378271, 0.37616112, 0.53023413],
[0.11437992, 0.37357557, 0.52951727],
[0.11502681, 0.37098429, 0.52878396],
[0.11572661, 0.36838709, 0.52803124],
[0.11646936, 0.36578429, 0.52726234],
[0.11725299, 0.3631759, 0.52647685],
[0.1180755, 0.36056193, 0.52567436],
[0.1189438, 0.35794203, 0.5248497],
[0.11984752, 0.35531657, 0.52400649],
[0.1207833, 0.35268564, 0.52314492],
[0.12174895, 0.35004927, 0.52226461],
[0.12274959, 0.34740723, 0.52136104],
[0.12377809, 0.34475975, 0.52043639],
[0.12482961, 0.34210702, 0.51949179],
[0.125902, 0.33944908, 0.51852688],
[0.12699998, 0.33678574, 0.51753708],
[0.12811691, 0.33411727, 0.51652464],
[0.12924811, 0.33144384, 0.51549084],
[0.13039157, 0.32876552, 0.51443538],
[0.13155228, 0.32608217, 0.51335321],
[0.13272282, 0.32339407, 0.51224759],
[0.13389954, 0.32070138, 0.51111946],
[0.13508064, 0.31800419, 0.50996862],
[0.13627149, 0.31530238, 0.50878942],
[0.13746376, 0.31259627, 0.50758645],
[0.13865499, 0.30988598, 0.50636017],
[0.13984364, 0.30717161, 0.50511042],
[0.14103515, 0.30445309, 0.50383119],
[0.14222093, 0.30173071, 0.50252813],
[0.14339946, 0.2990046, 0.50120127],
[0.14456941, 0.29627483, 0.49985054],
[0.14573579, 0.29354139, 0.49847009],
[0.14689091, 0.29080452, 0.49706566],
[0.1480336, 0.28806432, 0.49563732],
[0.1491628, 0.28532086, 0.49418508],
[0.15028228, 0.28257418, 0.49270402],
[0.15138673, 0.27982444, 0.49119848],
[0.15247457, 0.27707172, 0.48966925],
[0.15354487, 0.2743161, 0.48811641],
[0.15459955, 0.27155765, 0.4865371],
[0.15563716, 0.26879642, 0.4849321],
[0.1566572, 0.26603191, 0.48330429],
[0.15765823, 0.26326032, 0.48167456],
[0.15862147, 0.26048295, 0.48005785],
[0.15954301, 0.25770084, 0.47845341],
[0.16043267, 0.25491144, 0.4768626],
[0.16129262, 0.25211406, 0.4752857],
[0.1621119, 0.24931169, 0.47372076],
[0.16290577, 0.24649998, 0.47217025],
[0.16366819, 0.24368054, 0.47063302],
[0.1644021, 0.24085237, 0.46910949],
[0.16510882, 0.2380149, 0.46759982],
[0.16579015, 0.23516739, 0.46610429],
[0.1664433, 0.2323105, 0.46462219],
[0.16707586, 0.22944155, 0.46315508],
[0.16768475, 0.22656122, 0.46170223],
[0.16826815, 0.22366984, 0.46026308],
[0.16883174, 0.22076514, 0.45883891],
[0.16937589, 0.21784655, 0.45742976],
[0.16990129, 0.21491339, 0.45603578],
[0.1704074, 0.21196535, 0.45465677],
[0.17089473, 0.20900176, 0.4532928],
[0.17136819, 0.20602012, 0.45194524],
[0.17182683, 0.20302012, 0.45061386],
[0.17227059, 0.20000106, 0.44929865],
[0.17270583, 0.19695949, 0.44800165],
[0.17313804, 0.19389201, 0.44672488],
[0.17363177, 0.19076859, 0.44549087]
]
_lut_dict = dict(
rocket=_rocket_lut,
mako=_mako_lut,
icefire=_icefire_lut,
vlag=_vlag_lut,
flare=_flare_lut,
crest=_crest_lut,
)
for _name, _lut in _lut_dict.items():
_cmap = colors.ListedColormap(_lut, _name)
locals()[_name] = _cmap
_cmap_r = colors.ListedColormap(_lut[::-1], _name + "_r")
locals()[_name + "_r"] = _cmap_r
register_colormap(_name, _cmap)
register_colormap(_name + "_r", _cmap_r)
del colors, register_colormap
| bsd-3-clause | fc4b755607c549477223a1e1dea635e9 | 40.638083 | 61 | 0.626336 | 2.022294 | false | false | false | false |
mwaskom/seaborn | seaborn/miscplot.py | 2 | 1407 | import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
__all__ = ["palplot", "dogplot"]
def palplot(pal, size=1):
"""Plot the values in a color palette as a horizontal array.
Parameters
----------
pal : sequence of matplotlib colors
colors, i.e. as returned by seaborn.color_palette()
size :
scaling factor for size of plot
"""
n = len(pal)
f, ax = plt.subplots(1, 1, figsize=(n * size, size))
ax.imshow(np.arange(n).reshape(1, n),
cmap=mpl.colors.ListedColormap(list(pal)),
interpolation="nearest", aspect="auto")
ax.set_xticks(np.arange(n) - .5)
ax.set_yticks([-.5, .5])
# Ensure nice border between colors
ax.set_xticklabels(["" for _ in range(n)])
# The proper way to set no ticks
ax.yaxis.set_major_locator(ticker.NullLocator())
def dogplot(*_, **__):
    """Who's a good boy?"""
    # Keep the legacy Python 2 fallback for urlopen.
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    from io import BytesIO

    url = "https://github.com/mwaskom/seaborn-data/raw/master/png/img{}.png"
    # Pick one of the bundled dog pictures (img2 .. img6) at random.
    pic_number = np.random.randint(2, 7)
    raw_png = urlopen(url.format(pic_number)).read()
    img = plt.imread(BytesIO(raw_png))

    fig, ax = plt.subplots(figsize=(5, 5), dpi=100)
    # Let the picture fill the whole figure, no margins.
    fig.subplots_adjust(0, 0, 1, 1)
    ax.imshow(img)
    ax.set_axis_off()
| bsd-3-clause | 383e82ec83d8a3ed13a7f49193e92962 | 28.3125 | 76 | 0.620469 | 3.366029 | false | false | false | false |
janelia-flyem/gala | gala/valprob.py | 2 | 8563 | from . import imio, option_manager, app_logger, session_manager
import libNeuroProofPriority as neuroproof
import os
import sys
import glob
import h5py
import numpy
import json
import traceback
def image_stack_verify(options_parser, options, master_logger):
    """Validate that the --test-stack path, when provided, actually exists."""
    stack_path = options.test_stack
    if stack_path is not None and not os.path.exists(stack_path):
        raise Exception("Image volume does not exist at " + stack_path)
def image_stack_verify2(options_parser, options, master_logger):
    """Validate that the --gt-stack path, when provided, actually exists."""
    stack_path = options.gt_stack
    if stack_path is not None and not os.path.exists(stack_path):
        raise Exception("Image volume does not exist at " + stack_path)
def graph_file_verify(options_parser, options, master_logger):
    """Validate that the --ragprob-file path, when provided, actually exists.

    Raises
    ------
    Exception
        If the configured path does not point to an existing file.
    """
    if options.ragprob_file is not None:
        if not os.path.exists(options.ragprob_file):
            # Original message duplicated "not found" on both sides of the
            # path ("ragprob file not found X not found"); fixed here.
            raise Exception("ragprob file " + options.ragprob_file + " not found")
def create_valprob_options(options_parser):
    """Register the command-line options consumed by the valprob tool."""
    # Partially proofread segmentation to evaluate.
    options_parser.create_option(
        "test-stack", "initial segmentation (to any percentage)",
        required=True, verify_fn=image_stack_verify,
        shortcut='T', warning=True)

    # Fully proofread reference segmentation.
    options_parser.create_option(
        "gt-stack", "ground truth stack (~100 percent complete)",
        default_val=None, required=True, dtype=str,
        verify_fn=image_stack_verify2, num_args=None,
        shortcut='G', warning=False)

    # JSON file holding RAG edges and their merge probabilities.
    options_parser.create_option(
        "ragprob-file", "RAG probability file",
        default_val=None, required=True, dtype=str,
        verify_fn=graph_file_verify, num_args=None,
        shortcut='R', warning=False)

    # Voxel-count cutoff used by the body/orphan proofreading modes.
    options_parser.create_option(
        "size-threshold", "Number of voxels used in threshold",
        default_val=25000, required=False, dtype=int,
        verify_fn=None, num_args=None,
        shortcut='ST', warning=False)
def load_graph_json(json_file):
    """Load RAG edges and merge probabilities from a JSON graph file.

    Parameters
    ----------
    json_file : str
        Path to a JSON file whose "edge_list" entries each carry
        "node1", "node2" and "weight" keys.

    Returns
    -------
    list of (node1, node2, weight)
        One tuple per edge, normalized so that node1 <= node2.
    """
    # Use a context manager so the handle is closed promptly; the original
    # opened the file and never closed it.
    with open(json_file) as json_file_handle:
        json_data = json.load(json_file_handle)

    pairprob_list = []
    for edge in json_data["edge_list"]:
        node1 = edge["node1"]
        node2 = edge["node2"]
        # Canonical edge orientation: smaller node id first.
        if node1 > node2:
            node2, node1 = node1, node2

        weight = edge["weight"]
        pairprob_list.append((node1, node2, weight))

    return pairprob_list
def find_gt_bodies(gt_stack, test_stack):
    """Map each test-segmentation body to the ground-truth body it overlaps most.

    Both stacks must have the same shape; voxels are compared positionally.
    Returns a dict {test_body_id: best_matching_gt_body_id}.
    """
    # Count, for every test body, how many voxels it shares with each GT body.
    overlap_counts = {}
    for index, test_body in numpy.ndenumerate(test_stack):
        gt_body = gt_stack[index]
        counts = overlap_counts.setdefault(test_body, {})
        counts[gt_body] = counts.get(gt_body, 0) + 1

    # Pick the GT body with the largest overlap (first seen wins ties).
    body2gtbody = {}
    for test_body, counts in overlap_counts.items():
        best_gt = 0
        best_count = 0
        for gt_body, count in counts.items():
            if count > best_count:
                best_count = count
                best_gt = gt_body
        body2gtbody[test_body] = best_gt

    return body2gtbody
def process_edge(body2gtbody, nomerge_hist, tot_hist, nomerge_hist2, tot_hist2, dirtybodies, bodyremap):
    """Pull the next edge off the NeuroProof priority queue and score it.

    Updates the probability histograms (index = int(weight*100)), decides
    whether the two bodies should be linked by comparing their ground-truth
    assignments, and feeds the decision back to NeuroProof. When bodies are
    merged, `bodyremap` tracks which original bodies each surviving body
    absorbed and `dirtybodies` marks bodies whose stats are no longer "flat".

    NOTE(review): relies on NeuroProof's internal queue state — must only be
    called while get_estimated_num_remaining_edges() > 0.
    """
    priority = neuroproof.get_next_edge()
    (body1, body2) = priority.body_pair
    weight = neuroproof.get_edge_val(priority)

    # "Flat" histograms (tot_hist/nomerge_hist) only count edges between
    # bodies untouched by previous merges; the "2" histograms count all.
    if body1 not in dirtybodies and body2 not in dirtybodies:
        tot_hist[int(weight*100)] += 1
    tot_hist2[int(weight*100)] += 1

    link = True
    if body2gtbody[body1] != body2gtbody[body2]:
        # Ground truth says these are different objects: record a no-merge.
        if body1 not in dirtybodies and body2 not in dirtybodies:
            nomerge_hist[int(weight*100)] += 1
        nomerge_hist2[int(weight*100)] += 1
        link = False
    else:
        # Ground truth agrees: merge body2 into body1 and remember the
        # absorbed ids so segment ids can be rewritten later.
        if body2 not in bodyremap:
            bodyremap[body2] = [body2]
        if body1 not in bodyremap:
            bodyremap[body1] = [body1]
        dirtybodies.add(body1)
        bodyremap[body1].extend(bodyremap[body2])
        del bodyremap[body2]

    # Report the decision back so NeuroProof updates its queue.
    neuroproof.set_edge_result(priority.body_pair, link)
def auto_proofread(body2gtbody, rag_file, size_threshold, master_logger, test_stack, session_location):
    """Simulate guided proofreading against ground truth and export the result.

    Runs NeuroProof's body, synapse and orphan modes in sequence, scoring
    every proposed edge with `process_edge`, then writes an updated
    segment-to-body mapping (with merged bodies collapsed) into
    `session_location`, copying the remaining stack files from `test_stack`.
    """
    # One histogram bucket per probability percentage point (0-100).
    nomerge_hist = []
    tot_hist = []
    nomerge_hist2 = []
    tot_hist2 = []
    dirtybodies = set()
    for iter1 in range(0, 101):
        nomerge_hist.append(0)
        tot_hist.append(0)
        nomerge_hist2.append(0)
        tot_hist2.append(0)

    neuroproof.initialize_priority_scheduler(rag_file, 0.1, 0.9, 0.1)

    # Maps surviving body id -> list of original body ids it absorbed.
    bodyremap = {}

    # Phase 1: large-body mode.
    num_body = 0
    neuroproof.set_body_mode(size_threshold, 0)
    while neuroproof.get_estimated_num_remaining_edges() > 0:
        process_edge(body2gtbody, nomerge_hist, tot_hist, nomerge_hist2, tot_hist2, dirtybodies, bodyremap)
        num_body += 1

    # Phase 2: synapse mode.
    num_synapse = 0
    neuroproof.set_synapse_mode(0.1)
    while neuroproof.get_estimated_num_remaining_edges() > 0:
        process_edge(body2gtbody, nomerge_hist, tot_hist, nomerge_hist2, tot_hist2, dirtybodies, bodyremap)
        num_synapse += 1

    # Phase 3: orphan mode.
    num_orphan = 0
    neuroproof.set_orphan_mode(size_threshold, size_threshold, size_threshold)
    while neuroproof.get_estimated_num_remaining_edges() > 0:
        process_edge(body2gtbody, nomerge_hist, tot_hist, nomerge_hist2, tot_hist2, dirtybodies, bodyremap)
        num_orphan += 1

    # Dump per-bucket agreement tables (CSV-ish rows on stdout).
    master_logger.info("Probability Actual Agreement with Groundtruth Flat")
    for iter1 in range(0, 101):
        if tot_hist[iter1] == 0:
            per = 0
        else:
            per = (float(nomerge_hist[iter1])/float(tot_hist[iter1]) * 100)
        print(iter1, ", ", per , ", " , tot_hist[iter1])

    master_logger.info("Probability Actual Agreement with Groundtruth Est")
    for iter1 in range(0, 101):
        if tot_hist2[iter1] == 0:
            per = 0
        else:
            per = (float(nomerge_hist2[iter1])/float(tot_hist2[iter1]) * 100)
        print(iter1, ", ", per , ", " , tot_hist2[iter1])

    # Invert bodyremap: every absorbed body now points at its survivor.
    body2body = {}
    for key, vallist in bodyremap.items():
        for body in vallist:
            body2body[body] = key

    # Copy the untouched stack artifacts into the session directory.
    # NOTE(review): shell string concatenation — assumes paths contain no
    # spaces or shell metacharacters.
    os.system("cp -R " + test_stack + "/superpixel_maps " + session_location + "/")
    os.system("cp " + test_stack + "/superpixel_to_segment_map.txt " + session_location + "/")

    # Rewrite the segment->body mapping applying the merges decided above.
    mapping_file = open(test_stack + "/segment_to_body_map.txt")
    outfile = open(session_location + "/segment_to_body_map.txt", 'w')

    for line in mapping_file.readlines():
        vals = line.split(' ')
        seg = int(vals[0])
        body = int(vals[1])
        if body in body2body:
            body = body2body[body]
        outfile.write(str(seg) + " " + str(body) + "\n")

    master_logger.info("Num body: " + str(num_body))
    master_logger.info("Num synapse: " + str(num_synapse))
    master_logger.info("Num orphan: " + str(num_orphan))
    master_logger.info("Num total: " + str(num_body + num_synapse + num_orphan))
def valprob(session_location, options, master_logger):
    """Compare RAG edge probabilities against ground truth and report agreement."""
    master_logger.info("Reading gt_stack")
    gt_stack = imio.read_image_stack(options.gt_stack)
    master_logger.info("Reading test_stack")
    test_stack = imio.read_image_stack(options.test_stack)
    master_logger.info("Finished reading stacks")

    master_logger.info("Loading graph json")
    pairprob_list = load_graph_json(options.ragprob_file)
    master_logger.info("Finished loading graph json")

    master_logger.info("Matching bodies to GT")
    body2gtbody = find_gt_bodies(gt_stack, test_stack)
    master_logger.info("Finished matching bodies to GT")

    # One histogram bucket per probability percentage point (0-100).
    nomerge_hist = [0] * 101
    tot_hist = [0] * 101

    for node1, node2, prob in pairprob_list:
        bucket = int(prob * 100)
        tot_hist[bucket] += 1
        # An edge "should not merge" when its bodies map to different GT bodies.
        if body2gtbody[node1] != body2gtbody[node2]:
            nomerge_hist[bucket] += 1

    master_logger.info("Probability Agreement with Groundtruth")
    for bucket in range(101):
        total = tot_hist[bucket]
        per = 0 if total == 0 else float(nomerge_hist[bucket]) / float(total) * 100
        print(bucket, ", ", per, ", ", total)

    auto_proofread(body2gtbody, options.ragprob_file, options.size_threshold,
                   master_logger, options.test_stack, session_location)
def entrypoint(argv):
    """Command-line entry point for the valprob tool.

    Builds a session from the registered options and runs the validation,
    logging (rather than propagating) any failure or user interrupt.
    """
    applogger = app_logger.AppLogger(False, 'valprob')
    master_logger = applogger.get_logger()

    try:
        session = session_manager.Session(
            "valprob",
            "Validate the predicted probabilities against 100% groundtruth",
            master_logger, applogger, create_valprob_options)
        valprob(session.session_location, session.options, master_logger)
    except KeyboardInterrupt:
        # User-requested abort; log the traceback instead of crashing.
        # (Listed first for clarity: KeyboardInterrupt is a BaseException
        # and would not be caught by the Exception clause anyway.)
        master_logger.error(traceback.format_exc())
    except Exception:
        # Top-level boundary: log any failure; format_exc() already
        # returns a str, so the original str(...) wrapper was redundant.
        master_logger.error(traceback.format_exc())
| bsd-3-clause | b99ddd9b01ac2053a4b003d4826016ff | 35.751073 | 130 | 0.639145 | 3.335801 | false | true | false | false |
mozilla/pontoon | pontoon/checks/migrations/0001_squashed_0004_auto_20200206_0932.py | 2 | 2984 | # Generated by Django 1.11.28 on 2020-03-08 19:52
from django.db import migrations, models
import django.db.migrations.operations.special
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the checks app: Error and Warning rows per translation.

    Both models share the same columns; only the related_name on the
    translation FK differs ("errors" vs "warnings").
    """

    initial = True

    dependencies = [
        ("base", "__first__"),
    ]

    operations = [
        migrations.CreateModel(
            name="Error",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    # Which checker produced the record.
                    "library",
                    models.CharField(
                        choices=[
                            ("p", "pontoon"),
                            ("cl", "compare-locales"),
                        ],
                        db_index=True,
                        max_length=20,
                    ),
                ),
                ("message", models.TextField()),
                (
                    "translation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="errors",
                        to="base.Translation",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="Warning",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "library",
                    models.CharField(
                        choices=[
                            ("p", "pontoon"),
                            ("cl", "compare-locales"),
                        ],
                        db_index=True,
                        max_length=20,
                    ),
                ),
                ("message", models.TextField()),
                (
                    "translation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="warnings",
                        to="base.Translation",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        # A given translation may hold each distinct message only once per library.
        migrations.AlterUniqueTogether(
            name="warning",
            unique_together={("translation", "library", "message")},
        ),
        migrations.AlterUniqueTogether(
            name="error",
            unique_together={("translation", "library", "message")},
        ),
    ]
| bsd-3-clause | 95a0fd59dc75fe8df6e0409c8cbc1754 | 29.762887 | 68 | 0.342493 | 6.152577 | false | false | false | false |
mozilla/pontoon | pontoon/api/schema.py | 2 | 5726 | import graphene
from graphene_django import DjangoObjectType
from graphene_django.debug import DjangoDebug
from pontoon.api.util import get_fields
from pontoon.base.models import (
Locale as LocaleModel,
Project as ProjectModel,
ProjectLocale as ProjectLocaleModel,
)
from pontoon.tags.models import Tag as TagModel
class Stats:
    """Mixin adding aggregate progress fields to stat-reporting GraphQL types."""

    # Resolved from the underlying model instance by graphene's default resolvers.
    missing_strings = graphene.Int()
    complete = graphene.Boolean()
class Tag(DjangoObjectType):
    """GraphQL type exposing pontoon.tags.models.Tag."""

    class Meta:
        # Expose `priority` as its raw value instead of an auto-generated enum.
        convert_choices_to_enum = False
        model = TagModel
        fields = (
            "slug",
            "name",
            "priority",
        )
class ProjectLocale(DjangoObjectType, Stats):
    """GraphQL type for one project/locale pairing plus its translation stats."""

    class Meta:
        model = ProjectLocaleModel
        fields = (
            "project",
            "locale",
            "total_strings",
            "approved_strings",
            "pretranslated_strings",
            "strings_with_errors",
            "strings_with_warnings",
            "unreviewed_strings",
        )
class Project(DjangoObjectType, Stats):
    """GraphQL type for a localization project, with per-locale breakdowns and tags."""

    class Meta:
        convert_choices_to_enum = False
        model = ProjectModel
        fields = (
            "name",
            "slug",
            "disabled",
            "sync_disabled",
            "pretranslation_enabled",
            "visibility",
            "system_project",
            "info",
            "deadline",
            "priority",
            "contact",
            "total_strings",
            "approved_strings",
            "pretranslated_strings",
            "strings_with_errors",
            "strings_with_warnings",
            "unreviewed_strings",
        )

    localizations = graphene.List(ProjectLocale)
    tags = graphene.List(Tag)

    def resolve_localizations(obj, info):
        # All ProjectLocale rows for this project (visibility filtering,
        # if any, happens in the Query-level resolvers).
        return obj.project_locale.all()

    def resolve_tags(obj, info):
        return obj.tag_set.all()
class Locale(DjangoObjectType, Stats):
    """GraphQL type for a locale/team, with optional per-project breakdowns."""

    class Meta:
        model = LocaleModel
        fields = (
            "name",
            "code",
            "direction",
            "cldr_plurals",
            "plural_rule",
            "script",
            "population",
            "team_description",
            "total_strings",
            "approved_strings",
            "pretranslated_strings",
            "strings_with_errors",
            "strings_with_warnings",
            "unreviewed_strings",
            "google_translate_code",
            "ms_translator_code",
            "systran_translate_code",
            "ms_terminology_code",
        )

    localizations = graphene.List(
        ProjectLocale,
        include_disabled=graphene.Boolean(False),
        include_system=graphene.Boolean(False),
    )

    def resolve_localizations(obj, info, include_disabled, include_system):
        """Return this locale's project pairings, visible to the requesting user.

        Starts from active, non-system projects and widens the queryset with
        `|=` when the include flags are set; distinct() removes duplicates
        introduced by the union.
        """
        projects = obj.project_locale.visible_for(info.context.user)

        records = projects.filter(
            project__disabled=False, project__system_project=False
        )

        if include_disabled:
            records |= projects.filter(project__disabled=True)

        if include_system:
            records |= projects.filter(project__system_project=True)

        return records.distinct()
class Query(graphene.ObjectType):
    """Root GraphQL query: projects and locales, singly or in lists."""

    debug = graphene.Field(DjangoDebug, name="__debug")

    # include_disabled=True will return both active and disabled projects.
    # include_system=True will return both system and non-system projects.
    projects = graphene.List(
        Project,
        include_disabled=graphene.Boolean(False),
        include_system=graphene.Boolean(False),
    )
    project = graphene.Field(Project, slug=graphene.String())

    locales = graphene.List(Locale)
    locale = graphene.Field(Locale, code=graphene.String())

    def resolve_projects(obj, info, include_disabled, include_system):
        # Inspect the requested fields to decide on prefetching and to
        # reject queries that would recurse project -> locale -> project.
        fields = get_fields(info)

        projects = ProjectModel.objects.visible_for(info.context.user)
        records = projects.filter(disabled=False, system_project=False)

        if include_disabled:
            records |= projects.filter(disabled=True)

        if include_system:
            records |= projects.filter(system_project=True)

        if "projects.localizations" in fields:
            records = records.prefetch_related("project_locale__locale")

        if "projects.localizations.locale.localizations" in fields:
            raise Exception("Cyclic queries are forbidden")

        return records.distinct()

    def resolve_project(obj, info, slug):
        qs = ProjectModel.objects.visible_for(info.context.user)
        fields = get_fields(info)

        if "project.localizations" in fields:
            qs = qs.prefetch_related("project_locale__locale")

        if "project.tags" in fields:
            qs = qs.prefetch_related("tag_set")

        if "project.localizations.locale.localizations" in fields:
            raise Exception("Cyclic queries are forbidden")

        return qs.get(slug=slug)

    def resolve_locales(obj, info):
        qs = LocaleModel.objects
        fields = get_fields(info)

        if "locales.localizations" in fields:
            qs = qs.prefetch_related("project_locale__project")

        if "locales.localizations.project.localizations" in fields:
            raise Exception("Cyclic queries are forbidden")

        return qs.all()

    def resolve_locale(obj, info, code):
        qs = LocaleModel.objects
        fields = get_fields(info)

        if "locale.localizations" in fields:
            qs = qs.prefetch_related("project_locale__project")

        if "locale.localizations.project.localizations" in fields:
            raise Exception("Cyclic queries are forbidden")

        return qs.get(code=code)
# Module-level schema instance consumed by the GraphQL endpoint.
schema = graphene.Schema(query=Query)
| bsd-3-clause | b63c848e8a0be3fab71802655fd5b262 | 27.346535 | 75 | 0.601292 | 4.314996 | false | false | false | false |
mozilla/pontoon | pontoon/sync/migrations/0002_change_pontoon_sync_email.py | 2 | 1040 | # Generated by Django 1.11.28 on 2020-03-08 19:30
from django.db import migrations
# Identity of the system user under which sync-generated changes are recorded.
SYNC_USER = {
    "username": "pontoon-sync",
    "email": "pontoon-sync@mozilla.com",
    "first_name": "Sync",
}
def add_sync_user(apps, schema_editor):
    """Create the pontoon-sync system user and its profile (forward migration)."""
    # Use the historical model states, as required inside migrations.
    user_model = apps.get_model("auth", "User")
    profile_model = apps.get_model("base", "UserProfile")

    sync_user = user_model(**SYNC_USER)
    sync_user.save()

    profile_model(user=sync_user).save()
def remove_sync_user(apps, schema_editor):
    """Delete the pontoon-sync system user and its profile (reverse migration).

    Bug fix: the original called ``objects.find(...)``, which does not exist
    on Django managers/querysets and would raise AttributeError if this
    migration were ever reversed; the single-object lookup is ``get()``.
    """
    User = apps.get_model("auth", "User")
    UserProfile = apps.get_model("base", "UserProfile")
    user = User.objects.get(username=SYNC_USER["username"])
    user_profile = UserProfile.objects.get(user=user)
    # Delete the profile first so no dangling FK remains.
    user_profile.delete()
    user.delete()
class Migration(migrations.Migration):
    """Create the pontoon-sync system user via a data migration."""

    initial = True

    dependencies = [
        ("sync", "0001_squashed_0004_add-sync-system-user"),
    ]

    operations = [
        # Data migration: reversible via remove_sync_user.
        migrations.RunPython(
            code=add_sync_user,
            reverse_code=remove_sync_user,
        ),
    ]
| bsd-3-clause | eb41f440ba81c892c6bfed830b0b3a0a | 21.12766 | 60 | 0.625962 | 3.45515 | false | false | false | false |
mozilla/pontoon | pontoon/translations/views.py | 2 | 15355 | from django.contrib.auth.decorators import login_required
from django.db import transaction
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils import timezone
from django.utils.datastructures import MultiValueDictKeyError
from django.views.decorators.http import require_POST
from notifications.signals import notify
from pontoon.actionlog.models import ActionLog
from pontoon.actionlog.utils import log_action
from pontoon.base import utils
from pontoon.base.models import (
TranslatedResource,
Translation,
)
from pontoon.checks.libraries import run_checks
from pontoon.checks.utils import are_blocking_checks
from pontoon.translations import forms
@require_POST
@utils.require_AJAX
@login_required(redirect_field_name="", login_url="/403")
@transaction.atomic
def create_translation(request):
    """
    Create a new translation.

    Validates the submitted form, runs quality checks (unless targeting the
    tutorial project), saves the translation — auto-approving it when the
    user has translator rights — and notifies team managers on a user's
    first contribution to the locale. Returns JSON with the saved
    translation and updated stats, or an error payload.
    """
    form = forms.CreateTranslationForm(request.POST)

    if not form.is_valid():
        problems = []
        for field, errors in form.errors.items():
            problems.append(
                'Error validating field `{}`: "{}"'.format(field, " ".join(errors))
            )
        return JsonResponse(
            {"status": False, "message": "\n".join(problems)}, status=400
        )

    entity = form.cleaned_data["entity"]
    string = form.cleaned_data["translation"]
    locale = form.cleaned_data["locale"]
    plural_form = form.cleaned_data["plural_form"]
    original = form.cleaned_data["original"]
    ignore_warnings = form.cleaned_data["ignore_warnings"]
    approve = form.cleaned_data["approve"]
    force_suggestions = form.cleaned_data["force_suggestions"]
    paths = form.cleaned_data["paths"]
    machinery_sources = form.cleaned_data["machinery_sources"]

    project = entity.resource.project

    # Read-only translations cannot be saved
    if utils.readonly_exists(project, locale):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This string is in read-only mode.",
            },
            status=403,
        )

    translations = Translation.objects.filter(
        entity=entity,
        locale=locale,
        plural_form=plural_form,
    )

    same_translations = translations.filter(string=string)

    # If same translation exists in the DB, don't save it again.
    if same_translations:
        return JsonResponse({"status": False, "same": True})

    # Look for failed checks.
    # Checks are disabled for the tutorial.
    use_checks = project.slug != "tutorial"
    user = request.user
    # Captured before saving: the save below would make this user a contributor.
    first_contribution = user.is_new_contributor(locale)

    failed_checks = None
    if use_checks:
        failed_checks = run_checks(
            entity,
            locale.code,
            original,
            string,
            user.profile.quality_checks,
        )

        if are_blocking_checks(failed_checks, ignore_warnings):
            return JsonResponse({"status": False, "failedChecks": failed_checks})

    now = timezone.now()
    # Auto-approve when the user may translate here, unless they explicitly
    # forced a suggestion without requesting approval.
    can_translate = user.can_translate(project=project, locale=locale) and (
        not force_suggestions or approve
    )

    translation = Translation(
        entity=entity,
        locale=locale,
        plural_form=plural_form,
        string=string,
        user=user,
        date=now,
        approved=can_translate,
        machinery_sources=machinery_sources,
    )

    if can_translate:
        translation.approved_user = user
        translation.approved_date = now

    translation.save(failed_checks=failed_checks)

    log_action(ActionLog.ActionType.TRANSLATION_CREATED, user, translation=translation)

    if translations:
        # Other translations exist for this entity: recompute which is active.
        translation = entity.reset_active_translation(
            locale=locale,
            plural_form=plural_form,
        )

    # When user makes their first contribution to the team, notify team managers
    if first_contribution:
        desc = """
        <a href="{user_href}">{user}</a> has made their first contribution to
        <a href="{locale_href}">{locale} ({locale_code})</a>.
        Please welcome them to the team, and make sure to
        <a href="{review_href}">review their suggestions</a>.
        """.format(
            user=user.name_or_email,
            user_href=reverse(
                "pontoon.contributors.contributor.username",
                kwargs={
                    "username": user.username,
                },
            ),
            locale=locale.name,
            locale_code=locale.code,
            locale_href=reverse(
                "pontoon.teams.team",
                kwargs={
                    "locale": locale.code,
                },
            ),
            review_href=reverse(
                "pontoon.translate",
                kwargs={
                    "locale": locale.code,
                    "project": project.slug,
                    "resource": entity.resource.path,
                },
            )
            + f"?string={entity.pk}",
        )

        for manager in locale.managers_group.user_set.filter(
            profile__new_contributor_notifications=True
        ):
            notify.send(
                sender=manager,
                recipient=manager,
                verb="has reviewed suggestions",  # Triggers render of description only
                description=desc,
            )

    return JsonResponse(
        {
            "status": True,
            "translation": translation.serialize(),
            "stats": TranslatedResource.objects.stats(project, paths, locale),
        }
    )
@utils.require_AJAX
@login_required(redirect_field_name="", login_url="/403")
@transaction.atomic
def delete_translation(request):
    """Delete given translation."""
    try:
        translation_pk = request.POST["translation"]
    except MultiValueDictKeyError as e:
        return JsonResponse(
            {"status": False, "message": f"Bad Request: {e}"}, status=400
        )

    translation = get_object_or_404(Translation, pk=translation_pk)
    entity = translation.entity
    locale = translation.locale
    project = entity.resource.project

    # Strings in read-only mode may not be modified at all.
    if utils.readonly_exists(project, locale):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This string is in read-only mode.",
            },
            status=403,
        )

    # Only rejected translations may be deleted, and only by privileged
    # users, the author, or when the translation had been approved.
    may_delete = translation.rejected and (
        request.user.can_translate(locale, project)
        or request.user == translation.user
        or translation.approved
    )
    if not may_delete:
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: You can't delete this translation.",
            },
            status=403,
        )

    translation.delete()

    # Log against the entity/locale pair since the translation row is gone.
    log_action(
        ActionLog.ActionType.TRANSLATION_DELETED,
        request.user,
        entity=entity,
        locale=locale,
    )

    return JsonResponse({"status": True})
@utils.require_AJAX
@login_required(redirect_field_name="", login_url="/403")
@transaction.atomic
def approve_translation(request):
    """Approve given translation.

    Requires translator rights on the locale/project. Runs quality checks
    first (except for the tutorial project) and refuses to approve when
    blocking checks fail. Returns JSON with the new active translation and
    updated stats.
    """
    try:
        t = request.POST["translation"]
        ignore_warnings = request.POST.get("ignore_warnings", "false") == "true"
        paths = request.POST.getlist("paths[]")
    except MultiValueDictKeyError as e:
        return JsonResponse(
            {"status": False, "message": f"Bad Request: {e}"},
            status=400,
        )

    translation = get_object_or_404(Translation, pk=t)
    entity = translation.entity
    project = entity.resource.project
    locale = translation.locale
    user = request.user

    # Read-only translations cannot be approved
    if utils.readonly_exists(project, locale):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This string is in read-only mode.",
            },
            status=403,
        )

    if translation.approved:
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This translation is already approved.",
            },
            status=403,
        )

    # Only privileged users can approve translations
    if not user.can_translate(locale, project):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: You don't have permission to approve this translation.",
            },
            status=403,
        )

    # Check for errors.
    # Checks are disabled for the tutorial.
    use_checks = project.slug != "tutorial"

    if use_checks:
        failed_checks = run_checks(
            entity,
            locale.code,
            entity.string,
            translation.string,
            user.profile.quality_checks,
        )

        if are_blocking_checks(failed_checks, ignore_warnings):
            return JsonResponse(
                {"string": translation.string, "failedChecks": failed_checks}
            )

    translation.approve(user)
    log_action(ActionLog.ActionType.TRANSLATION_APPROVED, user, translation=translation)

    # Approval may change which translation is active for the entity.
    active_translation = translation.entity.reset_active_translation(
        locale=locale,
        plural_form=translation.plural_form,
    )

    return JsonResponse(
        {
            "translation": active_translation.serialize(),
            "stats": TranslatedResource.objects.stats(project, paths, locale),
        }
    )
@utils.require_AJAX
@login_required(redirect_field_name="", login_url="/403")
@transaction.atomic
def unapprove_translation(request):
    """Unapprove given translation."""
    try:
        translation_pk = request.POST["translation"]
        paths = request.POST.getlist("paths[]")
    except MultiValueDictKeyError as e:
        return JsonResponse(
            {"status": False, "message": f"Bad Request: {e}"}, status=400
        )

    translation = get_object_or_404(Translation, pk=translation_pk)
    locale = translation.locale
    project = translation.entity.resource.project

    # Strings in read-only mode may not be modified at all.
    if utils.readonly_exists(project, locale):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This string is in read-only mode.",
            },
            status=403,
        )

    # Un-approving is limited to privileged users, the author, or
    # translations that are currently approved.
    may_unapprove = (
        request.user.can_translate(locale, project)
        or request.user == translation.user
        or translation.approved
    )
    if not may_unapprove:
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: You can't unapprove this translation.",
            },
            status=403,
        )

    translation.unapprove(request.user)
    log_action(
        ActionLog.ActionType.TRANSLATION_UNAPPROVED,
        request.user,
        translation=translation,
    )

    # Recompute the active translation now that this one lost approval.
    active_translation = translation.entity.reset_active_translation(
        locale=locale,
        plural_form=translation.plural_form,
    )

    return JsonResponse(
        {
            "translation": active_translation.serialize(),
            "stats": TranslatedResource.objects.stats(project, paths, locale),
        }
    )
@utils.require_AJAX
@login_required(redirect_field_name="", login_url="/403")
@transaction.atomic
def reject_translation(request):
    """Reject given translation.

    Privileged users may reject any translation; other users may only
    reject their own unapproved suggestions. Returns JSON with the new
    active translation and updated stats.
    """
    try:
        t = request.POST["translation"]
        paths = request.POST.getlist("paths[]")
    except MultiValueDictKeyError as e:
        return JsonResponse(
            {"status": False, "message": f"Bad Request: {e}"},
            status=400,
        )

    translation = get_object_or_404(Translation, pk=t)
    project = translation.entity.resource.project
    locale = translation.locale

    # Read-only translations cannot be rejected
    if utils.readonly_exists(project, locale):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This string is in read-only mode.",
            },
            status=403,
        )

    # Non-privileged users can only reject own unapproved translations
    if not request.user.can_translate(locale, project):
        if translation.user == request.user:
            # Own translation, but approved ones need privileges to reject.
            if translation.approved is True:
                return JsonResponse(
                    {
                        "status": False,
                        "message": "Forbidden: You can't reject approved translations.",
                    },
                    status=403,
                )
        else:
            return JsonResponse(
                {
                    "status": False,
                    "message": "Forbidden: You can't reject translations from other users.",
                },
                status=403,
            )

    translation.reject(request.user)
    log_action(
        ActionLog.ActionType.TRANSLATION_REJECTED, request.user, translation=translation
    )

    # Rejection may change which translation is active for the entity.
    active_translation = translation.entity.reset_active_translation(
        locale=locale,
        plural_form=translation.plural_form,
    )

    return JsonResponse(
        {
            "translation": active_translation.serialize(),
            "stats": TranslatedResource.objects.stats(project, paths, locale),
        }
    )
@utils.require_AJAX
@login_required(redirect_field_name="", login_url="/403")
@transaction.atomic
def unreject_translation(request):
    """Unreject given translation."""
    try:
        translation_pk = request.POST["translation"]
        paths = request.POST.getlist("paths[]")
    except MultiValueDictKeyError as e:
        return JsonResponse(
            {"status": False, "message": f"Bad Request: {e}"}, status=400
        )

    translation = get_object_or_404(Translation, pk=translation_pk)
    locale = translation.locale
    project = translation.entity.resource.project

    # Strings in read-only mode may not be modified at all.
    if utils.readonly_exists(project, locale):
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: This string is in read-only mode.",
            },
            status=403,
        )

    # Un-rejecting is limited to privileged users, the author, or
    # translations that are approved.
    may_unreject = (
        request.user.can_translate(locale, project)
        or request.user == translation.user
        or translation.approved
    )
    if not may_unreject:
        return JsonResponse(
            {
                "status": False,
                "message": "Forbidden: You can't unreject this translation.",
            },
            status=403,
        )

    translation.unreject(request.user)
    log_action(
        ActionLog.ActionType.TRANSLATION_UNREJECTED,
        request.user,
        translation=translation,
    )

    # Recompute the active translation now that this one is back in play.
    active_translation = translation.entity.reset_active_translation(
        locale=locale,
        plural_form=translation.plural_form,
    )

    return JsonResponse(
        {
            "translation": active_translation.serialize(),
            "stats": TranslatedResource.objects.stats(project, paths, locale),
        }
    )
| bsd-3-clause | 922f31f08182cf4a2c8708f6ec9df82d | 29.107843 | 95 | 0.590882 | 4.557732 | false | false | false | false |
mozilla/pontoon | pontoon/batch/forms.py | 2 | 1064 | import urllib.parse
from django import forms
from pontoon.base import utils
from pontoon.batch.actions import ACTIONS_FN_MAP
class BatchActionsForm(forms.Form):
    """Handles the arguments passed to the batch actions view.

    `action` must be one of the registered batch actions; `find` and
    `replace` arrive percent-quoted from the frontend and are decoded in
    their clean methods.
    """

    locale = forms.CharField()
    # Iterate the mapping directly; calling .keys() was redundant.
    action = forms.ChoiceField(choices=[(x, x) for x in ACTIONS_FN_MAP])
    entities = forms.CharField(required=False)
    find = forms.CharField(required=False)
    replace = forms.CharField(required=False)

    def clean_entities(self):
        """Split the comma-separated entity ids into a list of ints."""
        return utils.split_ints(self.cleaned_data["entities"])

    def decode_field(self, param_name):
        """
        The translate frontend sends quoted form fields to avoid issues with e.g. non breakable spaces.
        Related bug: https://bugzilla.mozilla.org/show_bug.cgi?id=1438575
        """
        field_val = self.cleaned_data.get(param_name, "")
        return urllib.parse.unquote(field_val)

    def clean_find(self):
        """Return the percent-decoded `find` value."""
        return self.decode_field("find")

    def clean_replace(self):
        """Return the percent-decoded `replace` value."""
        return self.decode_field("replace")
| bsd-3-clause | 84653502738d7208efc2f0516094ce16 | 30.294118 | 103 | 0.68515 | 3.869091 | false | false | false | false |
mozilla/pontoon | pontoon/base/tests/forms/test_permission_log.py | 2 | 3207 | import pytest
from pontoon.base.forms import LocalePermsForm, ProjectLocalePermsForm
from pontoon.base.models import PermissionChangelog
@pytest.mark.django_db
def test_locale_perms_form_log_no_changes(user_a, locale_a):
    """Saving the locale perms form with empty groups logs no permission changes."""
    form = LocalePermsForm(
        {"translators": [], "managers": []}, instance=locale_a, user=user_a
    )

    assert form.is_valid()

    form.save()

    assert not PermissionChangelog.objects.exists()
@pytest.mark.django_db
def test_project_locale_perms_form_log_no_changes(user_a, locale_a):
    """Saving the project-locale perms form with no translators logs nothing."""
    form = ProjectLocalePermsForm(
        {"translators": []},
        instance=locale_a,
        user=user_a,
    )

    assert form.is_valid()

    form.save()

    assert not PermissionChangelog.objects.exists()
@pytest.mark.django_db
def test_locale_perms_form_log(
    locale_a, user_a, user_b, user_c, assert_permissionchangelog
):
    """Adding/removing users via the locale perms form writes changelog entries."""
    # Add new users to groups
    form = LocalePermsForm(
        {"translators": [user_c.pk], "managers": [user_b.pk]},
        instance=locale_a,
        user=user_a,
    )

    assert form.is_valid()
    form.save()

    # Default ordering: the two entries created by the save above.
    changelog_entry0, changelog_entry1 = PermissionChangelog.objects.all()

    assert_permissionchangelog(
        changelog_entry0,
        PermissionChangelog.ActionType.ADDED,
        user_a,
        user_c,
        locale_a.translators_group,
    )

    assert_permissionchangelog(
        changelog_entry1,
        PermissionChangelog.ActionType.ADDED,
        user_a,
        user_b,
        locale_a.managers_group,
    )

    # Remove items from groups
    form = LocalePermsForm(
        {"translators": [], "managers": []},
        instance=locale_a,
        user=user_a,
    )

    assert form.is_valid()
    form.save()

    # Newest first via order_by("-pk"); note the deliberately swapped names so
    # that entry2 is the translator removal and entry3 the manager removal.
    changelog_entry3, changelog_entry2 = PermissionChangelog.objects.order_by("-pk")[:2]

    assert_permissionchangelog(
        changelog_entry2,
        PermissionChangelog.ActionType.REMOVED,
        user_a,
        user_c,
        locale_a.translators_group,
    )

    assert_permissionchangelog(
        changelog_entry3,
        PermissionChangelog.ActionType.REMOVED,
        user_a,
        user_b,
        locale_a.managers_group,
    )
@pytest.mark.django_db
def test_project_locale_perms_form_log(
    locale_a, user_a, user_b, user_c, assert_permissionchangelog
):
    """Project-locale perms form logs translator additions and removals."""
    # Add new users to groups
    form = ProjectLocalePermsForm(
        {"translators": [user_c.pk]},
        instance=locale_a,
        user=user_a,
    )

    assert form.is_valid()
    form.save()

    (changelog_entry0,) = PermissionChangelog.objects.all()
    assert_permissionchangelog(
        changelog_entry0,
        PermissionChangelog.ActionType.ADDED,
        user_a,
        user_c,
        locale_a.translators_group,
    )

    # Remove items from groups
    form = ProjectLocalePermsForm(
        {"translators": [], "managers": []},
        instance=locale_a,
        user=user_a,
    )

    assert form.is_valid()
    form.save()

    # Newest entry only.
    (changelog_entry1,) = PermissionChangelog.objects.order_by("-pk")[:1]

    assert_permissionchangelog(
        changelog_entry1,
        PermissionChangelog.ActionType.REMOVED,
        user_a,
        user_c,
        locale_a.translators_group,
    )
| bsd-3-clause | eb84842b8626177c2d2c8eb732aa80fb | 22.580882 | 88 | 0.628313 | 3.547566 | false | true | false | false |
mozilla/pontoon | pontoon/sync/migrations/0001_squashed_0004_add-sync-system-user.py | 2 | 3336 | # Generated by Django 1.11.28 on 2020-03-08 19:30
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
("base", "0002_auto_20200322_1821"),
]
operations = [
migrations.CreateModel(
name="ProjectSyncLog",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("start_time", models.DateTimeField(default=django.utils.timezone.now)),
(
"project",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="base.Project"
),
),
],
),
migrations.CreateModel(
name="RepositorySyncLog",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("start_time", models.DateTimeField(default=django.utils.timezone.now)),
("end_time", models.DateTimeField(blank=True, default=None, null=True)),
(
"project_sync_log",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="repository_sync_logs",
to="sync.ProjectSyncLog",
),
),
(
"repository",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="base.Repository",
),
),
],
),
migrations.CreateModel(
name="SyncLog",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("start_time", models.DateTimeField(default=django.utils.timezone.now)),
],
),
migrations.AddField(
model_name="projectsynclog",
name="sync_log",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="project_sync_logs",
to="sync.SyncLog",
),
),
migrations.AddField(
model_name="projectsynclog",
name="skipped",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="projectsynclog",
name="skipped_end_time",
field=models.DateTimeField(blank=True, default=None, null=True),
),
]
| bsd-3-clause | 8803da64a725bfb3dd006b66bbaad37b | 31.705882 | 88 | 0.420564 | 5.42439 | false | false | false | false |
mozilla/pontoon | pontoon/sync/tests/test_tasks.py | 2 | 15685 | from unittest.mock import ANY, patch, PropertyMock
import pytest
from pontoon.base.models import ChangedEntityLocale, Locale, Project, Repository
from pontoon.base.tests import (
ChangedEntityLocaleFactory,
CONTAINS,
ProjectFactory,
RepositoryFactory,
TestCase,
TranslationFactory,
)
from pontoon.base.utils import aware_datetime
from pontoon.sync.core import serial_task
from pontoon.sync.models import ProjectSyncLog, RepositorySyncLog, SyncLog
from pontoon.sync.tasks import sync_project, sync_translations
from pontoon.sync.tests import (
FAKE_CHECKOUT_PATH,
FakeCheckoutTestCase,
ProjectSyncLogFactory,
SyncLogFactory,
)
class SyncProjectTests(TestCase):
def setUp(self):
super().setUp()
self.db_project = ProjectFactory.create()
self.repository = self.db_project.repositories.all()[0]
self.sync_log = SyncLogFactory.create()
self.mock_pull_source_repo_changes = self.patch(
"pontoon.sync.tasks.pull_source_repo_changes", return_value=True
)
self.mock_project_needs_sync = self.patch_object(
Project, "needs_sync", new_callable=PropertyMock, return_value=True
)
self.mock_sync_translations = self.patch("pontoon.sync.tasks.sync_translations")
self.mock_update_originals = self.patch(
"pontoon.sync.tasks.update_originals", return_value=[[], [], [], []]
)
self.mock_source_directory_path = self.patch(
"pontoon.sync.vcs.models.VCSProject.source_directory_path",
return_value=self.repository.checkout_path,
)
def test_missing_project(self):
"""
If a project with the given PK doesn't exist, log it and exit.
"""
with patch("pontoon.sync.tasks.log") as mock_log:
with pytest.raises(Project.DoesNotExist):
sync_project(99999, self.sync_log.pk)
mock_log.error.assert_called_with(CONTAINS("99999"))
assert not self.mock_update_originals.called
def test_missing_log(self):
"""
If a log with the given PK doesn't exist, log it and exit.
"""
with patch("pontoon.sync.tasks.log") as mock_log:
with pytest.raises(SyncLog.DoesNotExist):
sync_project(self.db_project.pk, 99999)
mock_log.error.assert_called_with(CONTAINS("99999"))
assert not self.mock_update_originals.called
def test_db_changed_no_repo_changed(self):
"""
If the database has changes and VCS doesn't, skip syncing
resources, but sync translations.
"""
self.mock_pull_source_repo_changes.return_value = False
self.mock_project_needs_sync.return_value = True
with patch("pontoon.sync.tasks.log") as mock_log:
sync_project(self.db_project.pk, self.sync_log.pk)
sync_project(self.db_project.pk, self.sync_log.pk)
assert not self.mock_update_originals.called
mock_log.info.assert_called_with(
CONTAINS("Skipping syncing sources", self.db_project.slug)
)
def test_no_changes_skip(self):
"""
If the database and the source repository both have no
changes, and project has a single repository, skip sync.
"""
self.mock_pull_source_repo_changes.return_value = False
self.mock_project_needs_sync.return_value = False
with patch("pontoon.sync.tasks.log") as mock_log:
sync_project(self.db_project.pk, self.sync_log.pk)
assert not self.mock_update_originals.called
mock_log.info.assert_called_with(
CONTAINS("Skipping project", self.db_project.slug)
)
# When skipping, mark the project log properly.
assert ProjectSyncLog.objects.get(project=self.db_project).skipped
def test_no_changes_force(self):
"""
If the database and VCS both have no changes, but force is true,
do not skip syncing resources.
"""
self.mock_pull_source_repo_changes.return_value = False
self.mock_project_needs_sync.return_value = False
sync_project(self.db_project.pk, self.sync_log.pk, force=True)
assert self.mock_update_originals.called
def test_no_pull(self):
"""
Don't call repo.pull if command.no_pull is True.
"""
sync_project(self.db_project.pk, self.sync_log.pk, no_pull=True)
assert not self.mock_pull_source_repo_changes.called
def test_create_project_log(self):
assert not ProjectSyncLog.objects.exists()
sync_project(self.db_project.pk, self.sync_log.pk)
log = ProjectSyncLog.objects.get(project=self.db_project)
assert self.mock_sync_translations.call_args[0][1].pk == log.pk
class SyncTranslationsTests(FakeCheckoutTestCase):
def setUp(self):
super().setUp()
self.project_sync_log = ProjectSyncLogFactory.create()
self.mock_pull_locale_repo_changes = self.patch(
"pontoon.sync.tasks.pull_locale_repo_changes", return_value=[True, {}]
)
self.mock_commit_changes = self.patch("pontoon.sync.tasks.commit_changes")
self.mock_pretranslate = self.patch("pontoon.sync.tasks.pretranslate")
self.mock_repo_checkout_path = self.patch_object(
Repository,
"checkout_path",
new_callable=PropertyMock,
return_value=FAKE_CHECKOUT_PATH,
)
def test_clear_changed_entities(self):
"""
Delete all ChangedEntityLocale objects for the project created
before the sync started after handling it.
"""
self.now = aware_datetime(1970, 1, 2)
self.mock_pull_locale_repo_changes.return_value = [
True,
{self.repository.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
changed1, changed2, changed_after = ChangedEntityLocaleFactory.create_batch(
3,
locale=self.translated_locale,
entity__resource=self.main_db_resource,
when=aware_datetime(1970, 1, 1),
)
changed_after.when = aware_datetime(1970, 1, 3)
changed_after.save()
sync_translations(self.db_project, self.project_sync_log, self.now, True)
with pytest.raises(ChangedEntityLocale.DoesNotExist):
changed1.refresh_from_db()
with pytest.raises(ChangedEntityLocale.DoesNotExist):
changed2.refresh_from_db()
changed_after.refresh_from_db() # Should not raise
def test_no_commit(self):
"""Don't call commit_changes if command.no_commit is True."""
self.mock_pull_locale_repo_changes.return_value = [
True,
{self.repository.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
sync_translations(
self.db_project, self.project_sync_log, self.now, True, no_commit=True
)
assert not self.mock_commit_changes.called
def test_readonly_locales(self):
"""Don't call commit_changes for locales in read-only mode."""
project_locale = self.translated_locale.project_locale.get(
project=self.db_project,
)
project_locale.readonly = True
project_locale.save()
self.mock_pull_locale_repo_changes.return_value = [
True,
{
self.repository.pk: Locale.objects.filter(
pk=self.translated_locale.pk,
)
},
]
sync_translations(
self.db_project,
self.project_sync_log,
self.now,
True,
no_commit=False,
)
assert not self.mock_commit_changes.called
def test_remove_duplicate_approvals(self):
"""
Ensure that duplicate approvals are removed.
"""
# Trigger creation of new approved translation.
self.main_vcs_translation.strings[None] = "New Translated String"
self.main_vcs_translation.fuzzy = False
self.mock_pull_locale_repo_changes.return_value = [
True,
{self.repository.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
# Translation approved after the sync started simulates the race
# where duplicate translations occur.
duplicate_translation = TranslationFactory.create(
entity=self.main_db_entity,
locale=self.translated_locale,
string="Other New Translated String",
approved=True,
approved_date=aware_datetime(1970, 1, 3),
)
ChangedEntityLocale.objects.filter(entity=self.main_db_entity).delete()
with patch("pontoon.sync.tasks.VCSProject", return_value=self.vcs_project):
sync_translations(self.db_project, self.project_sync_log, self.now, True)
# Only one translation should be approved: the duplicate_translation.
assert self.main_db_entity.translation_set.filter(approved=True).count() == 1
new_translation = self.main_db_entity.translation_set.get(
string="New Translated String"
)
assert not new_translation.approved
assert new_translation.approved_date is None
duplicate_translation.refresh_from_db()
assert duplicate_translation.approved
assert duplicate_translation.approved_date == aware_datetime(1970, 1, 3)
def test_create_repository_log(self):
assert not RepositorySyncLog.objects.exists()
repo = RepositoryFactory.create()
self.db_project.repositories.set([repo])
self.db_project.save()
self.mock_pull_locale_repo_changes.return_value = [
True,
{repo.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
sync_translations(self.db_project, self.project_sync_log, self.now, True)
log = RepositorySyncLog.objects.get(repository=repo.pk)
assert log.repository == repo
def test_no_pretranslation(self):
"""
Ensure that pretranslation isn't called if pretranslation not enabled
or no new Entity, Locale or TranslatedResource is created.
"""
self.mock_pull_locale_repo_changes.return_value = [
True,
{self.repository.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
sync_translations(
self.db_project,
self.project_sync_log,
self.now,
True,
[],
[],
[],
["new_entity"],
)
# Pretranslation is not enabled
assert not self.mock_pretranslate.called
self.db_project.pretranslation_enabled = True
self.db_project.save()
with self.patch(
"pontoon.sync.tasks.update_translated_resources", return_value=False
):
sync_translations(self.db_project, self.project_sync_log, self.now, True)
# No new Entity, Locale or TranslatedResource
assert not self.mock_pretranslate.called
def test_new_entities_pretranslation(self):
"""
Test if pretranslation is called for newly added entities.
"""
self.db_project.pretranslation_enabled = True
self.db_project.save()
self.mock_pull_locale_repo_changes.return_value = [
True,
{self.repository.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
all_locales = list(self.db_project.locales.values_list("pk", flat=True))
with self.patch(
"pontoon.sync.tasks.update_translated_resources", return_value=False
):
sync_translations(
self.db_project,
self.project_sync_log,
self.now,
True,
[],
[],
[],
["new_entity"],
)
assert self.mock_pretranslate.called
assert self.mock_pretranslate.call_args[1]["entities"] == ["new_entity"]
assert list(self.mock_pretranslate.call_args[1]["locales"]) == all_locales
def test_new_translated_resource_pretranslation(self):
"""
Test if pretranslation is called for locales with newly added TranslatedResource.
"""
self.db_project.pretranslation_enabled = True
self.db_project.save()
self.mock_pull_locale_repo_changes.return_value = [
True,
{self.repository.pk: Locale.objects.filter(pk=self.translated_locale.pk)},
]
sync_translations(
self.db_project,
self.project_sync_log,
self.now,
True,
[],
[],
[],
["new_entity"],
)
assert self.mock_pretranslate.called
assert self.mock_pretranslate.call_args[1]["locales"] == [
self.translated_locale.pk
]
# Ensure that pretranslate is called only once for the locale.
assert self.mock_pretranslate.call_args[1].get("entities") is None
class UserError(Exception):
pass
class SyncExecutionTests(TestCase):
def test_serial_task(self):
"""
Test if sync will create lock in cache and release this after task is done.
"""
@serial_task(100)
def test_task(self, call_subtask):
if call_subtask:
return subtask()
def subtask():
return test_task.delay()
first_call = test_task.delay(call_subtask=True)
second_call = first_call.get()
assert first_call.successful()
assert second_call.failed()
with pytest.raises(RuntimeError):
second_call.get()
def test_release_lock_after_timeout(self):
"""
Tests if lock is released after specified timeout.
"""
with patch("pontoon.sync.core.cache") as mock_cache:
@serial_task(3)
def timeout_task(self):
return 42
first_call = timeout_task.delay()
assert first_call.successful()
assert first_call.get(), 42
mock_cache.add.assert_called_with(ANY, ANY, timeout=3)
def test_parametrized_serial_task(self):
"""
Serial task should be able to work simultaneously for different parameters.
"""
with patch("pontoon.sync.core.cache") as mock_cache:
@serial_task(3, lock_key="param={0}")
def task_lock_key(self, param):
return param
first_call = task_lock_key.delay(42)
second_call = task_lock_key.delay(24)
assert first_call.successful()
assert second_call.successful()
assert first_call.get() == 42
assert second_call.get() == 24
mock_cache.add.assert_any_call(
CONTAINS("task_lock_key[param=42]"), ANY, timeout=3
)
mock_cache.add.assert_any_call(
CONTAINS("task_lock_key[param=24]"), ANY, timeout=3
)
def test_exception_during_sync(self):
"""
Any error during performing synchronization should release the lock.
"""
@serial_task(100)
def exception_task(self):
raise UserError
first_call = exception_task.delay()
second_call = exception_task.delay()
assert first_call.failed()
assert second_call.failed()
with pytest.raises(UserError):
first_call.get()
with pytest.raises(UserError):
second_call.get()
| bsd-3-clause | 9de6f85f1e54da6b0829eaa92468fd89 | 34.089485 | 89 | 0.607906 | 4.042526 | false | true | false | false |
mozilla/pontoon | pontoon/sync/tests/test_models.py | 2 | 6197 | from pontoon.base.tests import ProjectFactory, RepositoryFactory, TestCase
from pontoon.base.utils import aware_datetime
from pontoon.sync.models import ProjectSyncLog
from pontoon.sync.tests import (
ProjectSyncLogFactory,
RepositorySyncLogFactory,
SyncLogFactory,
)
class SyncLogTests(TestCase):
def test_end_time_unfinished(self):
"""If a job is unfinished, it's end_time is None."""
sync_log = SyncLogFactory.create()
# Create repo without existing log so sync is unfinished.
repo = RepositoryFactory.create()
ProjectSyncLogFactory.create(sync_log=sync_log, project__repositories=[repo])
assert sync_log.end_time is None
def test_end_time(self):
"""
Return the latest end time among repo sync logs for this log.
"""
sync_log = SyncLogFactory.create()
RepositorySyncLogFactory.create(
project_sync_log__sync_log=sync_log, end_time=aware_datetime(2015, 1, 1)
)
RepositorySyncLogFactory.create(
project_sync_log__sync_log=sync_log, end_time=aware_datetime(2015, 1, 2)
)
assert sync_log.end_time == aware_datetime(2015, 1, 2)
def test_end_time_skipped(self):
"""Include skipped repos in finding the latest end time."""
sync_log = SyncLogFactory.create()
RepositorySyncLogFactory.create(
project_sync_log__sync_log=sync_log, end_time=aware_datetime(2015, 1, 1)
)
ProjectSyncLogFactory.create(
sync_log=sync_log, skipped=True, skipped_end_time=aware_datetime(2015, 1, 2)
)
ProjectSyncLogFactory.create(
sync_log=sync_log, skipped=True, skipped_end_time=aware_datetime(2015, 1, 4)
)
assert sync_log.end_time == aware_datetime(2015, 1, 4)
def test_finished(self):
sync_log = SyncLogFactory.create()
# Create repo without existing log so sync is unfinished.
repo = RepositoryFactory.create()
project_sync_log = ProjectSyncLogFactory.create(
sync_log=sync_log, project__repositories=[repo]
)
# Sync isn't finished until all repos are finished.
assert not sync_log.finished
repo_log = RepositorySyncLogFactory.create(
repository=repo,
project_sync_log=project_sync_log,
start_time=aware_datetime(2015, 1, 1),
end_time=None,
)
del sync_log.finished
assert not sync_log.finished
repo_log.end_time = aware_datetime(2015, 1, 2)
repo_log.save()
del sync_log.finished
assert sync_log.finished
class ProjectSyncLogTests(TestCase):
def test_end_time_unfinished(self):
"""If a sync is unfinished, it's end_time is None."""
repo = RepositoryFactory.create()
project_sync_log = ProjectSyncLogFactory.create(project__repositories=[repo])
assert project_sync_log.end_time is None
def test_end_time(self):
"""
Return the latest end time among repo sync logs for this log.
"""
project = ProjectFactory.create(repositories=[])
source_repo, repo1, repo2 = RepositoryFactory.create_batch(3, project=project)
project_sync_log = ProjectSyncLogFactory.create(project=project)
RepositorySyncLogFactory.create(
project_sync_log=project_sync_log,
repository=repo1,
end_time=aware_datetime(2015, 1, 1),
)
assert project_sync_log.end_time == aware_datetime(2015, 1, 1)
def test_end_time_skipped(self):
"""
If a sync is skipped, it's end_time is self.skipped_end_time.
"""
repo = RepositoryFactory.create()
project_sync_log = ProjectSyncLogFactory.create(
project__repositories=[repo],
skipped=True,
skipped_end_time=aware_datetime(2015, 1, 1),
)
assert project_sync_log.end_time == aware_datetime(2015, 1, 1)
def test_status(self):
repo = RepositoryFactory.create()
project_sync_log = ProjectSyncLogFactory.create(
project__repositories=[repo], skipped=False
)
# Repos aren't finished, status should be in-progress.
assert project_sync_log.status == ProjectSyncLog.IN_PROGRESS
# Once repo is finished, status should be synced.
RepositorySyncLogFactory.create(
repository=repo,
project_sync_log=project_sync_log,
start_time=aware_datetime(2015, 1, 1),
end_time=aware_datetime(2015, 1, 1, 1),
)
del project_sync_log.finished
del project_sync_log.status
assert project_sync_log.status == ProjectSyncLog.SYNCED
# Skipped projects are just "skipped".
skipped_log = ProjectSyncLogFactory.create(
project__repositories=[repo],
skipped=True,
)
assert skipped_log.status == ProjectSyncLog.SKIPPED
def test_finished(self):
repo = RepositoryFactory.create()
project_sync_log = ProjectSyncLogFactory.create(project__repositories=[repo])
# Sync isn't finished until all repos are finished.
assert not project_sync_log.finished
repo_log = RepositorySyncLogFactory.create(
repository=repo,
project_sync_log=project_sync_log,
start_time=aware_datetime(2015, 1, 1),
end_time=None,
)
del project_sync_log.finished
assert not project_sync_log.finished
repo_log.end_time = aware_datetime(2015, 1, 2)
repo_log.save()
del project_sync_log.finished
assert project_sync_log.finished
def test_finished_skipped(self):
"""A skipped log is considered finished."""
skipped_log = ProjectSyncLogFactory.create(skipped=True)
assert skipped_log.finished
class RepositorySyncLogTests(TestCase):
def test_finished(self):
log = RepositorySyncLogFactory.create(end_time=None)
assert not log.finished
log.end_time = aware_datetime(2015, 1, 1)
log.save()
del log.finished
assert log.finished
| bsd-3-clause | d311577f45f247a0dd1d4d47d006033e | 33.237569 | 88 | 0.632726 | 4.016202 | false | true | false | false |
mozilla/pontoon | pontoon/base/migrations/0013_transvision_remove.py | 2 | 1201 | # Generated by Django 3.1.3 on 2021-02-03 14:15
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("base", "0012_auto_20201020_1830"),
]
operations = [
migrations.RemoveField(
model_name="locale",
name="transvision",
),
migrations.AlterField(
model_name="translation",
name="machinery_sources",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(
choices=[
("translation-memory", "Translation Memory"),
("google-translate", "Google Translate"),
("microsoft-translator", "Microsoft Translator"),
("systran-translate", "Systran Translate"),
("microsoft-terminology", "Microsoft"),
("caighdean", "Caighdean"),
],
max_length=30,
),
blank=True,
default=list,
size=None,
),
),
]
| bsd-3-clause | 2bf41ee295df6ecf4ee70bd19ba87237 | 30.605263 | 73 | 0.486261 | 4.983402 | false | false | false | false |
mozilla/pontoon | pontoon/db/migrations.py | 2 | 2050 | """
Django 1.10.5 doesn't support migrations/updates for fulltext fields, django-pg-fts isn't
actively maintained and current codebase is broken between various versions of Django.
Because of that I decided to implement our migrations with intent to drop it when django develops
its own solution.
"""
import copy
from django.db.migrations.operations.base import Operation
class BaseSQL(Operation):
"""
Allows to create parameterized sql migrations.
"""
forward_sql = None
backward_sql = None
sql_opts = {}
@property
def sql(self):
return self.forward_sql.format(**self.sql_opts)
@property
def reverse_sql(self):
return self.backward_sql.format(**self.sql_opts)
def __init__(self, **kwargs):
sql_opts = copy.copy(self.sql_opts)
sql_opts.update(kwargs)
self.sql_opts = sql_opts
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(self.sql)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(self.reverse_sql)
def state_forwards(self, app_label, state):
pass
class GINIndex(BaseSQL):
"""
RunIndex operations share some parts like e.g. drop of an index.
"""
forward_sql = """
CREATE INDEX {table}_{field}_{index_suffix} ON \"{table}\"
USING GIN({expression} {index_opts})
"""
backward_sql = """
DROP INDEX {table}_{field}_{index_suffix}
"""
sql_opts = {
"index_opts": "",
}
class MultiFieldTRGMIndex(GINIndex):
"""
Create a gin-based trigram index on a set of fields.
"""
sql_opts = {"index_opts": "", "index_suffix": "trigram_index"}
@property
def sql(self):
def index_field(field_name):
return f"UPPER({field_name}) gin_trgm_ops"
self.sql_opts["expression"] = ",".join(
map(index_field, self.sql_opts["from_fields"])
)
return self.forward_sql.format(**self.sql_opts)
| bsd-3-clause | bce3fd70c1248a9412ca069b7127c5f0 | 24.949367 | 97 | 0.631707 | 3.673835 | false | false | false | false |
mozilla/pontoon | pontoon/insights/migrations/0008_time_to_review_suggestions_data.py | 2 | 2852 | # Generated by Django 3.2.4 on 2021-11-30 19:56
from datetime import datetime, timedelta
from django.db import migrations
from django.db.models import F
def populate_time_to_review_suggestions(apps, schema_editor):
ActionLog = apps.get_model("actionlog", "ActionLog")
actions = (
ActionLog.objects.filter(
translation__entity__resource__project__system_project=False,
translation__entity__resource__project__visibility="public",
action_type__in=["translation:approved", "translation:rejected"],
)
.exclude(performed_by=F("translation__user"))
.exclude(performed_by__email="pontoon-sync@example.com")
.values(
"created_at",
"action_type",
"translation__locale",
date=F("translation__date"),
approved_date=F("translation__approved_date"),
rejected_date=F("translation__rejected_date"),
)
)
action_data = dict()
# Store action data in a dict for faster matching with snapshots
for action in actions:
key = (action["translation__locale"], action["created_at"].date())
data = action_data.setdefault(key, list())
if action["action_type"] == "translation:approved" and action["approved_date"]:
data.append(action["approved_date"] - action["date"])
elif (
action["action_type"] == "translation:rejected" and action["rejected_date"]
):
data.append(action["rejected_date"] - action["date"])
LocaleInsightsSnapshot = apps.get_model("insights", "LocaleInsightsSnapshot")
snapshots = LocaleInsightsSnapshot.objects.all()
snapshot_data = dict()
# Store snapshots in a map with the same key format we use for action data
for snapshot in snapshots:
key = (snapshot.locale_id, snapshot.created_at)
snapshot_data[key] = snapshot
# Update snapshots
for key, times_to_review in action_data.items():
if key in snapshot_data and len(times_to_review) > 0:
snapshot_data[key].time_to_review_suggestions = sum(
times_to_review, timedelta()
) / len(times_to_review)
LocaleInsightsSnapshot.objects.bulk_update(
snapshots, ["time_to_review_suggestions"], batch_size=1000
)
def reset_time_to_review_suggestions(apps, schema_editor):
LocaleInsightsSnapshot = apps.get_model("insights", "LocaleInsightsSnapshot")
LocaleInsightsSnapshot.objects.update(time_to_review_suggestions=timedelta())
class Migration(migrations.Migration):
dependencies = [
("insights", "0007_time_to_review_suggestions"),
]
operations = [
migrations.RunPython(
code=populate_time_to_review_suggestions,
reverse_code=reset_time_to_review_suggestions,
),
]
| bsd-3-clause | fa0e78b1572775f6abc474c616666102 | 34.209877 | 87 | 0.640954 | 3.994398 | false | false | false | false |
mozilla/pontoon | pontoon/allauth_urls.py | 2 | 1663 | """
Pontoon requires a very specific subset of functionality implemented in django allauth.
Because of concerns related to the security concerns it's a better to keep only selected
views and don't allow user to tamper with the state of an account.
"""
import importlib
from django.urls import path
from django.conf import settings
from django.contrib.auth import views
from allauth.account import views as account_views
from allauth.socialaccount import views as socialaccount_views, providers
if settings.AUTHENTICATION_METHOD == "django":
urlpatterns = [
path("standalone-login/", views.LoginView.as_view(), name="standalone_login"),
path(
"standalone-logout/",
views.LogoutView.as_view(),
name="standalone_logout",
),
]
else:
urlpatterns = [
path("login/", account_views.login, name="account_login"),
path("logout/", account_views.logout, name="account_logout"),
path("inactive/", account_views.account_inactive, name="account_inactive"),
path(
"social/login/cancelled/",
socialaccount_views.login_cancelled,
name="socialaccount_login_cancelled",
),
path(
"social/login/error/",
socialaccount_views.login_error,
name="socialaccount_login_error",
),
]
for provider in providers.registry.get_list():
try:
prov_mod = importlib.import_module(provider.get_package() + ".urls")
except ImportError:
continue
prov_urlpatterns = getattr(prov_mod, "urlpatterns", None)
if prov_urlpatterns:
urlpatterns += prov_urlpatterns
| bsd-3-clause | 0e421db930678ecb3f7ea280291ff98a | 33.645833 | 88 | 0.65905 | 4.319481 | false | false | false | false |
mozilla/pontoon | pontoon/checks/libraries/pontoon_db.py | 3 | 2637 | import html
import re
import bleach
from collections import defaultdict
from fluent.syntax import FluentParser, ast
from pontoon.sync.formats.ftl import localizable_entries
MAX_LENGTH_RE = re.compile(r"MAX_LENGTH:( *)(\d+)", re.MULTILINE)
parser = FluentParser()
def get_max_length(comment):
"""
Return max length value for an entity with MAX_LENTH.
"""
max_length = re.findall(MAX_LENGTH_RE, comment or "")
if max_length:
return int(max_length[0][1])
return None
def run_checks(entity, original, string):
"""
Group all checks related to the base UI that get stored in the DB
:arg pontoon.base.models.Entity entity: Source entity
:arg basestring original: an original string
:arg basestring string: a translation
"""
checks = defaultdict(list)
resource_ext = entity.resource.format
if resource_ext == "lang":
# Newlines are not allowed in .lang files (bug 1190754)
if "\n" in string:
checks["pErrors"].append("Newline characters are not allowed")
# Prevent translations exceeding the given length limit
max_length = get_max_length(entity.comment)
if max_length:
string_length = len(
html.unescape(bleach.clean(string, strip=True, tags=()))
)
if string_length > max_length:
checks["pErrors"].append("Translation too long")
# Bug 1599056: Original and translation must either both end in a newline,
# or none of them should.
if resource_ext == "po":
if original.endswith("\n") != string.endswith("\n"):
checks["pErrors"].append("Ending newline mismatch")
# Prevent empty translation submissions if not supported
if string == "" and not entity.resource.allows_empty_translations:
checks["pErrors"].append("Empty translations are not allowed")
# FTL checks
if resource_ext == "ftl" and string != "":
translation_ast = parser.parse_entry(string)
entity_ast = parser.parse_entry(entity.string)
# Parse error
if isinstance(translation_ast, ast.Junk):
checks["pErrors"].append(translation_ast.annotations[0].message)
# Not a localizable entry
elif not isinstance(translation_ast, localizable_entries):
checks["pErrors"].append(
"Translation needs to be a valid localizable entry"
)
# Message ID mismatch
elif entity_ast.id.name != translation_ast.id.name:
checks["pErrors"].append("Translation key needs to match source string key")
return checks
| bsd-3-clause | 2a775d343268f3a710518c2569749642 | 30.771084 | 88 | 0.64543 | 4.146226 | false | false | false | false |
mozilla/pontoon | pontoon/insights/migrations/0002_project_projectlocale.py | 2 | 3004 | # Generated by Django 3.1.3 on 2020-12-23 15:07
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("base", "0012_auto_20201020_1830"),
("insights", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="ProjectLocaleInsightsSnapshot",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("total_strings", models.PositiveIntegerField(default=0)),
("approved_strings", models.PositiveIntegerField(default=0)),
("fuzzy_strings", models.PositiveIntegerField(default=0)),
("strings_with_errors", models.PositiveIntegerField(default=0)),
("strings_with_warnings", models.PositiveIntegerField(default=0)),
("unreviewed_strings", models.PositiveIntegerField(default=0)),
("created_at", models.DateField(default=django.utils.timezone.now)),
("completion", models.FloatField()),
(
"project_locale",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="base.projectlocale",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="ProjectInsightsSnapshot",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("total_strings", models.PositiveIntegerField(default=0)),
("approved_strings", models.PositiveIntegerField(default=0)),
("fuzzy_strings", models.PositiveIntegerField(default=0)),
("strings_with_errors", models.PositiveIntegerField(default=0)),
("strings_with_warnings", models.PositiveIntegerField(default=0)),
("unreviewed_strings", models.PositiveIntegerField(default=0)),
("created_at", models.DateField(default=django.utils.timezone.now)),
("completion", models.FloatField()),
(
"project",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="base.project"
),
),
],
options={
"abstract": False,
},
),
]
| bsd-3-clause | 1a06d3f2570bc1eea5016502ffd6a1f4 | 37.025316 | 86 | 0.475033 | 5.442029 | false | false | false | false |
mozilla/pontoon | pontoon/sync/vcs/models.py | 2 | 32832 | """
Models for working with remote translation data stored in a VCS.
"""
import logging
import os
import shutil
import requests
from datetime import datetime
from itertools import chain
from pathlib import Path
from urllib.parse import urljoin, urlparse
from compare_locales.paths import (
ProjectFiles,
TOMLParser,
)
from django.utils import timezone
from django.utils.functional import cached_property
from pontoon.base import MOZILLA_REPOS
from pontoon.sync.exceptions import ParseError
from pontoon.sync.utils import (
is_hidden,
is_resource,
is_asymmetric_resource,
get_parent_directory,
uses_undercore_as_separator,
directory_contains_resources,
locale_directory_path,
locale_to_source_path,
source_to_locale_path,
)
from pontoon.sync.vcs.repositories import get_changed_files
log = logging.getLogger(__name__)
class DownloadTOMLParser(TOMLParser):
    """
    This wrapper is a workaround for the lack of the shared and persistent filesystem
    on Heroku workers.
    Related: https://bugzilla.mozilla.org/show_bug.cgi?id=1530988
    """

    def __init__(self, checkout_path, permalink_prefix, configuration_file):
        # Ensure a trailing separator so the replace()-based path surgery in
        # get_local_path/get_remote_path operates on whole path segments.
        self.checkout_path = os.path.join(checkout_path, "")
        self.permalink_prefix = permalink_prefix
        self.config_path = urlparse(permalink_prefix).path
        self.config_file = configuration_file

    def get_local_path(self, path):
        """Return the directory in which the config file should be stored."""
        local_path = path.replace(self.config_path, "")
        return os.path.join(self.checkout_path, local_path)

    def get_remote_path(self, path):
        """Construct the link to the remote resource based on the local path."""
        remote_config_path = path.replace(self.checkout_path, "")
        return urljoin(self.permalink_prefix, remote_config_path)

    def get_project_config(self, path):
        """Download the project config file and return its local path.

        The remote file is fetched and validated *before* the local file is
        written, so a failed download no longer leaves behind a truncated or
        empty cached config (the previous implementation opened the file for
        writing first).

        :raises requests.HTTPError: if the remote returns an error status.
        """
        local_path = Path(self.get_local_path(path))
        local_path.parent.mkdir(parents=True, exist_ok=True)
        remote_path = self.get_remote_path(path)
        # Bound the request so a hung server cannot block the sync worker
        # indefinitely (requests has no default timeout).
        config_file = requests.get(remote_path, timeout=30)
        config_file.raise_for_status()
        local_path.write_bytes(config_file.content)
        return str(local_path)

    def parse(self, path=None, env=None, ignore_missing_includes=True):
        """Download the config file before it gets parsed."""
        return super().parse(
            self.get_project_config(path or self.config_file),
            env,
            ignore_missing_includes,
        )
class MissingRepositoryPermalink(Exception):
    """
    Raised when a project uses project config files and
    its source repository doesn't have the permalink,
    which is required to download the config files
    (see VCSProject.__init__).
    """
class MissingSourceRepository(Exception):
    """
    Raised when project can't find the repository
    which contains source files
    (see VCSProject.changed_source_files).
    """
class MissingSourceDirectoryError(Exception):
    """Raised when sync can't find the source directory for the locales
    (see VCSProject.source_directory_path)."""
class MissingLocaleDirectoryError(IOError):
    """Raised when sync can't find the locale directory
    (see VCSProject.locale_directory_paths)."""
class VCSProject:
    """
    Container for project data that is stored on the filesystem and
    pulled from a remote VCS.
    """
    # Candidate directory names that may hold source strings, with a
    # likelihood score per name; used by source_directory_path() to pick
    # the most probable source directory (higher score wins).
    SOURCE_DIR_SCORES = {
        "templates": 3,
        "en-US": 2,
        "en-us": 2,
        "en_US": 2,
        "en_us": 2,
        "en": 1,
    }
    SOURCE_DIR_NAMES = SOURCE_DIR_SCORES.keys()
    def __init__(
        self,
        db_project,
        now=None,
        locales=None,
        repo_locales=None,
        added_paths=None,
        changed_paths=None,
        force=False,
    ):
        """
        Load resource paths from the given db_project and parse them
        for translation data.
        :param Project db_project:
            Project model instance for the project we're going to be
            reading files for.
        :param datetime.datetime now:
            Sync start time.
        :param list locales:
            List of Locale model instances for the locales that we want
            to parse. Defaults to parsing resources for all enabled
            locales on the project.
        :param dict repo_locales:
            A dict of repository PKs and their currently checked out locales
            (not neccessarily matching the ones stored in the DB).
        :param list added_paths:
            List of added source file paths
        :param list changed_paths:
            List of changed source file paths
        :param bool force:
            Scans all resources in repository
        :param VCSConfiguration configuration:
            Project configuration, provided by the optional configuration file.
        """
        self.db_project = db_project
        self.now = now
        self.locales = locales if locales is not None else db_project.locales.all()
        self.repo_locales = repo_locales
        self.added_paths = added_paths or []
        self.changed_paths = changed_paths or []
        self.force = force
        # Locales actually synced; populated lazily by the `resources` property.
        self.synced_locales = set()
        self.configuration = None
        if db_project.configuration_file:
            # Permalink is required to download project config files.
            if not db_project.source_repository.permalink_prefix:
                raise MissingRepositoryPermalink()
            self.configuration = VCSConfiguration(self)
    @cached_property
    def changed_files(self):
        # Returns None to signal "treat every file as changed"; callers
        # (e.g. `resources`) check for None explicitly.
        if self.force or (
            self.db_project.configuration_file and self.changed_config_files
        ):
            # All files are marked as changed
            return None
        if self.locales:
            return self.changed_locales_files
        else:
            # Source-only sync: only the changed (not removed) source files.
            return self.changed_source_files[0]
    @cached_property
    def changed_source_files(self):
        """
        Returns a tuple of changed and removed source files in the project:
        (changed_files, removed_files)
        """
        source_resources_repo = self.db_project.source_repository
        if not source_resources_repo:
            raise MissingSourceRepository(self.db_project)
        source_directory = self.source_directory_path
        last_revision = source_resources_repo.get_last_synced_revisions()
        modified_files, removed_files = get_changed_files(
            source_resources_repo.type, source_directory, last_revision
        )
        # Unify filesystem and data model file extensions
        if not self.configuration:
            modified_files = map(source_to_locale_path, modified_files)
            removed_files = map(source_to_locale_path, removed_files)
        if source_resources_repo.source_repo or not last_revision:
            def get_path(path):
                return (path, [])
        else:
            # Paths from a combined repo include the source directory
            # prefix; strip it so paths are relative to source_directory.
            relative_source_path = source_directory[
                len(source_resources_repo.checkout_path) :
            ].lstrip(os.sep)
            def get_path(path):
                return (path[len(relative_source_path) :].lstrip(os.sep), [])
        return dict(map(get_path, modified_files)), dict(map(get_path, removed_files))
    @cached_property
    def changed_locales_files(self):
        """
        Map of changed files and locales they were changed for.
        """
        files = {}
        # VCS changes
        repos = self.db_project.translation_repositories()
        if self.repo_locales:
            repos = repos.filter(pk__in=self.repo_locales.keys())
        for repo in repos:
            if repo.multi_locale:
                # One checkout per locale: collect changes per locale checkout.
                locales = (
                    self.repo_locales[repo.pk]
                    if self.repo_locales
                    else self.db_project.locales.all()
                )
                for locale in locales:
                    changed_files = get_changed_files(
                        repo.type,
                        repo.locale_checkout_path(locale),
                        repo.get_last_synced_revisions(locale.code),
                    )[0]
                    for path in changed_files:
                        files.setdefault(path, []).append(locale)
            else:
                changed_files = get_changed_files(
                    repo.type, repo.checkout_path, repo.get_last_synced_revisions()
                )[0]
                log.info(
                    "Changed files in {} repository, all: {}".format(
                        self.db_project, changed_files
                    )
                )
                # Include only relevant (localizable) files
                if self.configuration:
                    files = self.get_relevant_files_with_config(changed_files)
                else:
                    files = self.get_relevant_files_without_config(
                        changed_files, self.locale_path_locales(repo.checkout_path)
                    )
                log.info(
                    "Changed files in {} repository, relevant for enabled locales: {}".format(
                        self.db_project, files
                    )
                )
        # DB changes
        vcs = files
        db = self.db_project.changed_resources(self.now)
        # Merge the locale lists for paths changed in both VCS and the DB.
        for path in set(list(vcs.keys()) + list(db.keys())):
            if path in vcs and path in db:
                vcs[path] = set(list(vcs[path]) + list(db[path]))
            else:
                vcs[path] = vcs[path] if path in vcs else db[path]
        return files
    @cached_property
    def changed_config_files(self):
        """
        A set of the changed project config files.
        """
        config_files = {
            pc.path.replace(os.path.join(self.source_directory_path, ""), "")
            for pc in self.configuration.parsed_configuration.configs
        }
        changed_files = set(self.changed_source_files[0])
        return changed_files.intersection(config_files)
    def get_relevant_files_with_config(self, paths):
        """
        Check if given paths represent localizable files using project configuration.
        Return a dict of relative reference paths of such paths and corresponding Locale
        objects.
        """
        files = {}
        for locale in self.db_project.locales.all():
            for path in paths:
                absolute_path = os.path.join(self.source_directory_path, path)
                reference_path = self.configuration.reference_path(
                    locale, absolute_path
                )
                if reference_path:
                    relative_reference_path = reference_path[
                        len(self.source_directory_path) :
                    ].lstrip(os.sep)
                    files.setdefault(relative_reference_path, []).append(locale)
        return files
    def get_relevant_files_without_config(self, paths, locale_path_locales):
        """
        Check if given paths represent localizable files by matching them against locale
        repository paths. Return a dict of relative reference paths of such paths and
        corresponding Locale objects.
        """
        files = {}
        locale_paths = locale_path_locales.keys()
        for path in paths:
            if is_hidden(path):
                continue
            for locale_path in locale_paths:
                if path.startswith(locale_path):
                    locale = locale_path_locales[locale_path]
                    # Strip the locale directory prefix from the path.
                    path = path[len(locale_path) :].lstrip(os.sep)
                    files.setdefault(path, []).append(locale)
                    break
        return files
    def locale_path_locales(self, repo_checkout_path):
        """
        A map of relative locale directory paths and their respective locales.
        """
        locale_path_locales = {}
        for locale in self.db_project.locales.all():
            locale_directory = self.locale_directory_paths[locale.code]
            path = locale_directory[len(repo_checkout_path) :].lstrip(os.sep)
            path = os.path.join(path, "")  # Ensure the path ends with os.sep
            locale_path_locales[path] = locale
        return locale_path_locales
    @cached_property
    def locale_directory_paths(self):
        """
        A map of locale codes and their absolute directory paths.
        Create locale directory, if not in repository yet.
        """
        locale_directory_paths = {}
        parent_directories = set()
        for locale in self.locales:
            try:
                if self.configuration:
                    locale_directory_paths[locale.code] = self.configuration.l10n_base
                else:
                    locale_directory_paths[locale.code] = locale_directory_path(
                        self.checkout_path,
                        locale.code,
                        parent_directories,
                    )
                parent_directory = get_parent_directory(
                    locale_directory_paths[locale.code]
                )
            except OSError:
                # Locale directory doesn't exist yet: create it next to the
                # source directory (only possible for single-locale repos).
                if not self.db_project.has_multi_locale_repositories:
                    source_directory = self.source_directory_path
                    parent_directory = get_parent_directory(source_directory)
                    locale_code = locale.code
                    if uses_undercore_as_separator(parent_directory):
                        locale_code = locale_code.replace("-", "_")
                    locale_directory = os.path.join(parent_directory, locale_code)
                    # For asymmetric formats, create empty folder
                    if is_asymmetric_resource(next(self.relative_resource_paths())):
                        os.makedirs(locale_directory)
                    # For other formats, copy resources from source directory
                    else:
                        shutil.copytree(source_directory, locale_directory)
                        for root, dirnames, filenames in os.walk(locale_directory):
                            for filename in filenames:
                                path = os.path.join(root, filename)
                                if is_resource(filename):
                                    os.rename(path, source_to_locale_path(path))
                                else:
                                    os.remove(path)
                    locale_directory_paths[locale.code] = locale_directory
                else:
                    raise MissingLocaleDirectoryError(
                        f"Directory for locale `{locale.code}` not found"
                    )
            parent_directories.add(parent_directory)
        return locale_directory_paths
    @cached_property
    def resources(self):
        """
        Lazy-loaded mapping of relative paths -> VCSResources that need to be synced:
        * changed in repository
        * changed in Pontoon DB
        * corresponding source file added
        * corresponding source file changed
        * all paths relevant for newly enabled (unsynced) locales
        Waiting until first access both avoids unnecessary file reads
        and allows tests that don't need to touch the resources to run
        with less mocking.
        """
        resources = {}
        log.info(
            "Changed files in {} repository and Pontoon, relevant for enabled locales: {}".format(
                self.db_project, self.changed_files
            )
        )
        for path in self.relative_resource_paths():
            # Syncing translations
            if self.locales:
                # Copy list instead of cloning
                locales = list(self.db_project.unsynced_locales)
                # changed_files is None when everything must be re-synced
                # (force / changed config); see changed_files above.
                if self.changed_files is not None and (
                    (not self.changed_files or path not in self.changed_files)
                    and path not in self.added_paths
                    and path not in self.changed_paths
                ):
                    if not locales:
                        log.debug(f"Skipping unchanged file: {path}")
                        continue
                else:
                    if (
                        self.changed_files is None
                        or path in self.added_paths
                        or path in self.changed_paths
                    ):
                        locales += self.locales
                    else:
                        locales += self.changed_files[path]
            # Syncing resources
            else:
                if self.changed_files is not None and path not in self.changed_files:
                    log.debug(f"Skipping unchanged resource file: {path}")
                    continue
                locales = []
            # Restrict to locales this sync run is actually processing.
            locales = {l for l in locales if l in self.locales}
            self.synced_locales.update(locales)
            log.debug(
                "Detected resource file {} for {}".format(
                    path, ",".join([l.code for l in locales]) or "source"
                )
            )
            try:
                resources[path] = VCSResource(self, path, locales=locales)
            except ParseError as err:
                log.error(
                    "Skipping resource {path} due to ParseError: {err}".format(
                        path=path, err=err
                    )
                )
        log.info(
            "Relative paths in {} that need to be synced: {}".format(
                self.db_project, resources.keys()
            )
        )
        return resources
    @property
    def entities(self):
        # Flat iterator over all entities of all resources to be synced.
        return chain.from_iterable(
            resource.entities.values() for resource in self.resources.values()
        )
    @property
    def checkout_path(self):
        return self.db_project.checkout_path
    @cached_property
    def source_directory_path(self):
        """
        Path to the directory where source strings are stored.
        Paths are identified using a scoring system; more likely
        directory names get higher scores, as do directories with
        formats that only used for source strings.
        """
        source_repository = self.db_project.source_repository
        # If project configuration provided, files could be stored in multiple
        # directories, so we just use the source repository checkout path
        if self.configuration:
            return source_repository.checkout_path
        # If source repository explicitly marked
        if source_repository.source_repo:
            return source_repository.checkout_path
        possible_sources = []
        for root, dirnames, filenames in os.walk(self.checkout_path):
            for dirname in dirnames:
                if dirname in self.SOURCE_DIR_NAMES:
                    score = self.SOURCE_DIR_SCORES[dirname]
                    # Ensure the matched directory contains resources.
                    directory_path = os.path.join(root, dirname)
                    if directory_contains_resources(directory_path):
                        # Extra points for source resources!
                        if directory_contains_resources(
                            directory_path, source_only=True
                        ):
                            score += 3
                        possible_sources.append((directory_path, score))
        if possible_sources:
            return max(possible_sources, key=lambda s: s[1])[0]
        else:
            raise MissingSourceDirectoryError(
                f"No source directory found for project {self.db_project.slug}"
            )
    def relative_resource_paths(self):
        """
        List of all source resource paths, relative to source_directory_path.
        """
        if self.configuration:
            paths = self.resource_paths_with_config()
        else:
            paths = self.resource_paths_without_config()
        for path in paths:
            if not self.configuration:
                # Normalize source extensions (e.g. templates) to locale ones.
                path = source_to_locale_path(path)
            yield os.path.relpath(path, self.source_directory_path)
    def resource_paths_with_config(self):
        """
        List of absolute paths for all supported source resources
        as specified through project configuration.
        """
        path = self.source_directory_path
        project_files = self.configuration.get_or_set_project_files(None)
        for root, dirnames, filenames in os.walk(path):
            if is_hidden(root):
                continue
            for filename in filenames:
                absolute_path = os.path.join(root, filename)
                if project_files.match(absolute_path):
                    yield absolute_path
    def resource_paths_without_config(self):
        """
        List of absolute paths for all supported source resources
        found within the given path.
        """
        path = self.source_directory_path
        for root, dirnames, filenames in os.walk(path):
            if is_hidden(root):
                continue
            # Ignore certain files in Mozilla repositories.
            if self.db_project.repository_url in MOZILLA_REPOS:
                filenames = [
                    f for f in filenames if not f.endswith("region.properties")
                ]
            for filename in filenames:
                if is_resource(filename):
                    yield os.path.join(root, filename)
class VCSConfiguration:
    """
    Container for the project configuration, provided by the optional
    configuration file.
    For more information, see:
    https://moz-l10n-config.readthedocs.io/en/latest/fileformat.html.
    """
    def __init__(self, vcs_project):
        self.vcs_project = vcs_project
        self.configuration_file = vcs_project.db_project.configuration_file
        # Cache of ProjectFiles instances, keyed by locale code
        # (see get_or_set_project_files).
        self.project_files = {}
    @cached_property
    def l10n_base(self):
        """
        If project configuration provided, files could be stored in multiple
        directories, so we just use the translation repository checkout path
        """
        return self.vcs_project.db_project.translation_repositories()[0].checkout_path
    @cached_property
    def parsed_configuration(self):
        """Return parsed project configuration file."""
        # DownloadTOMLParser fetches the remote config file(s) before parsing;
        # see its docstring for why (Heroku workers lack a shared filesystem).
        return DownloadTOMLParser(
            self.vcs_project.db_project.source_repository.checkout_path,
            self.vcs_project.db_project.source_repository.permalink_prefix,
            self.configuration_file,
        ).parse(env={"l10n_base": self.l10n_base})
    def add_locale(self, locale_code):
        """
        Add new locale to project configuration.
        """
        locales = self.parsed_configuration.locales or []
        locales.append(locale_code)
        self.parsed_configuration.set_locales(locales)
        # NOTE: the triple-quoted block below is a no-op string expression
        # that intentionally serves as a TODO comment; see its text.
        """
        TODO: For now we don't make changes to the configuration file to
        avoid committing it to the VCS. The pytoml serializer messes with the
        file layout (indents and newlines) pretty badly. We should fix the
        serializer and replace the content of this method with the following
        code:
        # Update configuration file
        with open(self.configuration_path, 'r+b') as f:
            data = pytoml.load(f)
            data['locales'].append(locale_code)
            f.seek(0)
            f.write(pytoml.dumps(data, sort_keys=True))
            f.truncate()
        # Invalidate cached parsed configuration
        del self.__dict__['parsed_configuration']
        # Commit configuration file to VCS
        commit_message = 'Update configuration file'
        commit_author = User(
            first_name=settings.VCS_SYNC_NAME,
            email=settings.VCS_SYNC_EMAIL,
        )
        repo = self.vcs_project.db_project.source_repository
        repo.commit(commit_message, commit_author, repo.checkout_path)
        """
    def get_or_set_project_files(self, locale_code):
        """
        Get or set project files for the given locale code. This approach
        allows us to cache the files for later use.
        Also, make sure that the requested locale_code is available in the
        configuration file.
        """
        if (
            locale_code is not None
            and locale_code not in self.parsed_configuration.all_locales
        ):
            self.add_locale(locale_code)
        return self.project_files.setdefault(
            locale_code,
            ProjectFiles(locale_code, [self.parsed_configuration]),
        )
    def l10n_path(self, locale, reference_path):
        """
        Return l10n path for the given locale and reference path.
        Returns None if the path doesn't match the configuration.
        """
        project_files = self.get_or_set_project_files(locale.code)
        # match() returns a tuple whose first element is the l10n path.
        m = project_files.match(reference_path)
        return m[0] if m is not None else None
    def reference_path(self, locale, l10n_path):
        """
        Return reference path for the given locale and l10n path.
        Returns None if the path doesn't match the configuration.
        """
        project_files = self.get_or_set_project_files(locale.code)
        # match() returns a tuple whose second element is the reference path.
        m = project_files.match(l10n_path)
        return m[1] if m is not None else None
    def locale_resources(self, locale):
        """
        Return a list of Resource instances, which need to be enabled for the
        given locale.
        """
        resources = []
        project_files = self.get_or_set_project_files(locale.code)
        for resource in self.vcs_project.db_project.resources.all():
            absolute_resource_path = os.path.join(
                self.vcs_project.source_directory_path,
                resource.path,
            )
            if project_files.match(absolute_resource_path):
                resources.append(resource)
        return resources
class VCSResource:
    """Represents a single resource across multiple locales."""
    def __init__(self, vcs_project, path, locales=None):
        """
        Load the resource file for each enabled locale and store its
        translations in VCSEntity instances.

        :param VCSProject vcs_project: parent project.
        :param str path: resource path relative to the source directory.
        :param locales: locales to load translations for; empty/None for
            a source-only load.
        """
        from pontoon.base.models import Locale
        from pontoon.sync import formats  # Avoid circular import.
        self.vcs_project = vcs_project
        self.path = path
        self.locales = locales or []
        # Parsed resource file per locale; filled below.
        self.files = {}
        # VCSEntity instances keyed by entity key; filled below.
        self.entities = {}
        # Create entities using resources from the source directory,
        source_resource_path = os.path.join(
            vcs_project.source_directory_path, self.path
        )
        source_resource_path = locale_to_source_path(source_resource_path)
        source_resource_file = formats.parse(
            source_resource_path, locale=Locale.objects.get(code="en-US")
        )
        for index, translation in enumerate(source_resource_file.translations):
            vcs_entity = VCSEntity(
                resource=self,
                key=translation.key,
                context=translation.context,
                string=translation.source_string,
                string_plural=translation.source_string_plural,
                comments=translation.comments,
                group_comments=(
                    translation.group_comments
                    if hasattr(translation, "group_comments")
                    else None
                ),
                resource_comments=(
                    translation.resource_comments
                    if hasattr(translation, "resource_comments")
                    else None
                ),
                source=translation.source,
                # Use the parsed order when truthy, else the file position.
                order=translation.order or index,
            )
            self.entities[vcs_entity.key] = vcs_entity
        # Fill in translations from the locale resources.
        for locale in locales:
            locale_directory = self.vcs_project.locale_directory_paths[locale.code]
            if self.vcs_project.configuration:
                # Some resources might not be available for this locale
                resource_path = self.vcs_project.configuration.l10n_path(
                    locale,
                    source_resource_path,
                )
                if resource_path is None:
                    continue
            else:
                resource_path = os.path.join(locale_directory, self.path)
            log.debug("Parsing resource file: %s", resource_path)
            try:
                resource_file = formats.parse(
                    resource_path, source_resource_path, locale
                )
            # File doesn't exist or is invalid: log it and move on
            except (OSError, ParseError) as err:
                log.error(
                    "Skipping resource {path} due to {type}: {err}".format(
                        path=path, type=type(err).__name__, err=err
                    )
                )
                continue
            self.files[locale] = resource_file
            log.debug("Discovered %s translations.", len(resource_file.translations))
            for translation in resource_file.translations:
                try:
                    self.entities[translation.key].translations[
                        locale.code
                    ] = translation
                except KeyError:
                    # If the source is missing an entity, we consider it
                    # deleted and don't add it.
                    pass
    def save(self, locale=None):
        """
        Save changes made to any of the translations in this resource
        back to the filesystem for all locales.

        :param locale: if given, save only that locale's file.
        """
        if locale:
            self.files[locale].save(locale)
        else:
            for locale, resource_file in self.files.items():
                resource_file.save(locale)
class VCSEntity:
    """
    A single source string to be translated, together with the
    translations collected for it across several locales.
    """

    def __init__(
        self,
        resource,
        key,
        string,
        source,
        comments,
        group_comments=None,
        resource_comments=None,
        context="",
        string_plural="",
        order=0,
    ):
        # Identity of the entity within its parent resource.
        self.resource = resource
        self.key = key
        self.context = context
        self.order = order
        # Source text (and its plural form, when present).
        self.string = string
        self.string_plural = string_plural
        self.source = source
        # Comment metadata attached to the entity.
        self.comments = comments
        self.group_comments = group_comments or []
        self.resource_comments = resource_comments or []
        # Translations keyed by locale code, filled in after parsing.
        self.translations = {}

    def has_translation_for(self, locale_code):
        """Return True if a translation exists for the given locale."""
        return locale_code in self.translations
class VCSTranslation:
    """
    A single translation of a source string into another language.
    Since a string can have different translations based on plural
    forms, all of the different forms are stored under self.strings, a
    dict where the keys equal possible values for
    pontoon.base.models.Translation.plural_form and the values equal the
    translation for that plural form.
    """

    def __init__(
        self,
        key,
        strings,
        comments,
        fuzzy,
        context="",
        source_string="",
        source_string_plural="",
        group_comments=None,
        resource_comments=None,
        order=0,
        source=None,
        last_translator=None,
        last_updated=None,
    ):
        self.key = key
        self.context = context
        self.source_string = source_string
        self.source_string_plural = source_string_plural
        self.strings = strings
        self.comments = comments
        self.group_comments = group_comments
        self.resource_comments = resource_comments
        self.fuzzy = fuzzy
        self.order = order
        self.source = source or []
        self.last_translator = last_translator
        self.last_updated = last_updated

    @property
    def extra(self):
        """
        Return a dict of custom properties to store in the database.
        Useful for subclasses from specific formats that have extra data
        that needs to be preserved.
        """
        return {}

    def update_from_db(self, db_translations):
        """
        Update translation with current DB state.

        :param db_translations: collection of DB Translation instances
            (with `fuzzy`, `date`, `user`, `plural_form`, `string` attrs).
        """
        # Fuzzy iff at least one DB translation is fuzzy. Test the flag
        # directly instead of filtering and relying on object truthiness.
        self.fuzzy = any(t.fuzzy for t in db_translations)
        if db_translations:
            # Most recently changed translation wins; translations without
            # a date sort as the oldest possible (aware) datetime.
            last_translation = max(
                db_translations,
                key=lambda t: t.date or timezone.make_aware(datetime.min),
            )
            self.last_updated = last_translation.date
            self.last_translator = last_translation.user
        # Replace existing translations with ones from the database.
        self.strings = {db.plural_form: db.string for db in db_translations}
| bsd-3-clause | edfe8efa217ddde458acbc5bcecf9a51 | 33.706131 | 98 | 0.573221 | 4.695652 | false | true | false | false |
mozilla/pontoon | pontoon/sync/formats/po.py | 2 | 3807 | """
Parser for to pofile translation format.
"""
from datetime import datetime
from django.utils import timezone
import polib
from pontoon.sync import KEY_SEPARATOR
from pontoon.sync.exceptions import ParseError
from pontoon.sync.formats.base import ParsedResource
from pontoon.sync.vcs.models import VCSTranslation
class POEntity(VCSTranslation):
    """A single pofile entry (msgid/msgstr) mapped onto VCSTranslation."""

    def __init__(self, po_entry, order):
        self.po_entry = po_entry

        # Plural entries store a dict of plural-form -> string; singular
        # entries are normalized into the same shape under the key None.
        if po_entry.msgstr_plural:
            strings = po_entry.msgstr_plural
        else:
            strings = {None: po_entry.msgstr}

        # Remove empty strings from the string dict.
        strings = {key: value for key, value in strings.items() if value}

        # Pofiles use the source as the key prepended with context if available.
        key = po_entry.msgid
        context = po_entry.msgctxt or ""
        if context:
            key = context + KEY_SEPARATOR + key

        super().__init__(
            key=key,
            context=context,
            source_string=po_entry.msgid,
            source_string_plural=po_entry.msgid_plural,
            strings=strings,
            comments=po_entry.comment.split("\n") if po_entry.comment else [],
            fuzzy="fuzzy" in po_entry.flags,
            order=order,
            source=po_entry.occurrences,
        )

    def update_entry(self, locale):
        """Update the POEntry associated with this translation."""
        if self.po_entry.msgstr_plural:
            # Emit one msgstr per plural form of the locale, blank if missing.
            self.po_entry.msgstr_plural = {
                plural_form: self.strings.get(plural_form, "")
                for plural_form in range(locale.nplurals or 1)
            }
        else:
            self.po_entry.msgstr = self.strings.get(None, "")

        # Keep the fuzzy flag in sync with this translation's state.
        if self.fuzzy and "fuzzy" not in self.po_entry.flags:
            self.po_entry.flags.append("fuzzy")
        elif not self.fuzzy and "fuzzy" in self.po_entry.flags:
            self.po_entry.flags.remove("fuzzy")

    def __repr__(self):
        # Fix: the previous implementation interpolated
        # self.key.encode("utf-8") — a Python 2 leftover that rendered as
        # "<POEntity b'key'>" on Python 3.
        return f"<POEntity {self.key}>"
class POResource(ParsedResource):
    """A parsed pofile: wraps a polib POFile and its POEntity entries."""
    def __init__(self, pofile):
        self.pofile = pofile
        # Obsolete (commented-out) entries are not translatable; skip them.
        self.entities = [
            POEntity(entry, k)
            for k, entry in enumerate(self.pofile)
            if not entry.obsolete
        ]
    @property
    def translations(self):
        return self.entities
    def save(self, locale):
        """
        Write all entries back into the underlying pofile and save it to
        disk, refreshing the PO header metadata for the given locale.
        """
        for entity in self.translations:
            entity.update_entry(locale)
        metadata = self.pofile.metadata
        if len(self.translations) > 0:
            # Most recently updated translation determines the revision
            # metadata; undated entries sort as the oldest possible datetime.
            latest_translation = max(
                self.translations,
                key=lambda t: t.last_updated or timezone.make_aware(datetime.min),
            )
            if latest_translation.last_updated:
                metadata["PO-Revision-Date"] = latest_translation.last_updated.strftime(
                    "%Y-%m-%d %H:%M%z"
                )
            if latest_translation.last_translator:
                metadata[
                    "Last-Translator"
                ] = latest_translation.last_translator.display_name_and_email
        metadata.update(
            {
                "Language": locale.code.replace("-", "_"),
                "X-Generator": "Pontoon",
                "Plural-Forms": (
                    "nplurals={locale.nplurals}; plural={locale.plural_rule};".format(
                        locale=locale
                    )
                ),
            }
        )
        self.pofile.save()
    def __repr__(self):
        return f"<POResource {self.pofile.fpath}>"
def parse(path, source_path=None, locale=None):
    """Parse the pofile at ``path`` into a POResource.

    ``source_path`` and ``locale`` are accepted for signature compatibility
    with the other format parsers but are unused here, since pofiles are
    self-contained.

    :raises ParseError: if the file cannot be read or parsed.
    """
    try:
        pofile = polib.pofile(path, wrapwidth=200)
    except OSError as err:
        # Chain the original exception so the root cause stays visible
        # in tracebacks (previously it was discarded).
        raise ParseError(f"Failed to parse {path}: {err}") from err
    return POResource(pofile)
| bsd-3-clause | df2e4c290aeab8be1bf13c31dd70f2bd | 30.46281 | 88 | 0.567901 | 4.041401 | false | false | false | false |
mozilla/pontoon | pontoon/tags/tests/utils/test_tags.py | 3 | 6925 | from unittest.mock import MagicMock, patch, PropertyMock
import pytest
from pontoon.tags.models import Tag
from pontoon.tags.utils import (
TagsLatestTranslationsTool,
TagsResourcesTool,
TagsStatsTool,
TagsTool,
TagTool,
)
from pontoon.tags.utils.base import Clonable
def test_util_tags_tool():
    """A freshly constructed TagsTool wires up its collaborator classes
    and starts with every filter unset."""
    tool = TagsTool()
    # Collaborator classes are exposed as attributes.
    assert tool.tag_class is TagTool
    assert tool.resources_class is TagsResourcesTool
    assert tool.translations_class is TagsLatestTranslationsTool
    assert tool.stats_class is TagsStatsTool
    # No filtering is applied by default.
    for attribute in ("locales", "projects", "priority", "slug", "path"):
        assert getattr(tool, attribute) is None
    assert tool.tag_manager == Tag.objects
@patch("pontoon.tags.utils.TagsTool.stats_class")
def test_util_tags_tool_stats(stats_mock, tag_init_kwargs):
    # tests instantiation of tag.stats_tool with different args
    # (tag_init_kwargs is presumably a pytest fixture supplying kwargs)
    tags_tool = TagsTool(**tag_init_kwargs)
    stats_mock.return_value = 23
    # stat_tool must be built by calling stats_class with the exact
    # kwargs the TagsTool itself was constructed with.
    assert tags_tool.stat_tool == 23
    assert stats_mock.call_args[1] == tag_init_kwargs
@pytest.mark.parametrize(
    "kwargs",
    [
        dict(slug=None, locales=None, projects=None, path=None),
        dict(slug=1, locales=2, projects=3, path=4),
    ],
)
@patch("pontoon.tags.utils.TagsTool.resources_class")
def test_util_tags_tool_resources(resources_mock, kwargs):
    # tests instantiation of tag.resources_tool with different args
    tags_tool = TagsTool(**kwargs)
    resources_mock.return_value = 23
    # Both the empty and the fully-specified kwarg sets must be
    # forwarded verbatim to resources_class.
    assert tags_tool.resource_tool == 23
    assert resources_mock.call_args[1] == kwargs
@pytest.mark.parametrize(
    "kwargs",
    [dict(slug=None, locales=None, projects=None), dict(slug=1, locales=2, projects=3)],
)
@patch("pontoon.tags.utils.TagsTool.translations_class")
def test_util_tags_tool_translations(trans_mock, kwargs):
    # tests instantiation of tag.translations_tool with different args
    tags_tool = TagsTool(**kwargs)
    trans_mock.return_value = 23
    # translation_tool must be built by calling translations_class with
    # the kwargs the TagsTool was constructed with.
    assert tags_tool.translation_tool == 23
    assert trans_mock.call_args[1] == kwargs
@patch("pontoon.tags.utils.TagsTool.tag_class")
@patch("pontoon.tags.utils.TagsTool.get_tags")
@patch("pontoon.tags.utils.TagsTool.__len__")
@patch("pontoon.tags.utils.TagsTool.__iter__")
def test_util_tags_tool_get(iter_mock, len_mock, tags_mock, class_mock):
    # tests getting a TagTool from TagsTool
    # NOTE: patch decorators apply bottom-up, so the innermost patch
    # (__iter__) is the first mock argument.
    tags_tool = TagsTool()
    class_mock.return_value = 23
    len_mock.return_value = 7
    iter_mock.return_value = iter([3, 17, 73])
    # with no slug returns first result from iter(self)
    assert tags_tool.get() == 3
    assert not class_mock.called
    assert not tags_mock.called
    assert len_mock.called
    assert iter_mock.called
    len_mock.reset_mock()
    iter_mock.reset_mock()
    # calling with slug creates a TagTool instance
    # and doesn't call iter(self) at all
    assert tags_tool.get(113) == 23
    assert not len_mock.called
    assert not iter_mock.called
    assert list(class_mock.call_args) == [(tags_tool,), {}]
    assert list(tags_mock.call_args) == [(), {"slug": 113}]
def test_util_tags_tool_call_and_clone():
    """Calling a TagsTool yields a distinct clone; both are Clonable."""
    original = TagsTool()
    clone = original()
    assert clone is not original
    for tool in (original, clone):
        assert isinstance(tool, Clonable)
@patch("pontoon.tags.utils.TagsTool.__call__")
def test_util_tags_tool_getitem(call_mock):
    """Subscripting a TagsTool delegates to __call__, forwarding the slug."""
    tool = TagsTool()
    for tag_slug in ("foo", "bar"):
        tool[tag_slug]
    # Each subscript produced one delegated call carrying its slug.
    assert call_mock.call_args_list[0][1] == {"slug": "foo"}
    assert call_mock.call_args_list[1][1] == {"slug": "bar"}
@patch("pontoon.tags.utils.TagsTool.iter_tags")
@patch("pontoon.tags.utils.TagsTool.stat_tool", new_callable=PropertyMock)
def test_util_tags_tool_iter(stats_mock, iter_mock):
    # tests that when you iter it calls iter_tags with
    # stats data
    tags_tool = TagsTool()
    # stat_tool is a property, hence PropertyMock with nested return_value.
    stats_mock.configure_mock(**{"return_value.data": [7, 23]})
    iter_mock.return_value = iter([])
    assert list(tags_tool) == []
    assert stats_mock.called
    # iter_tags received the stats data as its sole positional arg.
    assert list(iter_mock.call_args) == [([7, 23],), {}]
@patch("pontoon.tags.utils.TagsTool.stat_tool", new_callable=PropertyMock)
def test_util_tags_tool_len(stats_mock):
# tests that when you len() you get the len
# of the stats data
m_len = MagicMock()
m_len.__len__.return_value = 23
stats_mock.configure_mock(**{"return_value.data": m_len})
tags_tool = TagsTool()
assert len(tags_tool) == 23
assert m_len.__len__.called
@patch("pontoon.tags.utils.TagsTool.translation_tool", new_callable=PropertyMock)
@patch("pontoon.tags.utils.TagsTool.tag_class")
def test_util_tags_tool_iter_tags(tag_mock, trans_mock):
# tests that iter_tags calls instantiates a TagTool with
# stat data and latest_translation data
trans_mock.configure_mock(**{"return_value.data.get.return_value": 23})
tags_tool = TagsTool()
list(
tags_tool.iter_tags(
[
dict(resource__tag=1, foo="bar"),
dict(resource__tag=2, foo="bar"),
dict(resource__tag=3, foo="bar"),
]
)
)
# translation_tool.data.get() was called 3 times with tag pks
assert [x[0][0] for x in trans_mock.return_value.data.get.call_args_list] == [
1,
2,
3,
]
# TagTool was called 3 times with the tags tool as arg
assert [x[0][0] for x in tag_mock.call_args_list] == [tags_tool] * 3
# and stat + translation data as kwargs
assert [x[1] for x in tag_mock.call_args_list] == [
{"resource__tag": 1, "latest_translation": 23, "foo": "bar"},
{"resource__tag": 2, "latest_translation": 23, "foo": "bar"},
{"resource__tag": 3, "latest_translation": 23, "foo": "bar"},
]
@patch("pontoon.tags.utils.TagsTool.tag_manager", new_callable=PropertyMock)
def test_util_tags_tool_get_tags(tag_mock):
filter_mock = MagicMock(**{"filter.return_value": 23})
tag_mock.configure_mock(
**{"return_value.filter.return_value.values.return_value": filter_mock}
)
tags_tool = TagsTool()
# no slug provided, returns `values`
assert tags_tool.get_tags() is filter_mock
assert not filter_mock.called
assert list(tag_mock.return_value.filter.return_value.values.call_args) == [
("pk", "name", "slug", "priority", "project"),
{},
]
tag_mock.reset_mock()
# slug provided, `values` is filtered
assert tags_tool.get_tags("FOO") == 23
assert list(filter_mock.filter.call_args) == [(), {"slug": "FOO"}]
assert list(tag_mock.return_value.filter.return_value.values.call_args) == [
("pk", "name", "slug", "priority", "project"),
{},
]
| bsd-3-clause | 6b29e0dfbf287e53f229fdc0b624f032 | 33.1133 | 88 | 0.66296 | 3.303912 | false | true | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.