code
stringlengths 1
199k
|
|---|
import jtutil
from jtelem import jtelem
class new(jtelem):
    """jtelem subclass producing a vertical-glue ('vglue') element."""

    def __init__(self):
        jtelem.__init__(self)

    def classname(self):
        """Return this element's class name string."""
        return "jtvglue"

    def xmlout(self):
        """Return this element's XML representation."""
        return "<vglue/>"
|
from time import sleep
from base import WebTest, USER, PASS
from creds import RECIP
from runner import test_runner
class SearchTest(WebTest):
    """Browser test for the search page: load it, run a keyword search,
    and reset the form."""

    def __init__(self):
        WebTest.__init__(self)
        self.login(USER, PASS)
        self.wait_with_folder_list()

    def load_search_page(self):
        """Open the search page from the menu and verify its title."""
        list_item = self.by_class('menu_search')
        # Fix: the original assigned click()'s return value (always None)
        # to an unused variable 'link'.
        list_item.find_element_by_tag_name('a').click()
        self.wait_with_folder_list()
        assert self.by_class('content_title').text.startswith('Search')

    def keyword_search(self):
        """Search for the keyword 'test' and expect at least one result row."""
        terms = self.by_id('search_terms')
        terms.send_keys('test')
        self.by_class('search_update').click()
        self.wait_with_folder_list()
        sleep(1)  # allow the result table to render
        table = self.by_class('message_table_body')
        assert len(table.find_elements_by_tag_name('tr')) > 0

    def reset_search(self):
        """Reset the search form and expect terms and results to be cleared."""
        self.by_class('search_reset').click()
        self.wait_with_folder_list()
        sleep(1)  # allow the reset to propagate
        assert self.by_id('search_terms').get_attribute('value') == ''
        table = self.by_class('message_table_body')
        assert len(table.find_elements_by_tag_name('tr')) == 0
# Run the search tests in the listed order when executed as a script.
if __name__ == '__main__':
    print("SEARCH TEST")
    test_runner(SearchTest, [
        'load_search_page',
        'keyword_search',
        'reset_search'
    ])
|
'''Helper for plotting reblocking plots.'''
import matplotlib.pyplot as plt
def plot_reblocking(block_info, plotfile=None, plotshow=True):
    '''Plot the reblocking data.

    Parameters
    ----------
    block_info : :class:`pandas.DataFrame`
        Reblocking data (i.e. the first item of the tuple returned by ``reblock``).
    plotfile : string
        If not null, save the plot to the given filename. If '-', then show the
        plot interactively. See also ``plotshow``.
    plotshow : bool
        If ``plotfile`` is not given or is '-', then show the plot interactively.

    Returns
    -------
    fig : :class:`matplotlib.figure.Figure`
        plot of the reblocking data.
    '''
    fig = plt.figure()
    data_sets = block_info.columns.get_level_values(0).unique()
    for (i, col) in enumerate(data_sets):
        ax = fig.add_subplot(len(data_sets), 1, i+1)
        # There should only be (at most) one non-null value for optimal block.
        # Fix: use len() rather than array truthiness -- `if ndarray:` raises
        # for arrays with more than one element and is deprecated for empty
        # arrays.
        opt_indices = block_info[block_info[(col, 'optimal block')] != ''].index.values
        opt = opt_indices[0] if len(opt_indices) else None
        std_err = block_info[(col, 'standard error')]
        if 'standard error error' in block_info[col]:
            std_err_err = block_info[(col, 'standard error error')]
        else:
            # No error estimate available: draw zero-length error bars.
            std_err_err = 0*std_err
        line = ax.errorbar(block_info.index, std_err, std_err_err, marker='o',
                           label=col)
        # Fix: compare against None so an optimal block at index 0 is
        # still annotated (0 is falsy).
        if opt is not None:
            ax.annotate('', (opt, std_err[opt]-std_err_err[opt]),
                        xytext=(0, -20), textcoords='offset points',
                        arrowprops=dict(
                            arrowstyle="->",  # color=line[0].get_color()
                            linewidth=1.2*line[0].get_linewidth(),
                        ),)
        ax.legend(loc=2)
        ax.set_ylabel('standard error')
        ax.set_xlabel('Reblock iteration')
    # Scale the figure height with the number of stacked subplots.
    size = fig.get_size_inches()
    fig.set_size_inches(size[0], size[1]*len(data_sets))
    fig.tight_layout()
    if plotfile == '-' or (not plotfile and plotshow):
        plt.show()
    elif plotfile:
        fig.savefig(plotfile)
    return fig
|
from ajenti.com import Interface
class IRequestDispatcher(Interface):
    """Interface for components that route and handle HTTP requests."""

    def match(self, uri):
        """Return whether this dispatcher handles the given URI."""
        pass

    def process(self, req, start_response):
        """Handle the request (WSGI-style req/start_response pair)."""
        pass
class IContentProvider(Interface):
    """Interface for plugins that ship static content (JS/CSS/widgets/templates)."""

    # Interface-level defaults for providers that supply no assets.
    path = ''
    module = ''
    js_files = []
    css_files = []
    widget_files = []

    def content_path(self):
        """Return the filesystem path of this provider's static content."""
        pass

    def widget_path(self):
        """Return the filesystem path of this provider's widgets."""
        pass

    def template_path(self):
        """Return the filesystem path of this provider's templates."""
        pass
class ICategoryProvider(Interface):
    """ ICategoryProvider should contain:
    'get_ui()' method to retrieve main panel
    """

    def get_ui():
        """Return the main panel UI for this category."""
        pass
class IEventDispatcher(Interface):
    """Interface for components that receive and handle named events."""

    def match_event(self, event):
        """Return whether this dispatcher handles the given event."""
        pass

    def event(self, event, *params, **kwparams):
        """Handle the event with its positional/keyword payload."""
        pass
|
"""
This file implements a Rectangle class for geometric operations.
"""
import copy
from dragonfly.windows.point import Point
class Rectangle(Point):
    """Axis-aligned rectangle: the inherited Point state is the origin
    (top-left corner) and ``_size`` is a second Point holding (dx, dy)."""

    #-----------------------------------------------------------------------
    # Methods for initialization, copying, and introspection.

    def __init__(self, x1=None, y1=None, dx=None, dy=None):
        Point.__init__(self, x1, y1)
        # Size stored as a Point to reuse Point's coordinate validation.
        self._size = Point(dx, dy)

    def copy(self):
        """Return a deep copy of this rectangle."""
        return copy.deepcopy(self)

    def __copy__(self):
        return copy.deepcopy(self)

    def __repr__(self):
        return "%s(%.1f, %.1f, %.1f, %.1f)" \
            % (self.__class__.__name__, self.x, self.y, self.dx, self.dy)

    def __eq__(self, other):
        # Equal when both origin (Point.__eq__) and size match.
        return (super(Rectangle, self).__eq__(other) and
                self.size == other.size)

    def __ne__(self, other):
        return not self.__eq__(other)

    #-----------------------------------------------------------------------
    # Methods that control attribute access.

    p1 = property(fget=lambda self: Point(self._x, self._y),
                  doc="Protected access to p1 attribute.")
    p2 = property(fget=lambda self: self.p1 + self._size,
                  doc="Protected access to p2 attribute.")
    # Returns a copy so callers cannot mutate the internal size point.
    size = property(fget=lambda self: self._size.copy(),
                    doc="Protected access to size attribute.")
    # x1/y1 alias the inherited Point coordinate properties.
    x1 = Point.x
    y1 = Point.y

    def _set_x2(self, x):
        self._size.x = x  # Use type checking of Point class.
        self._size._x -= self._x
    x2 = property(fget=lambda self: self._x + self._size._x,
                  fset=_set_x2,
                  doc="Protected access to x2 attribute.")

    def _set_y2(self, y):
        self._size.y = y  # Use type checking of Point class.
        self._size._y -= self._y
    y2 = property(fget=lambda self: self._y + self._size._y,
                  fset=_set_y2,
                  doc="Protected access to y2 attribute.")

    def _get_center(self):
        return self.p1 + Point(self._size.x / 2, self._size.y / 2)
    center = property(fget=_get_center,
                      doc="Dynamic access to center attribute.")

    dx = property(fget=lambda self: self._size.x,
                  doc="Protected access to dx attribute.")
    dy = property(fget=lambda self: self._size.y,
                  doc="Protected access to dy attribute.")
    x_center = property(fget=lambda self: self.x + self._size.x / 2,
                        doc="Protected access to x_center attribute.")
    y_center = property(fget=lambda self: self.y + self._size.y / 2,
                        doc="Protected access to y_center attribute.")
    ltwh = property(fget=lambda self: (int(self._x), int(self._y),
                                       int(self._size._x),
                                       int(self._size._y)),
                    doc="Shortcut to left-top-with-height tuple.")

    #-----------------------------------------------------------------------
    # Methods for manipulating rectangle objects.

    def translate(self, dx, dy):
        """Shift the rectangle origin by (dx, dy); size is unchanged."""
        other = Point(dx, dy)
        self += other

    def renormalize(self, src, dst):
        """Map this rectangle from the src coordinate frame into dst."""
        # pylint: disable=W0212
        # Suppress warnings about protected member access.
        p1 = self.p1.renormalize(src, dst)
        p2 = self.p2.renormalize(src, dst)
        self._x = p1.x
        self._y = p1.y
        self._size._x = p2.x - p1.x
        self._size._y = p2.y - p1.y

    #-----------------------------------------------------------------------
    # Methods for various rectangle related operations.

    def contains(self, p):
        """Test whether this rectangle instance contains a point."""
        assert isinstance(p, Point)
        # Half-open: left/top edges inclusive, right/bottom exclusive.
        return self.x1 <= p.x < self.x2 and self.y1 <= p.y < self.y2
unit = Rectangle(0.0, 0.0, 1.0, 1.0)
|
def itemNames():
    """Return the list of item template names this script provides."""
    names = ['droid_memory_module']
    return names
def itemChances():
    """Return the spawn chance (percent) for each item, index-aligned."""
    chances = [100]
    return chances
|
"""
The I{sudsobject} module provides a collection of suds objects
that are primarily used for the highly dynamic interactions with
wsdl/xsd defined types.
"""
from logging import getLogger
from . import tostr
from .compat import basestring
from .utils import is_builtin
log = getLogger(__name__)
def items(sobject):
    """
    Extract the I{items} from a suds object much like the
    items() method works on I{dict}.
    @param sobject: A suds object
    @type sobject: L{Object}
    @return: A list of items contained in I{sobject}.
    @rtype: [(key, value),...]
    """
    # Lazily delegate to the object's own (key, value) iteration.
    for key_value in sobject:
        yield key_value
def asdict(sobject):
    """
    Convert a sudsobject into a dictionary.
    @param sobject: A suds object
    @type sobject: L{Object}
    @return: A python dictionary containing the
        items contained in I{sobject}.
    @rtype: dict
    """
    # Shallow conversion: nested Objects remain Objects.
    return dict(items(sobject))
def merge(a, b):
    """
    Merge all attributes and metadata from I{a} to I{b}.
    @param a: A I{source} object
    @type a: L{Object}
    @param b: A I{destination} object
    @type b: L{Object}
    @return: The destination object I{b}.
    @rtype: L{Object}
    """
    for key, value in a:
        setattr(b, key, value)
    # Bug fix: this previously read `b.__metadata__ = b.__metadata__`,
    # a self-assignment that never copied the source metadata.
    b.__metadata__ = a.__metadata__
    return b
def footprint(sobject):
    """
    Get the I{virtual footprint} of the object.
    This is really a count of the attributes in the branch with a significant
    value.
    @param sobject: A suds object.
    @type sobject: L{Object}
    @return: The branch footprint.
    @rtype: int
    """
    n = 0
    for a in sobject.__keylist__:
        v = getattr(sobject, a)
        if v is None:
            # None is never significant.
            continue
        if isinstance(v, Object):
            # Nested object: count its branch recursively.
            n += footprint(v)
            continue
        if hasattr(v, '__len__'):
            # Sized values (strings, lists, ...) count only when non-empty.
            if len(v):
                n += 1
            continue
        # Any other non-None scalar is significant.
        n += 1
    return n
class Factory:
    """Factory for dynamically created suds Object subclasses and instances."""

    # Cache of created subclasses, keyed on 'name.(bases)'.
    cache = {}

    @classmethod
    def subclass(cls, name, bases, dict=None):
        """Get (creating and caching on first use) a subclass with the given
        name and base class(es).

        Note: the parameter keeps its historical builtin-shadowing name
        'dict' for backward compatibility with keyword callers, but the
        default is now None instead of a shared mutable {} literal.
        """
        if dict is None:
            dict = {}
        if not isinstance(bases, tuple):
            bases = (bases,)
        key = '.'.join((name, str(bases)))
        subclass = cls.cache.get(key)
        if subclass is None:
            subclass = type(name, bases, dict)
            cls.cache[key] = subclass
        return subclass

    @classmethod
    def object(cls, classname=None, dict=None):
        """Create an Object (or a named Object subclass when classname is
        given) populated from the optional attribute dictionary."""
        if classname is not None:
            subclass = cls.subclass(classname, Object)
            inst = subclass()
        else:
            inst = Object()
        if dict is not None:
            for a in dict.items():
                setattr(inst, a[0], a[1])
        return inst

    @classmethod
    def metadata(cls):
        """Create an empty Metadata object."""
        return Metadata()

    @classmethod
    def property(cls, name, value=None):
        """Create a named Property subclass instance holding value."""
        subclass = cls.subclass(name, Property)
        return subclass(value)
class Object:
    """
    Dynamic attribute container used for wsdl/xsd defined types.
    Public (non-builtin) attributes are tracked, in first-assignment order,
    in __keylist__ so iteration and printing preserve insertion order.
    """

    def __init__(self):
        self.__keylist__ = []
        self.__printer__ = Printer()
        self.__metadata__ = Metadata()

    def __setattr__(self, name, value):
        # Record the first assignment of each non-builtin name.
        if not is_builtin(name) and name not in self.__keylist__:
            self.__keylist__.append(name)
        self.__dict__[name] = value

    def __delattr__(self, name):
        try:
            del self.__dict__[name]
            if not is_builtin(name):
                self.__keylist__.remove(name)
        # Narrowed from a bare 'except:': only the expected failures
        # (KeyError from __dict__, ValueError from list.remove) are
        # translated to AttributeError; anything else now propagates.
        except (KeyError, ValueError):
            cls = self.__class__.__name__
            raise AttributeError("%s has no attribute '%s'" % (cls, name))

    def __getitem__(self, name):
        # Integer subscripts index into the ordered key list.
        if isinstance(name, int):
            name = self.__keylist__[int(name)]
        return getattr(self, name)

    def __setitem__(self, name, value):
        setattr(self, name, value)

    def __iter__(self):
        return Iter(self)

    def __len__(self):
        return len(self.__keylist__)

    def __contains__(self, name):
        return name in self.__keylist__

    def __str__(self):
        return self.__printer__.tostr(self)

    def __unicode__(self):
        return self.__printer__.tostr(self)
class Iter:
    """Iterator over a suds Object's (key, value) pairs, honoring the
    optional metadata-declared ordering when it covers every key."""

    def __init__(self, sobject):
        self.sobject = sobject
        self.keylist = self.__keylist(sobject)
        self.index = 0

    def __next__(self):
        # Python 3 iterator protocol delegates to the Python 2 style next().
        return self.next()

    def next(self):
        keylist = self.keylist
        nkeys = len(self.keylist)
        while self.index < nkeys:
            k = keylist[self.index]
            self.index += 1
            # Keys may have been deleted since the keylist snapshot; skip them.
            if hasattr(self.sobject, k):
                v = getattr(self.sobject, k)
                return (k, v)
        raise StopIteration()

    def __keylist(self, sobject):
        """Choose the iteration key order: metadata ordering when valid,
        otherwise the object's insertion-order keylist."""
        keylist = sobject.__keylist__
        try:
            keyset = set(keylist)
            ordering = sobject.__metadata__.ordering
            ordered = set(ordering)
            # Only trust the ordering if it mentions every actual key.
            if not ordered.issuperset(keyset):
                log.debug(
                    '%s must be superset of %s, ordering ignored',
                    keylist,
                    ordering)
                raise KeyError()
            return ordering
        except:
            # Missing/invalid ordering metadata: fall back to the keylist.
            return keylist

    def __iter__(self):
        return self
class Metadata(Object):
    """Object metadata container.
    Deliberately does not call Object.__init__ (which itself creates a
    Metadata instance and would recurse); consequently no __metadata__
    attribute is set on Metadata instances.
    """

    def __init__(self):
        self.__keylist__ = []
        self.__printer__ = Printer()
class Facade(Object):
    """Object whose metadata records a facade name; Printer.print_object
    prints this name instead of the class name."""

    def __init__(self, name):
        Object.__init__(self)
        md = self.__metadata__
        md.facade = name
class Property(Object):
    """An Object wrapping a single primitive stored in the 'value' attribute."""

    def __init__(self, value):
        Object.__init__(self)
        self.value = value

    def items(self):
        """Iterate (name, value) pairs, excluding the wrapped 'value' itself."""
        for pair in self:
            if pair[0] != 'value':
                yield pair

    def get(self):
        """Return the wrapped value."""
        return self.value

    def set(self, value):
        """Replace the wrapped value; returns self to allow chaining."""
        self.value = value
        return self
class Printer:
    """
    Pretty printing of a Object object.
    """

    @classmethod
    def indent(cls, n):
        """Return an indentation string of n levels (three spaces each)."""
        return '%*s' % (n * 3, ' ')

    def tostr(self, object, indent=-2):
        """Get a string representation of object."""
        # 'history' tracks objects on the current print path so that
        # self-referencing structures terminate instead of recursing forever.
        history = []
        return self.process(object, history, indent)

    def process(self, object, h, n=0, nl=False):
        """ print object using the specified indent (n) and newline (nl). """
        # Dispatch on the runtime type: Object, dict, sequence, string, other.
        if object is None:
            return 'None'
        if isinstance(object, Object):
            if len(object) == 0:
                return '<empty>'
            else:
                return self.print_object(object, h, n+2, nl)
        if isinstance(object, dict):
            if len(object) == 0:
                return '<empty>'
            else:
                return self.print_dictionary(object, h, n+2, nl)
        if isinstance(object, (list, tuple)):
            if len(object) == 0:
                return '<empty>'
            else:
                return self.print_collection(object, h, n+2)
        if isinstance(object, basestring):
            # Strings are quoted; everything else falls through unquoted.
            return '"%s"' % tostr(object)
        return '%s' % tostr(object)

    def print_object(self, d, h, n, nl=False):
        """ print complex using the specified indent (n) and newline (nl). """
        s = []
        cls = d.__class__
        md = d.__metadata__
        # Already being printed further up the stack: elide to break cycles.
        if d in h:
            s.append('(')
            s.append(cls.__name__)
            s.append(')')
            s.append('...')
            return ''.join(s)
        h.append(d)
        if nl:
            s.append('\n')
            s.append(self.indent(n))
        if cls != Object:
            s.append('(')
            if isinstance(d, Facade):
                # Facades print their metadata facade name, not the class.
                s.append(md.facade)
            else:
                s.append(cls.__name__)
            s.append(')')
        s.append('{')
        for item in d:
            if self.exclude(d, item):
                continue
            item = self.unwrap(d, item)
            s.append('\n')
            s.append(self.indent(n+1))
            if isinstance(item[1], (list, tuple)):
                # Sequence values get a '[]' suffix on the key name.
                s.append(item[0])
                s.append('[]')
            else:
                s.append(item[0])
            s.append(' = ')
            s.append(self.process(item[1], h, n, True))
        s.append('\n')
        s.append(self.indent(n))
        s.append('}')
        h.pop()
        return ''.join(s)

    def print_dictionary(self, d, h, n, nl=False):
        """ print complex using the specified indent (n) and newline (nl). """
        if d in h:
            return '{}...'
        h.append(d)
        s = []
        if nl:
            s.append('\n')
            s.append(self.indent(n))
        s.append('{')
        for item in d.items():
            s.append('\n')
            s.append(self.indent(n+1))
            if isinstance(item[1], (list, tuple)):
                s.append(tostr(item[0]))
                s.append('[]')
            else:
                s.append(tostr(item[0]))
            s.append(' = ')
            s.append(self.process(item[1], h, n, True))
        s.append('\n')
        s.append(self.indent(n))
        s.append('}')
        h.pop()
        return ''.join(s)

    def print_collection(self, c, h, n):
        """print collection using the specified indent (n) and newline (nl)."""
        if c in h:
            return '[]...'
        h.append(c)
        s = []
        for item in c:
            s.append('\n')
            s.append(self.indent(n))
            s.append(self.process(item, h, n-2))
            s.append(',')
        h.pop()
        return ''.join(s)

    def unwrap(self, d, item):
        """ translate (unwrap) using an optional wrapper function """
        nopt = lambda x: x
        try:
            md = d.__metadata__
            pmd = getattr(md, '__print__', None)
            if pmd is None:
                return item
            wrappers = getattr(pmd, 'wrappers', {})
            fn = wrappers.get(item[0], nopt)
            return (item[0], fn(item[1]))
        except:
            # Best-effort: any metadata problem leaves the item unwrapped.
            pass
        return item

    def exclude(self, d, item):
        """ check metadata for excluded items """
        try:
            md = d.__metadata__
            pmd = getattr(md, '__print__', None)
            if pmd is None:
                return False
            excludes = getattr(pmd, 'excludes', [])
            return item[0] in excludes
        except:
            # Best-effort: treat metadata errors as "not excluded".
            pass
        return False
|
import unittest
from tenacity import RetryError, retry, stop_after_attempt
from tenacity import tornadoweb
from tornado import gen
from tornado import testing
from .test_tenacity import NoIOErrorAfterCount
# Coroutine retried with the default (unbounded) policy until thing.go()
# stops raising.
@retry
@gen.coroutine
def _retryable_coroutine(thing):
    yield gen.sleep(0.00001)
    thing.go()
# Same coroutine, but retrying stops after two attempts.
@retry(stop=stop_after_attempt(2))
@gen.coroutine
def _retryable_coroutine_with_2_attempts(thing):
    yield gen.sleep(0.00001)
    thing.go()
class TestTornado(testing.AsyncTestCase):
    """Tests for tenacity's Tornado coroutine support."""

    @testing.gen_test
    def test_retry(self):
        """A wrapped coroutine retries until the call stops raising."""
        assert gen.is_coroutine_function(_retryable_coroutine)
        thing = NoIOErrorAfterCount(5)
        yield _retryable_coroutine(thing)
        assert thing.counter == thing.count

    @testing.gen_test
    def test_stop_after_attempt(self):
        """Retrying raises RetryError after the configured attempt count."""
        assert gen.is_coroutine_function(_retryable_coroutine)
        thing = NoIOErrorAfterCount(2)
        try:
            yield _retryable_coroutine_with_2_attempts(thing)
        except RetryError:
            assert thing.counter == 2

    def test_repr(self):
        """TornadoRetrying instances have a usable repr."""
        repr(tornadoweb.TornadoRetrying())

    def test_old_tornado(self):
        """@retry still works when gen.is_coroutine_function is absent."""
        old_attr = gen.is_coroutine_function
        try:
            del gen.is_coroutine_function
            # is_coroutine_function was introduced in tornado 4.5;
            # verify that we don't *completely* fall over on old versions
            @retry
            def retryable(thing):
                pass
        finally:
            # Restore the attribute so other tests are unaffected.
            gen.is_coroutine_function = old_attr
if __name__ == "__main__":
unittest.main()
|
from urllib2 import Request, urlopen
import base64
from tweepy import oauth
from tweepy.error import TweepError
from tweepy.api import API
class AuthHandler(object):
    """Abstract base class for Twitter API authentication handlers."""

    def apply_auth(self, url, method, headers, parameters):
        """Apply authentication headers to request"""
        raise NotImplementedError

    def get_username(self):
        """Return the username of the authenticated user"""
        raise NotImplementedError
class BasicAuthHandler(AuthHandler):
    """HTTP Basic authentication using a username/password pair."""

    def __init__(self, username, password):
        self.username = username
        # Pre-compute the base64-encoded 'user:pass' credential once.
        # NOTE(review): b64encode on a str works on Python 2 only; this
        # module imports urllib2, so Python 2 appears to be the target.
        self._b64up = base64.b64encode('%s:%s' % (username, password))

    def apply_auth(self, url, method, headers, parameters):
        headers['Authorization'] = 'Basic %s' % self._b64up

    def get_username(self):
        return self.username
class OAuthHandler(AuthHandler):
    """OAuth authentication handler"""

    REQUEST_TOKEN_URL = 'http://api.twitter.com/oauth/request_token'
    AUTHORIZATION_URL = 'http://api.twitter.com/oauth/authorize'
    AUTHENTICATE_URL = 'http://api.twitter.com/oauth/authenticate'
    ACCESS_TOKEN_URL = 'http://api.twitter.com/oauth/access_token'

    def __init__(self, consumer_key, consumer_secret, callback=None):
        self._consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
        self._sigmethod = oauth.OAuthSignatureMethod_HMAC_SHA1()
        self.request_token = None
        self.access_token = None
        self.callback = callback
        self.username = None

    def apply_auth(self, url, method, headers, parameters):
        """Sign the request with the consumer/access token and merge the
        resulting OAuth headers into `headers`."""
        request = oauth.OAuthRequest.from_consumer_and_token(
            self._consumer, http_url=url, http_method=method,
            token=self.access_token, parameters=parameters
        )
        request.sign_request(self._sigmethod, self._consumer, self.access_token)
        headers.update(request.to_header())

    def _get_request_token(self):
        """Fetch a new OAuth request token from the API."""
        try:
            request = oauth.OAuthRequest.from_consumer_and_token(
                self._consumer, http_url=self.REQUEST_TOKEN_URL, callback=self.callback
            )
            request.sign_request(self._sigmethod, self._consumer, None)
            resp = urlopen(Request(self.REQUEST_TOKEN_URL, headers=request.to_header()))
            return oauth.OAuthToken.from_string(resp.read())
        # Fix: 'except Exception, e' is Python-2-only syntax; 'as' is valid
        # on Python 2.6+ and Python 3.
        except Exception as e:
            raise TweepError(e)

    def set_request_token(self, key, secret):
        """Set an externally obtained request token."""
        self.request_token = oauth.OAuthToken(key, secret)

    def set_access_token(self, key, secret):
        """Set an externally obtained access token."""
        self.access_token = oauth.OAuthToken(key, secret)

    def get_authorization_url(self, signin_with_twitter=False):
        """Get the authorization URL to redirect the user"""
        try:
            # get the request token
            self.request_token = self._get_request_token()
            # build auth request and return as url
            if signin_with_twitter:
                auth_url = self.AUTHENTICATE_URL
            else:
                auth_url = self.AUTHORIZATION_URL
            request = oauth.OAuthRequest.from_token_and_callback(
                token=self.request_token, http_url=auth_url
            )
            return request.to_url()
        except Exception as e:
            raise TweepError(e)

    def get_access_token(self, verifier=None):
        """
        After user has authorized the request token, get access token
        with user supplied verifier.
        """
        try:
            # build request
            request = oauth.OAuthRequest.from_consumer_and_token(
                self._consumer,
                token=self.request_token, http_url=self.ACCESS_TOKEN_URL,
                verifier=str(verifier)
            )
            request.sign_request(self._sigmethod, self._consumer, self.request_token)
            # send request
            resp = urlopen(Request(self.ACCESS_TOKEN_URL, headers=request.to_header()))
            self.access_token = oauth.OAuthToken.from_string(resp.read())
            return self.access_token
        except Exception as e:
            raise TweepError(e)

    def get_username(self):
        """Return (and lazily look up) the authenticated user's screen name."""
        if self.username is None:
            api = API(self)
            user = api.verify_credentials()
            if user:
                self.username = user.screen_name
            else:
                raise TweepError("Unable to get username, invalid oauth token!")
        return self.username
|
import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
    """Return the initial VM formation and the snapshot/VM action sequence
    for this test path."""
    actions = [
        [TestAction.create_volume_snapshot, "vm1-root", "snapshot1"],
        [TestAction.stop_vm, "vm1"],
        [TestAction.start_vm, "vm1"],
        [TestAction.create_volume_snapshot, "vm1-root", "snapshot2"],
        [TestAction.stop_vm, "vm1"],
        [TestAction.reinit_vm, "vm1"],
        [TestAction.start_vm, "vm1"],
        [TestAction.batch_delete_volume_snapshot, ["snapshot2"]],
    ]
    return dict(initial_formation="template1", path_list=actions)
|
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest.common import waiters
from tempest import config
from tempest import test
CONF = config.CONF
class VolumesSnapshotsTestJSON(base.BaseV2ComputeTest):
    """Compute-API tests for volume snapshot create/show/list/delete."""

    @classmethod
    def skip_checks(cls):
        super(VolumesSnapshotsTestJSON, cls).skip_checks()
        # These tests require the Cinder (block storage) service.
        if not CONF.service_available.cinder:
            skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
            raise cls.skipException(skip_msg)

    @classmethod
    def setup_clients(cls):
        super(VolumesSnapshotsTestJSON, cls).setup_clients()
        cls.volumes_client = cls.volumes_extensions_client
        cls.snapshots_client = cls.snapshots_extensions_client

    @test.idempotent_id('cd4ec87d-7825-450d-8040-6e2068f2da8f')
    def test_volume_snapshot_create_get_list_delete(self):
        """Create a snapshot, then show and list it; delete via cleanup."""
        volume = self.create_volume()
        self.addCleanup(self.delete_volume, volume['id'])
        s_name = data_utils.rand_name(self.__class__.__name__ + '-Snapshot')
        # Create snapshot
        snapshot = self.snapshots_client.create_snapshot(
            volume_id=volume['id'],
            display_name=s_name)['snapshot']

        def delete_snapshot(snapshot_id):
            # Wait until the snapshot is usable before deleting it.
            waiters.wait_for_snapshot_status(self.snapshots_client,
                                             snapshot_id,
                                             'available')
            # Delete snapshot
            self.snapshots_client.delete_snapshot(snapshot_id)
            self.snapshots_client.wait_for_resource_deletion(snapshot_id)

        self.addCleanup(delete_snapshot, snapshot['id'])
        self.assertEqual(volume['id'], snapshot['volumeId'])
        # Get snapshot
        fetched_snapshot = self.snapshots_client.show_snapshot(
            snapshot['id'])['snapshot']
        self.assertEqual(s_name, fetched_snapshot['displayName'])
        self.assertEqual(volume['id'], fetched_snapshot['volumeId'])
        # Fetch all snapshots
        snapshots = self.snapshots_client.list_snapshots()['snapshots']
        self.assertIn(snapshot['id'], map(lambda x: x['id'], snapshots))
|
"""
Example Session Command
Make sure you can authenticate before running this command. This command
is currently hard coded to use the Identity service.
For example:
python -m examples.session /tenants
"""
import sys
from examples import common
from examples import connection
from openstack.identity import identity_service
def make_session(opts):
    """Build a connection from the parsed options and return its session."""
    return connection.make_connection(opts).session
def run_session(opts):
    """Issue a GET for opts.argument against the Identity service and
    print the session plus the response body."""
    request_path = opts.argument
    if request_path is None:
        raise Exception("A path argument must be specified")
    sess = make_session(opts)
    service = identity_service.IdentityService()
    print("Session: %s" % sess)
    print(sess.get(request_path, service=service).text)
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_session))
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a many-to-many 'optionsets' field (to options.OptionSet) on the
    attribute model."""

    dependencies = [
        ('domain', '0020_meta'),
    ]

    operations = [
        migrations.AddField(
            model_name='attribute',
            name='optionsets',
            field=models.ManyToManyField(blank=True, to='options.OptionSet'),
        ),
    ]
|
import mock
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.common import utils as cmn_utils
from ironic.conductor import task_manager
from ironic.conductor import utils as conductor_utils
from ironic import objects
from ironic.tests import base as tests_base
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base
from ironic.tests.db import utils
from ironic.tests.objects import utils as obj_utils
class NodeSetBootDeviceTestCase(base.DbTestCase):
    """Tests for conductor_utils.node_set_boot_device."""

    def test_node_set_boot_device_non_existent_device(self):
        """An unknown boot device name raises InvalidParameterValue."""
        mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
        self.driver = driver_factory.get_driver("fake_ipmitool")
        ipmi_info = utils.get_test_ipmi_info()
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake_ipmitool',
                                          driver_info=ipmi_info)
        task = task_manager.TaskManager(self.context, node.uuid)
        self.assertRaises(exception.InvalidParameterValue,
                          conductor_utils.node_set_boot_device,
                          task,
                          device='fake')

    def test_node_set_boot_device_valid(self):
        """A valid device is forwarded to the driver with persistent=False."""
        mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
        self.driver = driver_factory.get_driver("fake_ipmitool")
        ipmi_info = utils.get_test_ipmi_info()
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake_ipmitool',
                                          driver_info=ipmi_info)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.management,
                               'set_boot_device') as mock_sbd:
            conductor_utils.node_set_boot_device(task,
                                                 device='pxe')
            mock_sbd.assert_called_once_with(task,
                                             device='pxe',
                                             persistent=False)
class NodePowerActionTestCase(base.DbTestCase):
    """Tests for conductor_utils.node_power_action."""

    def setUp(self):
        super(NodePowerActionTestCase, self).setUp()
        mgr_utils.mock_the_extension_manager()
        self.driver = driver_factory.get_driver("fake")

    def test_node_power_action_power_on(self):
        """Test node_power_action to turn node power on."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_OFF)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_OFF
            conductor_utils.node_power_action(task, states.POWER_ON)
            node.refresh()
            get_power_mock.assert_called_once_with(mock.ANY)
            self.assertEqual(states.POWER_ON, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNone(node['last_error'])

    def test_node_power_action_power_off(self):
        """Test node_power_action to turn node power off."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_ON
            conductor_utils.node_power_action(task, states.POWER_OFF)
            node.refresh()
            get_power_mock.assert_called_once_with(mock.ANY)
            self.assertEqual(states.POWER_OFF, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNone(node['last_error'])

    def test_node_power_action_power_reboot(self):
        """Test for reboot a node."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power, 'reboot') as reboot_mock:
            conductor_utils.node_power_action(task, states.REBOOT)
            node.refresh()
            reboot_mock.assert_called_once_with(mock.ANY)
            self.assertEqual(states.POWER_ON, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNone(node['last_error'])

    def test_node_power_action_invalid_state(self):
        """Test for exception when changing to an invalid power state."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_ON
            self.assertRaises(exception.InvalidParameterValue,
                              conductor_utils.node_power_action,
                              task,
                              "INVALID_POWER_STATE")
            node.refresh()
            get_power_mock.assert_called_once_with(mock.ANY)
            self.assertEqual(states.POWER_ON, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNotNone(node['last_error'])
            # last_error is cleared when a new transaction happens
            conductor_utils.node_power_action(task, states.POWER_OFF)
            node.refresh()
            self.assertEqual(states.POWER_OFF, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNone(node['last_error'])

    def test_node_power_action_already_being_processed(self):
        """Test node power action after aborted power action.

        The target_power_state is expected to be None so it isn't
        checked in the code. This is what happens if it is not None.
        (Eg, if a conductor had died during a previous power-off
        attempt and left the target_power_state set to states.POWER_OFF,
        and the user is attempting to power-off again.)
        """
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON,
                                          target_power_state=states.POWER_OFF)
        task = task_manager.TaskManager(self.context, node.uuid)
        conductor_utils.node_power_action(task, states.POWER_OFF)
        node.refresh()
        self.assertEqual(states.POWER_OFF, node['power_state'])
        self.assertEqual(states.NOSTATE, node['target_power_state'])
        self.assertIsNone(node['last_error'])

    def test_node_power_action_in_same_state(self):
        """Test setting node state to its present state.

        Test that we don't try to set the power state if the requested
        state is the same as the current state.
        """
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          last_error='anything but None',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_ON
            with mock.patch.object(self.driver.power,
                                   'set_power_state') as set_power_mock:
                conductor_utils.node_power_action(task, states.POWER_ON)
                node.refresh()
                get_power_mock.assert_called_once_with(mock.ANY)
                self.assertFalse(set_power_mock.called,
                                 "set_power_state unexpectedly called")
                self.assertEqual(states.POWER_ON, node['power_state'])
                self.assertIsNone(node['target_power_state'])
                self.assertIsNone(node['last_error'])

    def test_node_power_action_failed_getting_state(self):
        """Test for exception when we can't get the current power state."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_state_mock:
            get_power_state_mock.side_effect = (
                exception.InvalidParameterValue('failed getting power state'))
            self.assertRaises(exception.InvalidParameterValue,
                              conductor_utils.node_power_action,
                              task,
                              states.POWER_ON)
            node.refresh()
            get_power_state_mock.assert_called_once_with(mock.ANY)
            self.assertEqual(states.POWER_ON, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNotNone(node['last_error'])

    def test_node_power_action_set_power_failure(self):
        """Test if an exception is thrown when the set_power call fails."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=cmn_utils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_OFF)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            with mock.patch.object(self.driver.power,
                                   'set_power_state') as set_power_mock:
                get_power_mock.return_value = states.POWER_OFF
                set_power_mock.side_effect = exception.IronicException()
                self.assertRaises(
                    exception.IronicException,
                    conductor_utils.node_power_action,
                    task,
                    states.POWER_ON)
                node.refresh()
                get_power_mock.assert_called_once_with(mock.ANY)
                set_power_mock.assert_called_once_with(mock.ANY,
                                                       states.POWER_ON)
                self.assertEqual(states.POWER_OFF, node['power_state'])
                self.assertIsNone(node['target_power_state'])
                self.assertIsNotNone(node['last_error'])
class CleanupAfterTimeoutTestCase(tests_base.TestCase):
    """Tests for conductor_utils.cleanup_after_timeout."""

    def setUp(self):
        super(CleanupAfterTimeoutTestCase, self).setUp()
        # Build a fully mocked task/node pair; no database is involved.
        self.task = mock.Mock(spec=task_manager.TaskManager)
        self.task.context = mock.sentinel.context
        self.task.driver = mock.Mock(spec_set=['deploy'])
        self.task.shared = False
        self.task.node = mock.Mock(spec_set=objects.Node)
        self.node = self.task.node

    def test_cleanup_after_timeout(self):
        """Deploy clean-up runs and the node is marked DEPLOYFAIL."""
        conductor_utils.cleanup_after_timeout(self.task)
        self.node.save.assert_called_once_with()
        self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
        self.assertEqual(states.DEPLOYFAIL, self.node.provision_state)
        self.assertEqual(states.NOSTATE, self.node.target_provision_state)
        self.assertIn('Timeout reached', self.node.last_error)

    def test_cleanup_after_timeout_shared_lock(self):
        """A shared task lock is rejected with ExclusiveLockRequired."""
        self.task.shared = True
        self.assertRaises(exception.ExclusiveLockRequired,
                          conductor_utils.cleanup_after_timeout,
                          self.task)

    def test_cleanup_after_timeout_cleanup_ironic_exception(self):
        """An IronicException from clean_up is recorded in last_error."""
        clean_up_mock = self.task.driver.deploy.clean_up
        clean_up_mock.side_effect = exception.IronicException('moocow')
        conductor_utils.cleanup_after_timeout(self.task)
        self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
        # The node is saved twice: once for the timeout, once after clean-up.
        self.assertEqual([mock.call()] * 2, self.node.save.call_args_list)
        self.assertEqual(states.DEPLOYFAIL, self.node.provision_state)
        self.assertEqual(states.NOSTATE, self.node.target_provision_state)
        self.assertIn('moocow', self.node.last_error)

    def test_cleanup_after_timeout_cleanup_random_exception(self):
        """Unexpected clean_up errors fall back to a generic message."""
        clean_up_mock = self.task.driver.deploy.clean_up
        clean_up_mock.side_effect = Exception('moocow')
        conductor_utils.cleanup_after_timeout(self.task)
        self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
        self.assertEqual([mock.call()] * 2, self.node.save.call_args_list)
        self.assertEqual(states.DEPLOYFAIL, self.node.provision_state)
        self.assertEqual(states.NOSTATE, self.node.target_provision_state)
        self.assertIn('Deploy timed out', self.node.last_error)
|
"""Helper utility to save parameter dicts."""
import tvm
import tvm._ffi
# Packed functions registered by the TVM runtime that perform the actual
# (de)serialization of parameter dictionaries on the C++ side.
_save_param_dict = tvm._ffi.get_global_func("tvm.relay._save_param_dict")
_load_param_dict = tvm._ffi.get_global_func("tvm.relay._load_param_dict")
def save_param_dict(params):
    """Serialize a parameter dictionary to binary bytes.
    The result binary bytes can be loaded by the
    GraphModule with API "load_params".
    Parameters
    ----------
    params : dict of str to NDArray
        The parameter dictionary.
    Returns
    -------
    param_bytes: bytearray
        Serialized parameters.
    Examples
    --------
    .. code-block:: python
        # set up the parameter dict
        params = {"param0": arr0, "param1": arr1}
        # save the parameters as byte array
        param_bytes = tvm.relay.save_param_dict(params)
        # We can serialize the param_bytes and load it back later.
        # Pass in byte array to module to directly set parameters
        graph_runtime_mod.load_params(param_bytes)
    """
    # The FFI entry point expects a flat positional argument list of the
    # form: name0, array0, name1, array1, ...
    flattened = []
    for name, value in params.items():
        flattened.extend((name, tvm.nd.array(value)))
    return _save_param_dict(*flattened)
def load_param_dict(param_bytes):
    """Load parameter dictionary from binary bytes.
    Parameters
    ----------
    param_bytes: bytearray
        Serialized parameters.
    Returns
    -------
    params : dict of str to NDArray
        The parameter dictionary.
    """
    # Normalize to bytearray, which is what the FFI function expects.
    # NOTE(review): bytearray(str) raises TypeError on Python 3 without an
    # encoding argument -- the str branch looks Python 2 only; confirm.
    if isinstance(param_bytes, (bytes, str)):
        param_bytes = bytearray(param_bytes)
    load_arr = _load_param_dict(param_bytes)
    # The FFI call returns an array of (name, array) records.
    return {v.name: v.array for v in load_arr}
|
from typing import Any
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from django.conf import settings
from zproject.backends import ZulipLDAPUserPopulator
from zerver.models import UserProfile
from zerver.lib.logging_util import create_logger
# Progress/warnings go to the dedicated LDAP sync log at INFO level.
logger = create_logger(__name__, settings.LDAP_SYNC_LOG_PATH, 'INFO')
def sync_ldap_user_data() -> None:
    """Refresh all active, non-bot users from LDAP via the populator backend.
    Logs one line per user (updated / not found) plus start and finish
    markers to the LDAP sync log.
    """
    logger.info("Starting update.")
    backend = ZulipLDAPUserPopulator()
    for u in UserProfile.objects.select_related().filter(is_active=True, is_bot=False).all():
        # This will save the user if relevant, and will do nothing if the user
        # does not exist.
        try:
            if backend.populate_user(backend.django_to_ldap_username(u.email)) is not None:
                # Lazy %-style arguments: the message is only formatted if
                # the record is actually emitted (standard logging idiom).
                logger.info("Updated %s.", u.email)
            else:
                logger.warning("Did not find %s in LDAP.", u.email)
        except IntegrityError:
            # populate_user tried to create a row that collides with an
            # existing user; skip and continue with the rest.
            logger.warning("User populated did not match an existing user.")
    logger.info("Finished update.")
class Command(BaseCommand):
    # Management command entry point: runs a one-shot LDAP user-data sync.
    def handle(self, *args: Any, **options: Any) -> None:
        sync_ldap_user_data()
|
# ASP program under test.  NOTE(review): the module-level name shadows the
# builtin ``input``; kept as-is because the test harness presumably imports
# it by this exact name.
input = """
{d}.
a | c :- d.
a | c :- b.
a :- b.
b :- a.
"""
# Expected answer sets for the program above, one set per line.
output = """
{}
{c, d}
{a, b, d}
"""
|
from traits.api import Float, Property, Bool, Button, String, Enum
from pychron.core.ui.thread import Thread
from pychron.globals import globalv
from pychron.lasers.laser_managers.base_lase_manager import BaseLaserManager
class RemoteLaserManager(BaseLaserManager):
    """Laser manager that mirrors a remotely-controlled laser.
    Subclasses must implement ``open``; the current stage position is
    cached in the ``_x``/``_y``/``_z`` shadow traits, which back the
    read-only ``x``/``y``/``z`` properties.
    """
    # Free-form position entry (committed on <enter>, not per keystroke).
    position = String(enter_set=True, auto_set=False)
    x = Property(depends_on="_x")
    y = Property(depends_on="_y")
    z = Property(depends_on="_z")
    _x = Float
    _y = Float
    _z = Float
    connected = Bool
    test_connection_button = Button("Test Connection")
    snapshot_button = Button("Test Snapshot")
    use_autocenter = Bool(False)
    output_power = Float(enter_set=True, auto_set=False)
    fire_laser_button = Button
    # Button label flips between "Fire" and "Stop" with the firing state.
    fire_label = Property(depends_on="_firing")
    units = Enum("watts", "percent")
    _patterning = False
    _firing = Bool(False)
    _is_moving = Bool(False)
    stage_stop_button = Button("Stage Stop")
    move_enabled_button = Button("Enable Move")
    move_enabled_label = Property(depends_on="_move_enabled")
    _move_enabled = Bool(False)
    update_position_button = Button
    def open(self, *args, **kw):
        # Concrete managers must establish their own connection.
        raise NotImplementedError
    def opened(self):
        """Post-connection hook: refresh position, then run subclass hook."""
        self.debug("opened")
        if self.update_position():
            self._opened_hook()
        return True
    def update_position(self):
        # Ask the base class for the current position and mirror it into the
        # shadow traits so the x/y/z Property views fire change notifications.
        pos = super(RemoteLaserManager, self).update_position()
        if pos:
            self.trait_set(**dict(zip(("_x", "_y", "_z"), pos)))
            return True
    # private
    def _update_position_button_fired(self):
        if not self.simulation:
            self.update_position()
    def _test_connection_button_fired(self):
        self.test_connection()
        if self.connected:
            self.opened()
    def _test_connection_hook(self):
        # Subclass extension point, invoked after the communicator is set up.
        pass
    def _test_connection(self):
        # In simulation mode report the global simulation flag; otherwise
        # rebuild the communicator and let the subclass hook update
        # ``connected``.
        if self.simulation:
            return globalv.communication_simulation, None
        else:
            self.connected = False
            if self.setup_communicator():
                self._test_connection_hook()
            self.debug("test connection. connected= {}".format(self.connected))
            return self.connected, None
    def _position_changed(self):
        # Move in a background thread so the UI stays responsive; keep a
        # reference on self so the thread is not garbage collected early.
        if self.position is not None:
            t = Thread(
                target=self._move_to_position, args=(self.position, self.use_autocenter)
            )
            t.start()
            self._position_thread = t
    def _enable_fired(self):
        # Toggle the laser enabled state; only flip the flag on success.
        if self.enabled:
            self.disable_laser()
            self.enabled = False
        else:
            if self.enable_laser():
                self.enabled = True
    def _get_move_enabled_label(self):
        return "Enable Axis Moves" if not self._move_enabled else "Disable Axis Moves"
    def _get_fire_label(self):
        return "Fire" if not self._firing else "Stop"
    def _move_enabled_button_fired(self):
        self._move_enabled = not self._move_enabled
    def _opened_hook(self):
        # Subclass extension point, invoked once a connection is confirmed.
        pass
    def _get_x(self):
        return self._x
    def _get_y(self):
        return self._y
    def _get_z(self):
        return self._z
|
from uw_sws.section_status import get_section_status_by_label
|
"""Support for Tuya select."""
from __future__ import annotations
from typing import cast
from tuya_iot import TuyaDevice, TuyaDeviceManager
from tuya_iot.device import TuyaDeviceStatusRange
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import HomeAssistantTuyaData
from .base import EnumTypeData, TuyaEntity
from .const import (
DEVICE_CLASS_TUYA_BASIC_ANTI_FLICKR,
DEVICE_CLASS_TUYA_BASIC_NIGHTVISION,
DEVICE_CLASS_TUYA_DECIBEL_SENSITIVITY,
DEVICE_CLASS_TUYA_FINGERBOT_MODE,
DEVICE_CLASS_TUYA_IPC_WORK_MODE,
DEVICE_CLASS_TUYA_LED_TYPE,
DEVICE_CLASS_TUYA_LIGHT_MODE,
DEVICE_CLASS_TUYA_MOTION_SENSITIVITY,
DEVICE_CLASS_TUYA_RECORD_MODE,
DEVICE_CLASS_TUYA_RELAY_STATUS,
DEVICE_CLASS_TUYA_VACUUM_CISTERN,
DEVICE_CLASS_TUYA_VACUUM_COLLECTION,
DEVICE_CLASS_TUYA_VACUUM_MODE,
DOMAIN,
TUYA_DISCOVERY_NEW,
DPCode,
)
SELECTS: dict[str, tuple[SelectEntityDescription, ...]] = {
# Coffee maker
# https://developer.tuya.com/en/docs/iot/categorykfj?id=Kaiuz2p12pc7f
"kfj": (
SelectEntityDescription(
key=DPCode.CUP_NUMBER,
name="Cups",
icon="mdi:numeric",
),
SelectEntityDescription(
key=DPCode.CONCENTRATION_SET,
name="Concentration",
icon="mdi:altimeter",
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.MATERIAL,
name="Material",
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.MODE,
name="Mode",
icon="mdi:coffee",
),
),
# Switch
# https://developer.tuya.com/en/docs/iot/s?id=K9gf7o5prgf7s
"kg": (
SelectEntityDescription(
key=DPCode.RELAY_STATUS,
name="Power on Behavior",
device_class=DEVICE_CLASS_TUYA_RELAY_STATUS,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LIGHT_MODE,
name="Indicator Light Mode",
device_class=DEVICE_CLASS_TUYA_LIGHT_MODE,
entity_category=EntityCategory.CONFIG,
),
),
# Heater
# https://developer.tuya.com/en/docs/iot/categoryqn?id=Kaiuz18kih0sm
"qn": (
SelectEntityDescription(
key=DPCode.LEVEL,
name="Temperature Level",
icon="mdi:thermometer-lines",
),
),
# Siren Alarm
# https://developer.tuya.com/en/docs/iot/categorysgbj?id=Kaiuz37tlpbnu
"sgbj": (
SelectEntityDescription(
key=DPCode.ALARM_VOLUME,
name="Volume",
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.BRIGHT_STATE,
name="Brightness",
entity_category=EntityCategory.CONFIG,
),
),
# Smart Camera
# https://developer.tuya.com/en/docs/iot/categorysp?id=Kaiuz35leyo12
"sp": (
SelectEntityDescription(
key=DPCode.IPC_WORK_MODE,
name="IPC Mode",
device_class=DEVICE_CLASS_TUYA_IPC_WORK_MODE,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.DECIBEL_SENSITIVITY,
name="Sound Detection Sensitivity",
icon="mdi:volume-vibrate",
device_class=DEVICE_CLASS_TUYA_DECIBEL_SENSITIVITY,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.RECORD_MODE,
name="Record Mode",
icon="mdi:record-rec",
device_class=DEVICE_CLASS_TUYA_RECORD_MODE,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.BASIC_NIGHTVISION,
name="Night Vision",
icon="mdi:theme-light-dark",
device_class=DEVICE_CLASS_TUYA_BASIC_NIGHTVISION,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.BASIC_ANTI_FLICKER,
name="Anti-flicker",
icon="mdi:image-outline",
device_class=DEVICE_CLASS_TUYA_BASIC_ANTI_FLICKR,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.MOTION_SENSITIVITY,
name="Motion Detection Sensitivity",
icon="mdi:motion-sensor",
device_class=DEVICE_CLASS_TUYA_MOTION_SENSITIVITY,
entity_category=EntityCategory.CONFIG,
),
),
# IoT Switch?
# Note: Undocumented
"tdq": (
SelectEntityDescription(
key=DPCode.RELAY_STATUS,
name="Power on Behavior",
device_class=DEVICE_CLASS_TUYA_RELAY_STATUS,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LIGHT_MODE,
name="Indicator Light Mode",
device_class=DEVICE_CLASS_TUYA_LIGHT_MODE,
entity_category=EntityCategory.CONFIG,
),
),
# Dimmer Switch
# https://developer.tuya.com/en/docs/iot/categorytgkg?id=Kaiuz0ktx7m0o
"tgkg": (
SelectEntityDescription(
key=DPCode.RELAY_STATUS,
name="Power on Behavior",
device_class=DEVICE_CLASS_TUYA_RELAY_STATUS,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LIGHT_MODE,
name="Indicator Light Mode",
device_class=DEVICE_CLASS_TUYA_LIGHT_MODE,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LED_TYPE_1,
name="Light Source Type",
device_class=DEVICE_CLASS_TUYA_LED_TYPE,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LED_TYPE_2,
name="Light 2 Source Type",
device_class=DEVICE_CLASS_TUYA_LED_TYPE,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LED_TYPE_3,
name="Light 3 Source Type",
device_class=DEVICE_CLASS_TUYA_LED_TYPE,
entity_category=EntityCategory.CONFIG,
),
),
# Dimmer
# https://developer.tuya.com/en/docs/iot/tgq?id=Kaof8ke9il4k4
"tgq": (
SelectEntityDescription(
key=DPCode.LED_TYPE_1,
name="Light Source Type",
device_class=DEVICE_CLASS_TUYA_LED_TYPE,
entity_category=EntityCategory.CONFIG,
),
SelectEntityDescription(
key=DPCode.LED_TYPE_2,
name="Light 2 Source Type",
device_class=DEVICE_CLASS_TUYA_LED_TYPE,
entity_category=EntityCategory.CONFIG,
),
),
# Fingerbot
"szjqr": (
SelectEntityDescription(
key=DPCode.MODE,
name="Mode",
device_class=DEVICE_CLASS_TUYA_FINGERBOT_MODE,
entity_category=EntityCategory.CONFIG,
),
),
# Robot Vacuum
# https://developer.tuya.com/en/docs/iot/fsd?id=K9gf487ck1tlo
"sd": (
SelectEntityDescription(
key=DPCode.CISTERN,
name="Water Tank Adjustment",
entity_category=EntityCategory.CONFIG,
device_class=DEVICE_CLASS_TUYA_VACUUM_CISTERN,
icon="mdi:water-opacity",
),
SelectEntityDescription(
key=DPCode.COLLECTION_MODE,
name="Dust Collection Mode",
entity_category=EntityCategory.CONFIG,
device_class=DEVICE_CLASS_TUYA_VACUUM_COLLECTION,
icon="mdi:air-filter",
),
SelectEntityDescription(
key=DPCode.MODE,
name="Mode",
entity_category=EntityCategory.CONFIG,
device_class=DEVICE_CLASS_TUYA_VACUUM_MODE,
icon="mdi:layers-outline",
),
),
}
SELECTS["cz"] = SELECTS["kg"]
SELECTS["pc"] = SELECTS["kg"]
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up Tuya select dynamically through Tuya discovery."""
    hass_data: HomeAssistantTuyaData = hass.data[DOMAIN][entry.entry_id]
    @callback
    def async_discover_device(device_ids: list[str]) -> None:
        """Discover and add a discovered Tuya select."""
        entities: list[TuyaSelectEntity] = []
        for device_id in device_ids:
            device = hass_data.device_manager.device_map[device_id]
            if descriptions := SELECTS.get(device.category):
                for description in descriptions:
                    # Only create an entity when the device actually exposes
                    # the DP code, either as a settable function or a status.
                    if (
                        description.key in device.function
                        or description.key in device.status
                    ):
                        entities.append(
                            TuyaSelectEntity(
                                device, hass_data.device_manager, description
                            )
                        )
        async_add_entities(entities)
    # Process devices already known at setup time, then subscribe for
    # devices discovered later.
    async_discover_device([*hass_data.device_manager.device_map])
    entry.async_on_unload(
        async_dispatcher_connect(hass, TUYA_DISCOVERY_NEW, async_discover_device)
    )
class TuyaSelectEntity(TuyaEntity, SelectEntity):
    """Tuya Select Entity."""
    def __init__(
        self,
        device: TuyaDevice,
        device_manager: TuyaDeviceManager,
        description: SelectEntityDescription,
    ) -> None:
        """Init Tuya select.
        Options are populated from the device's enum status range when one
        is advertised for this DP code; otherwise the entity exposes an
        empty option list.
        """
        super().__init__(device, device_manager)
        self.entity_description = description
        self._attr_unique_id = f"{super().unique_id}{description.key}"
        # BUG FIX: this default was assigned to the misspelled attribute
        # ``_attr_opions``, leaving ``_attr_options`` unset (and
        # current_option broken) for devices without a status range.
        self._attr_options: list[str] = []
        if status_range := device.status_range.get(description.key):
            self._status_range = cast(TuyaDeviceStatusRange, status_range)
            # Extract type data from enum status range,
            if self._status_range.type == "Enum":
                type_data = EnumTypeData.from_json(self._status_range.values)
                self._attr_options = type_data.range
    @property
    def current_option(self) -> str | None:
        """Return the selected entity option to represent the entity state."""
        # Raw value from the device status map.
        value = self.device.status.get(self.entity_description.key)
        # A missing value, or one outside the advertised options, maps to
        # "no current option" rather than surfacing a bogus state.
        if value is None or value not in self._attr_options:
            return None
        return value
    def select_option(self, option: str) -> None:
        """Change the selected option."""
        self._send_command(
            [
                {
                    "code": self.entity_description.key,
                    "value": option,
                }
            ]
        )
|
import json
from textwrap import dedent
from typing import Optional
import pytest
from pants.backend.python.target_types import PexExecutionMode
from pants.testutil.pants_integration_test import PantsResult, run_pants, setup_tmpdir
@pytest.mark.parametrize(
("entry_point", "execution_mode", "include_tools"),
[
("app.py", PexExecutionMode.UNZIP, True),
("app.py", PexExecutionMode.VENV, True),
("app.py:main", PexExecutionMode.ZIPAPP, False),
("app.py:main", None, False),
],
)
def test_run_sample_script(
entry_point: str, execution_mode: Optional[PexExecutionMode], include_tools: bool
) -> None:
"""Test that we properly run a `pex_binary` target.
This checks a few things:
- We can handle source roots.
- We properly load third party requirements.
- We propagate the error code.
"""
sources = {
"src_root1/project/app.py": dedent(
"""\
import sys
from utils.strutil import upper_case
def main():
print(upper_case("Hello world."))
print("Hola, mundo.", file=sys.stderr)
sys.exit(23)
if __name__ == "__main__":
main()
"""
),
"src_root1/project/BUILD": dedent(
f"""\
python_library(name='lib')
pex_binary(
entry_point={entry_point!r},
execution_mode={execution_mode.value if execution_mode is not None else None!r},
include_tools={include_tools!r},
)
"""
),
"src_root2/utils/strutil.py": dedent(
"""\
def upper_case(s):
return s.upper()
"""
),
"src_root2/utils/BUILD": "python_library()",
}
def run(*extra_args: str, **extra_env: str) -> PantsResult:
with setup_tmpdir(sources) as tmpdir:
args = [
"--backend-packages=pants.backend.python",
f"--source-root-patterns=['/{tmpdir}/src_root1', '/{tmpdir}/src_root2']",
"--pants-ignore=__pycache__",
"--pants-ignore=/src/python",
"run",
f"{tmpdir}/src_root1/project/app.py",
*extra_args,
]
return run_pants(args, extra_env=extra_env)
result = run()
assert "Hola, mundo.\n" in result.stderr
assert result.stdout == "HELLO WORLD.\n"
assert result.exit_code == 23
if include_tools:
result = run("--", "info", PEX_TOOLS="1")
assert result.exit_code == 0
pex_info = json.loads(result.stdout)
assert (execution_mode is PexExecutionMode.UNZIP) == pex_info["unzip"]
assert (execution_mode is PexExecutionMode.VENV) == pex_info["venv"]
|
import tempfile
import io
import sys
import subprocess
# Canonical upstream installer script; downloaded fresh on every run.
MAIN_SCRIPT_URL = "https://raw.githubusercontent.com/platformio/platformio-core-installer/master/get-platformio.py"
def download_with_requests(url, dst):
    """Fetch *url* into *dst* using the third-party ``requests`` package."""
    import requests
    response = requests.get(url, stream=True)
    with open(dst, "wb") as out:
        # Stream the body in buffer-sized pieces instead of loading it all.
        for block in response.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE):
            out.write(block)
    return dst
def download_with_urllib3(url, dst):
    """Fetch *url* into *dst* using the third-party ``urllib3`` package."""
    import urllib3
    pool = urllib3.PoolManager()
    response = pool.request("GET", url, preload_content=False)
    with open(dst, "wb") as out:
        # Read until the stream is exhausted, one buffer at a time.
        block = response.read(io.DEFAULT_BUFFER_SIZE)
        while block:
            out.write(block)
            block = response.read(io.DEFAULT_BUFFER_SIZE)
    # Return the connection to the pool once the body is fully consumed.
    response.release_conn()
    return dst
def download_with_urllib(url, dst):
    """Fetch *url* into *dst* with only the standard library (py2/py3)."""
    # urlopen lives in different modules on Python 2 and 3.
    if sys.version_info[0] == 3:
        from urllib.request import urlopen
    else:
        from urllib import urlopen
    response = urlopen(url)
    chunk_size = 16 * 1024
    with open(dst, "wb") as out:
        while True:
            block = response.read(chunk_size)
            if not block:
                break
            out.write(block)
    return dst
def download_with_curl(url, dst):
    # Shell out to curl; check_output raises CalledProcessError on a
    # non-zero exit, which download_file() treats as "try the next method".
    subprocess.check_output(["curl", "-o", dst, url])
    return dst
def download_with_wget(url, dst):
    # Shell out to wget; failures surface as CalledProcessError, letting
    # download_file() fall through to the next method.
    subprocess.check_output(["wget", "-O", dst, url])
    return dst
def download_file(url, dst):
    """Download *url* to *dst*, trying each transport until one succeeds.
    Methods are ordered from most to least preferred; each failure is
    swallowed so the next method gets a chance.  Raises Exception when
    every method has failed.
    """
    methods = [
        download_with_requests,
        download_with_urllib3,
        download_with_urllib,
        download_with_curl,
        download_with_wget,
    ]
    for method in methods:
        try:
            method(url, dst)
            return dst
        except Exception:
            # BUG FIX: was a bare ``except:``, which also swallowed
            # SystemExit and KeyboardInterrupt.  Exception keeps the
            # best-effort fallback without hiding interpreter signals.
            pass
    raise Exception("Could not download file '%s' to '%s' " % (url, dst))
def main():
    # NOTE(review): NamedTemporaryFile keeps the file open while the child
    # process reads it; this pattern cannot re-open the file on Windows --
    # confirm supported platforms.
    with tempfile.NamedTemporaryFile() as tmp_file:
        dst = download_file(MAIN_SCRIPT_URL, str(tmp_file.name))
        # Re-run the downloaded installer with the same interpreter and
        # forward our own CLI arguments.
        command = [sys.executable, dst]
        command.extend(sys.argv[1:])
        subprocess.check_call(command)
if __name__ == "__main__":
    # main() returns None, so this exits 0 unless check_call raised.
    sys.exit(main())
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ServicePort(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, name=None, node_port=None, port=None, protocol=None, target_port=None):
"""
V1ServicePort - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'name': 'str',
'node_port': 'int',
'port': 'int',
'protocol': 'str',
'target_port': 'str'
}
self.attribute_map = {
'name': 'name',
'node_port': 'nodePort',
'port': 'port',
'protocol': 'protocol',
'target_port': 'targetPort'
}
self._name = name
self._node_port = node_port
self._port = port
self._protocol = protocol
self._target_port = target_port
@property
def name(self):
"""
Gets the name of this V1ServicePort.
The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. This maps to the 'Name' field in EndpointPort objects. Optional if only one ServicePort is defined on this service.
:return: The name of this V1ServicePort.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V1ServicePort.
The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. This maps to the 'Name' field in EndpointPort objects. Optional if only one ServicePort is defined on this service.
:param name: The name of this V1ServicePort.
:type: str
"""
self._name = name
@property
def node_port(self):
"""
Gets the node_port of this V1ServicePort.
The port on each node on which this service is exposed when type=NodePort or LoadBalancer. Usually assigned by the system. If specified, it will be allocated to the service if unused or else creation of the service will fail. Default is to auto-allocate a port if the ServiceType of this Service requires one. More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport
:return: The node_port of this V1ServicePort.
:rtype: int
"""
return self._node_port
@node_port.setter
def node_port(self, node_port):
"""
Sets the node_port of this V1ServicePort.
The port on each node on which this service is exposed when type=NodePort or LoadBalancer. Usually assigned by the system. If specified, it will be allocated to the service if unused or else creation of the service will fail. Default is to auto-allocate a port if the ServiceType of this Service requires one. More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport
:param node_port: The node_port of this V1ServicePort.
:type: int
"""
self._node_port = node_port
@property
def port(self):
"""
Gets the port of this V1ServicePort.
The port that will be exposed by this service.
:return: The port of this V1ServicePort.
:rtype: int
"""
return self._port
@port.setter
def port(self, port):
"""
Sets the port of this V1ServicePort.
The port that will be exposed by this service.
:param port: The port of this V1ServicePort.
:type: int
"""
if port is None:
raise ValueError("Invalid value for `port`, must not be `None`")
self._port = port
@property
def protocol(self):
"""
Gets the protocol of this V1ServicePort.
The IP protocol for this port. Supports \"TCP\" and \"UDP\". Default is TCP.
:return: The protocol of this V1ServicePort.
:rtype: str
"""
return self._protocol
@protocol.setter
def protocol(self, protocol):
"""
Sets the protocol of this V1ServicePort.
The IP protocol for this port. Supports \"TCP\" and \"UDP\". Default is TCP.
:param protocol: The protocol of this V1ServicePort.
:type: str
"""
self._protocol = protocol
@property
def target_port(self):
"""
Gets the target_port of this V1ServicePort.
Number or name of the port to access on the pods targeted by the service. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME. If this is a string, it will be looked up as a named port in the target Pod's container ports. If this is not specified, the value of the 'port' field is used (an identity map). This field is ignored for services with clusterIP=None, and should be omitted or set equal to the 'port' field. More info: https://kubernetes.io/docs/concepts/services-networking/service/#defining-a-service
:return: The target_port of this V1ServicePort.
:rtype: str
"""
return self._target_port
@target_port.setter
def target_port(self, target_port):
"""
Sets the target_port of this V1ServicePort.
Number or name of the port to access on the pods targeted by the service. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME. If this is a string, it will be looked up as a named port in the target Pod's container ports. If this is not specified, the value of the 'port' field is used (an identity map). This field is ignored for services with clusterIP=None, and should be omitted or set equal to the 'port' field. More info: https://kubernetes.io/docs/concepts/services-networking/service/#defining-a-service
:param target_port: The target_port of this V1ServicePort.
:type: str
"""
self._target_port = target_port
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1ServicePort):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
'''
'''
import hashlib
import hmac
Test.Summary = '''
Test url_sig plugin
'''
Test.ContinueOnFail = True
Test.SkipUnless(Condition.PluginExists('url_sig.so'))
url_sig_log_id = Test.Disk.File("url_sig_short.log")
url_sig_log_id.Content = "url_sig.gold"
server = Test.MakeOriginServer("server")
request_header = {
"headers": "GET /foo/abcde/qrstuvwxyz HTTP/1.1\r\nHost: just.any.thing\r\n\r\n", "timestamp": "1469733493.993", "body": ""
}
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
server.addResponse("sessionfile.log", request_header, response_header)
ts = Test.MakeATSProcess("ts", select_ports=True, enable_tls=True, enable_cache=False)
ts.addDefaultSSLFiles()
ts.Disk.records_config.update({
# 'proxy.config.diags.debug.enabled': 1,
# 'proxy.config.diags.debug.tags': 'http|url_sig',
'proxy.config.proxy_name': 'Poxy_Proxy', # This will be the server name.
'proxy.config.ssl.server.cert.path': '{0}'.format(ts.Variables.SSLDir),
'proxy.config.ssl.server.private_key.path': '{0}'.format(ts.Variables.SSLDir),
})
ts.Disk.ssl_multicert_config.AddLine(
'dest_ip=* ssl_cert_name=server.pem ssl_key_name=server.key'
)
ts.Setup.Copy("url_sig.config", ts.Variables.CONFIGDIR)
ts.Disk.remap_config.AddLine(
f'map http://one.two.three/ http://127.0.0.1:{server.Variables.Port}/' +
' @plugin=url_sig.so @pparam=url_sig.config'
)
ts.Disk.remap_config.AddLine(
f'map https://one.two.three/ http://127.0.0.1:{server.Variables.Port}/' +
' @plugin=url_sig.so @pparam=url_sig.config'
)
ts.Disk.remap_config.AddLine(
f'map http://four.five.six/ http://127.0.0.1:{server.Variables.Port}/' +
' @plugin=url_sig.so @pparam=url_sig.config @pparam=pristineurl'
)
ts.Disk.remap_config.AddLine(
f'map http://seven.eight.nine/ http://127.0.0.1:{server.Variables.Port}' +
' @plugin=url_sig.so @pparam=url_sig.config @pparam=PristineUrl'
)
LogTee = f" 2>&1 | grep '^<' | tee -a {Test.RunDirectory}/url_sig_long.log"
tr = Test.AddTestRun()
tr.Processes.Default.StartBefore(ts)
tr.Processes.Default.StartBefore(server, ready=When.PortOpen(server.Variables.Port))
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.2&E=33046620008&A=2&K=13&P=101&S=d1f352d4f1d931ad2f441013402d93f8'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=1&A=2&K=13&P=010&S=f237aad1fa010234d7bf8108a0e36387'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&K=13&P=101&S=d1f352d4f1d931ad2f441013402d93f8'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=3&K=13&P=101&S=d1f352d4f1d931ad2f441013402d93f8'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=2&K=13&S=d1f352d4f1d931ad2f441013402d93f8'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=2&K=13&P=10&S=d1f352d4f1d931ad2f441013402d93f8'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=2&K=13&P=101'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=2&K=13&P=101&S=d1f452d4f1d931ad2f441013402d93f8'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=2&&K=13&P=101&S=d1f352d4f1d931ad2f441013402d93f8#'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://four.five.six/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046618556&A=1&K=15&P=1&S=f4103561a23adab7723a89b9831d77e0afb61d92'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?E=33046618586&A=2&K=0&P=1&S=0364efa28afe345544596705b92d20ac'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046619717&A=2&K=13&P=010&S=f237aad1fa010234d7bf8108a0e36387'" +
LogTee
)
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} 'http://seven.eight.nine/" +
"foo/abcde/qrstuvwxyz?C=127.0.0.1&E=33046620008&A=2&K=13&P=101&S=d1f352d4f1d931ad2f441013402d93f8'" +
LogTee
)
def sign(payload, key):
    """Return the lowercase hex SHA-1 HMAC of *payload* keyed with *key*.
    This is the value of the url_sig 'S' query parameter.
    """
    # hexdigest() already yields the hex string directly; the original
    # encode/hex/encode/decode round-trip produced the identical result.
    return hmac.new(key.encode('utf-8'), payload.encode('utf-8'),
                    digestmod=hashlib.sha1).hexdigest()
path = "foo/abcde/qrstuvwxyz?E=33046618506&A=1&K=7&P=1&S="
to_sign = f"127.0.0.1:{server.Variables.Port}/{path}"
url = "http://one.two.three/" + path + sign(to_sign, "dqsgopTSM_doT6iAysasQVUKaPykyb6e")
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --proxy http://127.0.0.1:{ts.Variables.port} '{url}'" + LogTee
)
path = "foo/abcde/qrstuvwxyz?E=33046618506&A=1&K=7&P=1&S="
to_sign = f"127.0.0.1:{server.Variables.Port}/{path}"
url = f"https://127.0.0.1:{ts.Variables.ssl_port}/{path}" + sign(to_sign, "dqsgopTSM_doT6iAysasQVUKaPykyb6e")
tr = Test.AddTestRun()
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Command = (
f"curl --verbose --http1.1 --insecure --header 'Host: one.two.three' '{url}'" +
LogTee + " ; grep -F -e '< HTTP' -e Authorization {0}/url_sig_long.log > {0}/url_sig_short.log ".format(ts.RunDirectory)
)
ts.Disk.diags_log.Content = Testers.ContainsExpression("ERROR", "Some tests are failure tests")
ts.Disk.diags_log.Content += Testers.ExcludesExpression("Error parsing", "Verify that we can accept long comment lines")
|
from functools import wraps
from allura import model as M
from ming.orm.ormsession import ThreadLocalORMSession
from pylons import c
def with_user_project(username):
    """Test decorator factory: ensure *username*'s user-project exists.
    Sets ``c.user`` to the named user and lazily registers the
    ``u/<username>`` project in the Users neighborhood, flushing and
    closing the ORM sessions after creation.
    """
    def _with_user_project(func):
        @wraps(func)
        def wrapped(*args, **kw):
            user = M.User.by_username(username)
            c.user = user
            n = M.Neighborhood.query.get(name='Users')
            shortname = 'u/' + username
            p = M.Project.query.get(shortname=shortname, neighborhood_id=n._id)
            if not p:
                # First use for this user: create the project and persist it
                # before the wrapped test runs.
                n.register_project(shortname, user=user, user_project=True)
                ThreadLocalORMSession.flush_all()
                ThreadLocalORMSession.close_all()
            return func(*args, **kw)
        return wrapped
    return _with_user_project
def with_tool(project_shortname, ep_name, mount_point=None, mount_label=None,
              ordinal=None, post_install_hook=None, username='test-admin',
              **override_options):
    """Decorator factory: ensure a tool is installed on a project before the
    wrapped test runs.

    Sets c.user and c.project, installs the tool at mount_point if it is not
    already mounted (running post_install_hook, draining ready 'setup' tasks,
    and flushing/closing the ORM sessions), and sets c.app to the app
    instance either way.
    """
    def _with_tool(func):
        @wraps(func)
        def wrapped(*args, **kw):
            c.user = M.User.by_username(username)
            p = M.Project.query.get(shortname=project_shortname)
            c.project = p
            # Only install when a mount point was requested and nothing is
            # mounted there yet; otherwise reuse the existing app instance.
            if mount_point and not p.app_instance(mount_point):
                c.app = p.install_app(ep_name, mount_point, mount_label, ordinal, **override_options)
                if post_install_hook:
                    post_install_hook(c.app)
                # Drain any queued setup tasks created by the install before
                # the test body runs.
                while M.MonQTask.run_ready('setup'):
                    pass
                ThreadLocalORMSession.flush_all()
                ThreadLocalORMSession.close_all()
            elif mount_point:
                c.app = p.app_instance(mount_point)
            return func(*args, **kw)
        return wrapped
    return _with_tool
# Ready-made decorators that install a specific tool on the 'test' project.
with_discussion = with_tool('test', 'Discussion', 'discussion')
with_link = with_tool('test', 'Link', 'link')
with_tracker = with_tool('test', 'Tickets', 'bugs')
with_wiki = with_tool('test', 'Wiki', 'wiki')
with_git = with_tool('test', 'Git', 'src-git', 'Git', type='git')
with_hg = with_tool('test', 'Hg', 'src-hg', 'Mercurial', type='hg')
with_svn = with_tool('test', 'SVN', 'src', 'SVN')
with_url = with_tool('test', 'ShortUrl', 'url')
def with_repos(func):
    """Decorator: install the Git, Hg and SVN tools on the 'test' project
    (via with_git / with_hg / with_svn) before running *func*.
    """
    def inner(*args, **kw):
        return func(*args, **kw)
    # Apply the tool decorators innermost-first, matching the stacked
    # @-decorator order of the original (with_svn closest to the function,
    # wraps(func) outermost).
    decorated = with_svn(inner)
    decorated = with_hg(decorated)
    decorated = with_git(decorated)
    return wraps(func)(decorated)
class raises(object):
    """Context manager asserting that its body raises a given exception.

    Usage::

        with raises(ValueError):
            int('not a number')

    The expected exception type (or any subclass) is swallowed; other
    exceptions propagate untouched, and if the body raises nothing an
    AssertionError is raised.
    """

    def __init__(self, ExcType):
        self.ExcType = ExcType

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            # The body completed without raising: that's a test failure.
            raise AssertionError('Did not raise %s' % self.ExcType)
        # True suppresses the expected exception; False re-raises others.
        return issubclass(exc_type, self.ExcType)
|
import datetime
from nova import exception
from nova import objects
from nova.objects import fields
from nova import test
class TestImageMeta(test.NoDBTestCase):
    """Tests for objects.ImageMeta.from_dict() field conversion."""

    def test_basic_attrs(self):
        # Glance-style image dict; some values are deliberately strings to
        # exercise string -> int / string -> bool coercion.
        image = {'status': 'active',
                 'container_format': 'bare',
                 'min_ram': 0,
                 'updated_at': '2014-12-12T11:16:36.000000',
                 # Testing string -> int conversion
                 'min_disk': '0',
                 'owner': '2d8b9502858c406ebee60f0849486222',
                 # Testing string -> bool conversion
                 'protected': 'yes',
                 'properties': {
                     'os_type': 'Linux',
                     'hw_video_model': 'vga',
                     'hw_video_ram': '512',
                     'hw_qemu_guest_agent': 'yes',
                     'hw_scsi_model': 'virtio-scsi',
                 },
                 'size': 213581824,
                 'name': 'f16-x86_64-openstack-sda',
                 'checksum': '755122332caeb9f661d5c978adb8b45f',
                 'created_at': '2014-12-10T16:23:14.000000',
                 'disk_format': 'qcow2',
                 'id': 'c8b1790e-a07d-4971-b137-44f2432936cd'
                 }
        image_meta = objects.ImageMeta.from_dict(image)
        self.assertEqual('active', image_meta.status)
        self.assertEqual('bare', image_meta.container_format)
        self.assertEqual(0, image_meta.min_ram)
        # ISO timestamp strings are parsed into datetime objects.
        self.assertIsInstance(image_meta.updated_at, datetime.datetime)
        self.assertEqual(0, image_meta.min_disk)
        self.assertEqual('2d8b9502858c406ebee60f0849486222', image_meta.owner)
        self.assertTrue(image_meta.protected)
        self.assertEqual(213581824, image_meta.size)
        self.assertEqual('f16-x86_64-openstack-sda', image_meta.name)
        self.assertEqual('755122332caeb9f661d5c978adb8b45f',
                         image_meta.checksum)
        self.assertIsInstance(image_meta.created_at, datetime.datetime)
        self.assertEqual('qcow2', image_meta.disk_format)
        self.assertEqual('c8b1790e-a07d-4971-b137-44f2432936cd', image_meta.id)
        self.assertIsInstance(image_meta.properties, objects.ImageMetaProps)

    def test_no_props(self):
        # Even an empty dict yields an ImageMetaProps instance.
        image_meta = objects.ImageMeta.from_dict({})
        self.assertIsInstance(image_meta.properties, objects.ImageMetaProps)

    def test_volume_backed_image(self):
        # Volume-backed images report None for several fields; those are
        # substituted with empty strings / zero.
        image = {'container_format': None,
                 'size': 0,
                 'checksum': None,
                 'disk_format': None,
                 }
        image_meta = objects.ImageMeta.from_dict(image)
        self.assertEqual('', image_meta.container_format)
        self.assertEqual(0, image_meta.size)
        self.assertEqual('', image_meta.checksum)
        self.assertEqual('', image_meta.disk_format)

    def test_null_substitution(self):
        # All None values get type-appropriate defaults ('' or 0).
        image = {'name': None,
                 'checksum': None,
                 'owner': None,
                 'size': None,
                 'virtual_size': None,
                 'container_format': None,
                 'disk_format': None,
                 }
        image_meta = objects.ImageMeta.from_dict(image)
        self.assertEqual('', image_meta.name)
        self.assertEqual('', image_meta.checksum)
        self.assertEqual('', image_meta.owner)
        self.assertEqual(0, image_meta.size)
        self.assertEqual(0, image_meta.virtual_size)
        self.assertEqual('', image_meta.container_format)
        self.assertEqual('', image_meta.disk_format)
class TestImageMetaProps(test.NoDBTestCase):
    """Tests for objects.ImageMetaProps: property parsing, legacy-key
    compatibility, numbered (NUMA) properties, traits, and version
    backlevel behaviour of obj_to_primitive()/obj_make_compatible().
    """

    def test_normal_props(self):
        props = {'os_type': 'windows',
                 'hw_video_model': 'vga',
                 'hw_video_ram': '512',
                 'hw_qemu_guest_agent': 'yes',
                 'trait:CUSTOM_TRUSTED': 'required',
                 # Fill sane values for the rest here
                 }
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual('windows', virtprops.os_type)
        self.assertEqual('vga', virtprops.hw_video_model)
        self.assertEqual(512, virtprops.hw_video_ram)
        self.assertTrue(virtprops.hw_qemu_guest_agent)
        self.assertIsNotNone(virtprops.traits_required)
        self.assertIn('CUSTOM_TRUSTED', virtprops.traits_required)

    def test_default_props(self):
        # With no input, every field reads back as None via get().
        props = {}
        virtprops = objects.ImageMetaProps.from_dict(props)
        for prop in virtprops.fields:
            self.assertIsNone(virtprops.get(prop))

    def test_default_prop_value(self):
        # get() honours an explicit default for unset fields.
        props = {}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual("hvm", virtprops.get("hw_vm_mode", "hvm"))
        self.assertIsNone(virtprops.get("traits_required"))

    def test_non_existent_prop(self):
        props = {}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertRaises(AttributeError,
                          virtprops.get,
                          "doesnotexist")

    def test_legacy_compat(self):
        # Legacy property names are mapped onto the current field names.
        legacy_props = {
            'architecture': 'x86_64',
            'owner_id': '123',
            'vmware_adaptertype': 'lsiLogic',
            'vmware_disktype': 'preallocated',
            'vmware_image_version': '2',
            'vmware_ostype': 'rhel3_64Guest',
            'auto_disk_config': 'yes',
            'ipxe_boot': 'yes',
            'xenapi_device_id': '3',
            'xenapi_image_compression_level': '2',
            'vmware_linked_clone': 'false',
            'xenapi_use_agent': 'yes',
            'xenapi_skip_agent_inject_ssh': 'no',
            'xenapi_skip_agent_inject_files_at_boot': 'no',
            'cache_in_nova': 'yes',
            'vm_mode': 'hvm',
            'bittorrent': 'yes',
            'mappings': [],
            'block_device_mapping': [],
            'bdm_v2': 'yes',
            'root_device_name': '/dev/vda',
            'hypervisor_version_requires': '>=1.5.3',
            'hypervisor_type': 'qemu',
        }
        image_meta = objects.ImageMetaProps.from_dict(legacy_props)
        self.assertEqual('x86_64', image_meta.hw_architecture)
        self.assertEqual('123', image_meta.img_owner_id)
        # Note the adapter type is also lower-cased on conversion.
        self.assertEqual('lsilogic', image_meta.hw_scsi_model)
        self.assertEqual('preallocated', image_meta.hw_disk_type)
        self.assertEqual(2, image_meta.img_version)
        self.assertEqual('rhel3_64Guest', image_meta.os_distro)
        self.assertTrue(image_meta.hw_auto_disk_config)
        self.assertTrue(image_meta.hw_ipxe_boot)
        self.assertEqual(3, image_meta.hw_device_id)
        self.assertEqual(2, image_meta.img_compression_level)
        self.assertFalse(image_meta.img_linked_clone)
        self.assertTrue(image_meta.img_use_agent)
        self.assertFalse(image_meta.os_skip_agent_inject_ssh)
        self.assertFalse(image_meta.os_skip_agent_inject_files_at_boot)
        self.assertTrue(image_meta.img_cache_in_nova)
        self.assertTrue(image_meta.img_bittorrent)
        self.assertEqual([], image_meta.img_mappings)
        self.assertEqual([], image_meta.img_block_device_mapping)
        self.assertTrue(image_meta.img_bdm_v2)
        self.assertEqual("/dev/vda", image_meta.img_root_device_name)
        self.assertEqual('>=1.5.3', image_meta.img_hv_requested_version)
        self.assertEqual('qemu', image_meta.img_hv_type)

    def test_legacy_compat_vmware_adapter_types(self):
        # Each legacy VMware adapter type maps to a disk bus and, for SCSI
        # buses, a specific SCSI model; empty/None leaves both unset.
        legacy_types = ['lsiLogic', 'busLogic', 'ide', 'lsiLogicsas',
                        'paraVirtual', None, '']
        for legacy_type in legacy_types:
            legacy_props = {
                'vmware_adaptertype': legacy_type,
            }
            image_meta = objects.ImageMetaProps.from_dict(legacy_props)
            if legacy_type == 'ide':
                self.assertEqual('ide', image_meta.hw_disk_bus)
            elif not legacy_type:
                self.assertFalse(image_meta.obj_attr_is_set('hw_disk_bus'))
                self.assertFalse(image_meta.obj_attr_is_set('hw_scsi_model'))
            else:
                self.assertEqual('scsi', image_meta.hw_disk_bus)
                if legacy_type == 'lsiLogicsas':
                    expected = 'lsisas1068'
                elif legacy_type == 'paraVirtual':
                    expected = 'vmpvscsi'
                else:
                    expected = legacy_type.lower()
                self.assertEqual(expected, image_meta.hw_scsi_model)

    def test_duplicate_legacy_and_normal_props(self):
        # Both keys are referring to the same object field
        props = {'hw_scsi_model': 'virtio-scsi',
                 'vmware_adaptertype': 'lsiLogic',
                 }
        virtprops = objects.ImageMetaProps.from_dict(props)
        # The normal property always wins vs. the legacy field since
        # _set_attr_from_current_names is called finally
        self.assertEqual('virtio-scsi', virtprops.hw_scsi_model)

    def test_get(self):
        props = objects.ImageMetaProps(os_distro='linux')
        self.assertEqual('linux', props.get('os_distro'))
        self.assertIsNone(props.get('img_version'))
        self.assertEqual(1, props.get('img_version', 1))

    def test_set_numa_mem(self):
        # Numbered 'hw_numa_mem.N' keys collapse into an ordered int list.
        props = {'hw_numa_nodes': 2,
                 'hw_numa_mem.0': "2048",
                 'hw_numa_mem.1': "4096"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(2, virtprops.hw_numa_nodes)
        self.assertEqual([2048, 4096], virtprops.hw_numa_mem)

    def test_set_numa_mem_sparse(self):
        # Entries beyond the declared node count are dropped.
        props = {'hw_numa_nodes': 2,
                 'hw_numa_mem.0': "2048",
                 'hw_numa_mem.1': "1024",
                 'hw_numa_mem.3': "4096"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(2, virtprops.hw_numa_nodes)
        self.assertEqual([2048, 1024], virtprops.hw_numa_mem)

    def test_set_numa_mem_no_count(self):
        # Without hw_numa_nodes only index 0 is kept.
        props = {'hw_numa_mem.0': "2048",
                 'hw_numa_mem.3': "4096"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertIsNone(virtprops.get("hw_numa_nodes"))
        self.assertEqual([2048], virtprops.hw_numa_mem)

    def test_set_numa_cpus(self):
        # CPU range strings like "0-3" become sets of CPU ids per node.
        props = {'hw_numa_nodes': 2,
                 'hw_numa_cpus.0': "0-3",
                 'hw_numa_cpus.1': "4-7"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(2, virtprops.hw_numa_nodes)
        self.assertEqual([set([0, 1, 2, 3]), set([4, 5, 6, 7])],
                         virtprops.hw_numa_cpus)

    def test_set_numa_cpus_sparse(self):
        props = {'hw_numa_nodes': 4,
                 'hw_numa_cpus.0': "0-3",
                 'hw_numa_cpus.1': "4,5",
                 'hw_numa_cpus.3': "6-7"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(4, virtprops.hw_numa_nodes)
        self.assertEqual([set([0, 1, 2, 3]), set([4, 5])],
                         virtprops.hw_numa_cpus)

    def test_set_numa_cpus_no_count(self):
        props = {'hw_numa_cpus.0': "0-3",
                 'hw_numa_cpus.3': "4-7"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertIsNone(virtprops.get("hw_numa_nodes"))
        self.assertEqual([set([0, 1, 2, 3])],
                         virtprops.hw_numa_cpus)

    def test_get_unnumbered_trait_fields(self):
        """Tests that only valid un-numbered required traits are parsed from
        the properties.
        """
        props = {'trait:HW_CPU_X86_AVX2': 'required',
                 'trait:CUSTOM_TRUSTED': 'required',
                 'trait1:CUSTOM_FPGA': 'required',
                 'trai:CUSTOM_FOO': 'required',
                 'trait:CUSTOM_XYZ': 'xyz'}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertIn('CUSTOM_TRUSTED', virtprops.traits_required)
        self.assertIn('HW_CPU_X86_AVX2', virtprops.traits_required)
        # numbered traits are ignored
        self.assertNotIn('CUSTOM_FPGA', virtprops.traits_required)
        # property key does not start with `trait:` exactly
        self.assertNotIn('CUSTOM_FOO', virtprops.traits_required)
        # property value is not required
        self.assertNotIn('CUSTOM_XYZ', virtprops.traits_required)

    def test_traits_required_initialized_as_list(self):
        """Tests that traits_required field is set as a list even if the same
        property is set on the image metadata.
        """
        props = {'trait:HW_CPU_X86_AVX2': 'required',
                 'trait:CUSTOM_TRUSTED': 'required',
                 'traits_required': 'foo'}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertIsInstance(virtprops.traits_required, list)
        self.assertIn('CUSTOM_TRUSTED', virtprops.traits_required)
        self.assertIn('HW_CPU_X86_AVX2', virtprops.traits_required)
        self.assertEqual(2, len(virtprops.traits_required))

    def test_obj_make_compatible(self):
        # All of these fields postdate object version 1.0, so backleveling
        # to 1.0 must drop them from the primitive.
        props = {
            'hw_firmware_type': 'uefi',
            'hw_cpu_realtime_mask': '^0-1',
            'hw_cpu_thread_policy': 'prefer',
            'img_config_drive': 'mandatory',
            'os_admin_user': 'root',
            'hw_vif_multiqueue_enabled': True,
            'img_hv_type': 'kvm',
            'img_hv_requested_version': '>= 1.0',
            'os_require_quiesce': True,
            'os_secure_boot': 'required',
            'hw_rescue_bus': 'ide',
            'hw_rescue_device': 'disk',
            'hw_watchdog_action': fields.WatchdogAction.DISABLED,
        }
        obj = objects.ImageMetaProps(**props)
        primitive = obj.obj_to_primitive('1.0')
        self.assertFalse(any([x in primitive['nova_object.data']
                              for x in props]))
        # Disk buses unknown to 1.0 cannot be backleveled at all.
        for bus in ('lxc', 'uml'):
            obj.hw_disk_bus = bus
            self.assertRaises(exception.ObjectActionError,
                              obj.obj_to_primitive, '1.0')

    def test_obj_make_compatible_hw_emulation(self):
        """Check 'hw_emulation_architecture' compatibility."""
        # assert that 'hw_emulation_architecture' is supported
        # on a suitably new version
        obj = objects.ImageMetaProps(
            hw_emulation_architecture=objects.fields.Architecture.AARCH64,
        )
        primitive = obj.obj_to_primitive('1.31')
        self.assertIn('hw_emulation_architecture',
                      primitive['nova_object.data'])
        self.assertEqual(
            objects.fields.Architecture.AARCH64,
            primitive['nova_object.data']['hw_emulation_architecture'])
        # and is absent on older versions
        primitive = obj.obj_to_primitive('1.29')
        self.assertNotIn('hw_emulation_architecture',
                         primitive['nova_object.data'])

    def test_obj_make_compatible_input_bus(self):
        """Check 'hw_input_bus' compatibility."""
        # assert that 'hw_input_bus' is supported on a suitably new version
        obj = objects.ImageMetaProps(
            hw_input_bus=objects.fields.InputBus.VIRTIO,
        )
        primitive = obj.obj_to_primitive('1.29')
        self.assertIn('hw_input_bus', primitive['nova_object.data'])
        self.assertEqual(
            objects.fields.InputBus.VIRTIO,
            primitive['nova_object.data']['hw_input_bus'])
        # and is absent on older versions
        primitive = obj.obj_to_primitive('1.28')
        self.assertNotIn('hw_input_bus', primitive['nova_object.data'])

    def test_obj_make_compatible_video_model(self):
        # assert that older video models are preserved.
        obj = objects.ImageMetaProps(
            hw_video_model=objects.fields.VideoModel.QXL,
            hw_disk_bus=objects.fields.DiskBus.VIRTIO
        )
        primitive = obj.obj_to_primitive('1.21')
        self.assertIn("hw_video_model", primitive['nova_object.data'])
        self.assertEqual(objects.fields.VideoModel.QXL,
                         primitive['nova_object.data']['hw_video_model'])
        self.assertIn("hw_disk_bus", primitive['nova_object.data'])
        self.assertEqual(objects.fields.DiskBus.VIRTIO,
                         primitive['nova_object.data']['hw_disk_bus'])
        # Virtio, GOP and None were added in 1.22 and should raise an
        # exception when backleveling.
        models = [objects.fields.VideoModel.VIRTIO,
                  objects.fields.VideoModel.GOP,
                  objects.fields.VideoModel.NONE]
        for model in models:
            obj = objects.ImageMetaProps(hw_video_model=model)
            ex = self.assertRaises(exception.ObjectActionError,
                                   obj.obj_to_primitive, '1.21')
            self.assertIn('hw_video_model', str(ex))

    def test_obj_bochs_model_positive(self):
        """Test "bochs" support from Nova object version 1.30 onwards
        """
        obj = objects.ImageMetaProps(
            hw_video_model=objects.fields.VideoModel.BOCHS,
        )
        primitive = obj.obj_to_primitive('1.30')
        self.assertEqual(
            objects.fields.VideoModel.BOCHS,
            primitive['nova_object.data']['hw_video_model'])

    def test_obj_bochs_model_negative(self):
        """Make sure an exception is raised for Nova object version <
        1.30
        """
        obj = objects.ImageMetaProps(
            hw_video_model=objects.fields.VideoModel.BOCHS,
        )
        ex = self.assertRaises(exception.ObjectActionError,
                               obj.obj_to_primitive, '1.29')
        self.assertIn('hw_video_model=bochs not supported', str(ex))

    def test_obj_make_compatible_watchdog_action_not_disabled(self):
        """Tests that we don't pop the hw_watchdog_action if the value is not
        'disabled'.
        """
        obj = objects.ImageMetaProps(
            hw_watchdog_action=fields.WatchdogAction.PAUSE)
        primitive = obj.obj_to_primitive('1.0')
        self.assertIn('hw_watchdog_action', primitive['nova_object.data'])
        self.assertEqual(fields.WatchdogAction.PAUSE,
                         primitive['nova_object.data']['hw_watchdog_action'])

    def test_set_os_secure_boot(self):
        props = {'os_secure_boot': "required"}
        secure_props = objects.ImageMetaProps.from_dict(props)
        self.assertEqual("required", secure_props.os_secure_boot)

    def test_obj_make_compatible_img_hide_hypervisor_id(self):
        """Tests that checks if we pop img_hide_hypervisor_id."""
        obj = objects.ImageMetaProps(img_hide_hypervisor_id=True)
        primitive = obj.obj_to_primitive('1.0')
        self.assertNotIn('img_hide_hypervisor_id',
                         primitive['nova_object.data'])

    def test_obj_make_compatible_trait_fields(self):
        """Tests that checks if we pop traits_required."""
        obj = objects.ImageMetaProps(traits_required=['CUSTOM_TRUSTED'])
        primitive = obj.obj_to_primitive('1.19')
        self.assertNotIn('traits_required', primitive['nova_object.data'])

    def test_obj_make_compatible_pmu(self):
        """Tests that checks if we pop hw_pmu."""
        obj = objects.ImageMetaProps(hw_pmu=True)
        primitive = obj.obj_to_primitive()
        old_primitive = obj.obj_to_primitive('1.22')
        self.assertIn('hw_pmu', primitive['nova_object.data'])
        self.assertNotIn('hw_pmu', old_primitive['nova_object.data'])

    def test_obj_make_compatible_vtpm(self):
        """Test that checks if we pop hw_tpm_model and hw_tpm_version."""
        obj = objects.ImageMetaProps(
            hw_tpm_model='tpm-tis', hw_tpm_version='1.2',
        )
        primitive = obj.obj_to_primitive()
        self.assertIn('hw_tpm_model', primitive['nova_object.data'])
        self.assertIn('hw_tpm_version', primitive['nova_object.data'])
        primitive = obj.obj_to_primitive('1.26')
        self.assertNotIn('hw_tpm_model', primitive['nova_object.data'])
        self.assertNotIn('hw_tpm_version', primitive['nova_object.data'])

    def test_obj_make_compatible_socket_policy(self):
        # The 'socket' PCI NUMA affinity policy cannot be expressed before
        # version 1.28, so backleveling must fail loudly.
        obj = objects.ImageMetaProps(
            hw_pci_numa_affinity_policy=fields.PCINUMAAffinityPolicy.SOCKET)
        self.assertRaises(exception.ObjectActionError,
                          obj.obj_to_primitive, '1.27')
|
import logging
from collections import OrderedDict
from typing import Dict, Iterable, List, Optional, Tuple
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.exceptions import InstallationError
from pip._internal.models.wheel import Wheel
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils import compatibility_tags
logger = logging.getLogger(__name__)
class RequirementSet:

    def __init__(self, check_supported_wheels=True):
        # type: (bool) -> None
        """Create a RequirementSet.
        """
        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]
        self.check_supported_wheels = check_supported_wheels
        self.unnamed_requirements = []  # type: List[InstallRequirement]

    def __str__(self):
        # type: () -> str
        """Space-joined reqs the user asked for directly (no comes_from)."""
        requirements = sorted(
            (req for req in self.requirements.values() if not req.comes_from),
            key=lambda req: canonicalize_name(req.name or ""),
        )
        return ' '.join(str(req.req) for req in requirements)

    def __repr__(self):
        # type: () -> str
        """Debug representation listing every named requirement."""
        requirements = sorted(
            self.requirements.values(),
            key=lambda req: canonicalize_name(req.name or ""),
        )
        format_string = '<{classname} object; {count} requirement(s): {reqs}>'
        return format_string.format(
            classname=self.__class__.__name__,
            count=len(requirements),
            reqs=', '.join(str(req.req) for req in requirements),
        )

    def add_unnamed_requirement(self, install_req):
        # type: (InstallRequirement) -> None
        """Track a requirement that has no project name yet."""
        assert not install_req.name
        self.unnamed_requirements.append(install_req)

    def add_named_requirement(self, install_req):
        # type: (InstallRequirement) -> None
        """Record a named requirement, keyed by its canonicalized name."""
        assert install_req.name

        project_name = canonicalize_name(install_req.name)
        self.requirements[project_name] = install_req

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]
        """Add install_req as a requirement to install.
        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                install_req.name, install_req.markers,
            )
            return [], None
        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            tags = compatibility_tags.get_supported()
            if (self.check_supported_wheels and not wheel.supported(tags)):
                raise InstallationError(
                    "{} is not a supported wheel on this platform.".format(
                        wheel.filename)
                )
        # This next bit is really a sanity check.
        assert not install_req.user_supplied or parent_req_name is None, (
            "a user supplied req shouldn't have a parent"
        )
        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not install_req.name:
            self.add_unnamed_requirement(install_req)
            return [install_req], None
        try:
            existing_req = self.get_requirement(
                install_req.name)  # type: Optional[InstallRequirement]
        except KeyError:
            existing_req = None
        # A conflict only exists when both versions are user-level (not
        # constraints), for the same extras, with different specifiers.
        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req and
            install_req.req and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: {} (already in {}, name={!r})"
                .format(install_req, existing_req, install_req.name)
            )
        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.add_named_requirement(install_req)
            # We'd want to rescan this requirement later
            return [install_req], install_req
        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req
        # From here on, install_req is a real requirement upgrading an
        # existing constraint-only entry.
        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            raise InstallationError(
                "Could not satisfy constraints for '{}': "
                "installation from path or url cannot be "
                "constrained to a version".format(install_req.name)
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        # If we're now installing a user supplied requirement,
        # mark the existing object as such.
        if install_req.user_supplied:
            existing_req.user_supplied = True
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

    def has_requirement(self, name):
        # type: (str) -> bool
        """True if a non-constraint requirement with this name exists."""
        project_name = canonicalize_name(name)

        return (
            project_name in self.requirements and
            not self.requirements[project_name].constraint
        )

    def get_requirement(self, name):
        # type: (str) -> InstallRequirement
        """Look up a requirement by name; raises KeyError if absent."""
        project_name = canonicalize_name(name)

        if project_name in self.requirements:
            return self.requirements[project_name]

        raise KeyError(f"No project with the name {name!r}")

    @property
    def all_requirements(self):
        # type: () -> List[InstallRequirement]
        """Unnamed requirements followed by all named ones."""
        return self.unnamed_requirements + list(self.requirements.values())
|
"""Escaping functions for compiled soy templates.
This module contains the public functions and classes to sanitize content for
different contexts.
The bulk of the logic resides in generated_sanitize.py which is generated by
GeneratePySanitizeEscapingDirectiveCode.java to match other implementations.
Please keep as much escaping and filtering logic/regex in there as possible.
"""
from __future__ import unicode_literals
__author__ = 'dcphillips@google.com (David Phillips)'
import functools
import re
from . import generated_sanitize
# Python 2 compatibility shim: rebind `str` to `unicode` so the text
# handling below always operates on unicode strings. On Python 3 the name
# `unicode` does not exist, the NameError is swallowed, and built-in `str`
# (already unicode) is used as-is.
try:
    str = unicode
except NameError:
    pass
# Matches a trailing character that is neither a quote nor whitespace,
# i.e. an attribute value ending ambiguously.
_AMBIGUOUS_ATTR_END_RE = re.compile(r'([^"\'\s])$')

# HTML5 void elements that do not take closing tags.
_HTML5_VOID_ELEMENTS_RE = re.compile(
    '^<(?:area|base|br|col|command|embed|hr|img|input'
    '|keygen|link|meta|param|source|track|wbr)\\b')

# Replacement string emitted in place of disallowed content.
_INNOCUOUS_OUTPUT = 'zSoyz'

# Any of the three newline conventions (\r\n, \r, \n).
_NEWLINE_RE = re.compile('(\r\n|\r|\n)')

# Marker form "[123]" used by _strip_html_tags to stand in for kept tags.
_REPLACEMENT_TAG_RE = re.compile(r'\[(\d+)\]')

# Character sequences that could break out of raw-text HTML contexts
# (e.g. a </style> close or a CDATA terminator) and their safe spellings.
_HTML_RAW_CONTENT_HAZARD_RE = re.compile(r'<\/|\]\]>')
_HTML_RAW_CONTENT_HAZARD_REPLACEMENTS = {
    '</': r'<\/',
    ']]>': r']]\>'
}
def change_newline_to_br(value):
    """Replace every newline in *value* with '<br>'.

    If the input was already SanitizedHtml, the result is re-wrapped as
    SanitizedHtml (same content direction) so its sanitization survives.
    """
    result = _NEWLINE_RE.sub('<br>', str(value))

    if is_content_kind(value, CONTENT_KIND.HTML):
        approval = IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval(
            'Persisting existing sanitization.')
        return SanitizedHtml(result, get_content_dir(value), approval=approval)

    return result
def clean_html(value, safe_tags=None):
    """Strip all HTML tags from *value* except those in *safe_tags*.

    Already-sanitized HTML is returned unchanged; otherwise the stripped
    result is wrapped as SanitizedHtml. Defaults to the generated safe-tag
    whitelist when safe_tags is falsy.
    """
    if not safe_tags:
        safe_tags = generated_sanitize._SAFE_TAG_WHITELIST

    if is_content_kind(value, CONTENT_KIND.HTML):
        return value

    approval = IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval(
        'Escaped html is by nature sanitized.')
    return SanitizedHtml(_strip_html_tags(value, safe_tags),
                         get_content_dir(value), approval=approval)
def escape_css_string(value):
    """Escape *value* for embedding inside a CSS string literal."""
    return generated_sanitize.escape_css_string_helper(value)
def escape_html(value):
    """HTML-escape *value*, returning SanitizedHtml.

    Content already marked as HTML is passed through unchanged.
    """
    if is_content_kind(value, CONTENT_KIND.HTML):
        return value

    approval = IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval(
        'Escaped html is by nature sanitized.')
    return SanitizedHtml(generated_sanitize.escape_html_helper(value),
                         get_content_dir(value), approval=approval)
def escape_html_attribute(value):
    """Escape *value* for use inside a quoted HTML attribute.

    Sanitized HTML has its tags stripped and the remainder normalized;
    anything else is fully HTML-escaped.
    """
    if is_content_kind(value, CONTENT_KIND.HTML):
        return generated_sanitize.normalize_html_helper(
            _strip_html_tags(value.content))

    return generated_sanitize.escape_html_helper(value)
def escape_html_attribute_nospace(value):
    """Escape *value* for an unquoted HTML attribute (spaces also escaped)."""
    if is_content_kind(value, CONTENT_KIND.HTML):
        return generated_sanitize.normalize_html_nospace_helper(
            _strip_html_tags(value.content))

    return generated_sanitize.escape_html_nospace_helper(value)
def escape_html_rcdata(value):
    """Escape *value* for an RCDATA context (e.g. <textarea>, <title>).

    Sanitized HTML is only normalized (tags kept but defanged); other
    values are fully HTML-escaped.
    """
    if is_content_kind(value, CONTENT_KIND.HTML):
        return generated_sanitize.normalize_html_helper(value.content)

    return generated_sanitize.escape_html_helper(value)
def escape_js_regex(value):
    """Escape *value* for embedding inside a JavaScript regex literal."""
    return generated_sanitize.escape_js_regex_helper(value)
def escape_js_string(value):
    """Escape *value* for a JS string literal; JS_STR_CHARS pass through."""
    if is_content_kind(value, CONTENT_KIND.JS_STR_CHARS):
        return value.content

    return generated_sanitize.escape_js_string_helper(value)
def escape_js_value(value):
    """Escape *value* for direct embedding in a JavaScript expression."""
    if value is None:
        # We output null for compatibility with Java, as it returns null from maps
        # where there is no corresponding key.
        return ' null '

    if is_content_kind(value, CONTENT_KIND.JS):
        return value.content

    # We surround values with spaces so that they can't be interpolated into
    # identifiers by accident.
    # We could use parentheses but those might be interpreted as a function call.
    # This matches the JS implementation in javascript/template/soy/soyutils.js.
    # NOTE(review): `long` exists only on Python 2; on Python 3 this line
    # raises NameError for non-None, non-JS values. The module's
    # `str = unicode` shim suggests Python 2 was the target — confirm before
    # running under Python 3.
    if isinstance(value, (int, long, float, complex)):
        return ' ' + str(value) + ' '

    return "'" + generated_sanitize.escape_js_string_helper(value) + "'"
def escape_uri(value):
    """Percent-escape *value* for embedding inside a URI."""
    return generated_sanitize.escape_uri_helper(value)
def filter_css_value(value):
    """Filter *value* for use as a CSS property value.

    Sanitized CSS only needs raw-content hazards defanged; None becomes an
    empty string; anything else goes through the generated value filter.
    """
    if is_content_kind(value, CONTENT_KIND.CSS):
        return _embed_css_into_html(value.content)

    if value is None:
        return ''

    return generated_sanitize.filter_css_value_helper(value)
def filter_html_attributes(value):
    """Filter *value* for use in an HTML attribute-list position."""
    # NOTE: Explicitly no support for SanitizedContentKind.HTML, since that is
    # meaningless in this context, which is generally *between* html attributes.
    if is_content_kind(value, CONTENT_KIND.ATTRIBUTES):
        # Add a space at the end to ensure this won't get merged into following
        # attributes, unless the interpretation is unambiguous (ending with quotes
        # or a space).
        return _AMBIGUOUS_ATTR_END_RE.sub(r'\1 ', value.content)

    # TODO(gboyer): Replace this with a runtime exception along with other
    return generated_sanitize.filter_html_attributes_helper(value)
def filter_html_element_name(value):
    """Filter *value* for use as an HTML element (tag) name."""
    # NOTE: We don't accept any SanitizedContent here. HTML indicates valid
    # PCDATA, not tag names. A sloppy developer shouldn't be able to cause an
    # exploit:
    # ... {let userInput}script src=http://evil.com/evil.js{/let} ...
    # ... {param tagName kind="html"}{$userInput}{/param} ...
    # ... <{$tagName}>Hello World</{$tagName}>
    return generated_sanitize.filter_html_element_name_helper(value)
def filter_image_data_uri(value):
    """Filter *value* to a safe image data: URI, wrapped as SanitizedUri."""
    approval = IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval(
        'Filtered URIs are by nature sanitized.')
    return SanitizedUri(
        generated_sanitize.filter_image_data_uri_helper(value), approval=approval)
def filter_no_auto_escape(value):
    """Block plain-TEXT content from noAutoescape positions.

    TEXT kind is replaced by the innocuous marker; everything else passes
    through unchanged.
    """
    if is_content_kind(value, CONTENT_KIND.TEXT):
        return _INNOCUOUS_OUTPUT

    return value
def filter_normalize_uri(value):
    """Filter and normalize *value* for use as a URI; URI kind is only
    normalized."""
    if is_content_kind(value, CONTENT_KIND.URI):
        return normalize_uri(value)

    return generated_sanitize.filter_normalize_uri_helper(value)
def filter_normalize_media_uri(value):
    """Like filter_normalize_uri, but with the media-URI filter for
    non-sanitized input."""
    if is_content_kind(value, CONTENT_KIND.URI):
        return normalize_uri(value)

    return generated_sanitize.filter_normalize_media_uri_helper(value)
def normalize_html(value):
    """Normalize HTML special characters without escaping existing entities."""
    return generated_sanitize.normalize_html_helper(value)
def normalize_uri(value):
    """Normalize *value* as a URI without filtering its scheme."""
    return generated_sanitize.normalize_uri_helper(value)
def get_content_dir(value):
    """Return the content direction of a SanitizedContent value, else None."""
    if not isinstance(value, SanitizedContent):
        return None
    return value.content_dir
def is_content_kind(value, content_kind):
    """True if *value* is SanitizedContent of exactly *content_kind*."""
    if not isinstance(value, SanitizedContent):
        return False
    return value.content_kind == content_kind
def _get_content_kind(value):
    """Get human-readable name for the kind of value.

    Args:
      value: A input string.
    Returns:
      A string name represented the type of value.
    """
    if isinstance(value, SanitizedContent):
        return CONTENT_KIND.decodeKind(value.content_kind)
    else:
        # Non-sanitized values just report their Python type.
        return type(value)
def _strip_html_tags(value, tag_whitelist=None):
    """Strip any html tags not present on the whitelist.

    If there's a whitelist present, the handler will use a marker for whitelisted
    tags, strips all others, and then reinserts the originals.

    Args:
      value: The input string.
      tag_whitelist: A list of safe tag names.
    Returns:
      A string with non-whitelisted tags stripped.
    """
    if not tag_whitelist:
        # The second level (replacing '<' with '&lt;') ensures that non-tag uses of
        # '<' do not recombine into tags as in
        # '<<foo>script>alert(1337)</<foo>script>'
        return generated_sanitize._LT_REGEX.sub(
            '&lt;', generated_sanitize._HTML_TAG_REGEX.sub('', value))

    # Escapes '[' so that we can use [123] below to mark places where tags
    # have been removed.
    html = str(value).replace('[', '&#91;')

    # Consider all uses of '<' and replace whitelisted tags with markers like
    # [1] which are indices into a list of approved tag names.
    # Replace all other uses of < and > with entities.
    tags = []
    tag_handler = functools.partial(_tag_sub_handler, tag_whitelist, tags)
    html = generated_sanitize._HTML_TAG_REGEX.sub(tag_handler, html)

    # Escape HTML special characters. Now there are no '<' in html that could
    # start a tag.
    html = generated_sanitize.normalize_html_helper(html)

    # Discard any dead close tags and close any hanging open tags before
    # reinserting white listed tags.
    final_close_tags = _balance_tags(tags)

    # Now html contains no tags or less-than characters that could become
    # part of a tag via a replacement operation and tags only contains
    # approved tags.
    # Reinsert the white-listed tags.
    html = _REPLACEMENT_TAG_RE.sub(lambda match: tags[int(match.group(1))], html)

    # Close any still open tags.
    # This prevents unclosed formatting elements like <ol> and <table> from
    # breaking the layout of containing HTML.
    return html + final_close_tags
def _embed_css_into_html(css):
    """Make Safe CSS embeddable inside an HTML <style> element.

    Ensures that tag boundaries cannot be broken by the CSS content.

    Args:
      css: Safe CSS content.

    Returns:
      Embeddable safe CSS content.
    """
    defanged = _HTML_RAW_CONTENT_HAZARD_RE.sub(_defang_raw_content_hazard, css)
    return defanged
def _defang_raw_content_hazard(match):
    """Maps _HTML_RAW_CONTENT_HAZARD_RE matches to safe alternatives.

    Args:
      match: A regex match object whose whole match is a hazardous token.

    Returns:
      The safe replacement text looked up in
      _HTML_RAW_CONTENT_HAZARD_REPLACEMENTS.
    """
    return _HTML_RAW_CONTENT_HAZARD_REPLACEMENTS[match.group(0)]
def _tag_sub_handler(tag_whitelist, tags, match):
"""Replace whitelisted tags with markers and update the tag list.
Args:
tag_whitelist: A list containing all whitelisted html tags.
tags: The list of all whitelisted tags found in the text.
match: The current match element with a subgroup containing the tag name.
Returns:
The replacement content, a index marker for whitelisted tags, or an empty
string.
"""
tag = match.group(0)
name = match.group(1)
name = name.lower()
if name in tag_whitelist:
start = '</' if tag[1] == '/' else '<'
index = len(tags)
tags.append(start + name + '>')
return '[%d]' % index
return ''
def _balance_tags(tags):
    """Throw out any close tags without an open tag.

    If {@code <table>} is used for formatting, embedded HTML shouldn't be able
    to use a mismatched {@code </table>} to break page layout.

    NOTE: mutates ``tags`` in place (dropping unmatched close tags and
    expanding close tags that implicitly close intervening elements).

    Args:
      tags: The list of all tags in this text.

    Returns:
      A string containing zero or more closed tags that close all elements
      that are opened in tags but not closed.
    """
    open_tags = []
    for i, tag in enumerate(tags):
        if tag[1] == '/':
            # Scan backwards for the matching open tag.
            index = len(open_tags) - 1
            while index >= 0 and open_tags[index] != tag:
                index -= 1
            if index < 0:
                tags[i] = ''  # Drop close tag.
            else:
                # Close everything opened since the matching open tag, then
                # forget those open tags.
                tags[i] = ''.join(reversed(open_tags[index:]))
                del open_tags[index:]
        elif not _HTML5_VOID_ELEMENTS_RE.match(tag):
            # Void elements (e.g. <br>) never take a close tag.
            open_tags.append('</' + tag[1:])
    return ''.join(reversed(open_tags))
class IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval:
    """Approval token asserting the caller understands the safety contract.

    The justification string records why the sanitization claim is valid.
    """

    # Class-level default; only overridden per-instance when a truthy
    # justification is supplied (instance __dict__ is compared elsewhere).
    justification = None

    def __init__(self, justification=None):
        """Store ``justification`` on the instance when one is given."""
        if justification:
            self.justification = justification
class CONTENT_KIND:
    """Enumeration of sanitized content kinds (1-based values)."""

    HTML, JS, JS_STR_CHARS, URI, ATTRIBUTES, CSS, TEXT = range(1, 8)

    @staticmethod
    def decodeKind(i):
        """Return the symbolic name for the 1-based kind value ``i``."""
        names = ['HTML', 'JS', 'JS_STR_CHARS', 'URI', 'ATTRIBUTES', 'CSS',
                 'TEXT']
        return names[i - 1]
class DIR:
    """Text directionality constants."""

    # Left-to-right, unknown/neutral, and right-to-left.
    LTR = 1
    NEUTRAL = 0
    RTL = -1
class SanitizedContent(object):
    """Base class wrapping a string sanitized for a given content kind.

    Cannot be instantiated directly: concrete subclasses must define a
    truthy ``content_kind``, and construction requires an approval token
    carrying a justification of at least 20 characters.
    """

    # CONTENT_KIND value; set by each concrete subclass.
    content_kind = None

    def __new__(cls, *args, **kwargs):
        if cls is SanitizedContent or not cls.content_kind:
            raise TypeError('SanitizedContent cannot be instantiated directly. '
                            'Instantiate a child class with a valid content_kind.')
        # Bug fix: object.__new__ must not be given the constructor
        # arguments; forwarding them raises TypeError on Python 3 whenever
        # __new__ is overridden, as it is here.
        return object.__new__(cls)

    def __init__(self, content=None, content_dir=None, approval=None):
        """Initialize sanitized content.

        Args:
          content: The sanitized string payload.
          content_dir: A DIR value for the text direction, or None.
          approval: An IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval
              instance whose justification is at least 20 characters long.

        Raises:
          TypeError: If approval is missing/invalid or the justification is
              too short.
        """
        if not isinstance(approval,
                          IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval):
            raise TypeError('Caller does not have sanitization approval.')
        elif not approval.justification or len(approval.justification) < 20:
            # Bug fix: the adjacent literals previously concatenated to
            # 'must beprovided' (missing space).
            raise TypeError('A justification of at least 20 characters must be '
                            'provided with the approval.')
        self.content = content
        self.content_dir = content_dir

    def __eq__(self, other):
        # Equal only to instances of the same class with identical state.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)

    def __len__(self):
        return len(self.content)

    def __nonzero__(self):
        return bool(self.content)

    # Python 3 truth-testing hook; mirrors the Python 2 __nonzero__ above.
    __bool__ = __nonzero__

    def __str__(self):
        return str(self.content)

    def __unicode__(self):
        return str(self.content)
class SanitizedCss(SanitizedContent):
    """A chunk of sanitized CSS; direction is always LTR."""

    content_kind = CONTENT_KIND.CSS

    def __init__(self, content=None, approval=None):
        """Wrap sanitized CSS content with the given approval."""
        super(SanitizedCss, self).__init__(
            content=content, content_dir=DIR.LTR, approval=approval)
class SanitizedHtml(SanitizedContent):
    """A chunk of sanitized HTML markup."""
    content_kind = CONTENT_KIND.HTML
class SanitizedHtmlAttribute(SanitizedContent):
    """A sanitized HTML attribute chunk; direction is always LTR."""

    content_kind = CONTENT_KIND.ATTRIBUTES

    def __init__(self, content=None, approval=None):
        """Wrap sanitized attribute content with the given approval."""
        super(SanitizedHtmlAttribute, self).__init__(
            content=content, content_dir=DIR.LTR, approval=approval)
class SanitizedJs(SanitizedContent):
    """A chunk of sanitized JavaScript; direction is always LTR."""

    content_kind = CONTENT_KIND.JS

    def __init__(self, content=None, approval=None):
        """Wrap sanitized JavaScript with the given approval."""
        super(SanitizedJs, self).__init__(
            content=content, content_dir=DIR.LTR, approval=approval)
class SanitizedJsStrChars(SanitizedContent):
    """Characters sanitized for embedding in a JS string literal."""
    content_kind = CONTENT_KIND.JS_STR_CHARS
class SanitizedUri(SanitizedContent):
    """A sanitized URI; direction is always LTR."""

    content_kind = CONTENT_KIND.URI

    def __init__(self, content=None, approval=None):
        """Wrap a sanitized URI with the given approval."""
        super(SanitizedUri, self).__init__(
            content=content, content_dir=DIR.LTR, approval=approval)
class UnsanitizedText(SanitizedContent):
    """Plain text that has not been sanitized; approval is implicit."""

    content_kind = CONTENT_KIND.TEXT

    def __init__(self, content=None, content_dir=None, approval=None):
        # The approval argument is accepted only for API consistency with the
        # other kinds; unsanitized text is always self-approved.
        implicit_approval = IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval(
            'Unsanitized Text does not require approval.')
        super(UnsanitizedText, self).__init__(
            str(content), content_dir, approval=implicit_approval)
|
import unittest
from bhdashboard.tests.widgets import timeline
def suite():
    """Build the dashboard widget test suite.

    Returns:
        A unittest.TestSuite aggregating the timeline widget tests.
    """
    # Use a distinct local name so the function's own name is not shadowed.
    test_suite = unittest.TestSuite()
    test_suite.addTest(timeline.suite())
    return test_suite
if __name__ == '__main__':
    # Run the aggregated suite when executed as a script.
    unittest.main(defaultTest='suite')
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration adding the ImageItemURLQueue model."""

    def forwards(self, orm):
        """Apply the migration: create the queue table."""
        # Adding model 'ImageItemURLQueue'
        db.create_table('for_sale_imageitemurlqueue', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('item', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['for_sale.Item'])),
            ('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal('for_sale', ['ImageItemURLQueue'])

    def backwards(self, orm):
        """Reverse the migration: drop the queue table."""
        # Deleting model 'ImageItemURLQueue'
        db.delete_table('for_sale_imageitemurlqueue')

    # Frozen ORM state used by South to materialize the ``orm`` object above.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'for_sale.imageitem': {
            'Meta': {'object_name': 'ImageItem'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('core.thumbs.ImageWithThumbsField', [], {'max_length': '100'}),
            'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['for_sale.Item']"}),
            'primary_picture': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
        },
        'for_sale.imageitemurlqueue': {
            'Meta': {'object_name': 'ImageItemURLQueue'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['for_sale.Item']"}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        'for_sale.item': {
            'Meta': {'object_name': 'Item', '_ormbases': ['inventory.Product']},
            'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '11', 'decimal_places': '2'}),
            'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['inventory.Product']", 'unique': 'True', 'primary_key': 'True'}),
            'qty': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'inventory.product': {
            'Meta': {'object_name': 'Product'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']"}),
            'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"}),
            'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.ProductType']", 'null': 'True', 'blank': 'True'}),
            'weight': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '11', 'decimal_places': '2'})
        },
        'inventory.producttype': {
            'Meta': {'object_name': 'ProductType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'market.marketcategory': {
            'Meta': {'object_name': 'MarketCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
        },
        'market.marketplace': {
            'Meta': {'object_name': 'MarketPlace'},
            'base_domain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '92'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
            'template_prefix': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '92'})
        },
        'market.marketsubcategory': {
            'Meta': {'unique_together': "(('parent', 'slug'),)", 'object_name': 'MarketSubCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subcategories'", 'null': 'True', 'to': "orm['market.MarketCategory']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '60', 'db_index': 'True'})
        },
        'shops.shop': {
            'Meta': {'object_name': 'Shop'},
            'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'bids': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'default': "'39.29038,-76.61219'", 'max_length': '255'}),
            'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        }
    }

    complete_apps = ['for_sale']
|
"""Test the mib_etherlike module."""
import unittest
from mock import Mock
import os
import sys
# Derive the repository root from this test file's location so the package
# can be imported when running from a source checkout.
TEST_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
SWITCHMAP_DIRECTORY = os.path.abspath(os.path.join(TEST_DIRECTORY, os.pardir))
ROOT_DIRECTORY = os.path.abspath(os.path.join(SWITCHMAP_DIRECTORY, os.pardir))
# Idiom fix: compare truthiness directly instead of "... is True".
if TEST_DIRECTORY.endswith('/switchmap-ng/switchmap/test'):
    sys.path.append(ROOT_DIRECTORY)
else:
    # Bug fix: the message previously named the wrong directory
    # ("switchmap-ng/bin") rather than the one actually checked above.
    print(
        'This script is not installed in the "switchmap-ng/switchmap/test" '
        'directory. Please fix.')
    sys.exit(2)
from switchmap.snmp import mib_etherlike as testimport
class Query(object):
    """Class for snmp_manager.Query mock.

    A detailed tutorial about Python mocks can be found here:
    http://www.drdobbs.com/testing/using-mocks-in-python/240168251
    """

    def query(self):
        """Do an SNMP query."""

    def oid_exists(self):
        """Determine existence of OID on device."""

    def swalk(self):
        """Do a failsafe SNMPwalk."""

    def walk(self):
        """Do a SNMPwalk."""
class KnownValues(unittest.TestCase):
    """Checks all functions and methods."""

    #########################################################################
    # General object setup
    #########################################################################

    # SNMPwalk results used by Mocks.
    # Normalized walk returning integers, keyed by ifIndex.
    nwalk_results_integer = {
        100: 1234,
        200: 5678
    }

    def test_supported(self):
        """Testing method / function supported."""
        # Set the stage for oid_exists returning True
        snmpobj = Mock(spec=Query)
        mock_spec = {'oid_exists.return_value': True}
        snmpobj.configure_mock(**mock_spec)
        # Test supported
        testobj = testimport.init_query(snmpobj)
        self.assertEqual(testobj.supported(), True)
        # Set the stage for oid_exists returning False
        mock_spec = {'oid_exists.return_value': False}
        snmpobj.configure_mock(**mock_spec)
        # Test unsupported
        testobj = testimport.init_query(snmpobj)
        self.assertEqual(testobj.supported(), False)

    def test_layer1(self):
        """Testing method / function layer1."""
        # Initializing key variables: each walked value should be reported
        # under the 'dot3StatsDuplexStatus' key for its interface.
        expected_dict = {
            100: {'dot3StatsDuplexStatus': 1234},
            200: {'dot3StatsDuplexStatus': 5678}
        }
        # Set the stage for SNMPwalk
        snmpobj = Mock(spec=Query)
        mock_spec = {'swalk.return_value': self.nwalk_results_integer}
        snmpobj.configure_mock(**mock_spec)
        # Get results
        testobj = testimport.init_query(snmpobj)
        results = testobj.layer1()
        # Basic testing of results
        for primary in results.keys():
            for secondary in results[primary].keys():
                self.assertEqual(
                    results[primary][secondary],
                    expected_dict[primary][secondary])

    def test_dot3statsduplexstatus(self):
        """Testing method / function dot3statsduplexstatus."""
        # Set the stage for SNMPwalk
        snmpobj = Mock(spec=Query)
        mock_spec = {'swalk.return_value': self.nwalk_results_integer}
        snmpobj.configure_mock(**mock_spec)
        # Get results
        testobj = testimport.init_query(snmpobj)
        results = testobj.dot3statsduplexstatus()
        # Basic testing of results: keys should be integer ifIndex values.
        for key in results.keys():
            self.assertEqual(isinstance(key, int), True)
        # Test that we are getting the correct OID
        results = testobj.dot3statsduplexstatus(oidonly=True)
        self.assertEqual(results, '.1.3.6.1.2.1.10.7.2.1.19')
if __name__ == '__main__':
    # Run the unit tests when executed directly.
    unittest.main()
|
from setuptools import setup

# Packaging definition for the 'not_zipsafe_egg' distribution: contains the
# 'lib' package and is explicitly marked as not safe to run from a zip file.
setup(
    name = 'not_zipsafe_egg',
    packages = ['lib'],
    zip_safe = False
)
|
import importlib
import json
import os
from typing import Any, Dict
from django.utils.translation import ugettext as _
from zerver.lib.actions import (
internal_send_huddle_message,
internal_send_private_message,
internal_send_stream_message_by_name,
)
from zerver.lib.bot_config import ConfigError, get_bot_config
from zerver.lib.bot_storage import (
get_bot_storage,
is_key_in_bot_storage,
remove_bot_storage,
set_bot_storage,
)
from zerver.lib.integrations import EMBEDDED_BOTS
from zerver.lib.topic import get_topic_from_message_info
from zerver.models import UserProfile, get_active_user
our_dir = os.path.dirname(os.path.abspath(__file__))
from zulip_bots.lib import RateLimit
def get_bot_handler(service_name: str) -> Any:
    """Return a handler instance for an embedded bot service, or None.

    Only services registered in EMBEDDED_BOTS may be instantiated.
    """
    configured_service = next(
        (service.name for service in EMBEDDED_BOTS
         if service.name == service_name),
        "",
    )
    if not configured_service:
        return None
    module_path = f'zulip_bots.bots.{configured_service}.{configured_service}'
    bot_module: Any = importlib.import_module(module_path)
    return bot_module.handler_class()
class StateHandler:
    """JSON-marshalling wrapper around the bot storage backend."""

    storage_size_limit: int = 10000000  # TODO: Store this in the server configuration model.

    def __init__(self, user_profile: UserProfile) -> None:
        self.user_profile = user_profile
        # Values are persisted as JSON strings.
        self.marshal = lambda obj: json.dumps(obj)
        self.demarshal = lambda obj: json.loads(obj)

    def get(self, key: str) -> str:
        """Fetch and decode the value stored under ``key``."""
        return self.demarshal(get_bot_storage(self.user_profile, key))

    def put(self, key: str, value: str) -> None:
        """Encode ``value`` and store it under ``key``."""
        set_bot_storage(self.user_profile, [(key, self.marshal(value))])

    def remove(self, key: str) -> None:
        """Delete the entry stored under ``key``."""
        remove_bot_storage(self.user_profile, [key])

    def contains(self, key: str) -> bool:
        """Return whether ``key`` exists in the bot's storage."""
        return is_key_in_bot_storage(self.user_profile, key)
class EmbeddedBotQuitException(Exception):
    """Raised via EmbeddedBotHandler.quit when an embedded bot quits."""
    pass
class EmbeddedBotEmptyRecipientsList(Exception):
    """Raised when a private message is sent with an empty recipients list."""
    pass
class EmbeddedBotHandler:
    """Bot-facing API for bots that run inside the server process."""

    def __init__(self, user_profile: UserProfile) -> None:
        # Only expose a subset of our UserProfile's functionality
        self.user_profile = user_profile
        # Outgoing-message rate limiter; checked at the top of send_message.
        self._rate_limit = RateLimit(20, 5)
        self.full_name = user_profile.full_name
        self.email = user_profile.email
        self.storage = StateHandler(user_profile)
        self.user_id = user_profile.id

    def send_message(self, message: Dict[str, Any]) -> None:
        """Send a stream or private message on the bot's behalf.

        Raises:
            EmbeddedBotEmptyRecipientsList: If a private message has no
                recipients.
        """
        if not self._rate_limit.is_legal():
            self._rate_limit.show_error_and_exit()
        if message['type'] == 'stream':
            internal_send_stream_message_by_name(
                self.user_profile.realm, self.user_profile,
                message['to'], message['topic'], message['content'],
            )
            return
        assert message['type'] == 'private'
        # Ensure that it's a comma-separated list, even though the
        # usual 'to' field could be either a List[str] or a str.
        recipients = ','.join(message['to']).split(',')
        if len(message['to']) == 0:
            raise EmbeddedBotEmptyRecipientsList(_('Message must have recipients!'))
        elif len(message['to']) == 1:
            # Single recipient: one-to-one private message.
            recipient_user = get_active_user(recipients[0], self.user_profile.realm)
            internal_send_private_message(self.user_profile.realm, self.user_profile,
                                          recipient_user, message['content'])
        else:
            # Multiple recipients: group private (huddle) message.
            internal_send_huddle_message(self.user_profile.realm, self.user_profile,
                                         recipients, message['content'])

    def send_reply(self, message: Dict[str, Any], response: str) -> None:
        """Reply to ``message`` in its private thread or stream topic."""
        if message['type'] == 'private':
            self.send_message(dict(
                type='private',
                to=[x['email'] for x in message['display_recipient']],
                content=response,
                sender_email=message['sender_email'],
            ))
        else:
            self.send_message(dict(
                type='stream',
                to=message['display_recipient'],
                topic=get_topic_from_message_info(message),
                content=response,
                sender_email=message['sender_email'],
            ))

    # The bot_name argument exists only to comply with ExternalBotHandler.get_config_info().
    def get_config_info(self, bot_name: str, optional: bool=False) -> Dict[str, str]:
        """Return the bot's config; an empty dict if missing and ``optional``."""
        try:
            return get_bot_config(self.user_profile)
        except ConfigError:
            if optional:
                return dict()
            raise

    def quit(self, message: str= "") -> None:
        """Raise EmbeddedBotQuitException carrying ``message``."""
        raise EmbeddedBotQuitException(message)
|
from nailgun.orchestrator import plugins_serializers
from nailgun.orchestrator.priority_serializers import PriorityStrategy
def stage_serialize(serializer, graph_tasks, cluster, nodes):
    """Serialize tasks for given stage

    :param serializer: plugins_serializers.BasePluginDeploymentHooksSerializer
    :param graph_tasks: list of tasks
    :param cluster: cluster db object
    :param nodes: list of node db objects
    """
    # Graph tasks come first, followed by the plugin hook tasks.
    serialized = list(graph_tasks)
    serialized.extend(serializer(cluster, nodes).serialize())
    # Assign strictly increasing priorities so tasks run one after another.
    PriorityStrategy().one_by_one(serialized)
    return serialized
def pre_deployment_serialize(orchestrator_graph, cluster, nodes):
    """Serialize the pre-deployment stage (graph tasks plus plugin hooks)."""
    tasks_from_graph = orchestrator_graph.pre_tasks_serialize(nodes)
    return stage_serialize(
        plugins_serializers.PluginsPreDeploymentHooksSerializer,
        tasks_from_graph, cluster, nodes)
def post_deployment_serialize(orchestrator_graph, cluster, nodes):
    """Serialize the post-deployment stage (graph tasks plus plugin hooks)."""
    tasks_from_graph = orchestrator_graph.post_tasks_serialize(nodes)
    return stage_serialize(
        plugins_serializers.PluginsPostDeploymentHooksSerializer,
        tasks_from_graph, cluster, nodes)
|
"""Support for Sensors using public Netatmo data."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_MODE, CONF_MONITORED_CONDITIONS, TEMP_CELSIUS,
DEVICE_CLASS_TEMPERATURE, DEVICE_CLASS_HUMIDITY)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)

# Configuration keys for the rectangular query area (NE/SW corners).
CONF_AREAS = 'areas'
CONF_LAT_NE = 'lat_ne'
CONF_LON_NE = 'lon_ne'
CONF_LAT_SW = 'lat_sw'
CONF_LON_SW = 'lon_sw'

DEFAULT_NAME = 'Netatmo Public Data'
DEFAULT_MODE = 'avg'
# How readings from multiple stations in an area are aggregated.
MODE_TYPES = {'max', 'avg'}

# sensor type -> [display name, unit, icon, device class]
SENSOR_TYPES = {
    'temperature': ['Temperature', TEMP_CELSIUS, 'mdi:thermometer',
                    DEVICE_CLASS_TEMPERATURE],
    'pressure': ['Pressure', 'mbar', 'mdi:gauge', None],
    'humidity': ['Humidity', '%', 'mdi:water-percent', DEVICE_CLASS_HUMIDITY],
    'rain': ['Rain', 'mm', 'mdi:weather-rainy', None],
    'windstrength': ['Wind Strength', 'km/h', 'mdi:weather-windy', None],
    'guststrength': ['Gust Strength', 'km/h', 'mdi:weather-windy', None],
}

# Throttle interval for NetatmoPublicData.update (API politeness).
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=600)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_AREAS): vol.All(cv.ensure_list, [
        {
            vol.Required(CONF_LAT_NE): cv.latitude,
            vol.Required(CONF_LAT_SW): cv.latitude,
            vol.Required(CONF_LON_NE): cv.longitude,
            vol.Required(CONF_LON_SW): cv.longitude,
            vol.Required(CONF_MONITORED_CONDITIONS): [vol.In(SENSOR_TYPES)],
            vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In(MODE_TYPES),
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string
        }
    ]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the access to Netatmo binary sensor."""
    netatmo = hass.components.netatmo
    entities = []
    # One shared data object per configured area, one sensor per condition.
    for area in config.get(CONF_AREAS):
        area_data = NetatmoPublicData(
            netatmo.NETATMO_AUTH,
            lat_ne=area.get(CONF_LAT_NE),
            lon_ne=area.get(CONF_LON_NE),
            lat_sw=area.get(CONF_LAT_SW),
            lon_sw=area.get(CONF_LON_SW))
        for condition in area.get(CONF_MONITORED_CONDITIONS):
            entities.append(
                NetatmoPublicSensor(
                    area.get(CONF_NAME), area_data, condition,
                    area.get(CONF_MODE)))
    add_entities(entities, True)
class NetatmoPublicSensor(Entity):
    """Represent a single sensor in a Netatmo."""

    def __init__(self, area_name, data, sensor_type, mode):
        """Initialize the sensor."""
        self.netatmo_data = data
        self.type = sensor_type
        self._mode = mode
        self._area_name = area_name
        display_name, unit, icon, device_class = SENSOR_TYPES[sensor_type]
        self._name = '{} {}'.format(area_name, display_name)
        self._state = None
        self._device_class = device_class
        self._icon = icon
        self._unit_of_measurement = unit

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def icon(self):
        """Icon to use in the frontend."""
        return self._icon

    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return self._device_class

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity."""
        return self._unit_of_measurement

    def update(self):
        """Get the latest data from NetAtmo API and updates the states."""
        self.netatmo_data.update()
        if self.netatmo_data.data is None:
            _LOGGER.warning("No data found for %s", self._name)
            self._state = None
            return
        # Dispatch to the pyatmo getter matching this sensor's type.
        getter_name = {
            'temperature': 'getLatestTemperatures',
            'pressure': 'getLatestPressures',
            'humidity': 'getLatestHumidities',
            'rain': 'getLatestRain',
            'windstrength': 'getLatestWindStrengths',
            'guststrength': 'getLatestGustStrengths',
        }.get(self.type)
        data = None
        if getter_name is not None:
            data = getattr(self.netatmo_data.data, getter_name)()
        if not data:
            _LOGGER.warning("No station provides %s data in the area %s",
                            self.type, self._area_name)
            self._state = None
            return
        if self._mode == 'avg':
            self._state = round(sum(data.values()) / len(data), 1)
        elif self._mode == 'max':
            self._state = max(data.values())
class NetatmoPublicData:
    """Get the latest data from NetAtmo."""

    def __init__(self, auth, lat_ne, lon_ne, lat_sw, lon_sw):
        """Initialize the data object."""
        self.auth = auth
        # Latest pyatmo.PublicData result; None until the first update.
        self.data = None
        self.lat_ne = lat_ne
        self.lon_ne = lon_ne
        self.lat_sw = lat_sw
        self.lon_sw = lon_sw

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Request an update from the Netatmo API."""
        import pyatmo
        data = pyatmo.PublicData(self.auth,
                                 LAT_NE=self.lat_ne,
                                 LON_NE=self.lon_ne,
                                 LAT_SW=self.lat_sw,
                                 LON_SW=self.lon_sw,
                                 filtering=True)
        # Keep the previous data rather than overwrite it with an empty area.
        if data.CountStationInArea() == 0:
            _LOGGER.warning('No Stations available in this area.')
            return
        self.data = data
|
import sys, os
import time
extensions = ['sphinx.ext.todo']
templates_path = [u'_templates']
source_suffix = '.rst'
source_encoding = u'utf-8-sig'
master_doc = u'index'
project = u'OpenSplice GPB Tutorial'
this_year = time.strftime( '%Y' )
copyright = u'{y}, ADLINK Technology Limited'.format( y = this_year )
print 'Copyright string is:', copyright
version = u'6.x'
release = version
print 'Short version string is:', version
print 'Full version string is:', release
language = u'en'
today = ' '
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = u'vortextheme'
html_theme_path = ['../../.']
html_title = 'OpenSplice GPB Tutorial'
html_logo = './images/Vortex_logo_2014.png'
html_static_path = []
html_show_sphinx = False
htmlhelp_basename = 'OpenSpliceGPBTutorial'
latex_paper_size = u'a4'
latex_font_size = u'10pt'
latex_documents = [('index', 'OpenSplice_GPBTutorial.tex', u'OpenSplice GPB Tutorial', u'', 'manual', True)]
latex_elements = { 'babel': '\\usepackage[english]{babel}' }
latex_logo = 'images/Vortex-OpenSplice-Cover.png'
latex_use_parts = False
latex_elements = {
'classoptions': ',openany, oneside',
'babel': '\\usepackage[english]{babel}'
}
man_pages = [('index', 'OpenSpliceGPBTutorial', u'OpenSplice GPB Tutorial', [u'ADLINK Technology Limited'], 1)]
todo_include_todos = True
|
"""Database models."""
from __future__ import absolute_import, unicode_literals
from datetime import timedelta
import timezone_field
from celery import schedules
from celery.five import python_2_unicode_compatible
from django.conf import settings
from django.core.exceptions import MultipleObjectsReturned, ValidationError
from django.core.validators import MaxValueValidator
from django.db import models
from django.db.models import signals
from django.utils.translation import ugettext_lazy as _
from . import managers, validators
from .tzcrontab import TzAwareCrontab
from .utils import make_aware, now
# Interval period names; these double as the keyword arguments accepted by
# datetime.timedelta in IntervalSchedule.schedule.
DAYS = 'days'
HOURS = 'hours'
MINUTES = 'minutes'
SECONDS = 'seconds'
MICROSECONDS = 'microseconds'

# (value, localized label) choices for IntervalSchedule.period.
PERIOD_CHOICES = (
    (DAYS, _('Days')),
    (HOURS, _('Hours')),
    (MINUTES, _('Minutes')),
    (SECONDS, _('Seconds')),
    (MICROSECONDS, _('Microseconds')),
)

# Choices for SolarSchedule.event, taken from celery's solar schedule events.
SOLAR_SCHEDULES = [(x, _(x)) for x in sorted(schedules.solar._all_events)]
def cronexp(field):
    """Representation of cron expression.

    Returns ``field`` with all spaces removed, or '*' when the field is
    falsy or collapses to an empty string.
    """
    # Rewritten from the error-prone ``a and b or c`` idiom into explicit
    # conditionals with identical behavior.
    if field:
        stripped = str(field).replace(' ', '')
        if stripped:
            return stripped
    return '*'
@python_2_unicode_compatible
class SolarSchedule(models.Model):
    """Schedule following astronomical patterns."""

    # Solar event name (choices come from celery's solar schedule events).
    event = models.CharField(
        _('event'), max_length=24, choices=SOLAR_SCHEDULES
    )
    latitude = models.DecimalField(
        _('latitude'), max_digits=9, decimal_places=6
    )
    longitude = models.DecimalField(
        _('longitude'), max_digits=9, decimal_places=6
    )

    class Meta:
        """Table information."""

        verbose_name = _('solar event')
        verbose_name_plural = _('solar events')
        ordering = ('event', 'latitude', 'longitude')
        unique_together = ('event', 'latitude', 'longitude')

    @property
    def schedule(self):
        """Return the celery schedule object with a timezone-aware now()."""
        return schedules.solar(self.event,
                               self.latitude,
                               self.longitude,
                               nowfun=lambda: make_aware(now()))

    @classmethod
    def from_schedule(cls, schedule):
        """Return the row matching ``schedule``; unsaved instance if absent.

        Duplicate rows are deleted and replaced with a fresh instance.
        """
        spec = {'event': schedule.event,
                'latitude': schedule.lat,
                'longitude': schedule.lon}
        try:
            return cls.objects.get(**spec)
        except cls.DoesNotExist:
            return cls(**spec)
        except MultipleObjectsReturned:
            cls.objects.filter(**spec).delete()
            return cls(**spec)

    def __str__(self):
        return '{0} ({1}, {2})'.format(
            self.get_event_display(),
            self.latitude,
            self.longitude
        )
@python_2_unicode_compatible
class IntervalSchedule(models.Model):
    """Schedule executing every n seconds."""

    # Re-export the period constants on the model for convenient access.
    DAYS = DAYS
    HOURS = HOURS
    MINUTES = MINUTES
    SECONDS = SECONDS
    MICROSECONDS = MICROSECONDS
    PERIOD_CHOICES = PERIOD_CHOICES

    every = models.IntegerField(_('every'), null=False)
    period = models.CharField(
        _('period'), max_length=24, choices=PERIOD_CHOICES,
    )

    class Meta:
        """Table information."""

        verbose_name = _('interval')
        verbose_name_plural = _('intervals')
        ordering = ['period', 'every']

    @property
    def schedule(self):
        """Return the celery schedule object with a timezone-aware now()."""
        return schedules.schedule(
            timedelta(**{self.period: self.every}),
            nowfun=lambda: make_aware(now())
        )

    @classmethod
    def from_schedule(cls, schedule, period=SECONDS):
        """Return the row matching ``schedule``; unsaved instance if absent.

        Duplicate rows are deleted and replaced with a fresh instance.
        """
        # Negative intervals are clamped to zero.
        every = max(schedule.run_every.total_seconds(), 0)
        try:
            return cls.objects.get(every=every, period=period)
        except cls.DoesNotExist:
            return cls(every=every, period=period)
        except MultipleObjectsReturned:
            cls.objects.filter(every=every, period=period).delete()
            return cls(every=every, period=period)

    def __str__(self):
        if self.every == 1:
            return _('every {0.period_singular}').format(self)
        return _('every {0.every} {0.period}').format(self)

    @property
    def period_singular(self):
        # 'days' -> 'day', 'hours' -> 'hour', etc.
        return self.period[:-1]
@python_2_unicode_compatible
class CrontabSchedule(models.Model):
    """Timezone Aware Crontab-like schedule."""

    #
    # The worst case scenario for day of month is a list of all 31 day numbers
    # '[1, 2, ..., 31]' which has a length of 115. Likewise, minute can be
    # 0..59 and hour can be 0..23. Ensure we can accomodate these by allowing
    # 4 chars for each value (what we save on 0-9 accomodates the []).
    # We leave the other fields at their historical length.
    #
    minute = models.CharField(
        _('minute'), max_length=60 * 4, default='*',
        validators=[validators.minute_validator],
    )
    hour = models.CharField(
        _('hour'), max_length=24 * 4, default='*',
        validators=[validators.hour_validator],
    )
    day_of_week = models.CharField(
        _('day of week'), max_length=64, default='*',
        validators=[validators.day_of_week_validator],
    )
    day_of_month = models.CharField(
        _('day of month'), max_length=31 * 4, default='*',
        validators=[validators.day_of_month_validator],
    )
    month_of_year = models.CharField(
        _('month of year'), max_length=64, default='*',
        validators=[validators.month_of_year_validator],
    )
    timezone = timezone_field.TimeZoneField(default='UTC')

    class Meta:
        """Table information."""

        verbose_name = _('crontab')
        verbose_name_plural = _('crontabs')
        ordering = ['month_of_year', 'day_of_month',
                    'day_of_week', 'hour', 'minute', 'timezone']

    def __str__(self):
        return '{0} {1} {2} {3} {4} (m/h/d/dM/MY) {5}'.format(
            cronexp(self.minute), cronexp(self.hour),
            cronexp(self.day_of_week), cronexp(self.day_of_month),
            cronexp(self.month_of_year), str(self.timezone)
        )

    @property
    def schedule(self):
        """Return the celery crontab schedule for this row.

        Uses the timezone-aware TzAwareCrontab unless the
        DJANGO_CELERY_BEAT_TZ_AWARE setting disables it.
        """
        crontab = schedules.crontab(
            minute=self.minute,
            hour=self.hour,
            day_of_week=self.day_of_week,
            day_of_month=self.day_of_month,
            month_of_year=self.month_of_year,
        )
        if getattr(settings, 'DJANGO_CELERY_BEAT_TZ_AWARE', True):
            crontab = TzAwareCrontab(
                minute=self.minute,
                hour=self.hour,
                day_of_week=self.day_of_week,
                day_of_month=self.day_of_month,
                month_of_year=self.month_of_year,
                tz=self.timezone
            )
        return crontab

    @classmethod
    def from_schedule(cls, schedule):
        """Return the row matching ``schedule``; unsaved instance if absent.

        Duplicate rows are deleted and replaced with a fresh instance.
        """
        spec = {'minute': schedule._orig_minute,
                'hour': schedule._orig_hour,
                'day_of_week': schedule._orig_day_of_week,
                'day_of_month': schedule._orig_day_of_month,
                'month_of_year': schedule._orig_month_of_year,
                'timezone': schedule.tz
                }
        try:
            return cls.objects.get(**spec)
        except cls.DoesNotExist:
            return cls(**spec)
        except MultipleObjectsReturned:
            cls.objects.filter(**spec).delete()
            return cls(**spec)
class PeriodicTasks(models.Model):
    """Helper table for tracking updates to periodic tasks.

    A single-row table (``ident`` is always 1) whose ``last_update`` acts as
    a global "schedule changed" timestamp that beat schedulers can poll.
    """
    ident = models.SmallIntegerField(default=1, primary_key=True, unique=True)
    last_update = models.DateTimeField(null=False)
    objects = managers.ExtendedManager()
    @classmethod
    def changed(cls, instance, **kwargs):
        """Signal receiver: bump the timestamp unless the save is marked quiet
        via ``instance.no_changes``."""
        if not instance.no_changes:
            cls.update_changed()
    @classmethod
    def update_changed(cls, **kwargs):
        """Set last_update to now, creating the singleton row if needed."""
        cls.objects.update_or_create(ident=1, defaults={'last_update': now()})
    @classmethod
    def last_change(cls):
        """Return the last change timestamp, or None if never updated."""
        try:
            return cls.objects.get(ident=1).last_update
        except cls.DoesNotExist:
            pass
@python_2_unicode_compatible
class PeriodicTask(models.Model):
    """Model representing a periodic task."""
    name = models.CharField(
        _('name'), max_length=200, unique=True,
        help_text=_('Useful description'),
    )
    task = models.CharField(_('task name'), max_length=200)
    # Exactly one of interval/crontab/solar must be set; enforced in
    # validate_unique() below.
    interval = models.ForeignKey(
        IntervalSchedule, on_delete=models.CASCADE,
        null=True, blank=True, verbose_name=_('interval'),
    )
    crontab = models.ForeignKey(
        CrontabSchedule, on_delete=models.CASCADE, null=True, blank=True,
        verbose_name=_('crontab'), help_text=_('Use one of interval/crontab'),
    )
    solar = models.ForeignKey(
        SolarSchedule, on_delete=models.CASCADE, null=True, blank=True,
        verbose_name=_('solar'), help_text=_('Use a solar schedule')
    )
    # Task arguments are stored JSON-encoded as text.
    args = models.TextField(
        _('Arguments'), blank=True, default='[]',
        help_text=_('JSON encoded positional arguments'),
    )
    kwargs = models.TextField(
        _('Keyword arguments'), blank=True, default='{}',
        help_text=_('JSON encoded keyword arguments'),
    )
    # Optional routing overrides; None means "use the task's defaults"
    # (save() normalizes empty strings back to None).
    queue = models.CharField(
        _('queue'), max_length=200, blank=True, null=True, default=None,
        help_text=_('Queue defined in CELERY_TASK_QUEUES'),
    )
    exchange = models.CharField(
        _('exchange'), max_length=200, blank=True, null=True, default=None,
    )
    routing_key = models.CharField(
        _('routing key'), max_length=200, blank=True, null=True, default=None,
    )
    priority = models.PositiveIntegerField(
        _('priority'), default=None, validators=[MaxValueValidator(255)],
        blank=True, null=True
    )
    expires = models.DateTimeField(
        _('expires'), blank=True, null=True,
    )
    one_off = models.BooleanField(
        _('one-off task'), default=False,
    )
    start_time = models.DateTimeField(
        _('start_time'), blank=True, null=True,
    )
    enabled = models.BooleanField(
        _('enabled'), default=True,
    )
    # Scheduler bookkeeping; not editable through forms/admin.
    last_run_at = models.DateTimeField(
        auto_now=False, auto_now_add=False,
        editable=False, blank=True, null=True,
    )
    total_run_count = models.PositiveIntegerField(
        default=0, editable=False,
    )
    date_changed = models.DateTimeField(auto_now=True)
    description = models.TextField(_('description'), blank=True)
    objects = managers.PeriodicTaskManager()
    # When True, saving this instance does not notify PeriodicTasks.changed.
    no_changes = False
    class Meta:
        """Table information."""
        verbose_name = _('periodic task')
        verbose_name_plural = _('periodic tasks')
    def validate_unique(self, *args, **kwargs):
        """Ensure exactly one of interval, crontab or solar is configured."""
        super(PeriodicTask, self).validate_unique(*args, **kwargs)
        schedule_types = ['interval', 'crontab', 'solar']
        selected_schedule_types = [s for s in schedule_types
                                   if getattr(self, s)]
        if len(selected_schedule_types) == 0:
            raise ValidationError({
                'interval': [
                    'One of interval, crontab, or solar must be set.'
                ]
            })
        err_msg = 'Only one of interval, crontab, or solar must be set'
        if len(selected_schedule_types) > 1:
            # Report the error against every schedule field that was set.
            error_info = {}
            for selected_schedule_type in selected_schedule_types:
                error_info[selected_schedule_type] = [err_msg]
            raise ValidationError(error_info)
    def save(self, *args, **kwargs):
        """Normalize empty routing fields to None and persist the row."""
        self.exchange = self.exchange or None
        self.routing_key = self.routing_key or None
        self.queue = self.queue or None
        # Disabled tasks forget their last run so re-enabling starts fresh.
        if not self.enabled:
            self.last_run_at = None
        super(PeriodicTask, self).save(*args, **kwargs)
    def __str__(self):
        """'<name>: <schedule>' using whichever schedule type is set."""
        fmt = '{0.name}: {{no schedule}}'
        if self.interval:
            fmt = '{0.name}: {0.interval}'
        if self.crontab:
            fmt = '{0.name}: {0.crontab}'
        if self.solar:
            fmt = '{0.name}: {0.solar}'
        return fmt.format(self)
    @property
    def schedule(self):
        """The celery schedule object for whichever schedule type is set."""
        if self.interval:
            return self.interval.schedule
        if self.crontab:
            return self.crontab.schedule
        if self.solar:
            return self.solar.schedule
# Keep PeriodicTasks.last_update in sync so running beat schedulers notice
# edits: PeriodicTask rows bump the counter directly, while schedule rows
# bump it after a successful save/delete.
signals.pre_delete.connect(PeriodicTasks.changed, sender=PeriodicTask)
signals.pre_save.connect(PeriodicTasks.changed, sender=PeriodicTask)
# Use post_delete (not pre_delete) so the counter only changes once the row
# is actually gone — consistent with the Crontab/Solar hookups below.
signals.post_delete.connect(
    PeriodicTasks.update_changed, sender=IntervalSchedule)
signals.post_save.connect(
    PeriodicTasks.update_changed, sender=IntervalSchedule)
signals.post_delete.connect(
    PeriodicTasks.update_changed, sender=CrontabSchedule)
signals.post_save.connect(
    PeriodicTasks.update_changed, sender=CrontabSchedule)
signals.post_delete.connect(
    PeriodicTasks.update_changed, sender=SolarSchedule)
signals.post_save.connect(
    PeriodicTasks.update_changed, sender=SolarSchedule)
|
"""Tests for api.py."""
__author__ = 'aiuto@google.com (Tony Aiuto)'
import json
import os
import gflags as flags
from google.apputils import basetest
from googleapis.codegen import data_types
from googleapis.codegen import language_model
from googleapis.codegen.api import Api
from googleapis.codegen.api import AuthScope
from googleapis.codegen.api import Method
from googleapis.codegen.api import Resource
from googleapis.codegen.api import Schema
from googleapis.codegen.api_exception import ApiException
FLAGS = flags.FLAGS
class FakeLanguageModel(language_model.LanguageModel):
  """Minimal language model stub: code types come straight from the dict."""

  def GetCodeTypeFromDictionary(self, def_dict):
    """Return the raw 'type' entry of the definition dict (or None)."""
    return def_dict.get('type')

  def ArrayOf(self, unused_var, s):
    """Render an array type as Array[<element type>]."""
    return 'Array[%s]' % s
class ApiTest(basetest.TestCase):
  """Tests for the Api object model built from discovery documents."""

  # The base discovery doc for most tests.
  _TEST_DISCOVERY_DOC = 'sample_discovery.json'
  _TEST_DISCOVERY_RPC_DOC = 'sample_discovery.rpc.json'
  _TEST_SHARED_TYPES_DOC = 'sample_shared.json'

  def ApiFromDiscoveryDoc(self, path):
    """Load a discovery doc from a file and create a library Api.

    Args:
      path: (str) The path to the document.
    Returns:
      An Api for that document.
    """
    # Context manager ensures the file is closed even if parsing fails.
    with open(os.path.join(os.path.dirname(__file__), 'testdata', path)) as f:
      discovery_doc = json.loads(f.read())
    return Api(discovery_doc)

  def testLazySchemaForCreation(self):
    """Check loading schemas which are known to have a forward reference.

    In the test data, "Activity" refers to "Comment", and the nature
    (sorted) of the loading code causes "Activity" to be processed
    before "Comment". We want to make sure that SchemaFor does the right
    thing with the lazy creation of activity.
    """
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    for schema in ['Activity', 'Comment', 'Activity.object']:
      self.assertTrue(isinstance(api._schemas[schema], Schema))

  def testSchemaRefInProperties(self):
    """Make sure that an object ref works in a schema properties list.

    NOTE(review): this method was previously named SchemaRefInProperties
    (no 'test' prefix), so the test runner never collected it; renamed to
    restore coverage — confirm it still passes against the test data.
    """
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    activity_schema = api._schemas['Activity']
    for prop in activity_schema.values['properties']:
      if prop.values['wireName'] == 'object':
        self.assertEquals('ActivityObject',
                          prop.object_type.values['className'])

  def testMakeDefaultSchemaNameFromTheDictTag(self):
    """Use the outer tag as id for schemas which have no id in their dict."""
    discovery_doc = json.loads(
        """
        {
          "name": "fake",
          "version": "v1",
          "schemas": {
            "should_use_id": {
              "id": "named",
              "type": "object",
              "properties": { "dummy": { "type": "string" } }
            },
            "unnamed": {
              "type": "object",
              "properties": { "dummy": { "type": "string" } }
            }
          },
          "resources": {}
        }
        """)
    gen = Api(discovery_doc)
    self.assertTrue('named' in gen._schemas)
    self.assertTrue('unnamed' in gen._schemas)

  def testUnknownHttpMethod(self):
    """Make sure we get an exception on unknown HTTP types."""
    api = Api({'name': 'dummy', 'version': 'v1', 'resources': {}})
    unused_resource = Resource(api, 'temp', {'methods': {}})
    self.assertRaises(ApiException,
                      Method, api, 'bad', {
                          'rpcMethod': 'rpc',
                          'httpMethod': 'Not GET/POST/PUT/DELETE',
                          'parameters': {}
                      })

  def testRequiredParameterList(self):
    """Make sure we are computing required parameters correctly."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    tests_executed = 0
    for resource in api.values['resources']:
      if resource.values['wireName'] == 'activities':
        for method in resource.values['methods']:
          if method.required_parameters:
            required_names = [p.values['wireName']
                              for p in method.required_parameters]
            self.assertEquals(method.values['parameterOrder'], required_names)
            tests_executed += 1
    method = api.MethodByName('chili.activities.get')
    optional_names = set(p.values['wireName']
                         for p in method.optional_parameters)
    self.assertEquals(set(['truncateAtom', 'max-comments', 'hl', 'max-liked']),
                      optional_names)
    tests_executed += 1
    # Guards against the loops above silently matching nothing.
    self.assertEquals(7, tests_executed)

  def testSchemaLoadingAsString(self):
    """Test for the "schema as strings" representation."""
    api = self.ApiFromDiscoveryDoc('foo.v1.json')
    self.assertEquals(4, len(api._schemas))

  def testSubResources(self):
    """Test for the APIs with subresources."""

    def CountResourceTree(resource):
      """Count all descendants of a resource (excluding itself)."""
      ret = 0
      for r in resource._resources:
        ret += 1 + CountResourceTree(r)
      return ret

    api = self.ApiFromDiscoveryDoc('moderator.v1.json')
    top_level_resources = 0
    total_resources = 0
    non_method_resources = 0
    have_sub_resources = 0
    have_sub_resources_and_methods = 0
    for r in api._resources:
      top_level_resources += 1
      total_resources += 1 + CountResourceTree(r)
      if not r._methods:
        non_method_resources += 1
      if r._resources:
        have_sub_resources += 1
      if r._resources and r._methods:
        have_sub_resources_and_methods += 1
    # Hand counted 18 resources in the file.
    self.assertEquals(18, total_resources)
    self.assertEquals(11, top_level_resources)
    # 4 of them have no methods, only sub resources
    self.assertEquals(4, non_method_resources)
    # 6 of them have sub resources.
    self.assertEquals(6, have_sub_resources)
    # And, of course, 2 should have both sub resources and methods
    self.assertEquals(2, have_sub_resources_and_methods)

  def testParameters(self):
    """Check query vs. path classification of method parameters."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    delete = api.MethodByName('chili.activities.delete')
    self.assertEquals(1, len(delete.query_parameters))
    self.assertEquals(3, len(delete.path_parameters))
    required_p = FindByWireName(delete.values['parameters'],
                                'required_parameter')
    self.assertEquals('query', required_p.location)
    post_id = FindByWireName(delete.values['parameters'], 'postId')
    self.assertEquals('path', post_id.location)

  def testEnums(self):
    """Check enum name/value/description triples on method parameters."""
    gen = self.ApiFromDiscoveryDoc('enums.json')
    # Find the method with the enums
    m1 = gen.MethodByName('language.translations.list')
    language = FindByWireName(m1.values['parameters'], 'language')
    e = language.values['enumType']
    self.assertEquals(m1, e.parent)
    for name, value, desc in e.values['pairs']:
      self.assertTrue(name in ['ENGLISH', 'ITALIAN', 'LANG_ZH_CN',
                               'LANG_ZH_TW'])
      self.assertTrue(value in ['english', 'italian', 'lang_zh-CN',
                                'lang_zh-TW'])
      self.assertTrue(desc in ['English (US)', 'Italian',
                               'Chinese (Simplified)', 'Chinese (Traditional)'])
    accuracy = FindByWireName(m1.values['parameters'], 'accuracy')
    e = accuracy.values['enumType']
    self.assertEquals(m1, e.parent)
    for name, value, desc in e.values['pairs']:
      self.assertTrue(name in ['VALUE_1', 'VALUE_2', 'VALUE_3'])
      self.assertTrue(value in ['1', '2', '3'])

  def testArrayParameter(self):
    """A repeated parameter becomes an ArrayDataType of its base type."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    search = api.MethodByName('chili.people.search')
    filter_param = FindByWireName(search.values['parameters'], 'filters')
    self.assertTrue(isinstance(filter_param.data_type,
                               data_types.ArrayDataType))
    self.assertTrue(isinstance(filter_param.data_type._base_type,
                               data_types.PrimitiveDataType))
    self.assertEquals('string',
                      filter_param.data_type._base_type.values['type'])

  def testRepeatedEnum(self):
    """A repeated enum parameter is an array whose base type is an Enum."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    activities = FindByWireName(api.values['resources'], 'activities')
    list_method = FindByWireName(activities.values['methods'], 'list')
    options = [p for p in list_method.values['parameters']
               if p.values['wireName'] == 'options'][0]
    # Should be an array of enums of type string
    self.assertTrue(isinstance(options.data_type, data_types.ArrayDataType))
    self.assertTrue(isinstance(options.data_type._base_type, data_types.Enum))
    self.assertEquals('string', options.data_type._base_type.values['type'])

  def testScopes(self):
    """Auth scopes from the doc are exposed via the template values."""
    gen = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    scopes = gen.GetTemplateValue('authscopes')
    self.assertEquals(2, len(scopes))
    self.assertEquals('https://www.googleapis.com/auth/buzz',
                      scopes[0].GetTemplateValue('value'))
    self.assertEquals('BUZZ',
                      scopes[0].GetTemplateValue('name'))
    self.assertEquals('https://www.googleapis.com/auth/buzz.read-only',
                      scopes[1].GetTemplateValue('value'))
    self.assertEquals('BUZZ_READ_ONLY',
                      scopes[1].GetTemplateValue('name'))

  def testAuthScope(self):
    """AuthScope derives constant-style names from many URL shapes."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    scope = AuthScope(api,
                      'https://www.googleapis.com/auth/userinfo.email',
                      {'description': 'A typical scope'})
    self.assertEquals('USERINFO_EMAIL', scope.GetTemplateValue('name'))
    self.assertEquals('userinfo.email', scope.GetTemplateValue('lastPart'))
    self.assertEquals('A typical scope', scope.GetTemplateValue('description'))
    # With no description, the value doubles as the description.
    scope = AuthScope(api,
                      'https://www.googleapis.com/auth/no.description', {})
    self.assertEquals('NO_DESCRIPTION', scope.GetTemplateValue('name'))
    self.assertEquals('https://www.googleapis.com/auth/no.description',
                      scope.GetTemplateValue('description'))
    scope = AuthScope(api, 'https://www.googleapis.com/auth/trim.slashes//', {})
    self.assertEquals('TRIM_SLASHES', scope.GetTemplateValue('name'))
    self.assertEquals('https://www.googleapis.com/auth/trim.slashes//',
                      scope.GetTemplateValue('value'))
    scope = AuthScope(api,
                      'https://www.googleapis.com/auth/product',
                      {'description': 'A product level scope'})
    self.assertEquals('PRODUCT', scope.GetTemplateValue('name'))
    scope = AuthScope(api,
                      'https://mail.google.com/',
                      {'description': 'A non-googleapis.com scope'})
    self.assertEquals('MAIL_GOOGLE_COM', scope.GetTemplateValue('name'))
    self.assertEquals('mail.google.com', scope.GetTemplateValue('lastPart'))
    self.assertEquals('https://mail.google.com/',
                      scope.GetTemplateValue('value'))
    scope = AuthScope(api,
                      'https://mail.google.com/abc',
                      {'description': 'A non-googleapis.com scope'})
    self.assertEquals('MAIL_GOOGLE_COM_ABC', scope.GetTemplateValue('name'))
    scope = AuthScope(api,
                      'http://mail.google.com/',
                      {'description': 'A non-https scope'})
    self.assertEquals('HTTP___MAIL_GOOGLE_COM', scope.GetTemplateValue('name'))
    scope = AuthScope(api, 'tag:google.com,2010:auth/groups2#email', {})
    self.assertEquals('TAG_GOOGLE_COM_2010_AUTH_GROUPS2_EMAIL',
                      scope.GetTemplateValue('name'))
    scope = AuthScope(api, 'email', {})
    self.assertEquals('EMAIL', scope.GetTemplateValue('name'))

  def testPostVariations(self):
    """Request/response types for GET/POST/PUT with/without bodies."""
    gen = self.ApiFromDiscoveryDoc('post_variations.json')
    # Check a normal GET method to make sure it has no request and does have
    # a response
    r1 = FindByWireName(gen.values['resources'], 'r1')
    methods = r1.values['methods']
    m = FindByWireName(methods, 'get')
    self.assertIsNone(m.values['requestType'])
    self.assertEquals('Task', m.values['responseType'].class_name)
    # A normal POST with both a request and response
    m = FindByWireName(methods, 'insert')
    self.assertEquals('Task', m.values['requestType'].class_name)
    self.assertEquals('Task', m.values['responseType'].class_name)
    # A POST with neither request nor response
    m = FindByWireName(methods, 'no_request_no_response')
    self.assertIsNone(m.values.get('requestType'))
    self.assertTrue(isinstance(m.values.get('responseType'), data_types.Void))
    # A POST with no request
    m = FindByWireName(methods, 'no_request')
    self.assertIsNone(m.values.get('requestType'))
    self.assertEquals('Task', m.values['responseType'].class_name)
    # A PUT with no response
    m = FindByWireName(methods, 'no_response')
    self.assertEquals('TaskList', m.values['requestType'].class_name)
    self.assertTrue(isinstance(m.values.get('responseType'), data_types.Void))

  def testSchemaParenting(self):
    """Nested schemas get parents; their map keys reflect the nesting."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    # Check that top level schemas have no parent
    for schema in ['Activity', 'Comment']:
      self.assertIsNone(api._schemas[schema].parent)
    for schema in ['Person.urls', 'Activity.object',
                   'Activity.object.attachments']:
      self.assertTrue(api._schemas[schema].parent)
    # verify the values in the name to schema map
    for name, schema in api._schemas.items():
      if schema.parent and schema.parent != api:
        wire_name = schema.values['wireName']
        parent_wire_name = schema.parent.values['wireName']
        # Our entry key should never match the wirename of our parent
        self.assertNotEquals(name, parent_wire_name)
        # our key must look like 'p1.p2....parent.me'. We verify that we at
        # least end with 'parent.me'
        self.assertTrue(name.endswith('.'.join([parent_wire_name, wire_name])))

  def testReadingRpcDiscovery(self):
    """RPC-style discovery docs yield methods but no resources."""
    gen = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_RPC_DOC)
    # no resources in RPC
    self.assertEquals(0, len(gen.values['resources']))
    # but we do expect a few methods
    self.assertLess(5, len(gen.values['methods']))
    self.assertGreater(100, len(gen.values['methods']))
    # RPC methods all have an id, httpMethod should be POST and have no path
    for method in gen.values['methods']:
      self.assertIsNotNone(method.values['id'])
      self.assertEquals('POST', method.values['httpMethod'])
      self.assertIsNone(method.values['restPath'])

  def testNormalizeUrlComponents(self):
    """rootUrl/servicePath derivation from the various URL inputs."""
    googleapis_base = 'https://www.googleapis.com/'

    def LoadApi(discovery_dict):
      """Build an Api from a minimal doc plus the given overrides."""
      d = {'name': 'fake', 'version': 'v1'}
      d.update(discovery_dict)
      api = Api(d)
      return api

    api = LoadApi({})
    self.assertEquals(googleapis_base, api.values['rootUrl'])
    self.assertEquals('fake/v1/', api.values['servicePath'])
    custom_path = '/testing/fake/v1/'
    api = LoadApi({'basePath': custom_path})
    self.assertEquals(googleapis_base, api.values['rootUrl'])
    self.assertEquals('testing/fake/v1/', api.values['servicePath'])
    custom_url = 'https://foo.com/bar/baz/'
    api = LoadApi({'basePath': custom_url})
    self.assertEquals('https://foo.com/', api.values['rootUrl'])
    self.assertEquals('bar/baz/', api.values['servicePath'])
    # Make sure baseUrl wins over basePath
    api = LoadApi({
        'basePath': '/will/not/be/used/',
        'baseUrl': custom_url
    })
    self.assertEquals('https://foo.com/', api.values['rootUrl'])
    self.assertEquals('bar/baz/', api.values['servicePath'])
    # Make sure rootUrl wins over all
    api = LoadApi({
        'basePath': '/will/not/be/used/',
        'baseUrl': 'https://bar.com/not/used/',
        'rootUrl': 'https://foo.com/',
        'servicePath': 'bar/baz/',
    })
    self.assertEquals('https://foo.com/', api.values['rootUrl'])
    self.assertEquals('bar/baz/', api.values['servicePath'])
    # Test Swarm APIs
    api = LoadApi({
        'baseUrl': 'https://localhost.appspot.com/_ah/api/fake/v1/',
        'basePath': '/_ah/api/fake/v1/',
        'rootUrl': 'https://localhost.appspot.com/_ah/api/',
        'servicePath': 'fake/v1/',
    })
    self.assertEquals('https://localhost.appspot.com/_ah/api/',
                      api.values['rootUrl'])
    self.assertEquals('fake/v1/', api.values['servicePath'])
    # .. in path
    self.assertRaises(ValueError, LoadApi, {'basePath': '/do/not/../go/up'})
    # no servicePath
    self.assertRaises(ValueError, LoadApi, {'rootUrl': 'https://foo.com/'})
    # batchPath
    api = LoadApi({})
    self.assertEquals(None, api.values['batchPath'])
    api = LoadApi({
        'batchPath': 'batch'
    })
    self.assertEquals("batch", api.values['batchPath'])
    api = LoadApi({
        'batchPath': '/batch'
    })
    self.assertEquals("batch", api.values['batchPath'])
    api = LoadApi({
        'batchPath': '//batch'
    })
    self.assertEquals("batch", api.values['batchPath'])

  def testCanonicalName(self):
    """canonicalName drives the class name but not the wire name."""
    d = {'name': 'fake', 'version': 'v1', 'canonicalName': 'My API'}
    api = Api(d)
    self.assertEquals('fake', api.values['name'])
    self.assertEquals('MyAPI', api._class_name)

  def testNormalizeOwnerInformation(self):
    """owner/ownerName/ownerDomain derivation from doc fields and URLs."""

    def LoadApi(**kwargs):
      d = {'name': 'fake', 'version': 'v1'}
      d.update(kwargs)
      return Api(d)

    api = LoadApi()
    self.assertEquals('Google', api.values['ownerName'])
    self.assertEquals('google', api.values['owner'])
    self.assertEquals('google.com', api.values['ownerDomain'])
    api = LoadApi(ownerName='Google', ownerDomain='youtube.com')
    self.assertEquals('Google', api.values['ownerName'])
    self.assertEquals('google', api.values['owner'])
    self.assertEquals('youtube.com', api.values['ownerDomain'])
    api = LoadApi(ownerDomain='youtube.com')
    self.assertEquals('youtube_com', api.values['owner'])
    self.assertEquals('youtube.com', api.values['ownerDomain'])
    # owner is explicitly declared
    api = LoadApi(owner='You Tube', ownerDomain='youtube.com')
    self.assertEquals('You Tube', api.values['owner'])
    self.assertEquals('youtube.com', api.values['ownerDomain'])
    api = LoadApi(servicePath='/fake',
                  rootUrl='https://www.foobar.co.uk:8080/root')
    self.assertEquals('www.foobar.co.uk', api['ownerDomain'])
    self.assertEquals('www_foobar_co_uk', api['owner'])
    # Google-hosted root URLs normalize the owner back to Google.
    api = LoadApi(servicePath='/fake',
                  rootUrl='https://whathaveyou.googleplex.com')
    self.assertEquals('google.com', api['ownerDomain'])
    self.assertEquals('Google', api['ownerName'])
    self.assertEquals('google', api['owner'])
    api = LoadApi(servicePath='/fake',
                  rootUrl='https://whathaveyou.googleapis.com')
    self.assertEquals('google.com', api['ownerDomain'])
    self.assertEquals('Google', api['ownerName'])
    self.assertEquals('google', api['owner'])
    api = LoadApi(servicePath='/fake',
                  rootUrl='https://whathaveyou.google.com')
    self.assertEquals('google.com', api['ownerDomain'])
    self.assertEquals('Google', api['ownerName'])
    self.assertEquals('google', api['owner'])

  def testSharedTypes(self):
    """Types from a shared type repository get their own module paths."""
    api = self.ApiFromDiscoveryDoc(self._TEST_SHARED_TYPES_DOC)
    api.VisitAll(lambda o: o.SetLanguageModel(language_model.LanguageModel()))
    # class defined by the API
    photos_feed_schema = api._schemas['PhotosFeed']
    # type defined from a shared type repo
    photo_schema = api._schemas[
        'http://www.googleapis.com/types/v1/com.google/plus/v2/photo']
    self.assertEquals('PhotosFeed', photos_feed_schema.values['wireName'])
    self.assertEquals('com.google.myservice', photos_feed_schema.module.name)
    self.assertEquals('Photo', photo_schema.values['wireName'])
    self.assertEquals('com.google.plus.pictures', photo_schema.module.name)
    self.assertEquals('com/google/plus/pictures', photo_schema.module.path)

  def testMethods(self):
    """top_level_methods and all_methods are populated and parented."""
    api = self.ApiFromDiscoveryDoc(self._TEST_DISCOVERY_DOC)
    self.assertEquals(api, api.top_level_methods[0].parent)
    self.assertLess(25, len(api.all_methods))
    self.assertLess(0, len(api.top_level_methods))

  def testApiHasTitle(self):
    """A missing title falls back to the API name."""
    api_def = {'name': 'fake',
               'version': 'v1',
               'schemas': {},
               'resources': {}}
    api = Api(api_def)
    self.assertEquals('fake', api['title'])

  def testExponentialBackoffDefault(self):
    # Make sure exponentialBackoffDefault defaults to False.
    discovery_doc = json.loads(
        """
        {
          "name": "fake",
          "version": "v1",
          "schemas": {},
          "resources": {}
        }
        """)
    api = Api(discovery_doc)
    self.assertFalse(api.values['exponentialBackoffDefault'])
    # Make sure an explicit true value is honored.
    discovery_doc2 = json.loads(
        """
        {
          "name": "fake",
          "version": "v1",
          "schemas": {},
          "resources": {},
          "exponentialBackoffDefault": true
        }
        """)
    api2 = Api(discovery_doc2)
    self.assertTrue(api2.values['exponentialBackoffDefault'])
class ApiModulesTest(basetest.TestCase):
  """Checks how ownerDomain/packagePath determine the module path."""

  def setUp(self):
    self.discovery_doc = json.loads(
        """
        {
        "name": "fake",
        "version": "v1",
        "schemas": {},
        "resources": {}
        }
        """)
    self.language_model = FakeLanguageModel()

  def _BuildApi(self):
    """Create an Api from the (possibly tweaked) doc and annotate it."""
    api = Api(self.discovery_doc)
    api.VisitAll(lambda o: o.SetLanguageModel(self.language_model))
    return api

  def testModuleOwnerDomain(self):
    self.discovery_doc['ownerDomain'] = 'foo.bar'
    api = self._BuildApi()
    self.assertEquals('bar/foo/fake', api.values['module'].path)

  def testModulePackagePath(self):
    self.discovery_doc['packagePath'] = 'foo/BAR'
    api = self._BuildApi()
    self.assertEquals('com/google/foo/BAR/fake', api.values['module'].path)

  def testModuleOwnerDomainAndPackagePath(self):
    self.discovery_doc['ownerDomain'] = 'toasty.com'
    self.discovery_doc['packagePath'] = 'foo/BAR'
    api = self._BuildApi()
    self.assertEquals('com/toasty/foo/BAR/fake', api.values['module'].path)
def FindByWireName(list_of_resource_or_method, wire_name):
  """Find an element in a list by its "wireName".

  The "wireName" is the name of the method "on the wire", which is the raw
  name as it appears in the JSON.

  Args:
    list_of_resource_or_method: A list of resource or methods as annotated by
      the Api.
    wire_name: (str) the name to find.

  Returns:
    The first matching element, or None if no element matches.
  """
  for candidate in list_of_resource_or_method:
    if candidate.values['wireName'] == wire_name:
      return candidate
  return None
if __name__ == '__main__':
  # Run the suite when this file is executed directly.
  basetest.main()
|
"""Module for testing the add/del/show hub command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestHub(TestBrokerCommand):
    """Exercise add/show/del hub, including the network-in-use guard.

    Test numbering encodes ordering: 1xx create, 2xx network guard and
    deletion, 3xx verify everything is gone again.
    """
    def test_100_add_hub1_default_org(self):
        # No --organization: hub1 lands under the default org ('ms').
        command = ["add", "hub", "--hub", "hub1", "--fullname",
                   "hub1 example", "--comments", "Some hub comments"]
        self.noouttest(command)
    def test_110_add_hubtest_org(self):
        command = ["add", "organization", "--organization", "hubtest",
                   "--fullname", "Hub Test, Inc"]
        self.noouttest(command)
    def test_115_add_hub2(self):
        # hub2 is parented under the freshly created 'hubtest' org.
        command = ["add", "hub", "--hub", "hub2", "--fullname", "hub2 example",
                   "--organization", "hubtest", "--comments", "Some other hub comments"]
        self.noouttest(command)
    def test_120_add_hk(self):
        self.noouttest(["add_hub", "--hub", "hk", "--organization", "ms",
                        "--fullname", "Non-Japan-Asia"])
    def test_120_add_ln(self):
        self.noouttest(["add_hub", "--hub", "ln", "--organization", "ms",
                        "--fullname", "Europa"])
    def test_120_add_ny(self):
        self.noouttest(["add_hub", "--hub", "ny", "--organization", "ms",
                        "--fullname", "Americas"])
    def test_130_verify_hub1(self):
        command = "show hub --hub hub1"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Hub: hub1", command)
        self.matchoutput(out, " Fullname: hub1 example", command)
        self.matchoutput(out, " Comments: Some hub comments", command)
        self.matchoutput(out, " Location Parents: [Organization ms]", command)
    def test_130_verify_hub2(self):
        command = "show hub --hub hub2"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Hub: hub2", command)
        self.matchoutput(out, " Fullname: hub2 example", command)
        self.matchoutput(out, " Comments: Some other hub comments", command)
        self.matchoutput(out, " Location Parents: [Organization hubtest]",
                         command)
    def test_130_show_all(self):
        command = "show hub --all"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Hub: hub1", command)
        self.matchoutput(out, "Hub: hub2", command)
    def test_200_add_hub1_net(self):
        # Attach a network to hub1 so the deletion guard can be tested.
        self.net.allocate_network(self, "hub1_net", 24, "unknown", "hub", "hub1",
                                  comments="Made-up network")
    def test_201_del_hub1_fail(self):
        # Deleting a hub that still has networks must be rejected.
        command = "del hub --hub hub1"
        err = self.badrequesttest(command.split(" "))
        self.matchoutput(err,
                         "Bad Request: Could not delete hub hub1, networks "
                         "were found using this location.",
                         command)
    def test_202_cleanup_hub1_net(self):
        self.net.dispose_network(self, "hub1_net")
    def test_210_del_hub1(self):
        # With the network gone, deletion succeeds.
        command = "del hub --hub hub1"
        self.noouttest(command.split(" "))
    def test_220_del_hub1_again(self):
        # Deleting twice must report not-found, not succeed silently.
        command = "del hub --hub hub1"
        out = self.notfoundtest(command.split(" "))
        self.matchoutput(out, "Hub hub1 not found.", command)
    def test_230_del_hub2(self):
        command = "del hub --hub hub2"
        self.noouttest(command.split(" "))
    def test_240_del_hubtest_org(self):
        command = "del organization --organization hubtest"
        self.noouttest(command.split(" "))
    def test_250_add_hub_badname(self):
        # Hub names with spaces are rejected by input validation.
        command = ["add_hub", "--hub", "foo bar"]
        out = self.badrequesttest(command)
        self.matchoutput(out, "'foo bar' is not a valid value for Hub", command)
    def test_300_verify_hub1(self):
        command = "show hub --hub hub1"
        out = self.notfoundtest(command.split(" "))
        self.matchoutput(out, "Hub hub1 not found.", command)
    def test_300_verify_hub2(self):
        command = "show hub --hub hub2"
        out = self.notfoundtest(command.split(" "))
        self.matchoutput(out, "Hub hub2 not found.", command)
    def test_300_verify_all(self):
        command = "show hub --all"
        out = self.commandtest(command.split(" "))
        self.matchclean(out, "Hub: hub1", command)
        self.matchclean(out, "Hub: hub2", command)
if __name__ == '__main__':
    # Run just this module's tests with verbose output.
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(loader.loadTestsFromTestCase(TestHub))
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def get_property_value(dictionary, property_name, default_value=None, trim_string=False,
                       empty_value=""):
  """
  Get a property value from a dictionary, applying rules as necessary.

  If dictionary does not contain a value for property_name or the value for
  property_name is None, default_value is used as the value to return. Then, if
  trim_string is True and the value is None or the value is an empty string,
  empty_value will be returned, else the (current) value is returned.

  Note: the property value will most likely be a string or a unicode string,
  however in the event it is not (for example a number), this method will behave
  properly and return the value as is.

  :param dictionary: a dictionary of values
  :param property_name: the name of a dictionary item to retrieve
  :param default_value: the value to use if the item is not in the dictionary or the value of the item is None
  :param trim_string: a Boolean value indicating whether to strip whitespace from the value (True) or not (False)
  :param empty_value: the value to use if the (current) value is None or an empty string, if trim_string is True
  :return: the requested property value with rules applied
  """
  # Fall back to default_value when the key is missing or maps to None.
  if property_name in dictionary:
    value = dictionary[property_name]
    if value is None:
      value = default_value
  else:
    value = default_value

  if trim_string:
    # If the value is none, consider it empty...
    if value is None:
      value = empty_value
    elif (type(value) == str) or (type(value) == unicode):
      # NOTE: `unicode` only exists on Python 2 (this file also uses
      # iteritems); plain str values short-circuit before reaching it.
      value = value.strip()
      if len(value) == 0:
        value = empty_value

  return value
def get_unstructured_data(dictionary, property_name):
  """
  Collect all entries whose keys start with "<property_name>/".

  Returns a new dict mapping the key remainder (prefix stripped) to the
  original value.  Note: uses dict.iteritems(), so this is Python 2 only.

  :param dictionary: a dictionary of values
  :param property_name: the key prefix (without the trailing '/') to select
  :return: a dict of the matching entries with the prefix removed from keys
  """
  prefix = property_name + '/'
  prefix_len = len(prefix)
  return dict((k[prefix_len:], v) for k, v in dictionary.iteritems() if k.startswith(prefix))
def split_host_and_port(host):
    """
    Splits a string into its host and port components
    :param host: a string matching the following pattern: <host name | ip address>[:port]
    :return: a dictionary containing a 'host' entry and, if a port was declared, a 'port'
             entry (as an int); or None if host is None
    """
    if host is None:
        return None
    host_and_port = {}
    # str.split always returns a non-empty list, so the 'host' entry is always set;
    # the original code's `parts is not None` / `length > 0` checks were dead branches.
    parts = host.split(":")
    host_and_port['host'] = parts[0]
    if len(parts) > 1:
        host_and_port['port'] = int(parts[1])
    return host_and_port
def set_port(host, port):
    """
    Sets the port for a host specification, potentially replacing an existing port declaration
    :param host: a string matching the following pattern: <host name | ip address>[:port]
    :param port: a string or integer declaring the (new) port
    :return: a string declaring the new host/port specification
    """
    if port is None:
        # Nothing to set; hand the specification back untouched.
        return host
    parsed = split_host_and_port(host)
    if (parsed is None) or ('host' not in parsed):
        # Unparseable input: leave it as-is rather than guessing.
        return host
    return "%s:%s" % (parsed['host'], port)
|
class MapQuery(object):
    """Query to retrieve complete ways and relations in an area."""
    _QUERY_TEMPLATE = "(node({south},{west},{north},{east});<;);"
    def __init__(self, south, west, north, east):
        """
        Initialize query with given bounding box.
        :param south: southern limit (minimum latitude) of the bounding box
        :param west: western limit (minimum longitude) of the bounding box
        :param north: northern limit (maximum latitude) of the bounding box
        :param east: eastern limit (maximum longitude) of the bounding box
        """
        self.west = west
        self.south = south
        self.east = east
        self.north = north
    def __str__(self):
        # Render the Overpass QL statement for this bounding box.
        return self._QUERY_TEMPLATE.format(
            west=self.west,
            south=self.south,
            east=self.east,
            north=self.north
        )
class WayQuery(object):
    """Query to retrieve a set of ways and their dependent nodes satisfying
    the input parameters"""
    _QUERY_TEMPLATE = "(way{query_parameters});(._;>;);"
    def __init__(self, query_parameters):
        """Initialize a query for a set of ways satisfying the given parameters.
        :param query_parameters Overpass QL query parameters"""
        self.query_parameters = query_parameters
    def __str__(self):
        """Render the query as an Overpass QL string."""
        return self._QUERY_TEMPLATE.format(query_parameters=self.query_parameters)
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import functools
import logging
import os
import shutil
import sys
from collections import defaultdict
from pex.fetcher import Fetcher
from pex.pex import PEX
from pex.platforms import Platform
from pex.resolver import resolve
from twitter.common.collections import OrderedSet
from pants.backend.codegen.antlr.python.python_antlr_library import PythonAntlrLibrary
from pants.backend.codegen.thrift.python.python_thrift_library import PythonThriftLibrary
from pants.backend.python.antlr_builder import PythonAntlrBuilder
from pants.backend.python.python_requirement import PythonRequirement
from pants.backend.python.targets.python_binary import PythonBinary
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.backend.python.targets.python_tests import PythonTests
from pants.backend.python.thrift_builder import PythonThriftBuilder
from pants.base.build_environment import get_buildroot
from pants.build_graph.files import Files
from pants.build_graph.prep_command import PrepCommand
from pants.build_graph.resources import Resources
from pants.build_graph.target import Target
from pants.invalidation.build_invalidator import BuildInvalidator, CacheKeyGenerator
from pants.util.dirutil import safe_mkdir, safe_mkdtemp, safe_rmtree
logger = logging.getLogger(__name__)
class PythonChroot(object):
  """Assembles a chroot (via the supplied PEX builder) for a closed set of python
  targets: their sources and resources, requirements generated from thrift/antlr
  code-gen, and the resolved 3rdparty distributions they depend on.
  """
  # Maps each supported target type to the bucket name used by resolve()/dump().
  _VALID_DEPENDENCIES = {
    Files: 'files',
    PrepCommand: 'prep',
    PythonLibrary: 'libraries',
    PythonRequirementLibrary: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests',
    Resources: 'resources'
  }
  class InvalidDependencyException(Exception):
    """Raised when the target closure contains a non-python dependency."""
    def __init__(self, target):
      super(PythonChroot.InvalidDependencyException, self).__init__(
        'Not a valid Python dependency! Found: {}'.format(target))
  @staticmethod
  def get_platforms(platform_list):
    """Expand the special name 'current' to the local platform; de-dup the rest."""
    return tuple({Platform.current() if p == 'current' else p for p in platform_list})
  def __init__(self,
               python_setup,
               python_repos,
               ivy_bootstrapper,
               thrift_binary_factory,
               interpreter,
               builder,
               targets,
               platforms,
               extra_requirements=None,
               log=None):
    self._python_setup = python_setup
    self._python_repos = python_repos
    self._ivy_bootstrapper = ivy_bootstrapper
    self._thrift_binary_factory = thrift_binary_factory
    self._interpreter = interpreter
    self._builder = builder
    self._targets = targets
    self._platforms = platforms
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._logger = log or logger
    # Note: unrelated to the general pants artifact cache.
    # Cache is keyed by interpreter identity so different interpreters don't collide.
    self._artifact_cache_root = os.path.join(
      self._python_setup.artifact_cache_dir, str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._artifact_cache_root)
  def delete(self):
    """Deletes this chroot from disk if it has been dumped."""
    safe_rmtree(self.path())
  def debug(self, msg):
    """Log msg at debug level via the configured logger."""
    self._logger.debug(msg)
  def path(self):
    """Return the resolved on-disk path of the chroot."""
    return os.path.realpath(self._builder.path())
  def pex(self):
    """Return a PEX wrapping this chroot for the configured interpreter."""
    return PEX(self.path(), interpreter=self._interpreter)
  def package_pex(self, filename):
    """Package into a PEX zipfile.
    :param filename: The filename where the PEX should be stored.
    """
    self._builder.build(filename)
  def _dump_library(self, library):
    """Copy a library target's sources and resources into the chroot."""
    def copy_to_chroot(base, path, add_function):
      # Chroot-relative paths mirror the source-root-relative layout.
      src = os.path.join(get_buildroot(), base, path)
      add_function(src, path)
    self.debug(' Dumping library: {}'.format(library))
    for relpath in library.sources_relative_to_source_root():
      try:
        copy_to_chroot(library.target_base, relpath, self._builder.add_source)
      except OSError:
        logger.error("Failed to copy {path} for library {library}"
                     .format(path=os.path.join(library.target_base, relpath),
                             library=library))
        raise
    for resources_tgt in library.resources:
      for resource_file_from_source_root in resources_tgt.sources_relative_to_source_root():
        try:
          copy_to_chroot(resources_tgt.target_base, resource_file_from_source_root,
                         self._builder.add_resource)
        except OSError:
          logger.error("Failed to copy {path} for resource {resource}"
                       .format(path=os.path.join(resources_tgt.target_base,
                                                 resource_file_from_source_root),
                               resource=resources_tgt.address.spec))
          raise
  def _dump_requirement(self, req):
    """Register a 3rdparty requirement with the PEX builder."""
    self.debug(' Dumping requirement: {}'.format(req))
    self._builder.add_requirement(req)
  def _dump_distribution(self, dist):
    """Add a resolved distribution to the chroot."""
    self.debug(' Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
    self._builder.add_distribution(dist)
  def _generate_requirement(self, library, builder_cls):
    """Build (or reuse a cached) sdist for a code-gen library and return a
    PythonRequirement pointing at the local cache directory that holds it."""
    library_key = self._key_generator.key_for_target(library)
    builder = builder_cls(target=library,
                          root_dir=get_buildroot(),
                          target_suffix='-' + library_key.hash[:8])
    cache_dir = os.path.join(self._artifact_cache_root, library_key.id)
    # Only rebuild the sdist when the target's fingerprint has changed.
    if self._build_invalidator.needs_update(library_key):
      sdist = builder.build(interpreter=self._interpreter)
      safe_mkdir(cache_dir)
      shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
      self._build_invalidator.update(library_key)
    return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)
  def _generate_thrift_requirement(self, library):
    """Generate python code for a thrift library and package it as a requirement."""
    thrift_builder = functools.partial(PythonThriftBuilder,
                                       thrift_binary_factory=self._thrift_binary_factory,
                                       workdir=safe_mkdtemp(dir=self.path(), prefix='thrift.'))
    return self._generate_requirement(library, thrift_builder)
  def _generate_antlr_requirement(self, library):
    """Generate python code for an antlr library and package it as a requirement."""
    antlr_builder = functools.partial(PythonAntlrBuilder,
                                      ivy_bootstrapper=self._ivy_bootstrapper,
                                      workdir=safe_mkdtemp(dir=self.path(), prefix='antlr.'))
    return self._generate_requirement(library, antlr_builder)
  def resolve(self, targets):
    """Walk the transitive closure of targets, bucketing each by type.
    :returns: a dict of bucket name (see _VALID_DEPENDENCIES) -> OrderedSet of targets.
    :raises InvalidDependencyException: on a target of an unrecognized type.
    """
    children = defaultdict(OrderedSet)
    def add_dep(trg):
      # isinstance matching must come first so subclasses of Target are bucketed;
      # bare Target instances (e.g. aggregation targets) are silently skipped.
      for target_type, target_key in self._VALID_DEPENDENCIES.items():
        if isinstance(trg, target_type):
          children[target_key].add(trg)
          return
        elif type(trg) == Target:
          return
      raise self.InvalidDependencyException(trg)
    for target in targets:
      target.walk(add_dep)
    return children
  def dump(self):
    """Populate the chroot: sources, generated requirements, declared requirements
    and their resolved distributions.
    :returns: the underlying PEX builder, ready to be built or packaged.
    """
    self.debug('Building chroot for {}:'.format(self._targets))
    targets = self.resolve(self._targets)
    for lib in targets['libraries'] | targets['binaries']:
      self._dump_library(lib)
    generated_reqs = OrderedSet()
    if targets['thrifts']:
      for thr in targets['thrifts']:
        generated_reqs.add(self._generate_thrift_requirement(thr))
      # Generated thrift code needs the thrift runtime at execution time.
      generated_reqs.add(PythonRequirement('thrift', use_2to3=True))
    for antlr in targets['antlrs']:
      generated_reqs.add(self._generate_antlr_requirement(antlr))
    reqs_from_libraries = OrderedSet()
    for req_lib in targets['reqs']:
      for req in req_lib.payload.requirements:
        reqs_from_libraries.add(req)
    reqs_to_build = OrderedSet()
    find_links = OrderedSet()
    for req in reqs_from_libraries | generated_reqs | self._extra_requirements:
      if not req.should_build(self._interpreter.python, Platform.current()):
        self.debug('Skipping {} based upon version filter'.format(req))
        continue
      reqs_to_build.add(req)
      self._dump_requirement(req.requirement)
      if req.repository:
        find_links.add(req.repository)
    distributions = self._resolve_multi(reqs_to_build, find_links)
    locations = set()
    for platform, dist_set in distributions.items():
      for dist in dist_set:
        # The same dist can satisfy multiple platforms; only dump it once.
        if dist.location not in locations:
          self._dump_distribution(dist)
          locations.add(dist.location)
    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)
    return self._builder
  def _resolve_multi(self, requirements, find_links):
    """Multi-platform dependency resolution for PEX files.
    Given a pants configuration and a set of requirements, return a map of platform name -> list
    of :class:`pkg_resources.Distribution` instances needed to satisfy them on that platform.
    That may involve distributions for multiple platforms.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param find_links: Additional paths to search for source packages during resolution.
    """
    distributions = dict()
    platforms = self.get_platforms(self._platforms or self._python_setup.platforms)
    fetchers = self._python_repos.get_fetchers()
    fetchers.extend(Fetcher([path]) for path in find_links)
    context = self._python_repos.get_network_context()
    for platform in platforms:
      requirements_cache_dir = os.path.join(self._python_setup.resolver_cache_dir,
                                            str(self._interpreter.identity))
      distributions[platform] = resolve(
        requirements=[req.requirement for req in requirements],
        interpreter=self._interpreter,
        fetchers=fetchers,
        platform=platform,
        context=context,
        cache=requirements_cache_dir,
        cache_ttl=self._python_setup.resolver_cache_ttl,
        allow_prereleases=self._python_setup.resolver_allow_prereleases)
    return distributions
|
from __future__ import division
import datetime
from django.conf import settings
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
class BaseUsage(object):
    """Common logic for computing per-project compute/network/volume usage and
    quota limits over a user-selectable date range.
    """
    # Whether deleted instances are included in the usage listing.
    show_deleted = False
    def __init__(self, request, project_id=None):
        self.project_id = project_id or request.user.tenant_id
        self.request = request
        self.summary = {}
        self.usage_list = []
        self.limits = {}
        self.quotas = {}
    @property
    def today(self):
        """Current timezone-aware datetime."""
        return timezone.now()
    @property
    def first_day(self):
        """Start date of the reporting window.
        Either OVERVIEW_DAYS_RANGE days back from today, or -- when that setting
        is falsy -- the first day of the current month.
        """
        days_range = getattr(settings, 'OVERVIEW_DAYS_RANGE', 1)
        if days_range:
            return self.today.date() - datetime.timedelta(days=days_range)
        else:
            return datetime.date(self.today.year, self.today.month, 1)
    @staticmethod
    def get_start(year, month, day):
        """Return the given date at 00:00:00 as an aware UTC datetime."""
        start = datetime.datetime(year, month, day, 0, 0, 0)
        return timezone.make_aware(start, timezone.utc)
    @staticmethod
    def get_end(year, month, day):
        """Return the given date at 23:59:59 as an aware UTC datetime."""
        end = datetime.datetime(year, month, day, 23, 59, 59)
        return timezone.make_aware(end, timezone.utc)
    def get_instances(self):
        """Flatten the per-project usage objects into one list of server usages."""
        instance_list = []
        # NOTE: list comprehension used purely for its extend() side effect.
        [instance_list.extend(u.server_usages) for u in self.usage_list]
        return instance_list
    def get_date_range(self):
        """Return (start, end) aware datetimes for the reporting window.
        Uses the bound date form when valid; otherwise falls back to the
        default window (first_day..today) and flags invalid input.
        """
        if not hasattr(self, "start") or not hasattr(self, "end"):
            args_start = (self.first_day.year, self.first_day.month,
                          self.first_day.day)
            args_end = (self.today.year, self.today.month, self.today.day)
            form = self.get_form()
            if form.is_valid():
                start = form.cleaned_data['start']
                end = form.cleaned_data['end']
                args_start = (start.year,
                              start.month,
                              start.day)
                args_end = (end.year,
                            end.month,
                            end.day)
            elif form.is_bound:
                messages.error(self.request,
                               _("Invalid date format: "
                                 "Using today as default."))
        self.start = self.get_start(*args_start)
        self.end = self.get_end(*args_end)
        return self.start, self.end
    def init_form(self):
        """Seed start/end with the default window and return them as dates."""
        self.start = self.first_day
        self.end = self.today.date()
        return self.start, self.end
    def get_form(self):
        """Lazily build (and cache) the date-range form.
        Dates come from GET parameters, falling back to the session, and
        finally to the default window; chosen dates are persisted in the
        session for subsequent requests.
        """
        if not hasattr(self, 'form'):
            req = self.request
            start = req.GET.get('start', req.session.get('usage_start'))
            end = req.GET.get('end', req.session.get('usage_end'))
            if start and end:
                # bound form
                self.form = forms.DateForm({'start': start, 'end': end})
            else:
                # non-bound form
                init = self.init_form()
                start = init[0].isoformat()
                end = init[1].isoformat()
                self.form = forms.DateForm(initial={'start': start,
                                                    'end': end})
            req.session['usage_start'] = start
            req.session['usage_end'] = end
        return self.form
    def _get_neutron_usage(self, limits, resource_name):
        """Count current usage of a neutron resource and record it in limits.
        On API failure the count defaults to 0 and the error is surfaced to
        the user via the exceptions framework.
        """
        resource_map = {
            'floatingip': {
                'api': api.neutron.tenant_floating_ip_list,
                'limit_name': 'totalFloatingIpsUsed',
                'message': _('Unable to retrieve floating IP addresses.')
            },
            'security_group': {
                'api': api.neutron.security_group_list,
                'limit_name': 'totalSecurityGroupsUsed',
                'message': _('Unable to retrieve security groups.')
            }
        }
        resource = resource_map[resource_name]
        try:
            method = resource['api']
            current_used = len(method(self.request))
        except Exception:
            current_used = 0
            msg = resource['message']
            exceptions.handle(self.request, msg)
        limits[resource['limit_name']] = current_used
    def _set_neutron_limit(self, limits, neutron_quotas, resource_name):
        """Record the quota ceiling for a neutron resource in limits.
        A missing quota set or a -1 quota value is treated as unlimited.
        """
        limit_name_map = {
            'floatingip': 'maxTotalFloatingIps',
            'security_group': 'maxSecurityGroups',
        }
        if neutron_quotas is None:
            resource_max = float("inf")
        else:
            resource_max = getattr(neutron_quotas.get(resource_name),
                                   'limit', float("inf"))
            if resource_max == -1:
                resource_max = float("inf")
        limits[limit_name_map[resource_name]] = resource_max
    def get_neutron_limits(self):
        """Populate self.limits with neutron floating-IP/security-group usage
        and quota ceilings, when the network service is enabled."""
        if not api.base.is_service_enabled(self.request, 'network'):
            return
        try:
            neutron_quotas_supported = (
                api.neutron.is_quotas_extension_supported(self.request))
            neutron_sg_used = (
                api.neutron.is_extension_supported(self.request,
                                                   'security-group'))
            if api.neutron.floating_ip_supported(self.request):
                self._get_neutron_usage(self.limits, 'floatingip')
            if neutron_sg_used:
                self._get_neutron_usage(self.limits, 'security_group')
            # Quotas are an optional extension in Neutron. If it isn't
            # enabled, assume the floating IP limit is infinite.
            if neutron_quotas_supported:
                neutron_quotas = api.neutron.tenant_quota_get(self.request,
                                                              self.project_id)
            else:
                neutron_quotas = None
        except Exception:
            # Assume neutron security group and quotas are enabled
            # because they are enabled in most Neutron plugins.
            neutron_sg_used = True
            neutron_quotas = None
            msg = _('Unable to retrieve network quota information.')
            exceptions.handle(self.request, msg)
        self._set_neutron_limit(self.limits, neutron_quotas, 'floatingip')
        if neutron_sg_used:
            self._set_neutron_limit(self.limits, neutron_quotas,
                                    'security_group')
    def get_cinder_limits(self):
        """Get volume limits if cinder is enabled."""
        if not api.cinder.is_volume_service_enabled(self.request):
            return
        try:
            self.limits.update(api.cinder.tenant_absolute_limits(self.request))
        except Exception:
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(self.request, msg)
        return
    def get_limits(self):
        """Aggregate nova, neutron and cinder limits into self.limits."""
        try:
            self.limits = api.nova.tenant_absolute_limits(self.request,
                                                          reserved=True)
        except Exception:
            exceptions.handle(self.request,
                              _("Unable to retrieve limit information."))
        self.get_neutron_limits()
        self.get_cinder_limits()
    def get_usage_list(self, start, end):
        """Hook for subclasses: return usage objects for the given period."""
        return []
    def summarize(self, start, end):
        """Fetch usage for [start, end] and accumulate per-key totals in
        self.summary; invalid ranges are reported instead of queried."""
        if not api.nova.extension_supported('SimpleTenantUsage', self.request):
            return
        if start <= end and start <= self.today:
            # The API can't handle timezone aware datetime, so convert back
            # to naive UTC just for this last step.
            start = timezone.make_naive(start, timezone.utc)
            end = timezone.make_naive(end, timezone.utc)
            try:
                self.usage_list = self.get_usage_list(start, end)
            except Exception:
                exceptions.handle(self.request,
                                  _('Unable to retrieve usage information.'))
        elif end < start:
            messages.error(self.request,
                           _("Invalid time period. The end date should be "
                             "more recent than the start date."))
        elif start > self.today:
            messages.error(self.request,
                           _("Invalid time period. You are requesting "
                             "data from the future which may not exist."))
        for project_usage in self.usage_list:
            project_summary = project_usage.get_summary()
            for key, value in project_summary.items():
                self.summary.setdefault(key, 0)
                self.summary[key] += value
    def csv_link(self):
        """Return the query string for the CSV export of the current range."""
        form = self.get_form()
        data = {}
        if hasattr(form, "cleaned_data"):
            data = form.cleaned_data
        if not ('start' in data and 'end' in data):
            data = {"start": self.today.date(), "end": self.today.date()}
        return "?start=%s&end=%s&format=csv" % (data['start'],
                                                data['end'])
class GlobalUsage(BaseUsage):
    """Usage across all projects (admin view); includes deleted instances."""
    show_deleted = True
    def get_usage_list(self, start, end):
        """Return per-tenant usage objects for the given period."""
        return api.nova.usage_list(self.request, start, end)
class ProjectUsage(BaseUsage):
    """Usage summary for a single project (tenant)."""
    # Instance attributes surfaced in project usage tables.
    attrs = ('memory_mb', 'vcpus', 'uptime',
             'hours', 'local_gb')
    def get_usage_list(self, start, end):
        """Return a one-element tuple with this project's usage for the period,
        annotating each server usage with 'uptime_at' and filtering out deleted
        instances unless show_deleted is set.
        """
        # NOTE(review): GET values are strings, so ?show_deleted=false would be
        # truthy here -- confirm callers only pass the parameter when enabled.
        show_deleted = self.request.GET.get('show_deleted',
                                            self.show_deleted)
        instances = []
        deleted_instances = []
        usage = api.nova.usage_get(self.request, self.project_id, start, end)
        # Attribute may not exist if there are no instances
        if hasattr(usage, 'server_usages'):
            now = self.today
            for server_usage in usage.server_usages:
                # This is a way to phrase uptime in a way that is compatible
                # with the 'timesince' filter. (Use of local time intentional.)
                server_uptime = server_usage['uptime']
                total_uptime = now - datetime.timedelta(seconds=server_uptime)
                server_usage['uptime_at'] = total_uptime
                if server_usage['ended_at'] and not show_deleted:
                    deleted_instances.append(server_usage)
                else:
                    instances.append(server_usage)
            usage.server_usages = instances
        return (usage,)
|
import os
import stat
import subprocess
from keystone.common import logging
from keystone import config
LOG = logging.getLogger(__name__)
CONF = config.CONF
# Directories: owner rwx, group r-x, other r-x (0755).
DIR_PERMS = (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
             stat.S_IRGRP | stat.S_IXGRP |
             stat.S_IROTH | stat.S_IXOTH)
# Public certificates: world-readable (0444).
CERT_PERMS = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
# Private material and CA control files: owner-only rwx (0700).
PRIV_PERMS = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
# Fallback X.509 subject used when generating certificates.
DEFAULT_SUBJECT = '/C=US/ST=Unset/L=Unset/O=Unset/CN=www.example.com'
def file_exists(file_path):
    """Return True if the given filesystem path exists, else False."""
    path_is_present = os.path.exists(file_path)
    return path_is_present
class ConfigurePKI(object):
    """Generate files for PKI signing using OpenSSL.
    Signed tokens require a private key and signing certificate which itself
    must be signed by a CA. This class generates them with workable defaults
    if each of the files are not present
    """
    def __init__(self, keystone_user, keystone_group, **kw):
        # All generated files live alongside the configured CA certificate.
        self.conf_dir = os.path.dirname(CONF.signing.ca_certs)
        self.use_keystone_user = keystone_user
        self.use_keystone_group = keystone_group
        self.ssl_config_file_name = os.path.join(self.conf_dir, "openssl.conf")
        self.ca_key_file = os.path.join(self.conf_dir, "cakey.pem")
        self.request_file_name = os.path.join(self.conf_dir, "req.pem")
        # Substitution values for the openssl command templates and sslconfig.
        self.ssl_dictionary = {'conf_dir': self.conf_dir,
                               'ca_cert': CONF.signing.ca_certs,
                               'ssl_config': self.ssl_config_file_name,
                               'ca_private_key': self.ca_key_file,
                               'ca_cert_cn': 'hostname',
                               'request_file': self.request_file_name,
                               'signing_key': CONF.signing.keyfile,
                               'signing_cert': CONF.signing.certfile,
                               'default_subject': DEFAULT_SUBJECT,
                               'key_size': int(CONF.signing.key_size),
                               'valid_days': int(CONF.signing.valid_days),
                               'ca_password': CONF.signing.ca_password}
    def _make_dirs(self, file_name):
        """Create the parent directory for file_name (owned by the keystone
        group when running as root)."""
        dir = os.path.dirname(file_name)
        if not file_exists(dir):
            os.makedirs(dir, DIR_PERMS)
        if os.geteuid() == 0 and self.use_keystone_group:
            os.chown(dir, -1, self.use_keystone_group)
    def _set_permissions(self, file_name, perms):
        """chmod file_name and, when running as root, chown it to the
        configured keystone user/group."""
        os.chmod(file_name, perms)
        if os.geteuid() == 0:
            os.chown(file_name, self.use_keystone_user or -1,
                     self.use_keystone_group or -1)
    def exec_command(self, command):
        """Interpolate ssl_dictionary into command and run it, raising
        CalledProcessError on a non-zero exit.
        """
        to_exec = command % self.ssl_dictionary
        LOG.info(to_exec)
        # NOTE(review): rsplit(' ') splits on single spaces only; any
        # interpolated value containing a space (e.g. a custom subject)
        # would be mangled -- confirm inputs are space-free.
        subprocess.check_call(to_exec.rsplit(' '))
    def build_ssl_config_file(self):
        """Write the openssl config plus the CA's index.txt/serial files,
        creating each only if missing."""
        if not file_exists(self.ssl_config_file_name):
            self._make_dirs(self.ssl_config_file_name)
            ssl_config_file = open(self.ssl_config_file_name, 'w')
            ssl_config_file.write(self.sslconfig % self.ssl_dictionary)
            ssl_config_file.close()
        self._set_permissions(self.ssl_config_file_name, CERT_PERMS)
        index_file_name = os.path.join(self.conf_dir, 'index.txt')
        if not file_exists(index_file_name):
            index_file = open(index_file_name, 'w')
            index_file.write('')
            index_file.close()
            # NOTE(review): this chmods the config file, not index_file_name --
            # looks like a copy/paste slip; confirm intended target.
            self._set_permissions(self.ssl_config_file_name, PRIV_PERMS)
        serial_file_name = os.path.join(self.conf_dir, 'serial')
        if not file_exists(serial_file_name):
            index_file = open(serial_file_name, 'w')
            index_file.write('01')
            index_file.close()
            # NOTE(review): same here -- serial_file_name is never chmod'ed.
            self._set_permissions(self.ssl_config_file_name, PRIV_PERMS)
    def build_ca_cert(self):
        """Generate the CA private key and self-signed CA certificate if the
        configured CA cert does not yet exist."""
        if not file_exists(CONF.signing.ca_certs):
            if not os.path.exists(self.ca_key_file):
                self._make_dirs(self.ca_key_file)
                self.exec_command('openssl genrsa -out %(ca_private_key)s '
                                  '%(key_size)d -config %(ssl_config)s')
                self._set_permissions(self.ssl_dictionary['ca_private_key'],
                                      stat.S_IRUSR)
            self.exec_command('openssl req -new -x509 -extensions v3_ca '
                              '-passin pass:%(ca_password)s '
                              '-key %(ca_private_key)s -out %(ca_cert)s '
                              '-days %(valid_days)d '
                              '-config %(ssl_config)s '
                              '-subj %(default_subject)s')
            self._set_permissions(self.ssl_dictionary['ca_cert'], CERT_PERMS)
    def build_private_key(self):
        """Generate the token-signing private key if it does not yet exist."""
        signing_keyfile = self.ssl_dictionary['signing_key']
        if not file_exists(signing_keyfile):
            self._make_dirs(signing_keyfile)
            self.exec_command('openssl genrsa -out %(signing_key)s '
                              '%(key_size)d '
                              '-config %(ssl_config)s')
        self._set_permissions(os.path.dirname(signing_keyfile), PRIV_PERMS)
        self._set_permissions(signing_keyfile, stat.S_IRUSR)
    def build_signing_cert(self):
        """Create a CSR for the signing key and have the CA sign it, if the
        signing certificate does not yet exist."""
        if not file_exists(CONF.signing.certfile):
            self._make_dirs(CONF.signing.certfile)
            self.exec_command('openssl req -key %(signing_key)s -new -nodes '
                              '-out %(request_file)s -config %(ssl_config)s '
                              '-subj %(default_subject)s')
            self.exec_command('openssl ca -batch -out %(signing_cert)s '
                              '-config %(ssl_config)s '
                              '-infiles %(request_file)s')
    def run(self):
        """Generate every missing PKI artifact, in dependency order."""
        self.build_ssl_config_file()
        self.build_ca_cert()
        self.build_private_key()
        self.build_signing_cert()
    # openssl.conf template; %-interpolated with ssl_dictionary before writing.
    sslconfig = """
dir = %(conf_dir)s
[ ca ]
default_ca = CA_default
[ CA_default ]
new_certs_dir = $dir
serial = $dir/serial
database = $dir/index.txt
certificate = %(ca_cert)s
private_key = %(ca_private_key)s
default_days = 365
default_md = md5
preserve = no
email_in_dn = no
nameopt = default_ca
certopt = default_ca
policy = policy_match
[ policy_match ]
countryName = match
stateOrProvinceName = match
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ req ]
default_bits = 1024 # Size of keys
default_keyfile = key.pem # name of generated keys
default_md = md5 # message digest algorithm
string_mask = nombstr # permitted characters
distinguished_name = req_distinguished_name
req_extensions = v3_req
[ req_distinguished_name ]
0.organizationName = Organization Name (company)
organizationalUnitName = Organizational Unit Name (department, division)
emailAddress = Email Address
emailAddress_max = 40
localityName = Locality Name (city, district)
stateOrProvinceName = State or Province Name (full name)
countryName = Country Name (2 letter code)
countryName_min = 2
countryName_max = 2
commonName = Common Name (hostname, IP, or your name)
commonName_max = 64
0.organizationName_default = Openstack, Inc
localityName_default = Undefined
stateOrProvinceName_default = Undefined
countryName_default = US
commonName_default = %(ca_cert_cn)s
[ v3_ca ]
basicConstraints = CA:TRUE
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always,issuer:always
[ v3_req ]
basicConstraints = CA:FALSE
subjectKeyIdentifier = hash"""
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants_test.backend.jvm.tasks.jvm_compile.base_compile_integration_test import BaseCompileIT
class ZincCompileIntegrationTest(BaseCompileIT):
  """Integration tests exercising the zinc (incremental scala/java) compile task
  end to end against the example/testprojects in the repo.
  """
  def test_java_src_zinc_compile(self):
    """Zinc compiles the example java sources and tests without error."""
    with self.do_test_compile('examples/src/java/::'):
      # run succeeded as expected
      pass
    with self.do_test_compile('examples/tests/java/::'):
      # run succeeded as expected
      pass
  def test_in_process(self):
    """With debug logging, javac runs in-process rather than forked."""
    with self.temporary_workdir() as workdir:
      with self.temporary_cachedir() as cachedir:
        pants_run = self.run_test_compile(
          workdir, cachedir, 'examples/src/java/org/pantsbuild/example/hello/main',
          extra_args=['-ldebug'], clean_all=True
        )
        self.assertIn('Attempting to call com.sun.tools.javac.api.JavacTool', pants_run.stdout_data)
        self.assertNotIn('Forking javac', pants_run.stdout_data)
  def test_log_level(self):
    """Compiler warnings and errors are surfaced with their level tags."""
    with self.temporary_workdir() as workdir:
      with self.temporary_cachedir() as cachedir:
        target = 'testprojects/src/java/org/pantsbuild/testproject/dummies:compilation_failure_target'
        pants_run = self.run_test_compile(
          workdir, cachedir, target,
          extra_args=['--no-colors'], clean_all=True
        )
        self.assertIn('[warn] import sun.security.x509.X500Name;', pants_run.stdout_data)
        self.assertIn('[error] System2.out.println("Hello World!");', pants_run.stdout_data)
  def test_unicode_source_symbol(self):
    """Compiling a target with unicode symbols succeeds (incl. cache writes)."""
    with self.temporary_workdir() as workdir:
      with self.temporary_cachedir() as cachedir:
        target = 'testprojects/src/scala/org/pantsbuild/testproject/unicode/unicodedep/consumer'
        pants_run = self.run_test_compile(
          workdir, cachedir, target,
          extra_args=[
            '--compile-zinc-name-hashing',
            '--cache-compile-zinc-write-to=["{}/dummy_artifact_cache_dir"]'.format(cachedir),
          ],
          clean_all=True,
        )
        self.assert_success(pants_run)
  def test_apt_compile(self):
    """An annotation processor target produces its class and a per-target
    META-INF service info file with the right content."""
    with self.do_test_compile('testprojects/src/java/org/pantsbuild/testproject/annotation/processor',
                              expected_files=['ResourceMappingProcessor.class',
                                              'javax.annotation.processing.Processor']) as found:
      self.assertTrue(
        self.get_only(found, 'ResourceMappingProcessor.class').endswith(
          'org/pantsbuild/testproject/annotation/processor/ResourceMappingProcessor.class'))
      processor_service_files = found['javax.annotation.processing.Processor']
      # There should be only a per-target service info file.
      self.assertEqual(1, len(processor_service_files))
      processor_service_file = list(processor_service_files)[0]
      self.assertTrue(processor_service_file.endswith(
        'META-INF/services/javax.annotation.processing.Processor'))
      with open(processor_service_file) as fp:
        self.assertEqual('org.pantsbuild.testproject.annotation.processor.ResourceMappingProcessor',
                         fp.read().strip())
  def test_apt_compile_and_run(self):
    """A consumer of an annotation processor gets the processor's output."""
    with self.do_test_compile('testprojects/src/java/org/pantsbuild/testproject/annotation/main',
                              expected_files=['Main.class',
                                              'deprecation_report.txt']) as found:
      self.assertTrue(
        self.get_only(found, 'Main.class').endswith(
          'org/pantsbuild/testproject/annotation/main/Main.class'))
      # This is the proof that the ResourceMappingProcessor annotation processor was compiled in a
      # round and then the Main was compiled in a later round with the annotation processor and its
      # service info file from on its compile classpath.
      with open(self.get_only(found, 'deprecation_report.txt')) as fp:
        self.assertIn('org.pantsbuild.testproject.annotation.main.Main', fp.read().splitlines())
  def test_stale_apt_with_deps(self):
    """An annotation processor with a dependency doesn't pollute other annotation processors.
    At one point, when you added an annotation processor, it stayed configured for all subsequent
    compiles. Meaning that if that annotation processor had a dep that wasn't on the classpath,
    subsequent compiles would fail with missing symbols required by the stale annotation processor.
    """
    # Demonstrate that the annotation processor is working
    with self.do_test_compile(
        'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep/main',
        expected_files=['Main.class', 'Main_HelloWorld.class', 'Main_HelloWorld.java']) as found:
      gen_file = self.get_only(found, 'Main_HelloWorld.java')
      self.assertTrue(gen_file.endswith(
        'org/pantsbuild/testproject/annotation/processorwithdep/main/Main_HelloWorld.java'),
        msg='{} does not match'.format(gen_file))
    # Try to reproduce second compile that fails with missing symbol
    with self.temporary_workdir() as workdir:
      with self.temporary_cachedir() as cachedir:
        # This annotation processor has a unique external dependency
        self.assert_success(self.run_test_compile(
          workdir,
          cachedir,
          'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep::'))
        # When we run a second compile with annotation processors, make sure the previous annotation
        # processor doesn't stick around to spoil the compile
        self.assert_success(self.run_test_compile(
          workdir,
          cachedir,
          'testprojects/src/java/org/pantsbuild/testproject/annotation/processor::',
          clean_all=False))
  def test_fatal_warning(self):
    """Targets opt in/out of fatal warnings; the default flag fills the gap."""
    def test_combination(target, default_fatal_warnings, expect_success):
      # Runs one compile with the given default and checks pass/fail.
      with self.temporary_workdir() as workdir:
        with self.temporary_cachedir() as cachedir:
          if default_fatal_warnings:
            arg = '--java-fatal-warnings'
          else:
            arg = '--no-java-fatal-warnings'
          pants_run = self.run_test_compile(
            workdir,
            cachedir,
            'testprojects/src/java/org/pantsbuild/testproject/compilation_warnings:{}'.format(target),
            extra_args=[arg, '--compile-zinc-warning-args=-C-Xlint:all'])
          if expect_success:
            self.assert_success(pants_run)
          else:
            self.assert_failure(pants_run)
    test_combination('defaultfatal', default_fatal_warnings=True, expect_success=False)
    test_combination('defaultfatal', default_fatal_warnings=False, expect_success=True)
    test_combination('fatal', default_fatal_warnings=True, expect_success=False)
    test_combination('fatal', default_fatal_warnings=False, expect_success=False)
    test_combination('nonfatal', default_fatal_warnings=True, expect_success=True)
    test_combination('nonfatal', default_fatal_warnings=False, expect_success=True)
|
"""Utility methods for working with WSGI servers."""
import sys
import eventlet
import eventlet.wsgi
import greenlet
from paste import deploy
import routes.middleware
import webob.dec
import webob.exc
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
FLAGS = flags.FLAGS
LOG = logging.getLogger('nova.wsgi')
class Server(object):
    """Server class to manage a WSGI server, serving a WSGI application."""
    # Default maximum number of concurrently-served requests (greenthreads).
    default_pool_size = 1000
    def __init__(self, name, app, host=None, port=None, pool_size=None,
                 protocol=eventlet.wsgi.HttpProtocol):
        """Initialize, but do not start, a WSGI server.
        :param name: Pretty name for logging.
        :param app: The WSGI application to serve.
        :param host: IP address to serve the application.
        :param port: Port number to serve the application.
        :param pool_size: Maximum number of eventlets to spawn concurrently.
        :returns: None
        """
        self.name = name
        self.app = app
        self.host = host or "0.0.0.0"
        # Port 0 lets the OS pick a free port; the real one is read back in start().
        self.port = port or 0
        self._server = None
        self._tcp_server = None
        self._socket = None
        self._protocol = protocol
        self._pool = eventlet.GreenPool(pool_size or self.default_pool_size)
        self._logger = logging.getLogger("eventlet.wsgi.server")
        self._wsgi_logger = logging.WritableLogger(self._logger)
    def _start(self):
        """Run the blocking eventlet WSGI server.
        :returns: None
        """
        eventlet.wsgi.server(self._socket,
                             self.app,
                             protocol=self._protocol,
                             custom_pool=self._pool,
                             log=self._wsgi_logger)
    def start(self, backlog=128):
        """Start serving a WSGI application.
        :param backlog: Maximum number of queued connections.
        :returns: None
        """
        self._socket = eventlet.listen((self.host, self.port), backlog=backlog)
        self._server = eventlet.spawn(self._start)
        # Read back the actual bound address (relevant when port was 0).
        (self.host, self.port) = self._socket.getsockname()
        LOG.info(_("Started %(name)s on %(host)s:%(port)s") % self.__dict__)
    def stop(self):
        """Stop this server.
        This is not a very nice action, as currently the method by which a
        server is stopped is by killing its eventlet.
        :returns: None
        """
        LOG.info(_("Stopping WSGI server."))
        self._server.kill()
        if self._tcp_server is not None:
            LOG.info(_("Stopping raw TCP server."))
            self._tcp_server.kill()
    def start_tcp(self, listener, port, host='0.0.0.0', key=None, backlog=128):
        """Run a raw TCP server with the given application.
        :param listener: callable invoked with each accepted socket.
        """
        # NOTE(review): the `key` parameter is accepted but never used -- confirm
        # whether TLS support was intended here.
        arg0 = sys.argv[0]
        LOG.info(_('Starting TCP server %(arg0)s on %(host)s:%(port)s')
                 % locals())
        socket = eventlet.listen((host, port), backlog=backlog)
        self._tcp_server = self._pool.spawn_n(self._run_tcp, listener, socket)
    def wait(self):
        """Block, until the server has stopped.
        Waits on the server's eventlet to finish, then returns.
        :returns: None
        """
        try:
            self._server.wait()
        except greenlet.GreenletExit:
            LOG.info(_("WSGI server has stopped."))
    def _run_tcp(self, listener, socket):
        """Start a raw TCP server in a new green thread."""
        while True:
            try:
                new_sock, address = socket.accept()
                self._pool.spawn_n(listener, new_sock)
            except (SystemExit, KeyboardInterrupt):
                # NOTE(review): this swallows shutdown signals and keeps
                # looping -- confirm whether `break` was intended.
                pass
class Request(webob.Request):
    """Request type used by this module's WSGI plumbing.

    Currently identical to webob.Request; exists as a project-owned hook
    for the RequestClass used by the wsgify decorators below.
    """
    pass
class Application(object):
    """Base WSGI application wrapper. Subclasses need to implement __call__."""
    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste app factory: build an instance from paste.deploy config.

        Keys found under the [app:APPNAME] section of the paste config
        arrive here as keyword arguments and are forwarded verbatim to
        ``__init__``.  For example::

            [app:wadl]
            latest_version = 1.3
            paste.app_factory = nova.api.fancy_api:Wadl.factory

        results in ``fancy_api.Wadl(latest_version='1.3')``.  Subclasses
        can re-implement ``factory``, but the kwarg pass-through usually
        makes that unnecessary.
        """
        return cls(**local_config)
    def __call__(self, environ, start_response):
        r"""Handle a request; subclasses must override this.

        A typical implementation looks like::

            @webob.dec.wsgify(RequestClass=Request)
            def __call__(self, req):
                # Any of these work as a return value: a plain string,
                # a webob.exc HTTP error page, a webob Response object,
                # another WSGI app, or req.get_response(self.application).
                # Alternatively set req.response and return None.
                return exc.HTTPForbidden(detail='Nice try')

        See the end of http://pythonpaste.org/webob/modules/dec.html
        for more info.
        """
        raise NotImplementedError(_('You must implement __call__'))
class Middleware(Application):
    """Base WSGI middleware wrapper.

    A middleware wraps another application and is invoked in its place.
    By default it simply forwards the request to the wrapped app; override
    process_request/process_response (or __call__) to customize behavior.
    """
    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste filter factory: return a callable that wraps an app.

        Keys under the [filter:APPNAME] section of the paste config are
        forwarded as keyword arguments to ``__init__``.  For example::

            [filter:analytics]
            redis_host = 127.0.0.1
            paste.filter_factory = nova.api.analytics:Analytics.factory

        results in
        ``analytics.Analytics(app_from_paste, redis_host='127.0.0.1')``.
        Subclasses can re-implement ``factory``, but the kwarg pass-through
        usually makes that unnecessary.
        """
        def _filter(app):
            return cls(app, **local_config)
        return _filter
    def __init__(self, application):
        # The next WSGI app down the stack.
        self.application = application
    def process_request(self, req):
        """Hook called on each request.

        Return None to continue down the stack, or a response object to
        short-circuit and return it immediately.
        """
        return None
    def process_response(self, response):
        """Hook for post-processing the wrapped app's response."""
        return response
    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        short_circuit = self.process_request(req)
        if short_circuit:
            return short_circuit
        return self.process_response(req.get_response(self.application))
class Debug(Middleware):
    """Helper class for debugging a WSGI application.
    Can be inserted into any WSGI application chain to get information
    about the request and response.
    """
    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        # Dump the WSGI environ before handing off to the wrapped app.
        # (Python 2 print statements throughout this class.)
        print ('*' * 40) + ' REQUEST ENVIRON'
        for key, value in req.environ.items():
            print key, '=', value
        print
        resp = req.get_response(self.application)
        # Dump response headers, then wrap the body iterator so the body is
        # echoed to stdout while it is streamed to the client.
        print ('*' * 40) + ' RESPONSE HEADERS'
        for (key, value) in resp.headers.iteritems():
            print key, '=', value
        print
        resp.app_iter = self.print_generator(resp.app_iter)
        return resp
    @staticmethod
    def print_generator(app_iter):
        """Iterator that prints the contents of a wrapper string."""
        print ('*' * 40) + ' BODY'
        for part in app_iter:
            # Write each chunk unmodified and flush so the echo interleaves
            # correctly with other stdout output.
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print
class Router(object):
    """WSGI middleware that maps incoming requests to WSGI apps."""
    def __init__(self, mapper):
        """Create a router for the given routes.Mapper.

        Each route in `mapper` must name a 'controller' -- the WSGI app to
        invoke.  Typically an 'action' is supplied as well so the controller
        can dispatch to an action-specific method.  Examples::

            mapper = routes.Mapper()
            sc = ServerController()
            # Explicit mapping of one route to a controller+action
            mapper.connect(None, '/svrlist', controller=sc, action='list')
            # Actions are all implicitly defined
            mapper.resource('server', 'servers', controller=sc)
            # Arbitrary WSGI app; {path_info:.*} hands it the URL tail
            mapper.connect(None, '/v1.0/{path_info:.*}', controller=BlogApp())
        """
        self.map = mapper
        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          self.map)
    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        """Route the incoming request to a controller based on self.map.

        RoutesMiddleware performs the match, then calls _dispatch, which
        returns a 404 when nothing matched.
        """
        return self._router
    @staticmethod
    @webob.dec.wsgify(RequestClass=Request)
    def _dispatch(req):
        """Forward the matched request to its controller, or 404.

        Invoked by self._router after the route match has been stored in
        req.environ['wsgiorg.routing_args'].
        """
        routing_args = req.environ['wsgiorg.routing_args'][1]
        if not routing_args:
            return webob.exc.HTTPNotFound()
        return routing_args['controller']
class Loader(object):
    """Used to load WSGI applications from paste configurations."""
    def __init__(self, config_path=None):
        """Initialize the loader, and attempt to find the config.

        :param config_path: Full or relative path to the paste config;
                            falls back to FLAGS.api_paste_config.
        :returns: None
        """
        self.config_path = utils.find_config(
            config_path or FLAGS.api_paste_config)
    def load_app(self, name):
        """Return the paste URLMap wrapped WSGI application.

        :param name: Name of the application to load.
        :returns: Paste URLMap object wrapping the requested application.
        :raises: `nova.exception.PasteAppNotFound`
        """
        try:
            return deploy.loadapp("config:%s" % self.config_path, name=name)
        except LookupError as err:
            # Surface the paste error, then raise the project-level one.
            LOG.error(err)
            raise exception.PasteAppNotFound(name=name, path=self.config_path)
|
import json
import logging
import time
import random
import requests
from .basic import BasicHttpClient
class KylinHttpClient(BasicHttpClient):  # pylint: disable=too-many-public-methods
    """HTTP client for the Kylin REST API (endpoints under /kylin/api)."""
    # URL template; concrete host/port are substituted in __init__.
    _base_url = 'http://{host}:{port}/kylin/api'
    def __init__(self, host, port, version):
        """Create a client bound to one Kylin instance.

        :param host: Kylin server host
        :param port: Kylin server port
        :param version: Kylin version string, stored for callers' reference
        """
        super().__init__(host, port)
        self._headers = {
            'Content-Type': 'application/json;charset=utf-8'
        }
        # Resolve the class-level URL template against this instance.
        self._base_url = self._base_url.format(host=self._host, port=self._port)
        self.generic_project = "generic_test_project"
        self.pushdown_project = "pushdown_test_project"
        self.version = version
    def login(self, username, password):
        """Authenticate the inner session via HTTP basic auth.

        The POST establishes the session; the follow-up GET returns the
        authenticated account info using that session.
        """
        self._inner_session.request('POST', self._base_url + '/user/authentication', auth=(username, password))
        return self._request('GET', '/user/authentication', inner_session=True)
    def check_login_state(self):
        """Return the account info for the currently logged-in session."""
        return self._request('GET', '/user/authentication', inner_session=True)
    def get_session(self):
        """Expose the underlying requests.Session used for login-based calls."""
        return self._inner_session
    def logout(self):
        # Drop the authenticated session client-side by replacing it with a
        # fresh one; no server-side logout endpoint is called.
        self._inner_session = requests.Session()
def list_projects(self, limit=100, offset=0):
params = {'limit': limit, 'offset': offset}
resp = self._request('GET', '/projects', params=params)
return resp
def create_project(self, project_name, description=None, override_kylin_properties=None):
data = {'name': project_name,
'description': description,
'override_kylin_properties': override_kylin_properties,
}
payload = {
'projectDescData': json.dumps(data),
}
resp = self._request('POST', '/projects', json=payload)
return resp
def update_project(self, project_name, description=None, override_kylin_properties=None):
"""
:param project_name: project name
:param description: description of project
:param override_kylin_properties: the kylin properties that needs to be override
:return:
"""
data = {'name': project_name,
'description': description,
'override_kylin_properties': override_kylin_properties,
}
payload = {
'formerProjectName': project_name,
'projectDescData': json.dumps(data),
}
resp = self._request('PUT', '/projects', json=payload)
return resp
def delete_project(self, project_name, force=False):
"""
delete project API, before delete the project, make sure the project does not contain models and cubes.
If you want to force delete the project, make force=True
:param project_name: project name
:param force: if force, delete cubes and models before delete project
:return:
"""
if force:
cubes = self.list_cubes(project_name)
logging.debug("Cubes to be deleted: %s", cubes)
while cubes:
for cube in cubes:
self.delete_cube(cube['name'])
cubes = self.list_cubes(project_name)
models = self.list_model_desc(project_name)
logging.debug("Models to be deleted: %s", models)
while models:
for model in models:
self.delete_model(model['name'])
models = self.list_model_desc(project_name)
url = '/projects/{project}'.format(project=project_name)
resp = self._request('DELETE', url)
return resp
    def load_table(self, project_name, tables, calculate=False):
        """
        load or reload table api
        :param calculate: whether to trigger cardinality calculation, default False
        :param project_name: project name
        :param tables: table list, for instance, ['default.kylin_fact', 'default.kylin_sales']
        :return:
        """
        # workaround of #15337
        # time.sleep(random.randint(5, 10))
        # NOTE(review): 'tables' is interpolated into the URL directly, so a
        # comma-separated string seems expected despite the docstring -- confirm.
        url = '/tables/{tables}/{project}/'.format(tables=tables, project=project_name)
        payload = {'calculate': calculate
                   }
        resp = self._request('POST', url, json=payload)
        return resp
def unload_table(self, project_name, tables):
url = '/tables/{tables}/{project}'.format(tables=tables, project=project_name)
resp = self._request('DELETE', url)
return resp
def list_hive_tables(self, project_name, extension=False, user_session=False):
"""
:param project_name: project name
:param extension: specify whether the table's extension information is returned
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/tables'
params = {'project': project_name, 'ext': extension}
resp = self._request('GET', url, params=params, inner_session=user_session)
return resp
def get_table_info(self, project_name, table_name):
"""
:param project_name: project name
:param table_name: table name
:return: hive table information
"""
url = '/tables/{project}/{table}'.format(project=project_name, table=table_name)
resp = self._request('GET', url)
return resp
def get_tables_info(self, project_name, ext='true'):
url = '/tables'
params = {'project': project_name, 'ext': ext}
resp = self._request('GET', url, params=params)
return resp
def get_table_streaming_config(self, project_name, table_name, limit=100, offset=0):
params = {'table': table_name, 'project': project_name, 'limit': limit, 'offset': offset}
resp = self._request('GET', '/streaming/getConfig', params=params)
return resp
def load_kafka_table(self, project_name, kafka_config, streaming_config, table_data, message=None):
url = '/streaming'
payload = {'project': project_name,
'kafkaConfig': json.dumps(kafka_config),
'streamingConfig': json.dumps(streaming_config),
'tableData': json.dumps(table_data),
'message': message}
resp = self._request('POST', url, json=payload)
return resp
def update_kafka_table(self, project_name, kafka_config, streaming_config, table_data, cluster_index=0):
url = '/streaming'
payload = {'project': project_name,
'kafkaConfig': kafka_config,
'streamingConfig': streaming_config,
'tableData': table_data,
'clusterIndex': cluster_index}
resp = self._request('PUT', url, json=payload)
return resp
def list_model_desc(self, project_name=None, model_name=None, limit=100, offset=0):
"""
:param offset:
:param limit:
:param project_name: project name
:param model_name: model name
:return: model desc list
"""
params = {'limit': limit,
'offset': offset,
'modelName': model_name,
'projectName': project_name
}
resp = self._request('GET', '/models', params=params)
return resp
def create_model(self, project_name, model_name, model_desc_data, user_session=False):
url = '/models'
payload = {
'project': project_name,
'model': model_name,
'modelDescData': json.dumps(model_desc_data)
}
logging.debug("Current payload for creating model is %s", payload)
resp = self._request('POST', url, json=payload, inner_session=user_session)
return resp
def update_model(self, project_name, model_name, model_desc_data, user_session=False):
url = '/models'
payload = {
'project': project_name,
'model': model_name,
'modelDescData': json.dumps(model_desc_data)
}
resp = self._request('PUT', url, json=payload, inner_session=user_session)
return resp
def clone_model(self, project_name, model_name, new_model_name):
url = '/models/{model}/clone'.format(model=model_name)
payload = {'modelName': new_model_name, 'project': project_name}
resp = self._request('PUT', url, json=payload)
return resp
def delete_model(self, model_name):
url = '/models/{model}'.format(model=model_name)
# return value is None here
return self._request('DELETE', url)
def get_cube_desc(self, cube_name):
url = '/cube_desc/{cube}'.format(cube=cube_name)
resp = self._request('GET', url)
return resp
def list_cubes(self, project=None, offset=0, limit=10000, cube_name=None, model_name=None, user_session=False):
params = {'projectName': project, 'offset': offset, 'limit': limit,
'cubeName': cube_name, 'modelName': model_name}
resp = self._request('GET', '/cubes/', params=params, inner_session=user_session)
return resp
def get_cube_instance(self, cube_name):
url = '/cubes/{cube}'.format(cube=cube_name)
resp = self._request('GET', url)
return resp
def create_cube(self, project_name, cube_name, cube_desc_data, user_session=False):
# workaround of #15337
time.sleep(random.randint(5, 10))
url = '/cubes'
payload = {
'project': project_name,
'cubeName': cube_name,
'cubeDescData': json.dumps(cube_desc_data)
}
resp = self._request('POST', url, json=payload, inner_session=user_session)
return resp
def update_cube(self, project_name, cube_name, cube_desc_data, user_session=False):
# workaround of #15337
time.sleep(random.randint(5, 10))
url = '/cubes'
payload = {
'project': project_name,
'cubeName': cube_name,
'cubeDescData': json.dumps(cube_desc_data)
}
resp = self._request('PUT', url, json=payload, inner_session=user_session)
return resp
def update_cube_engine(self, cube_name, engine_type):
url = '/cubes/{cube}/engine/{engine}'.format(cube=cube_name, engine=engine_type)
resp = self._request('PUT', url)
return resp
def build_segment(self, cube_name, start_time, end_time, force=False):
"""
:param cube_name: the name of the cube to be built
:param force: force submit mode
:param start_time: long, start time, corresponding to the timestamp in GMT format,
for instance, 1388534400000 corresponding to 2014-01-01 00:00:00
:param end_time: long, end time, corresponding to the timestamp in GMT format
:return:
"""
url = '/cubes/{cube}/build'.format(cube=cube_name)
payload = {
'buildType': 'BUILD',
'startTime': start_time,
'endTime': end_time,
'force': force
}
resp = self._request('PUT', url, json=payload)
return resp
    def full_build_cube(self, cube_name, force=False):
        """
        Build the whole cube as one segment spanning epoch .. 31556995200000.
        :param cube_name: the name of the cube to be built
        :param force: force submit mode
        :return:
        """
        return self.build_segment(cube_name, force=force, start_time=0, end_time=31556995200000)
def merge_segment(self, cube_name, start_time=0, end_time=31556995200000, force=True):
"""
:param cube_name: the name of the cube to be built
:param force: force submit mode
:param start_time: long, start time, corresponding to the timestamp in GMT format,
for instance, 1388534400000 corresponding to 2014-01-01 00:00:00
:param end_time: long, end time, corresponding to the timestamp in GMT format
:return:
"""
url = '/cubes/{cube}/build'.format(cube=cube_name)
payload = {
'buildType': 'MERGE',
'startTime': start_time,
'endTime': end_time,
'force': force
}
resp = self._request('PUT', url, json=payload)
return resp
def refresh_segment(self, cube_name, start_time, end_time, force=True):
"""
:param cube_name: the name of the cube to be built
:param force: force submit mode
:param start_time: long, start time, corresponding to the timestamp in GMT format,
for instance, 1388534400000 corresponding to 2014-01-01 00:00:00
:param end_time: long, end time, corresponding to the timestamp in GMT format
:return:
"""
url = '/cubes/{cube}/build'.format(cube=cube_name)
payload = {
'buildType': 'REFRESH',
'startTime': start_time,
'endTime': end_time,
'force': force
}
resp = self._request('PUT', url, json=payload)
return resp
def delete_segments(self, cube_name, segment_name):
url = '/cubes/{cube}/segs/{segment}'.format(cube=cube_name, segment=segment_name)
resp = self._request('DELETE', url)
return resp
    def build_streaming_cube(self, project_name, cube_name, source_offset_start=0,
                             source_offset_end='9223372036854775807'):
        """
        :param project_name: project name
        :param cube_name: cube name
        :param source_offset_start: long, the start offset where build begins. Here 0 means it is from the last position
        :param source_offset_end: long, the end offset where build ends. 9223372036854775807 (Long.MAX_VALUE) means to
        the end position on Kafka topic.
        :return:
        """
        url = '/cubes/{cube}/segments/build_streaming'.format(cube=cube_name)
        payload = {
            'buildType': 'BUILD',
            'project': project_name,
            'sourceOffsetStart': source_offset_start,
            'sourceOffsetEnd': source_offset_end,
        }
        resp = self._request('PUT', url, json=payload)
        return resp
def build_cube_customized(self, cube_name, source_offset_start, source_offset_end=None, mp_values=None,
force=False):
"""
:param cube_name: cube name
:param source_offset_start: long, the start offset where build begins
:param source_offset_end: long, the end offset where build ends
:param mp_values: string, multiple partition values of corresponding model
:param force: boolean, force submit mode
:return:
"""
url = '/cubes/{cube}/segments/build_customized'.format(cube=cube_name)
payload = {
'buildType': 'BUILD',
'sourceOffsetStart': source_offset_start,
'sourceOffsetEnd': source_offset_end,
'mpValues': mp_values,
'force': force
}
resp = self._request('PUT', url, json=payload)
return resp
def clone_cube(self, project_name, cube_name, new_cube_name):
"""
:param project_name: project name
:param cube_name: cube name of being cloned
:param new_cube_name: cube name to be cloned to
:return:
"""
url = '/cubes/{cube}/clone'.format(cube=cube_name)
payload = {
'cubeName': new_cube_name,
'project': project_name
}
resp = self._request('PUT', url, json=payload)
return resp
def enable_cube(self, cube_name):
url = '/cubes/{cube}/enable'.format(cube=cube_name)
resp = self._request('PUT', url)
return resp
def disable_cube(self, cube_name):
url = '/cubes/{cube}/disable'.format(cube=cube_name)
resp = self._request('PUT', url)
return resp
def purge_cube(self, cube_name):
url = '/cubes/{cube}/purge'.format(cube=cube_name)
resp = self._request('PUT', url)
return resp
def delete_cube(self, cube_name):
url = '/cubes/{cube}'.format(cube=cube_name)
return self._request('DELETE', url)
def list_holes(self, cube_name):
"""
A healthy cube in production should not have holes in the meaning of inconsecutive segments.
:param cube_name: cube name
:return:
"""
url = '/cubes/{cube}/holes'.format(cube=cube_name)
resp = self._request('GET', url)
return resp
def fill_holes(self, cube_name):
"""
For non-streaming data based Cube, Kyligence Enterprise will submit normal build cube job(s) with
corresponding time partition value range(s); For streaming data based Cube, please make sure that
corresponding data is not expired or deleted in source before filling holes, otherwise the build job will fail.
:param cube_name: string, cube name
:return:
"""
url = '/cubes/{cube}/holes'.format(cube=cube_name)
resp = self._request('PUT', url)
return resp
def export_cuboids(self, cube_name):
url = '/cubes/{cube}/cuboids/export'.fomat(cube=cube_name)
resp = self._request('PUT', url)
return resp
def refresh_lookup(self, cube_name, lookup_table):
"""
Only lookup tables of SCD Type 1 are supported to refresh.
:param cube_name: cube name
:param lookup_table: the name of lookup table to be refreshed with the format DATABASE.TABLE
:return:
"""
url = '/cubes/{cube}/refresh_lookup'.format(cube=cube_name)
payload = {
'cubeName': cube_name,
'lookupTableName': lookup_table
}
resp = self._request('PUT', url, json=payload)
return resp
def get_job_info(self, job_id):
url = '/jobs/{job_id}'.format(job_id=job_id)
resp = self._request('GET', url)
return resp
    def get_job_status(self, job_id):
        """Return just the 'job_status' field of the job's info."""
        return self.get_job_info(job_id)['job_status']
def get_step_output(self, job_id, step_id):
url = '/jobs/{jobId}/steps/{stepId}/output'.format(jobId=job_id, stepId=step_id)
resp = self._request('GET', url)
return resp
def pause_job(self, job_id):
url = '/jobs/{jobId}/pause'.format(jobId=job_id)
resp = self._request('PUT', url)
return resp
def resume_job(self, job_id):
url = '/jobs/{jobId}/resume'.format(jobId=job_id)
resp = self._request('PUT', url)
return resp
def discard_job(self, job_id):
url = '/jobs/{jobId}/cancel'.format(jobId=job_id)
resp = self._request('PUT', url)
return resp
def delete_job(self, job_id):
url = '/jobs/{jobId}/drop'.format(jobId=job_id)
resp = self._request('DELETE', url)
return resp
def resubmit_job(self, job_id):
url = '/jobs/{jobId}/resubmit'.format(jobId=job_id)
resp = self._request('PUT', url)
return resp
def list_jobs(self, project_name, status=None, offset=0, limit=10000, time_filter=1, job_search_mode='ALL'):
"""
list jobs in specific project
:param job_search_mode: CUBING_ONLY, CHECKPOINT_ONLY, ALL
:param project_name: project name
:param status: int, 0 -> NEW, 1 -> PENDING, 2 -> RUNNING,
4 -> FINISHED, 8 -> ERROR, 16 -> DISCARDED, 32 -> STOPPED
:param offset: offset of returned result
:param limit: quantity of returned result per page
:param time_filter: int, 0 -> last one day, 1 -> last one week,
2 -> last one month, 3 -> last one year, 4 -> all
:return:
"""
url = '/jobs'
params = {
'projectName': project_name,
'status': status,
'offset': offset,
'limit': limit,
'timeFilter': time_filter,
'jobSearchMode': job_search_mode
}
resp = self._request('GET', url, params=params)
return resp
    def await_all_jobs(self, project_name, waiting_time=30):
        """
        await all jobs to be finished, default timeout is 30 minutes
        :param project_name: project name
        :param waiting_time: timeout, in minutes
        :return: boolean, timeout will return false
        """
        running_flag = ['PENDING', 'RUNNING']
        try_time = 0
        # Polls every 30s, so waiting_time minutes == waiting_time * 2 polls.
        max_try_time = waiting_time * 2
        while try_time < max_try_time:
            jobs = self.list_jobs(project_name)
            all_finished = True
            for job in jobs:
                if job['job_status'] in running_flag:
                    all_finished = False
                    break
                # Fail fast as soon as any job has errored.
                if job['job_status'] == 'ERROR':
                    return False
            if all_finished:
                return True
            time.sleep(30)
            try_time += 1
        return False
    def await_job(self, job_id, waiting_time=20, interval=1, excepted_status=None):
        """
        Await specific job to be given status, default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :param excepted_status: expected job status list ("excepted" kept for
            backward compatibility); defaults to 'ERROR', 'FINISHED', 'DISCARDED'
        :return: boolean, true if the job reached one of the expected statuses
        """
        finish_flags = ['ERROR', 'FINISHED', 'DISCARDED']
        if excepted_status is None:
            excepted_status = finish_flags
        timeout = waiting_time * 60
        start = time.time()
        while time.time() - start < timeout:
            job_status = self.get_job_status(job_id)
            if job_status in excepted_status:
                return True
            # A terminal status that is not expected means the wait failed.
            if job_status in finish_flags:
                return False
            time.sleep(interval)
        return False
    def await_job_finished(self, job_id, waiting_time=20, interval=1):
        """
        Await specific job to be finished, default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :return: boolean, true if the job reached FINISHED
        """
        return self.await_job(job_id, waiting_time, interval, excepted_status=['FINISHED'])
    def await_job_error(self, job_id, waiting_time=20, interval=1):
        """
        Await specific job to be error, default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :return: boolean, true if the job reached ERROR
        """
        return self.await_job(job_id, waiting_time, interval, excepted_status=['ERROR'])
    def await_job_discarded(self, job_id, waiting_time=20, interval=1):
        """
        Await specific job to be discarded, default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :return: boolean, true if the job reached DISCARDED
        """
        return self.await_job(job_id, waiting_time, interval, excepted_status=['DISCARDED'])
    def await_job_step(self, job_id, step, excepted_status=None, waiting_time=20, interval=1):
        """
        Await specific job step to be given status, default timeout is 20 minutes.
        :param job_id: job id
        :param step: job step index into the job's 'steps' list
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :param excepted_status: expected step status list; defaults to
            'ERROR', 'FINISHED', 'DISCARDED'
        :return: boolean, true if the step reached one of the expected statuses
        """
        finish_flags = ['ERROR', 'FINISHED', 'DISCARDED']
        if excepted_status is None:
            excepted_status = finish_flags
        timeout = waiting_time * 60
        start = time.time()
        while time.time() - start < timeout:
            job_info = self.get_job_info(job_id)
            job_status = job_info['steps'][step]['step_status']
            if job_status in excepted_status:
                return True
            # A terminal status that is not expected means the wait failed.
            if job_status in finish_flags:
                return False
            time.sleep(interval)
        return False
    def execute_query(self, project_name, sql, cube_name=None, offset=None, limit=None, backdoortoggles=None,
                      user_session=False,
                      timeout=60):
        """Run a SQL query against a project.

        :param cube_name: when given, OVERRIDES backdoortoggles with a
            DEBUG_TOGGLE_HIT_CUBE toggle pinning the query to that cube
        :param backdoortoggles: dict merged into the payload (ignored when
            cube_name is set)
        :param timeout: request timeout in seconds
        """
        url = '/query'
        payload = {
            'project': project_name,
            'sql': sql,
            'offset': offset,
            'limit': limit
        }
        if cube_name:
            backdoortoggles = {"backdoorToggles": {"DEBUG_TOGGLE_HIT_CUBE": cube_name}}
        if backdoortoggles:
            payload.update(backdoortoggles)
        resp = self._request('POST', url, json=payload, inner_session=user_session, timeout=timeout)
        return resp
def save_query(self, sql_name, project_name, sql, description=None):
url = '/saved_queries'
payload = {
'name': sql_name,
'project': project_name,
'sql': sql,
'description': description
}
self._request('POST', url, json=payload)
def get_queries(self, project_name, user_session=False):
url = '/saved_queries'
params = {
'project': project_name
}
response = self._request('GET', url, params=params, inner_session=user_session)
return response
def remove_query(self, sql_id):
url = '/saved_queries/{id}'.format(id=sql_id)
self._request('DELETE', url)
def list_queryable_tables(self, project_name):
url = '/tables_and_columns'
params = {'project': project_name}
resp = self._request('GET', url, params=params)
return resp
def get_all_system_prop(self, server=None):
url = '/admin/config'
if server is not None:
url = '/admin/config?server={serverName}'.format(serverName=server)
prop_resp = self._request('GET', url).get('config')
property_values = {}
if prop_resp is None:
return property_values
prop_lines = prop_resp.splitlines(False)
for prop_line in prop_lines:
splits = prop_line.split('=')
property_values[splits[0]] = splits[1]
return property_values
def create_user(self, user_name, password, authorities, disabled=False, user_session=False):
"""
create a user
:param user_name: string, target user name
:param password: string, target password
:param authorities: array, user's authorities
:param disabled: boolean, true for disabled user false for enable user
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/user/{username}'.format(username=user_name)
payload = {
'username': user_name,
'password': password,
'authorities': authorities,
'disabled': disabled,
}
resp = self._request('POST', url, json=payload, inner_session=user_session)
return resp
def delete_user(self, user_name, user_session=False):
"""
delete user
:param user_name: string
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/user/{username}'.format(username=user_name)
resp = self._request('DELETE', url, inner_session=user_session)
return resp
def update_user(self, user_name, authorities, password=None, disabled=False,
user_session=False, payload_user_name=None):
"""
update user's info
:param user_name: string, target user name
:param password: string, target password
:param authorities: array, user's authorities
:param disabled: boolean, true for disabled user false for enable user
:param user_session: boolean, true for using login session to execute
:param payload_user_name: string, true for using login session to execute
:return:
"""
url = '/user/{username}'.format(username=user_name)
username_in_payload = user_name if payload_user_name is None else payload_user_name
payload = {
'username': username_in_payload,
'password': password,
'authorities': authorities,
'disabled': disabled,
}
resp = self._request('PUT', url, json=payload, inner_session=user_session)
return resp
def update_user_password(self, user_name, new_password, password=None, user_session=False):
"""
update user's password
:param user_name: string, target for username
:param new_password: string, user's new password
:param password: string, user's old password
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/user/password'
payload = {
'username': user_name,
'password': password,
'newPassword': new_password
}
resp = self._request('PUT', url, json=payload, inner_session=user_session)
return resp
def list_users(self, project_name=None, group_name=None, is_fuzz_match=False, name=None, offset=0, limit=10000
, user_session=False):
"""
list users
:param group_name:string, group name
:param project_name: string, project's name
:param offset: offset of returned result
:param limit: quantity of returned result per page
:param is_fuzz_match: bool, true for param name fuzzy match
:param name: string, user's name
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/user/users'
params = {
'offset': offset,
'limit': limit,
'groupName': group_name,
'project': project_name,
'isFuzzMatch': is_fuzz_match,
'name': name
}
resp = self._request('GET', url, params=params, inner_session=user_session)
return resp
    def list_user_authorities(self, project_name, user_session=False):
        """
        list groups in a project
        NOTE(review): despite the method name, this queries the
        ``/user_group/groups`` endpoint, i.e. it returns the project's user
        groups — confirm whether the name or the endpoint is the intent.
        :param project_name: string, target project name
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user_group/groups'
        params = {
            'project': project_name
        }
        resp = self._request('GET', url, params=params, inner_session=user_session)
        return resp
def create_group(self, group_name, user_session=False):
"""
create a group with group_name
:param group_name: string, target group name
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/user_group/{group_name}'.format(group_name=group_name)
resp = self._request('POST', url, inner_session=user_session)
return resp
def delete_group(self, group_name, user_session=False):
"""
delete group by group_name
:param group_name: string, target group name
:param user_session: boolean, true for using login session to execute
:return:
"""
url = '/user_group/{group_name}'.format(group_name=group_name)
resp = self._request('DELETE', url, inner_session=user_session)
return resp
def add_or_del_users(self, group_name, users):
url = '/user_group/users/{group}'.format(group=group_name)
payload = {'users': users}
resp = self._request('POST', url, json=payload)
return resp
    def _request(self, method, url, **kwargs): # pylint: disable=arguments-differ
        """Prefix *url* with this client's base URL, then delegate to the parent HTTP helper."""
        return super()._request(method, self._base_url + url, **kwargs)
def connect(**conf):
    """Build a ``KylinHttpClient`` from keyword configuration.

    Recognized keys: ``host``, ``port``, ``version`` (missing keys become None).
    """
    return KylinHttpClient(conf.get('host'), conf.get('port'), conf.get('version'))
|
"""Tests for the inputsplitter module.
Authors
-------
* Fernando Perez
* Robert Kern
"""
import unittest
import sys
import nose.tools as nt
from IPython.core import inputsplitter as isp
from IPython.core.tests.test_inputtransformer import syntax, syntax_ml
from IPython.testing import tools as tt
from IPython.utils import py3compat
def mini_interactive_loop(input_func):
    """Minimal example of the logic of an interactive interpreter loop.
    This serves as an example, and it is used by the test system with a fake
    raw_input that simulates interactive input.

    :param input_func: callable taking a prompt string and returning one line
        of input (like ``raw_input``).
    :return: the accumulated source string collected by the InputSplitter.
    """
    from IPython.core.inputsplitter import InputSplitter
    isp = InputSplitter()
    # In practice, this input loop would be wrapped in an outside loop to read
    # input indefinitely, until some exit/quit command was issued. Here we
    # only illustrate the basic inner loop.
    while isp.push_accepts_more():
        # auto-indent: prepend the splitter's current indent to both the
        # prompt and the line fed back into the splitter
        indent = ' '*isp.indent_spaces
        prompt = '>>> ' + indent
        line = indent + input_func(prompt)
        isp.push(line)
    # Here we just return input so we can use it in a test suite, but a real
    # interpreter would instead send it for execution somewhere.
    src = isp.source_reset()
    #print 'Input source was:\n', src # dbg
    return src
def assemble(block):
    """Assemble a block into multi-line sub-blocks.

    Each sub-block (a list of lines) becomes one newline-joined string with a
    trailing newline appended.
    """
    assembled = []
    for sub_block in block:
        assembled.append('\n'.join(sub_block) + '\n')
    return assembled
def pseudo_input(lines):
    """Return a function that acts like raw_input but feeds the input list.

    Once the list is exhausted, every further call returns the empty string.
    """
    feed = iter(lines)
    def raw_in(prompt):
        # Pull the next queued line; fall through to '' when exhausted.
        for line in feed:
            return line
        return ''
    return raw_in
def test_spaces():
    """Check num_ini_spaces over representative leading-whitespace inputs."""
    # pairs of (input line, expected count of initial whitespace characters)
    tests = [('', 0),
             (' ', 1),
             ('\n', 0),
             (' \n', 1),
             ('x', 0),
             (' x', 1),
             ('  x',2),
             ('    x',4),
             # Note: tabs are counted as a single whitespace!
             ('\tx', 1),
             ('\t x', 2),
             ]
    tt.check_pairs(isp.num_ini_spaces, tests)
def test_remove_comments():
    """Check that remove_comments strips '#' comments but keeps line structure."""
    tests = [('text', 'text'),
             ('text # comment', 'text '),
             ('text # comment\n', 'text \n'),
             ('text # comment \n', 'text \n'),
             ('line # c \nline\n','line \nline\n'),
             ('line # c \nline#c2 \nline\nline #c\n\n',
              'line \nline\nline\nline \n\n'),
             ]
    tt.check_pairs(isp.remove_comments, tests)
def test_get_input_encoding():
    """Sanity-check that a usable input-encoding name is returned."""
    encoding = isp.get_input_encoding()
    # NOTE: ``basestring`` is Python 2 only; this test module targets py2.
    nt.assert_true(isinstance(encoding, basestring))
    # simple-minded check that at least encoding a simple string works with the
    # encoding we got.
    nt.assert_equal(u'test'.encode(encoding), b'test')
class NoInputEncodingTestCase(unittest.TestCase):
    """Check the fallback when sys.stdin has no ``encoding`` attribute."""
    def setUp(self):
        # Swap in a stand-in stdin object that carries no attributes at all.
        self.old_stdin = sys.stdin
        class Stub: pass
        sys.stdin = Stub()
    def test(self):
        # With no stdin encoding available, 'ascii' is the expected fallback.
        self.assertEqual(isp.get_input_encoding(), 'ascii')
    def tearDown(self):
        sys.stdin = self.old_stdin
class InputSplitterTestCase(unittest.TestCase):
    """Tests for the base InputSplitter: reset/source bookkeeping, indent
    tracking, dedent-keyword handling, and push/push_accepts_more semantics."""
    def setUp(self):
        self.isp = isp.InputSplitter()
    def test_reset(self):
        # reset() must clear every piece of accumulated state
        isp = self.isp
        isp.push('x=1')
        isp.reset()
        self.assertEqual(isp._buffer, [])
        self.assertEqual(isp.indent_spaces, 0)
        self.assertEqual(isp.source, '')
        self.assertEqual(isp.code, None)
        self.assertEqual(isp._is_complete, False)
    def test_source(self):
        # source accumulates stored lines; source_reset() returns and clears it
        self.isp._store('1')
        self.isp._store('2')
        self.assertEqual(self.isp.source, '1\n2\n')
        self.assertTrue(len(self.isp._buffer)>0)
        self.assertEqual(self.isp.source_reset(), '1\n2\n')
        self.assertEqual(self.isp._buffer, [])
        self.assertEqual(self.isp.source, '')
    def test_indent(self):
        isp = self.isp # shorthand
        isp.push('x=1')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    x=1')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('y=2\n')
        self.assertEqual(isp.indent_spaces, 0)
    def test_indent2(self):
        isp = self.isp
        isp.push('if 1:')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('    x=1')
        self.assertEqual(isp.indent_spaces, 4)
        # Blank lines shouldn't change the indent level
        isp.push(' '*2)
        self.assertEqual(isp.indent_spaces, 4)
    def test_indent3(self):
        isp = self.isp
        # When a multiline statement contains parens or multiline strings, we
        # shouldn't get confused.
        isp.push("if 1:")
        isp.push("    x = (1+\n    2)")
        self.assertEqual(isp.indent_spaces, 4)
    def test_indent4(self):
        isp = self.isp
        # whitespace after ':' should not screw up indent level
        isp.push('if 1: \n    x=1')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('y=2\n')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\t\n    x=1')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('y=2\n')
        self.assertEqual(isp.indent_spaces, 0)
    def test_dedent_pass(self):
        isp = self.isp # shorthand
        # should NOT cause dedent
        isp.push('if 1:\n    passes = 5')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('if 1:\n    pass')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    pass ')
        self.assertEqual(isp.indent_spaces, 0)
    def test_dedent_break(self):
        isp = self.isp # shorthand
        # should NOT cause dedent
        isp.push('while 1:\n    breaks = 5')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('while 1:\n    break')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('while 1:\n    break ')
        self.assertEqual(isp.indent_spaces, 0)
    def test_dedent_continue(self):
        isp = self.isp # shorthand
        # should NOT cause dedent
        isp.push('while 1:\n    continues = 5')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('while 1:\n    continue')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('while 1:\n    continue ')
        self.assertEqual(isp.indent_spaces, 0)
    def test_dedent_raise(self):
        isp = self.isp # shorthand
        # should NOT cause dedent
        isp.push('if 1:\n    raised = 4')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('if 1:\n    raise TypeError()')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    raise')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    raise ')
        self.assertEqual(isp.indent_spaces, 0)
    def test_dedent_return(self):
        isp = self.isp # shorthand
        # should NOT cause dedent
        isp.push('if 1:\n    returning = 4')
        self.assertEqual(isp.indent_spaces, 4)
        isp.push('if 1:\n    return 5 + 493')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    return')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    return ')
        self.assertEqual(isp.indent_spaces, 0)
        isp.push('if 1:\n    return(0)')
        self.assertEqual(isp.indent_spaces, 0)
    def test_push(self):
        # push returns True when the accumulated source is complete
        isp = self.isp
        self.assertTrue(isp.push('x=1'))
    def test_push2(self):
        isp = self.isp
        self.assertFalse(isp.push('if 1:'))
        for line in ['  x=1', '# a comment', '  y=2']:
            print(line)
            self.assertTrue(isp.push(line))
    def test_push3(self):
        isp = self.isp
        isp.push('if True:')
        isp.push('    a = 1')
        self.assertFalse(isp.push('b = [1,'))
    def test_push_accepts_more(self):
        isp = self.isp
        isp.push('x=1')
        self.assertFalse(isp.push_accepts_more())
    def test_push_accepts_more2(self):
        isp = self.isp
        isp.push('if 1:')
        self.assertTrue(isp.push_accepts_more())
        isp.push('    x=1')
        self.assertTrue(isp.push_accepts_more())
        isp.push('')
        self.assertFalse(isp.push_accepts_more())
    def test_push_accepts_more3(self):
        isp = self.isp
        isp.push("x = (2+\n3)")
        self.assertFalse(isp.push_accepts_more())
    def test_push_accepts_more4(self):
        isp = self.isp
        # When a multiline statement contains parens or multiline strings, we
        # shouldn't get confused.
        # FIXME: we should be able to better handle de-dents in statements like
        # multiline strings and multiline expressions (continued with \ or
        # parens). Right now we aren't handling the indentation tracking quite
        # correctly with this, though in practice it may not be too much of a
        # problem. We'll need to see.
        isp.push("if 1:")
        isp.push("    x = (2+")
        isp.push("    3)")
        self.assertTrue(isp.push_accepts_more())
        isp.push("    y = 3")
        self.assertTrue(isp.push_accepts_more())
        isp.push('')
        self.assertFalse(isp.push_accepts_more())
    def test_push_accepts_more5(self):
        isp = self.isp
        isp.push('try:')
        isp.push('    a = 5')
        isp.push('except:')
        isp.push('    raise')
        # We want to be able to add an else: block at this point, so it should
        # wait for a blank line.
        self.assertTrue(isp.push_accepts_more())
    def test_continuation(self):
        # backslash continuation keeps the block open until completed
        isp = self.isp
        isp.push("import os, \\")
        self.assertTrue(isp.push_accepts_more())
        isp.push("sys")
        self.assertFalse(isp.push_accepts_more())
    def test_syntax_error(self):
        isp = self.isp
        # Syntax errors immediately produce a 'ready' block, so the invalid
        # Python can be sent to the kernel for evaluation with possible ipython
        # special-syntax conversion.
        isp.push('run foo')
        self.assertFalse(isp.push_accepts_more())
    def test_unicode(self):
        # non-ascii input must not crash the splitter
        self.isp.push(u"Pérez")
        self.isp.push(u'\xc3\xa9')
        self.isp.push(u"u'\xc3\xa9'")
    def test_line_continuation(self):
        """ Test issue #2108."""
        isp = self.isp
        # A blank line after a line continuation should not accept more
        isp.push("1 \\\n\n")
        self.assertFalse(isp.push_accepts_more())
        # Whitespace after a \ is a SyntaxError.  The only way to test that
        # here is to test that push doesn't accept more (as with
        # test_syntax_error() above).
        isp.push(r"1 \ ")
        self.assertFalse(isp.push_accepts_more())
        # Even if the line is continuable (c.f. the regular Python
        # interpreter)
        isp.push(r"(1 \ ")
        self.assertFalse(isp.push_accepts_more())
class InteractiveLoopTestCase(unittest.TestCase):
    """Tests for an interactive loop like a python shell.
    """
    def check_ns(self, lines, ns):
        """Validate that the given input lines produce the resulting namespace.
        Note: the input lines are given exactly as they would be typed in an
        auto-indenting environment, as mini_interactive_loop above already does
        auto-indenting and prepends spaces to the input.
        """
        src = mini_interactive_loop(pseudo_input(lines))
        test_ns = {}
        # NOTE: Python 2-only 'exec ... in ...' statement syntax.
        exec src in test_ns
        # We can't check that the provided ns is identical to the test_ns,
        # because Python fills test_ns with extra keys (copyright, etc). But
        # we can check that the given dict is *contained* in test_ns
        for k,v in ns.iteritems():
            self.assertEqual(test_ns[k], v)
    def test_simple(self):
        self.check_ns(['x=1'], dict(x=1))
    def test_simple2(self):
        self.check_ns(['if 1:', 'x=2'], dict(x=2))
    def test_xy(self):
        self.check_ns(['x=1; y=2'], dict(x=1, y=2))
    def test_abc(self):
        self.check_ns(['if 1:','a=1','b=2','c=3'], dict(a=1, b=2, c=3))
    def test_multi(self):
        self.check_ns(['x =(1+','1+','2)'], dict(x=4))
class IPythonInputTestCase(InputSplitterTestCase):
    """By just creating a new class whose .isp is a different instance, we
    re-run the same test battery on the new input splitter.
    In addition, this runs the tests over the syntax and syntax_ml dicts that
    were tested by individual functions, as part of the OO interface.
    It also makes some checks on the raw buffer storage.
    """
    def setUp(self):
        self.isp = isp.IPythonInputSplitter()
    def test_syntax(self):
        """Call all single-line syntax tests from the main object"""
        isp = self.isp
        for example in syntax.itervalues():
            for raw, out_t in example:
                # indented lines are continuations, not standalone inputs
                if raw.startswith(' '):
                    continue
                isp.push(raw+'\n')
                out, out_raw = isp.source_raw_reset()
                self.assertEqual(out.rstrip(), out_t,
                        tt.pair_fail_msg.format("inputsplitter",raw, out_t, out))
                self.assertEqual(out_raw.rstrip(), raw.rstrip())
    def test_syntax_multiline(self):
        isp = self.isp
        for example in syntax_ml.itervalues():
            for line_pairs in example:
                out_t_parts = []
                raw_parts = []
                for lraw, out_t_part in line_pairs:
                    if out_t_part is not None:
                        out_t_parts.append(out_t_part)
                    if lraw is not None:
                        isp.push(lraw)
                        raw_parts.append(lraw)
                out, out_raw = isp.source_raw_reset()
                out_t = '\n'.join(out_t_parts).rstrip()
                raw = '\n'.join(raw_parts).rstrip()
                self.assertEqual(out.rstrip(), out_t)
                self.assertEqual(out_raw.rstrip(), raw)
    def test_syntax_multiline_cell(self):
        isp = self.isp
        for example in syntax_ml.itervalues():
            out_t_parts = []
            for line_pairs in example:
                # feed the whole example at once through transform_cell
                raw = '\n'.join(r for r, _ in line_pairs if r is not None)
                out_t = '\n'.join(t for _,t in line_pairs if t is not None)
                out = isp.transform_cell(raw)
                # Match ignoring trailing whitespace
                self.assertEqual(out.rstrip(), out_t.rstrip())
    def test_cellmagic_preempt(self):
        isp = self.isp
        for raw, name, line, cell in [
            ("%%cellm a\nIn[1]:", u'cellm', u'a', u'In[1]:'),
            ("%%cellm \nline\n>>>hi", u'cellm', u'', u'line\n>>>hi'),
            (">>>%%cellm \nline\n>>>hi", u'cellm', u'', u'line\nhi'),
            ("%%cellm \n>>>hi", u'cellm', u'', u'hi'),
            ("%%cellm \nline1\nline2", u'cellm', u'', u'line1\nline2'),
            ("%%cellm \nline1\\\\\nline2", u'cellm', u'', u'line1\\\\\nline2'),
        ]:
            expected = "get_ipython().run_cell_magic(%r, %r, %r)" % (
                name, line, cell
            )
            out = isp.transform_cell(raw)
            self.assertEqual(out.rstrip(), expected.rstrip())
if __name__ == '__main__':
    # A simple demo for interactive experimentation. This code will not get
    # picked up by any test suite.
    # NOTE: Python 2 only — uses raw_input and print statements.
    from IPython.core.inputsplitter import InputSplitter, IPythonInputSplitter
    # configure here the syntax to use, prompt and whether to autoindent
    #isp, start_prompt = InputSplitter(), '>>> '
    isp, start_prompt = IPythonInputSplitter(), 'In> '
    autoindent = True
    #autoindent = False
    try:
        while True:
            prompt = start_prompt
            while isp.push_accepts_more():
                indent = ' '*isp.indent_spaces
                if autoindent:
                    line = indent + raw_input(prompt+indent)
                else:
                    line = raw_input(prompt)
                isp.push(line)
                prompt = '... '
            # Here we just return input so we can use it in a test suite, but a
            # real interpreter would instead send it for execution somewhere.
            #src = isp.source; raise EOFError # dbg
            src, raw = isp.source_raw_reset()
            print 'Input source was:\n', src
            print 'Raw source was:\n', raw
    except EOFError:
        print 'Bye'
def test_last_blank():
    """last_blank: true iff the final line of the string is empty/whitespace."""
    nt.assert_false(isp.last_blank(''))
    nt.assert_false(isp.last_blank('abc'))
    nt.assert_false(isp.last_blank('abc\n'))
    nt.assert_false(isp.last_blank('abc\na'))
    nt.assert_true(isp.last_blank('\n'))
    nt.assert_true(isp.last_blank('\n '))
    nt.assert_true(isp.last_blank('abc\n '))
    nt.assert_true(isp.last_blank('abc\n\n'))
    nt.assert_true(isp.last_blank('abc\nd\n\n'))
    nt.assert_true(isp.last_blank('abc\nd\ne\n\n'))
    nt.assert_true(isp.last_blank('abc \n \n \n\n'))
def test_last_two_blanks():
    """last_two_blanks: true iff the final two lines are both empty/whitespace."""
    nt.assert_false(isp.last_two_blanks(''))
    nt.assert_false(isp.last_two_blanks('abc'))
    nt.assert_false(isp.last_two_blanks('abc\n'))
    nt.assert_false(isp.last_two_blanks('abc\n\na'))
    nt.assert_false(isp.last_two_blanks('abc\n \n'))
    nt.assert_false(isp.last_two_blanks('abc\n\n'))
    nt.assert_true(isp.last_two_blanks('\n\n'))
    nt.assert_true(isp.last_two_blanks('\n\n '))
    nt.assert_true(isp.last_two_blanks('\n \n'))
    nt.assert_true(isp.last_two_blanks('abc\n\n '))
    nt.assert_true(isp.last_two_blanks('abc\n\n\n'))
    nt.assert_true(isp.last_two_blanks('abc\n\n \n'))
    nt.assert_true(isp.last_two_blanks('abc\n\n \n '))
    nt.assert_true(isp.last_two_blanks('abc\n\n \n \n'))
    nt.assert_true(isp.last_two_blanks('abc\nd\n\n\n'))
    nt.assert_true(isp.last_two_blanks('abc\nd\ne\nf\n\n\n'))
class CellMagicsCommon(object):
    """Shared cell-magic checks; mixed into the per-mode TestCases below,
    which each supply ``self.sp`` (an IPythonInputSplitter)."""
    def test_whole_cell(self):
        # A full cell magic should transform into one run_cell_magic call.
        src = "%%cellm line\nbody\n"
        sp = self.sp
        sp.push(src)
        out = sp.source_reset()
        ref = u"get_ipython().run_cell_magic({u}'cellm', {u}'line', {u}'body')\n"
        nt.assert_equal(out, py3compat.u_format(ref))
    def test_cellmagic_help(self):
        # '?' help on a cell magic completes immediately.
        self.sp.push('%%cellm?')
        nt.assert_false(self.sp.push_accepts_more())
    def tearDown(self):
        self.sp.reset()
class CellModeCellMagics(CellMagicsCommon, unittest.TestCase):
    """Cell-mode splitter: blank lines do NOT terminate a cell magic."""
    sp = isp.IPythonInputSplitter(line_input_checker=False)
    def test_incremental(self):
        sp = self.sp
        sp.push('%%cellm firstline\n')
        nt.assert_true(sp.push_accepts_more()) #1
        sp.push('line2\n')
        nt.assert_true(sp.push_accepts_more()) #2
        sp.push('\n')
        # This should accept a blank line and carry on until the cell is reset
        nt.assert_true(sp.push_accepts_more()) #3
class LineModeCellMagics(CellMagicsCommon, unittest.TestCase):
    """Line-mode splitter: a blank line DOES terminate a cell magic."""
    sp = isp.IPythonInputSplitter(line_input_checker=True)
    def test_incremental(self):
        sp = self.sp
        sp.push('%%cellm line2\n')
        nt.assert_true(sp.push_accepts_more()) #1
        sp.push('\n')
        # In this case, a blank line should end the cell magic
        nt.assert_false(sp.push_accepts_more()) #2
|
import time
from datetime import datetime, timedelta
import six
from cqlengine.functions import QueryValue
from cqlengine.operators import BaseWhereOperator, InOperator
class StatementException(Exception):
    """Raised when a statement or clause is constructed with invalid arguments."""
    pass
import sys
class UnicodeMixin(object):
    # Python 2/3 compatibility shim: subclasses implement __unicode__, and this
    # mixin provides the matching __str__ (py3 delegates directly; py2 returns
    # a UTF-8 encoded byte string).
    if sys.version_info > (3, 0):
        __str__ = lambda x: x.__unicode__()
    else:
        __str__ = lambda x: six.text_type(x).encode('utf-8')
class ValueQuoter(UnicodeMixin):
    """Wraps a Python value and renders it as a CQL literal string."""

    def __init__(self, value):
        self.value = value

    def __unicode__(self):
        # local import keeps module import-time free of the driver dependency
        from cassandra.encoder import cql_quote
        val = self.value
        if isinstance(val, bool):
            return 'true' if val else 'false'
        if isinstance(val, (list, tuple)):
            return '[' + ', '.join(cql_quote(v) for v in val) + ']'
        if isinstance(val, dict):
            pairs = (cql_quote(k) + ':' + cql_quote(v) for k, v in val.items())
            return '{' + ', '.join(pairs) + '}'
        if isinstance(val, set):
            return '{' + ', '.join(cql_quote(v) for v in val) + '}'
        return cql_quote(val)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.value == other.value
class InQuoter(ValueQuoter):
    """Renders its value as a parenthesized CQL tuple, as used by IN queries."""

    def __unicode__(self):
        from cassandra.encoder import cql_quote
        quoted = [cql_quote(v) for v in self.value]
        return '(' + ', '.join(quoted) + ')'
class BaseClause(UnicodeMixin):
    """Common state for all statement clauses: a field name, a value, and the
    placeholder id this clause occupies in the query context."""
    def __init__(self, field, value):
        self.field = field
        self.value = value
        # placeholder id; assigned later by the owning statement
        self.context_id = None
    def __unicode__(self):
        raise NotImplementedError
    def __hash__(self):
        return hash(self.field) ^ hash(self.value)
    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self.field == other.field and self.value == other.value
        return False
    def __ne__(self, other):
        return not self.__eq__(other)
    def get_context_size(self):
        """ returns the number of entries this clause will add to the query context """
        return 1
    def set_context_id(self, i):
        """ sets the value placeholder that will be used in the query """
        self.context_id = i
    def update_context(self, ctx):
        """ updates the query context with this clauses values """
        assert isinstance(ctx, dict)
        ctx[str(self.context_id)] = self.value
class WhereClause(BaseClause):
    """ a single where statement used in queries """
    def __init__(self, field, operator, value, quote_field=True):
        """
        :param field: column name the comparison applies to
        :param operator: a BaseWhereOperator instance (=, IN, >, ...)
        :param value: the comparison value (wrapped in QueryValue if needed)
        :param quote_field: hack to get the token function rendering properly
        :return:
        """
        if not isinstance(operator, BaseWhereOperator):
            raise StatementException(
                "operator must be of type {}, got {}".format(BaseWhereOperator, type(operator))
            )
        super(WhereClause, self).__init__(field, value)
        self.operator = operator
        # normalize the value into a QueryValue so context handling is uniform
        self.query_value = self.value if isinstance(self.value, QueryValue) else QueryValue(self.value)
        self.quote_field = quote_field
    def __unicode__(self):
        field = ('"{}"' if self.quote_field else '{}').format(self.field)
        return u'{} {} {}'.format(field, self.operator, six.text_type(self.query_value))
    def __hash__(self):
        # include the operator so clauses differing only by operator hash apart
        return super(WhereClause, self).__hash__() ^ hash(self.operator)
    def __eq__(self, other):
        if super(WhereClause, self).__eq__(other):
            return self.operator.__class__ == other.operator.__class__
        return False
    def get_context_size(self):
        return self.query_value.get_context_size()
    def set_context_id(self, i):
        super(WhereClause, self).set_context_id(i)
        self.query_value.set_context_id(i)
    def update_context(self, ctx):
        # IN operators render their whole value list as a single quoted tuple
        if isinstance(self.operator, InOperator):
            ctx[str(self.context_id)] = InQuoter(self.value)
        else:
            self.query_value.update_context(ctx)
class AssignmentClause(BaseClause):
    """A single SET-style assignment, rendered as ``"field" = %(ctx)s``."""

    def __unicode__(self):
        return u'"{0}" = %({1})s'.format(self.field, self.context_id)

    def insert_tuple(self):
        """Return the (field, context_id) pair used when building INSERTs."""
        return self.field, self.context_id
class TransactionClause(BaseClause):
    """A single IF-condition assignment, rendered as ``"field" = %(ctx)s``."""

    def __unicode__(self):
        return u'"{0}" = %({1})s'.format(self.field, self.context_id)

    def insert_tuple(self):
        """Return the (field, context_id) pair for statement construction."""
        return self.field, self.context_id
class ContainerUpdateClause(AssignmentClause):
    """Base for collection-column update clauses.

    Subclasses implement ``_analyze`` to diff ``value`` against ``previous``
    and then render/update accordingly.
    """

    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(ContainerUpdateClause, self).__init__(field, value)
        self.previous = previous
        self._assignments = None
        self._operation = operation
        self._analyzed = False
        self._column = column

    def _to_database(self, val):
        # Delegate serialization to the column when one was provided.
        if self._column:
            return self._column.to_database(val)
        return val

    def _analyze(self):
        raise NotImplementedError

    def get_context_size(self):
        raise NotImplementedError

    def update_context(self, ctx):
        raise NotImplementedError
class SetUpdateClause(ContainerUpdateClause):
    """ updates a set collection """
    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(SetUpdateClause, self).__init__(field, value, operation, previous, column=column)
        # computed by _analyze(): elements to add to / remove from the set
        self._additions = None
        self._removals = None
    def __unicode__(self):
        qs = []
        ctx_id = self.context_id
        # nothing computed at all: render a full-overwrite placeholder
        if (self.previous is None and
                self._assignments is None and
                self._additions is None and
                self._removals is None):
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
        if self._assignments is not None:
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._additions is not None:
            qs += ['"{0}" = "{0}" + %({1})s'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._removals is not None:
            qs += ['"{0}" = "{0}" - %({1})s'.format(self.field, ctx_id)]
        return ', '.join(qs)
    def _analyze(self):
        """ works out the updates to be performed """
        if self.value is None or self.value == self.previous:
            pass
        elif self._operation == "add":
            self._additions = self.value
        elif self._operation == "remove":
            self._removals = self.value
        elif self.previous is None:
            self._assignments = self.value
        else:
            # partial update time
            self._additions = (self.value - self.previous) or None
            self._removals = (self.previous - self.value) or None
        self._analyzed = True
    def get_context_size(self):
        if not self._analyzed: self._analyze()
        # the "nothing computed" case still consumes one placeholder (empty set)
        if (self.previous is None and
                not self._assignments and
                self._additions is None and
                self._removals is None):
            return 1
        return int(bool(self._assignments)) + int(bool(self._additions)) + int(bool(self._removals))
    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        ctx_id = self.context_id
        if (self.previous is None and
                self._assignments is None and
                self._additions is None and
                self._removals is None):
            ctx[str(ctx_id)] = self._to_database({})
        if self._assignments is not None:
            ctx[str(ctx_id)] = self._to_database(self._assignments)
            ctx_id += 1
        if self._additions is not None:
            ctx[str(ctx_id)] = self._to_database(self._additions)
            ctx_id += 1
        if self._removals is not None:
            ctx[str(ctx_id)] = self._to_database(self._removals)
class ListUpdateClause(ContainerUpdateClause):
    """ updates a list collection """
    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(ListUpdateClause, self).__init__(field, value, operation, previous, column=column)
        # computed by _analyze(): elements appended to / prepended to the list
        self._append = None
        self._prepend = None
    def __unicode__(self):
        if not self._analyzed: self._analyze()
        qs = []
        ctx_id = self.context_id
        if self._assignments is not None:
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._prepend is not None:
            qs += ['"{0}" = %({1})s + "{0}"'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._append is not None:
            qs += ['"{0}" = "{0}" + %({1})s'.format(self.field, ctx_id)]
        return ', '.join(qs)
    def get_context_size(self):
        if not self._analyzed: self._analyze()
        return int(self._assignments is not None) + int(bool(self._append)) + int(bool(self._prepend))
    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        ctx_id = self.context_id
        if self._assignments is not None:
            ctx[str(ctx_id)] = self._to_database(self._assignments)
            ctx_id += 1
        if self._prepend is not None:
            # CQL seems to prepend element at a time, starting
            # with the element at idx 0, we can either reverse
            # it here, or have it inserted in reverse
            ctx[str(ctx_id)] = self._to_database(list(reversed(self._prepend)))
            ctx_id += 1
        if self._append is not None:
            ctx[str(ctx_id)] = self._to_database(self._append)
    def _analyze(self):
        """ works out the updates to be performed """
        if self.value is None or self.value == self.previous:
            pass
        elif self._operation == "append":
            self._append = self.value
        elif self._operation == "prepend":
            # self.value is a Quoter but we reverse self._prepend later as if
            # it's a list, so we have to set it to the underlying list
            self._prepend = self.value.value
        elif self.previous is None:
            self._assignments = self.value
        elif len(self.value) < len(self.previous):
            # if elements have been removed,
            # rewrite the whole list
            self._assignments = self.value
        elif len(self.previous) == 0:
            # if we're updating from an empty
            # list, do a complete insert
            self._assignments = self.value
        else:
            # Try to find `previous` as a contiguous run inside `value`;
            # everything before it becomes a prepend, everything after an append.
            # the max start idx we want to compare
            search_space = len(self.value) - max(0, len(self.previous)-1)
            # the size of the sub lists we want to look at
            search_size = len(self.previous)
            for i in range(search_space):
                #slice boundary
                j = i + search_size
                sub = self.value[i:j]
                # cheap first/last element check before the full comparison
                idx_cmp = lambda idx: self.previous[idx] == sub[idx]
                if idx_cmp(0) and idx_cmp(-1) and self.previous == sub:
                    self._prepend = self.value[:i] or None
                    self._append = self.value[j:] or None
                    break
            # if both append and prepend are still None after looking
            # at both lists, an insert statement will be created
            if self._prepend is self._append is None:
                self._assignments = self.value
        self._analyzed = True
class MapUpdateClause(ContainerUpdateClause):
    """ updates a map collection """
    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(MapUpdateClause, self).__init__(field, value, operation, previous, column=column)
        # computed by _analyze(): the map keys whose values must be written
        self._updates = None
    def _analyze(self):
        if self._operation == "update":
            self._updates = self.value.keys()
        else:
            if self.previous is None:
                self._updates = sorted([k for k, v in self.value.items()])
            else:
                # only keys whose value changed relative to the previous map
                self._updates = sorted([k for k, v in self.value.items() if v != self.previous.get(k)]) or None
        self._analyzed = True
    def get_context_size(self):
        if not self._analyzed: self._analyze()
        if self.previous is None and not self._updates:
            return 1
        # each updated key consumes two placeholders: one key, one value
        return len(self._updates or []) * 2
    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        ctx_id = self.context_id
        if self.previous is None and not self._updates:
            ctx[str(ctx_id)] = {}
        else:
            for key in self._updates or []:
                val = self.value.get(key)
                ctx[str(ctx_id)] = self._column.key_col.to_database(key) if self._column else key
                ctx[str(ctx_id + 1)] = self._column.value_col.to_database(val) if self._column else val
                ctx_id += 2
    def __unicode__(self):
        if not self._analyzed: self._analyze()
        qs = []
        ctx_id = self.context_id
        if self.previous is None and not self._updates:
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
        else:
            for _ in self._updates or []:
                qs += ['"{}"[%({})s] = %({})s'.format(self.field, ctx_id, ctx_id + 1)]
                ctx_id += 2
        return ', '.join(qs)
class CounterUpdateClause(ContainerUpdateClause):
    """Renders a counter increment/decrement relative to a previous value."""
    def __init__(self, field, value, previous=None, column=None):
        super(CounterUpdateClause, self).__init__(field, value, previous=previous, column=column)
        # treat a missing previous value as zero for delta computation
        self.previous = self.previous or 0
    def get_context_size(self):
        return 1
    def update_context(self, ctx):
        # magnitude only; the sign is rendered into the CQL text by __unicode__
        ctx[str(self.context_id)] = self._to_database(abs(self.value - self.previous))
    def __unicode__(self):
        delta = self.value - self.previous
        sign = '-' if delta < 0 else '+'
        return '"{0}" = "{0}" {1} %({2})s'.format(self.field, sign, self.context_id)
class BaseDeleteClause(BaseClause):
    """Base class for clauses used by DELETE statements."""
    pass
class FieldDeleteClause(BaseDeleteClause):
    """ deletes a field from a row """
    def __init__(self, field):
        # no value is involved when deleting a whole field
        super(FieldDeleteClause, self).__init__(field, None)
    def __unicode__(self):
        return '"{}"'.format(self.field)
    def update_context(self, ctx):
        # the field name is rendered inline; nothing goes into the context
        pass
    def get_context_size(self):
        return 0
class MapDeleteClause(BaseDeleteClause):
    """ removes keys from a map """
    def __init__(self, field, value, previous=None):
        super(MapDeleteClause, self).__init__(field, value)
        self.value = self.value or {}
        self.previous = previous or {}
        self._analyzed = False
        # computed by _analyze(): keys present before but absent now
        self._removals = None
    def _analyze(self):
        self._removals = sorted([k for k in self.previous if k not in self.value])
        self._analyzed = True
    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        # one placeholder per removed key, starting at this clause's context id
        for idx, key in enumerate(self._removals):
            ctx[str(self.context_id + idx)] = key
    def get_context_size(self):
        if not self._analyzed: self._analyze()
        return len(self._removals)
    def __unicode__(self):
        if not self._analyzed: self._analyze()
        return ', '.join(['"{}"[%({})s]'.format(self.field, self.context_id + i) for i in range(len(self._removals))])
class BaseCQLStatement(UnicodeMixin):
    """ The base cql statement class """
    def __init__(self, table, consistency=None, timestamp=None, where=None):
        super(BaseCQLStatement, self).__init__()
        self.table = table
        self.consistency = consistency
        # context_id is the first placeholder index; context_counter advances
        # as clauses are added so each clause gets a unique id range
        self.context_id = 0
        self.context_counter = self.context_id
        self.timestamp = timestamp
        self.where_clauses = []
        for clause in where or []:
            self.add_where_clause(clause)
    def add_where_clause(self, clause):
        """
        adds a where clause to this statement
        :param clause: the clause to add
        :type clause: WhereClause
        """
        if not isinstance(clause, WhereClause):
            raise StatementException("only instances of WhereClause can be added to statements")
        clause.set_context_id(self.context_counter)
        self.context_counter += clause.get_context_size()
        self.where_clauses.append(clause)
    def get_context(self):
        """
        returns the context dict for this statement
        :rtype: dict
        """
        ctx = {}
        for clause in self.where_clauses or []:
            clause.update_context(ctx)
        return ctx
    def get_context_size(self):
        return len(self.get_context())
    def update_context_id(self, i):
        # rebase all clause placeholder ids starting from i
        self.context_id = i
        self.context_counter = self.context_id
        for clause in self.where_clauses:
            clause.set_context_id(self.context_counter)
            self.context_counter += clause.get_context_size()
    @property
    def timestamp_normalized(self):
        """
        we're expecting self.timestamp to be either a long, int, a datetime, or a timedelta
        :return: the timestamp as integer microseconds, or None if unset/falsy
        """
        if not self.timestamp:
            return None
        if isinstance(self.timestamp, six.integer_types):
            return self.timestamp
        if isinstance(self.timestamp, timedelta):
            # a timedelta is interpreted as an offset from "now"
            tmp = datetime.now() + self.timestamp
        else:
            tmp = self.timestamp
        return int(time.mktime(tmp.timetuple()) * 1e+6 + tmp.microsecond)
    def __unicode__(self):
        raise NotImplementedError
    def __repr__(self):
        return self.__unicode__()
    @property
    def _where(self):
        return 'WHERE {}'.format(' AND '.join([six.text_type(c) for c in self.where_clauses]))
class SelectStatement(BaseCQLStatement):
    """ a cql select statement """

    def __init__(self,
                 table,
                 fields=None,
                 count=False,
                 consistency=None,
                 where=None,
                 order_by=None,
                 limit=None,
                 allow_filtering=False):
        """
        :param where: filtering clauses
        :type where: list of cqlengine.statements.WhereClause
        """
        super(SelectStatement, self).__init__(
            table,
            consistency=consistency,
            where=where
        )
        # single-string arguments are normalized into lists
        if isinstance(fields, six.string_types):
            self.fields = [fields]
        else:
            self.fields = fields or []
        if isinstance(order_by, six.string_types):
            self.order_by = [order_by]
        else:
            self.order_by = order_by
        self.count = count
        self.limit = limit
        self.allow_filtering = allow_filtering

    def __unicode__(self):
        qs = ['SELECT']
        if self.count:
            qs.append('COUNT(*)')
        elif self.fields:
            qs.append(', '.join('"{}"'.format(field) for field in self.fields))
        else:
            qs.append('*')
        qs.append('FROM')
        qs.append(self.table)
        if self.where_clauses:
            qs.append(self._where)
        if self.order_by and not self.count:
            qs.append('ORDER BY {}'.format(', '.join(six.text_type(o) for o in self.order_by)))
        if self.limit:
            qs.append('LIMIT {}'.format(self.limit))
        if self.allow_filtering:
            qs.append('ALLOW FILTERING')
        return ' '.join(qs)
class AssignmentStatement(BaseCQLStatement):
    """ value assignment statements """

    def __init__(self,
                 table,
                 assignments=None,
                 consistency=None,
                 where=None,
                 ttl=None,
                 timestamp=None):
        super(AssignmentStatement, self).__init__(
            table,
            consistency=consistency,
            where=where,
        )
        self.ttl = ttl
        self.timestamp = timestamp
        # register assignment clauses, handing each its own context id range
        self.assignments = []
        for assignment_clause in assignments or []:
            self.add_assignment_clause(assignment_clause)

    def update_context_id(self, i):
        # rebase the where clauses first, then the assignments after them
        super(AssignmentStatement, self).update_context_id(i)
        for assignment_clause in self.assignments:
            assignment_clause.set_context_id(self.context_counter)
            self.context_counter += assignment_clause.get_context_size()

    def add_assignment_clause(self, clause):
        """
        adds an assignment clause to this statement
        :param clause: the clause to add
        :type clause: AssignmentClause
        """
        if not isinstance(clause, AssignmentClause):
            raise StatementException("only instances of AssignmentClause can be added to statements")
        clause.set_context_id(self.context_counter)
        self.context_counter += clause.get_context_size()
        self.assignments.append(clause)

    @property
    def is_empty(self):
        return not self.assignments

    def get_context(self):
        ctx = super(AssignmentStatement, self).get_context()
        for assignment_clause in self.assignments:
            assignment_clause.update_context(ctx)
        return ctx
class InsertStatement(AssignmentStatement):
    """ an cql insert select statement """

    def __init__(self,
                 table,
                 assignments=None,
                 consistency=None,
                 where=None,
                 ttl=None,
                 timestamp=None,
                 if_not_exists=False):
        super(InsertStatement, self).__init__(table,
                                              assignments=assignments,
                                              consistency=consistency,
                                              where=where,
                                              ttl=ttl,
                                              timestamp=timestamp)
        self.if_not_exists = if_not_exists

    def add_where_clause(self, clause):
        # INSERT statements have no WHERE part in CQL
        raise StatementException("Cannot add where clauses to insert statements")

    def __unicode__(self):
        qs = ['INSERT INTO {}'.format(self.table)]
        # each assignment contributes a (column, placeholder) pair
        pairs = [assignment.insert_tuple() for assignment in self.assignments]
        columns, values = zip(*pairs)
        qs.append("({})".format(', '.join('"{}"'.format(column) for column in columns)))
        qs.append('VALUES')
        qs.append("({})".format(', '.join('%({})s'.format(value) for value in values)))
        if self.if_not_exists:
            qs.append("IF NOT EXISTS")
        if self.ttl:
            qs.append("USING TTL {}".format(self.ttl))
        if self.timestamp:
            qs.append("USING TIMESTAMP {}".format(self.timestamp_normalized))
        return ' '.join(qs)
class UpdateStatement(AssignmentStatement):
    """ an cql update select statement """

    def __init__(self,
                 table,
                 assignments=None,
                 consistency=None,
                 where=None,
                 ttl=None,
                 timestamp=None,
                 transactions=None):
        """
        :param transactions: optional IF conditions (lightweight transactions)
        :type transactions: list of TransactionClause
        """
        super(UpdateStatement, self).__init__(table,
                                              assignments=assignments,
                                              consistency=consistency,
                                              where=where,
                                              ttl=ttl,
                                              timestamp=timestamp)
        # Add iff statements
        self.transactions = []
        for transaction in transactions or []:
            self.add_transaction_clause(transaction)

    def __unicode__(self):
        qs = ['UPDATE', self.table]
        using_options = []
        if self.ttl:
            using_options += ["TTL {}".format(self.ttl)]
        if self.timestamp:
            using_options += ["TIMESTAMP {}".format(self.timestamp_normalized)]
        if using_options:
            qs += ["USING {}".format(" AND ".join(using_options))]
        qs += ['SET']
        qs += [', '.join([six.text_type(c) for c in self.assignments])]
        if self.where_clauses:
            qs += [self._where]
        if len(self.transactions) > 0:
            qs += [self._get_transactions()]
        return ' '.join(qs)

    def add_transaction_clause(self, clause):
        """
        Adds a iff clause to this statement
        :param clause: The clause that will be added to the iff statement
        :type clause: TransactionClause
        """
        if not isinstance(clause, TransactionClause):
            # BUG FIX: the message previously named AssignmentClause, which is
            # not the type this method actually requires.
            raise StatementException('only instances of TransactionClause can be added to statements')
        clause.set_context_id(self.context_counter)
        self.context_counter += clause.get_context_size()
        self.transactions.append(clause)

    def get_context(self):
        ctx = super(UpdateStatement, self).get_context()
        for clause in self.transactions or []:
            clause.update_context(ctx)
        return ctx

    def _get_transactions(self):
        return 'IF {}'.format(' AND '.join([six.text_type(c) for c in self.transactions]))

    def update_context_id(self, i):
        # rebase where + assignment ids first, then the transaction clauses
        super(UpdateStatement, self).update_context_id(i)
        for transaction in self.transactions:
            transaction.set_context_id(self.context_counter)
            self.context_counter += transaction.get_context_size()
class DeleteStatement(BaseCQLStatement):
    """ a cql delete statement """

    def __init__(self, table, fields=None, consistency=None, where=None, timestamp=None):
        super(DeleteStatement, self).__init__(
            table,
            consistency=consistency,
            where=where,
            timestamp=timestamp
        )
        self.fields = []
        if isinstance(fields, six.string_types):
            fields = [fields]
        for field in fields or []:
            self.add_field(field)

    def update_context_id(self, i):
        super(DeleteStatement, self).update_context_id(i)
        for field in self.fields:
            field.set_context_id(self.context_counter)
            self.context_counter += field.get_context_size()

    def get_context(self):
        ctx = super(DeleteStatement, self).get_context()
        for field in self.fields:
            field.update_context(ctx)
        return ctx

    def add_field(self, field):
        """Add a column (or delete clause) whose value should be removed.

        Plain strings are wrapped in a FieldDeleteClause.
        """
        if isinstance(field, six.string_types):
            field = FieldDeleteClause(field)
        if not isinstance(field, BaseClause):
            # BUG FIX: the message previously named AssignmentClause, but this
            # method accepts strings and BaseClause instances.
            raise StatementException("only instances of BaseClause can be added to statements")
        field.set_context_id(self.context_counter)
        self.context_counter += field.get_context_size()
        self.fields.append(field)

    def __unicode__(self):
        qs = ['DELETE']
        if self.fields:
            qs += [', '.join(['{}'.format(f) for f in self.fields])]
        qs += ['FROM', self.table]
        delete_option = []
        if self.timestamp:
            delete_option += ["TIMESTAMP {}".format(self.timestamp_normalized)]
        if delete_option:
            # NOTE: kept byte-identical (including the surrounding spaces) so
            # the generated CQL string does not change for existing callers.
            qs += [" USING {} ".format(" AND ".join(delete_option))]
        if self.where_clauses:
            qs += [self._where]
        return ' '.join(qs)
|
import os
import xlog
logger = xlog.getLogger("gae_proxy")
logger.set_buffer(500)
from . import check_local_network
from .config import config, direct_config
from . import host_manager
from front_base.openssl_wrap import SSLContext
from front_base.connect_creator import ConnectCreator
from front_base.ip_manager import IpManager
from front_base.ip_source import Ipv4RangeSource, Ipv6PoolSource, IpCombineSource
from front_base.http_dispatcher import HttpsDispatcher
from front_base.connect_manager import ConnectManager
from .check_ip import CheckIp
from .appid_manager import AppidManager
# Filesystem layout for the gae_proxy module.
current_path = os.path.dirname(os.path.abspath(__file__))
# repo root: two levels above this module's directory
root_path = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir))
# NOTE(review): 'data' is resolved two further levels above root_path —
# presumably a sibling of the code tree in the install layout; confirm
# against the deployment directory structure.
data_path = os.path.abspath(os.path.join(root_path, os.pardir, os.pardir, 'data'))
module_data_path = os.path.join(data_path, 'gae_proxy')
class Front(object):
    """Assembles the GAE-proxy front end.

    start() wires together the appid/host managers, the combined ipv4/ipv6
    ip source, connection management and the HTTPS dispatcher; request()
    is the public entry point used by the proxy handlers.
    """
    name = "gae_front"

    def __init__(self):
        self.logger = logger
        self.config = config

    def start(self):
        self.running = True
        ca_certs = os.path.join(current_path, "cacert.pem")
        self.openssl_context = SSLContext(
            logger, ca_certs=ca_certs, support_http2=config.support_http2,
            protocol="TLSv1_2"
            #cipher_suites=[b'ALL', b"!RC4-SHA", b"!ECDHE-RSA-RC4-SHA", b"!ECDHE-RSA-AES128-GCM-SHA256",
            #               b"!AES128-GCM-SHA256", b"!ECDHE-RSA-AES128-SHA", b"!AES128-SHA"]
        )
        self.appid_manager = AppidManager(self.config, logger)
        self.host_manager = host_manager.HostManager(self.config, logger)
        self.host_manager.appid_manager = self.appid_manager
        self.connect_creator = ConnectCreator(
            logger, self.config, self.openssl_context, self.host_manager)
        # ip probing runs with the null logger to avoid flooding the log
        self.ip_checker = CheckIp(xlog.null, self.config, self.connect_creator)
        self.ipv4_source = Ipv4RangeSource(
            logger, self.config,
            os.path.join(current_path, "ip_range.txt"),
            os.path.join(module_data_path, "ip_range.txt")
        )
        self.ipv6_source = Ipv6PoolSource(
            logger, self.config,
            os.path.join(current_path, "ipv6_list.txt")
        )
        self.ip_source = IpCombineSource(
            logger, self.config,
            self.ipv4_source, self.ipv6_source
        )
        self.ip_manager = IpManager(
            logger, self.config, self.ip_source, check_local_network,
            self.check_ip,
            None,
            os.path.join(module_data_path, "good_ip.txt"),
            scan_ip_log=None)
        self.appid_manager.check_api = self.ip_checker.check_ip
        self.appid_manager.ip_manager = self.ip_manager
        self.connect_manager = ConnectManager(
            logger, self.config, self.connect_creator, self.ip_manager, check_local_network)
        self.http_dispatcher = HttpsDispatcher(
            logger, self.config, self.ip_manager, self.connect_manager
        )

    def check_ip(self, ip):
        """Probe one candidate ip with a rotating SNI against the check host."""
        sni = self.host_manager.sni_manager.get()
        host = self.config.check_ip_host
        return self.ip_checker.check_ip(ip, sni=sni, host=host)

    def get_dispatcher(self):
        return self.http_dispatcher

    def request(self, method, host, path=b"/", headers=None, data="", timeout=120):
        """Issue one request through the shared dispatcher.

        BUG FIX: ``headers`` previously defaulted to a mutable ``{}``; use
        None and copy instead, which is behavior-identical for all callers.
        """
        headers = dict(headers) if headers else {}
        response = self.http_dispatcher.request(method, host, path, headers, data, timeout=timeout)
        return response

    def stop(self):
        logger.info("terminate")
        self.connect_manager.set_ssl_created_cb(None)
        self.http_dispatcher.stop()
        self.connect_manager.stop()
        self.ip_manager.stop()
        self.running = False

    def set_proxy(self, args):
        """Persist new upstream-proxy settings and refresh the connector."""
        logger.info("set_proxy:%s", args)
        self.config.PROXY_ENABLE = args["enable"]
        self.config.PROXY_TYPE = args["type"]
        self.config.PROXY_HOST = args["host"]
        self.config.PROXY_PORT = args["port"]
        self.config.PROXY_USER = args["user"]
        self.config.PROXY_PASSWD = args["passwd"]
        self.config.save()
        self.connect_creator.update_config()
front = Front()
class DirectFront(object):
    """Direct-connect front end.

    Shares Front's ip manager and config but disables HTTP/2 and keeps one
    dispatcher per destination host.
    """
    name = "direct_front"

    def __init__(self):
        pass

    def start(self):
        self.running = True
        self.host_manager = host_manager.HostManager(front.config, logger)
        ca_certs = os.path.join(current_path, "cacert.pem")
        self.openssl_context = SSLContext(
            logger, ca_certs=ca_certs, support_http2=False, protocol="TLSv1_2"
            #cipher_suites=[b'ALL', b"!RC4-SHA", b"!ECDHE-RSA-RC4-SHA", b"!ECDHE-RSA-AES128-GCM-SHA256",
            #               b"!AES128-GCM-SHA256", b"!ECDHE-RSA-AES128-SHA", b"!AES128-SHA"]
        )
        self.connect_creator = ConnectCreator(
            logger, front.config, self.openssl_context, self.host_manager)
        self.ip_manager = front.ip_manager
        self.connect_manager = ConnectManager(
            logger, front.config, self.connect_creator, self.ip_manager, check_local_network)
        self.dispatchs = {}

    def get_dispatcher(self, host):
        # NOTE(review): not synchronized; concurrent first requests for the
        # same host may build two dispatchers — confirm callers serialize this.
        if host not in self.dispatchs:
            http_dispatcher = HttpsDispatcher(
                logger, direct_config, front.ip_manager, self.connect_manager)
            self.dispatchs[host] = http_dispatcher
        return self.dispatchs[host]

    def request(self, method, host, path="/", headers=None, data="", timeout=60):
        """Issue one direct request via the per-host dispatcher.

        BUG FIX: ``headers`` previously defaulted to a mutable ``{}``; use
        None and copy instead, which is behavior-identical for all callers.
        """
        headers = dict(headers) if headers else {}
        dispatcher = self.get_dispatcher(host)
        response = dispatcher.request(method, host, path, headers, data, timeout=timeout)
        return response

    def stop(self):
        logger.info("terminate")
        self.connect_manager.set_ssl_created_cb(None)
        for host in self.dispatchs:
            dispatcher = self.dispatchs[host]
            dispatcher.stop()
        self.connect_manager.stop()
        self.running = False

    def set_proxy(self, args):
        # proxy settings live in the shared config; just refresh the connector
        self.connect_creator.update_config()
direct_front = DirectFront()
|
# Symbolic names for the server component types used elsewhere in the project.
Coordinator = 'Coordinator' # Voodoo Coordinator; it will always be there
Login = 'Login'
UserProcessing = 'UserProcessing'
Proxy = 'Proxy'
Laboratory = 'Laboratory'
Translator = 'Translator'
Experiment = 'Experiment'
|
'''
Compute the sum of the digits of a positive integer n. E.g.: sd(123) == 6.
'''
def sd(n):
    """Return the sum of the decimal digits of ``n`` (iterative form)."""
    if n <= 9:
        return n
    total = 0
    while n:
        total += n % 10
        n //= 10
    return total
|
import os
import typing
import unittest
from threading import Event
from unittest.mock import Mock, call, patch
import pytest
import requests_mock
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad
from streamlink.session import Streamlink
from streamlink.stream.hls import HLSStream, HLSStreamReader
from tests.mixins.stream_hls import EventedHLSStreamWriter, Playlist, Segment, Tag, TestMixinStreamHLS
from tests.resources import text
class EncryptedBase:
    """Mixin that AES-CBC-encrypts the wrapped tag/segment content.

    Keeps the plaintext in ``content_plain`` and replaces ``content`` with
    the ciphertext, optionally using custom padding and trailing garbage.
    """

    def __init__(self, num, key, iv, *args, padding=b"", append=b"", **kwargs):
        super().__init__(num, *args, **kwargs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        if padding:
            plaintext = self.content + padding
        else:
            plaintext = pad(self.content, AES.block_size, style="pkcs7")
        self.content_plain = self.content
        self.content = cipher.encrypt(plaintext) + append
class TagMap(Tag):
    """An EXT-X-MAP tag pointing at an initialization section ``map<num>``."""

    def __init__(self, num, namespace):
        self.path = f"map{num}"
        self.content = f"[map{num}]".encode("ascii")
        uri = self.val_quoted_string(self.url(namespace))
        super().__init__("EXT-X-MAP", {"URI": uri})
class TagMapEnc(EncryptedBase, TagMap):
    # EXT-X-MAP tag whose content is AES-encrypted (see EncryptedBase)
    pass
class TagKey(Tag):
    """An EXT-X-KEY tag with configurable METHOD/URI/IV/KEYFORMAT attributes."""
    path = "encryption.key"

    def __init__(self, method="NONE", uri=None, iv=None, keyformat=None, keyformatversions=None):
        attrs = {"METHOD": method}
        if uri is not False:  # pragma: no branch
            # resolved lazily so the tag can render against any namespace
            attrs["URI"] = lambda tag, namespace: tag.val_quoted_string(tag.url(namespace))
        if iv is not None:  # pragma: no branch
            attrs["IV"] = self.val_hex(iv)
        if keyformat is not None:  # pragma: no branch
            attrs["KEYFORMAT"] = self.val_quoted_string(keyformat)
        if keyformatversions is not None:  # pragma: no branch
            attrs["KEYFORMATVERSIONS"] = self.val_quoted_string(keyformatversions)
        super().__init__("EXT-X-KEY", attrs)
        self.uri = uri

    def url(self, namespace):
        if self.uri:
            return self.uri.format(namespace=namespace)
        return super().url(namespace)
class SegmentEnc(EncryptedBase, Segment):
    # media segment whose content is AES-encrypted (see EncryptedBase)
    pass
class TestHLSStreamRepr(unittest.TestCase):
    def test_repr(self):
        """repr() shows the playlist url and, when given, the master url."""
        session = Streamlink()
        without_master = HLSStream(session, "https://foo.bar/playlist.m3u8")
        self.assertEqual(repr(without_master), "<HLSStream('https://foo.bar/playlist.m3u8', None)>")
        with_master = HLSStream(session, "https://foo.bar/playlist.m3u8", "https://foo.bar/master.m3u8")
        self.assertEqual(repr(with_master), "<HLSStream('https://foo.bar/playlist.m3u8', 'https://foo.bar/master.m3u8')>")
class TestHLSVariantPlaylist(unittest.TestCase):
    @classmethod
    def get_master_playlist(cls, playlist):
        """Read a master playlist fixture from the test resources."""
        with text(playlist) as pl:
            return pl.read()

    def subject(self, playlist, options=None):
        # serve the fixture under a test-unique url and parse it
        with requests_mock.Mocker() as mock:
            url = f"http://mocked/{self.id()}/master.m3u8"
            mock.get(url, text=self.get_master_playlist(playlist))
            session = Streamlink(options)
            return HLSStream.parse_variant_playlist(session, url)

    def test_variant_playlist(self):
        streams = self.subject("hls/test_master.m3u8")
        expected_names = ["720p", "720p_alt", "480p", "360p", "160p", "1080p (source)", "90k"]
        self.assertEqual(
            list(streams.keys()),
            expected_names,
            "Finds all streams in master playlist"
        )
        self.assertTrue(
            all(isinstance(stream, HLSStream) for stream in streams.values()),
            "Returns HLSStream instances"
        )
class EventedHLSReader(HLSStreamReader):
    # swap in the evented writer so tests can step through segment writes
    __writer__ = EventedHLSStreamWriter
class EventedHLSStream(HLSStream):
    # HLSStream variant wired to the evented reader/writer pair above
    __reader__ = EventedHLSReader
@patch("streamlink.stream.hls.HLSStreamWorker.wait", Mock(return_value=True))
class TestHLSStream(TestMixinStreamHLS, unittest.TestCase):
    """Core HLS behavior: start_offset/duration handling and EXT-X-MAP reuse."""

    def get_session(self, options=None, *args, **kwargs):
        session = super().get_session(options)
        session.set_option("hls-live-edge", 3)
        return session

    def test_offset_and_duration(self):
        thread, segments = self.subject([
            Playlist(1234, [Segment(0), Segment(1, duration=0.5), Segment(2, duration=0.5), Segment(3)], end=True)
        ], streamoptions={"start_offset": 1, "duration": 1})
        data = self.await_read(read_all=True)
        self.assertEqual(data, self.content(segments, cond=lambda s: 0 < s.num < 3), "Respects the offset and duration")
        self.assertTrue(all(self.called(s) for s in segments.values() if 0 < s.num < 3), "Downloads second and third segment")
        # BUG FIX: the previous condition `0 > s.num > 3` can never be true, so
        # this assertion was vacuous; check the complement range instead.
        self.assertFalse(any(self.called(s) for s in segments.values() if not 0 < s.num < 3), "Skips other segments")

    def test_map(self):
        discontinuity = Tag("EXT-X-DISCONTINUITY")
        map1 = TagMap(1, self.id())
        map2 = TagMap(2, self.id())
        self.mock("GET", self.url(map1), content=map1.content)
        self.mock("GET", self.url(map2), content=map2.content)
        thread, segments = self.subject([
            Playlist(0, [map1, Segment(0), Segment(1), Segment(2), Segment(3)]),
            Playlist(4, [map1, Segment(4), map2, Segment(5), Segment(6), discontinuity, Segment(7)], end=True)
        ])
        data = self.await_read(read_all=True, timeout=None)
        self.assertEqual(data, self.content([
            map1, segments[1], map1, segments[2], map1, segments[3],
            map1, segments[4], map2, segments[5], map2, segments[6], segments[7]
        ]))
        self.assertTrue(self.called(map1, once=True), "Downloads first map only once")
        self.assertTrue(self.called(map2, once=True), "Downloads second map only once")
@patch("streamlink.stream.hls.HLSStreamWorker.wait", Mock(return_value=True))
class TestHLSStreamEncrypted(TestMixinStreamHLS, unittest.TestCase):
    """AES-128 decryption behavior: key download/reuse, encrypted EXT-X-MAP
    sections, key-URI overrides, and garbage/padding edge cases."""
    __stream__ = EventedHLSStream
    def get_session(self, options=None, *args, **kwargs):
        # the custom header lets tests assert that key/segment requests
        # carry the session's http-headers option
        session = super().get_session(options)
        session.set_option("hls-live-edge", 3)
        session.set_option("http-headers", {"X-FOO": "BAR"})
        return session
    def gen_key(self, aes_key=None, aes_iv=None, method="AES-128", uri=None, keyformat="identity", keyformatversions=1):
        # Build an EXT-X-KEY tag and mock its key-download endpoint.
        aes_key = aes_key or os.urandom(16)
        aes_iv = aes_iv or os.urandom(16)
        key = TagKey(method=method, uri=uri, iv=aes_iv, keyformat=keyformat, keyformatversions=keyformatversions)
        self.mock("GET", key.url(self.id()), content=aes_key)
        return aes_key, aes_iv, key
    def test_hls_encrypted_aes128(self):
        """Decrypts segments, fetches the key once, honors live-edge skipping."""
        aesKey, aesIv, key = self.gen_key()
        # noinspection PyTypeChecker
        thread, segments = self.subject([
            Playlist(0, [key] + [SegmentEnc(num, aesKey, aesIv) for num in range(0, 4)]),
            Playlist(4, [key] + [SegmentEnc(num, aesKey, aesIv) for num in range(4, 8)], end=True)
        ])
        self.await_write(3 + 4)
        data = self.await_read(read_all=True)
        expected = self.content(segments, prop="content_plain", cond=lambda s: s.num >= 1)
        self.assertEqual(data, expected, "Decrypts the AES-128 identity stream")
        self.assertTrue(self.called(key, once=True), "Downloads encryption key only once")
        self.assertEqual(self.get_mock(key).last_request._request.headers.get("X-FOO"), "BAR")
        self.assertFalse(any(self.called(s) for s in segments.values() if s.num < 1), "Skips first segment")
        self.assertTrue(all(self.called(s) for s in segments.values() if s.num >= 1), "Downloads all remaining segments")
        self.assertEqual(self.get_mock(segments[1]).last_request._request.headers.get("X-FOO"), "BAR")
    def test_hls_encrypted_aes128_with_map(self):
        """Encrypted EXT-X-MAP sections are decrypted and re-emitted per segment."""
        aesKey, aesIv, key = self.gen_key()
        map1 = TagMapEnc(1, namespace=self.id(), key=aesKey, iv=aesIv)
        map2 = TagMapEnc(2, namespace=self.id(), key=aesKey, iv=aesIv)
        self.mock("GET", self.url(map1), content=map1.content)
        self.mock("GET", self.url(map2), content=map2.content)
        # noinspection PyTypeChecker
        thread, segments = self.subject([
            Playlist(0, [key, map1] + [SegmentEnc(num, aesKey, aesIv) for num in range(0, 2)]),
            Playlist(2, [key, map2] + [SegmentEnc(num, aesKey, aesIv) for num in range(2, 4)], end=True)
        ])
        self.await_write(2 * 2 + 2 * 2)
        data = self.await_read(read_all=True)
        self.assertEqual(data, self.content([
            map1, segments[0], map1, segments[1], map2, segments[2], map2, segments[3]
        ], prop="content_plain"))
    def test_hls_encrypted_aes128_key_uri_override(self):
        """hls-segment-key-uri rewrites the playlist's key url before download."""
        aesKey, aesIv, key = self.gen_key(uri="http://real-mocked/{namespace}/encryption.key?foo=bar")
        aesKeyInvalid = bytes([ord(aesKey[i:i + 1]) ^ 0xFF for i in range(16)])
        _, __, key_invalid = self.gen_key(aesKeyInvalid, aesIv, uri="http://mocked/{namespace}/encryption.key?foo=bar")
        # noinspection PyTypeChecker
        thread, segments = self.subject([
            Playlist(0, [key_invalid] + [SegmentEnc(num, aesKey, aesIv) for num in range(0, 4)]),
            Playlist(4, [key_invalid] + [SegmentEnc(num, aesKey, aesIv) for num in range(4, 8)], end=True)
        ], options={"hls-segment-key-uri": "{scheme}://real-{netloc}{path}?{query}"})
        self.await_write(3 + 4)
        data = self.await_read(read_all=True)
        expected = self.content(segments, prop="content_plain", cond=lambda s: s.num >= 1)
        self.assertEqual(data, expected, "Decrypts stream from custom key")
        self.assertFalse(self.called(key_invalid), "Skips encryption key")
        self.assertTrue(self.called(key, once=True), "Downloads custom encryption key")
        self.assertEqual(self.get_mock(key).last_request._request.headers.get("X-FOO"), "BAR")
    @patch("streamlink.stream.hls.log")
    def test_hls_encrypted_aes128_incorrect_block_length(self, mock_log):
        """Trailing bytes that break the AES block size are cut off with a log line."""
        aesKey, aesIv, key = self.gen_key()
        # noinspection PyTypeChecker
        thread, segments = self.subject([
            Playlist(0, [key] + [
                SegmentEnc(0, aesKey, aesIv, append=b"?" * 1),
                SegmentEnc(1, aesKey, aesIv, append=b"?" * (AES.block_size - 1))
            ], end=True)
        ])
        self.await_write(2)
        data = self.await_read(read_all=True)
        expected = self.content(segments, prop="content_plain")
        self.assertEqual(data, expected, "Removes garbage data from segments")
        self.assertIn(call("Cutting off 1 bytes of garbage before decrypting"), mock_log.debug.mock_calls)
        self.assertIn(call("Cutting off 15 bytes of garbage before decrypting"), mock_log.debug.mock_calls)
    def test_hls_encrypted_aes128_incorrect_padding_length(self):
        """A wrong padding length surfaces as the unpad() ValueError."""
        aesKey, aesIv, key = self.gen_key()
        padding = b"\x00" * (AES.block_size - len(b"[0]"))
        self.subject([
            Playlist(0, [key, SegmentEnc(0, aesKey, aesIv, padding=padding)], end=True)
        ])
        # close read thread early
        self.thread.close()
        with self.assertRaises(ValueError) as cm:
            self.await_write()
        self.assertEqual(str(cm.exception), "Padding is incorrect.", "Crypto.Util.Padding.unpad exception")
    def test_hls_encrypted_aes128_incorrect_padding_content(self):
        """Wrong PKCS#7 padding bytes surface as the unpad() ValueError."""
        aesKey, aesIv, key = self.gen_key()
        padding = (b"\x00" * (AES.block_size - len(b"[0]") - 1)) + bytes([AES.block_size])
        self.subject([
            Playlist(0, [key, SegmentEnc(0, aesKey, aesIv, padding=padding)], end=True)
        ])
        # close read thread early
        self.thread.close()
        with self.assertRaises(ValueError) as cm:
            self.await_write()
        self.assertEqual(str(cm.exception), "PKCS#7 padding is incorrect.", "Crypto.Util.Padding.unpad exception")
@patch("streamlink.stream.hls.HLSStreamWorker.wait", Mock(return_value=True))
class TestHlsPlaylistReloadTime(TestMixinStreamHLS, unittest.TestCase):
    """Checks how the worker derives its playlist reload interval from the
    hls-playlist-reload-time option ("default"/"segment"/"live-edge"/number)."""
    # durations chosen so the different strategies yield distinct values
    # (see the expected values asserted below)
    segments = [
        Segment(0, duration=11),
        Segment(1, duration=7),
        Segment(2, duration=5),
        Segment(3, duration=3)
    ]
    def get_session(self, options=None, reload_time=None, *args, **kwargs):
        return super().get_session(dict(options or {}, **{
            "hls-live-edge": 3,
            "hls-playlist-reload-time": reload_time
        }))
    def subject(self, *args, **kwargs):
        """Run the stream and return the reload time the worker settled on."""
        thread, segments = super().subject(start=False, *args, **kwargs)
        # mock the worker thread's _playlist_reload_time method, so that the main thread can wait on its call
        playlist_reload_time_called = Event()
        orig_playlist_reload_time = thread.reader.worker._playlist_reload_time
        def mocked_playlist_reload_time(*args, **kwargs):
            playlist_reload_time_called.set()
            return orig_playlist_reload_time(*args, **kwargs)
        # immediately kill the writer thread as we don't need it and don't want to wait for its queue polling to end
        def mocked_futures_get():
            return None, None
        with patch.object(thread.reader.worker, "_playlist_reload_time", side_effect=mocked_playlist_reload_time), \
                patch.object(thread.reader.writer, "_futures_get", side_effect=mocked_futures_get):
            self.start()
            if not playlist_reload_time_called.wait(timeout=5):  # pragma: no cover
                raise RuntimeError("Missing _playlist_reload_time() call")
            # wait for the worker thread to terminate, so that deterministic assertions can be done about the reload time
            thread.reader.worker.join()
        return thread.reader.worker.playlist_reload_time
    def test_hls_playlist_reload_time_default(self):
        time = self.subject([Playlist(0, self.segments, end=True, targetduration=4)], reload_time="default")
        self.assertEqual(time, 4, "default sets the reload time to the playlist's target duration")
    def test_hls_playlist_reload_time_segment(self):
        time = self.subject([Playlist(0, self.segments, end=True, targetduration=4)], reload_time="segment")
        self.assertEqual(time, 3, "segment sets the reload time to the playlist's last segment")
    def test_hls_playlist_reload_time_segment_no_segments(self):
        time = self.subject([Playlist(0, [], end=True, targetduration=4)], reload_time="segment")
        self.assertEqual(time, 4, "segment sets the reload time to the targetduration if no segments are available")
    def test_hls_playlist_reload_time_segment_no_segments_no_targetduration(self):
        time = self.subject([Playlist(0, [], end=True, targetduration=0)], reload_time="segment")
        self.assertEqual(time, 6, "sets reload time to 6 seconds when no segments and no targetduration are available")
    def test_hls_playlist_reload_time_live_edge(self):
        time = self.subject([Playlist(0, self.segments, end=True, targetduration=4)], reload_time="live-edge")
        self.assertEqual(time, 8, "live-edge sets the reload time to the sum of the number of segments of the live-edge")
    def test_hls_playlist_reload_time_live_edge_no_segments(self):
        time = self.subject([Playlist(0, [], end=True, targetduration=4)], reload_time="live-edge")
        self.assertEqual(time, 4, "live-edge sets the reload time to the targetduration if no segments are available")
    def test_hls_playlist_reload_time_live_edge_no_segments_no_targetduration(self):
        time = self.subject([Playlist(0, [], end=True, targetduration=0)], reload_time="live-edge")
        self.assertEqual(time, 6, "sets reload time to 6 seconds when no segments and no targetduration are available")
    def test_hls_playlist_reload_time_number(self):
        time = self.subject([Playlist(0, self.segments, end=True, targetduration=4)], reload_time="2")
        self.assertEqual(time, 2, "number values override the reload time")
    def test_hls_playlist_reload_time_number_invalid(self):
        time = self.subject([Playlist(0, self.segments, end=True, targetduration=4)], reload_time="0")
        self.assertEqual(time, 4, "invalid number values set the reload time to the playlist's targetduration")
    def test_hls_playlist_reload_time_no_target_duration(self):
        time = self.subject([Playlist(0, self.segments, end=True, targetduration=0)], reload_time="default")
        self.assertEqual(time, 8, "uses the live-edge sum if the playlist is missing the targetduration data")
    def test_hls_playlist_reload_time_no_data(self):
        time = self.subject([Playlist(0, [], end=True, targetduration=0)], reload_time="default")
        self.assertEqual(time, 6, "sets reload time to 6 seconds when no data is available")
@patch("streamlink.stream.hls.log")
@patch("streamlink.stream.hls.HLSStreamWorker.wait", Mock(return_value=True))
class TestHlsPlaylistParseErrors(TestMixinStreamHLS, unittest.TestCase):
    """Error handling when a (re)loaded playlist fails to parse, is a master
    playlist, or contains only I-frames."""
    __stream__ = EventedHLSStream
    class FakePlaylist(typing.NamedTuple):
        # minimal stand-in for the parsed-playlist object the worker inspects
        is_master: bool = False
        iframes_only: bool = False
    class InvalidPlaylist(Playlist):
        # serves content that is not an m3u8 document at all
        def build(self, *args, **kwargs):
            return "invalid"
    def test_generic(self, mock_log):
        """A parse error on the initial playlist closes the stream with an error log."""
        self.subject([self.InvalidPlaylist()])
        self.assertEqual(self.await_read(read_all=True), b"")
        self.await_close()
        self.assertTrue(self.thread.reader.buffer.closed, "Closes the stream on initial playlist parsing error")
        self.assertEqual(mock_log.debug.mock_calls, [call("Reloading playlist")])
        self.assertEqual(mock_log.error.mock_calls, [call("Missing #EXTM3U header")])
    def test_reload(self, mock_log):
        """Parse errors during reload only warn; streaming continues."""
        thread, segments = self.subject([
            Playlist(1, [Segment(0)]),
            self.InvalidPlaylist(),
            self.InvalidPlaylist(),
            Playlist(2, [Segment(2)], end=True)
        ])
        self.await_write(2)
        data = self.await_read(read_all=True)
        self.assertEqual(data, self.content(segments))
        self.close()
        self.await_close()
        self.assertEqual(mock_log.warning.mock_calls, [
            call("Failed to reload playlist: Missing #EXTM3U header"),
            call("Failed to reload playlist: Missing #EXTM3U header")
        ])
    @patch("streamlink.stream.hls.HLSStreamWorker._reload_playlist", Mock(return_value=FakePlaylist(is_master=True)))
    def test_is_master(self, mock_log):
        """Playing a master playlist directly is rejected with a hint."""
        self.subject([Playlist()])
        self.assertEqual(self.await_read(read_all=True), b"")
        self.await_close()
        self.assertTrue(self.thread.reader.buffer.closed, "Closes the stream on initial playlist parsing error")
        self.assertEqual(mock_log.debug.mock_calls, [call("Reloading playlist")])
        self.assertEqual(mock_log.error.mock_calls, [
            call(f"Attempted to play a variant playlist, use 'hls://{self.stream.url}' instead")
        ])
    @patch("streamlink.stream.hls.HLSStreamWorker._reload_playlist", Mock(return_value=FakePlaylist(iframes_only=True)))
    def test_iframes_only(self, mock_log):
        """I-frame-only playlists are rejected as unplayable."""
        self.subject([Playlist()])
        self.assertEqual(self.await_read(read_all=True), b"")
        self.await_close()
        self.assertTrue(self.thread.reader.buffer.closed, "Closes the stream on initial playlist parsing error")
        self.assertEqual(mock_log.debug.mock_calls, [call("Reloading playlist")])
        self.assertEqual(mock_log.error.mock_calls, [call("Streams containing I-frames only are not playable")])
@patch('streamlink.stream.hls.FFMPEGMuxer.is_usable', Mock(return_value=True))
class TestHlsExtAudio(unittest.TestCase):
    """Audio-rendition selection via the hls-audio-select option."""
    @property
    def playlist(self):
        # master playlist fixture containing 'en' and 'es' audio renditions
        with text("hls/test_2.m3u8") as pl:
            return pl.read()
    def run_streamlink(self, playlist, audio_select=None):
        """Parse the variant playlist, optionally with hls-audio-select set."""
        streamlink = Streamlink()
        if audio_select:
            streamlink.set_option("hls-audio-select", audio_select)
        master_stream = HLSStream.parse_variant_playlist(streamlink, playlist)
        return master_stream
    def test_hls_ext_audio_not_selected(self):
        """Without hls-audio-select there are no substreams, just the video url."""
        master_url = "http://mocked/path/master.m3u8"
        with requests_mock.Mocker() as mock:
            mock.get(master_url, text=self.playlist)
            master_stream = self.run_streamlink(master_url)['video']
        with pytest.raises(AttributeError):
            master_stream.substreams
        assert master_stream.url == 'http://mocked/path/playlist.m3u8'
    def test_hls_ext_audio_en(self):
        """Selecting 'en' adds the English audio rendition as a substream."""
        master_url = "http://mocked/path/master.m3u8"
        expected = ['http://mocked/path/playlist.m3u8', 'http://mocked/path/en.m3u8']
        with requests_mock.Mocker() as mock:
            mock.get(master_url, text=self.playlist)
            master_stream = self.run_streamlink(master_url, 'en')
        substreams = master_stream['video'].substreams
        result = [x.url for x in substreams]
        # Check result
        self.assertEqual(result, expected)
    def test_hls_ext_audio_es(self):
        """Selecting 'es' adds the Spanish audio rendition as a substream."""
        master_url = "http://mocked/path/master.m3u8"
        expected = ['http://mocked/path/playlist.m3u8', 'http://mocked/path/es.m3u8']
        with requests_mock.Mocker() as mock:
            mock.get(master_url, text=self.playlist)
            master_stream = self.run_streamlink(master_url, 'es')
        substreams = master_stream['video'].substreams
        result = [x.url for x in substreams]
        # Check result
        self.assertEqual(result, expected)
    def test_hls_ext_audio_all(self):
        """Selecting 'en,es' adds both audio renditions as substreams."""
        master_url = "http://mocked/path/master.m3u8"
        expected = ['http://mocked/path/playlist.m3u8', 'http://mocked/path/en.m3u8', 'http://mocked/path/es.m3u8']
        with requests_mock.Mocker() as mock:
            mock.get(master_url, text=self.playlist)
            master_stream = self.run_streamlink(master_url, 'en,es')
        substreams = master_stream['video'].substreams
        result = [x.url for x in substreams]
        # Check result
        self.assertEqual(result, expected)
    def test_hls_ext_audio_wildcard(self):
        """The '*' wildcard selects every available audio rendition."""
        master_url = "http://mocked/path/master.m3u8"
        expected = ['http://mocked/path/playlist.m3u8', 'http://mocked/path/en.m3u8', 'http://mocked/path/es.m3u8']
        with requests_mock.Mocker() as mock:
            mock.get(master_url, text=self.playlist)
            master_stream = self.run_streamlink(master_url, '*')
        substreams = master_stream['video'].substreams
        result = [x.url for x in substreams]
        # Check result
        self.assertEqual(result, expected)
|
from setuptools import setup, find_packages
import versioneer
setup(
name='q2cli',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
license='BSD-3-Clause',
url='https://qiime2.org',
packages=find_packages(),
include_package_data=True,
scripts=['bin/tab-qiime'],
entry_points='''
[console_scripts]
qiime=q2cli.__main__:qiime
''',
zip_safe=False,
)
|
from django.test import TestCase, RequestFactory
from django.conf import settings
from djangobb_forum.models import Post
from djangobb_forum.util import smiles, convert_text_to_html, paginate
class TestParsers(TestCase):
    """Exercise djangobb_forum.util text-processing helpers (smiles, BBCode)."""

    def setUp(self):
        # Sample inputs: a plain URL, text with smiley codes, and BBCode markup.
        self.data_url = "Lorem ipsum dolor sit amet, consectetur http://djangobb.org/ adipiscing elit."
        self.data_smiles = "Lorem ipsum dolor :| sit amet :) <a href=\"http://djangobb.org/\">http://djangobb.org/</a>"
        self.markdown = ""  # NOTE(review): unused by the tests below
        self.bbcode = "[b]Lorem[/b] [code]ipsum :)[/code] =)"

    def test_smiles(self):
        # Smiley codes become <img> tags under STATIC_URL; the smiley inside
        # the existing anchor text is left alone, and '=)' is not a known code here.
        smiled_data = smiles(self.data_smiles)
        self.assertEqual(smiled_data, u"Lorem ipsum dolor <img src=\"{0}djangobb_forum/img/smilies/neutral.png\" /> sit amet <img src=\"{0}djangobb_forum/img/smilies/smile.png\" /> <a href=\"http://djangobb.org/\">http://djangobb.org/</a>".format(settings.STATIC_URL))

    def test_convert_text_to_html(self):
        # Minimal stand-ins for the user/profile objects the converter inspects.
        class User(object):
            has_perm = lambda s, p: True
        class Profile(object):
            markup = 'bbcode'
        user = User()
        bb_data = convert_text_to_html(self.bbcode, Profile())
        # NOTE(review): expected output has no space before '=)' — presumably
        # the converter swallows whitespace after a [code] block; confirm.
        self.assertEqual(bb_data, '<span class="bb-bold">Lorem</span> <div class="code"><pre>ipsum :)</pre></div>=)')
class TestPaginators(TestCase):
    """Checks djangobb_forum.util.paginate against a small fixture queryset."""

    fixtures = ['test_forum.json']

    def setUp(self):
        self.factory = RequestFactory()
        self.posts = Post.objects.all()[:5]

    def test_paginate(self):
        # 5 posts at 3 per page -> 2 pages total.
        pages, paginator, _ = paginate(self.posts, self.factory.get('/?page=2'), 3)
        self.assertEqual(pages, 2)
        # The first page carries a full page of 3 items.
        _, _, page_items = paginate(self.posts, self.factory.get('/?page=1'), 3)
        self.assertEqual(page_items.count(), 3)
|
from django.db import models
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.signals import post_save
from development.models import Project
from django.contrib.auth.models import User
from .signals import notify_municipal_user
class ModeratedProject(Project):
    """
    Project awaiting moderation. Registered Users not belonging to
    any group have their edits created using a ModeratedProject as
    opposed to a Project. Once approved by a Municipal User / Admin,
    the ModeratedProject updates the corresponding Project.
    """
    # Moderator approved the proposed edits.
    accepted = models.BooleanField(default=False)
    # Moderation finished (either accepted or declined).
    completed = models.BooleanField(default=False)
    project = models.ForeignKey(Project, related_name='moderated_project')
    # Author of the proposed edits; may be anonymous.
    user = models.ForeignKey(User, null=True)

    class Meta:
        verbose_name = _('ModeratedProject')
        verbose_name_plural = _('ModeratedProjects')

    # def __unicode__(self):
    #     return str(self.project.ddname) or 'Untitled'

    def diff(self):
        """
        Return a dict of fields whose proposed value differs from the live
        Project, keyed by the field's verbose_name. Each value holds 'name'
        (title-cased label), 'field_name' (the real attribute name), plus the
        'current' and 'proposed' values.
        """
        project = self.project
        diff = {}
        # Bookkeeping fields that must never be copied onto the live Project.
        frozen_fields = ['last_modified', 'created', 'dd_id', 'moderated_project', 'location']
        # Only compare fields the two models actually share.
        editable_fields = list(set(self._meta.fields).intersection(project._meta.fields))
        for field in editable_fields:
            if field.name in frozen_fields:
                continue
            proposed = getattr(self, field.name, None)
            current = getattr(project, field.name, None)
            if proposed != current:
                diff[field.verbose_name] = {
                    'name': field.verbose_name.title(),
                    # Real attribute name: verbose_name may contain spaces and
                    # therefore cannot be used with setattr().
                    'field_name': field.name,
                    'current': current,
                    'proposed': proposed,
                }
        return diff

    def changed_fields(self):
        """Return the names of fields whose proposed value differs from the Project's."""
        # BUG FIX: this previously read ``self.project_object``, an attribute
        # that does not exist (the FK is ``project``), raising AttributeError.
        project = self.project
        fields = list()
        for field in set(self._meta.fields + project._meta.fields):
            proposed = getattr(self, field.name, None)
            current = getattr(project, field.name, None)
            if proposed != current:
                fields.append(field.name)
        return fields

    def accept(self):
        """Approve the edits, apply them to the live Project, and close moderation."""
        self.accepted = True
        self.completed = True
        self.update_project_with_edits()
        self.save()

    def decline(self):
        """Reject the edits and close moderation without touching the Project."""
        self.accepted = False
        self.completed = True
        self.save()

    def reopen(self):
        """Return this submission to the un-moderated state."""
        self.accepted = False
        self.completed = False
        self.save()

    def update_project_with_edits(self):
        """Copy every proposed change onto the live Project and save it."""
        project = self.project
        for label, change in self.diff().iteritems():
            # BUG FIX: use the real attribute name. The diff key is the
            # verbose_name, which setattr() cannot be trusted with (it may
            # contain spaces or differ from the attribute name entirely).
            setattr(project, change.get('field_name', label), change['proposed'])
        project.save()

    @classmethod
    def new_from_project(cls, project):
        """
        Creates a new instance of ModeratedProject based on an
        existing Project object. The copy is NOT saved.
        """
        moderated_fields = set(cls._meta.get_all_field_names())
        project_fields = set(project._meta.get_all_field_names())
        common_fields = project_fields.intersection(moderated_fields)
        # discard() (not remove()) so a schema change cannot raise KeyError.
        common_fields.discard('dd_id')              # new copy gets no dd_id
        common_fields.discard('moderated_project')  # reverse relation, not copyable
        moderated_project = cls(project=project)
        for field in common_fields:
            setattr(moderated_project, field, getattr(project, field))
        # moderated_project.save()
        return moderated_project
|
"""
Test Program for evaluating the PIL module, writing and reading PNG images
Tom Anderson
"""
from PIL import Image, ImageDraw
import time
# Output file name and square image dimensions.
filename= "pngtest.png"
imsize= 512

# Create a black RGB canvas and draw some primitives on it.
im = Image.new("RGB", (imsize, imsize), "black")
draw = ImageDraw.Draw(im)
# NOTE(review): setfill()/setink() are legacy PIL calls that were removed from
# modern Pillow -- this script requires the original (old) PIL API.
draw.setfill("on")
draw.setink("yellow")
draw.line((0, im.size[1], im.size[0], 0), fill='blue', width=5)
draw.line((0, 0) + im.size, fill='green', width=10)
draw.rectangle((100,50,200,75))
draw.setink("red")
draw.ellipse((300,300,375,375))
del draw

# Write the image out as PNG, then drop the in-memory copy.
im.save(filename, 'PNG')
del im

# Read the file back and report its band names and dimensions.
imread= Image.open(filename, mode='r')
bands= imread.getbands()
print str(bands)
xysize= imread.size
width= xysize[0]
height= xysize[1]
print "Width: " + str(width) + " Height: " + str(height)

# Count the pixels with a non-zero value in each colour channel.
pix = imread.load()
hasred= 0
hasgreen= 0
hasblue= 0
# NOTE(review): range(0, imsize-1) stops at index imsize-2, so the last row
# and column are never inspected -- confirm whether the full image was meant.
for xn in range(0,imsize-1):
    for yn in range(0,imsize-1):
        if pix[xn,yn][0] > 0:
            hasred += 1
        if pix[xn,yn][1] > 0:
            hasgreen += 1
        if pix[xn,yn][2] > 0:
            hasblue += 1
print "Red count = " + str(hasred)
print "Green count = " + str(hasgreen)
print "Blue count = " + str(hasblue)
|
from copy import deepcopy
import numpy as np
from ... import units as u
from ...tests.helper import (catch_warnings,
pytest, quantity_allclose as allclose,
assert_quantity_allclose as assert_allclose)
from ...utils import OrderedDescriptorContainer
from ...utils.compat import NUMPY_LT_1_14
from ...utils.exceptions import AstropyWarning
from .. import representation as r
from ..representation import REPRESENTATION_CLASSES
from .test_representation import unitphysics # this fixture is used below
def setup_function(func):
    # Snapshot the global representation registry so each test starts clean.
    func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES)

def teardown_function(func):
    # Restore the registry captured in setup_function, discarding any
    # representation classes the test registered.
    REPRESENTATION_CLASSES.clear()
    REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG)
def test_frame_attribute_descriptor():
    """ Unit tests of the Attribute descriptor """
    from ..attributes import Attribute

    class TestAttributes(metaclass=OrderedDescriptorContainer):
        attr_none = Attribute()
        attr_2 = Attribute(default=2)
        attr_3_attr2 = Attribute(default=3, secondary_attribute='attr_2')
        attr_none_attr2 = Attribute(default=None, secondary_attribute='attr_2')
        attr_none_nonexist = Attribute(default=None, secondary_attribute='nonexist')

    t = TestAttributes()

    # Defaults
    assert t.attr_none is None
    assert t.attr_2 == 2
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2
    assert t.attr_none_nonexist is None  # No default and non-existent secondary attr

    # Setting values via '_'-prefixed internal vars (as would normally done in __init__)
    t._attr_none = 10
    assert t.attr_none == 10
    t._attr_2 = 20
    assert t.attr_2 == 20
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2
    t._attr_none_attr2 = 40
    assert t.attr_none_attr2 == 40

    # Make sure setting values via public attribute fails
    with pytest.raises(AttributeError) as err:
        t.attr_none = 5
    # BUG FIX: check the message on the exception itself; str(err) stringifies
    # the pytest ExceptionInfo wrapper, which is fragile/deprecated across
    # pytest versions.
    assert 'Cannot set frame attribute' in str(err.value)
def test_frame_subclass_attribute_descriptor():
from ..builtin_frames import FK4
from ..attributes import Attribute, TimeAttribute
from astropy.time import Time
_EQUINOX_B1980 = Time('B1980', scale='tai')
class MyFK4(FK4):
# equinox inherited from FK4, obstime overridden, and newattr is new
obstime = TimeAttribute(default=_EQUINOX_B1980)
newattr = Attribute(default='newattr')
mfk4 = MyFK4()
assert mfk4.equinox.value == 'B1950.000'
assert mfk4.obstime.value == 'B1980.000'
assert mfk4.newattr == 'newattr'
assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr'])
mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world')
assert mfk4.equinox.value == 'J1980.000'
assert mfk4.obstime.value == 'J1990.000'
assert mfk4.newattr == 'world'
def test_create_data_frames():
from ..builtin_frames import ICRS
# from repr
i1 = ICRS(r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc))
i2 = ICRS(r.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg))
# from preferred name
i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)
i4 = ICRS(ra=1*u.deg, dec=2*u.deg)
assert i1.data.lat == i3.data.lat
assert i1.data.lon == i3.data.lon
assert i1.data.distance == i3.data.distance
assert i2.data.lat == i4.data.lat
assert i2.data.lon == i4.data.lon
# now make sure the preferred names work as properties
assert_allclose(i1.ra, i3.ra)
assert_allclose(i2.ra, i4.ra)
assert_allclose(i1.distance, i3.distance)
with pytest.raises(AttributeError):
i1.ra = [11.]*u.deg
def test_create_orderered_data():
from ..builtin_frames import ICRS, Galactic, AltAz
TOL = 1e-10*u.deg
i = ICRS(1*u.deg, 2*u.deg)
assert (i.ra - 1*u.deg) < TOL
assert (i.dec - 2*u.deg) < TOL
g = Galactic(1*u.deg, 2*u.deg)
assert (g.l - 1*u.deg) < TOL
assert (g.b - 2*u.deg) < TOL
a = AltAz(1*u.deg, 2*u.deg)
assert (a.az - 1*u.deg) < TOL
assert (a.alt - 2*u.deg) < TOL
with pytest.raises(TypeError):
ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg)
with pytest.raises(TypeError):
sph = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)
ICRS(sph, 1*u.deg, 2*u.deg)
def test_create_nodata_frames():
from ..builtin_frames import ICRS, FK4, FK5
i = ICRS()
assert len(i.get_frame_attr_names()) == 0
f5 = FK5()
assert f5.equinox == FK5.get_frame_attr_names()['equinox']
f4 = FK4()
assert f4.equinox == FK4.get_frame_attr_names()['equinox']
# obstime is special because it's a property that uses equinox if obstime is not set
assert f4.obstime in (FK4.get_frame_attr_names()['obstime'],
FK4.get_frame_attr_names()['equinox'])
def test_no_data_nonscalar_frames():
from ..builtin_frames import AltAz
from astropy.time import Time
a1 = AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day,
temperature=np.ones((3, 1)) * u.deg_C)
assert a1.obstime.shape == (3, 10)
assert a1.temperature.shape == (3, 10)
assert a1.shape == (3, 10)
with pytest.raises(ValueError) as exc:
AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day,
temperature=np.ones((3,)) * u.deg_C)
assert 'inconsistent shapes' in str(exc)
def test_frame_repr():
from ..builtin_frames import ICRS, FK5
i = ICRS()
assert repr(i) == '<ICRS Frame>'
f5 = FK5()
assert repr(f5).startswith('<FK5 Frame (equinox=')
i2 = ICRS(ra=1*u.deg, dec=2*u.deg)
i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)
assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
' ({})>').format(' 1., 2.' if NUMPY_LT_1_14
else '1., 2.')
assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
' ({})>').format(' 1., 2., 3.' if NUMPY_LT_1_14
else '1., 2., 3.')
# try with arrays
i2 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[2.1, 3.1]*u.deg)
i3 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[-15.6, 17.1]*u.deg, distance=[11., 21.]*u.kpc)
assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
' [{}]>').format('( 1.1, 2.1), ( 2.1, 3.1)'
if NUMPY_LT_1_14 else
'(1.1, 2.1), (2.1, 3.1)')
if NUMPY_LT_1_14:
assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
' [( 1.1, -15.6, 11.), ( 2.1, 17.1, 21.)]>')
else:
assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
' [(1.1, -15.6, 11.), (2.1, 17.1, 21.)]>')
def test_frame_repr_vels():
from ..builtin_frames import ICRS
i = ICRS(ra=1*u.deg, dec=2*u.deg,
pm_ra_cosdec=1*u.marcsec/u.yr, pm_dec=2*u.marcsec/u.yr)
# unit comes out as mas/yr because of the preferred units defined in the
# frame RepresentationMapping
assert repr(i) == ('<ICRS Coordinate: (ra, dec) in deg\n'
' ({0})\n'
' (pm_ra_cosdec, pm_dec) in mas / yr\n'
' ({0})>').format(' 1., 2.' if NUMPY_LT_1_14 else
'1., 2.')
def test_converting_units():
import re
from ..baseframe import RepresentationMapping
from ..builtin_frames import ICRS, FK5
# this is a regular expression that with split (see below) removes what's
# the decimal point to fix rounding problems
rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)')
# Use values that aren't subject to rounding down to X.9999...
i2 = ICRS(ra=2.*u.deg, dec=2.*u.deg)
i2_many = ICRS(ra=[2., 4.]*u.deg, dec=[2., -8.1]*u.deg)
# converting from FK5 to ICRS and back changes the *internal* representation,
# but it should still come out in the preferred form
i4 = i2.transform_to(FK5).transform_to(ICRS)
i4_many = i2_many.transform_to(FK5).transform_to(ICRS)
ri2 = ''.join(rexrepr.split(repr(i2)))
ri4 = ''.join(rexrepr.split(repr(i4)))
assert ri2 == ri4
assert i2.data.lon.unit != i4.data.lon.unit # Internal repr changed
ri2_many = ''.join(rexrepr.split(repr(i2_many)))
ri4_many = ''.join(rexrepr.split(repr(i4_many)))
assert ri2_many == ri4_many
assert i2_many.data.lon.unit != i4_many.data.lon.unit # Internal repr changed
# but that *shouldn't* hold if we turn off units for the representation
class FakeICRS(ICRS):
frame_specific_representation_info = {
'spherical': [RepresentationMapping('lon', 'ra', u.hourangle),
RepresentationMapping('lat', 'dec', None),
RepresentationMapping('distance', 'distance')] # should fall back to default of None unit
}
fi = FakeICRS(i4.data)
ri2 = ''.join(rexrepr.split(repr(i2)))
rfi = ''.join(rexrepr.split(repr(fi)))
rfi = re.sub('FakeICRS', 'ICRS', rfi) # Force frame name to match
assert ri2 != rfi
# the attributes should also get the right units
assert i2.dec.unit == i4.dec.unit
# unless no/explicitly given units
assert i2.dec.unit != fi.dec.unit
assert i2.ra.unit != fi.ra.unit
assert fi.ra.unit == u.hourangle
def test_representation_info():
from ..baseframe import RepresentationMapping
from ..builtin_frames import ICRS
class NewICRS1(ICRS):
frame_specific_representation_info = {
r.SphericalRepresentation: [
RepresentationMapping('lon', 'rara', u.hourangle),
RepresentationMapping('lat', 'decdec', u.degree),
RepresentationMapping('distance', 'distance', u.kpc)]
}
i1 = NewICRS1(rara=10*u.degree, decdec=-12*u.deg, distance=1000*u.pc,
pm_rara_cosdecdec=100*u.mas/u.yr,
pm_decdec=17*u.mas/u.yr,
radial_velocity=10*u.km/u.s)
assert allclose(i1.rara, 10*u.deg)
assert i1.rara.unit == u.hourangle
assert allclose(i1.decdec, -12*u.deg)
assert allclose(i1.distance, 1000*u.pc)
assert i1.distance.unit == u.kpc
assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr)
assert allclose(i1.pm_decdec, 17*u.mas/u.yr)
# this should auto-set the names of UnitSpherical:
i1.set_representation_cls(r.UnitSphericalRepresentation,
s=r.UnitSphericalCosLatDifferential)
assert allclose(i1.rara, 10*u.deg)
assert allclose(i1.decdec, -12*u.deg)
assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr)
assert allclose(i1.pm_decdec, 17*u.mas/u.yr)
# For backwards compatibility, we also support the string name in the
# representation info dictionary:
class NewICRS2(ICRS):
frame_specific_representation_info = {
'spherical': [
RepresentationMapping('lon', 'ang1', u.hourangle),
RepresentationMapping('lat', 'ang2', u.degree),
RepresentationMapping('distance', 'howfar', u.kpc)]
}
i2 = NewICRS2(ang1=10*u.degree, ang2=-12*u.deg, howfar=1000*u.pc)
assert allclose(i2.ang1, 10*u.deg)
assert i2.ang1.unit == u.hourangle
assert allclose(i2.ang2, -12*u.deg)
assert allclose(i2.howfar, 1000*u.pc)
assert i2.howfar.unit == u.kpc
# Test that the differential kwargs get overridden
class NewICRS3(ICRS):
frame_specific_representation_info = {
r.SphericalCosLatDifferential: [
RepresentationMapping('d_lon_coslat', 'pm_ang1', u.hourangle/u.year),
RepresentationMapping('d_lat', 'pm_ang2'),
RepresentationMapping('d_distance', 'vlos', u.kpc/u.Myr)]
}
i3 = NewICRS3(lon=10*u.degree, lat=-12*u.deg, distance=1000*u.pc,
pm_ang1=1*u.mas/u.yr, pm_ang2=2*u.mas/u.yr,
vlos=100*u.km/u.s)
assert allclose(i3.pm_ang1, 1*u.mas/u.yr)
assert i3.pm_ang1.unit == u.hourangle/u.year
assert allclose(i3.pm_ang2, 2*u.mas/u.yr)
assert allclose(i3.vlos, 100*u.km/u.s)
assert i3.vlos.unit == u.kpc/u.Myr
def test_realizing():
from ..builtin_frames import ICRS, FK5
from ...time import Time
rep = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)
i = ICRS()
i2 = i.realize_frame(rep)
assert not i.has_data
assert i2.has_data
f = FK5(equinox=Time('J2001', scale='utc'))
f2 = f.realize_frame(rep)
assert not f.has_data
assert f2.has_data
assert f2.equinox == f.equinox
assert f2.equinox != FK5.get_frame_attr_names()['equinox']
# Check that a nicer error message is returned:
with pytest.raises(TypeError) as excinfo:
f.realize_frame(f.representation)
assert ('Class passed as data instead of a representation' in
excinfo.value.args[0])
def test_replicating():
from ..builtin_frames import ICRS, AltAz
from ...time import Time
i = ICRS(ra=[1]*u.deg, dec=[2]*u.deg)
icopy = i.replicate(copy=True)
irepl = i.replicate(copy=False)
i.data._lat[:] = 0*u.deg
assert np.all(i.data.lat == irepl.data.lat)
assert np.all(i.data.lat != icopy.data.lat)
iclone = i.replicate_without_data()
assert i.has_data
assert not iclone.has_data
aa = AltAz(alt=1*u.deg, az=2*u.deg, obstime=Time('J2000'))
aaclone = aa.replicate_without_data(obstime=Time('J2001'))
assert not aaclone.has_data
assert aa.obstime != aaclone.obstime
assert aa.pressure == aaclone.pressure
assert aa.obswl == aaclone.obswl
def test_getitem():
from ..builtin_frames import ICRS
rep = r.SphericalRepresentation(
[1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc)
i = ICRS(rep)
assert len(i.ra) == 3
iidx = i[1:]
assert len(iidx.ra) == 2
iidx2 = i[0]
assert iidx2.ra.isscalar
def test_transform():
"""
This test just makes sure the transform architecture works, but does *not*
actually test all the builtin transforms themselves are accurate
"""
from ..builtin_frames import ICRS, FK4, FK5, Galactic
from ...time import Time
i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
f = i.transform_to(FK5)
i2 = f.transform_to(ICRS)
assert i2.data.__class__ == r.UnitSphericalRepresentation
assert_allclose(i.ra, i2.ra)
assert_allclose(i.dec, i2.dec)
i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc)
f = i.transform_to(FK5)
i2 = f.transform_to(ICRS)
assert i2.data.__class__ != r.UnitSphericalRepresentation
f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001', scale='utc'))
f4 = f.transform_to(FK4)
f4_2 = f.transform_to(FK4(equinox=f.equinox))
# make sure attributes are copied over correctly
assert f4.equinox == FK4.get_frame_attr_names()['equinox']
assert f4_2.equinox == f.equinox
# make sure self-transforms also work
i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
i2 = i.transform_to(ICRS)
assert_allclose(i.ra, i2.ra)
assert_allclose(i.dec, i2.dec)
f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001', scale='utc'))
f2 = f.transform_to(FK5) # default equinox, so should be *different*
assert f2.equinox == FK5().equinox
with pytest.raises(AssertionError):
assert_allclose(f.ra, f2.ra)
with pytest.raises(AssertionError):
assert_allclose(f.dec, f2.dec)
# finally, check Galactic round-tripping
i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
i2 = i1.transform_to(Galactic).transform_to(ICRS)
assert_allclose(i1.ra, i2.ra)
assert_allclose(i1.dec, i2.dec)
def test_transform_to_nonscalar_nodata_frame():
# https://github.com/astropy/astropy/pull/5254#issuecomment-241592353
from ..builtin_frames import ICRS, FK5
from ...time import Time
times = Time('2016-08-23') + np.linspace(0, 10, 12)*u.day
coo1 = ICRS(ra=[[0.], [10.], [20.]]*u.deg,
dec=[[-30.], [30.], [60.]]*u.deg)
coo2 = coo1.transform_to(FK5(equinox=times))
assert coo2.shape == (3, 12)
def test_sep():
from ..builtin_frames import ICRS
i1 = ICRS(ra=0*u.deg, dec=1*u.deg)
i2 = ICRS(ra=0*u.deg, dec=2*u.deg)
sep = i1.separation(i2)
assert sep.deg == 1
i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc)
i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc)
sep3d = i3.separation_3d(i4)
assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc)
# check that it works even with velocities
i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc,
pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr,
radial_velocity=[5, 6]*u.km/u.s)
i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc,
pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr,
radial_velocity=[5, 6]*u.km/u.s)
sep3d = i5.separation_3d(i6)
assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc)
def test_time_inputs():
"""
Test validation and conversion of inputs for equinox and obstime attributes.
"""
from ...time import Time
from ..builtin_frames import FK4
c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00')
assert c.equinox == Time('J2001.5')
assert c.obstime == Time('2000-01-01 12:00:00')
with pytest.raises(ValueError) as err:
c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5)
assert 'Invalid time input' in str(err)
with pytest.raises(ValueError) as err:
c = FK4(1 * u.deg, 2 * u.deg, obstime='hello')
assert 'Invalid time input' in str(err)
# A vector time should work if the shapes match, but we don't automatically
# broadcast the basic data (just like time).
FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001'])
with pytest.raises(ValueError) as err:
FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001'])
assert 'shape' in str(err)
def test_is_frame_attr_default():
"""
Check that the `is_frame_attr_default` machinery works as expected
"""
from ...time import Time
from ..builtin_frames import FK5
c1 = FK5(ra=1*u.deg, dec=1*u.deg)
c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox'])
c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5'))
assert c1.equinox == c2.equinox
assert c1.equinox != c3.equinox
assert c1.is_frame_attr_default('equinox')
assert not c2.is_frame_attr_default('equinox')
assert not c3.is_frame_attr_default('equinox')
c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg))
c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg))
assert c4.is_frame_attr_default('equinox')
assert not c5.is_frame_attr_default('equinox')
def test_altaz_attributes():
from ...time import Time
from .. import EarthLocation, AltAz
aa = AltAz(1*u.deg, 2*u.deg)
assert aa.obstime is None
assert aa.location is None
aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000')
assert aa2.obstime == Time('J2000')
aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m))
assert isinstance(aa3.location, EarthLocation)
def test_representation():
"""
Test the getter and setter properties for `representation`
"""
from ..builtin_frames import ICRS
# Create the frame object.
icrs = ICRS(ra=1*u.deg, dec=1*u.deg)
data = icrs.data
# Create some representation objects.
icrs_cart = icrs.cartesian
icrs_spher = icrs.spherical
# Testing when `_representation` set to `CartesianRepresentation`.
icrs.representation = r.CartesianRepresentation
assert icrs.representation == r.CartesianRepresentation
assert icrs_cart.x == icrs.x
assert icrs_cart.y == icrs.y
assert icrs_cart.z == icrs.z
assert icrs.data == data
# Testing that an ICRS object in CartesianRepresentation must not have spherical attributes.
for attr in ('ra', 'dec', 'distance'):
with pytest.raises(AttributeError) as err:
getattr(icrs, attr)
assert 'object has no attribute' in str(err)
# Testing when `_representation` set to `CylindricalRepresentation`.
icrs.representation = r.CylindricalRepresentation
assert icrs.representation == r.CylindricalRepresentation
assert icrs.data == data
# Testing setter input using text argument for spherical.
icrs.representation = 'spherical'
assert icrs.representation is r.SphericalRepresentation
assert icrs_spher.lat == icrs.dec
assert icrs_spher.lon == icrs.ra
assert icrs_spher.distance == icrs.distance
assert icrs.data == data
# Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes.
for attr in ('x', 'y', 'z'):
with pytest.raises(AttributeError) as err:
getattr(icrs, attr)
assert 'object has no attribute' in str(err)
# Testing setter input using text argument for cylindrical.
icrs.representation = 'cylindrical'
assert icrs.representation is r.CylindricalRepresentation
assert icrs.data == data
with pytest.raises(ValueError) as err:
icrs.representation = 'WRONG'
assert 'but must be a BaseRepresentation class' in str(err)
with pytest.raises(ValueError) as err:
icrs.representation = ICRS
assert 'but must be a BaseRepresentation class' in str(err)
def test_represent_as():
    from ..builtin_frames import ICRS

    icrs = ICRS(ra=1*u.deg, dec=1*u.deg)

    cart1 = icrs.represent_as('cartesian')
    cart2 = icrs.represent_as(r.CartesianRepresentation)

    # BUG FIX: these comparisons were bare expressions and were silently
    # discarded -- the equalities were never actually checked.
    assert cart1.x == cart2.x
    assert cart1.y == cart2.y
    assert cart1.z == cart2.z

    # now try with velocities
    icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc,
                pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr,
                radial_velocity=1*u.km/u.s)

    # single string
    rep2 = icrs.represent_as('cylindrical')
    assert isinstance(rep2, r.CylindricalRepresentation)
    assert isinstance(rep2.differentials['s'], r.CylindricalDifferential)

    # single class with positional in_frame_units, verify that warning raised
    with catch_warnings() as w:
        icrs.represent_as(r.CylindricalRepresentation, False)
        assert len(w) == 1
        assert w[0].category == AstropyWarning
        assert 'argument position' in str(w[0].message)

    # TODO: this should probably fail in the future once we figure out a better
    # workaround for dealing with UnitSphericalRepresentation's with
    # RadialDifferential's
    # two classes
    # rep2 = icrs.represent_as(r.CartesianRepresentation,
    #                          r.SphericalCosLatDifferential)
    # assert isinstance(rep2, r.CartesianRepresentation)
    # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential)

    with pytest.raises(ValueError):
        icrs.represent_as('odaigahara')
def test_shorthand_representations():
from ..builtin_frames import ICRS
rep = r.CartesianRepresentation([1, 2, 3]*u.pc)
dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
rep = rep.with_differentials(dif)
icrs = ICRS(rep)
sph = icrs.spherical
assert isinstance(sph, r.SphericalRepresentation)
assert isinstance(sph.differentials['s'], r.SphericalDifferential)
sph = icrs.sphericalcoslat
assert isinstance(sph, r.SphericalRepresentation)
assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential)
def test_dynamic_attrs():
from ..builtin_frames import ICRS
c = ICRS(1*u.deg, 2*u.deg)
assert 'ra' in dir(c)
assert 'dec' in dir(c)
with pytest.raises(AttributeError) as err:
c.blahblah
assert "object has no attribute 'blahblah'" in str(err)
with pytest.raises(AttributeError) as err:
c.ra = 1
assert "Cannot set any frame attribute" in str(err)
c.blahblah = 1
assert c.blahblah == 1
def test_nodata_error():
from ..builtin_frames import ICRS
i = ICRS()
with pytest.raises(ValueError) as excinfo:
i.data
assert 'does not have associated data' in str(excinfo.value)
def test_len0_data():
from ..builtin_frames import ICRS
i = ICRS([]*u.deg, []*u.deg)
assert i.has_data
repr(i)
def test_quantity_attributes():
from ..builtin_frames import GCRS
# make sure we can create a GCRS frame with valid inputs
GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s)
# make sure it fails for invalid lovs or vels
with pytest.raises(TypeError):
GCRS(obsgeoloc=[1, 2, 3]) # no unit
with pytest.raises(u.UnitsError):
GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s) # incorrect unit
with pytest.raises(ValueError):
GCRS(obsgeoloc=[1, 3]*u.km) # incorrect shape
def test_eloc_attributes():
from .. import AltAz, ITRS, GCRS, EarthLocation
el = EarthLocation(lon=12.3*u.deg, lat=45.6*u.deg, height=1*u.km)
it = ITRS(r.SphericalRepresentation(lon=12.3*u.deg, lat=45.6*u.deg, distance=1*u.km))
gc = GCRS(ra=12.3*u.deg, dec=45.6*u.deg, distance=6375*u.km)
el1 = AltAz(location=el).location
assert isinstance(el1, EarthLocation)
# these should match *exactly* because the EarthLocation
assert el1.lat == el.lat
assert el1.lon == el.lon
assert el1.height == el.height
el2 = AltAz(location=it).location
assert isinstance(el2, EarthLocation)
# these should *not* match because giving something in Spherical ITRS is
# *not* the same as giving it as an EarthLocation: EarthLocation is on an
# elliptical geoid. So the longitude should match (because flattening is
# only along the z-axis), but latitude should not. Also, height is relative
# to the *surface* in EarthLocation, but the ITRS distance is relative to
# the center of the Earth
assert not allclose(el2.lat, it.spherical.lat)
assert allclose(el2.lon, it.spherical.lon)
assert el2.height < -6000*u.km
el3 = AltAz(location=gc).location
# GCRS inputs implicitly get transformed to ITRS and then onto
# EarthLocation's elliptical geoid. So both lat and lon shouldn't match
assert isinstance(el3, EarthLocation)
assert not allclose(el3.lat, gc.dec)
assert not allclose(el3.lon, gc.ra)
assert np.abs(el3.height) < 500*u.km
def test_equivalent_frames():
from .. import SkyCoord
from ..builtin_frames import ICRS, FK4, FK5, AltAz
i = ICRS()
i2 = ICRS(1*u.deg, 2*u.deg)
assert i.is_equivalent_frame(i)
assert i.is_equivalent_frame(i2)
with pytest.raises(TypeError):
assert i.is_equivalent_frame(10)
with pytest.raises(TypeError):
assert i2.is_equivalent_frame(SkyCoord(i2))
f1 = FK5()
f2 = FK5(1*u.deg, 2*u.deg, equinox='J2000')
f3 = FK5(equinox='J2010')
f4 = FK4(equinox='J2010')
assert f1.is_equivalent_frame(f1)
assert not i.is_equivalent_frame(f1)
assert f1.is_equivalent_frame(f2)
assert not f1.is_equivalent_frame(f3)
assert not f3.is_equivalent_frame(f4)
aa1 = AltAz()
aa2 = AltAz(obstime='J2010')
assert aa2.is_equivalent_frame(aa2)
assert not aa1.is_equivalent_frame(i)
assert not aa1.is_equivalent_frame(aa2)
def test_representation_subclass():
# Regression test for #3354
from ..builtin_frames import FK5
# Normally when instantiating a frame without a distance the frame will try
# and use UnitSphericalRepresentation internally instead of
# SphericalRepresentation.
frame = FK5(representation=r.SphericalRepresentation, ra=32 * u.deg, dec=20 * u.deg)
assert type(frame._data) == r.UnitSphericalRepresentation
assert frame.representation == r.SphericalRepresentation
# If using a SphericalRepresentation class this used to not work, so we
# test here that this is now fixed.
class NewSphericalRepresentation(r.SphericalRepresentation):
attr_classes = r.SphericalRepresentation.attr_classes
frame = FK5(representation=NewSphericalRepresentation, lon=32 * u.deg, lat=20 * u.deg)
assert type(frame._data) == r.UnitSphericalRepresentation
assert frame.representation == NewSphericalRepresentation
# A similar issue then happened in __repr__ with subclasses of
# SphericalRepresentation.
assert repr(frame) == ("<FK5 Coordinate (equinox=J2000.000): (lon, lat) in deg\n"
" ({})>").format(' 32., 20.' if NUMPY_LT_1_14
else '32., 20.')
# A more subtle issue is when specifying a custom
# UnitSphericalRepresentation subclass for the data and
# SphericalRepresentation or a subclass for the representation.
class NewUnitSphericalRepresentation(r.UnitSphericalRepresentation):
attr_classes = r.UnitSphericalRepresentation.attr_classes
def __repr__(self):
return "<NewUnitSphericalRepresentation: spam spam spam>"
frame = FK5(NewUnitSphericalRepresentation(lon=32 * u.deg, lat=20 * u.deg),
representation=NewSphericalRepresentation)
assert repr(frame) == "<FK5 Coordinate (equinox=J2000.000): spam spam spam>"
def test_getitem_representation():
"""
Make sure current representation survives __getitem__ even if different
from data representation.
"""
from ..builtin_frames import ICRS
c = ICRS([1, 1] * u.deg, [2, 2] * u.deg)
c.representation = 'cartesian'
assert c[0].representation is r.CartesianRepresentation
def test_component_error_useful():
    """Data-less frames should raise helpful errors for component access.

    Accessing a valid coordinate component on a frame without data must
    produce a message about the missing data, while unknown attributes
    (including the underlying representation's own component names) raise
    plain AttributeErrors.
    """
    from ..builtin_frames import ICRS

    frame = ICRS()

    with pytest.raises(ValueError) as data_err:
        frame.ra
    assert 'does not have associated data' in str(data_err.value)

    with pytest.raises(AttributeError) as foo_err:
        frame.foobar
    assert "object has no attribute 'foobar'" in str(foo_err.value)

    # 'lon' is the representation's component name, *not* the frame's, so it
    # behaves like any other missing attribute rather than a missing-data error.
    with pytest.raises(AttributeError) as lon_err:
        frame.lon
    assert "object has no attribute 'lon'" in str(lon_err.value)
def test_cache_clear():
    """``frame.cache.clear()`` must empty the representation cache."""
    from ..builtin_frames import ICRS

    frame = ICRS(1*u.deg, 2*u.deg)

    # repr() populates the cache with an in-frame-units representation.
    repr(frame)
    assert len(frame.cache['representation']) == 2

    frame.cache.clear()
    assert len(frame.cache['representation']) == 0
def test_inplace_array():
    """In-place edits to array data become visible once the cache is cleared."""
    from ..builtin_frames import ICRS

    frame = ICRS([[1, 2], [3, 4]]*u.deg, [[10, 20], [30, 40]]*u.deg)

    # repr() fills the representation cache (data plus in-frame-units copy).
    repr(frame)
    assert len(frame.cache['representation']) == 2

    # Mutate the underlying data in place, then drop the stale cache.
    frame.data.lon[:, 0] = [100, 200]*u.deg
    frame.cache.clear()

    # The recomputed components must reflect the modified values.
    assert_allclose(frame.ra, [[100, 2], [200, 4]]*u.deg)
    assert_allclose(frame.dec, [[10, 20], [30, 40]]*u.deg)
def test_inplace_change():
    """In-place edits to scalar data become visible once the cache is cleared."""
    from ..builtin_frames import ICRS

    frame = ICRS(1*u.deg, 2*u.deg)

    # repr() fills the representation cache (data plus in-frame-units copy).
    repr(frame)
    assert len(frame.cache['representation']) == 2

    # Mutate the underlying data in place, then drop the stale cache.
    frame.data.lon[()] = 10*u.deg
    frame.cache.clear()

    # The recomputed components must reflect the modified value.
    assert frame.ra == 10 * u.deg
    assert frame.dec == 2 * u.deg
def test_representation_with_multiple_differentials():
    """Frames must reject representations carrying multiple differentials."""
    from ..builtin_frames import ICRS

    dif1 = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    dif2 = r.CartesianDifferential([1, 2, 3]*u.km/u.s**2)
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc,
                                    differentials={'s': dif1, 's2': dif2})

    # A representation holding more than one differential ('s' and 's2')
    # cannot be stored in a frame, so construction must raise.
    with pytest.raises(ValueError):
        ICRS(rep)
def test_representation_arg_backwards_compatibility():
    """The legacy ``representation`` argument, the new ``representation_type``
    argument and the string name must all produce equivalent frames.

    TODO: this test can be removed when the `representation` argument is
    removed from the BaseCoordinateFrame initializer.
    """
    from ..builtin_frames import ICRS

    c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)
    c2 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation=r.CartesianRepresentation)
    c3 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation='cartesian')

    assert c1.x == c2.x
    assert c1.y == c2.y
    assert c1.z == c2.z

    assert c1.x == c3.x
    assert c1.y == c3.y
    assert c1.z == c3.z

    # Both attribute spellings refer to the same setting.
    assert c1.representation == c1.representation_type

    # Passing both spellings at once is ambiguous and must fail.
    with pytest.raises(ValueError):
        ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
             representation='cartesian',
             representation_type='cartesian')
def test_missing_component_error_names():
    """
    This test checks that the component names are frame component names, not
    representation or differential names, when referenced in an exception raised
    when not passing in enough data. For example:

        ICRS(ra=10*u.deg)

    should state:

        TypeError: __init__() missing 1 required positional argument: 'dec'
    """
    from ..builtin_frames import ICRS

    with pytest.raises(TypeError) as e:
        ICRS(ra=150 * u.deg)
    # Inspect the exception's message via ``e.value``; ``str(e)`` is the
    # ExceptionInfo/location representation, not the error message.
    assert "missing 1 required positional argument: 'dec'" in str(e.value)

    with pytest.raises(TypeError) as e:
        ICRS(ra=150*u.deg, dec=-11*u.deg,
             pm_ra=100*u.mas/u.yr, pm_dec=10*u.mas/u.yr)
    assert "pm_ra_cosdec" in str(e.value)
def test_non_spherical_representation_unit_creation(unitphysics):
    """Frames whose default representation is non-spherical should still fall
    back to a unit representation (supplied by the ``unitphysics`` fixture)
    when no distance is given."""
    from ..builtin_frames import ICRS

    class PhysicsICRS(ICRS):
        default_representation = r.PhysicsSphericalRepresentation

    # With a distance, the full PhysicsSphericalRepresentation is used.
    pic = PhysicsICRS(phi=1*u.deg, theta=25*u.deg, r=1*u.kpc)
    assert isinstance(pic.data, r.PhysicsSphericalRepresentation)

    # Without a distance, the unit-sphere equivalent is used instead.
    picu = PhysicsICRS(phi=1*u.deg, theta=25*u.deg)
    assert isinstance(picu.data, unitphysics)
|
"""
Simple GRU RNNs for solving the QA tasks from:
"Towards AI-Complete Question Answering: A Set of Prerequisite Toy Tasks"
J. Weston, A. Bordes, S. Chopra, T. Mikolov, A. Rush
http://arxiv.org/abs/1502.05698
Inspired by (and approximately replicating) the blog post by Stephen Merity:
http://smerity.com/articles/2015/keras_qa.html
This blog post was turned into a Keras example:
https://github.com/fchollet/keras/blob/master/examples/babi_rnn.py
"""
from collections import OrderedDict
import theano
import numpy as np
from dagbldr.datasets import fetch_babi
from dagbldr.utils import make_embedding_minibatch, make_minibatch
from dagbldr.utils import add_embedding_datasets_to_graph, add_datasets_to_graph
from dagbldr.utils import fixed_n_epochs_trainer
from dagbldr.utils import get_params_and_grads
from dagbldr.nodes import gru_recurrent_layer, softmax_layer
from dagbldr.nodes import embedding_layer, categorical_crossentropy
from dagbldr.optimizers import adadelta
# Task 2 of the bAbI QA dataset: stories, queries, one-hot answer targets,
# plus precomputed train/validation index splits and the vocabulary size.
babi = fetch_babi(task_number=2)
X_story = babi["stories"]
X_query = babi["queries"]
y_answer = babi["target"]
train_indices = babi["train_indices"]
valid_indices = babi["valid_indices"]
vocab_size = babi["vocabulary_size"]

random_state = np.random.RandomState(1999)
graph = OrderedDict()

# Hyperparameters: minibatch size, embedding width, GRU hidden width.
minibatch_size = 32
n_emb = 50
n_hid = 100

# Build one example minibatch (values + masks) so symbolic inputs of the
# right structure can be registered on the graph.
X_story_mb, X_story_mask = make_embedding_minibatch(
    X_story, slice(0, minibatch_size))
X_query_mb, X_query_mask = make_embedding_minibatch(
    X_query, slice(0, minibatch_size))

embedding_datasets = [X_story_mb, X_query_mb]
masks = [X_story_mask, X_query_mask]
r = add_embedding_datasets_to_graph(embedding_datasets, masks, "babi_data",
                                    graph)
(X_story_syms, X_query_syms), (X_story_mask_sym, X_query_mask_sym) = r
y_sym = add_datasets_to_graph([y_answer], ["y"], graph)

# Story pathway: embedding -> mask out padding -> GRU.
l1_story = embedding_layer(X_story_syms, vocab_size, n_emb, graph, 'l1_story',
                           random_state=random_state)
masked_story = X_story_mask_sym.dimshuffle(0, 1, 'x') * l1_story
h_story = gru_recurrent_layer([masked_story], X_story_mask_sym, n_hid, graph,
                              'story_rec', random_state)

# Query pathway: embedding -> GRU.
l1_query = embedding_layer(X_query_syms, vocab_size, n_emb, graph, 'l1_query',
                           random_state)
h_query = gru_recurrent_layer([l1_query], X_query_mask_sym, n_hid, graph,
                              'query_rec', random_state)

# Combine the final hidden states of both GRUs to predict the answer word.
y_pred = softmax_layer([h_query[-1], h_story[-1]], graph, 'y_pred',
                       y_answer.shape[1], random_state=random_state)
cost = categorical_crossentropy(y_pred, y_sym).mean()

# Gradients and adadelta updates for all graph parameters.
params, grads = get_params_and_grads(graph, cost)
opt = adadelta(params)
updates = opt.updates(params, grads)

print("Compiling fit...")
fit_function = theano.function(X_story_syms + [X_story_mask_sym] + X_query_syms
                               + [X_query_mask_sym, y_sym], [cost],
                               updates=updates)
print("Compiling cost...")
cost_function = theano.function(X_story_syms + [X_story_mask_sym] + X_query_syms
                                + [X_query_mask_sym, y_sym], [cost])
print("Compiling predict...")
predict_function = theano.function(X_story_syms + [X_story_mask_sym] +
                                   X_query_syms + [X_query_mask_sym], [y_pred])
def error(*args):
    """Return the classification error rate for one evaluation call.

    The final positional argument is the one-hot target array; everything
    before it is forwarded unchanged to ``predict_function``.  Returns
    ``1 - accuracy`` as a float in [0, 1].
    """
    y = args[-1]
    # predict_function returns a single-element list; unwrap the predictions.
    y_pred = predict_function(*args[:-1])[0]
    return 1 - np.mean(np.argmax(y_pred, axis=1).ravel()
                       == np.argmax(y, axis=1).ravel())
# checkpoint_dict is filled in by the trainer (e.g. with best parameters).
checkpoint_dict = {}
# Train for a fixed 100 epochs, reporting "cost" on training minibatches and
# the error() function above as "valid_error" on the validation split.
epoch_results = fixed_n_epochs_trainer(
    fit_function, error, train_indices, valid_indices, checkpoint_dict,
    [X_story, X_query, y_answer],
    minibatch_size,
    list_of_minibatch_functions=[make_embedding_minibatch,
                                 make_embedding_minibatch,
                                 make_minibatch],
    list_of_train_output_names=["cost"],
    valid_output_name="valid_error", n_epochs=100)
|
import base64
import collections
import json
import unittest
from decimal import Decimal
from django import forms
from django.core.exceptions import ValidationError
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.test import SimpleTestCase, TestCase
from django.utils.safestring import SafeData, mark_safe
from django.utils.translation import gettext_lazy as __
from wagtail.core import blocks
from wagtail.core.blocks.field_block import FieldBlockAdapter
from wagtail.core.blocks.list_block import ListBlockAdapter
from wagtail.core.blocks.static_block import StaticBlockAdapter
from wagtail.core.blocks.stream_block import StreamBlockAdapter
from wagtail.core.blocks.struct_block import StructBlockAdapter
from wagtail.core.models import Page
from wagtail.core.rich_text import RichText
from wagtail.tests.testapp.blocks import LinkBlock as CustomLinkBlock
from wagtail.tests.testapp.blocks import SectionBlock
from wagtail.tests.testapp.models import EventPage, SimplePage
from wagtail.tests.utils import WagtailTestUtils
class FooStreamBlock(blocks.StreamBlock):
    """Test StreamBlock whose validation requires some child to equal "foo"."""

    text = blocks.CharBlock()
    error = 'At least one block must say "foo"'

    def clean(self, value):
        cleaned = super().clean(value)
        has_foo = any(child.value == "foo" for child in cleaned)
        if not has_foo:
            raise blocks.StreamBlockValidationError(
                non_block_errors=ErrorList([self.error])
            )
        return cleaned
class ContextCharBlock(blocks.CharBlock):
    # CharBlock variant that upper-cases its value before template rendering.
    def get_context(self, value, parent_context=None):
        """Upper-case *value*, then build the context.

        Note the two-argument ``super(blocks.CharBlock, self)``: it
        deliberately skips CharBlock's own get_context and calls the
        implementation above it in the MRO.
        """
        value = str(value).upper()
        return super(blocks.CharBlock, self).get_context(value, parent_context)
class TestFieldBlock(WagtailTestUtils, SimpleTestCase):
    """Tests for FieldBlock: rendering, telepath adaptation, form state and
    (de)serialisation independence."""

    def test_charfield_render(self):
        block = blocks.CharBlock()
        html = block.render("Hello world!")
        self.assertEqual(html, "Hello world!")

    def test_charfield_render_with_template(self):
        block = blocks.CharBlock(template="tests/blocks/heading_block.html")
        html = block.render("Hello world!")
        self.assertEqual(html, "<h1>Hello world!</h1>")

    def test_charblock_adapter(self):
        # js_args is (block name, widget instance, meta dict).
        block = blocks.CharBlock(help_text="Some helpful text")
        block.set_name("test_block")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(js_args[0], "test_block")
        self.assertIsInstance(js_args[1], forms.TextInput)
        self.assertEqual(
            js_args[2],
            {
                "label": "Test block",
                "helpText": "Some helpful text",
                "required": True,
                "icon": "placeholder",
                "classname": "field char_field widget-text_input fieldname-test_block",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_charblock_adapter_form_classname(self):
        """
        Meta data test for FormField; this checks if both the meta values
        form_classname and classname are accepted and are rendered
        in the form
        """
        block = blocks.CharBlock(form_classname="special-char-formclassname")
        block.set_name("test_block")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertIn(" special-char-formclassname", js_args[2]["classname"])

        # Checks if it is backward compatible with classname
        block_with_classname = blocks.CharBlock(classname="special-char-classname")
        block_with_classname.set_name("test_block")
        js_args = FieldBlockAdapter().js_args(block_with_classname)
        self.assertIn(" special-char-classname", js_args[2]["classname"])

    def test_charfield_render_with_template_with_extra_context(self):
        block = ContextCharBlock(template="tests/blocks/heading_block.html")
        html = block.render(
            "Bonjour le monde!",
            context={
                "language": "fr",
            },
        )
        self.assertEqual(html, '<h1 lang="fr">BONJOUR LE MONDE!</h1>')

    def test_charfield_get_form_state(self):
        block = blocks.CharBlock()
        form_state = block.get_form_state("Hello world!")
        self.assertEqual(form_state, "Hello world!")

    def test_charfield_searchable_content(self):
        block = blocks.CharBlock()
        content = block.get_searchable_content("Hello world!")
        self.assertEqual(content, ["Hello world!"])

    def test_charfield_with_validator(self):
        def validate_is_foo(value):
            if value != "foo":
                raise ValidationError("Value must be 'foo'")

        block = blocks.CharBlock(validators=[validate_is_foo])
        with self.assertRaises(ValidationError):
            block.clean("bar")

    def test_choicefield_render(self):
        class ChoiceBlock(blocks.FieldBlock):
            field = forms.ChoiceField(
                choices=(
                    ("choice-1", "Choice 1"),
                    ("choice-2", "Choice 2"),
                )
            )

        block = ChoiceBlock()
        html = block.render("choice-2")
        self.assertEqual(html, "choice-2")

    def test_adapt_custom_choicefield(self):
        class ChoiceBlock(blocks.FieldBlock):
            field = forms.ChoiceField(
                choices=(
                    ("choice-1", "Choice 1"),
                    ("choice-2", "Choice 2"),
                )
            )

        block = ChoiceBlock()
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(js_args[0], "test_choiceblock")
        self.assertIsInstance(js_args[1], forms.Select)
        self.assertEqual(
            js_args[1].choices,
            [
                ("choice-1", "Choice 1"),
                ("choice-2", "Choice 2"),
            ],
        )
        self.assertEqual(
            js_args[2],
            {
                "label": "Test choiceblock",
                "required": True,
                "icon": "placeholder",
                "classname": "field choice_field widget-select fieldname-test_choiceblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_searchable_content(self):
        """
        FieldBlock should not return anything for `get_searchable_content` by
        default. Subclasses are free to override it and provide relevant
        content.
        """

        class CustomBlock(blocks.FieldBlock):
            field = forms.CharField(required=True)

        block = CustomBlock()
        self.assertEqual(block.get_searchable_content("foo bar"), [])

    def test_form_handling_is_independent_of_serialisation(self):
        class Base64EncodingCharBlock(blocks.CharBlock):
            """A CharBlock with a deliberately perverse JSON (de)serialisation format
            so that it visibly blows up if we call to_python / get_prep_value where we shouldn't"""

            def to_python(self, jsonish_value):
                # decode as base64 on the way out of the JSON serialisation
                return base64.b64decode(jsonish_value)

            def get_prep_value(self, native_value):
                # encode as base64 on the way into the JSON serialisation
                return base64.b64encode(native_value)

        block = Base64EncodingCharBlock()
        form_state = block.get_form_state("hello world")
        self.assertEqual(form_state, "hello world")

    def test_prepare_value_called(self):
        """
        Check that Field.prepare_value is called before sending the value to
        the widget for rendering.

        Actual real-world use case: A Youtube field that produces YoutubeVideo
        instances from IDs, but videos are entered using their full URLs.
        """

        class PrefixWrapper:
            prefix = "http://example.com/"

            def __init__(self, value):
                self.value = value

            def with_prefix(self):
                return self.prefix + self.value

            @classmethod
            def from_prefixed(cls, value):
                if not value.startswith(cls.prefix):
                    raise ValueError
                return cls(value[len(cls.prefix) :])

            def __eq__(self, other):
                return self.value == other.value

        class PrefixField(forms.Field):
            def clean(self, value):
                value = super().clean(value)
                return PrefixWrapper.from_prefixed(value)

            def prepare_value(self, value):
                return value.with_prefix()

        class PrefixedBlock(blocks.FieldBlock):
            def __init__(self, required=True, help_text="", **kwargs):
                super().__init__(**kwargs)
                self.field = PrefixField(required=required, help_text=help_text)

        block = PrefixedBlock()

        # Check that the form value is serialized with a prefix correctly
        value = PrefixWrapper("foo")
        form_state = block.get_form_state(value)
        self.assertEqual(form_state, "http://example.com/foo")

        # Check that the value was coerced back to a PrefixValue
        data = {"url": "http://example.com/bar"}
        new_value = block.clean(block.value_from_datadict(data, {}, "url"))
        self.assertEqual(new_value, PrefixWrapper("bar"))
class TestIntegerBlock(unittest.TestCase):
    """Coercion and validation behaviour of IntegerBlock."""

    def test_type(self):
        result = blocks.IntegerBlock().value_from_form(1234)
        self.assertEqual(type(result), int)

    def test_render(self):
        result = blocks.IntegerBlock().value_from_form(1234)
        self.assertEqual(result, 1234)

    def test_render_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.IntegerBlock().clean("")

    def test_render_max_value_validation(self):
        with self.assertRaises(ValidationError):
            blocks.IntegerBlock(max_value=20).clean(25)

    def test_render_min_value_validation(self):
        with self.assertRaises(ValidationError):
            blocks.IntegerBlock(min_value=20).clean(10)

    def test_render_with_validator(self):
        def validate_is_even(value):
            if value % 2 > 0:
                raise ValidationError("Value must be even")

        with self.assertRaises(ValidationError):
            blocks.IntegerBlock(validators=[validate_is_even]).clean(3)
class TestEmailBlock(unittest.TestCase):
    """Rendering and validation behaviour of EmailBlock."""

    def test_render(self):
        rendered = blocks.EmailBlock().render("example@email.com")
        self.assertEqual(rendered, "example@email.com")

    def test_render_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.EmailBlock().clean("")

    def test_format_validation(self):
        # No '@' present, so this is not a valid address.
        with self.assertRaises(ValidationError):
            blocks.EmailBlock().clean("example.email.com")

    def test_render_with_validator(self):
        def validate_is_example_domain(value):
            if not value.endswith("@example.com"):
                raise ValidationError("E-mail address must end in @example.com")

        block = blocks.EmailBlock(validators=[validate_is_example_domain])
        with self.assertRaises(ValidationError):
            block.clean("foo@example.net")
class TestBooleanBlock(unittest.TestCase):
    """BooleanBlock.get_form_state must pass booleans through unchanged."""

    def test_get_form_state(self):
        block = blocks.BooleanBlock(required=False)
        for flag in (True, False):
            self.assertIs(block.get_form_state(flag), flag)
class TestBlockQuoteBlock(unittest.TestCase):
    """Rendering and validation behaviour of BlockQuoteBlock."""

    def test_render(self):
        rendered = blocks.BlockQuoteBlock().render("Now is the time...")
        self.assertEqual(rendered, "<blockquote>Now is the time...</blockquote>")

    def test_render_with_validator(self):
        def validate_is_proper_story(value):
            if not value.startswith("Once upon a time"):
                raise ValidationError("Value must be a proper story")

        block = blocks.BlockQuoteBlock(validators=[validate_is_proper_story])
        with self.assertRaises(ValidationError):
            block.clean("A long, long time ago")
class TestFloatBlock(TestCase):
    """Coercion and validation behaviour of FloatBlock."""

    def test_type(self):
        result = blocks.FloatBlock().value_from_form(float(1.63))
        self.assertEqual(type(result), float)

    def test_render(self):
        value = float(1.63)
        self.assertEqual(blocks.FloatBlock().value_from_form(value), value)

    def test_raises_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.FloatBlock().clean("")

    def test_raises_max_value_validation_error(self):
        with self.assertRaises(ValidationError):
            blocks.FloatBlock(max_value=20).clean("20.01")

    def test_raises_min_value_validation_error(self):
        with self.assertRaises(ValidationError):
            blocks.FloatBlock(min_value=20).clean("19.99")

    def test_render_with_validator(self):
        def validate_is_even(value):
            if value % 2 > 0:
                raise ValidationError("Value must be even")

        with self.assertRaises(ValidationError):
            blocks.FloatBlock(validators=[validate_is_even]).clean("3.0")
class TestDecimalBlock(TestCase):
    """Coercion and validation behaviour of DecimalBlock."""

    def test_type(self):
        block = blocks.DecimalBlock()
        block_val = block.value_from_form(Decimal("1.63"))
        self.assertEqual(type(block_val), Decimal)

    def test_render(self):
        block = blocks.DecimalBlock()
        # Construct from a string, as in test_type: Decimal(1.63) would carry
        # the binary float's inexact value (1.62999...), not the intended 1.63.
        test_val = Decimal("1.63")
        block_val = block.value_from_form(test_val)
        self.assertEqual(block_val, test_val)

    def test_raises_required_error(self):
        block = blocks.DecimalBlock()
        with self.assertRaises(ValidationError):
            block.clean("")

    def test_raises_max_value_validation_error(self):
        block = blocks.DecimalBlock(max_value=20)
        with self.assertRaises(ValidationError):
            block.clean("20.01")

    def test_raises_min_value_validation_error(self):
        block = blocks.DecimalBlock(min_value=20)
        with self.assertRaises(ValidationError):
            block.clean("19.99")

    def test_render_with_validator(self):
        def validate_is_even(value):
            if value % 2 > 0:
                raise ValidationError("Value must be even")

        block = blocks.DecimalBlock(validators=[validate_is_even])
        with self.assertRaises(ValidationError):
            block.clean("3.0")
class TestRegexBlock(TestCase):
    """Validation behaviour of RegexBlock, including custom error messages."""

    def test_render(self):
        block = blocks.RegexBlock(regex=r"^[0-9]{3}$")
        test_val = "123"
        block_val = block.value_from_form(test_val)
        self.assertEqual(block_val, test_val)

    def test_raises_required_error(self):
        block = blocks.RegexBlock(regex=r"^[0-9]{3}$")
        with self.assertRaises(ValidationError) as context:
            block.clean("")
        self.assertIn("This field is required.", context.exception.messages)

    def test_raises_custom_required_error(self):
        # A custom 'required' message overrides the default one.
        test_message = "Oops, you missed a bit."
        block = blocks.RegexBlock(
            regex=r"^[0-9]{3}$",
            error_messages={
                "required": test_message,
            },
        )
        with self.assertRaises(ValidationError) as context:
            block.clean("")
        self.assertIn(test_message, context.exception.messages)

    def test_raises_validation_error(self):
        block = blocks.RegexBlock(regex=r"^[0-9]{3}$")
        with self.assertRaises(ValidationError) as context:
            block.clean("[/]")
        self.assertIn("Enter a valid value.", context.exception.messages)

    def test_raises_custom_error_message(self):
        # A custom 'invalid' message overrides the default one.
        test_message = "Not a valid library card number."
        block = blocks.RegexBlock(
            regex=r"^[0-9]{3}$", error_messages={"invalid": test_message}
        )
        with self.assertRaises(ValidationError) as context:
            block.clean("[/]")
        self.assertIn(test_message, context.exception.messages)

    def test_render_with_validator(self):
        def validate_is_foo(value):
            if value != "foo":
                raise ValidationError("Value must be 'foo'")

        block = blocks.RegexBlock(regex=r"^.*$", validators=[validate_is_foo])
        with self.assertRaises(ValidationError):
            block.clean("bar")
class TestRichTextBlock(TestCase):
    """Defaults, rendering, adaptation, validation and searchable content of
    RichTextBlock."""

    fixtures = ["test.json"]

    def test_get_default_with_fallback_value(self):
        default_value = blocks.RichTextBlock().get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, "")

    def test_get_default_with_default_none(self):
        default_value = blocks.RichTextBlock(default=None).get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, "")

    def test_get_default_with_empty_string(self):
        default_value = blocks.RichTextBlock(default="").get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, "")

    def test_get_default_with_nonempty_string(self):
        default_value = blocks.RichTextBlock(default="<p>foo</p>").get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, "<p>foo</p>")

    def test_get_default_with_richtext_value(self):
        default_value = blocks.RichTextBlock(
            default=RichText("<p>foo</p>")
        ).get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, "<p>foo</p>")

    def test_render(self):
        # Page links in rich text are expanded to real URLs when rendering;
        # page 4 comes from the test.json fixture.
        block = blocks.RichTextBlock()
        value = RichText('<p>Merry <a linktype="page" id="4">Christmas</a>!</p>')
        result = block.render(value)
        self.assertEqual(
            result, '<p>Merry <a href="/events/christmas/">Christmas</a>!</p>'
        )

    def test_adapter(self):
        from wagtail.tests.testapp.rich_text import CustomRichTextArea

        block = blocks.RichTextBlock(editor="custom")
        block.set_name("test_richtextblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(js_args[0], "test_richtextblock")
        self.assertIsInstance(js_args[1], CustomRichTextArea)
        self.assertEqual(
            js_args[2],
            {
                "classname": "field char_field widget-custom_rich_text_area fieldname-test_richtextblock",
                "icon": "doc-full",
                "label": "Test richtextblock",
                "required": True,
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_adapter_with_draftail(self):
        from wagtail.admin.rich_text import DraftailRichTextArea

        block = blocks.RichTextBlock()
        block.set_name("test_richtextblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(js_args[0], "test_richtextblock")
        self.assertIsInstance(js_args[1], DraftailRichTextArea)
        self.assertEqual(
            js_args[2],
            {
                "label": "Test richtextblock",
                "required": True,
                "icon": "doc-full",
                "classname": "field char_field widget-draftail_rich_text_area fieldname-test_richtextblock",
                "showAddCommentButton": False,  # Draftail manages its own comments
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_validate_required_richtext_block(self):
        block = blocks.RichTextBlock()
        with self.assertRaises(ValidationError):
            block.clean(RichText(""))

    def test_validate_non_required_richtext_block(self):
        block = blocks.RichTextBlock(required=False)
        result = block.clean(RichText(""))
        self.assertIsInstance(result, RichText)
        self.assertEqual(result.source, "")

    def test_render_with_validator(self):
        def validate_contains_foo(value):
            if "foo" not in value:
                raise ValidationError("Value must contain 'foo'")

        block = blocks.RichTextBlock(validators=[validate_contains_foo])
        with self.assertRaises(ValidationError):
            block.clean(RichText("<p>bar</p>"))

    def test_get_searchable_content(self):
        # Markup is stripped; block-level elements are separated by newlines.
        block = blocks.RichTextBlock()
        value = RichText(
            '<p>Merry <a linktype="page" id="4">Christmas</a>! & a happy new year</p>\n'
            "<p>Our Santa pet <b>Wagtail</b> has some cool stuff in store for you all!</p>"
        )
        result = block.get_searchable_content(value)
        self.assertEqual(
            result,
            [
                "Merry Christmas! & a happy new year \n"
                "Our Santa pet Wagtail has some cool stuff in store for you all!"
            ],
        )

    def test_get_searchable_content_whitespace(self):
        # Adjacent elements must not run words together when stripped.
        block = blocks.RichTextBlock()
        value = RichText("<p>mashed</p><p>po<i>ta</i>toes</p>")
        result = block.get_searchable_content(value)
        self.assertEqual(result, ["mashed potatoes"])
class TestChoiceBlock(WagtailTestUtils, SimpleTestCase):
    """Tests for ChoiceBlock: blank-choice handling, callable choices, named
    groups, validation, deconstruction and searchable content."""

    def setUp(self):
        from django.db.models.fields import BLANK_CHOICE_DASH

        # The label Django uses for the automatic blank choice ("---------").
        self.blank_choice_dash_label = BLANK_CHOICE_DASH[0][1]

    def test_adapt_choice_block(self):
        block = blocks.ChoiceBlock(choices=[("tea", "Tea"), ("coffee", "Coffee")])
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(js_args[0], "test_choiceblock")
        self.assertIsInstance(js_args[1], forms.Select)
        # A blank choice is prepended automatically for required blocks.
        self.assertEqual(
            list(js_args[1].choices),
            [("", "---------"), ("tea", "Tea"), ("coffee", "Coffee")],
        )
        self.assertEqual(
            js_args[2],
            {
                "label": "Test choiceblock",
                "required": True,
                "icon": "placeholder",
                "classname": "field choice_field widget-select fieldname-test_choiceblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_choice_block_with_default(self):
        block = blocks.ChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")], default="tea"
        )
        self.assertEqual(block.get_default(), "tea")

    def test_adapt_choice_block_with_callable_choices(self):
        def callable_choices():
            return [("tea", "Tea"), ("coffee", "Coffee")]

        block = blocks.ChoiceBlock(choices=callable_choices)
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertIsInstance(js_args[1], forms.Select)
        self.assertEqual(
            list(js_args[1].choices),
            [("", "---------"), ("tea", "Tea"), ("coffee", "Coffee")],
        )

    def test_validate_required_choice_block(self):
        block = blocks.ChoiceBlock(choices=[("tea", "Tea"), ("coffee", "Coffee")])
        self.assertEqual(block.clean("coffee"), "coffee")

        with self.assertRaises(ValidationError):
            block.clean("whisky")

        with self.assertRaises(ValidationError):
            block.clean("")

        with self.assertRaises(ValidationError):
            block.clean(None)

    def test_adapt_non_required_choice_block(self):
        block = blocks.ChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")], required=False
        )
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertFalse(js_args[2]["required"])

    def test_validate_non_required_choice_block(self):
        block = blocks.ChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")], required=False
        )
        self.assertEqual(block.clean("coffee"), "coffee")

        with self.assertRaises(ValidationError):
            block.clean("whisky")

        # Blank values are accepted when the block is not required.
        self.assertEqual(block.clean(""), "")
        self.assertEqual(block.clean(None), "")

    def test_adapt_choice_block_with_existing_blank_choice(self):
        # If the caller already supplies a blank choice, no extra one is added.
        block = blocks.ChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")],
            required=False,
        )
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(
            list(js_args[1].choices),
            [("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")],
        )

    def test_adapt_choice_block_with_existing_blank_choice_and_with_callable_choices(
        self,
    ):
        def callable_choices():
            return [("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")]

        block = blocks.ChoiceBlock(choices=callable_choices, required=False)
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(
            list(js_args[1].choices),
            [("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")],
        )

    def test_named_groups_without_blank_option(self):
        block = blocks.ChoiceBlock(
            choices=[
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
            ]
        )
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        # The automatic blank option sits outside the named groups.
        self.assertEqual(
            list(js_args[1].choices),
            [
                ("", "---------"),
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
            ],
        )

    def test_named_groups_with_blank_option(self):
        block = blocks.ChoiceBlock(
            choices=[
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
                ("Not thirsty", [("", "No thanks")]),
            ],
            required=False,
        )
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(
            list(js_args[1].choices),
            [
                # Blank option not added
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
                ("Not thirsty", [("", "No thanks")]),
            ],
        )

    def test_subclassing(self):
        class BeverageChoiceBlock(blocks.ChoiceBlock):
            choices = [
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ]

        block = BeverageChoiceBlock(required=False)
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(
            list(js_args[1].choices),
            [
                ("", "---------"),
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ],
        )

        # subclasses of ChoiceBlock should deconstruct to a basic ChoiceBlock for migrations
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.ChoiceBlock",
                [],
                {
                    "choices": [("tea", "Tea"), ("coffee", "Coffee")],
                    "required": False,
                },
            ),
        )

    def test_searchable_content(self):
        block = blocks.ChoiceBlock(
            choices=[
                ("choice-1", "Choice 1"),
                ("choice-2", "Choice 2"),
            ]
        )
        # The human-readable label, not the stored value, is indexed.
        self.assertEqual(block.get_searchable_content("choice-1"), ["Choice 1"])

    def test_searchable_content_with_callable_choices(self):
        def callable_choices():
            return [
                ("choice-1", "Choice 1"),
                ("choice-2", "Choice 2"),
            ]

        block = blocks.ChoiceBlock(choices=callable_choices)
        self.assertEqual(block.get_searchable_content("choice-1"), ["Choice 1"])

    def test_optgroup_searchable_content(self):
        block = blocks.ChoiceBlock(
            choices=[
                (
                    "Section 1",
                    [
                        ("1-1", "Block 1"),
                        ("1-2", "Block 2"),
                    ],
                ),
                (
                    "Section 2",
                    [
                        ("2-1", "Block 1"),
                        ("2-2", "Block 2"),
                    ],
                ),
            ]
        )
        # Both the group label and the choice label are indexed.
        self.assertEqual(block.get_searchable_content("2-2"), ["Section 2", "Block 2"])

    def test_invalid_searchable_content(self):
        block = blocks.ChoiceBlock(
            choices=[
                ("one", "One"),
                ("two", "Two"),
            ]
        )
        self.assertEqual(block.get_searchable_content("three"), [])

    def test_searchable_content_with_lazy_translation(self):
        block = blocks.ChoiceBlock(
            choices=[
                ("choice-1", __("Choice 1")),
                ("choice-2", __("Choice 2")),
            ]
        )
        result = block.get_searchable_content("choice-1")
        # result must survive JSON (de)serialisation, which is not the case for
        # lazy translation objects
        result = json.loads(json.dumps(result))
        self.assertEqual(result, ["Choice 1"])

    def test_optgroup_searchable_content_with_lazy_translation(self):
        block = blocks.ChoiceBlock(
            choices=[
                (
                    __("Section 1"),
                    [
                        ("1-1", __("Block 1")),
                        ("1-2", __("Block 2")),
                    ],
                ),
                (
                    __("Section 2"),
                    [
                        ("2-1", __("Block 1")),
                        ("2-2", __("Block 2")),
                    ],
                ),
            ]
        )
        result = block.get_searchable_content("2-2")
        # result must survive JSON (de)serialisation, which is not the case for
        # lazy translation objects
        result = json.loads(json.dumps(result))
        self.assertEqual(result, ["Section 2", "Block 2"])

    def test_deconstruct_with_callable_choices(self):
        def callable_choices():
            return [
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ]

        block = blocks.ChoiceBlock(choices=callable_choices, required=False)
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)
        self.assertEqual(
            list(js_args[1].choices),
            [
                ("", "---------"),
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ],
        )

        # The callable itself (not its result) is preserved on deconstruction.
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.ChoiceBlock",
                [],
                {
                    "choices": callable_choices,
                    "required": False,
                },
            ),
        )

    def test_render_with_validator(self):
        choices = [
            ("tea", "Tea"),
            ("coffee", "Coffee"),
        ]

        def validate_tea_is_selected(value):
            raise ValidationError("You must select 'tea'")

        block = blocks.ChoiceBlock(
            choices=choices, validators=[validate_tea_is_selected]
        )
        with self.assertRaises(ValidationError):
            block.clean("coffee")
class TestMultipleChoiceBlock(WagtailTestUtils, SimpleTestCase):
    """Tests for MultipleChoiceBlock: adapter (telepath) output, validation of
    required/non-required values, choice handling (callables, optgroups,
    explicit blank choices), searchable content and deconstruction."""

    def setUp(self):
        from django.db.models.fields import BLANK_CHOICE_DASH

        # Label Django uses for the automatic blank choice ("---------").
        self.blank_choice_dash_label = BLANK_CHOICE_DASH[0][1]

    def test_adapt_multiple_choice_block(self):
        block = blocks.MultipleChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")]
        )

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_choiceblock")
        self.assertIsInstance(js_args[1], forms.Select)
        self.assertEqual(
            list(js_args[1].choices), [("tea", "Tea"), ("coffee", "Coffee")]
        )
        self.assertEqual(
            js_args[2],
            {
                "label": "Test choiceblock",
                "required": True,
                "icon": "placeholder",
                "classname": "field multiple_choice_field widget-select_multiple fieldname-test_choiceblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_multiple_choice_block_with_default(self):
        block = blocks.MultipleChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")], default="tea"
        )
        self.assertEqual(block.get_default(), "tea")

    def test_adapt_multiple_choice_block_with_callable_choices(self):
        def callable_choices():
            return [("tea", "Tea"), ("coffee", "Coffee")]

        block = blocks.MultipleChoiceBlock(choices=callable_choices)

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertIsInstance(js_args[1], forms.Select)
        self.assertEqual(
            list(js_args[1].choices), [("tea", "Tea"), ("coffee", "Coffee")]
        )

    def test_validate_required_multiple_choice_block(self):
        block = blocks.MultipleChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")]
        )
        self.assertEqual(block.clean(["coffee"]), ["coffee"])

        # a value outside the choices, the empty string and None all fail
        with self.assertRaises(ValidationError):
            block.clean(["whisky"])

        with self.assertRaises(ValidationError):
            block.clean("")

        with self.assertRaises(ValidationError):
            block.clean(None)

    def test_adapt_non_required_multiple_choice_block(self):
        block = blocks.MultipleChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")], required=False
        )

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertFalse(js_args[2]["required"])

    def test_validate_non_required_multiple_choice_block(self):
        block = blocks.MultipleChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee")], required=False
        )
        self.assertEqual(block.clean(["coffee"]), ["coffee"])

        with self.assertRaises(ValidationError):
            block.clean(["whisky"])

        # empty values normalise to an empty list rather than failing
        self.assertEqual(block.clean(""), [])
        self.assertEqual(block.clean(None), [])

    def test_adapt_multiple_choice_block_with_existing_blank_choice(self):
        # an explicit blank choice in the list suppresses the automatic one
        block = blocks.MultipleChoiceBlock(
            choices=[("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")],
            required=False,
        )

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(
            list(js_args[1].choices),
            [("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")],
        )

    def test_adapt_multiple_choice_block_with_existing_blank_choice_and_with_callable_choices(
        self,
    ):
        def callable_choices():
            return [("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")]

        block = blocks.MultipleChoiceBlock(choices=callable_choices, required=False)

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(
            list(js_args[1].choices),
            [("tea", "Tea"), ("coffee", "Coffee"), ("", "No thanks")],
        )

    def test_named_groups_without_blank_option(self):
        block = blocks.MultipleChoiceBlock(
            choices=[
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
            ]
        )

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(
            list(js_args[1].choices),
            [
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
            ],
        )

    def test_named_groups_with_blank_option(self):
        block = blocks.MultipleChoiceBlock(
            choices=[
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
                ("Not thirsty", [("", "No thanks")]),
            ],
            required=False,
        )

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(
            list(js_args[1].choices),
            [
                (
                    "Alcoholic",
                    [
                        ("gin", "Gin"),
                        ("whisky", "Whisky"),
                    ],
                ),
                (
                    "Non-alcoholic",
                    [
                        ("tea", "Tea"),
                        ("coffee", "Coffee"),
                    ],
                ),
                ("Not thirsty", [("", "No thanks")]),
            ],
        )

    def test_subclassing(self):
        class BeverageMultipleChoiceBlock(blocks.MultipleChoiceBlock):
            choices = [
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ]

        block = BeverageMultipleChoiceBlock(required=False)

        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(
            list(js_args[1].choices),
            [
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ],
        )

        # subclasses of ChoiceBlock should deconstruct to a basic ChoiceBlock for migrations
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.MultipleChoiceBlock",
                [],
                {
                    "choices": [("tea", "Tea"), ("coffee", "Coffee")],
                    "required": False,
                },
            ),
        )

    def test_searchable_content(self):
        block = blocks.MultipleChoiceBlock(
            choices=[
                ("choice-1", "Choice 1"),
                ("choice-2", "Choice 2"),
            ]
        )
        self.assertEqual(block.get_searchable_content("choice-1"), ["Choice 1"])

    def test_searchable_content_with_callable_choices(self):
        def callable_choices():
            return [
                ("choice-1", "Choice 1"),
                ("choice-2", "Choice 2"),
            ]

        block = blocks.MultipleChoiceBlock(choices=callable_choices)
        self.assertEqual(block.get_searchable_content("choice-1"), ["Choice 1"])

    def test_optgroup_searchable_content(self):
        # optgroup choices contribute the group label as well
        block = blocks.MultipleChoiceBlock(
            choices=[
                (
                    "Section 1",
                    [
                        ("1-1", "Block 1"),
                        ("1-2", "Block 2"),
                    ],
                ),
                (
                    "Section 2",
                    [
                        ("2-1", "Block 1"),
                        ("2-2", "Block 2"),
                    ],
                ),
            ]
        )
        self.assertEqual(block.get_searchable_content("2-2"), ["Section 2", "Block 2"])

    def test_invalid_searchable_content(self):
        # values not in the choices yield no searchable content
        block = blocks.MultipleChoiceBlock(
            choices=[
                ("one", "One"),
                ("two", "Two"),
            ]
        )
        self.assertEqual(block.get_searchable_content("three"), [])

    def test_searchable_content_with_lazy_translation(self):
        block = blocks.MultipleChoiceBlock(
            choices=[
                ("choice-1", __("Choice 1")),
                ("choice-2", __("Choice 2")),
            ]
        )
        result = block.get_searchable_content("choice-1")
        # result must survive JSON (de)serialisation, which is not the case for
        # lazy translation objects
        result = json.loads(json.dumps(result))
        self.assertEqual(result, ["Choice 1"])

    def test_optgroup_searchable_content_with_lazy_translation(self):
        block = blocks.MultipleChoiceBlock(
            choices=[
                (
                    __("Section 1"),
                    [
                        ("1-1", __("Block 1")),
                        ("1-2", __("Block 2")),
                    ],
                ),
                (
                    __("Section 2"),
                    [
                        ("2-1", __("Block 1")),
                        ("2-2", __("Block 2")),
                    ],
                ),
            ]
        )
        result = block.get_searchable_content("2-2")
        # result must survive JSON (de)serialisation, which is not the case for
        # lazy translation objects
        result = json.loads(json.dumps(result))
        self.assertEqual(result, ["Section 2", "Block 2"])

    def test_deconstruct_with_callable_choices(self):
        def callable_choices():
            return [
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ]

        block = blocks.MultipleChoiceBlock(choices=callable_choices, required=False)
        block.set_name("test_choiceblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(
            list(js_args[1].choices),
            [
                ("tea", "Tea"),
                ("coffee", "Coffee"),
            ],
        )

        # deconstruct() should keep the callable itself, not a materialised list
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.MultipleChoiceBlock",
                [],
                {
                    "choices": callable_choices,
                    "required": False,
                },
            ),
        )

    def test_render_with_validator(self):
        choices = [
            ("tea", "Tea"),
            ("coffee", "Coffee"),
        ]

        def validate_tea_is_selected(value):
            # deliberately rejects every value, to confirm validators run
            raise ValidationError("You must select 'tea'")

        block = blocks.MultipleChoiceBlock(
            choices=choices, validators=[validate_tea_is_selected]
        )

        with self.assertRaises(ValidationError):
            block.clean("coffee")
class TestRawHTMLBlock(unittest.TestCase):
    """Tests for RawHTMLBlock: values are marked safe on the way in
    (to_python / form data) and have their safe-ness stripped on the way out
    (get_prep_value), so raw HTML is only trusted within the rendering
    pipeline."""

    def test_get_default_with_fallback_value(self):
        default_value = blocks.RawHTMLBlock().get_default()
        self.assertEqual(default_value, "")
        self.assertIsInstance(default_value, SafeData)

    def test_get_default_with_none(self):
        # an explicit None default still normalises to a safe empty string
        default_value = blocks.RawHTMLBlock(default=None).get_default()
        self.assertEqual(default_value, "")
        self.assertIsInstance(default_value, SafeData)

    def test_get_default_with_empty_string(self):
        default_value = blocks.RawHTMLBlock(default="").get_default()
        self.assertEqual(default_value, "")
        self.assertIsInstance(default_value, SafeData)

    def test_get_default_with_nonempty_string(self):
        default_value = blocks.RawHTMLBlock(default="<blink>BÖÖM</blink>").get_default()
        self.assertEqual(default_value, "<blink>BÖÖM</blink>")
        self.assertIsInstance(default_value, SafeData)

    def test_serialize(self):
        # the database representation must NOT carry safe-string status
        block = blocks.RawHTMLBlock()
        result = block.get_prep_value(mark_safe("<blink>BÖÖM</blink>"))
        self.assertEqual(result, "<blink>BÖÖM</blink>")
        self.assertNotIsInstance(result, SafeData)

    def test_deserialize(self):
        block = blocks.RawHTMLBlock()
        result = block.to_python("<blink>BÖÖM</blink>")
        self.assertEqual(result, "<blink>BÖÖM</blink>")
        self.assertIsInstance(result, SafeData)

    def test_render(self):
        block = blocks.RawHTMLBlock()
        result = block.render(mark_safe("<blink>BÖÖM</blink>"))
        self.assertEqual(result, "<blink>BÖÖM</blink>")
        self.assertIsInstance(result, SafeData)

    def test_get_form_state(self):
        block = blocks.RawHTMLBlock()
        form_state = block.get_form_state("<blink>BÖÖM</blink>")
        self.assertEqual(form_state, "<blink>BÖÖM</blink>")

    def test_adapt(self):
        block = blocks.RawHTMLBlock()

        block.set_name("test_rawhtmlblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_rawhtmlblock")
        self.assertIsInstance(js_args[1], forms.Textarea)
        self.assertEqual(js_args[1].attrs, {"cols": "40", "rows": "10"})
        self.assertEqual(
            js_args[2],
            {
                "label": "Test rawhtmlblock",
                "required": True,
                "icon": "code",
                "classname": "field char_field widget-textarea fieldname-test_rawhtmlblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_form_response(self):
        block = blocks.RawHTMLBlock()
        result = block.value_from_datadict(
            {"rawhtml": "<blink>BÖÖM</blink>"}, {}, prefix="rawhtml"
        )
        self.assertEqual(result, "<blink>BÖÖM</blink>")
        self.assertIsInstance(result, SafeData)

    def test_value_omitted_from_data(self):
        block = blocks.RawHTMLBlock()
        self.assertFalse(
            block.value_omitted_from_data({"rawhtml": "ohai"}, {}, "rawhtml")
        )
        self.assertFalse(block.value_omitted_from_data({"rawhtml": ""}, {}, "rawhtml"))
        self.assertTrue(
            block.value_omitted_from_data({"nothing-here": "nope"}, {}, "rawhtml")
        )

    def test_clean_required_field(self):
        block = blocks.RawHTMLBlock()
        result = block.clean(mark_safe("<blink>BÖÖM</blink>"))
        self.assertEqual(result, "<blink>BÖÖM</blink>")
        self.assertIsInstance(result, SafeData)

        # a required field rejects the empty string
        with self.assertRaises(ValidationError):
            block.clean(mark_safe(""))

    def test_clean_nonrequired_field(self):
        block = blocks.RawHTMLBlock(required=False)
        result = block.clean(mark_safe("<blink>BÖÖM</blink>"))
        self.assertEqual(result, "<blink>BÖÖM</blink>")
        self.assertIsInstance(result, SafeData)

        result = block.clean(mark_safe(""))
        self.assertEqual(result, "")
        self.assertIsInstance(result, SafeData)

    def test_render_with_validator(self):
        def validate_contains_foo(value):
            if "foo" not in value:
                raise ValidationError("Value must contain 'foo'")

        block = blocks.RawHTMLBlock(validators=[validate_contains_foo])

        with self.assertRaises(ValidationError):
            block.clean(mark_safe("<p>bar</p>"))
class TestMeta(unittest.TestCase):
    """Tests for how block Meta options are gathered from constructor
    arguments and across single/multiple inheritance chains."""

    def test_set_template_with_meta(self):
        class HeadingBlock(blocks.CharBlock):
            class Meta:
                template = "heading.html"

        self.assertEqual(HeadingBlock().meta.template, "heading.html")

    def test_set_template_with_constructor(self):
        self.assertEqual(
            blocks.CharBlock(template="heading.html").meta.template,
            "heading.html",
        )

    def test_set_template_with_constructor_overrides_meta(self):
        class HeadingBlock(blocks.CharBlock):
            class Meta:
                template = "heading.html"

        self.assertEqual(
            HeadingBlock(template="subheading.html").meta.template,
            "subheading.html",
        )

    def test_meta_nested_inheritance(self):
        """
        Check that having a multi-level inheritance chain works
        """

        class HeadingBlock(blocks.CharBlock):
            class Meta:
                template = "heading.html"
                test = "Foo"

        class SubHeadingBlock(HeadingBlock):
            class Meta:
                template = "subheading.html"

        subheading = SubHeadingBlock()
        # the subclass overrides 'template' but still inherits 'test'
        self.assertEqual(subheading.meta.template, "subheading.html")
        self.assertEqual(subheading.meta.test, "Foo")

    def test_meta_multi_inheritance(self):
        """
        Check that multi-inheritance and Meta classes work together
        """

        class LeftBlock(blocks.CharBlock):
            class Meta:
                template = "template.html"
                clash = "the band"
                label = "Left block"

        class RightBlock(blocks.CharBlock):
            class Meta:
                default = "hello"
                clash = "the album"
                label = "Right block"

        class ChildBlock(LeftBlock, RightBlock):
            class Meta:
                label = "Child block"

        child = ChildBlock()

        # unique attributes come straight from LeftBlock / RightBlock
        self.assertEqual(child.meta.template, "template.html")
        self.assertEqual(child.meta.default, "hello")

        # LeftBlock wins the 'clash' attribute, as it comes first in the MRO
        self.assertEqual(child.meta.clash, "the band")

        # ChildBlock's own label overrides both parents' labels
        self.assertEqual(child.meta.label, "Child block")
class TestStructBlock(SimpleTestCase):
    """Tests for StructBlock: child-block declaration (inline, subclass,
    mixin), rendering, form adaptation, defaults, searchable content,
    form-data handling, cleaning and StructValue behaviour."""

    def test_initialisation(self):
        block = blocks.StructBlock(
            [
                ("title", blocks.CharBlock()),
                ("link", blocks.URLBlock()),
            ]
        )

        self.assertEqual(list(block.child_blocks.keys()), ["title", "link"])

    def test_initialisation_from_subclass(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()

        self.assertEqual(list(block.child_blocks.keys()), ["title", "link"])

    def test_initialisation_from_subclass_with_extra(self):
        # constructor-supplied child blocks are appended after declared ones
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock([("classname", blocks.CharBlock())])

        self.assertEqual(
            list(block.child_blocks.keys()), ["title", "link", "classname"]
        )

    def test_initialisation_with_multiple_subclassses(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        class StyledLinkBlock(LinkBlock):
            classname = blocks.CharBlock()

        block = StyledLinkBlock()

        self.assertEqual(
            list(block.child_blocks.keys()), ["title", "link", "classname"]
        )

    def test_initialisation_with_mixins(self):
        """
        The order of fields of classes with multiple parent classes is slightly
        surprising at first. Child fields are inherited in a bottom-up order,
        by traversing the MRO in reverse. In the example below,
        ``StyledLinkBlock`` will have an MRO of::

            [StyledLinkBlock, StylingMixin, LinkBlock, StructBlock, ...]

        This will result in ``classname`` appearing *after* ``title`` and
        ``link`` in ``StyledLinkBlock.child_blocks``, even though
        ``StylingMixin`` appeared before ``LinkBlock``.
        """

        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        class StylingMixin(blocks.StructBlock):
            classname = blocks.CharBlock()

        class StyledLinkBlock(StylingMixin, LinkBlock):
            source = blocks.CharBlock()

        block = StyledLinkBlock()

        self.assertEqual(
            list(block.child_blocks.keys()), ["title", "link", "classname", "source"]
        )

    def test_render(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        html = block.render(
            block.to_python(
                {
                    "title": "Wagtail site",
                    "link": "http://www.wagtail.org",
                }
            )
        )
        # default StructBlock rendering is a definition list of child values
        expected_html = "\n".join(
            [
                "<dl>",
                "<dt>title</dt>",
                "<dd>Wagtail site</dd>",
                "<dt>link</dt>",
                "<dd>http://www.wagtail.org</dd>",
                "</dl>",
            ]
        )

        self.assertHTMLEqual(html, expected_html)

    def test_get_api_representation_calls_same_method_on_fields_with_context(self):
        """
        The get_api_representation method of a StructBlock should invoke
        the block's get_api_representation method on each field and the
        context should be passed on.
        """

        class ContextBlock(blocks.CharBlock):
            def get_api_representation(self, value, context=None):
                return context[value]

        class AuthorBlock(blocks.StructBlock):
            language = ContextBlock()
            author = ContextBlock()

        block = AuthorBlock()
        api_representation = block.get_api_representation(
            {
                "language": "en",
                "author": "wagtail",
            },
            context={"en": "English", "wagtail": "Wagtail!"},
        )

        self.assertDictEqual(
            api_representation, {"language": "English", "author": "Wagtail!"}
        )

    def test_render_unknown_field(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        html = block.render(
            block.to_python(
                {
                    "title": "Wagtail site",
                    "link": "http://www.wagtail.org",
                    "image": 10,
                }
            )
        )

        self.assertIn("<dt>title</dt>", html)
        self.assertIn("<dd>Wagtail site</dd>", html)
        self.assertIn("<dt>link</dt>", html)
        self.assertIn("<dd>http://www.wagtail.org</dd>", html)

        # Don't render the extra item
        self.assertNotIn("<dt>image</dt>", html)

    def test_render_bound_block(self):
        # the string representation of a bound block should be the value as rendered by
        # the associated block
        class SectionBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            body = blocks.RichTextBlock()

        block = SectionBlock()
        struct_value = block.to_python(
            {
                "title": "hello",
                "body": "<b>world</b>",
            }
        )
        body_bound_block = struct_value.bound_blocks["body"]
        expected = "<b>world</b>"
        self.assertEqual(str(body_bound_block), expected)

    def test_get_form_context(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        context = block.get_form_context(
            block.to_python(
                {
                    "title": "Wagtail site",
                    "link": "http://www.wagtail.org",
                }
            ),
            prefix="mylink",
        )

        self.assertIsInstance(context["children"], collections.OrderedDict)
        self.assertEqual(len(context["children"]), 2)
        self.assertIsInstance(context["children"]["title"], blocks.BoundBlock)
        self.assertEqual(context["children"]["title"].value, "Wagtail site")
        self.assertIsInstance(context["children"]["link"], blocks.BoundBlock)
        self.assertEqual(context["children"]["link"].value, "http://www.wagtail.org")
        self.assertEqual(context["block_definition"], block)
        self.assertEqual(context["prefix"], "mylink")

    def test_adapt(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(required=False)
            link = blocks.URLBlock(required=False)

        block = LinkBlock()

        block.set_name("test_structblock")
        js_args = StructBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_structblock")
        self.assertEqual(
            js_args[2],
            {
                "label": "Test structblock",
                "required": False,
                "icon": "placeholder",
                "classname": "struct-block",
            },
        )

        # js_args[1] lists the child block definitions in declaration order
        self.assertEqual(len(js_args[1]), 2)
        title_field, link_field = js_args[1]
        self.assertEqual(title_field, block.child_blocks["title"])
        self.assertEqual(link_field, block.child_blocks["link"])

    def test_adapt_with_form_template(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(required=False)
            link = blocks.URLBlock(required=False)

            class Meta:
                form_template = "tests/block_forms/struct_block_form_template.html"

        block = LinkBlock()

        block.set_name("test_structblock")
        js_args = StructBlockAdapter().js_args(block)

        self.assertEqual(
            js_args[2],
            {
                "label": "Test structblock",
                "required": False,
                "icon": "placeholder",
                "classname": "struct-block",
                "formTemplate": "<div>Hello</div>",
            },
        )

    def test_adapt_with_form_template_jinja(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(required=False)
            link = blocks.URLBlock(required=False)

            class Meta:
                form_template = "tests/jinja2/struct_block_form_template.html"

        block = LinkBlock()

        block.set_name("test_structblock")
        js_args = StructBlockAdapter().js_args(block)

        self.assertEqual(
            js_args[2],
            {
                "label": "Test structblock",
                "required": False,
                "icon": "placeholder",
                "classname": "struct-block",
                "formTemplate": "<div>Hello</div>",
            },
        )

    def test_get_default(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(default="Torchbox")
            link = blocks.URLBlock(default="http://www.torchbox.com")

        block = LinkBlock()
        default_val = block.get_default()
        self.assertEqual(default_val.get("title"), "Torchbox")

    def test_adapt_with_help_text_on_meta(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

            class Meta:
                help_text = "Self-promotion is encouraged"

        block = LinkBlock()

        block.set_name("test_structblock")
        js_args = StructBlockAdapter().js_args(block)

        self.assertEqual(
            js_args[2],
            {
                "label": "Test structblock",
                "required": False,
                "icon": "placeholder",
                "classname": "struct-block",
                "helpIcon": (
                    '<svg class="icon icon-help default" aria-hidden="true">'
                    '<use href="#icon-help"></use></svg>'
                ),
                "helpText": "Self-promotion is encouraged",
            },
        )

    def test_adapt_with_help_text_as_argument(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock(help_text="Self-promotion is encouraged")

        block.set_name("test_structblock")
        js_args = StructBlockAdapter().js_args(block)

        self.assertEqual(
            js_args[2],
            {
                "label": "Test structblock",
                "required": False,
                "icon": "placeholder",
                "classname": "struct-block",
                "helpIcon": (
                    '<svg class="icon icon-help default" aria-hidden="true">'
                    '<use href="#icon-help"></use></svg>'
                ),
                "helpText": "Self-promotion is encouraged",
            },
        )

    def test_searchable_content(self):
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        content = block.get_searchable_content(
            block.to_python(
                {
                    "title": "Wagtail site",
                    "link": "http://www.wagtail.org",
                }
            )
        )

        # only the CharBlock contributes searchable text; URLBlock does not
        self.assertEqual(content, ["Wagtail site"])

    def test_value_from_datadict(self):
        block = blocks.StructBlock(
            [
                ("title", blocks.CharBlock()),
                ("link", blocks.URLBlock()),
            ]
        )

        struct_val = block.value_from_datadict(
            {"mylink-title": "Torchbox", "mylink-link": "http://www.torchbox.com"},
            {},
            "mylink",
        )

        self.assertEqual(struct_val["title"], "Torchbox")
        self.assertEqual(struct_val["link"], "http://www.torchbox.com")
        self.assertIsInstance(struct_val, blocks.StructValue)
        self.assertIsInstance(struct_val.bound_blocks["link"].block, blocks.URLBlock)

    def test_value_omitted_from_data(self):
        block = blocks.StructBlock(
            [
                ("title", blocks.CharBlock()),
                ("link", blocks.URLBlock()),
            ]
        )

        # overall value is considered present in the form if any sub-field is present
        self.assertFalse(
            block.value_omitted_from_data({"mylink-title": "Torchbox"}, {}, "mylink")
        )
        self.assertTrue(
            block.value_omitted_from_data({"nothing-here": "nope"}, {}, "mylink")
        )

    def test_default_is_returned_as_structvalue(self):
        """When returning the default value of a StructBlock (e.g. because it's
        a child of another StructBlock, and the outer value is missing that key)
        we should receive it as a StructValue, not just a plain dict"""

        class PersonBlock(blocks.StructBlock):
            first_name = blocks.CharBlock()
            surname = blocks.CharBlock()

        class EventBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            guest_speaker = PersonBlock(
                default={"first_name": "Ed", "surname": "Balls"}
            )

        event_block = EventBlock()

        event = event_block.to_python({"title": "Birthday party"})

        self.assertEqual(event["guest_speaker"]["first_name"], "Ed")
        self.assertIsInstance(event["guest_speaker"], blocks.StructValue)

    def test_default_value_is_distinct_instance(self):
        """
        Whenever the default value of a StructBlock is invoked, it should be a distinct
        instance of the dict so that modifying it doesn't modify other places where the
        default value appears.
        """

        class PersonBlock(blocks.StructBlock):
            first_name = blocks.CharBlock()
            surname = blocks.CharBlock()

        class EventBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            guest_speaker = PersonBlock(
                default={"first_name": "Ed", "surname": "Balls"}
            )

        event_block = EventBlock()

        event1 = event_block.to_python(
            {"title": "Birthday party"}
        )  # guest_speaker will default to Ed Balls
        event2 = event_block.to_python(
            {"title": "Christmas party"}
        )  # guest_speaker will default to Ed Balls, but a distinct instance

        event1["guest_speaker"]["surname"] = "Miliband"
        self.assertEqual(event1["guest_speaker"]["surname"], "Miliband")
        # event2 should not be modified
        self.assertEqual(event2["guest_speaker"]["surname"], "Balls")

    def test_bulk_to_python_returns_distinct_default_instances(self):
        """
        Whenever StructBlock.bulk_to_python invokes a child block's get_default method to
        fill in missing fields, it should use a separate invocation for each record so that
        we don't end up with the same instance of a mutable value on multiple records
        """

        class ShoppingListBlock(blocks.StructBlock):
            shop = blocks.CharBlock()
            items = blocks.ListBlock(blocks.CharBlock(default="chocolate"))

        block = ShoppingListBlock()

        shopping_lists = block.bulk_to_python(
            [
                {"shop": "Tesco"},  # 'items' defaults to ['chocolate']
                {
                    "shop": "Asda"
                },  # 'items' defaults to ['chocolate'], but a distinct instance
            ]
        )

        shopping_lists[0]["items"].append("cake")
        self.assertEqual(list(shopping_lists[0]["items"]), ["chocolate", "cake"])
        # shopping_lists[1] should not be updated
        self.assertEqual(list(shopping_lists[1]["items"]), ["chocolate"])

    def test_clean(self):
        block = blocks.StructBlock(
            [
                ("title", blocks.CharBlock()),
                ("link", blocks.URLBlock()),
            ]
        )

        value = block.to_python(
            {"title": "Torchbox", "link": "http://www.torchbox.com/"}
        )
        clean_value = block.clean(value)
        self.assertIsInstance(clean_value, blocks.StructValue)
        self.assertEqual(clean_value["title"], "Torchbox")

        value = block.to_python({"title": "Torchbox", "link": "not a url"})
        with self.assertRaises(ValidationError):
            block.clean(value)

    def test_bound_blocks_are_available_on_template(self):
        """
        Test that we are able to use value.bound_blocks within templates
        to access a child block's own HTML rendering
        """
        block = SectionBlock()
        value = block.to_python({"title": "Hello", "body": "<i>italic</i> world"})
        result = block.render(value)
        self.assertEqual(result, """<h1>Hello</h1><i>italic</i> world""")

    def test_render_block_with_extra_context(self):
        block = SectionBlock()
        value = block.to_python({"title": "Bonjour", "body": "monde <i>italique</i>"})
        result = block.render(value, context={"language": "fr"})
        self.assertEqual(result, """<h1 lang="fr">Bonjour</h1>monde <i>italique</i>""")

    def test_render_structvalue(self):
        """
        The HTML representation of a StructValue should use the block's template
        """
        block = SectionBlock()
        value = block.to_python({"title": "Hello", "body": "<i>italic</i> world"})
        result = value.__html__()
        self.assertEqual(result, """<h1>Hello</h1><i>italic</i> world""")

        # value.render_as_block() should be equivalent to value.__html__()
        result = value.render_as_block()
        self.assertEqual(result, """<h1>Hello</h1><i>italic</i> world""")

    def test_str_structvalue(self):
        """
        The str() representation of a StructValue should NOT render the template, as that's liable
        to cause an infinite loop if any debugging / logging code attempts to log the fact that
        it rendered a template with this object in the context:
        https://github.com/wagtail/wagtail/issues/2874
        https://github.com/jazzband/django-debug-toolbar/issues/950
        """
        block = SectionBlock()
        value = block.to_python({"title": "Hello", "body": "<i>italic</i> world"})
        result = str(value)
        self.assertNotIn("<h1>", result)
        # The expected rendering should correspond to the native representation of an OrderedDict:
        # "StructValue([('title', u'Hello'), ('body', <wagtail.core.rich_text.RichText object at 0xb12d5eed>)])"
        # - give or take some quoting differences between Python versions
        self.assertIn("StructValue", result)
        self.assertIn("title", result)
        self.assertIn("Hello", result)

    def test_render_structvalue_with_extra_context(self):
        block = SectionBlock()
        value = block.to_python({"title": "Bonjour", "body": "monde <i>italique</i>"})
        result = value.render_as_block(context={"language": "fr"})
        self.assertEqual(result, """<h1 lang="fr">Bonjour</h1>monde <i>italique</i>""")
class TestStructBlockWithCustomStructValue(SimpleTestCase):
    """Tests for StructBlock with a custom StructValue subclass (via the
    ``value_class`` option): the custom class must be used for every code
    path that produces a value (to_python, defaults, form data, clean)."""

    def test_initialisation(self):
        class CustomStructValue(blocks.StructValue):
            def joined(self):
                return self.get("title", "") + self.get("link", "")

        block = blocks.StructBlock(
            [
                ("title", blocks.CharBlock()),
                ("link", blocks.URLBlock()),
            ],
            value_class=CustomStructValue,
        )

        self.assertEqual(list(block.child_blocks.keys()), ["title", "link"])

        block_value = block.to_python(
            {"title": "Birthday party", "link": "https://myparty.co.uk"}
        )
        self.assertIsInstance(block_value, CustomStructValue)

        default_value = block.get_default()
        self.assertIsInstance(default_value, CustomStructValue)

        value_from_datadict = block.value_from_datadict(
            {"mylink-title": "Torchbox", "mylink-link": "http://www.torchbox.com"},
            {},
            "mylink",
        )

        self.assertIsInstance(value_from_datadict, CustomStructValue)

        value = block.to_python(
            {"title": "Torchbox", "link": "http://www.torchbox.com/"}
        )
        clean_value = block.clean(value)
        self.assertIsInstance(clean_value, CustomStructValue)
        self.assertEqual(clean_value["title"], "Torchbox")

        value = block.to_python({"title": "Torchbox", "link": "not a url"})
        with self.assertRaises(ValidationError):
            block.clean(value)

    def test_initialisation_from_subclass(self):
        class LinkStructValue(blocks.StructValue):
            def url(self):
                return self.get("page") or self.get("link")

        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            page = blocks.PageChooserBlock(required=False)
            link = blocks.URLBlock(required=False)

            class Meta:
                value_class = LinkStructValue

        block = LinkBlock()

        self.assertEqual(list(block.child_blocks.keys()), ["title", "page", "link"])

        block_value = block.to_python(
            {"title": "Website", "link": "https://website.com"}
        )
        self.assertIsInstance(block_value, LinkStructValue)

        default_value = block.get_default()
        self.assertIsInstance(default_value, LinkStructValue)

    def test_initialisation_with_multiple_subclassses(self):
        # value_class declared on a parent class is inherited by subclasses
        class LinkStructValue(blocks.StructValue):
            def url(self):
                return self.get("page") or self.get("link")

        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            page = blocks.PageChooserBlock(required=False)
            link = blocks.URLBlock(required=False)

            class Meta:
                value_class = LinkStructValue

        class StyledLinkBlock(LinkBlock):
            classname = blocks.CharBlock()

        block = StyledLinkBlock()

        self.assertEqual(
            list(block.child_blocks.keys()), ["title", "page", "link", "classname"]
        )

        value_from_datadict = block.value_from_datadict(
            {
                "queen-title": "Torchbox",
                "queen-link": "http://www.torchbox.com",
                "queen-classname": "fullsize",
            },
            {},
            "queen",
        )

        self.assertIsInstance(value_from_datadict, LinkStructValue)

    def test_initialisation_with_mixins(self):
        # value_class survives a mixin-based inheritance chain too
        class LinkStructValue(blocks.StructValue):
            pass

        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

            class Meta:
                value_class = LinkStructValue

        class StylingMixin(blocks.StructBlock):
            classname = blocks.CharBlock()

        class StyledLinkBlock(StylingMixin, LinkBlock):
            source = blocks.CharBlock()

        block = StyledLinkBlock()

        self.assertEqual(
            list(block.child_blocks.keys()), ["title", "link", "classname", "source"]
        )

        block_value = block.to_python(
            {
                "title": "Website",
                "link": "https://website.com",
                "source": "google",
                "classname": "full-size",
            }
        )
        self.assertIsInstance(block_value, LinkStructValue)

    def test_value_property(self):
        # properties defined on the custom StructValue are usable on values
        class SectionStructValue(blocks.StructValue):
            @property
            def foo(self):
                return "bar %s" % self.get("title", "")

        class SectionBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            body = blocks.RichTextBlock()

            class Meta:
                value_class = SectionStructValue

        block = SectionBlock()

        struct_value = block.to_python({"title": "hello", "body": "<b>world</b>"})
        value = struct_value.foo
        self.assertEqual(value, "bar hello")

    def test_render_with_template(self):
        # templates can call methods defined on the custom StructValue
        class SectionStructValue(blocks.StructValue):
            def title_with_suffix(self):
                title = self.get("title")
                if title:
                    return "SUFFIX %s" % title
                return "EMPTY TITLE"

        class SectionBlock(blocks.StructBlock):
            title = blocks.CharBlock(required=False)

            class Meta:
                value_class = SectionStructValue

        block = SectionBlock(template="tests/blocks/struct_block_custom_value.html")

        struct_value = block.to_python({"title": "hello"})
        html = block.render(struct_value)
        self.assertEqual(html, "<div>SUFFIX hello</div>\n")

        struct_value = block.to_python({})
        html = block.render(struct_value)
        self.assertEqual(html, "<div>EMPTY TITLE</div>\n")
class TestListBlock(WagtailTestUtils, SimpleTestCase):
    """Tests for blocks.ListBlock: construction, rendering, form data
    round-trips, defaults, validation, and database (de)serialization."""

    def test_initialise_with_class(self):
        """ListBlock accepts a block *class* and instantiates it itself."""
        block = blocks.ListBlock(blocks.CharBlock)

        # Child block should be initialised for us
        self.assertIsInstance(block.child_block, blocks.CharBlock)
def test_initialise_with_instance(self):
child_block = blocks.CharBlock()
block = blocks.ListBlock(child_block)
self.assertEqual(block.child_block, child_block)
    def render(self):
        """Helper: render a two-item ListBlock of link StructBlocks and
        return the resulting HTML string."""
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock())
        return block.render(
            [
                {
                    "title": "Wagtail",
                    "link": "http://www.wagtail.org",
                },
                {
                    "title": "Django",
                    "link": "http://www.djangoproject.com",
                },
            ]
        )
def test_render_uses_ul(self):
html = self.render()
self.assertIn("<ul>", html)
self.assertIn("</ul>", html)
def test_render_uses_li(self):
html = self.render()
self.assertIn("<li>", html)
self.assertIn("</li>", html)
    def test_render_calls_block_render_on_children(self):
        """
        The default rendering of a ListBlock should invoke the block's render method
        on each child, rather than just outputting the child value as a string.
        """
        block = blocks.ListBlock(
            blocks.CharBlock(template="tests/blocks/heading_block.html")
        )
        html = block.render(["Hello world!", "Goodbye world!"])
        # the child template wraps each value in <h1>, proving render() ran per child
        self.assertIn("<h1>Hello world!</h1>", html)
        self.assertIn("<h1>Goodbye world!</h1>", html)
    def test_render_passes_context_to_children(self):
        """
        Template context passed to the render method should be passed on
        to the render method of the child block.
        """
        block = blocks.ListBlock(
            blocks.CharBlock(template="tests/blocks/heading_block.html")
        )
        html = block.render(
            ["Bonjour le monde!", "Au revoir le monde!"],
            context={
                "language": "fr",
            },
        )
        # the child template reads 'language' from context into the lang attribute
        self.assertIn('<h1 lang="fr">Bonjour le monde!</h1>', html)
        self.assertIn('<h1 lang="fr">Au revoir le monde!</h1>', html)
    def test_get_api_representation_calls_same_method_on_children_with_context(self):
        """
        The get_api_representation method of a ListBlock should invoke
        the block's get_api_representation method on each child and
        the context should be passed on.
        """
        class ContextBlock(blocks.CharBlock):
            def get_api_representation(self, value, context=None):
                # echo back the context entry keyed by the child's value,
                # so the assertion proves context reached each child
                return context[value]

        block = blocks.ListBlock(ContextBlock())
        api_representation = block.get_api_representation(
            ["en", "fr"], context={"en": "Hello world!", "fr": "Bonjour le monde!"}
        )
        self.assertEqual(api_representation, ["Hello world!", "Bonjour le monde!"])
    def test_adapt(self):
        """ListBlockAdapter.js_args exposes, in order: the block name, the
        child block instance, the initial child state, and the meta options."""
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock)
        block.set_name("test_listblock")
        js_args = ListBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_listblock")
        self.assertIsInstance(js_args[1], LinkBlock)
        self.assertEqual(js_args[2], {"title": None, "link": None})
        self.assertEqual(
            js_args[3],
            {
                "label": "Test listblock",
                "icon": "placeholder",
                "classname": None,
                "collapsed": False,
                "strings": {
                    "DELETE": "Delete",
                    "DUPLICATE": "Duplicate",
                    "MOVE_DOWN": "Move down",
                    "MOVE_UP": "Move up",
                    "ADD": "Add",
                },
            },
        )
    def test_adapt_with_min_num_max_num(self):
        """min_num/max_num constructor arguments surface in the adapter meta
        options as the 'minNum' and 'maxNum' keys."""
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock, min_num=2, max_num=5)
        block.set_name("test_listblock")
        js_args = ListBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_listblock")
        self.assertIsInstance(js_args[1], LinkBlock)
        self.assertEqual(js_args[2], {"title": None, "link": None})
        self.assertEqual(
            js_args[3],
            {
                "label": "Test listblock",
                "icon": "placeholder",
                "classname": None,
                "collapsed": False,
                "minNum": 2,
                "maxNum": 5,
                "strings": {
                    "DELETE": "Delete",
                    "DUPLICATE": "Duplicate",
                    "MOVE_DOWN": "Move down",
                    "MOVE_UP": "Move up",
                    "ADD": "Add",
                },
            },
        )
    def test_searchable_content(self):
        """get_searchable_content collects the searchable text of each child;
        here CharBlock titles are indexed but URLBlock values are not."""
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock())
        content = block.get_searchable_content(
            [
                {
                    "title": "Wagtail",
                    "link": "http://www.wagtail.org",
                },
                {
                    "title": "Django",
                    "link": "http://www.djangoproject.com",
                },
            ]
        )
        self.assertEqual(content, ["Wagtail", "Django"])
def test_value_omitted_from_data(self):
block = blocks.ListBlock(blocks.CharBlock())
# overall value is considered present in the form if the 'count' field is present
self.assertFalse(
block.value_omitted_from_data({"mylist-count": "0"}, {}, "mylist")
)
self.assertFalse(
block.value_omitted_from_data(
{
"mylist-count": "1",
"mylist-0-value": "hello",
"mylist-0-deleted": "",
"mylist-0-order": "0",
},
{},
"mylist",
)
)
self.assertTrue(
block.value_omitted_from_data({"nothing-here": "nope"}, {}, "mylist")
)
    def test_id_from_form_submission_is_preserved(self):
        """The '-id' field posted for each item is kept on the resulting
        bound blocks rather than being regenerated."""
        block = blocks.ListBlock(blocks.CharBlock())

        post_data = {"shoppinglist-count": "3"}
        for i in range(0, 3):
            post_data.update(
                {
                    "shoppinglist-%d-deleted" % i: "",
                    "shoppinglist-%d-order" % i: str(i),
                    "shoppinglist-%d-value" % i: "item %d" % i,
                    "shoppinglist-%d-id" % i: "0000000%d" % i,
                }
            )

        block_value = block.value_from_datadict(post_data, {}, "shoppinglist")
        self.assertEqual(block_value.bound_blocks[1].value, "item 1")
        self.assertEqual(block_value.bound_blocks[1].id, "00000001")
def test_ordering_in_form_submission_uses_order_field(self):
block = blocks.ListBlock(blocks.CharBlock())
# check that items are ordered by the 'order' field, not the order they appear in the form
post_data = {"shoppinglist-count": "3"}
for i in range(0, 3):
post_data.update(
{
"shoppinglist-%d-deleted" % i: "",
"shoppinglist-%d-order" % i: str(2 - i),
"shoppinglist-%d-value" % i: "item %d" % i,
"shoppinglist-%d-id" % i: "0000000%d" % i,
}
)
block_value = block.value_from_datadict(post_data, {}, "shoppinglist")
self.assertEqual(block_value[2], "item 0")
    def test_ordering_in_form_submission_is_numeric(self):
        """Items are ordered by the '-order' field numerically, not
        alphabetically (i.e. '10' sorts after '9', not after '1')."""
        block = blocks.ListBlock(blocks.CharBlock())

        # 12 items ensures double-digit order values appear
        post_data = {"shoppinglist-count": "12"}
        for i in range(0, 12):
            post_data.update(
                {
                    "shoppinglist-%d-deleted" % i: "",
                    "shoppinglist-%d-order" % i: str(i),
                    "shoppinglist-%d-value" % i: "item %d" % i,
                    "shoppinglist-%d-id" % i: "0000000%d" % i,
                }
            )

        block_value = block.value_from_datadict(post_data, {}, "shoppinglist")
        self.assertEqual(block_value[2], "item 2")
def test_can_specify_default(self):
block = blocks.ListBlock(
blocks.CharBlock(), default=["peas", "beans", "carrots"]
)
self.assertEqual(list(block.get_default()), ["peas", "beans", "carrots"])
    def test_default_default(self):
        """
        if no explicit 'default' is set on the ListBlock, it should fall back on
        a single instance of the child block in its default state.
        """
        block = blocks.ListBlock(blocks.CharBlock(default="chocolate"))

        self.assertEqual(list(block.get_default()), ["chocolate"])

        block.set_name("test_shoppinglistblock")
        js_args = ListBlockAdapter().js_args(block)
        # js_args[2] (the initial child state) matches the child's default
        self.assertEqual(js_args[2], "chocolate")
    def test_default_value_is_distinct_instance(self):
        """
        Whenever the default value of a ListBlock is invoked, it should be a distinct
        instance of the list so that modifying it doesn't modify other places where the
        default value appears.
        """
        class ShoppingListBlock(blocks.StructBlock):
            shop = blocks.CharBlock()
            items = blocks.ListBlock(blocks.CharBlock(default="chocolate"))

        block = ShoppingListBlock()

        tesco_shopping = block.to_python(
            {"shop": "Tesco"}
        )  # 'items' will default to ['chocolate']
        asda_shopping = block.to_python(
            {"shop": "Asda"}
        )  # 'items' will default to ['chocolate'], but a distinct instance

        tesco_shopping["items"].append("cake")
        self.assertEqual(list(tesco_shopping["items"]), ["chocolate", "cake"])
        # asda_shopping should not be modified
        self.assertEqual(list(asda_shopping["items"]), ["chocolate"])
    def test_adapt_with_classname_via_kwarg(self):
        """form_classname from kwargs to be used as an additional class when rendering list block"""
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock, form_classname="special-list-class")
        block.set_name("test_listblock")
        js_args = ListBlockAdapter().js_args(block)

        # the kwarg value surfaces as the 'classname' meta option
        self.assertEqual(
            js_args[3],
            {
                "label": "Test listblock",
                "icon": "placeholder",
                "classname": "special-list-class",
                "collapsed": False,
                "strings": {
                    "DELETE": "Delete",
                    "DUPLICATE": "Duplicate",
                    "MOVE_DOWN": "Move down",
                    "MOVE_UP": "Move up",
                    "ADD": "Add",
                },
            },
        )
    def test_adapt_with_classname_via_class_meta(self):
        """form_classname from meta to be used as an additional class when rendering list block"""
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        class CustomListBlock(blocks.ListBlock):
            class Meta:
                form_classname = "custom-list-class"

        block = CustomListBlock(LinkBlock)
        block.set_name("test_listblock")
        js_args = ListBlockAdapter().js_args(block)

        # the Meta value surfaces as the 'classname' meta option
        self.assertEqual(
            js_args[3],
            {
                "label": "Test listblock",
                "icon": "placeholder",
                "classname": "custom-list-class",
                "collapsed": False,
                "strings": {
                    "DELETE": "Delete",
                    "DUPLICATE": "Duplicate",
                    "MOVE_DOWN": "Move down",
                    "MOVE_UP": "Move up",
                    "ADD": "Add",
                },
            },
        )
    def test_clean_preserves_block_ids(self):
        """clean() keeps the per-item ids loaded from the database
        representation instead of assigning new ones."""
        block = blocks.ListBlock(blocks.CharBlock())
        block_val = block.to_python(
            [
                {
                    "type": "item",
                    "value": "foo",
                    "id": "11111111-1111-1111-1111-111111111111",
                },
                {
                    "type": "item",
                    "value": "bar",
                    "id": "22222222-2222-2222-2222-222222222222",
                },
            ]
        )
        cleaned_block_val = block.clean(block_val)

        self.assertEqual(
            cleaned_block_val.bound_blocks[0].id, "11111111-1111-1111-1111-111111111111"
        )
    def test_min_num_validation_errors(self):
        """clean() rejects a list shorter than min_num, reporting a
        non-block error, and passes once the minimum is met."""
        block = blocks.ListBlock(blocks.CharBlock(), min_num=2)

        block_val = block.to_python(["foo"])
        with self.assertRaises(ValidationError) as catcher:
            block.clean(block_val)
        self.assertEqual(
            catcher.exception.params,
            {
                "block_errors": [None],
                "non_block_errors": ["The minimum number of items is 2"],
            },
        )

        # a value with >= 2 blocks should pass validation
        block_val = block.to_python(["foo", "bar"])
        self.assertTrue(block.clean(block_val))
    def test_max_num_validation_errors(self):
        """clean() rejects a list longer than max_num, reporting a
        non-block error, and passes once within the limit."""
        block = blocks.ListBlock(blocks.CharBlock(), max_num=2)

        block_val = block.to_python(["foo", "bar", "baz"])
        with self.assertRaises(ValidationError) as catcher:
            block.clean(block_val)
        self.assertEqual(
            catcher.exception.params,
            {
                "block_errors": [None, None, None],
                "non_block_errors": ["The maximum number of items is 2"],
            },
        )

        # a value with <= 2 blocks should pass validation
        block_val = block.to_python(["foo", "bar"])
        self.assertTrue(block.clean(block_val))
    def test_unpack_old_database_format(self):
        """to_python accepts the legacy plain-list DB format and produces a
        value that is both list-like and exposes bound_blocks with fresh ids."""
        block = blocks.ListBlock(blocks.CharBlock())
        list_val = block.to_python(["foo", "bar"])

        # list_val should behave as a list
        self.assertEqual(len(list_val), 2)
        self.assertEqual(list_val[0], "foo")

        # but also provide a bound_blocks property
        self.assertEqual(len(list_val.bound_blocks), 2)
        self.assertEqual(list_val.bound_blocks[0].value, "foo")

        # Bound blocks should be assigned UUIDs
        self.assertRegex(list_val.bound_blocks[0].id, r"[0-9a-f-]+")
    def test_bulk_unpack_old_database_format(self):
        """bulk_to_python handles multiple legacy plain-list values at once,
        producing list-like values with bound_blocks and fresh ids."""
        block = blocks.ListBlock(blocks.CharBlock())
        [list_1, list_2] = block.bulk_to_python([["foo", "bar"], ["xxx", "yyy", "zzz"]])

        self.assertEqual(len(list_1), 2)
        self.assertEqual(len(list_2), 3)
        self.assertEqual(list_1[0], "foo")
        self.assertEqual(list_2[0], "xxx")

        # lists also provide a bound_blocks property
        self.assertEqual(len(list_1.bound_blocks), 2)
        self.assertEqual(list_1.bound_blocks[0].value, "foo")

        # Bound blocks should be assigned UUIDs
        self.assertRegex(list_1.bound_blocks[0].id, r"[0-9a-f-]+")
    def test_unpack_new_database_format(self):
        """to_python accepts the current dict-per-item DB format (type/value/id)
        and preserves the stored ids on the bound blocks."""
        block = blocks.ListBlock(blocks.CharBlock())
        list_val = block.to_python(
            [
                {
                    "type": "item",
                    "value": "foo",
                    "id": "11111111-1111-1111-1111-111111111111",
                },
                {
                    "type": "item",
                    "value": "bar",
                    "id": "22222222-2222-2222-2222-222222222222",
                },
            ]
        )

        # list_val should behave as a list
        self.assertEqual(len(list_val), 2)
        self.assertEqual(list_val[0], "foo")

        # but also provide a bound_blocks property
        self.assertEqual(len(list_val.bound_blocks), 2)
        self.assertEqual(list_val.bound_blocks[0].value, "foo")
        self.assertEqual(
            list_val.bound_blocks[0].id, "11111111-1111-1111-1111-111111111111"
        )
    def test_bulk_unpack_new_database_format(self):
        """bulk_to_python handles multiple current-format (dict-per-item)
        values at once, preserving the stored ids."""
        block = blocks.ListBlock(blocks.CharBlock())
        [list_1, list_2] = block.bulk_to_python(
            [
                [
                    {
                        "type": "item",
                        "value": "foo",
                        "id": "11111111-1111-1111-1111-111111111111",
                    },
                    {
                        "type": "item",
                        "value": "bar",
                        "id": "22222222-2222-2222-2222-222222222222",
                    },
                ],
                [
                    {
                        "type": "item",
                        "value": "baz",
                        "id": "33333333-3333-3333-3333-333333333333",
                    },
                ],
            ]
        )

        self.assertEqual(len(list_1), 2)
        self.assertEqual(len(list_2), 1)
        self.assertEqual(list_1[0], "foo")
        self.assertEqual(list_2[0], "baz")

        # lists also provide a bound_blocks property
        self.assertEqual(len(list_1.bound_blocks), 2)
        self.assertEqual(list_1.bound_blocks[0].value, "foo")
        self.assertEqual(
            list_1.bound_blocks[0].id, "11111111-1111-1111-1111-111111111111"
        )
    def test_assign_listblock_with_list(self):
        """A plain Python list appended into a StreamValue for a ListBlock
        child survives clean() and serializes to the dict-per-item format."""
        stream_block = blocks.StreamBlock(
            [
                ("bullet_list", blocks.ListBlock(blocks.CharBlock())),
            ]
        )
        stream_value = stream_block.to_python([])
        stream_value.append(("bullet_list", ["foo", "bar"]))
        clean_stream_value = stream_block.clean(stream_value)
        result = stream_block.get_prep_value(clean_stream_value)
        self.assertEqual(result[0]["type"], "bullet_list")
        # each list item is serialized as its own dict with a 'value' key
        self.assertEqual(len(result[0]["value"]), 2)
        self.assertEqual(result[0]["value"][0]["value"], "foo")
class TestListBlockWithFixtures(TestCase):
    """ListBlock tests that need database fixtures (PageChooserBlock children)."""

    fixtures = ["test.json"]

    def test_calls_child_bulk_to_python_when_available(self):
        """to_python resolves all page IDs in a single query, via the child
        block's bulk lookup rather than one query per item."""
        page_ids = [2, 3, 4, 5]
        expected_pages = Page.objects.filter(pk__in=page_ids)
        block = blocks.ListBlock(blocks.PageChooserBlock())

        with self.assertNumQueries(1):
            pages = block.to_python(page_ids)

        self.assertSequenceEqual(pages, expected_pages)

    def test_bulk_to_python(self):
        """bulk_to_python resolves several lists of page IDs (including an
        empty one) with a single query overall."""
        block = blocks.ListBlock(blocks.PageChooserBlock())

        with self.assertNumQueries(1):
            result = block.bulk_to_python([[4, 5], [], [2]])

        # result will be a list of ListValues - convert to lists for equality check
        clean_result = [list(val) for val in result]

        self.assertEqual(
            clean_result,
            [
                [Page.objects.get(id=4), Page.objects.get(id=5)],
                [],
                [Page.objects.get(id=2)],
            ],
        )
class TestStreamBlock(WagtailTestUtils, SimpleTestCase):
    """Tests for blocks.StreamBlock: construction, rendering, validation,
    adapters, and form data round-trips."""

    def test_initialisation(self):
        """Child blocks passed as (name, block) tuples keep their order."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
                ("paragraph", blocks.CharBlock()),
            ]
        )

        self.assertEqual(list(block.child_blocks.keys()), ["heading", "paragraph"])
    def test_initialisation_with_binary_string_names(self):
        # migrations will sometimes write out names as binary strings, just to keep us on our toes
        block = blocks.StreamBlock(
            [
                (b"heading", blocks.CharBlock()),
                (b"paragraph", blocks.CharBlock()),
            ]
        )

        # the bytes names are kept as-is, not coerced to str
        self.assertEqual(list(block.child_blocks.keys()), [b"heading", b"paragraph"])
def test_initialisation_from_subclass(self):
class ArticleBlock(blocks.StreamBlock):
heading = blocks.CharBlock()
paragraph = blocks.CharBlock()
block = ArticleBlock()
self.assertEqual(list(block.child_blocks.keys()), ["heading", "paragraph"])
    def test_initialisation_from_subclass_with_extra(self):
        """Constructor-supplied child blocks are appended after the ones
        declared on the subclass."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock([("intro", blocks.CharBlock())])

        self.assertEqual(
            list(block.child_blocks.keys()), ["heading", "paragraph", "intro"]
        )
    def test_initialisation_with_multiple_subclassses(self):
        """Child blocks accumulate down an inheritance chain: base-class
        blocks first, then the subclass's own."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        class ArticleWithIntroBlock(ArticleBlock):
            intro = blocks.CharBlock()

        block = ArticleWithIntroBlock()

        self.assertEqual(
            list(block.child_blocks.keys()), ["heading", "paragraph", "intro"]
        )
    def test_initialisation_with_mixins(self):
        """
        The order of child blocks of a ``StreamBlock`` with multiple parent
        classes is slightly surprising at first. Child blocks are inherited in
        a bottom-up order, by traversing the MRO in reverse. In the example
        below, ``ArticleWithIntroBlock`` will have an MRO of::

            [ArticleWithIntroBlock, IntroMixin, ArticleBlock, StreamBlock, ...]

        This will result in ``intro`` appearing *after* ``heading`` and
        ``paragraph`` in ``ArticleWithIntroBlock.child_blocks``, even though
        ``IntroMixin`` appeared before ``ArticleBlock``.
        """
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        class IntroMixin(blocks.StreamBlock):
            intro = blocks.CharBlock()

        class ArticleWithIntroBlock(IntroMixin, ArticleBlock):
            by_line = blocks.CharBlock()

        block = ArticleWithIntroBlock()

        self.assertEqual(
            list(block.child_blocks.keys()),
            ["heading", "paragraph", "intro", "by_line"],
        )
    def test_field_has_changed(self):
        """BlockField.has_changed compares content, not just block ids:
        same ids with different content still counts as changed."""
        block = blocks.StreamBlock([("paragraph", blocks.CharBlock())])

        initial_value = blocks.StreamValue(block, [("paragraph", "test")])
        initial_value[0].id = "a"

        data_value = blocks.StreamValue(block, [("paragraph", "test")])
        data_value[0].id = "a"

        # identical ids and content, so has_changed should return False
        self.assertFalse(
            blocks.BlockField(block).has_changed(initial_value, data_value)
        )

        changed_data_value = blocks.StreamValue(block, [("paragraph", "not a test")])
        changed_data_value[0].id = "a"

        # identical ids but changed content, so has_changed should return True
        self.assertTrue(
            blocks.BlockField(block).has_changed(initial_value, changed_data_value)
        )
def test_required_raises_an_exception_if_empty(self):
block = blocks.StreamBlock([("paragraph", blocks.CharBlock())], required=True)
value = blocks.StreamValue(block, [])
with self.assertRaises(blocks.StreamBlockValidationError):
block.clean(value)
    def test_required_does_not_raise_an_exception_if_not_empty(self):
        """A required StreamBlock with at least one child cleans successfully."""
        block = blocks.StreamBlock([("paragraph", blocks.CharBlock())], required=True)
        value = block.to_python([{"type": "paragraph", "value": "Hello"}])

        try:
            block.clean(value)
        except blocks.StreamBlockValidationError:
            raise self.failureException(
                "%s was raised" % blocks.StreamBlockValidationError
            )
    def test_not_required_does_not_raise_an_exception_if_empty(self):
        """A StreamBlock with required=False accepts an empty value."""
        block = blocks.StreamBlock([("paragraph", blocks.CharBlock())], required=False)
        value = blocks.StreamValue(block, [])

        try:
            block.clean(value)
        except blocks.StreamBlockValidationError:
            raise self.failureException(
                "%s was raised" % blocks.StreamBlockValidationError
            )
def test_required_by_default(self):
block = blocks.StreamBlock([("paragraph", blocks.CharBlock())])
value = blocks.StreamValue(block, [])
with self.assertRaises(blocks.StreamBlockValidationError):
block.clean(value)
    def render_article(self, data):
        """Helper: render the given raw stream data through an article
        StreamBlock (heading + rich-text paragraph) and return the HTML."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.RichTextBlock()

        block = ArticleBlock()
        value = block.to_python(data)

        return block.render(value)
    def test_get_api_representation_calls_same_method_on_children_with_context(self):
        """
        The get_api_representation method of a StreamBlock should invoke
        the block's get_api_representation method on each child and
        the context should be passed on.
        """
        class ContextBlock(blocks.CharBlock):
            def get_api_representation(self, value, context=None):
                # echo back the context entry keyed by the child's value,
                # so the assertion proves context reached each child
                return context[value]

        block = blocks.StreamBlock(
            [
                ("language", ContextBlock()),
                ("author", ContextBlock()),
            ]
        )
        api_representation = block.get_api_representation(
            block.to_python(
                [
                    {"type": "language", "value": "en"},
                    {"type": "author", "value": "wagtail", "id": "111111"},
                ]
            ),
            context={"en": "English", "wagtail": "Wagtail!"},
        )

        # ids present in the source data are preserved; missing ids come out as None
        self.assertListEqual(
            api_representation,
            [
                {"type": "language", "value": "English", "id": None},
                {"type": "author", "value": "Wagtail!", "id": "111111"},
            ],
        )
    def test_render(self):
        """Each stream child renders in a div classed by its block type."""
        html = self.render_article(
            [
                {
                    "type": "heading",
                    "value": "My title",
                },
                {
                    "type": "paragraph",
                    "value": "My <i>first</i> paragraph",
                },
                {
                    "type": "paragraph",
                    "value": "My second paragraph",
                },
            ]
        )

        self.assertIn('<div class="block-heading">My title</div>', html)
        self.assertIn(
            '<div class="block-paragraph">My <i>first</i> paragraph</div>', html
        )
        self.assertIn('<div class="block-paragraph">My second paragraph</div>', html)
    def test_render_unknown_type(self):
        # This can happen if a developer removes a type from their StreamBlock
        html = self.render_article(
            [
                {
                    "type": "foo",
                    "value": "Hello",
                },
                {
                    "type": "paragraph",
                    "value": "My first paragraph",
                },
            ]
        )
        # the unknown 'foo' item is silently dropped from the output
        self.assertNotIn("foo", html)
        self.assertNotIn("Hello", html)
        self.assertIn('<div class="block-paragraph">My first paragraph</div>', html)
    def test_render_calls_block_render_on_children(self):
        """
        The default rendering of a StreamBlock should invoke the block's render method
        on each child, rather than just outputting the child value as a string.
        """
        block = blocks.StreamBlock(
            [
                (
                    "heading",
                    blocks.CharBlock(template="tests/blocks/heading_block.html"),
                ),
                ("paragraph", blocks.CharBlock()),
            ]
        )
        value = block.to_python([{"type": "heading", "value": "Hello"}])
        html = block.render(value)
        self.assertIn('<div class="block-heading"><h1>Hello</h1></div>', html)

        # calling render_as_block() on value (a StreamValue instance)
        # should be equivalent to block.render(value)
        html = value.render_as_block()
        self.assertIn('<div class="block-heading"><h1>Hello</h1></div>', html)
    def test_render_passes_context_to_children(self):
        """Template context given to render()/render_as_block() reaches the
        child block templates."""
        block = blocks.StreamBlock(
            [
                (
                    "heading",
                    blocks.CharBlock(template="tests/blocks/heading_block.html"),
                ),
                ("paragraph", blocks.CharBlock()),
            ]
        )
        value = block.to_python([{"type": "heading", "value": "Bonjour"}])
        html = block.render(
            value,
            context={
                "language": "fr",
            },
        )
        self.assertIn(
            '<div class="block-heading"><h1 lang="fr">Bonjour</h1></div>', html
        )

        # calling render_as_block(context=foo) on value (a StreamValue instance)
        # should be equivalent to block.render(value, context=foo)
        html = value.render_as_block(
            context={
                "language": "fr",
            }
        )
        self.assertIn(
            '<div class="block-heading"><h1 lang="fr">Bonjour</h1></div>', html
        )
    def test_render_on_stream_child_uses_child_template(self):
        """
        Accessing a child element of the stream (giving a StreamChild object) and rendering it
        should use the block template, not just render the value's string representation
        """
        block = blocks.StreamBlock(
            [
                (
                    "heading",
                    blocks.CharBlock(template="tests/blocks/heading_block.html"),
                ),
                ("paragraph", blocks.CharBlock()),
            ]
        )
        value = block.to_python([{"type": "heading", "value": "Hello"}])
        html = value[0].render()
        self.assertEqual("<h1>Hello</h1>", html)

        # StreamChild.__str__ should do the same
        html = str(value[0])
        self.assertEqual("<h1>Hello</h1>", html)

        # and so should StreamChild.render_as_block
        html = value[0].render_as_block()
        self.assertEqual("<h1>Hello</h1>", html)
    def test_can_pass_context_to_stream_child_template(self):
        """StreamChild.render and its alias render_as_block both forward the
        given context to the child's template."""
        block = blocks.StreamBlock(
            [
                (
                    "heading",
                    blocks.CharBlock(template="tests/blocks/heading_block.html"),
                ),
                ("paragraph", blocks.CharBlock()),
            ]
        )
        value = block.to_python([{"type": "heading", "value": "Bonjour"}])
        html = value[0].render(context={"language": "fr"})
        self.assertEqual('<h1 lang="fr">Bonjour</h1>', html)

        # the same functionality should be available through the alias `render_as_block`
        html = value[0].render_as_block(context={"language": "fr"})
        self.assertEqual('<h1 lang="fr">Bonjour</h1>', html)
    def test_adapt(self):
        """StreamBlockAdapter.js_args exposes, in order: the block name, the
        grouped child block definitions, the per-child initial states, and
        the meta options."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()
        block.set_name("test_streamblock")
        js_args = StreamBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_streamblock")

        # convert group_by iterable into a list
        grouped_blocks = [
            (group_name, list(group_iter)) for (group_name, group_iter) in js_args[1]
        ]
        # ungrouped child blocks all fall under the empty-string group
        self.assertEqual(len(grouped_blocks), 1)
        group_name, block_iter = grouped_blocks[0]
        self.assertEqual(group_name, "")
        block_list = list(block_iter)
        self.assertIsInstance(block_list[0], blocks.CharBlock)
        self.assertEqual(block_list[0].name, "heading")
        self.assertIsInstance(block_list[1], blocks.CharBlock)
        self.assertEqual(block_list[1].name, "paragraph")

        self.assertEqual(js_args[2], {"heading": None, "paragraph": None})
        self.assertEqual(
            js_args[3],
            {
                "label": "Test streamblock",
                "icon": "placeholder",
                "classname": None,
                "collapsed": False,
                "maxNum": None,
                "minNum": None,
                "blockCounts": {},
                "required": True,
                "strings": {
                    "DELETE": "Delete",
                    "DUPLICATE": "Duplicate",
                    "MOVE_DOWN": "Move down",
                    "MOVE_UP": "Move up",
                    "ADD": "Add",
                },
            },
        )
    def test_value_omitted_from_data(self):
        """A StreamBlock value counts as present in the form data whenever
        its '-count' field is there; without it the value is omitted."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
            ]
        )

        # overall value is considered present in the form if the 'count' field is present
        self.assertFalse(
            block.value_omitted_from_data({"mystream-count": "0"}, {}, "mystream")
        )
        self.assertFalse(
            block.value_omitted_from_data(
                {
                    "mystream-count": "1",
                    "mystream-0-type": "heading",
                    "mystream-0-value": "hello",
                    "mystream-0-deleted": "",
                    "mystream-0-order": "0",
                },
                {},
                "mystream",
            )
        )
        self.assertTrue(
            block.value_omitted_from_data({"nothing-here": "nope"}, {}, "mystream")
        )
    def test_validation_errors(self):
        """Per-child validation failures are collected into the exception's
        params, keyed by the failing child's index in the stream."""
        class ValidatedBlock(blocks.StreamBlock):
            char = blocks.CharBlock()
            url = blocks.URLBlock()

        block = ValidatedBlock()

        value = blocks.StreamValue(
            block,
            [
                ("char", ""),
                ("char", "foo"),
                ("url", "http://example.com/"),
                ("url", "not a url"),
            ],
        )

        with self.assertRaises(ValidationError) as catcher:
            block.clean(value)
        # only indices 0 (empty required char) and 3 (malformed url) fail
        self.assertEqual(
            catcher.exception.params,
            {
                0: ["This field is required."],
                3: ["Enter a valid URL."],
            },
        )
    def test_min_num_validation_errors(self):
        """clean() rejects a stream shorter than min_num with a stream-level
        ('__all__') error, and passes once the minimum is met."""
        class ValidatedBlock(blocks.StreamBlock):
            char = blocks.CharBlock()
            url = blocks.URLBlock()

        block = ValidatedBlock(min_num=1)

        value = blocks.StreamValue(block, [])

        with self.assertRaises(ValidationError) as catcher:
            block.clean(value)
        self.assertEqual(
            catcher.exception.params, {"__all__": ["The minimum number of items is 1"]}
        )

        # a value with >= 1 blocks should pass validation
        value = blocks.StreamValue(block, [("char", "foo")])
        self.assertTrue(block.clean(value))
    def test_max_num_validation_errors(self):
        """clean() rejects a stream longer than max_num with a stream-level
        ('__all__') error, and passes once within the limit."""
        class ValidatedBlock(blocks.StreamBlock):
            char = blocks.CharBlock()
            url = blocks.URLBlock()

        block = ValidatedBlock(max_num=1)

        value = blocks.StreamValue(
            block,
            [
                ("char", "foo"),
                ("char", "foo"),
                ("url", "http://example.com/"),
                ("url", "http://example.com/"),
            ],
        )

        with self.assertRaises(ValidationError) as catcher:
            block.clean(value)
        self.assertEqual(
            catcher.exception.params, {"__all__": ["The maximum number of items is 1"]}
        )

        # a value with 1 block should pass validation
        value = blocks.StreamValue(block, [("char", "foo")])
        self.assertTrue(block.clean(value))
    def test_block_counts_min_validation_errors(self):
        """A per-type min_num in block_counts is enforced: too few of the
        named type raises a stream-level error citing the block's label."""
        class ValidatedBlock(blocks.StreamBlock):
            char = blocks.CharBlock()
            url = blocks.URLBlock()

        block = ValidatedBlock(block_counts={"char": {"min_num": 1}})

        value = blocks.StreamValue(
            block,
            [
                ("url", "http://example.com/"),
                ("url", "http://example.com/"),
            ],
        )

        with self.assertRaises(ValidationError) as catcher:
            block.clean(value)
        self.assertEqual(
            catcher.exception.params,
            {"__all__": ["Char: The minimum number of items is 1"]},
        )

        # a value with 1 char block should pass validation
        value = blocks.StreamValue(
            block,
            [
                ("url", "http://example.com/"),
                ("char", "foo"),
                ("url", "http://example.com/"),
            ],
        )
        self.assertTrue(block.clean(value))
    def test_block_counts_max_validation_errors(self):
        """A per-type max_num in block_counts is enforced: too many of the
        named type raises a stream-level error citing the block's label."""
        class ValidatedBlock(blocks.StreamBlock):
            char = blocks.CharBlock()
            url = blocks.URLBlock()

        block = ValidatedBlock(block_counts={"char": {"max_num": 1}})

        value = blocks.StreamValue(
            block,
            [
                ("char", "foo"),
                ("char", "foo"),
                ("url", "http://example.com/"),
                ("url", "http://example.com/"),
            ],
        )

        with self.assertRaises(ValidationError) as catcher:
            block.clean(value)
        self.assertEqual(
            catcher.exception.params,
            {"__all__": ["Char: The maximum number of items is 1"]},
        )

        # a value with 1 char block should pass validation
        value = blocks.StreamValue(
            block,
            [
                ("char", "foo"),
                ("url", "http://example.com/"),
                ("url", "http://example.com/"),
            ],
        )
        self.assertTrue(block.clean(value))
    def test_ordering_in_form_submission_uses_order_field(self):
        """Submitted stream children are sequenced by their '-order' field,
        not by form position, and their '-id' values are preserved."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()

        # check that items are ordered by the 'order' field, not the order they appear in the form
        post_data = {"article-count": "3"}
        for i in range(0, 3):
            post_data.update(
                {
                    "article-%d-deleted" % i: "",
                    "article-%d-order" % i: str(2 - i),
                    "article-%d-type" % i: "heading",
                    "article-%d-value" % i: "heading %d" % i,
                    "article-%d-id" % i: "000%d" % i,
                }
            )

        block_value = block.value_from_datadict(post_data, {}, "article")
        self.assertEqual(block_value[2].value, "heading 0")
        self.assertEqual(block_value[2].id, "0000")
    def test_ordering_in_form_submission_is_numeric(self):
        """Stream children are ordered by the '-order' field numerically,
        not alphabetically (i.e. '10' sorts after '9', not after '1')."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()

        # check that items are ordered by 'order' numerically, not alphabetically
        post_data = {"article-count": "12"}
        for i in range(0, 12):
            post_data.update(
                {
                    "article-%d-deleted" % i: "",
                    "article-%d-order" % i: str(i),
                    "article-%d-type" % i: "heading",
                    "article-%d-value" % i: "heading %d" % i,
                }
            )

        block_value = block.value_from_datadict(post_data, {}, "article")
        self.assertEqual(block_value[2].value, "heading 2")
    def test_searchable_content(self):
        """get_searchable_content collects the text of every child block,
        in stream order."""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()
        value = block.to_python(
            [
                {
                    "type": "heading",
                    "value": "My title",
                },
                {
                    "type": "paragraph",
                    "value": "My first paragraph",
                },
                {
                    "type": "paragraph",
                    "value": "My second paragraph",
                },
            ]
        )

        content = block.get_searchable_content(value)

        self.assertEqual(
            content,
            [
                "My title",
                "My first paragraph",
                "My second paragraph",
            ],
        )
    def test_meta_default(self):
        """Test that we can specify a default value in the Meta of a StreamBlock"""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

            class Meta:
                default = [("heading", "A default heading")]

        # to access the default value, we retrieve it through a StructBlock
        # from a struct value that's missing that key
        class ArticleContainerBlock(blocks.StructBlock):
            author = blocks.CharBlock()
            article = ArticleBlock()

        block = ArticleContainerBlock()
        struct_value = block.to_python({"author": "Bob"})
        stream_value = struct_value["article"]

        self.assertIsInstance(stream_value, blocks.StreamValue)
        self.assertEqual(len(stream_value), 1)
        self.assertEqual(stream_value[0].block_type, "heading")
        self.assertEqual(stream_value[0].value, "A default heading")
    def test_constructor_default(self):
        """Test that we can specify a default value in the constructor of a StreamBlock"""
        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

            class Meta:
                default = [("heading", "A default heading")]

        # to access the default value, we retrieve it through a StructBlock
        # from a struct value that's missing that key
        class ArticleContainerBlock(blocks.StructBlock):
            author = blocks.CharBlock()
            # the constructor default takes precedence over ArticleBlock.Meta.default
            article = ArticleBlock(default=[("heading", "A different default heading")])

        block = ArticleContainerBlock()
        struct_value = block.to_python({"author": "Bob"})
        stream_value = struct_value["article"]

        self.assertIsInstance(stream_value, blocks.StreamValue)
        self.assertEqual(len(stream_value), 1)
        self.assertEqual(stream_value[0].block_type, "heading")
        self.assertEqual(stream_value[0].value, "A different default heading")
def test_stream_value_equality(self):
block = blocks.StreamBlock(
[
("text", blocks.CharBlock()),
]
)
value1 = block.to_python([{"type": "text", "value": "hello"}])
value2 = block.to_python([{"type": "text", "value": "hello"}])
value3 = block.to_python([{"type": "text", "value": "goodbye"}])
self.assertEqual(value1, value2)
self.assertNotEqual(value1, value3)
    def test_adapt_considers_group_attribute(self):
        """If group attributes are set in Block Meta classes, make sure the blocks are grouped together"""

        class Group1Block1(blocks.CharBlock):
            class Meta:
                group = "group1"

        class Group1Block2(blocks.CharBlock):
            class Meta:
                group = "group1"

        class Group2Block1(blocks.CharBlock):
            class Meta:
                group = "group2"

        class Group2Block2(blocks.CharBlock):
            class Meta:
                group = "group2"

        # a block with no group declared should fall into the unnamed ("") group
        class NoGroupBlock(blocks.CharBlock):
            pass

        block = blocks.StreamBlock(
            [
                ("b1", Group1Block1()),
                ("b2", Group1Block2()),
                ("b3", Group2Block1()),
                ("b4", Group2Block2()),
                ("ngb", NoGroupBlock()),
            ]
        )
        block.set_name("test_streamblock")
        js_args = StreamBlockAdapter().js_args(block)
        # js_args[1] is the (group_name, blockdefs) sequence produced by the adapter
        blockdefs_dict = dict(js_args[1])
        self.assertEqual(blockdefs_dict.keys(), {"", "group1", "group2"})
    def test_value_from_datadict(self):
        """
        value_from_datadict should honour the per-child ``deleted`` and ``order``
        form fields: deleted children are dropped and the remainder is sorted
        by its ``order`` value.
        """

        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()

        # three submitted children: index 1 is marked deleted, and the order
        # fields reverse the on-screen positions of the other two
        value = block.value_from_datadict(
            {
                "foo-count": "3",
                "foo-0-deleted": "",
                "foo-0-order": "2",
                "foo-0-type": "heading",
                "foo-0-id": "0000",
                "foo-0-value": "this is my heading",
                "foo-1-deleted": "1",
                "foo-1-order": "1",
                "foo-1-type": "heading",
                "foo-1-id": "0001",
                "foo-1-value": "a deleted heading",
                "foo-2-deleted": "",
                "foo-2-order": "0",
                "foo-2-type": "paragraph",
                "foo-2-id": "",
                "foo-2-value": "<p>this is a paragraph</p>",
            },
            {},
            prefix="foo",
        )

        # deleted child gone; remaining two sorted by their "order" values
        self.assertEqual(len(value), 2)
        self.assertEqual(value[0].block_type, "paragraph")
        self.assertEqual(value[0].id, "")
        self.assertEqual(value[0].value, "<p>this is a paragraph</p>")
        self.assertEqual(value[1].block_type, "heading")
        self.assertEqual(value[1].id, "0000")
        self.assertEqual(value[1].value, "this is my heading")
    def check_get_prep_value(self, stream_data, is_lazy):
        """
        Shared assertions for get_prep_value: given stream_data containing one
        child with an id and one without, verify serialisation and stable
        id assignment. ``is_lazy`` selects the StreamValue construction mode.
        """

        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()

        value = blocks.StreamValue(block, stream_data, is_lazy=is_lazy)
        jsonish_value = block.get_prep_value(value)

        self.assertEqual(len(jsonish_value), 2)
        self.assertEqual(
            jsonish_value[0],
            {"type": "heading", "value": "this is my heading", "id": "0000"},
        )

        self.assertEqual(jsonish_value[1]["type"], "paragraph")
        self.assertEqual(jsonish_value[1]["value"], "<p>this is a paragraph</p>")
        # get_prep_value should assign a new (random and non-empty)
        # ID to this block, as it didn't have one already.
        self.assertTrue(jsonish_value[1]["id"])

        # Calling get_prep_value again should preserve existing IDs, including the one
        # just assigned to block 1
        jsonish_value_again = block.get_prep_value(value)
        self.assertEqual(jsonish_value[0]["id"], jsonish_value_again[0]["id"])
        self.assertEqual(jsonish_value[1]["id"], jsonish_value_again[1]["id"])
def test_get_prep_value_not_lazy(self):
stream_data = [
("heading", "this is my heading", "0000"),
("paragraph", "<p>this is a paragraph</p>"),
]
self.check_get_prep_value(stream_data, is_lazy=False)
def test_get_prep_value_is_lazy(self):
stream_data = [
{"type": "heading", "value": "this is my heading", "id": "0000"},
{"type": "paragraph", "value": "<p>this is a paragraph</p>"},
]
self.check_get_prep_value(stream_data, is_lazy=True)
    def check_get_prep_value_nested_streamblocks(self, stream_data, is_lazy):
        """
        Shared assertions for get_prep_value on StreamBlocks nested inside a
        StructBlock: the "left" child carries an id, the "right" child does not
        and should be assigned one. ``is_lazy`` selects the StreamValue mode.
        """

        class TwoColumnBlock(blocks.StructBlock):
            left = blocks.StreamBlock([("text", blocks.CharBlock())])
            right = blocks.StreamBlock([("text", blocks.CharBlock())])

        block = TwoColumnBlock()

        value = {
            k: blocks.StreamValue(block.child_blocks[k], v, is_lazy=is_lazy)
            for k, v in stream_data.items()
        }
        jsonish_value = block.get_prep_value(value)

        self.assertEqual(len(jsonish_value), 2)
        self.assertEqual(
            jsonish_value["left"],
            [{"type": "text", "value": "some text", "id": "0000"}],
        )

        self.assertEqual(len(jsonish_value["right"]), 1)
        right_block = jsonish_value["right"][0]
        self.assertEqual(right_block["type"], "text")
        self.assertEqual(right_block["value"], "some other text")
        # get_prep_value should assign a new (random and non-empty)
        # ID to this block, as it didn't have one already.
        self.assertTrue(right_block["id"])
def test_get_prep_value_nested_streamblocks_not_lazy(self):
stream_data = {
"left": [("text", "some text", "0000")],
"right": [("text", "some other text")],
}
self.check_get_prep_value_nested_streamblocks(stream_data, is_lazy=False)
def test_get_prep_value_nested_streamblocks_is_lazy(self):
stream_data = {
"left": [
{
"type": "text",
"value": "some text",
"id": "0000",
}
],
"right": [
{
"type": "text",
"value": "some other text",
}
],
}
self.check_get_prep_value_nested_streamblocks(stream_data, is_lazy=True)
    def test_modifications_to_stream_child_id_are_saved(self):
        """Assigning to a child's ``id`` attribute is reflected in get_prep_value output."""

        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()
        stream = block.to_python(
            [
                {"type": "heading", "value": "hello", "id": "0001"},
                {"type": "paragraph", "value": "world", "id": "0002"},
            ]
        )
        # mutate the second child's id in place
        stream[1].id = "0003"
        raw_data = block.get_prep_value(stream)
        self.assertEqual(
            raw_data,
            [
                {"type": "heading", "value": "hello", "id": "0001"},
                {"type": "paragraph", "value": "world", "id": "0003"},
            ],
        )
def test_modifications_to_stream_child_value_are_saved(self):
class ArticleBlock(blocks.StreamBlock):
heading = blocks.CharBlock()
paragraph = blocks.CharBlock()
block = ArticleBlock()
stream = block.to_python(
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "world", "id": "0002"},
]
)
stream[1].value = "earth"
raw_data = block.get_prep_value(stream)
self.assertEqual(
raw_data,
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "earth", "id": "0002"},
],
)
def test_set_streamvalue_item(self):
class ArticleBlock(blocks.StreamBlock):
heading = blocks.CharBlock()
paragraph = blocks.CharBlock()
block = ArticleBlock()
stream = block.to_python(
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "world", "id": "0002"},
]
)
stream[1] = ("heading", "goodbye", "0003")
raw_data = block.get_prep_value(stream)
self.assertEqual(
raw_data,
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "heading", "value": "goodbye", "id": "0003"},
],
)
def test_delete_streamvalue_item(self):
class ArticleBlock(blocks.StreamBlock):
heading = blocks.CharBlock()
paragraph = blocks.CharBlock()
block = ArticleBlock()
stream = block.to_python(
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "world", "id": "0002"},
]
)
del stream[0]
raw_data = block.get_prep_value(stream)
self.assertEqual(
raw_data,
[
{"type": "paragraph", "value": "world", "id": "0002"},
],
)
def test_insert_streamvalue_item(self):
class ArticleBlock(blocks.StreamBlock):
heading = blocks.CharBlock()
paragraph = blocks.CharBlock()
block = ArticleBlock()
stream = block.to_python(
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "world", "id": "0002"},
]
)
stream.insert(1, ("paragraph", "mutable", "0003"))
raw_data = block.get_prep_value(stream)
self.assertEqual(
raw_data,
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "mutable", "id": "0003"},
{"type": "paragraph", "value": "world", "id": "0002"},
],
)
def test_append_streamvalue_item(self):
class ArticleBlock(blocks.StreamBlock):
heading = blocks.CharBlock()
paragraph = blocks.CharBlock()
block = ArticleBlock()
stream = block.to_python(
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "world", "id": "0002"},
]
)
stream.append(("paragraph", "of warcraft", "0003"))
raw_data = block.get_prep_value(stream)
self.assertEqual(
raw_data,
[
{"type": "heading", "value": "hello", "id": "0001"},
{"type": "paragraph", "value": "world", "id": "0002"},
{"type": "paragraph", "value": "of warcraft", "id": "0003"},
],
)
    def test_streamvalue_raw_data(self):
        """
        Exercise the interaction between StreamValue.raw_data (the lazy,
        dict-based representation) and the bound-block representation: in-place
        raw_data edits are honoured only until the bound block for that index
        has been accessed, while wholesale replacement / insertion / deletion
        of raw_data entries always propagates.
        """

        class ArticleBlock(blocks.StreamBlock):
            heading = blocks.CharBlock()
            paragraph = blocks.CharBlock()

        block = ArticleBlock()
        stream = block.to_python(
            [
                {"type": "heading", "value": "hello", "id": "0001"},
                {"type": "paragraph", "value": "world", "id": "0002"},
            ]
        )
        self.assertEqual(
            stream.raw_data[0], {"type": "heading", "value": "hello", "id": "0001"}
        )

        stream.raw_data[0]["value"] = "bonjour"
        self.assertEqual(
            stream.raw_data[0], {"type": "heading", "value": "bonjour", "id": "0001"}
        )

        # changes to raw_data will be written back via get_prep_value...
        raw_data = block.get_prep_value(stream)
        self.assertEqual(
            raw_data,
            [
                {"type": "heading", "value": "bonjour", "id": "0001"},
                {"type": "paragraph", "value": "world", "id": "0002"},
            ],
        )

        # ...but once the bound-block representation has been accessed, that takes precedence
        self.assertEqual(stream[0].value, "bonjour")
        stream.raw_data[0]["value"] = "guten tag"
        self.assertEqual(stream.raw_data[0]["value"], "guten tag")
        # the bound block was created from the "bonjour" state and wins from now on
        self.assertEqual(stream[0].value, "bonjour")
        raw_data = block.get_prep_value(stream)
        self.assertEqual(
            raw_data,
            [
                {"type": "heading", "value": "bonjour", "id": "0001"},
                {"type": "paragraph", "value": "world", "id": "0002"},
            ],
        )

        # Replacing a raw_data entry outright will propagate to the bound block, though
        stream.raw_data[0] = {"type": "heading", "value": "konnichiwa", "id": "0003"}
        raw_data = block.get_prep_value(stream)
        self.assertEqual(
            raw_data,
            [
                {"type": "heading", "value": "konnichiwa", "id": "0003"},
                {"type": "paragraph", "value": "world", "id": "0002"},
            ],
        )
        self.assertEqual(stream[0].value, "konnichiwa")

        # deletions / insertions on raw_data will also propagate to the bound block representation
        del stream.raw_data[1]
        stream.raw_data.insert(
            0, {"type": "paragraph", "value": "hello kitty says", "id": "0004"}
        )
        raw_data = block.get_prep_value(stream)
        self.assertEqual(
            raw_data,
            [
                {"type": "paragraph", "value": "hello kitty says", "id": "0004"},
                {"type": "heading", "value": "konnichiwa", "id": "0003"},
            ],
        )
def test_adapt_with_classname_via_kwarg(self):
"""form_classname from kwargs to be used as an additional class when rendering stream block"""
block = blocks.StreamBlock(
[
(b"heading", blocks.CharBlock()),
(b"paragraph", blocks.CharBlock()),
],
form_classname="rocket-section",
)
block.set_name("test_streamblock")
js_args = StreamBlockAdapter().js_args(block)
self.assertEqual(
js_args[3],
{
"label": "Test streamblock",
"icon": "placeholder",
"minNum": None,
"maxNum": None,
"blockCounts": {},
"collapsed": False,
"required": True,
"classname": "rocket-section",
"strings": {
"DELETE": "Delete",
"DUPLICATE": "Duplicate",
"MOVE_DOWN": "Move down",
"MOVE_UP": "Move up",
"ADD": "Add",
},
},
)
    def test_adapt_with_classname_via_class_meta(self):
        """form_classname from meta to be used as an additional class when rendering stream block"""

        class ProfileBlock(blocks.StreamBlock):
            username = blocks.CharBlock()

            class Meta:
                form_classname = "profile-block-large"

        block = ProfileBlock()
        block.set_name("test_streamblock")
        js_args = StreamBlockAdapter().js_args(block)

        # js_args[3] is the adapter's meta dict; Meta.form_classname appears
        # under the "classname" key
        self.assertEqual(
            js_args[3],
            {
                "label": "Test streamblock",
                "icon": "placeholder",
                "minNum": None,
                "maxNum": None,
                "blockCounts": {},
                "collapsed": False,
                "required": True,
                "classname": "profile-block-large",
                "strings": {
                    "DELETE": "Delete",
                    "DUPLICATE": "Duplicate",
                    "MOVE_DOWN": "Move down",
                    "MOVE_UP": "Move up",
                    "ADD": "Add",
                },
            },
        )
class TestStructBlockWithFixtures(TestCase):
    """Tests for StructBlock behaviour that needs real Page records from fixtures."""

    fixtures = ["test.json"]

    def test_bulk_to_python(self):
        """
        StructBlock.bulk_to_python should batch child lookups so that all page
        references across all struct values resolve in a single query, and
        missing keys should fall back to the child block's default.
        """
        page_link_block = blocks.StructBlock(
            [
                ("page", blocks.PageChooserBlock(required=False)),
                ("link_text", blocks.CharBlock(default="missing title")),
            ]
        )

        # all four "page" references should be fetched in one query
        with self.assertNumQueries(1):
            result = page_link_block.bulk_to_python(
                [
                    {"page": 2, "link_text": "page two"},
                    {"page": 3, "link_text": "page three"},
                    {"page": None, "link_text": "no page"},
                    {"page": 4},
                ]
            )

        result_types = [type(val) for val in result]
        self.assertEqual(result_types, [blocks.StructValue] * 4)

        # the final item omitted "link_text", so the CharBlock default applies
        result_titles = [val["link_text"] for val in result]
        self.assertEqual(
            result_titles, ["page two", "page three", "no page", "missing title"]
        )

        result_pages = [val["page"] for val in result]
        self.assertEqual(
            result_pages,
            [
                Page.objects.get(id=2),
                Page.objects.get(id=3),
                None,
                Page.objects.get(id=4),
            ],
        )
class TestStreamBlockWithFixtures(TestCase):
    """Tests for StreamBlock behaviour that needs real Page records from fixtures."""

    fixtures = ["test.json"]

    def test_bulk_to_python(self):
        """
        StreamBlock.bulk_to_python should resolve all page references across
        all streams in one query and return non-lazy StreamValues. Unknown
        block types (here "woof") are silently dropped.
        """
        stream_block = blocks.StreamBlock(
            [
                ("page", blocks.PageChooserBlock()),
                ("heading", blocks.CharBlock()),
            ]
        )

        # The naive implementation of bulk_to_python (calling to_python on each item) would perform
        # NO queries, as StreamBlock.to_python returns a lazy StreamValue that only starts calling
        # to_python on its children (and thus triggering DB queries) when its items are accessed.
        # This is a good thing for a standalone to_python call, because loading a model instance
        # with a StreamField in it will immediately call StreamField.to_python which in turn calls
        # to_python on the top-level StreamBlock, and we really don't want
        # SomeModelWithAStreamField.objects.get(id=1) to immediately trigger a cascading fetch of
        # all objects referenced in the StreamField.
        #
        # However, for bulk_to_python that's bad, as it means each stream in the list would end up
        # doing its own object lookups in isolation, missing the opportunity to group them together
        # into a single call to the child block's bulk_to_python. Therefore, the ideal outcome is
        # that we perform one query now (covering all PageChooserBlocks across all streams),
        # returning a list of non-lazy StreamValues.

        with self.assertNumQueries(1):
            results = stream_block.bulk_to_python(
                [
                    [
                        {"type": "heading", "value": "interesting pages"},
                        {"type": "page", "value": 2},
                        {"type": "page", "value": 3},
                    ],
                    [
                        {"type": "heading", "value": "pages written by dogs"},
                        {"type": "woof", "value": "woof woof"},
                    ],
                    [
                        {"type": "heading", "value": "boring pages"},
                        {"type": "page", "value": 4},
                    ],
                ]
            )

        # If bulk_to_python has indeed given us non-lazy StreamValues, then no further queries
        # should be performed when iterating over its child blocks.
        with self.assertNumQueries(0):
            block_types = [[block.block_type for block in stream] for stream in results]

        # the unrecognised "woof" block has been discarded from the second stream
        self.assertEqual(
            block_types,
            [
                ["heading", "page", "page"],
                ["heading"],
                ["heading", "page"],
            ],
        )

        with self.assertNumQueries(0):
            block_values = [[block.value for block in stream] for stream in results]

        self.assertEqual(
            block_values,
            [
                ["interesting pages", Page.objects.get(id=2), Page.objects.get(id=3)],
                ["pages written by dogs"],
                ["boring pages", Page.objects.get(id=4)],
            ],
        )
class TestPageChooserBlock(TestCase):
    """Tests for PageChooserBlock: (de)serialisation, telepath adaptation,
    form handling, validation, target_model resolution and deconstruction."""

    fixtures = ["test.json"]

    def test_serialize(self):
        """The value of a PageChooserBlock (a Page object) should serialize to an ID"""
        block = blocks.PageChooserBlock()
        christmas_page = Page.objects.get(slug="christmas")
        self.assertEqual(block.get_prep_value(christmas_page), christmas_page.id)

        # None should serialize to None
        self.assertIsNone(block.get_prep_value(None))

    def test_deserialize(self):
        """The serialized value of a PageChooserBlock (an ID) should deserialize to a Page object"""
        block = blocks.PageChooserBlock()
        christmas_page = Page.objects.get(slug="christmas")
        self.assertEqual(block.to_python(christmas_page.id), christmas_page)

        # None should deserialize to None
        self.assertIsNone(block.to_python(None))

    def test_adapt(self):
        """A default PageChooserBlock adapts to an AdminPageChooser targeting Page."""
        from wagtail.admin.widgets.chooser import AdminPageChooser

        block = blocks.PageChooserBlock(help_text="pick a page, any page")

        block.set_name("test_pagechooserblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "test_pagechooserblock")
        self.assertIsInstance(js_args[1], AdminPageChooser)
        self.assertEqual(js_args[1].target_models, [Page])
        self.assertFalse(js_args[1].can_choose_root)
        self.assertEqual(
            js_args[2],
            {
                "label": "Test pagechooserblock",
                "required": True,
                "icon": "redirect",
                "helpText": "pick a page, any page",
                "classname": "field model_choice_field widget-admin_page_chooser fieldname-test_pagechooserblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_adapt_with_target_model_string(self):
        """A page_type given as an "app.Model" string restricts the chooser widget."""
        block = blocks.PageChooserBlock(
            help_text="pick a page, any page", page_type="tests.SimplePage"
        )

        block.set_name("test_pagechooserblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[1].target_models, [SimplePage])

    def test_adapt_with_target_model_literal(self):
        """A page_type given as a model class restricts the chooser widget."""
        block = blocks.PageChooserBlock(
            help_text="pick a page, any page", page_type=SimplePage
        )

        block.set_name("test_pagechooserblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[1].target_models, [SimplePage])

    def test_adapt_with_target_model_multiple_strings(self):
        """Multiple page_type strings all appear in the widget's target_models."""
        block = blocks.PageChooserBlock(
            help_text="pick a page, any page",
            page_type=["tests.SimplePage", "tests.EventPage"],
        )

        block.set_name("test_pagechooserblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[1].target_models, [SimplePage, EventPage])

    def test_adapt_with_target_model_multiple_literals(self):
        """Multiple page_type model classes all appear in the widget's target_models."""
        block = blocks.PageChooserBlock(
            help_text="pick a page, any page", page_type=[SimplePage, EventPage]
        )

        block.set_name("test_pagechooserblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertEqual(js_args[1].target_models, [SimplePage, EventPage])

    def test_adapt_with_can_choose_root(self):
        """can_choose_root=True is passed through to the chooser widget."""
        block = blocks.PageChooserBlock(
            help_text="pick a page, any page", can_choose_root=True
        )

        block.set_name("test_pagechooserblock")
        js_args = FieldBlockAdapter().js_args(block)

        self.assertTrue(js_args[1].can_choose_root)

    def test_form_response(self):
        """A submitted page id resolves to the Page; an empty string resolves to None."""
        block = blocks.PageChooserBlock()
        christmas_page = Page.objects.get(slug="christmas")

        value = block.value_from_datadict({"page": str(christmas_page.id)}, {}, "page")
        self.assertEqual(value, christmas_page)

        empty_value = block.value_from_datadict({"page": ""}, {}, "page")
        self.assertIsNone(empty_value)

    def test_clean(self):
        """clean() enforces required-ness: None fails when required, passes otherwise."""
        required_block = blocks.PageChooserBlock()
        nonrequired_block = blocks.PageChooserBlock(required=False)
        christmas_page = Page.objects.get(slug="christmas")

        self.assertEqual(required_block.clean(christmas_page), christmas_page)
        with self.assertRaises(ValidationError):
            required_block.clean(None)

        self.assertEqual(nonrequired_block.clean(christmas_page), christmas_page)
        self.assertIsNone(nonrequired_block.clean(None))

    def test_target_model_default(self):
        """With no page_type, target_model is the base Page model."""
        block = blocks.PageChooserBlock()
        self.assertEqual(block.target_model, Page)

    def test_target_model_string(self):
        """A single page_type string resolves target_model to that model."""
        block = blocks.PageChooserBlock(page_type="tests.SimplePage")
        self.assertEqual(block.target_model, SimplePage)

    def test_target_model_literal(self):
        """A single page_type class resolves target_model to that model."""
        block = blocks.PageChooserBlock(page_type=SimplePage)
        self.assertEqual(block.target_model, SimplePage)

    def test_target_model_multiple_strings(self):
        """With multiple page_types, target_model falls back to the common base Page."""
        block = blocks.PageChooserBlock(
            page_type=["tests.SimplePage", "tests.EventPage"]
        )
        self.assertEqual(block.target_model, Page)

    def test_target_model_multiple_literals(self):
        """With multiple page_type classes, target_model falls back to the common base Page."""
        block = blocks.PageChooserBlock(page_type=[SimplePage, EventPage])
        self.assertEqual(block.target_model, Page)

    def test_deconstruct_target_model_default(self):
        """Deconstruction of a default block carries no page_type kwarg."""
        block = blocks.PageChooserBlock()
        self.assertEqual(
            block.deconstruct(), ("wagtail.core.blocks.PageChooserBlock", (), {})
        )

    def test_deconstruct_target_model_string(self):
        """Deconstruction normalises a page_type string into a one-element list."""
        block = blocks.PageChooserBlock(page_type="tests.SimplePage")
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.PageChooserBlock",
                (),
                {"page_type": ["tests.SimplePage"]},
            ),
        )

    def test_deconstruct_target_model_literal(self):
        """Deconstruction normalises a page_type class into its "app.Model" string form."""
        block = blocks.PageChooserBlock(page_type=SimplePage)
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.PageChooserBlock",
                (),
                {"page_type": ["tests.SimplePage"]},
            ),
        )

    def test_deconstruct_target_model_multiple_strings(self):
        """Deconstruction preserves a list of page_type strings."""
        block = blocks.PageChooserBlock(
            page_type=["tests.SimplePage", "tests.EventPage"]
        )
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.PageChooserBlock",
                (),
                {"page_type": ["tests.SimplePage", "tests.EventPage"]},
            ),
        )

    def test_deconstruct_target_model_multiple_literals(self):
        """Deconstruction converts a list of page_type classes to their string forms."""
        block = blocks.PageChooserBlock(page_type=[SimplePage, EventPage])
        self.assertEqual(
            block.deconstruct(),
            (
                "wagtail.core.blocks.PageChooserBlock",
                (),
                {"page_type": ["tests.SimplePage", "tests.EventPage"]},
            ),
        )

    def test_bulk_to_python(self):
        """bulk_to_python resolves a list of page ids with a single query."""
        page_ids = [2, 3, 4, 5]
        expected_pages = Page.objects.filter(pk__in=page_ids)
        block = blocks.PageChooserBlock()

        with self.assertNumQueries(1):
            pages = block.bulk_to_python(page_ids)

        self.assertSequenceEqual(pages, expected_pages)
class TestStaticBlock(unittest.TestCase):
    """Tests for StaticBlock: telepath adaptation of its admin text/label,
    and its trivial default/render/serialisation behaviour."""

    def test_adapt_with_constructor(self):
        """admin_text passed to the constructor appears as plain text in js_args."""
        block = blocks.StaticBlock(
            admin_text="Latest posts - This block doesn't need to be configured, it will be displayed automatically",
            template="tests/blocks/posts_static_block.html",
        )
        block.set_name("posts_static_block")

        js_args = StaticBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "posts_static_block")
        self.assertEqual(
            js_args[1],
            {
                "text": "Latest posts - This block doesn't need to be configured, it will be displayed automatically",
                "icon": "placeholder",
                "label": "Posts static block",
            },
        )

    def test_adapt_with_subclass(self):
        """admin_text declared on a Meta class behaves the same as the constructor kwarg."""

        class PostsStaticBlock(blocks.StaticBlock):
            class Meta:
                admin_text = "Latest posts - This block doesn't need to be configured, it will be displayed automatically"
                template = "tests/blocks/posts_static_block.html"

        block = PostsStaticBlock()
        block.set_name("posts_static_block")

        js_args = StaticBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "posts_static_block")
        self.assertEqual(
            js_args[1],
            {
                "text": "Latest posts - This block doesn't need to be configured, it will be displayed automatically",
                "icon": "placeholder",
                "label": "Posts static block",
            },
        )

    def test_adapt_with_subclass_displays_default_text_if_no_admin_text(self):
        """Without admin_text, the fallback text is derived from the Meta label."""

        class LabelOnlyStaticBlock(blocks.StaticBlock):
            class Meta:
                label = "Latest posts"

        block = LabelOnlyStaticBlock()
        block.set_name("posts_static_block")

        js_args = StaticBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "posts_static_block")
        self.assertEqual(
            js_args[1],
            {
                "text": "Latest posts: this block has no options.",
                "icon": "placeholder",
                "label": "Latest posts",
            },
        )

    def test_adapt_with_subclass_displays_default_text_if_no_admin_text_and_no_label(
        self,
    ):
        """Without admin_text or label, both fall back to the block's name."""

        class NoMetaStaticBlock(blocks.StaticBlock):
            pass

        block = NoMetaStaticBlock()
        block.set_name("posts_static_block")

        js_args = StaticBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "posts_static_block")
        self.assertEqual(
            js_args[1],
            {
                "text": "Posts static block: this block has no options.",
                "icon": "placeholder",
                "label": "Posts static block",
            },
        )

    def test_adapt_works_with_mark_safe(self):
        """mark_safe admin_text is passed through under the "html" key instead of "text"."""
        block = blocks.StaticBlock(
            admin_text=mark_safe(
                "<b>Latest posts</b> - This block doesn't need to be configured, it will be displayed automatically"
            ),
            template="tests/blocks/posts_static_block.html",
        )
        block.set_name("posts_static_block")

        js_args = StaticBlockAdapter().js_args(block)

        self.assertEqual(js_args[0], "posts_static_block")
        self.assertEqual(
            js_args[1],
            {
                "html": "<b>Latest posts</b> - This block doesn't need to be configured, it will be displayed automatically",
                "icon": "placeholder",
                "label": "Posts static block",
            },
        )

    def test_get_default(self):
        """A StaticBlock holds no value, so its default is None."""
        block = blocks.StaticBlock()
        default_value = block.get_default()
        self.assertIsNone(default_value)

    def test_render(self):
        """Rendering uses the configured template; the (None) value is irrelevant."""
        block = blocks.StaticBlock(template="tests/blocks/posts_static_block.html")
        result = block.render(None)
        self.assertEqual(result, "<p>PostsStaticBlock template</p>")

    def test_serialize(self):
        """Serialisation of the (None) value is None."""
        block = blocks.StaticBlock()
        result = block.get_prep_value(None)
        self.assertIsNone(result)

    def test_deserialize(self):
        """Deserialisation of None is None."""
        block = blocks.StaticBlock()
        result = block.to_python(None)
        self.assertIsNone(result)
class TestDateBlock(TestCase):
    """Tests for the telepath adaptation of DateBlock."""

    def test_adapt(self):
        """A plain DateBlock adapts to an AdminDateInput with the ISO date format."""
        from wagtail.admin.widgets.datetime import AdminDateInput

        block = blocks.DateBlock()
        block.set_name("test_dateblock")

        args = FieldBlockAdapter().js_args(block)
        name, widget, meta = args[0], args[1], args[2]

        self.assertEqual(name, "test_dateblock")
        self.assertIsInstance(widget, AdminDateInput)
        self.assertEqual(widget.js_format, "Y-m-d")
        self.assertEqual(
            meta,
            {
                "label": "Test dateblock",
                "required": True,
                "icon": "date",
                "classname": "field date_field widget-admin_date_input fieldname-test_dateblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_adapt_with_format(self):
        """A custom strftime format is translated into the equivalent JS date format."""
        block = blocks.DateBlock(format="%d.%m.%Y")
        block.set_name("test_dateblock")

        args = FieldBlockAdapter().js_args(block)

        self.assertEqual(args[1].js_format, "d.m.Y")
class TestTimeBlock(TestCase):
    """Tests for the telepath adaptation of TimeBlock."""

    def test_adapt(self):
        """A plain TimeBlock adapts to an AdminTimeInput with the H:i format."""
        from wagtail.admin.widgets.datetime import AdminTimeInput

        block = blocks.TimeBlock()
        block.set_name("test_timeblock")

        args = FieldBlockAdapter().js_args(block)
        name, widget, meta = args[0], args[1], args[2]

        self.assertEqual(name, "test_timeblock")
        self.assertIsInstance(widget, AdminTimeInput)
        self.assertEqual(widget.js_format, "H:i")
        self.assertEqual(
            meta,
            {
                "label": "Test timeblock",
                "required": True,
                "icon": "time",
                "classname": "field time_field widget-admin_time_input fieldname-test_timeblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_adapt_with_format(self):
        """A custom strftime format is translated into the equivalent JS time format."""
        block = blocks.TimeBlock(format="%H:%M:%S")
        block.set_name("test_timeblock")

        args = FieldBlockAdapter().js_args(block)

        self.assertEqual(args[1].js_format, "H:i:s")
class TestDateTimeBlock(TestCase):
    """Tests for the telepath adaptation of DateTimeBlock."""

    def test_adapt(self):
        """A plain DateTimeBlock adapts to an AdminDateTimeInput with the ISO datetime format."""
        from wagtail.admin.widgets.datetime import AdminDateTimeInput

        block = blocks.DateTimeBlock()
        block.set_name("test_datetimeblock")

        args = FieldBlockAdapter().js_args(block)
        name, widget, meta = args[0], args[1], args[2]

        self.assertEqual(name, "test_datetimeblock")
        self.assertIsInstance(widget, AdminDateTimeInput)
        self.assertEqual(widget.js_format, "Y-m-d H:i")
        self.assertEqual(
            meta,
            {
                "label": "Test datetimeblock",
                "required": True,
                "icon": "date",
                "classname": "field date_time_field widget-admin_date_time_input fieldname-test_datetimeblock",
                "showAddCommentButton": True,
                "strings": {"ADD_COMMENT": "Add Comment"},
            },
        )

    def test_adapt_with_format(self):
        """A custom strftime format is translated into the equivalent JS datetime format."""
        block = blocks.DateTimeBlock(format="%d.%m.%Y %H:%M")
        block.set_name("test_datetimeblock")

        args = FieldBlockAdapter().js_args(block)

        self.assertEqual(args[1].js_format, "d.m.Y H:i")
class TestSystemCheck(TestCase):
    """Tests for block-name system checks (wagtailcore.E001) and their
    recursion through ListBlock, StreamBlock and StructBlock children."""

    def test_name_cannot_contain_non_alphanumeric(self):
        """A '+' in a block name is rejected with the standard naming hint."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
                ("rich+text", blocks.RichTextBlock()),
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(
            errors[0].hint,
            "Block names should follow standard Python conventions for variable names: alphanumeric and underscores, and cannot begin with a digit",
        )
        self.assertEqual(errors[0].obj, block.child_blocks["rich+text"])

    def test_name_must_be_nonempty(self):
        """An empty block name is rejected."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
                ("", blocks.RichTextBlock()),
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block name cannot be empty")
        self.assertEqual(errors[0].obj, block.child_blocks[""])

    def test_name_cannot_contain_spaces(self):
        """A space in a block name is rejected."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
                ("rich text", blocks.RichTextBlock()),
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, block.child_blocks["rich text"])

    def test_name_cannot_contain_dashes(self):
        """A dash in a block name is rejected."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
                ("rich-text", blocks.RichTextBlock()),
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block names cannot contain dashes")
        self.assertEqual(errors[0].obj, block.child_blocks["rich-text"])

    def test_name_cannot_begin_with_digit(self):
        """A leading digit in a block name is rejected."""
        block = blocks.StreamBlock(
            [
                ("heading", blocks.CharBlock()),
                ("99richtext", blocks.RichTextBlock()),
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block names cannot begin with a digit")
        self.assertEqual(errors[0].obj, block.child_blocks["99richtext"])

    def test_system_checks_recurse_into_lists(self):
        """check() finds bad names inside a StructBlock nested in a ListBlock."""
        failing_block = blocks.RichTextBlock()
        block = blocks.StreamBlock(
            [
                (
                    "paragraph_list",
                    blocks.ListBlock(
                        blocks.StructBlock(
                            [
                                ("heading", blocks.CharBlock()),
                                ("rich text", failing_block),
                            ]
                        )
                    ),
                )
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        # the error is attributed to the offending leaf block itself
        self.assertEqual(errors[0].obj, failing_block)

    def test_system_checks_recurse_into_streams(self):
        """check() finds bad names inside a StructBlock nested in a child StreamBlock."""
        failing_block = blocks.RichTextBlock()
        block = blocks.StreamBlock(
            [
                (
                    "carousel",
                    blocks.StreamBlock(
                        [
                            (
                                "text",
                                blocks.StructBlock(
                                    [
                                        ("heading", blocks.CharBlock()),
                                        ("rich text", failing_block),
                                    ]
                                ),
                            )
                        ]
                    ),
                )
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, failing_block)

    def test_system_checks_recurse_into_structs(self):
        """check() reports every bad name across sibling nested StructBlocks."""
        failing_block_1 = blocks.RichTextBlock()
        failing_block_2 = blocks.RichTextBlock()
        block = blocks.StreamBlock(
            [
                (
                    "two_column",
                    blocks.StructBlock(
                        [
                            (
                                "left",
                                blocks.StructBlock(
                                    [
                                        ("heading", blocks.CharBlock()),
                                        ("rich text", failing_block_1),
                                    ]
                                ),
                            ),
                            (
                                "right",
                                blocks.StructBlock(
                                    [
                                        ("heading", blocks.CharBlock()),
                                        ("rich text", failing_block_2),
                                    ]
                                ),
                            ),
                        ]
                    ),
                )
            ]
        )

        errors = block.check()
        self.assertEqual(len(errors), 2)
        self.assertEqual(errors[0].id, "wagtailcore.E001")
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, failing_block_1)
        self.assertEqual(errors[1].id, "wagtailcore.E001")
        self.assertEqual(errors[1].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[1].obj, failing_block_2)
class TestTemplateRendering(TestCase):
    """Tests for blocks that override get_context / get_form_context."""

    def test_render_with_custom_context(self):
        """A block's custom get_context (here adding "classname") feeds its template."""
        block = CustomLinkBlock()
        value = block.to_python({"title": "Torchbox", "url": "http://torchbox.com/"})
        context = {"classname": "important"}
        result = block.render(value, context)

        self.assertEqual(
            result, '<a href="http://torchbox.com/" class="important">Torchbox</a>'
        )

    @unittest.expectedFailure  # TODO(telepath)
    def test_render_with_custom_form_context(self):
        """A block's custom get_form_context should feed its form template."""
        block = CustomLinkBlock()
        value = block.to_python({"title": "Torchbox", "url": "http://torchbox.com/"})
        result = block.render_form(value, prefix="my-link-block")

        self.assertIn('data-prefix="my-link-block"', result)
        self.assertIn("<p>Hello from get_form_context!</p>", result)
class TestIncludeBlockTag(TestCase):
    def test_include_block_tag_with_boundblock(self):
        """
        The include_block tag should be able to render a BoundBlock's template
        while keeping the parent template's context
        """
        block = blocks.CharBlock(template="tests/blocks/heading_block.html")
        bound_block = block.bind("bonjour")

        result = render_to_string(
            "tests/blocks/include_block_test.html",
            {
                "test_block": bound_block,
                "language": "fr",
            },
        )
        # "language" comes from the outer context, proving it was inherited
        self.assertIn('<body><h1 lang="fr">bonjour</h1></body>', result)
    def test_include_block_tag_with_structvalue(self):
        """
        The include_block tag should be able to render a StructValue's template
        while keeping the parent template's context
        """
        block = SectionBlock()
        struct_value = block.to_python(
            {"title": "Bonjour", "body": "monde <i>italique</i>"}
        )

        result = render_to_string(
            "tests/blocks/include_block_test.html",
            {
                "test_block": struct_value,
                "language": "fr",
            },
        )

        # "language" comes from the outer context, proving it was inherited
        self.assertIn(
            """<body><h1 lang="fr">Bonjour</h1>monde <i>italique</i></body>""", result
        )
    def test_include_block_tag_with_streamvalue(self):
        """
        The include_block tag should be able to render a StreamValue's template
        while keeping the parent template's context
        """
        block = blocks.StreamBlock(
            [
                (
                    "heading",
                    blocks.CharBlock(template="tests/blocks/heading_block.html"),
                ),
                ("paragraph", blocks.CharBlock()),
            ],
            template="tests/blocks/stream_with_language.html",
        )

        stream_value = block.to_python([{"type": "heading", "value": "Bonjour"}])

        result = render_to_string(
            "tests/blocks/include_block_test.html",
            {
                "test_block": stream_value,
                "language": "fr",
            },
        )

        # both the stream template and the child template see "language"
        self.assertIn(
            '<div class="heading" lang="fr"><h1 lang="fr">Bonjour</h1></div>', result
        )
def test_include_block_tag_with_plain_value(self):
"""
The include_block tag should be able to render a value without a render_as_block method
by just rendering it as a string
"""
result = render_to_string(
"tests/blocks/include_block_test.html",
{
"test_block": 42,
},
)
self.assertIn("<body>42</body>", result)
def test_include_block_tag_with_filtered_value(self):
"""
The block parameter on include_block tag should support complex values including filters,
e.g. {% include_block foo|default:123 %}
"""
block = blocks.CharBlock(template="tests/blocks/heading_block.html")
bound_block = block.bind("bonjour")
result = render_to_string(
"tests/blocks/include_block_test_with_filter.html",
{
"test_block": bound_block,
"language": "fr",
},
)
self.assertIn('<body><h1 lang="fr">bonjour</h1></body>', result)
result = render_to_string(
"tests/blocks/include_block_test_with_filter.html",
{
"test_block": None,
"language": "fr",
},
)
self.assertIn("<body>999</body>", result)
def test_include_block_tag_with_extra_context(self):
"""
Test that it's possible to pass extra context on an include_block tag using
{% include_block foo with classname="bar" %}
"""
block = blocks.CharBlock(template="tests/blocks/heading_block.html")
bound_block = block.bind("bonjour")
result = render_to_string(
"tests/blocks/include_block_with_test.html",
{
"test_block": bound_block,
"language": "fr",
},
)
self.assertIn(
'<body><h1 lang="fr" class="important">bonjour</h1></body>', result
)
def test_include_block_tag_with_only_flag(self):
"""
A tag such as {% include_block foo with classname="bar" only %}
should not inherit the parent context
"""
block = blocks.CharBlock(template="tests/blocks/heading_block.html")
bound_block = block.bind("bonjour")
result = render_to_string(
"tests/blocks/include_block_only_test.html",
{
"test_block": bound_block,
"language": "fr",
},
)
self.assertIn('<body><h1 class="important">bonjour</h1></body>', result)
def test_include_block_html_escaping(self):
"""
Output of include_block should be escaped as per Django autoescaping rules
"""
block = blocks.CharBlock()
bound_block = block.bind(block.to_python("some <em>evil</em> HTML"))
result = render_to_string(
"tests/blocks/include_block_test.html",
{
"test_block": bound_block,
},
)
self.assertIn("<body>some <em>evil</em> HTML</body>", result)
# {% autoescape off %} should be respected
result = render_to_string(
"tests/blocks/include_block_autoescape_off_test.html",
{
"test_block": bound_block,
},
)
self.assertIn("<body>some <em>evil</em> HTML</body>", result)
# The same escaping should be applied when passed a plain value rather than a BoundBlock -
# a typical situation where this would occur would be rendering an item of a StructBlock,
# e.g. {% include_block person_block.first_name %} as opposed to
# {% include_block person_block.bound_blocks.first_name %}
result = render_to_string(
"tests/blocks/include_block_test.html",
{
"test_block": "some <em>evil</em> HTML",
},
)
self.assertIn("<body>some <em>evil</em> HTML</body>", result)
result = render_to_string(
"tests/blocks/include_block_autoescape_off_test.html",
{
"test_block": "some <em>evil</em> HTML",
},
)
self.assertIn("<body>some <em>evil</em> HTML</body>", result)
# Blocks that explicitly return 'safe HTML'-marked values (such as RawHTMLBlock) should
# continue to produce unescaped output
block = blocks.RawHTMLBlock()
bound_block = block.bind(block.to_python("some <em>evil</em> HTML"))
result = render_to_string(
"tests/blocks/include_block_test.html",
{
"test_block": bound_block,
},
)
self.assertIn("<body>some <em>evil</em> HTML</body>", result)
# likewise when applied to a plain 'safe HTML' value rather than a BoundBlock
result = render_to_string(
"tests/blocks/include_block_test.html",
{
"test_block": mark_safe("some <em>evil</em> HTML"),
},
)
self.assertIn("<body>some <em>evil</em> HTML</body>", result)
class BlockUsingGetTemplateMethod(blocks.Block):
    """Block whose template is chosen dynamically via get_template()."""

    # The template get_template() always returns, regardless of the
    # `template` kwarg passed to the constructor.
    my_new_template = "my_super_awesome_dynamic_template.html"

    def get_template(self):
        """Return the dynamic template, ignoring any constructor-supplied one."""
        return self.my_new_template
class TestOverriddenGetTemplateBlockTag(TestCase):
    """An overridden get_template() must win over the `template` kwarg."""

    def test_template_is_overridden_by_get_template(self):
        block = BlockUsingGetTemplateMethod(
            template="tests/blocks/this_shouldnt_be_used.html"
        )
        template = block.get_template()
        self.assertEqual(template, block.my_new_template)
|
from __future__ import unicode_literals
from django.db import migrations
# Partial index to speed up "unlabelled inbox" queries: active, handled,
# incoming ('I') messages that have NOT been archived, newest first per org.
# (Archived messages are no longer in the inbox, so the previous
# `is_archived = TRUE` predicate indexed exactly the wrong rows.)
INDEX_SQL = """
CREATE INDEX msgs_unlabelled_inbox
ON msgs_message(org_id, created_on DESC)
WHERE is_active = TRUE AND is_handled = TRUE AND is_archived = FALSE AND "type" = 'I';
"""
class Migration(migrations.Migration):
    """Adds a partial index on msgs_message for inbox queries (raw SQL)."""

    dependencies = [("msgs", "0023_auto_20160308_1153")]

    # RunSQL with no reverse SQL: this migration is not reversible.
    operations = [migrations.RunSQL(INDEX_SQL)]
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
# Register all ModelAdmin classes from INSTALLED_APPS with the admin site.
admin.autodiscover()

# NOTE(review): this uses the legacy `patterns('')` + string-view URLconf
# style (removed in Django 1.10); it only runs on old Django versions.
urlpatterns = patterns('',
    # Example:
    # (r'^my-project/', include('my_project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),

    (r'^admin/', include(admin.site.urls)),

    # RapidSMS core URLs
    (r'^account/', include('rapidsms.urls.login_logout')),
    url(r'^$', 'rapidsms.views.dashboard', name='rapidsms-dashboard'),

    # RapidSMS contrib app URLs
    # (r'^ajax/', include('rapidsms.contrib.ajax.urls')),
    # (r'^export/', include('rapidsms.contrib.export.urls')),
    url(r'^httptester/$',
        'threadless_router.backends.httptester.views.generate_identity',
        {'backend_name': 'httptester'}, name='httptester-index'),
    (r'^httptester/', include('threadless_router.backends.httptester.urls')),
    # (r'^locations/', include('rapidsms.contrib.locations.urls')),
    (r'^messagelog/', include('rapidsms.contrib.messagelog.urls')),
    (r'^messaging/', include('rapidsms.contrib.messaging.urls')),
    # (r'^registration/', include('rapidsms.contrib.registration.urls')),
    # (r'^scheduler/', include('rapidsms.contrib.scheduler.urls')),
)

if settings.DEBUG:
    urlpatterns += patterns('',
        # helper URLs file that automatically serves the 'static' folder in
        # INSTALLED_APPS via the Django static media server (NOT for use in
        # production)
        (r'^', include('rapidsms.urls.static_media')),
    )
|
import sys
import os
import numpy as np
import pandas
import wqio
from wqio import utils
import pybmpdb
import pynsqd
from .info import POC_dicts
# BMP categories requested from the BMP database via pybmpdb.getSummaryData
# (see the `bmpdb` class below).
bmpcats_to_use = [
    'Bioretention', 'Detention Basin',
    'Green Roof', 'Biofilter',
    'LID', 'Manufactured Device',
    'Media Filter', 'Porous Pavement',
    'Retention Pond', 'Wetland Basin',
    'Wetland Channel'
]
def _fix_nsqd_bacteria_units(df, unitscol='units'):
df = df.copy()
df[unitscol] = df[unitscol].replace(to_replace='MPN/100 mL', value='CFU/100 mL')
return df
class nsqd:
    """
    Object to provide convenient access to the NSQD.

    Parameters
    ----------
    color, marker : string
        Matplotlib symbology strings.
    """

    def __init__(self, color, marker):
        self.color = color
        self.marker = marker
        # Lazily-computed caches backing the properties below.
        self._data = None
        self._datacollection = None
        self._medians = None
        self._seasonal_datacollection = None
        self._seasonal_medians = None
        self.label_col = 'primary_landuse'
        # Columns identifying a unique record in the NSQD table.
        self.index_cols = [
            'epa_rain_zone', 'location_code', 'station_name', 'primary_landuse',
            'start_date', 'season', 'station', 'parameter', 'units',
        ]
        self.db = pynsqd.NSQData()

    @property
    def landuses(self):
        # Unique land-use labels present in the filtered data.
        return self.data['primary_landuse'].unique()

    @property
    def labels(self):
        return self.landuses

    @property
    def data(self):
        # NSQD records restricted to total-fraction POC parameters in EPA
        # rain zone 1 with known land use; NSQD parameter names are mapped
        # to their CVC equivalents and bacteria units normalized.
        if self._data is None:
            params = [p['nsqdname'] for p in POC_dicts]
            self._data = (
                self.db
                    .data
                    .query("primary_landuse != 'Unknown'")
                    .query("parameter in @params")
                    .query("fraction == 'Total'")
                    .query("epa_rain_zone == 1")
                    .assign(cvcparam=lambda df: df['parameter'].apply(self._get_cvc_parameter))
                    .drop('parameter', axis=1)
                    .rename(columns={'cvcparam': 'parameter'})
                    .groupby(by=self.index_cols)
                    .first()
                    .reset_index()
                    .pipe(_fix_nsqd_bacteria_units)
            )
        return self._data

    def _make_dc(self, which):
        # Build a DataCollection grouped either overall or per-season.
        _dc_map = {
            'overall': ['units', 'primary_landuse'],
            'seasonal': ['units', 'primary_landuse', 'season'],
        }
        dc = wqio.DataCollection(
            self.data.set_index(self.index_cols),
            ndval='<',
            othergroups=_dc_map[which],
            paramcol='parameter'
        )
        return dc

    def _get_medians(self, which):
        # Residential outflow medians (overall or seasonal), rounded to 3
        # decimal places.
        _med_dict = {
            'overall': self.datacollection.medians,
            'seasonal': self.seasonal_datacollection.medians,
        }
        medians = (
            _med_dict[which.lower()]
                ['outflow']
                .xs('Residential', level='primary_landuse')
                .pipe(np.round, 3)
                .reset_index()
                .rename(columns={'stat': 'NSQD Median'})
        )
        return medians

    @property
    def datacollection(self):
        if self._datacollection is None:
            self._datacollection = self._make_dc('overall')
        return self._datacollection

    @property
    def medians(self):
        if self._medians is None:
            self._medians = self._get_medians('overall')
        return self._medians

    @property
    def seasonal_datacollection(self):
        if self._seasonal_datacollection is None:
            self._seasonal_datacollection = self._make_dc('seasonal')
        return self._seasonal_datacollection

    @property
    def seasonal_medians(self):
        if self._seasonal_medians is None:
            self._seasonal_medians = self._get_medians('seasonal')
        return self._seasonal_medians

    @staticmethod
    def _get_cvc_parameter(nsqdparam):
        # Map an NSQD parameter name to its CVC name; NaN when unmapped.
        try:
            cvcparam = list(filter(
                lambda p: p['nsqdname'] == nsqdparam, POC_dicts
            ))[0]['cvcname']
        except IndexError:
            cvcparam = np.nan
        return cvcparam
class bmpdb:
    """
    Object to provide convenient access to the BMP DB.

    Parameters
    ----------
    color, marker : string
        Matplotlib symbology strings.
    """

    def __init__(self, color, marker):
        self.color = color
        self.marker = marker
        self.paramnames = [p['bmpname'] for p in POC_dicts]
        # Split POCs into non-bacteria and bacteria parameters by units.
        self._mainparams = list(filter(
            lambda x: x['conc_units']['plain'] != 'CFU/100 mL', POC_dicts
        ))
        self._bioparams = list(filter(
            lambda x: x['conc_units']['plain'] == 'CFU/100 mL', POC_dicts
        ))
        self.table, self.db = pybmpdb.getSummaryData(
            catanalysis=False,
            astable=True,
            parameter=self.paramnames,
            category=bmpcats_to_use,
            epazone=1,
        )
        # Lazily-computed caches backing the properties below.
        self._data = None
        self._datasets = None
        self._effluentLocations = None
        self._medians = None
        self._datacollection = None
        self.label_col = 'category'

    @property
    def categories(self):
        return self.table.bmp_categories

    @property
    def labels(self):
        return self.categories

    @property
    def data(self):
        # BMP DB outflow records in EPA rain zone 1 with BMP parameter
        # names mapped to their CVC equivalents.
        if self._data is None:
            index_cache = self.table.data.index.names
            self._data = (
                self.table
                    .data
                    .reset_index()
                    .query("station == 'outflow'")
                    .query("epazone == 1")
                    .assign(bmpparam=lambda df: df['parameter'].apply(self._get_cvc_parameter))
                    .drop('parameter', axis=1)
                    .rename(columns={'bmpparam': 'parameter'})
                    .set_index(index_cache)
            )
        return self._data

    @property
    def datacollection(self):
        # DataCollection grouped by units and BMP category.
        if self._datacollection is None:
            groupcols = ['units', 'category']
            dc = wqio.DataCollection(self.data, ndval='ND', othergroups=groupcols,
                                     paramcol='parameter')
            self._datacollection = dc
        return self._datacollection

    @property
    def medians(self):
        # Outflow medians, rounded to 3 decimal places.
        if self._medians is None:
            self._medians = (
                self.datacollection
                    .medians['outflow']
                    .pipe(np.round, 3)
                    .reset_index()
                    .rename(columns={'stat': 'BMPDB Medians'})
            )
        return self._medians

    @staticmethod
    def _get_cvc_parameter(bmpparam):
        # Map a BMP DB parameter name to its CVC name; NaN when unmapped.
        try:
            cvcname = list(filter(
                lambda p: p['bmpname'] == bmpparam, POC_dicts
            ))[0]['cvcname']
        except IndexError:
            # Only "no mapping found" is expected here. The previous bare
            # `except:` also masked unrelated errors (and differed from
            # nsqd._get_cvc_parameter, which catches IndexError only).
            cvcname = np.nan
        return cvcname
def combine_wq(wq, external, external_site_col):
    """
    Combines CVC water quality dataframes with the `tidy`
    attributes of a `bmpdb` or `nsqd` object.

    Parameters
    ----------
    wq : pandas.DataFrame
        A dataframe of CVC water quality data
    external : nsqd or bmpdb object
    external_site_col : str
        The column in `external.datacollection.tidy` on which the data
        should be grouped. Analogous to "site" in the CVC data
        (e.g., ED-1, LV-2, ...)

    Returns
    -------
    tidy : pandas.DataFrame
    """
    final_cols = ['parameter', 'units', 'site', 'concentration']
    # Align the external data's column names with the CVC convention.
    column_map = {external_site_col: 'site', 'ros_res': 'concentration'}
    exttidy = external.datacollection.tidy.rename(columns=column_map)[final_cols]
    return pandas.concat([wq[final_cols], exttidy])
|
from twisted.internet.protocol import Protocol, Factory
from twisted.internet import reactor
from MySQLdb import *
from getpass import *
import ConfigParser
from twisted.protocols.basic import LineReceiver
from DatabaseServiceProvider import DatabaseServiceProvider
class TCPLoggingServer(LineReceiver):
    """Python 2 / Twisted protocol: each received line is written to the
    database via the factory's DatabaseServiceProvider."""

    def connectionMade(self):
        # Acknowledge the connection to the client.
        self.transport.write("connectionMade\r\n")

    def connectionLost(self, reason):
        print "lost a connection", reason

    def lineReceived(self, line):
        # Echo the line to stdout, then persist it.
        print line,
        #self.transport.write("server:" + line)
        self.factory.dsp.logToDatabase(line)
class TCPLoggingFactory(Factory):
    """Factory that opens the MySQL connection (settings read from db.conf)
    shared by all TCPLoggingServer protocol instances."""

    protocol = TCPLoggingServer

    def __init__(self):
        print "starting server"
        try:
            # db.conf is expected in the working directory with a
            # [database] section: passwd, host, user, db, port, timeout.
            config = ConfigParser.ConfigParser()
            config.read("db.conf")
            pw = config.get("database", "passwd")
            hostname = config.get("database", "host")
            usr = config.get("database", "user")
            dbn = config.get("database", "db")
            p = config.get("database", "port")
            timeout = config.get("database", "timeout")
            self.db = connect(host=hostname,user=usr,passwd=pw, \
                              db=dbn,port=int(p), connect_timeout=int(timeout))
        except OperationalError:
            # NOTE(review): returning leaves the factory without db/cursor/dsp
            # attributes; later use would raise AttributeError.
            print "Error!"
            return
        print self.db
        self.cursor = self.db.cursor()
        self.dsp = DatabaseServiceProvider(self.db)
# Listen on TCP port 8005 and run the Twisted event loop (blocks forever).
reactor.listenTCP(8005, TCPLoggingFactory())
reactor.run()
|
import sys, os, pprint
from datetime import date
template_blitshader_source = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name}.
//
// Copyright {year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Blit11Helper_autogen.inc:
// Defines and retrieves blitshaders for the D3D11 backend.
namespace
{{
// Include inline shaders in the anonymous namespace to make sure no symbols are exported
{shader_includes}
}} // namespace
enum Blit11::BlitShaderOperation : unsigned int
{{
{blitshaderop_enums}
}};
enum Blit11::BlitShaderType : unsigned int
{{
{blitshadertype_enums}
}};
Blit11::BlitShaderType Blit11::getBlitShaderType(BlitShaderOperation operation, ShaderDimension dimension)
{{
switch(operation)
{{
{get_blitshaders_case_list}
default:
UNREACHABLE();
return BLITSHADER_INVALID;
}}
}}
angle::Result Blit11::mapBlitShader(const gl::Context *context,
BlitShaderType blitShaderType)
{{
switch(blitShaderType)
{{
{add_blitshader_case_list}
default:
ANGLE_HR_UNREACHABLE(GetImplAs<Context11>(context));
}}
return angle::Result::Continue;
}}
"""
template_blitshaders_gni = """# GENERATED FILE - DO NOT EDIT.
libangle_d3d11_blit_shaders = [
{shader_filename_list}
]"""
# #include line for one compiled-shader header; {filename} is substituted by
# format_shader_include(). (The placeholder had been clobbered by a literal
# "(unknown)", which made format's filename= kwarg a no-op and emitted the
# same bogus include for every shader.)
template_compiled_blitshader_include = """#include "libANGLE/renderer/d3d/d3d11/shaders/compiled/{filename}\""""
template_get_blitshader_case = """ case {operation}:
switch (dimension)
{{
{get_blitshader_dimension_cases}
default:
UNREACHABLE();
return BLITSHADER_INVALID;
}}"""
template_get_blitshader_case_dimension = """ case SHADER_{dimension}:
return BLITSHADER_{blitshader};"""
template_map_blitshader_case = """ case {blitshader_name}:
ANGLE_TRY(addBlitShaderToMap(context, blitShaderType, SHADER_{dimension_upper},
ShaderData(g_PS_{compiled_shader_name}),
"Blit11 {dimension} {shader_comment} pixel shader"));
break;"""
# Texture dimensions for which blit shader variants are generated.
supported_dimensions = ["2D", "3D", "2DArray"]

# Each entry is (operation name, shader-name pattern, header-file pattern),
# where '*' is replaced with the dimension. Entries WITHOUT a third element
# (e.g. BGRAF) reuse another shader's compiled header — the generators only
# emit an #include / gni entry when len(entry) == 3.
blitshader_data = [
    ("RGBAF", "PassthroughRGBA*", "passthroughrgba*11ps.h"), ("BGRAF", "PassthroughRGBA*"),
    ("RGBF", "PassthroughRGB*", "passthroughrgb*11ps.h"),
    ("RGF", "PassthroughRG*", "passthroughrg*11ps.h"),
    ("RF", "PassthroughR*", "passthroughr*11ps.h"),
    ("ALPHA", "PassthroughA*", "passthrougha*11ps.h"),
    ("LUMA", "PassthroughLum*", "passthroughlum*11ps.h"),
    ("LUMAALPHA", "PassthroughLumAlpha*", "passthroughlumalpha*11ps.h"),
    ("RGBAUI", "PassthroughRGBA*UI", "passthroughrgba*ui11ps.h"),
    ("RGBAI", "PassthroughRGBA*I", "passthroughrgba*i11ps.h"),
    ("RGBUI", "PassthroughRGB*UI", "passthroughrgb*ui11ps.h"),
    ("RGBI", "PassthroughRGB*I", "passthroughrgb*i11ps.h"),
    ("RGUI", "PassthroughRG*UI", "passthroughrg*ui11ps.h"),
    ("RGI", "PassthroughRG*I", "passthroughrg*i11ps.h"),
    ("RUI", "PassthroughR*UI", "passthroughr*ui11ps.h"),
    ("RI", "PassthroughR*I", "passthroughr*i11ps.h"),
    ("RGBAF_PREMULTIPLY", "FtoF_PM_RGBA_*", "multiplyalpha_ftof_pm_rgba_*_ps.h"),
    ("RGBAF_UNMULTIPLY", "FtoF_UM_RGBA_*", "multiplyalpha_ftof_um_rgba_*_ps.h"),
    ("RGBF_PREMULTIPLY", "FtoF_PM_RGB_*", "multiplyalpha_ftof_pm_rgb_*_ps.h"),
    ("RGBF_UNMULTIPLY", "FtoF_UM_RGB_*", "multiplyalpha_ftof_um_rgb_*_ps.h"),
    ("RGBAF_TOUI", "FtoU_PT_RGBA_*", "multiplyalpha_ftou_pt_rgba_*_ps.h"),
    ("RGBAF_TOUI_PREMULTIPLY", "FtoU_PM_RGBA_*", "multiplyalpha_ftou_pm_rgba_*_ps.h"),
    ("RGBAF_TOUI_UNMULTIPLY", "FtoU_UM_RGBA_*", "multiplyalpha_ftou_um_rgba_*_ps.h"),
    ("RGBF_TOUI", "FtoU_PT_RGB_*", "multiplyalpha_ftou_pt_rgb_*_ps.h"),
    ("RGBF_TOUI_PREMULTIPLY", "FtoU_PM_RGB_*", "multiplyalpha_ftou_pm_rgb_*_ps.h"),
    ("RGBF_TOUI_UNMULTIPLY", "FtoU_UM_RGB_*", "multiplyalpha_ftou_um_rgb_*_ps.h"),
    ("RGBAF_TOI", "FtoI_PT_RGBA_*", "multiplyalpha_ftoi_pt_rgba_*_ps.h"),
    ("RGBAF_TOI_PREMULTIPLY", "FtoI_PM_RGBA_*", "multiplyalpha_ftoi_pm_rgba_*_ps.h"),
    ("RGBAF_TOI_UNMULTIPLY", "FtoI_UM_RGBA_*", "multiplyalpha_ftoi_um_rgba_*_ps.h"),
    ("RGBF_TOI", "FtoI_PT_RGB_*", "multiplyalpha_ftoi_pt_rgb_*_ps.h"),
    ("RGBF_TOI_PREMULTIPLY", "FtoI_PM_RGB_*", "multiplyalpha_ftoi_pm_rgb_*_ps.h"),
    ("RGBF_TOI_UNMULTIPLY", "FtoI_UM_RGB_*", "multiplyalpha_ftoi_um_rgb_*_ps.h"),
    ("LUMAF_PREMULTIPLY", "FtoF_PM_LUMA_*", "multiplyalpha_ftof_pm_luma_*_ps.h"),
    ("LUMAF_UNMULTIPLY", "FtoF_UM_LUMA_*", "multiplyalpha_ftof_um_luma_*_ps.h"),
    ("LUMAALPHAF_PREMULTIPLY", "FtoF_PM_LUMAALPHA_*", "multiplyalpha_ftof_pm_lumaalpha_*_ps.h"),
    ("LUMAALPHAF_UNMULTIPLY", "FtoF_UM_LUMAALPHA_*", "multiplyalpha_ftof_um_lumaalpha_*_ps.h"),
    ("RGBAF_4444", "PassthroughRGBA*_4444", "passthroughrgba*_4444_11ps.h"),
    ("RGBAF_4444_PREMULTIPLY", "FtoF_PM_RGBA_4444_*", "multiplyalpha_ftof_pm_rgba_4444_*_ps.h"),
    ("RGBAF_4444_UNMULTIPLY", "FtoF_UM_RGBA_4444_*", "multiplyalpha_ftof_um_rgba_4444_*_ps.h"),
    ("RGBF_565", "PassthroughRGB*_565", "passthroughrgb*_565_11ps.h"),
    ("RGBF_565_PREMULTIPLY", "FtoF_PM_RGB_565_*", "multiplyalpha_ftof_pm_rgb_565_*_ps.h"),
    ("RGBF_565_UNMULTIPLY", "FtoF_UM_RGB_565_*", "multiplyalpha_ftof_um_rgb_565_*_ps.h"),
    ("RGBAF_5551", "PassthroughRGBA*_5551", "passthroughrgba*_5551_11ps.h"),
    ("RGBAF_5551_PREMULTIPLY", "FtoF_PM_RGBA_5551_*", "multiplyalpha_ftof_pm_rgba_5551_*_ps.h"),
    ("RGBAF_5551_UNMULTIPLY", "FtoF_UM_RGBA_5551_*", "multiplyalpha_ftof_um_rgba_5551_*_ps.h")
]
def format_shader_include(dimension, blitshader):
    """Render the #include line for one compiled shader variant."""
    header = blitshader[2].replace("*", dimension.lower())
    return template_compiled_blitshader_include.format(filename=header)
def format_get_blitshader_case(operation):
    """Render the outer switch case mapping one operation to its
    per-dimension blit shader types."""
    per_dimension = [
        format_get_blitshader_case_dimension(operation, dim)
        for dim in supported_dimensions
    ]
    return template_get_blitshader_case.format(
        get_blitshader_dimension_cases="\n".join(per_dimension),
        operation=operation)
def format_get_blitshader_case_dimension(operation, dimension):
    """Render one `case SHADER_<DIM>` line for the given operation."""
    # 2D float to int shaders have not been implemented
    if dimension == "2D" and "TOI" in operation:
        blitshader = "INVALID"
    else:
        blitshader = "%s_%s" % (dimension.upper(), operation)
    return template_get_blitshader_case_dimension.format(
        dimension=dimension.upper(), blitshader=blitshader)
def format_map_blitshader_case(dimension, blitshader):
    """Render the mapBlitShader case that registers one shader variant."""
    name = "BLITSHADER_%s_%s" % (dimension.upper(), blitshader[0])
    # 3D and 2DArray use the RGBA shader for passthrough alpha
    if blitshader[0] == "ALPHA" and dimension != "2D":
        compiled = "PassthroughRGBA" + dimension
    else:
        compiled = blitshader[1].replace("*", dimension)
    return template_map_blitshader_case.format(
        blitshader_name=name,
        dimension=dimension,
        dimension_upper=dimension.upper(),
        compiled_shader_name=compiled,
        shader_comment=compiled.replace("_", " "),
    )
def format_shader_filename(dimension, blitshader):
    """Return the comma-terminated relative path of one compiled shader
    header (for the .gni file)."""
    header = blitshader[2].replace("*", dimension.lower())
    return "shaders/compiled/%s," % header
def get_shader_includes():
    """Collect #include lines for every shader variant that has its own
    compiled header file."""
    includes = []
    for dim in supported_dimensions:
        for shader in blitshader_data:
            # 2D float to int shaders have not been implemented
            if dim == "2D" and "TOI" in shader[0]:
                continue
            # 3D and 2DArray just use the RGBA shader for passthrough alpha
            if dim != "2D" and shader[0] == "ALPHA":
                continue
            # Entries without a filename reuse another shader's header.
            if len(shader) == 3:
                includes.append(format_shader_include(dim, shader))
    return includes
def get_blitshader_cases():
    """Build one getBlitShaderType switch case per blit operation."""
    return [format_get_blitshader_case(shader[0]) for shader in blitshader_data]
def get_map_blitshader_cases():
    """Build mapBlitShader cases for every implemented dimension/shader pair."""
    cases = []
    for dim in supported_dimensions:
        for shader in blitshader_data:
            # 2D float to int shaders have not been implemented
            if dim == "2D" and "TOI" in shader[0]:
                continue
            cases.append(format_map_blitshader_case(dim, shader))
    return cases
def get_blitshaderop_enums():
    """List BlitShaderOperation enum entries, ending with OPERATION_INVALID."""
    entries = ["    %s," % shader[0] for shader in blitshader_data]
    entries.append("    OPERATION_INVALID")
    return entries
def get_blitshadertype_enums():
    """List BlitShaderType enum entries, ending with BLITSHADER_INVALID."""
    entries = []
    for dim in supported_dimensions:
        for shader in blitshader_data:
            # 2D float to int shaders have not been implemented
            if dim == "2D" and "TOI" in shader[0]:
                continue
            entries.append("    BLITSHADER_%s_%s," % (dim.upper(), shader[0]))
    entries.append("    BLITSHADER_INVALID")
    return entries
def get_shader_filenames():
    """List quoted gn source paths for every shader with its own header."""
    names = []
    for dim in supported_dimensions:
        for shader in blitshader_data:
            # 2D float to int shaders have not been implemented
            if dim == "2D" and "TOI" in shader[0]:
                continue
            # 3D and 2DArray just use the RGBA shader for passthrough alpha
            if dim != "2D" and shader[0] == "ALPHA":
                continue
            # Entries without a filename reuse another shader's header.
            if len(shader) == 3:
                header = shader[2].replace("*", dim.lower())
                names.append(
                    "    \"src/libANGLE/renderer/d3d/d3d11/shaders/compiled/{0}\",".format(
                        header))
    return names
def write_inc_file(get_blitshaders_case_list, add_blitshader_case_list, shader_includes,
                   blitshaderop_enums, blitshadertype_enums):
    """Write Blit11Helper_autogen.inc from the pre-rendered code fragments.

    All parameters are newline-joined strings substituted into
    template_blitshader_source.
    """
    content = template_blitshader_source.format(
        script_name=os.path.basename(sys.argv[0]),
        year=date.today().year,
        blitshaderop_enums=blitshaderop_enums,
        blitshadertype_enums=blitshadertype_enums,
        get_blitshaders_case_list=get_blitshaders_case_list,
        add_blitshader_case_list=add_blitshader_case_list,
        shader_includes=shader_includes)
    path = os.path.join("Blit11Helper_autogen.inc")
    # The with-statement closes the file; the previous explicit close()
    # inside the block was redundant.
    with open(path, "w") as out:
        out.write(content)
def write_gni_file(shader_filename_list):
    """Write d3d11_blit_shaders_autogen.gni listing the compiled shaders.

    `shader_filename_list` is the newline-joined list of quoted paths.
    (script_name/year are passed for symmetry; str.format ignores unused
    keyword arguments.)
    """
    content = template_blitshaders_gni.format(
        script_name=os.path.basename(sys.argv[0]),
        year=date.today().year,
        shader_filename_list=shader_filename_list)
    path = os.path.join("d3d11_blit_shaders_autogen.gni")
    # The with-statement closes the file; the previous explicit close()
    # inside the block was redundant.
    with open(path, "w") as out:
        out.write(content)
def main():
    """Generate the .inc and .gni outputs, or answer auto_script queries.

    Returns 0 on success, 1 on bad arguments (used as the process exit code).
    """
    # auto_script parameters: `inputs`/`outputs` queries print the file lists.
    if len(sys.argv) > 1:
        inputs = []
        outputs = ['Blit11Helper_autogen.inc', 'd3d11_blit_shaders_autogen.gni']
        # Use print() consistently (the original mixed `print x` statements
        # with print('...') calls); single-argument parenthesized form
        # behaves identically under Python 2 and 3.
        if sys.argv[1] == 'inputs':
            print(','.join(inputs))
        elif sys.argv[1] == 'outputs':
            print(','.join(outputs))
        else:
            print('Invalid script parameters')
            return 1
        return 0
    # Render every fragment, then assemble the two output files.
    # (The previous empty-list pre-initializations were dead code.)
    map_blitshader_cases = get_map_blitshader_cases()
    shader_includes = get_shader_includes()
    blitshadertype_cases = get_blitshader_cases()
    blitshaderop_enums = get_blitshaderop_enums()
    blitshadertype_enums = get_blitshadertype_enums()
    shader_filenames = get_shader_filenames()
    write_inc_file("\n".join(blitshadertype_cases),
                   "\n".join(map_blitshader_cases),
                   "\n".join(shader_includes),
                   "\n".join(blitshaderop_enums),
                   "\n".join(blitshadertype_enums))
    write_gni_file("\n".join(shader_filenames))
    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
from itertools import product
import os
import os.path as op
from unittest import SkipTest
import pytest
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_allclose, assert_equal)
from scipy import stats, linalg
import matplotlib.pyplot as plt
from mne import (Epochs, read_events, pick_types, create_info, EpochsArray,
EvokedArray, Annotations, pick_channels_regexp)
from mne.cov import read_cov
from mne.preprocessing import (ICA, ica_find_ecg_events, ica_find_eog_events,
read_ica)
from mne.preprocessing.ica import (get_score_funcs, corrmap, _sort_components,
_ica_explained_variance, read_ica_eeglab)
from mne.io import read_raw_fif, Info, RawArray, read_raw_ctf, read_raw_eeglab
from mne.io.meas_info import _kind_dict
from mne.io.pick import _DATA_CH_TYPES_SPLIT
from mne.io.eeglab.eeglab import _check_load_mat
from mne.rank import _compute_rank_int
from mne.utils import (catch_logging, _TempDir, requires_sklearn,
run_tests_if_main)
from mne.datasets import testing
from mne.event import make_fixed_length_events
# Small test fixtures that ship with the mne package itself.
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
test_cov_name = op.join(data_dir, 'test-cov.fif')
# Larger fixtures from the optional mne testing dataset (download=False, so
# these paths may not exist locally).
test_base_dir = testing.data_path(download=False)
ctf_fname = op.join(test_base_dir, 'CTF', 'testdata_ctf.ds')
fif_fname = op.join(test_base_dir, 'MEG', 'sample',
                    'sample_audvis_trunc_raw.fif')
eeglab_fname = op.join(test_base_dir, 'EEGLAB', 'test_raw.set')
eeglab_montage = op.join(test_base_dir, 'EEGLAB', 'test_chans.locs')
ctf_fname2 = op.join(test_base_dir, 'CTF', 'catch-alp-good-f.ds')
# Shared epoching / cropping parameters used throughout these tests.
event_id, tmin, tmax = 1, -0.2, 0.2
start, stop = 0, 6
# Score functions that are not suited for ICA component scoring.
score_funcs_unsuited = ['pointbiserialr', 'ansari']
def _skip_check_picard(method):
if method == 'picard':
try:
import picard # noqa, analysis:ignore
except Exception as exp:
raise SkipTest("Picard is not installed (%s)." % (exp,))
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_full_data_recovery(method):
    """Test recovery of full data when no source is rejected."""
    # Most basic recovery
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data()
    events = read_events(event_name)
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')[:10]
    with pytest.warns(RuntimeWarning, match='projection'):
        epochs = Epochs(raw, events[:4], event_id, tmin, tmax, picks=picks,
                        baseline=(None, 0), preload=True)
    evoked = epochs.average()
    n_channels = 5
    # Snapshot the data before ICA so the round trip can be compared.
    data = raw._data[:n_channels].copy()
    data_epochs = epochs.get_data()
    data_evoked = evoked.data
    raw.set_annotations(Annotations([0.5], [0.5], ['BAD']))
    methods = [method]
    for method in methods:
        # (n_components, n_pca_components, expect exact reconstruction):
        # keeping all PCA components should recover the data exactly;
        # dropping half should not.
        stuff = [(2, n_channels, True), (2, n_channels // 2, False)]
        for n_components, n_pca_components, ok in stuff:
            ica = ICA(n_components=n_components, random_state=0,
                      max_pca_components=n_pca_components,
                      n_pca_components=n_pca_components,
                      method=method, max_iter=1)
            with pytest.warns(UserWarning, match=None):  # sometimes warns
                ica.fit(raw, picks=list(range(n_channels)))
            _assert_ica_attributes(ica)
            # apply() with exclude=[] removes no components.
            raw2 = ica.apply(raw.copy(), exclude=[])
            if ok:
                assert_allclose(data[:n_channels], raw2._data[:n_channels],
                                rtol=1e-10, atol=1e-15)
            else:
                diff = np.abs(data[:n_channels] - raw2._data[:n_channels])
                assert (np.max(diff) > 1e-14)
            # Same round trip, fitting on epochs instead of raw.
            ica = ICA(n_components=n_components, method=method,
                      max_pca_components=n_pca_components,
                      n_pca_components=n_pca_components, random_state=0)
            with pytest.warns(None):  # sometimes warns
                ica.fit(epochs, picks=list(range(n_channels)))
            _assert_ica_attributes(ica)
            epochs2 = ica.apply(epochs.copy(), exclude=[])
            data2 = epochs2.get_data()[:, :n_channels]
            if ok:
                assert_allclose(data_epochs[:, :n_channels], data2,
                                rtol=1e-10, atol=1e-15)
            else:
                diff = np.abs(data_epochs[:, :n_channels] - data2)
                assert (np.max(diff) > 1e-14)
            # And applying the epochs-fitted ICA to the evoked data.
            evoked2 = ica.apply(evoked.copy(), exclude=[])
            data2 = evoked2.data[:n_channels]
            if ok:
                assert_allclose(data_evoked[:n_channels], data2,
                                rtol=1e-10, atol=1e-15)
            else:
                diff = np.abs(evoked.data[:n_channels] - data2)
                assert (np.max(diff) > 1e-14)
    with pytest.raises(ValueError, match='Invalid value'):
        ICA(method='pizza-decomposision')
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_simple(method):
    """Test that ICA recovers the unmixing matrix in a simple case."""
    if method == "fastica":
        try:
            import sklearn  # noqa: F401
        except ImportError:
            raise SkipTest("scikit-learn not installed")
    _skip_check_picard(method)
    n_components = 3
    n_samples = 1000
    rng = np.random.RandomState(0)
    # Mix Laplacian (super-Gaussian) sources with a random square matrix.
    S = rng.laplace(size=(n_components, n_samples))
    A = rng.randn(n_components, n_components)
    data = np.dot(A, S)
    ica = ICA(n_components=n_components, method=method, random_state=0)
    ica._fit(data, n_components, 0)
    transform = np.dot(np.dot(ica.unmixing_matrix_, ica.pca_components_), A)
    # This row-wise sum/max ratio (Amari-style distance) is ~0 when
    # `transform` is a scaled permutation, i.e. unmixing inverted the mixing.
    amari_distance = np.mean(np.sum(np.abs(transform), axis=1) /
                             np.max(np.abs(transform), axis=1) - 1.)
    assert amari_distance < 0.1
@requires_sklearn
@pytest.mark.parametrize("method", ["infomax", "fastica", "picard"])
def test_ica_n_iter_(method):
    """Test that ICA.n_iter_ is set after fitting."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data()
    n_components = 3
    max_iter = 1
    ica = ICA(n_components=n_components, max_iter=max_iter, method=method)
    # With max_iter=1, fastica/picard emit a convergence warning;
    # infomax does not.
    if method == 'infomax':
        ica.fit(raw)
    else:
        with pytest.warns(UserWarning, match='did not converge'):
            ica.fit(raw)
    assert_equal(ica.n_iter_, max_iter)
    # Test I/O roundtrip.
    tempdir = _TempDir()
    output_fname = op.join(tempdir, 'test_ica-ica.fif')
    _assert_ica_attributes(ica)
    ica.save(output_fname)
    ica = read_ica(output_fname)
    _assert_ica_attributes(ica)
    # n_iter_ must survive save/load.
    assert_equal(ica.n_iter_, max_iter)
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_rank_reduction(method):
    """Test recovery ICA rank reduction."""
    _skip_check_picard(method)
    # Most basic recovery
    raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data()
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')[:10]
    n_components = 5
    max_pca_components = len(picks)
    for n_pca_components in [6, 10]:
        with pytest.warns(UserWarning, match='did not converge'):
            ica = ICA(n_components=n_components,
                      max_pca_components=max_pca_components,
                      n_pca_components=n_pca_components,
                      method=method, max_iter=1).fit(raw, picks=picks)
        rank_before = _compute_rank_int(raw.copy().pick(picks), proj=False)
        assert_equal(rank_before, len(picks))
        # Applying ICA should reduce (or at most keep) the data rank.
        raw_clean = ica.apply(raw.copy())
        rank_after = _compute_rank_int(raw_clean.copy().pick(picks),
                                       proj=False)
        # interaction between ICA rejection and PCA components difficult
        # to preduct. Rank_after often seems to be 1 higher then
        # n_pca_components
        assert (n_components < n_pca_components <= rank_after <=
                rank_before)
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_reset(method):
    """Test ICA resetting."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data()
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')[:10]
    # Attributes that only exist after a fit and must be cleared by _reset().
    run_time_attrs = (
        'pre_whitener_',
        'unmixing_matrix_',
        'mixing_matrix_',
        'n_components_',
        'n_samples_',
        'pca_components_',
        'pca_explained_variance_',
        'pca_mean_',
        'n_iter_'
    )
    with pytest.warns(UserWarning, match='did not converge'):
        ica = ICA(
            n_components=3, max_pca_components=3, n_pca_components=3,
            method=method, max_iter=1).fit(raw, picks=picks)
    assert (all(hasattr(ica, attr) for attr in run_time_attrs))
    assert ica.labels_ is not None
    ica._reset()
    # All fit-time attributes are gone, but labels_ must be preserved.
    assert (not any(hasattr(ica, attr) for attr in run_time_attrs))
    assert ica.labels_ is not None
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_core(method):
    """Test ICA on raw and epochs."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()

    # XXX. The None cases helped revealing bugs but are time consuming.
    test_cov = read_cov(test_cov_name)
    events = read_events(event_name)
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')
    epochs = Epochs(raw, events[:4], event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), preload=True)
    noise_cov = [None, test_cov]
    # removed None cases to speed up...
    n_components = [2, 1.0]  # for future dbg add cases
    max_pca_components = [3]
    picks_ = [picks]
    methods = [method]

    # Exercise every combination of the parameter lists above.
    iter_ica_params = product(noise_cov, n_components, max_pca_components,
                              picks_, methods)

    # # test init catchers
    with pytest.raises(ValueError, match='must be smaller than max_pca'):
        ICA(n_components=3, max_pca_components=2)
    with pytest.raises(ValueError, match='explained variance needs values'):
        ICA(n_components=2.3, max_pca_components=3)

    # test essential core functionality
    for n_cov, n_comp, max_n, pcks, method in iter_ica_params:
        # Test ICA raw
        ica = ICA(noise_cov=n_cov, n_components=n_comp,
                  max_pca_components=max_n, n_pca_components=max_n,
                  random_state=0, method=method, max_iter=1)
        # Channel-type membership is undefined before fit (no info yet).
        with pytest.raises(ValueError, match='Cannot check for channels of t'):
            'meg' in ica
        print(ica)  # to test repr

        # test fit checker
        with pytest.raises(RuntimeError, match='No fit available'):
            ica.get_sources(raw)
        with pytest.raises(RuntimeError, match='No fit available'):
            ica.get_sources(epochs)

        # Test error upon empty epochs fitting
        with pytest.raises(RuntimeError, match='none were found'):
            ica.fit(epochs[0:0])

        # test decomposition
        with pytest.warns(UserWarning, match='did not converge'):
            ica.fit(raw, picks=pcks, start=start, stop=stop)
        repr(ica)  # to test repr
        assert ('mag' in ica)  # should now work without error

        # test re-fit: with the same data and random_state the unmixing
        # matrix must be reproduced exactly (to numerical precision)
        unmixing1 = ica.unmixing_matrix_
        with pytest.warns(UserWarning, match='did not converge'):
            ica.fit(raw, picks=pcks, start=start, stop=stop)
        assert_array_almost_equal(unmixing1, ica.unmixing_matrix_)

        raw_sources = ica.get_sources(raw)
        # test for #3804
        assert_equal(raw_sources._filenames, [None])
        print(raw_sources)

        # test for gh-6271 (scaling of ICA traces)
        fig = raw_sources.plot()
        assert len(fig.axes[0].lines) in (4, 5, 6)
        for line in fig.axes[0].lines:
            y = line.get_ydata()
            if len(y) > 2:  # actual data, not markers
                assert np.ptp(y) < 15
        plt.close('all')

        sources = raw_sources[:, :][0]
        assert (sources.shape[0] == ica.n_components_)

        # test preload filter
        raw3 = raw.copy()
        raw3.preload = False
        pytest.raises(RuntimeError, ica.apply, raw3,
                      include=[1, 2])

        #######################################################################
        # test epochs decomposition
        ica = ICA(noise_cov=n_cov, n_components=n_comp,
                  max_pca_components=max_n, n_pca_components=max_n,
                  random_state=0, method=method)
        with pytest.warns(None):  # sometimes warns
            ica.fit(epochs, picks=picks)
        _assert_ica_attributes(ica)
        data = epochs.get_data()[:, 0, :]
        n_samples = np.prod(data.shape)
        assert_equal(ica.n_samples_, n_samples)
        print(ica)  # to test repr

        sources = ica.get_sources(epochs).get_data()
        assert (sources.shape[1] == ica.n_components_)

        with pytest.raises(ValueError, match='target do not have the same nu'):
            ica.score_sources(epochs, target=np.arange(1))

        # test preload filter
        epochs3 = epochs.copy()
        epochs3.preload = False
        with pytest.raises(RuntimeError, match='requires epochs data to be l'):
            ica.apply(epochs3, include=[1, 2])

    # test for bug with whitener updating: applying the ICA to new
    # (rescaled) data must not mutate the stored pre-whitener
    _pre_whitener = ica.pre_whitener_.copy()
    epochs._data[:, 0, 10:15] *= 1e12
    ica.apply(epochs.copy())
    assert_array_equal(_pre_whitener, ica.pre_whitener_)

    # test expl. var threshold leading to empty sel
    ica.n_components = 0.1
    with pytest.raises(RuntimeError, match='One PCA component captures most'):
        ica.fit(epochs)

    offender = 1, 2, 3,
    with pytest.raises(ValueError, match='Data input must be of Raw'):
        ica.get_sources(offender)
    with pytest.raises(TypeError, match='must be an instance of'):
        ica.fit(offender)
    with pytest.raises(TypeError, match='must be an instance of'):
        ica.apply(offender)

    # gh-7868: settings inconsistent with the number of picked channels
    ica.max_pca_components = 3
    ica.n_components = 0.99
    with pytest.raises(ValueError, match='pca_components.*cannot be greater'):
        ica.fit(epochs, picks=[0, 1])
    ica.max_pca_components = None
    ica.n_components = 3
    with pytest.raises(ValueError, match='n_components.*cannot be greater'):
        ica.fit(epochs, picks=[0, 1])
@requires_sklearn
@pytest.mark.slowtest
@pytest.mark.parametrize("method", ["picard", "fastica"])
def test_ica_additional(method):
    """Test additional ICA functionality."""
    _skip_check_picard(method)

    tempdir = _TempDir()
    stop2 = 500
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    raw.del_proj()  # avoid warnings
    raw.set_annotations(Annotations([0.5], [0.5], ['BAD']))
    # XXX This breaks the tests :(
    # raw.info['bads'] = [raw.ch_names[1]]
    test_cov = read_cov(test_cov_name)
    events = read_events(event_name)
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')[1::2]
    epochs = Epochs(raw, events, None, tmin, tmax, picks=picks,
                    baseline=(None, 0), preload=True, proj=False)
    epochs.decimate(3, verbose='error')
    assert len(epochs) == 4

    # test if n_components=None works
    ica = ICA(n_components=None, max_pca_components=None,
              n_pca_components=None, random_state=0, method=method, max_iter=1)
    with pytest.warns(UserWarning, match='did not converge'):
        ica.fit(epochs)
    _assert_ica_attributes(ica)

    # for testing eog functionality
    picks2 = np.concatenate([picks, pick_types(raw.info, False, eog=True)])
    epochs_eog = Epochs(raw, events[:4], event_id, tmin, tmax, picks=picks2,
                        baseline=(None, 0), preload=True)
    del picks2

    test_cov2 = test_cov.copy()
    ica = ICA(noise_cov=test_cov2, n_components=3, max_pca_components=4,
              n_pca_components=4, method=method)
    assert (ica.info is None)
    with pytest.warns(RuntimeWarning, match='normalize_proj'):
        ica.fit(raw, picks[:5])
    _assert_ica_attributes(ica)
    assert (isinstance(ica.info, Info))
    assert (ica.n_components_ < 5)

    ica = ICA(n_components=3, max_pca_components=4, method=method,
              n_pca_components=4, random_state=0)
    # saving an unfitted ICA must fail
    pytest.raises(RuntimeError, ica.save, '')

    ica.fit(raw, picks=[1, 2, 3, 4, 5], start=start, stop=stop2)
    _assert_ica_attributes(ica)

    # check Kuiper index threshold
    assert_equal(ica._get_ctps_threshold(), 0.21)
    # check deprecation warning
    with pytest.warns(DeprecationWarning, match='The default for "threshold"'):
        ica.find_bads_ecg(raw, threshold=None)

    # check invalid `measure`
    with pytest.warns(RuntimeWarning, match='longer'):
        with pytest.raises(ValueError, match='Unknown measure'):
            ica.find_bads_ecg(raw, method='correlation', measure='unknown',
                              threshold='auto')

    # check passing a ch_name to find_bads_ecg: a different reference
    # channel must produce different scores
    with pytest.warns(RuntimeWarning, match='longer'):
        _, scores_1 = ica.find_bads_ecg(raw, threshold='auto')
        _, scores_2 = ica.find_bads_ecg(raw, raw.ch_names[1], threshold='auto')
    assert scores_1[0] != scores_2[0]

    # test corrmap
    ica2 = ica.copy()
    ica3 = ica.copy()
    corrmap([ica, ica2], (0, 0), threshold='auto', label='blinks', plot=True,
            ch_type="mag")
    with pytest.raises(RuntimeError, match='No component detected'):
        corrmap([ica, ica2], (0, 0), threshold=2, plot=False, show=False,)
    corrmap([ica, ica2], (0, 0), threshold=0.5, plot=False, show=False)
    assert (ica.labels_["blinks"] == ica2.labels_["blinks"])
    assert (0 in ica.labels_["blinks"])
    # test retrieval of component maps as arrays
    components = ica.get_components()
    template = components[:, 0]
    EvokedArray(components, ica.info, tmin=0.).plot_topomap([0], time_unit='s')

    corrmap([ica, ica3], template, threshold='auto', label='blinks', plot=True,
            ch_type="mag")
    assert (ica2.labels_["blinks"] == ica3.labels_["blinks"])

    plt.close('all')

    # No match: zeroed-out mixing matrix can never correlate
    bad_ica = ica2.copy()
    bad_ica.mixing_matrix_[:] = 0.
    with pytest.warns(RuntimeWarning, match='divide'):
        with catch_logging() as log:
            corrmap([ica, bad_ica], (0, 0), threshold=0.5, plot=False,
                    show=False, verbose=True)
    log = log.getvalue()
    assert 'No maps selected' in log

    # make sure a single threshold in a list works
    corrmap([ica, ica3], template, threshold=[0.5], label='blinks', plot=True,
            ch_type="mag")

    ica_different_channels = ICA(n_components=2, random_state=0).fit(
        raw, picks=[2, 3, 4, 5])
    pytest.raises(ValueError, corrmap, [ica_different_channels, ica], (0, 0))

    # test warnings on bad filenames
    ica_badname = op.join(op.dirname(tempdir), 'test-bad-name.fif.gz')
    with pytest.warns(RuntimeWarning, match='-ica.fif'):
        ica.save(ica_badname)
    with pytest.warns(RuntimeWarning, match='-ica.fif'):
        read_ica(ica_badname)

    # test decim
    ica = ICA(n_components=3, max_pca_components=4,
              n_pca_components=4, method=method, max_iter=1)
    raw_ = raw.copy()
    for _ in range(3):
        raw_.append(raw_)
    n_samples = raw_._data.shape[1]
    with pytest.warns(UserWarning, match='did not converge'):
        ica.fit(raw, picks=picks[:5], decim=3)
    _assert_ica_attributes(ica)
    # NOTE(review): fit() above operates on `raw`, not the concatenated
    # `raw_`, so this shape check can never fail — looks like a latent
    # typo; confirm whether `raw_` was intended.
    assert raw_._data.shape[1] == n_samples

    # test expl var: with n_components=1.0 all PCA components are kept,
    # and their explained variance must be monotonically decreasing
    ica = ICA(n_components=1.0, max_pca_components=4,
              n_pca_components=4, method=method, max_iter=1)
    with pytest.warns(UserWarning, match='did not converge'):
        ica.fit(raw, picks=None, decim=3)
    _assert_ica_attributes(ica)
    assert (ica.n_components_ == 4)
    ica_var = _ica_explained_variance(ica, raw, normalize=True)
    assert (np.all(ica_var[:-1] >= ica_var[1:]))

    # test ica sorting: exclude and labels_ indices must be remapped
    ica.exclude = [0]
    ica.labels_ = dict(blink=[0], think=[1])
    ica_sorted = _sort_components(ica, [3, 2, 1, 0], copy=True)
    assert_equal(ica_sorted.exclude, [3])
    assert_equal(ica_sorted.labels_, dict(blink=[3], think=[2]))

    # epochs extraction from raw fit
    pytest.raises(RuntimeError, ica.get_sources, epochs)

    # test reading and writing
    test_ica_fname = op.join(op.dirname(tempdir), 'test-ica.fif')
    for cov in (None, test_cov):
        ica = ICA(noise_cov=cov, n_components=2, max_pca_components=4,
                  n_pca_components=4, method=method, max_iter=1)
        with pytest.warns(None):  # ICA does not converge
            ica.fit(raw, picks=picks[:10], start=start, stop=stop2)
        _assert_ica_attributes(ica)
        sources = ica.get_sources(epochs).get_data()
        assert (ica.mixing_matrix_.shape == (2, 2))
        assert (ica.unmixing_matrix_.shape == (2, 2))
        assert (ica.pca_components_.shape == (4, 10))
        assert (sources.shape[1] == ica.n_components_)

        # save/read roundtrip must preserve exclude (as list) and labels_
        for exclude in [[], [0], np.array([1, 2, 3])]:
            ica.exclude = exclude
            ica.labels_ = {'foo': [0]}
            ica.save(test_ica_fname)
            ica_read = read_ica(test_ica_fname)
            assert (list(ica.exclude) == ica_read.exclude)
            assert_equal(ica.labels_, ica_read.labels_)
            ica.apply(raw)
            ica.exclude = []
            # apply(exclude=...) must not modify ica.exclude itself
            ica.apply(raw, exclude=[1])
            assert (ica.exclude == [])

            ica.exclude = [0, 1]
            ica.apply(raw, exclude=[1])
            assert (ica.exclude == [0, 1])
            ica_raw = ica.get_sources(raw)
            assert (ica.exclude == [ica_raw.ch_names.index(e) for e in
                                    ica_raw.info['bads']])

        # test filtering
        d1 = ica_raw._data[0].copy()
        ica_raw.filter(4, 20, fir_design='firwin2')
        assert_equal(ica_raw.info['lowpass'], 20.)
        assert_equal(ica_raw.info['highpass'], 4.)
        assert ((d1 != ica_raw._data[0]).any())
        d1 = ica_raw._data[0].copy()
        ica_raw.notch_filter([10], trans_bandwidth=10, fir_design='firwin')
        assert ((d1 != ica_raw._data[0]).any())

        # modified settings must survive a save/read roundtrip too
        ica.n_pca_components = 2
        ica.method = 'fake'
        ica.save(test_ica_fname)
        ica_read = read_ica(test_ica_fname)
        assert (ica.n_pca_components == ica_read.n_pca_components)
        assert_equal(ica.method, ica_read.method)
        assert_equal(ica.labels_, ica_read.labels_)

        # check type consistency
        attrs = ('mixing_matrix_ unmixing_matrix_ pca_components_ '
                 'pca_explained_variance_ pre_whitener_')

        def f(x, y):
            return getattr(x, y).dtype

        for attr in attrs.split():
            assert_equal(f(ica_read, attr), f(ica, attr))

        ica.n_pca_components = 4
        ica_read.n_pca_components = 4

        ica.exclude = []
        ica.save(test_ica_fname)
        ica_read = read_ica(test_ica_fname)
        for attr in ['mixing_matrix_', 'unmixing_matrix_', 'pca_components_',
                     'pca_mean_', 'pca_explained_variance_',
                     'pre_whitener_']:
            assert_array_almost_equal(getattr(ica, attr),
                                      getattr(ica_read, attr))

        assert (ica.ch_names == ica_read.ch_names)
        assert (isinstance(ica_read.info, Info))

        # the roundtripped ICA must produce identical sources and
        # identical reconstructions
        sources = ica.get_sources(raw)[:, :][0]
        sources2 = ica_read.get_sources(raw)[:, :][0]
        assert_array_almost_equal(sources, sources2)

        _raw1 = ica.apply(raw, exclude=[1])
        _raw2 = ica_read.apply(raw, exclude=[1])
        assert_array_almost_equal(_raw1[:, :][0], _raw2[:, :][0])

    os.remove(test_ica_fname)

    # check score funcs
    for name, func in get_score_funcs().items():
        if name in score_funcs_unsuited:
            continue
        scores = ica.score_sources(raw, target='EOG 061', score_func=func,
                                   start=0, stop=10)
        assert (ica.n_components_ == len(scores))

    # check univariate stats
    scores = ica.score_sources(raw, start=0, stop=50, score_func=stats.skew)
    # check exception handling
    pytest.raises(ValueError, ica.score_sources, raw,
                  target=np.arange(1))

    params = []
    params += [(None, -1, slice(2), [0, 1])]  # variance, kurtosis params
    params += [(None, 'MEG 1531')]  # ECG / EOG channel params
    for idx, ch_name in product(*params):
        ica.detect_artifacts(raw, start_find=0, stop_find=50, ecg_ch=ch_name,
                             eog_ch=ch_name, skew_criterion=idx,
                             var_criterion=idx, kurt_criterion=idx)

    # Make sure detect_artifacts marks the right components.
    # For int criterion, the doc says "E.g. range(2) would return the two
    # sources with the highest score". Assert that's what it does.
    # Only test for skew, since it's always the same code.
    ica.exclude = []
    ica.detect_artifacts(raw, start_find=0, stop_find=50, ecg_ch=None,
                         eog_ch=None, skew_criterion=0,
                         var_criterion=None, kurt_criterion=None)
    assert np.abs(scores[ica.exclude]) == np.max(np.abs(scores))

    evoked = epochs.average()
    evoked_data = evoked.data.copy()
    raw_data = raw[:][0].copy()
    epochs_data = epochs.get_data().copy()

    with pytest.warns(RuntimeWarning, match='longer'):
        idx, scores = ica.find_bads_ecg(raw, method='ctps', threshold='auto')
    assert_equal(len(scores), ica.n_components_)
    with pytest.warns(RuntimeWarning, match='longer'):
        idx, scores = ica.find_bads_ecg(raw, method='correlation',
                                        threshold='auto')
    assert_equal(len(scores), ica.n_components_)

    with pytest.warns(RuntimeWarning, match='longer'):
        idx, scores = ica.find_bads_eog(raw)
    assert_equal(len(scores), ica.n_components_)

    idx, scores = ica.find_bads_ecg(epochs, method='ctps', threshold='auto')
    assert_equal(len(scores), ica.n_components_)
    pytest.raises(ValueError, ica.find_bads_ecg, epochs.average(),
                  method='ctps', threshold='auto')
    pytest.raises(ValueError, ica.find_bads_ecg, raw,
                  method='crazy-coupling')

    with pytest.warns(RuntimeWarning, match='longer'):
        idx, scores = ica.find_bads_eog(raw)
    assert_equal(len(scores), ica.n_components_)

    # relabel the channel before 'EOG 061' as EOG (kind 202) so two EOG
    # channels exist — find_bads_eog then returns a list of score arrays
    # (asserted below)
    raw.info['chs'][raw.ch_names.index('EOG 061') - 1]['kind'] = 202
    with pytest.warns(RuntimeWarning, match='longer'):
        idx, scores = ica.find_bads_eog(raw)
    assert (isinstance(scores, list))
    assert_equal(len(scores[0]), ica.n_components_)

    idx, scores = ica.find_bads_eog(evoked, ch_name='MEG 1441')
    assert_equal(len(scores), ica.n_components_)

    idx, scores = ica.find_bads_ecg(evoked, method='correlation',
                                    threshold='auto')
    assert_equal(len(scores), ica.n_components_)

    # none of the find_bads_* calls above may mutate the input data
    assert_array_equal(raw_data, raw[:][0])
    assert_array_equal(epochs_data, epochs.get_data())
    assert_array_equal(evoked_data, evoked.data)

    # check score funcs
    for name, func in get_score_funcs().items():
        if name in score_funcs_unsuited:
            continue
        scores = ica.score_sources(epochs_eog, target='EOG 061',
                                   score_func=func)
        assert (ica.n_components_ == len(scores))

    # check univariate stats
    scores = ica.score_sources(epochs, score_func=stats.skew)

    # check exception handling
    pytest.raises(ValueError, ica.score_sources, epochs,
                  target=np.arange(1))

    # ecg functionality
    ecg_scores = ica.score_sources(raw, target='MEG 1531',
                                   score_func='pearsonr')

    with pytest.warns(RuntimeWarning, match='longer'):
        ecg_events = ica_find_ecg_events(
            raw, sources[np.abs(ecg_scores).argmax()])
    assert (ecg_events.ndim == 2)

    # eog functionality
    eog_scores = ica.score_sources(raw, target='EOG 061',
                                   score_func='pearsonr')
    with pytest.warns(RuntimeWarning, match='longer'):
        eog_events = ica_find_eog_events(
            raw, sources[np.abs(eog_scores).argmax()])
    assert (eog_events.ndim == 2)

    # Test ica fiff export
    ica_raw = ica.get_sources(raw, start=0, stop=100)
    assert (ica_raw.last_samp - ica_raw.first_samp == 100)
    assert_equal(len(ica_raw._filenames), 1)  # API consistency
    ica_chans = [ch for ch in ica_raw.ch_names if 'ICA' in ch]
    assert (ica.n_components_ == len(ica_chans))
    test_ica_fname = op.join(op.abspath(op.curdir), 'test-ica_raw.fif')
    ica.n_components = np.int32(ica.n_components)
    ica_raw.save(test_ica_fname, overwrite=True)
    ica_raw2 = read_raw_fif(test_ica_fname, preload=True)
    assert_allclose(ica_raw._data, ica_raw2._data, rtol=1e-5, atol=1e-4)
    ica_raw2.close()
    os.remove(test_ica_fname)

    # Test ica epochs export
    ica_epochs = ica.get_sources(epochs)
    assert (ica_epochs.events.shape == epochs.events.shape)
    ica_chans = [ch for ch in ica_epochs.ch_names if 'ICA' in ch]
    assert (ica.n_components_ == len(ica_chans))
    assert (ica.n_components_ == ica_epochs.get_data().shape[1])
    assert (ica_epochs._raw is None)
    assert (ica_epochs.preload is True)

    # test float n pca components: cumulative explained-variance
    # thresholds are resolved to integer counts
    ica.pca_explained_variance_ = np.array([0.2] * 5)
    ica.n_components_ = 0
    for ncomps, expected in [[0.3, 1], [0.9, 4], [1, 1]]:
        ncomps_ = ica._check_n_pca_components(ncomps)
        assert (ncomps_ == expected)

    ica = ICA(method=method)
    with pytest.warns(None):  # sometimes does not converge
        ica.fit(raw, picks=picks[:5])
    _assert_ica_attributes(ica)
    with pytest.warns(RuntimeWarning, match='longer'):
        ica.find_bads_ecg(raw, threshold='auto')
    ica.find_bads_eog(epochs, ch_name='MEG 0121')
    assert_array_equal(raw_data, raw[:][0])

    # after dropping the reference channel the find_bads_* helpers must
    # raise instead of silently using a different channel
    raw.drop_channels(['MEG 0122'])
    pytest.raises(RuntimeError, ica.find_bads_eog, raw)
    with pytest.warns(RuntimeWarning, match='longer'):
        pytest.raises(RuntimeError, ica.find_bads_ecg, raw, threshold='auto')
@requires_sklearn
@pytest.mark.parametrize("method", ("fastica", "picard", "infomax"))
@pytest.mark.parametrize("idx", (None, -1, slice(2), [0, 1]))
@pytest.mark.parametrize("ch_name", (None, 'MEG 1531'))
def test_detect_artifacts_replacement_of_run_ica(method, idx, ch_name):
    """Test replacement workflow for deprecated run_ica() function."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    ica = ICA(n_components=2, method=method)
    ica.fit(raw)
    # run_ica() used to fit and detect in one call; the replacement is an
    # explicit fit() followed by detect_artifacts() with the same knobs.
    detect_kwargs = dict(start_find=0, stop_find=5, ecg_ch=ch_name,
                         eog_ch=ch_name, skew_criterion=idx,
                         var_criterion=idx, kurt_criterion=idx)
    ica.detect_artifacts(raw, **detect_kwargs)
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_reject_buffer(method):
    """Test ICA data raw buffer rejection."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')
    # Inject an artifact that exceeds the mag rejection threshold below.
    raw._data[2, 1000:1005] = 5e-12
    ica = ICA(n_components=3, max_pca_components=4, n_pca_components=4,
              method=method)
    with catch_logging() as drop_log:
        ica.fit(raw, picks[:5], reject=dict(mag=2.5e-12), decim=2,
                tstep=0.01, verbose=True, reject_by_annotation=False)
    # Exactly one buffer (4 decimated samples) must have been dropped.
    assert ica.n_samples_ == raw._data[:5, ::2].shape[1] - 4
    detections = [line for line in drop_log.getvalue().split('\n')
                  if 'detected' in line]
    assert len(detections) == 1
    _assert_ica_attributes(ica)
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_ica_twice(method):
    """Test running ICA twice."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    picks = pick_types(raw.info, meg='grad', exclude='bads')
    n_components = 0.9
    n_pca_components = 1.1
    # First pass: fit, then reconstruct the raw from the kept components.
    ica1 = ICA(n_components=n_components, method=method,
               max_pca_components=None,
               n_pca_components=n_pca_components, random_state=0)
    ica1.fit(raw, picks=picks, decim=3)
    raw_new = ica1.apply(raw, n_pca_components=n_pca_components)
    # Second pass on the reconstructed data must select the same number
    # of components.
    ica2 = ICA(n_components=n_components, method=method,
               max_pca_components=None,
               n_pca_components=1.0, random_state=0)
    ica2.fit(raw_new, picks=picks, decim=3)
    assert ica1.n_components_ == ica2.n_components_
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard", "infomax"])
def test_fit_params(method, tmpdir):
    """Test fit_params for ICA."""
    _skip_check_picard(method)
    # Instantiation must not mutate the user-provided dict.
    fit_params = {}
    ICA(fit_params=fit_params, method=method)  # test no side effects
    assert fit_params == {}

    # Test I/O roundtrip.
    # Only picard and infomax support the "extended" keyword, so limit the
    # tests to those.
    if method not in ('picard', 'infomax'):
        return
    output_fname = op.join(str(tmpdir), 'test_ica-ica.fif')
    raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data()
    ica = ICA(fit_params=dict(extended=True), n_components=3,
              max_iter=1, method=method)
    fit_params_after_instantiation = ica.fit_params
    if method == 'infomax':
        ica.fit(raw)
    else:
        with pytest.warns(UserWarning, match='did not converge'):
            ica.fit(raw)
    ica.save(output_fname)
    ica = read_ica(output_fname)
    assert ica.fit_params == fit_params_after_instantiation
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
@pytest.mark.parametrize("allow_ref_meg", [True, False])
def test_bad_channels(method, allow_ref_meg):
    """Test exception when unsupported channels are used."""
    _skip_check_picard(method)
    # Build a raw and epochs object containing one channel of every kind.
    chs = [i for i in _kind_dict]
    info = create_info(len(chs), 500, chs)
    rng = np.random.RandomState(0)
    raw = RawArray(rng.rand(len(chs), 50), info)
    epochs = EpochsArray(rng.rand(100, len(chs), 50), info)

    # Anything that is not a data channel or EOG (plus ref_meg when
    # allowed) must make ICA.fit() raise.
    data_chs = list(_DATA_CH_TYPES_SPLIT + ('eog',))
    if allow_ref_meg:
        data_chs.append('ref_meg')
    chs_bad = list(set(chs) - set(data_chs))
    ica = ICA(n_components=0.9, method=method, allow_ref_meg=allow_ref_meg)
    for inst in [raw, epochs]:
        for ch in chs_bad:
            # First selection: only bad channels; second: bad channels
            # mixed with good (MEG) channels.
            if allow_ref_meg:
                pick_kwargs_list = [dict(meg=False, ref_meg=False),
                                    dict(meg=True, ref_meg=True)]
            else:
                pick_kwargs_list = [dict(meg=False), dict(meg=True)]
            for pick_kwargs in pick_kwargs_list:
                picks_bad = pick_types(inst.info, **pick_kwargs,
                                       **{str(ch): True})
                pytest.raises(ValueError, ica.fit, inst, picks=picks_bad)
    pytest.raises(ValueError, ica.fit, inst, picks=[])
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_eog_channel(method):
    """Test that EOG channel is included when performing ICA."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname, preload=True)
    events = read_events(event_name)
    picks = pick_types(raw.info, meg=True, stim=True, ecg=False,
                       eog=True, exclude='bads')
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), preload=True)
    ica = ICA(n_components=0.9, method=method)
    # Test case for MEG and EOG data. Should have EOG channel
    for inst in [raw, epochs]:
        meg_picks = pick_types(inst.info, meg=True, stim=False, ecg=False,
                               eog=False, exclude='bads')[:4]
        eog_picks = pick_types(inst.info, meg=False, stim=False, ecg=False,
                               eog=True, exclude='bads')
        ica.fit(inst, picks=np.append(meg_picks, eog_picks))
        assert any('EOG' in ch for ch in ica.ch_names)
        _assert_ica_attributes(ica)
    # Test case for MEG data. Should have no EOG channel
    for inst in [raw, epochs]:
        meg_only = pick_types(inst.info, meg=True, stim=False, ecg=False,
                              eog=False, exclude='bads')[:5]
        ica.fit(inst, picks=meg_only)
        _assert_ica_attributes(ica)
        assert not any('EOG' in ch for ch in ica.ch_names)
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_max_pca_components_none(method):
    """Test max_pca_components=None."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    events = read_events(event_name)
    picks = pick_types(raw.info, eeg=True, meg=False)
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), preload=True)
    tempdir = _TempDir()
    output_fname = op.join(tempdir, 'test_ica-ica.fif')

    ica = ICA(max_pca_components=None, method=method,
              n_components=10, random_state=12345)
    with pytest.warns(None):
        ica.fit(epochs)
    _assert_ica_attributes(ica)
    ica.save(output_fname)

    ica = read_ica(output_fname)
    # ICA.fit() replaced max_pca_components, which was previously None,
    # with the appropriate integer value.
    assert ica.max_pca_components == epochs.info['nchan']
    assert ica.n_components == 10
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_n_components_none(method):
    """Test n_components=None."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    events = read_events(event_name)
    picks = pick_types(raw.info, eeg=True, meg=False)
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), preload=True)
    tempdir = _TempDir()
    output_fname = op.join(tempdir, 'test_ica-ica.fif')

    ica = ICA(max_pca_components=10, method=method,
              n_components=None, random_state=12345)
    with pytest.warns(None):
        ica.fit(epochs)
    _assert_ica_attributes(ica)
    ica.save(output_fname)

    ica = read_ica(output_fname)
    _assert_ica_attributes(ica)
    # An explicit max_pca_components must survive the fit and the
    # save/read roundtrip, while n_components stays None.
    assert ica.max_pca_components == 10
    assert ica.n_components is None
@requires_sklearn
@pytest.mark.parametrize("method", ["fastica", "picard"])
def test_n_components_and_max_pca_components_none(method):
    """Test n_components and max_pca_components=None."""
    _skip_check_picard(method)
    raw = read_raw_fif(raw_fname).crop(1.5, stop).load_data()
    events = read_events(event_name)
    picks = pick_types(raw.info, eeg=True, meg=False)
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), preload=True)
    tempdir = _TempDir()
    output_fname = op.join(tempdir, 'test_ica-ica.fif')

    ica = ICA(max_pca_components=None, method=method,
              n_components=None, random_state=12345)
    with pytest.warns(None):  # convergence
        ica.fit(epochs)
    ica.save(output_fname)
    _assert_ica_attributes(ica)

    ica = read_ica(output_fname)
    _assert_ica_attributes(ica)
    # ICA.fit() replaced max_pca_components, which was previously None,
    # with the appropriate integer value.
    assert ica.max_pca_components == epochs.info['nchan']
    assert ica.n_components is None
@requires_sklearn
@testing.requires_testing_data
def test_ica_ctf():
    """Test run ICA computation on ctf data with/without compensation."""
    method = 'fastica'
    raw = read_raw_ctf(ctf_fname, preload=True)
    events = make_fixed_length_events(raw, 99999)
    # Exercise both uncompensated (0) and compensated (1) data.
    for comp in [0, 1]:
        raw.apply_gradient_compensation(comp)
        epochs = Epochs(raw, events, None, -0.2, 0.2, preload=True)
        evoked = epochs.average()

        # test fit
        for inst in [raw, epochs]:
            ica = ICA(n_components=2, random_state=0, max_iter=2,
                      method=method)
            with pytest.warns(UserWarning, match='did not converge'):
                ica.fit(inst)
            _assert_ica_attributes(ica)

        # test apply and get_sources
        for inst in [raw, epochs, evoked]:
            ica.apply(inst)
            ica.get_sources(inst)

    # test mixed compensation case: ICA fitted on grade-0 data must
    # refuse to operate on grade-1 data
    raw.apply_gradient_compensation(0)
    ica = ICA(n_components=2, random_state=0, max_iter=2, method=method)
    with pytest.warns(UserWarning, match='did not converge'):
        ica.fit(raw)
    _assert_ica_attributes(ica)
    raw.apply_gradient_compensation(1)
    epochs = Epochs(raw, events, None, -0.2, 0.2, preload=True)
    evoked = epochs.average()
    for inst in [raw, epochs, evoked]:
        with pytest.raises(RuntimeError, match='Compensation grade of ICA'):
            ica.apply(inst)
        with pytest.raises(RuntimeError, match='Compensation grade of ICA'):
            ica.get_sources(inst)
@requires_sklearn
@testing.requires_testing_data
def test_ica_labels():
    """Test ICA labels."""
    # The CTF data are uniquely well suited to testing the ICA.find_bads_
    # methods
    raw = read_raw_ctf(ctf_fname, preload=True)

    # set the appropriate EEG channels to EOG and ECG
    raw.set_channel_types({'EEG057': 'eog', 'EEG058': 'eog', 'EEG059': 'ecg'})
    ica = ICA(n_components=4, random_state=0, max_iter=2, method='fastica',
              allow_ref_meg=True)
    with pytest.warns(UserWarning, match='did not converge'):
        ica.fit(raw)
    _assert_ica_attributes(ica)

    # find_bads_eog adds one per-channel key ('eog/<idx>/<ch_name>') plus
    # the aggregate 'eog' key; nothing ECG/ref-related yet
    ica.find_bads_eog(raw, l_freq=None, h_freq=None)
    picks = list(pick_types(raw.info, meg=False, eog=True))
    for idx, ch in enumerate(picks):
        assert '{}/{}/{}'.format('eog', idx, raw.ch_names[ch]) in ica.labels_
    assert 'eog' in ica.labels_
    for key in ('ecg', 'ref_meg', 'ecg/ECG-MAG'):
        assert key not in ica.labels_

    # find_bads_ecg behaves analogously for the ECG channel(s)
    ica.find_bads_ecg(raw, l_freq=None, h_freq=None, method='correlation',
                      threshold='auto')
    picks = list(pick_types(raw.info, meg=False, ecg=True))
    for idx, ch in enumerate(picks):
        assert '{}/{}/{}'.format('ecg', idx, raw.ch_names[ch]) in ica.labels_
    for key in ('ecg', 'eog'):
        assert key in ica.labels_
    for key in ('ref_meg', 'ecg/ECG-MAG'):
        assert key not in ica.labels_

    # derive reference ICA components and append them to raw
    ica_rf = ICA(n_components=2, random_state=0, max_iter=2,
                 allow_ref_meg=True)
    with pytest.warns(UserWarning, match='did not converge'):
        ica_rf.fit(raw.copy().pick_types(meg=False, ref_meg=True))
    icacomps = ica_rf.get_sources(raw)
    # rename components so they are auto-detected by find_bads_ref
    icacomps.rename_channels({c: 'REF_' + c for c in icacomps.ch_names})
    # and add them to raw
    raw.add_channels([icacomps])
    ica.find_bads_ref(raw, l_freq=None, h_freq=None, method="separate")
    picks = pick_channels_regexp(raw.ch_names, 'REF_ICA*')
    for idx, ch in enumerate(picks):
        assert '{}/{}/{}'.format('ref_meg', idx,
                                 raw.ch_names[ch]) in ica.labels_
    ica.find_bads_ref(raw, l_freq=None, h_freq=None, method="together")
    assert 'ref_meg' in ica.labels_

    for key in ('ecg', 'eog', 'ref_meg'):
        assert key in ica.labels_
    assert 'ecg/ECG-MAG' not in ica.labels_

    # the default (ctps) ECG detection adds the 'ecg/ECG-MAG' key as well
    ica.find_bads_ecg(raw, l_freq=None, h_freq=None, threshold='auto')
    for key in ('ecg', 'eog', 'ref_meg', 'ecg/ECG-MAG'):
        assert key in ica.labels_
@requires_sklearn
@testing.requires_testing_data
def test_ica_eeg():
    """Test ICA on EEG."""
    method = 'fastica'
    raw_fif = read_raw_fif(fif_fname, preload=True)
    raw_eeglab = read_raw_eeglab(input_fname=eeglab_fname,
                                 preload=True)
    for raw in [raw_fif, raw_eeglab]:
        events = make_fixed_length_events(raw, 99999, start=0, stop=0.3,
                                          duration=0.1)
        picks_meg = pick_types(raw.info, meg=True, eeg=False)[:2]
        picks_eeg = pick_types(raw.info, meg=False, eeg=True)[:2]
        picks_all = []
        picks_all.extend(picks_meg)
        picks_all.extend(picks_eeg)
        epochs = Epochs(raw, events, None, -0.1, 0.1, preload=True)
        evoked = epochs.average()

        for picks in [picks_meg, picks_eeg, picks_all]:
            if len(picks) == 0:
                continue
            # test fit
            for inst in [raw, epochs]:
                ica = ICA(n_components=2, random_state=0, max_iter=2,
                          method=method)
                with pytest.warns(None):
                    ica.fit(inst, picks=picks)
                _assert_ica_attributes(ica)

            # test apply and get_sources
            for inst in [raw, epochs, evoked]:
                ica.apply(inst)
                ica.get_sources(inst)

    # Same exercise on CTF data, at both compensation grades.
    with pytest.warns(RuntimeWarning, match='MISC channel'):
        raw = read_raw_ctf(ctf_fname2, preload=True)
    events = make_fixed_length_events(raw, 99999, start=0, stop=0.2,
                                      duration=0.1)
    picks_meg = pick_types(raw.info, meg=True, eeg=False)[:2]
    picks_eeg = pick_types(raw.info, meg=False, eeg=True)[:2]
    # NOTE(review): picks are ndarrays here, so '+' is elementwise
    # addition, not concatenation (cf. the list-based extend above) —
    # confirm this is intended.
    picks_all = picks_meg + picks_eeg
    for comp in [0, 1]:
        raw.apply_gradient_compensation(comp)
        epochs = Epochs(raw, events, None, -0.1, 0.1, preload=True)
        evoked = epochs.average()

        for picks in [picks_meg, picks_eeg, picks_all]:
            if len(picks) == 0:
                continue
            # test fit
            for inst in [raw, epochs]:
                ica = ICA(n_components=2, random_state=0, max_iter=2,
                          method=method)
                with pytest.warns(None):
                    ica.fit(inst)
                _assert_ica_attributes(ica)

            # test apply and get_sources
            for inst in [raw, epochs, evoked]:
                ica.apply(inst)
                ica.get_sources(inst)
@testing.requires_testing_data
def test_read_ica_eeglab():
    """Test read_ica_eeglab function."""
    fname = op.join(test_base_dir, "EEGLAB", "test_raw.set")
    fname_cleaned_matlab = op.join(test_base_dir, "EEGLAB",
                                   "test_raw.cleaned.set")
    raw = read_raw_eeglab(fname, preload=True)
    raw_eeg = _check_load_mat(fname, None)
    raw_cleaned_matlab = read_raw_eeglab(fname_cleaned_matlab,
                                         preload=True)
    # Find the indices of components flagged "manual" in the EEGLAB file;
    # those are the ones MATLAB removed from the cleaned reference data.
    mark_to_remove = ["manual"]
    comp_info = raw_eeg.marks["comp_info"]
    if len(comp_info["flags"].shape) > 1:
        # Multiple flag rows: gather indices per labelled row, then dedupe.
        ind_comp_to_drop = [np.where(flags)[0]
                            for flags, label in zip(comp_info["flags"],
                                                    comp_info["label"])
                            if label in mark_to_remove]
        ind_comp_to_drop = np.unique(np.concatenate(ind_comp_to_drop))
    else:
        ind_comp_to_drop = np.where(comp_info["flags"])[0]
    # Applying the imported ICA with the same exclusions must reproduce the
    # MATLAB-cleaned data.
    ica = read_ica_eeglab(fname)
    _assert_ica_attributes(ica)
    raw_cleaned = ica.apply(raw.copy(), exclude=ind_comp_to_drop)
    assert_allclose(raw_cleaned_matlab.get_data(), raw_cleaned.get_data(),
                    rtol=1e-05, atol=1e-08)
def _assert_ica_attributes(ica):
    """Check shape and orthogonality invariants of a fitted ICA object.

    Also serves as documentation of the shapes these arrays can obtain
    and how they obtain them.
    """
    __tracebackhide__ = True
    n_channels = len(ica.ch_names)
    # Pre-whitener: full matrix with a noise cov, a single column otherwise.
    whitener_cols = n_channels if ica.noise_cov is not None else 1
    assert ica.pre_whitener_.shape == (n_channels, whitener_cols)
    # PCA stage: components form an orthonormal basis over the channels.
    n_pca = ica.max_pca_components
    assert ica.pca_components_.shape == (n_pca, n_channels), 'PCA shape'
    assert_allclose(ica.pca_components_ @ ica.pca_components_.T,
                    np.eye(n_pca), atol=1e-6, err_msg='PCA orthogonality')
    assert ica.pca_mean_.shape == (n_channels,)
    # ICA stage: mixing/unmixing are square in the number of components,
    # and mixing is the pseudo-inverse of unmixing.
    n_comp = ica.n_components_
    assert ica.unmixing_matrix_.shape == (n_comp, n_comp), 'Unmixing shape'
    assert ica.mixing_matrix_.shape == (n_comp, n_comp), 'Mixing shape'
    mix_unmix = ica.mixing_matrix_ @ ica.unmixing_matrix_
    singular = linalg.svdvals(ica.unmixing_matrix_)
    n_null = len(singular) - (singular > singular[0] * 1e-12).sum()
    expected = np.eye(n_comp)
    expected[:n_null] = 0
    assert_allclose(mix_unmix, expected, atol=1e-6, err_msg='Mixing as pinv')
run_tests_if_main()
|
from buzhug import *
|
"""
:mod:`operalib.kernels` implements some Operator-Valued Kernel
models.
"""
from numpy import dot, diag, sqrt
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.kernel_approximation import RBFSampler, SkewedChi2Sampler
from scipy.sparse.linalg import LinearOperator
from scipy.linalg import svd
class DotProductKernel(object):
    r"""Dot-product Operator-Valued Kernel:

    .. math::
        x, y \mapsto K(x, y) = \mu \langle x, y \rangle 1_p + (1-\mu) \langle
        x, y \rangle^2 I_p

    Attributes
    ----------
    mu : {array, LinearOperator}, shape = [n_targets, n_targets]
        Tradeoff between shared and independant components

    p : {Int}
        dimension of the targets (n_targets).

    See also
    --------
    DotProductKernelMap
        Dot Product Kernel Map

    Examples
    --------
    >>> import operalib as ovk
    >>> import numpy as np
    >>> X = np.random.randn(100, 10)
    >>> K = ovk.DotProductKernel(mu=.2, p=5)
    >>> # The kernel matrix as a linear operator
    >>> K(X, X)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
    <500x500 _CustomLinearOperator with dtype=float64>
    """

    def __init__(self, mu, p):
        """Initialize the dot-product Operator-Valued Kernel.

        Parameters
        ----------
        mu : {float}
            Tradeoff between shared and independant components.

        p : {integer}
            dimension of the targets (n_targets).
        """
        self.mu = mu
        self.p = p

    def get_kernel_map(self, X):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: Y \mapsto K(X, Y)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        K_x : DotProductKernelMap, callable
            .. math::
                K_x: Y \mapsto K(X, Y).
        """
        # Imported lazily to avoid a circular import with kernel_maps.
        from .kernel_maps import DotProductKernelMap
        return DotProductKernelMap(X, self.mu, self.p)

    def __call__(self, X, Y=None):
        r"""Return the kernel map for X, or the kernel matrix of (X, Y).

        .. math::
            K_x: \begin{cases}
            Y \mapsto K(X, Y) \enskip\text{if } Y \text{is None,} \\
            K(X, Y) \enskip\text{otherwise.}
            \end{cases}

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples1, n_features]
            Samples.

        Y : {array-like, sparse matrix}, shape = [n_samples2, n_features],
            default = None
            Samples.

        Returns
        -------
        K_x : DotProductKernelMap, callable or LinearOperator
            The kernel map when ``Y`` is None, else the evaluated operator.
        """
        kmap = self.get_kernel_map(X)
        return kmap if Y is None else kmap(Y)
class DecomposableKernel(object):
    r"""
    Decomposable Operator-Valued Kernel of the form:

    .. math::
        X, Y \mapsto K(X, Y) = k_s(X, Y) A

    where A is a symmetric positive semidefinite operator acting on the
    outputs.

    Attributes
    ----------
    A : {array, LinearOperator}, shape = [n_targets, n_targets]
        Linear operator acting on the outputs

    scalar_kernel : {callable}
        Callable which associate to the training points X the Gram matrix.

    scalar_kernel_params : {mapping of string to any}
        Additional parameters (keyword arguments) for kernel function passed as
        callable object.

    See also
    --------
    DecomposableKernelMap
        Decomposable Kernel map

    Examples
    --------
    >>> import operalib as ovk
    >>> import numpy as np
    >>> X = np.random.randn(100, 10)
    >>> K = ovk.DecomposableKernel(np.eye(2))
    >>> # The kernel matrix as a linear operator
    >>> K(X, X)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
    <200x200 _CustomLinearOperator with dtype=float64>
    """

    def __init__(self, A, scalar_kernel=rbf_kernel, scalar_kernel_params=None):
        """Initialize the Decomposable Operator-Valued Kernel.

        Parameters
        ----------
        A : {array, LinearOperator}, shape = [n_targets, n_targets]
            Linear operator acting on the outputs

        scalar_kernel : {callable}
            Callable which associate to the training points X the Gram matrix.

        scalar_kernel_params : {mapping of string to any}, optional
            Additional parameters (keyword arguments) for kernel function
            passed as callable object.
        """
        self.A = A
        self.scalar_kernel = scalar_kernel
        self.scalar_kernel_params = scalar_kernel_params
        self.p = A.shape[0]

    def get_kernel_map(self, X):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: Y \mapsto K(X, Y)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        K_x : DecomposableKernelMap, callable
            .. math::
                K_x: Y \mapsto K(X, Y).
        """
        from .kernel_maps import DecomposableKernelMap
        return DecomposableKernelMap(X, self.A,
                                     self.scalar_kernel,
                                     self.scalar_kernel_params)

    def get_orff_map(self, X, D=100, eps=1e-5, random_state=0):
        r"""Return the Random Fourier Feature map associated with the data X.

        .. math::
            K_x: Y \mapsto \tilde{\Phi}(X)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        \tilde{\Phi}(X) : Linear Operator, callable
        """
        # Low-rank factor B_ of A (A ~= B_.T B_), keeping only singular
        # values larger than eps.
        u, s, v = svd(self.A, full_matrices=False, compute_uv=True)
        self.B_ = dot(diag(sqrt(s[s > eps])), v[s > eps, :])
        self.r = self.B_.shape[0]
        # Fit and cache the scalar feature map on first use.
        if (self.scalar_kernel is rbf_kernel) and not hasattr(self, 'Xb_'):
            if self.scalar_kernel_params is None:
                gamma = 1.
            else:
                gamma = self.scalar_kernel_params['gamma']
            self.phi_ = RBFSampler(gamma=gamma,
                                   n_components=D, random_state=random_state)
            self.phi_.fit(X)
            self.Xb_ = self.phi_.transform(X).astype(X.dtype)
        # BUG FIX: was ``self.scalar_kernel is 'skewed_chi2'`` -- identity
        # comparison with a string literal is implementation-dependent;
        # use equality instead.
        elif (self.scalar_kernel == 'skewed_chi2') and not hasattr(self,
                                                                   'Xb_'):
            if self.scalar_kernel_params is None:
                skew = 1.
            else:
                skew = self.scalar_kernel_params['skew']
            self.phi_ = SkewedChi2Sampler(skewedness=skew,
                                          n_components=D,
                                          random_state=random_state)
            self.phi_.fit(X)
            self.Xb_ = self.phi_.transform(X).astype(X.dtype)
        elif not hasattr(self, 'Xb_'):
            raise NotImplementedError('ORFF map for kernel is not '
                                      'implemented yet')

        D = self.phi_.n_components
        if X is self.Xb_:
            cshape = (D, self.r)
            rshape = (self.Xb_.shape[0], self.p)
            oshape = (self.Xb_.shape[0] * self.p, D * self.r)
            # BUG FIX: rmatvec referenced an undefined local ``Xb`` in this
            # branch (NameError at call time); use the cached self.Xb_.
            return LinearOperator(oshape,
                                  dtype=self.Xb_.dtype,
                                  matvec=lambda b: dot(dot(self.Xb_,
                                                           b.reshape(cshape)),
                                                       self.B_),
                                  rmatvec=lambda r: dot(self.Xb_.T,
                                                        dot(r.reshape(rshape),
                                                            self.B_.T)))
        else:
            Xb = self.phi_.transform(X)
            cshape = (D, self.r)
            rshape = (X.shape[0], self.p)
            oshape = (Xb.shape[0] * self.p, D * self.r)
            return LinearOperator(oshape,
                                  dtype=self.Xb_.dtype,
                                  matvec=lambda b: dot(dot(Xb,
                                                           b.reshape(cshape)),
                                                       self.B_),
                                  rmatvec=lambda r: dot(Xb.T,
                                                        dot(r.reshape(rshape),
                                                            self.B_.T)))

    def __call__(self, X, Y=None):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: \begin{cases}
            Y \mapsto K(X, Y) \enskip\text{if } Y \text{is None,} \\
            K(X, Y) \enskip\text{otherwise.}
            \end{cases}

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples1, n_features]
            Samples.

        Y : {array-like, sparse matrix}, shape = [n_samples2, n_features],
            default = None
            Samples.

        Returns
        -------
        K_x : DecomposableKernelMap, callable or LinearOperator
            The kernel map when ``Y`` is None, else the evaluated operator.
        """
        Kmap = self.get_kernel_map(X)
        if Y is None:
            return Kmap
        else:
            return Kmap(Y)
class RBFCurlFreeKernel(object):
    r"""
    Curl-free Operator-Valued Kernel of the form:

    .. math::
        X \mapsto K_X(Y) = 2 \gamma exp(-\gamma||X - Y||^2)(I - 2\gamma(X - Y)
        (X - T)^T).

    Attributes
    ----------
    gamma : {float}
        RBF kernel parameter.

    See also
    --------
    RBFCurlFreeKernelMap
        Curl-free Kernel map

    Examples
    --------
    >>> import operalib as ovk
    >>> import numpy as np
    >>> X = np.random.randn(100, 2)
    >>> K = ovk.RBFCurlFreeKernel(1.)
    >>> # The kernel matrix as a linear operator
    >>> K(X, X)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
    <200x200 _CustomLinearOperator with dtype=float64>
    """

    def __init__(self, gamma):
        """Initialize the curl-free Operator-Valued Kernel.

        Parameters
        ----------
        gamma : {float}
            RBF kernel parameter.
        """
        self.gamma = gamma

    def get_kernel_map(self, X):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: Y \mapsto K(X, Y)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        K_x : RBFCurlFreeKernelMap, callable
            .. math::
                K_x: Y \mapsto K(X, Y).
        """
        from .kernel_maps import RBFCurlFreeKernelMap
        return RBFCurlFreeKernelMap(X, self.gamma)

    def get_orff_map(self, X, D=100, random_state=0):
        r"""Return the Random Fourier Feature map associated with the data X.

        .. math::
            K_x: Y \mapsto \tilde{\Phi}(X)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        \tilde{\Phi}(X) : Linear Operator, callable
        """
        self.r = 1
        if not hasattr(self, 'Xb_'):
            # Fit the RFF sampler once and cache the weighted feature basis.
            self.phi_ = RBFSampler(gamma=self.gamma,
                                   n_components=D, random_state=random_state)
            self.phi_.fit(X)
            self.Xb_ = self.phi_.transform(X)
            self.Xb_ = (self.Xb_.reshape((self.Xb_.shape[0],
                                          1, self.Xb_.shape[1])) *
                        self.phi_.random_weights_.reshape((1, -1,
                                                           self.Xb_.shape[1])))
            self.Xb_ = self.Xb_.reshape((-1, self.Xb_.shape[2]))

        D = self.phi_.n_components
        if X is self.Xb_:
            # BUG FIX: ``dot`` was called with a single argument
            # (``dot(self.Xb_ * b)``), which raises TypeError; matvec and
            # rmatvec are plain matrix-vector products.
            return LinearOperator(self.Xb_.shape,
                                  matvec=lambda b: dot(self.Xb_, b),
                                  rmatvec=lambda r: dot(self.Xb_.T, r))
        else:
            Xb = self.phi_.transform(X)
            Xb = (Xb.reshape((Xb.shape[0], 1, Xb.shape[1])) *
                  self.phi_.random_weights_.reshape((1, -1, Xb.shape[1])))
            Xb = Xb.reshape((-1, Xb.shape[2]))
            return LinearOperator(Xb.shape,
                                  matvec=lambda b: dot(Xb, b),
                                  rmatvec=lambda r: dot(Xb.T, r))

    def __call__(self, X, Y=None):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: \begin{cases}
            Y \mapsto K(X, Y) \enskip\text{if } Y \text{is None,} \\
            K(X, Y) \enskip\text{otherwise.}
            \end{cases}

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples1, n_features]
            Samples.

        Y : {array-like, sparse matrix}, shape = [n_samples2, n_features],
            default = None
            Samples.

        Returns
        -------
        K_x : RBFCurlFreeKernelMap, callable or LinearOperator
            The kernel map when ``Y`` is None, else the evaluated operator.
        """
        Kmap = self.get_kernel_map(X)
        if Y is None:
            return Kmap
        else:
            return Kmap(Y)
class RBFDivFreeKernel(object):
    r"""
    Divergence-free Operator-Valued Kernel of the form:

    .. math::
        X \mapsto K_X(Y) = exp(-\gamma||X-Y||^2)A_{X,Y},

    where,

    .. math::
        A_{X,Y} = 2\gamma(X-Y)(X-T)^T+((d-1)-2\gamma||X-Y||^2 I).

    Attributes
    ----------
    gamma : {float}
        RBF kernel parameter.

    See also
    --------
    RBFDivFreeKernelMap
        Divergence-free Kernel map

    Examples
    --------
    >>> import operalib as ovk
    >>> import numpy as np
    >>> X = np.random.randn(100, 2)
    >>> K = ovk.RBFDivFreeKernel(1.)
    >>> # The kernel matrix as a linear operator
    >>> K(X, X)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
    <200x200 _CustomLinearOperator with dtype=float64>
    """

    def __init__(self, gamma):
        """Initialize the divergence-free Operator-Valued Kernel.

        Parameters
        ----------
        gamma : {float}
            RBF kernel parameter.
        """
        self.gamma = gamma

    def get_kernel_map(self, X):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: Y \mapsto K(X, Y)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        K_x : RBFDivFreeKernelMap, callable
            .. math::
                K_x: Y \mapsto K(X, Y).
        """
        from .kernel_maps import RBFDivFreeKernelMap
        return RBFDivFreeKernelMap(X, self.gamma)

    def get_orff_map(self, X, D=100, random_state=0):
        r"""Return the Random Fourier Feature map associated with the data X.

        .. math::
            K_x: Y \mapsto \tilde{\Phi}(X)

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        \tilde{\Phi}(X) : Linear Operator, callable
        """
        self.r = 1
        if not hasattr(self, 'Xb_'):
            # Fit the RFF sampler once and cache the weighted feature basis.
            self.phi_ = RBFSampler(gamma=self.gamma,
                                   n_components=D, random_state=random_state)
            self.phi_.fit(X)
            self.Xb_ = self.phi_.transform(X)
            self.Xb_ = (self.Xb_.reshape((self.Xb_.shape[0],
                                          1, self.Xb_.shape[1])) *
                        self.phi_.random_weights_.reshape((1, -1,
                                                           self.Xb_.shape[1])))
            self.Xb_ = self.Xb_.reshape((-1, self.Xb_.shape[2]))

        D = self.phi_.n_components
        if X is self.Xb_:
            # BUG FIX: ``dot`` was called with a single argument
            # (``dot(self.Xb_ * b)``), which raises TypeError; matvec and
            # rmatvec are plain matrix-vector products.
            return LinearOperator(self.Xb_.shape,
                                  matvec=lambda b: dot(self.Xb_, b),
                                  rmatvec=lambda r: dot(self.Xb_.T, r))
        else:
            Xb = self.phi_.transform(X)
            # TODO:
            # w = self.phi_.random_weights_.reshape((1, -1, Xb.shape[1]))
            # wn = np.linalg.norm(w)
            # Xb = (Xb.reshape((Xb.shape[0], 1, Xb.shape[1])) *
            #       wn * np.eye()w np.dot(w.T, w) / wn)
            Xb = Xb.reshape((-1, Xb.shape[2]))
            return LinearOperator(Xb.shape,
                                  matvec=lambda b: dot(Xb, b),
                                  rmatvec=lambda r: dot(Xb.T, r))

    def __call__(self, X, Y=None):
        r"""Return the kernel map associated with the data X.

        .. math::
            K_x: \begin{cases}
            Y \mapsto K(X, Y) \enskip\text{if } Y \text{is None,} \\
            K(X, Y) \enskip\text{otherwise.}
            \end{cases}

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples1, n_features]
            Samples.

        Y : {array-like, sparse matrix}, shape = [n_samples2, n_features],
            default = None
            Samples.

        Returns
        -------
        K_x : RBFDivFreeKernelMap, callable or LinearOperator
            The kernel map when ``Y`` is None, else the evaluated operator.
        """
        Kmap = self.get_kernel_map(X)
        if Y is None:
            return Kmap
        else:
            return Kmap(Y)
|
import optparse
import parse_deps
import sys
import os
srcdir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))
html_warning_message = """
<!------------------------------------------------------------------------------
WARNING: This file is generated by generate_about_tracing_contents.py
Do not edit directly.
------------------------------------------------------------------------------->
"""
js_warning_message = """/**
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
* WARNING: This file is generated by generate_about_tracing_contents.py
*
* Do not edit directly.
*/
"""
def generate_html():
    """Render about_tracing.html from its template.

    Substitutes a warning banner and one <link> tag per style sheet pulled
    in by the module load sequence rooted at base.js/profiling_view.js.
    Returns the rendered HTML as a string.
    """
    # ``with`` guarantees the template handle is closed even on error.
    with open(os.path.join(srcdir, "about_tracing.html.template"), 'r') as f:
        template = f.read()
    assert template.find("<WARNING_MESSAGE></WARNING_MESSAGE>") != -1
    assert template.find("<STYLE_SHEET_CONTENTS></STYLE_SHEET_CONTENTS>") != -1
    filenames = [os.path.join(srcdir, x)
                 for x in ["base.js", "profiling_view.js"]]
    filenames = [os.path.relpath(x) for x in filenames]
    load_sequence = parse_deps.calc_load_sequence(filenames, srcdir)
    # One stylesheet link per style sheet, in dependency order.
    style_sheet_contents = ""
    for module in load_sequence:
        for style_sheet in module.style_sheets:
            rel_filename = os.path.relpath(style_sheet.filename, srcdir)
            link_tag = """<link rel="stylesheet" href="%s">\n""" % rel_filename
            style_sheet_contents += link_tag
    result = template.replace("<WARNING_MESSAGE></WARNING_MESSAGE>",
                              html_warning_message)
    return result.replace("<STYLE_SHEET_CONTENTS></STYLE_SHEET_CONTENTS>",
                          style_sheet_contents)
def generate_js():
    """Render about_tracing.js from its template.

    Substitutes a warning banner and a script body that declares the
    window.FLATTENED registry followed by one <include> directive per
    module in dependency order. Returns the rendered JS as a string.
    """
    # ``with`` guarantees the template handle is closed even on error.
    with open(os.path.join(srcdir, "about_tracing.js.template"), 'r') as f:
        template = f.read()
    assert template.find("<WARNING_MESSAGE></WARNING_MESSAGE>") != -1
    assert template.find("<SCRIPT_CONTENTS></SCRIPT_CONTENTS>") != -1
    filenames = [os.path.join(srcdir, x)
                 for x in ["base.js", "profiling_view.js"]]
    filenames = [os.path.relpath(x) for x in filenames]
    # (The redundant function-local ``import parse_deps`` was removed; the
    # module is already imported at file scope.)
    load_sequence = parse_deps.calc_load_sequence(filenames, srcdir)
    script_contents = "window.FLATTENED = {};\n"
    for module in load_sequence:
        script_contents += "window.FLATTENED['%s'] = true;\n" % module.name
    for module in load_sequence:
        rel_filename = os.path.relpath(module.filename, srcdir)
        script_contents += """<include src="%s">\n""" % rel_filename
    result = template.replace("<WARNING_MESSAGE></WARNING_MESSAGE>",
                              js_warning_message)
    return result.replace("<SCRIPT_CONTENTS></SCRIPT_CONTENTS>",
                          script_contents)
def is_out_of_date():
    """Return True if either generated about_tracing file is stale.

    Compares the files on disk against freshly rendered output; the html
    check short-circuits before the js one.
    """
    olddir = os.getcwd()
    try:
        # Generation resolves paths relative to srcdir.
        os.chdir(srcdir)
        # ``with`` closes the handles even if generation raises.
        with open(os.path.join(srcdir, "about_tracing.html"), 'r') as f:
            existing_result_html = f.read()
        if generate_html() != existing_result_html:
            return True
        with open(os.path.join(srcdir, "about_tracing.js"), 'r') as f:
            existing_result_js = f.read()
        if generate_js() != existing_result_js:
            return True
    finally:
        os.chdir(olddir)
    return False
def main(args):
    """Regenerate about_tracing.html/.js in srcdir; return an exit code."""
    parser = optparse.OptionParser()
    options, args = parser.parse_args(args)
    olddir = os.getcwd()
    try:
        # Generation resolves paths relative to srcdir.
        os.chdir(srcdir)
        try:
            result_html = generate_html()
        except parse_deps.DepsException, ex:
            # Python 2 except syntax; dependency-parse failures are fatal.
            sys.stderr.write("Error: %s\n\n" % str(ex))
            return 255
        o = open(os.path.join(srcdir, "about_tracing.html"), 'w')
        o.write(result_html)
        o.close()
        result_js = generate_js()
        o = open(os.path.join(srcdir, "about_tracing.js"), 'w')
        o.write(result_js)
        o.close()
    finally:
        os.chdir(olddir)
    return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
"""Test RelVars"""
from __future__ import unicode_literals
from copy import copy
from datetime import date, datetime, timedelta
import pytest
from psycopg2 import DatabaseError, IntegrityError
from pyrseas.relation import RelVar, Attribute
from pyrseas.testutils import RelationTestCase
# Sample rows shared by the relvar tests below.
TEST_DATA1 = {'title': "John Doe"}
TEST_DATA1x = {'id': 2, 'title': "Bob Smith"}
TEST_DATA2 = {'num': 123, 'name': "Name 1", 'id': 1}
TEST_DATA3 = {'id1': 1, 'id2': 2, 'code': 'ES', 'descr': 'Una descripción'}
# rv1: single-attribute key with system-defaulted id and timestamp.
rv1 = RelVar('rv1', [Attribute('id', int, sysdefault=True),
                     Attribute('title'),
                     Attribute('descr', nullable=True),
                     Attribute('updated', datetime, sysdefault=True)],
             key=['id'])
# rv2: non-sequenced key; its 'id' references rv1 in the test DDL.
rv2 = RelVar('rv2', [Attribute('num', int), Attribute('name'),
                     Attribute('id', int)], key=['num'])
# rv3: multi-attribute key (id1, code, id2) with a system-defaulted date.
rv3 = RelVar('rv3', [Attribute('id1', int), Attribute('id2', int),
                     Attribute('code'), Attribute('descr'),
                     Attribute('created', date, sysdefault=True)],
             key=['id1', 'code', 'id2'])
@pytest.fixture
def relvar1(request):
    """Provide the module-level rv1 definition to the plain tuple tests."""
    return rv1
def test_relvar_default_tuple(relvar1):
    "Create a tuple with default (blank) values"
    blank = relvar1.default_tuple()
    assert blank.id == 0
    assert blank.title == ''
def test_relvar_tuple_values(relvar1):
    "Create a tuple based on relvar and passed-in arguments"
    made = relvar1.tuple(**TEST_DATA1)
    assert made.title == TEST_DATA1['title']
    assert made._heading == (('title', str), )
def test_relvar_invalid_attribute(relvar1):
    "Create a tuple based on relvar but with incorrect type"
    # A float is not acceptable for the (string-typed) title attribute.
    with pytest.raises(ValueError):
        relvar1.tuple(title=12.34)
def test_relvar_tuple_missing_required(relvar1):
    "Create a tuple without a required attribute"
    # Only a positional id is supplied; the required title is missing.
    with pytest.raises(ValueError):
        relvar1.tuple(1)
def test_relvar_tuple_unknown_attribute(relvar1):
    "Create a tuple with an unknown attribute"
    # 'code' is not declared on rv1, so tuple construction must fail.
    with pytest.raises(KeyError):
        relvar1.tuple(code='abc')
class TestRelvar1(RelationTestCase):
    """Exercise RelVar CRUD against a table with a sequenced primary key."""
    @pytest.fixture(autouse=True)
    def setup(self):
        # Recreate the backing table before every test for isolation.
        self.relvar = rv1
        self.relvar.connect(self.db)
        self.pgdb.execute("DROP TABLE IF EXISTS rv1 CASCADE")
        self.pgdb.execute_commit(
            "CREATE TABLE rv1 (id serial PRIMARY KEY, "
            "title text NOT NULL UNIQUE, descr text, "
            "updated timestamp with time zone DEFAULT CURRENT_TIMESTAMP)")
    def insert_one(self):
        # Insert TEST_DATA1 via raw SQL, bypassing the RelVar under test.
        self.pgdb.execute_commit("INSERT INTO rv1 (title) VALUES (%(title)s)",
                                 (TEST_DATA1))
    def delete_one(self, id):
        self.pgdb.execute_commit("DELETE FROM rv1 WHERE id = %s", (id,))
    def get_one(self, id):
        # xmin is selected so tests can compare the row's tuple version.
        return self.pgdb.fetchone("SELECT xmin, * FROM rv1 WHERE id = %s",
                                  (id,))
    def test_relvar_insert_one_serial(self):
        "Insert a tuple into a relvar with a sequenced primary key"
        newtuple = self.relvar.tuple(**TEST_DATA1)
        self.relvar.insert_one(newtuple)
        now = datetime.now()
        self.db.commit()
        row = self.get_one(1)
        assert row['title'] == newtuple.title
        # The sysdefault timestamp should be within a second of "now".
        assert (now - row['updated'].replace(tzinfo=None)) < timedelta(0, 1)
    def test_relvar_insert_one_override_pk(self):
        "Insert a tuple but override normal sequenced primary key value"
        data = TEST_DATA1.copy()
        data['id'] = 123
        newtuple = self.relvar.tuple(**data)
        self.relvar.insert_one(newtuple)
        self.db.commit()
        row = self.get_one(123)
        assert row['title'] == newtuple.title
    def test_relvar_insert_one_serial_return_pk(self):
        "Insert a tuple and return the generated primary key value"
        self.insert_one()
        newtuple = self.relvar.tuple(title=TEST_DATA1x['title'])
        retval = self.relvar.insert_one(newtuple, True)
        self.db.commit()
        row = self.get_one(retval.id)
        assert row['title'] == newtuple.title
    def test_relvar_insert_one_nullables(self):
        "Insert a tuple with nullable attributes as blanks"
        data = TEST_DATA1.copy()
        data['descr'] = ''
        newtuple = self.relvar.tuple(**data)
        self.relvar.insert_one(newtuple)
        self.db.commit()
        row = self.get_one(1)
        # A blank nullable attribute is stored as SQL NULL.
        assert row['descr'] is None
    def test_relvar_dup_insert_pk(self):
        "Insert a duplicate by overriding normal sequenced primary key value"
        self.insert_one()
        data = TEST_DATA1.copy()
        data['id'] = 1
        newtuple = self.relvar.tuple(**data)
        with pytest.raises(IntegrityError):
            self.relvar.insert_one(newtuple)
    def test_relvar_dup_insert_alt_key(self):
        "Insert a duplicate on a unique attribute"
        self.insert_one()
        newtuple = self.relvar.tuple(**TEST_DATA1)
        with pytest.raises(IntegrityError):
            self.relvar.insert_one(newtuple)
    def test_relvar_get_one(self):
        "Retrieve a single tuple from a relvar"
        self.insert_one()
        now = datetime.now()
        currtuple = self.relvar.get_one(self.relvar.key_tuple(1))
        assert currtuple.id == 1
        assert currtuple.title == TEST_DATA1['title']
        assert (now - currtuple.updated.replace(tzinfo=None)) < timedelta(0, 1)
    def test_relvar_get_one_fail(self):
        "Fail to retrieve a single tuple from a relvar"
        assert self.relvar.get_one(self.relvar.key_tuple(1)) is None
    def test_relvar_update_one(self):
        "Update a single tuple in a relvar"
        self.insert_one()
        keytuple = self.relvar.key_tuple(1)
        currtuple = self.relvar.get_one(keytuple)
        currtuple.title = "Jane Doe"
        currtuple.updated = datetime.now()
        self.relvar.update_one(currtuple, keytuple)
        self.db.commit()
        row = self.get_one(1)
        assert row['title'] == currtuple.title
        assert row['updated'].replace(tzinfo=None) == currtuple.updated
    def test_relvar_update_one_from_current(self):
        "Update a single tuple from a fetched tuple"
        self.insert_one()
        keytuple = self.relvar.key_tuple(1)
        currtuple = self.relvar.get_one(keytuple)
        newtuple = copy(currtuple)
        newtuple.title = "Jane Doe"
        newtuple.updated = datetime.now()
        self.relvar.update_one(newtuple, keytuple, currtuple)
        self.db.commit()
        row = self.get_one(1)
        assert row['title'] == newtuple.title
        # A real update bumps the row's xmin (tuple version).
        assert row['xmin'] != newtuple._tuple_version
        assert row['updated'].replace(tzinfo=None) == newtuple.updated
    def test_relvar_update_one_no_change(self):
        "Update a single tuple but without really changing anything"
        self.insert_one()
        keytuple = self.relvar.key_tuple(1)
        currtuple = self.relvar.get_one(keytuple)
        newtuple = copy(currtuple)
        newtuple.title = "John Doe"
        self.relvar.update_one(newtuple, keytuple, currtuple)
        self.db.commit()
        row = self.get_one(1)
        assert row['title'] == newtuple.title
        # No-op update: xmin (tuple version) is unchanged.
        assert row['xmin'] == newtuple._tuple_version
    def test_relvar_update_missing(self):
        "Attempt to update a tuple that has been deleted since it was fetched"
        self.insert_one()
        keytuple = self.relvar.key_tuple(1)
        currtuple = self.relvar.get_one(keytuple)
        currtuple.title = "Jane Doe"
        self.delete_one(1)
        with pytest.raises(DatabaseError):
            self.relvar.update_one(currtuple, keytuple)
    def test_relvar_delete_one(self):
        "Delete a single tuple from a relvar"
        self.insert_one()
        keytuple = self.relvar.key_tuple(1)
        currtuple = self.relvar.get_one(keytuple)
        self.relvar.delete_one(keytuple, currtuple)
        self.db.commit()
        assert self.get_one(1) is None
    def test_relvar_delete_missing(self):
        "Attempt to delete a tuple that has been deleted since it was fetched"
        self.insert_one()
        keytuple = self.relvar.key_tuple(1)
        currtuple = self.relvar.get_one(keytuple)
        self.delete_one(1)
        with pytest.raises(DatabaseError):
            # NOTE(review): argument order differs from test_relvar_delete_one
            # above (keytuple, currtuple there) -- confirm delete_one's
            # signature; one of the two calls is likely inverted.
            self.relvar.delete_one(currtuple, keytuple)
class TestRelvar2(RelationTestCase):
    """Exercise RelVar operations on a table with a foreign key into rv1."""
    @pytest.fixture(autouse=True)
    def setup(self):
        # Recreate both tables and seed the referenced rv1 row (id=1).
        self.relvar = rv2
        self.relvar.connect(self.db)
        self.pgdb.execute("DROP TABLE IF EXISTS rv2, rv1 CASCADE")
        self.pgdb.execute("CREATE TABLE rv1 (id integer PRIMARY KEY, "
                          "title text NOT NULL)")
        self.pgdb.execute("CREATE TABLE rv2 (num integer PRIMARY KEY, "
                          "name text NOT NULL, "
                          "id integer NOT NULL REFERENCES rv1 (id))")
        self.pgdb.execute_commit("INSERT INTO rv1 VALUES (1, %(title)s)",
                                 (TEST_DATA1),)
    def insert_one(self):
        # Insert TEST_DATA2 via raw SQL, bypassing the RelVar under test.
        self.pgdb.execute_commit(
            "INSERT INTO rv2 VALUES (%(num)s, %(name)s, %(id)s)",
            (TEST_DATA2),)
    def get_one(self, num):
        # xmin is selected so tests can compare the row's tuple version.
        return self.pgdb.fetchone("SELECT xmin, * FROM rv2 WHERE num = %s",
                                  (num,))
    def test_relvar_insert_fk(self):
        "Insert a tuple into a relvar that references another"
        # This also tests insert into non-sequenced primary key
        newtuple = self.relvar.tuple(**TEST_DATA2)
        self.relvar.insert_one(newtuple)
        self.db.commit()
        row = self.get_one(123)
        assert row['name'] == newtuple.name
        assert row['id'] == newtuple.id
    def test_relvar_dup_insert(self):
        "Insert a duplicate primary key value"
        self.insert_one()
        newtuple = self.relvar.tuple(**TEST_DATA2)
        with pytest.raises(IntegrityError):
            self.relvar.insert_one(newtuple)
    def test_relvar_insert_bad_fk(self):
        "Insert a tuple into a relvar with invalid foreign key"
        data = TEST_DATA2.copy()
        # id=2 does not exist in rv1, so the FK constraint must fire.
        data['id'] = 2
        newtuple = self.relvar.tuple(**data)
        with pytest.raises(IntegrityError):
            self.relvar.insert_one(newtuple)
    def test_relvar_update_key(self):
        "Update a tuple's primary key"
        self.insert_one()
        keytuple = self.relvar.key_tuple(123)
        currtuple = self.relvar.get_one(keytuple)
        currtuple.num = 456
        self.relvar.update_one(currtuple, keytuple)
        self.db.commit()
        row = self.get_one(456)
        assert row['num'] == 456
        assert row['name'] == TEST_DATA2['name']
        assert row['id'] == TEST_DATA2['id']
    def test_relvar_update_fk(self):
        "Update a tuple's foreign key"
        # Add a second rv1 row (id=2) so the FK retarget is valid.
        self.pgdb.execute_commit("INSERT INTO rv1 VALUES (%(id)s, %(title)s)",
                                 (TEST_DATA1x))
        self.insert_one()
        keytuple = self.relvar.key_tuple(123)
        currtuple = self.relvar.get_one(keytuple)
        currtuple.id = 2
        self.relvar.update_one(currtuple, keytuple)
        self.db.commit()
        row = self.pgdb.fetchone(
            "SELECT title FROM rv1 NATURAL JOIN rv2 WHERE num = 123")
        assert row['title'] == TEST_DATA1x['title']
    def test_relvar_update_fk_fail(self):
        "Update a tuple's foreign key to an unknown value"
        # Guard delete: make sure no rv1 row with id=2 survives.
        self.pgdb.execute_commit("DELETE FROM rv1 WHERE id = 2")
        self.insert_one()
        keytuple = self.relvar.key_tuple(123)
        currtuple = self.relvar.get_one(keytuple)
        currtuple.id = 2
        with pytest.raises(IntegrityError):
            self.relvar.update_one(currtuple, keytuple)
class TestRelvar3(RelationTestCase):
    """Exercise RelVar operations on a table with a multi-attribute key."""
    @pytest.fixture(autouse=True)
    def setup(self):
        # Recreate both tables and seed the two rv1 rows rv3 references.
        self.relvar = rv3
        self.relvar.connect(self.db)
        self.pgdb.execute("DROP TABLE IF EXISTS rv3, rv1 CASCADE")
        self.pgdb.execute("CREATE TABLE rv1 (id integer PRIMARY KEY, "
                          "title text NOT NULL)")
        self.pgdb.execute(
            "CREATE TABLE rv3 (id1 integer NOT NULL REFERENCES rv1 (id), "
            "id2 integer NOT NULL REFERENCES rv1 (id), "
            "code char(2) NOT NULL, descr text NOT NULL, "
            "created date NOT NULL DEFAULT CURRENT_DATE, "
            "PRIMARY KEY (id1, code, id2))")
        self.pgdb.execute_commit("INSERT INTO rv1 VALUES (1, %(title)s)",
                                 (TEST_DATA1),)
        self.pgdb.execute_commit("INSERT INTO rv1 VALUES (%(id)s, %(title)s)",
                                 (TEST_DATA1x),)
    def insert_one(self):
        # Insert TEST_DATA3 via raw SQL, bypassing the RelVar under test.
        self.pgdb.execute_commit(
            "INSERT INTO rv3 VALUES (%(id1)s, %(id2)s, %(code)s, %(descr)s)",
            (TEST_DATA3),)
    def get_one(self, data):
        # xmin is selected so tests can compare the row's tuple version.
        return self.pgdb.fetchone(
            "SELECT xmin, * FROM rv3 WHERE id1 = %(id1)s AND "
            "id2 = %(id2)s AND code = %(code)s", (data))
    def test_relvar_key_tuple(self):
        "Create a key tuple with both args and keyword args"
        tup = self.relvar.key_tuple(123, code='EN', id2=456)
        assert tup.id1 == 123
        assert tup.code == 'EN'
        assert tup.id2 == 456
    def test_relvar_insert_multi_key(self):
        "Insert a tuple into a relvar with a multi-attribute key"
        newtuple = self.relvar.tuple(**TEST_DATA3)
        self.relvar.insert_one(newtuple)
        self.db.commit()
        row = self.get_one(TEST_DATA3)
        assert row['id1'] == newtuple.id1
        assert row['code'] == newtuple.code
        assert row['id2'] == newtuple.id2
        assert row['descr'] == newtuple.descr
        # 'created' is filled by the column default.
        assert row['created'] == date.today()
    def test_relvar_update_one(self):
        "Update a tuple in a relvar with a multi-attribute key"
        self.insert_one()
        keytuple = self.relvar.key_tuple(**TEST_DATA3)
        currtuple = self.relvar.get_one(keytuple)
        # Changing 'code' changes part of the primary key itself.
        currtuple.code = 'FR'
        currtuple.descr = "Une description"
        self.relvar.update_one(currtuple, keytuple)
        self.db.commit()
        data = TEST_DATA3.copy()
        data['code'] = currtuple.code
        row = self.get_one(data)
        assert row['id1'] == currtuple.id1
        assert row['code'] == currtuple.code
        assert row['id2'] == currtuple.id2
        assert row['descr'] == currtuple.descr
|
try:
import mpmath as mp
except ImportError:
pass
try:
from sympy.abc import x # type: ignore[import]
except ImportError:
pass
def lagrange_inversion(a):
    """Given a series

    f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),

    use the Lagrange inversion formula to compute a series

    g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)

    so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
    necessarily b[0] = 0 too.

    Returns
    -------
    list of mpmath.mpf
        The coefficients b[0], ..., b[n-1] of the inverse series.

    The algorithm is naive and could be improved, but speed isn't an
    issue here and it's easy to read.
    """
    n = len(a)
    f = sum(a[i]*x**i for i in range(n))
    # h = x/f(x); the k-th inverse coefficient is [x**(k-1)] h**k / k.
    h = (x/f).series(x, 0, n).removeO()
    hpower = [h**0]
    for k in range(n):
        hpower.append((hpower[-1]*h).expand())
    b = [mp.mpf(0)]
    for k in range(1, n):
        b.append(hpower[k].coeff(x, k - 1)/k)
    # BUG FIX: return a concrete list. Under Python 3 the original
    # ``map(...)`` call returned a one-shot iterator, which breaks callers
    # that index the coefficients or iterate them more than once.
    return [mp.mpf(coef) for coef in b]
|
from unittest.mock import patch, Mock
from Crypto.Cipher import AES
from lxml import etree
from federation.protocols.diaspora.encrypted import pkcs7_unpad, EncryptedPayload
from federation.tests.fixtures.keys import get_dummy_private_key
def test_pkcs7_unpad():
    """PKCS#7 padding is stripped from both bytes and str payloads."""
    padded_bytes = b"foobar\x02\x02"
    padded_text = "foobar\x02\x02"
    assert pkcs7_unpad(padded_bytes) == b"foobar"
    assert pkcs7_unpad(padded_text) == "foobar"
class TestEncryptedPayload:
    """Tests for EncryptedPayload decryption (mocked) and round-trip."""
    @patch("federation.protocols.diaspora.encrypted.PKCS1_v1_5.new")
    @patch("federation.protocols.diaspora.encrypted.AES.new")
    @patch("federation.protocols.diaspora.encrypted.pkcs7_unpad", side_effect=lambda x: x)
    @patch("federation.protocols.diaspora.encrypted.b64decode", side_effect=lambda x: x)
    def test_decrypt(self, mock_decode, mock_unpad, mock_aes, mock_pkcs1):
        # RSA decrypt is mocked to hand back the JSON-encoded AES params.
        mock_decrypt = Mock(return_value=b'{"iv": "foo", "key": "bar"}')
        mock_pkcs1.return_value = Mock(decrypt=mock_decrypt)
        # AES decrypt is mocked to yield the plaintext XML envelope.
        mock_encrypter = Mock(return_value="<foo>bar</foo>")
        mock_aes.return_value = Mock(decrypt=mock_encrypter)
        doc = EncryptedPayload.decrypt(
            {"aes_key": '{"iv": "foo", "key": "bar"}', "encrypted_magic_envelope": "magically encrypted"},
            "private_key",
        )
        mock_pkcs1.assert_called_once_with("private_key")
        # NOTE(review): asserts decrypt was called with sentinel=None as a
        # keyword -- confirm this matches how the production code passes it.
        mock_decrypt.assert_called_once_with('{"iv": "foo", "key": "bar"}', sentinel=None)
        assert mock_decode.call_count == 4
        mock_aes.assert_called_once_with("bar", AES.MODE_CBC, "foo")
        mock_encrypter.assert_called_once_with("magically encrypted")
        assert doc.tag == "foo"
        assert doc.text == "bar"
    def test_encrypt(self):
        # Full round trip with a real key: encrypt, then decrypt and compare.
        private_key = get_dummy_private_key()
        public_key = private_key.publickey()
        encrypted = EncryptedPayload.encrypt("<spam>eggs</spam>", public_key)
        assert "aes_key" in encrypted
        assert "encrypted_magic_envelope" in encrypted
        # See we can decrypt it too
        decrypted = EncryptedPayload.decrypt(encrypted, private_key)
        assert etree.tostring(decrypted).decode("utf-8") == "<spam>eggs</spam>"
|
import sys
# Make the vt_manager communication package importable for scripts run from here.
sys.path.append('/opt/ofelia/vt_manager/src/python/vt_manager/communication/')
|
"""
solace.tests.core_views
~~~~~~~~~~~~~~~~~~~~~~~
    Test the core views.
:copyright: (c) 2009 by Plurk Inc., see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import unittest
from solace.tests import SolaceTestCase
from solace import models, settings
from solace.database import session
_link_re = re.compile(r'http://\S+')
class CoreViewsTestCase(SolaceTestCase):
    """Functional tests for login, logout, registration and password reset."""
    def test_login(self):
        """Logging a user in"""
        models.User('THE_USER', 'the.user@example.com', 'default')
        session.commit()
        # Anonymous page must not show the username; after login it must.
        response = self.client.get('/en/')
        self.assert_('THE_USER' not in response.data)
        self.login('THE_USER', 'default')
        response = self.client.get('/en/')
        self.assert_('THE_USER' in response.data)
    def test_logout(self):
        """Logging a user out"""
        models.User('THE_USER', 'the.user@example.com', 'default')
        session.commit()
        self.login('THE_USER', 'default')
        self.logout()
        response = self.client.get('/en/')
        self.assert_('THE_USER' not in response.data)
    def test_register_without_confirm(self):
        """Registering a user without mail confirmation"""
        # With activation disabled, the account is usable immediately.
        settings.REGISTRATION_REQUIRES_ACTIVATION = False
        settings.RECAPTCHA_ENABLE = False
        self.submit_form('/register', {
            'username': 'A_USER',
            'password': 'default',
            'password_repeat': 'default',
            'email': 'a.user@example.com'
        })
        self.login('A_USER', 'default')
        response = self.client.get('/en/')
        self.assert_('A_USER' in response.data)
        user = models.User.query.filter_by(username='A_USER').first()
        self.assertEqual(user.email, 'a.user@example.com')
        self.assertEqual(user.is_active, True)
    def test_register_with_confirm(self):
        """Registering a user with mail confirmation"""
        settings.REGISTRATION_REQUIRES_ACTIVATION = True
        settings.RECAPTCHA_ENABLE = False
        self.submit_form('/register', {
            'username': 'A_USER',
            'password': 'default',
            'password_repeat': 'default',
            'email': 'a.user@example.com'
        })
        # Before activation, login is rejected with a hint.
        response = self.login('A_USER', 'default')
        self.assert_('not yet activated' in response.data)
        # Follow the activation link from the confirmation mail.
        mails = self.get_mails()
        self.assert_(mails)
        for link in _link_re.findall(mails[0].get_payload()):
            if 'activate' in link:
                self.client.get('/' + link.split('/', 3)[-1])
                break
        else:
            self.assert_(False, 'Did not find activation link')
        self.login('A_USER', 'default')
        response = self.client.get('/en/')
        self.assert_('A_USER' in response.data)
        user = models.User.query.filter_by(username='A_USER').first()
        self.assertEqual(user.email, 'a.user@example.com')
        self.assertEqual(user.is_active, True)
    def test_reset_password(self):
        """Reset password."""
        settings.RECAPTCHA_ENABLE = False
        # NOTE(review): `user` is assigned but never used here; the User
        # constructor presumably registers itself with the session -- confirm.
        user = models.User('A_USER', 'a.user@example.com', 'default')
        session.commit()
        self.submit_form('/_reset_password', {
            'username': 'A_USER',
            'email': ''
        })
        # Follow the reset link from the mail and pick up the new password.
        mails = self.get_mails()
        self.assert_(mails)
        for link in _link_re.findall(mails[0].get_payload()):
            if 'reset_password' in link:
                response = self.client.get('/' + link.split('/', 3)[-1])
                break
        else:
            self.assert_(False, 'Did not find password reset link')
        match = re.compile(r'password was reset to <code>(.*?)</code>') \
            .search(response.data)
        self.assert_(match)
        self.login('A_USER', match.group(1))
        response = self.client.get('/en/')
        self.assert_('A_USER' in response.data)
def suite():
    """Bundle the core-view tests into a single unittest suite."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(CoreViewsTestCase))
    return tests
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
'''Helper utilities and decorators.'''
from flask import flash, request, url_for, current_app
from wexplorer.explorer.models import FileUploadPassword
def flash_errors(form, category="warning"):
    '''Flash every validation error attached to *form* under *category*.'''
    for field_name, messages in form.errors.items():
        label = getattr(form, field_name).label.text
        for message in messages:
            flash("{0} - {1}".format(label, message), category)
def url_for_other_page(page):
    '''Build the current endpoint's URL with the page argument replaced.

    Used by pagination templates to link to other pages of the same view.
    '''
    # dict.items() views cannot be concatenated with `+` on Python 3
    # (the original `dict(a.items() + b.items())` raises TypeError there);
    # merge the two mappings explicitly instead.
    args = dict(request.view_args)
    args.update(request.args.to_dict())
    args['page'] = page
    return url_for(request.endpoint, **args)
|
import atmPy.atmos.air as air
from numpy import abs
class TestAir(object):
    """Nose-style generator tests for atmPy's Air: viscosity, density, mean free path."""
    def __init__(self):
        self.a = air.Air()
        # Reference dynamic viscosities (Pa*s) at temperatures in deg C.
        self.mu_vals = {'T': [-5, 0, 10, 15, 25],
                        'mu': [1.7105007E-5, 1.7362065e-5, 1.7869785E-5, 1.8120528E-5, 1.861598E-5]
                        }
        # Reference densities over a grid of T (deg C), P (hPa) and RH (%).
        self.rho_vals = {'T': [0, 0, 0, 0,
                               0, 0, 0, 0,
                               0, 0, 0, 0,
                               25, 25, 25, 25,
                               25, 25, 25, 25,
                               25, 25, 25, 25],
                         'P': [200, 200, 200, 200,
                               800, 800, 800, 800,
                               1000, 1000, 1000, 1000,
                               200, 200, 200, 200,
                               800, 800, 800, 800,
                               1000, 1000, 1000, 1000],
                         'RH': [25, 50, 75, 90,
                                25, 50, 75, 90,
                                25, 50, 75, 90,
                                25, 50, 75, 90,
                                25, 50, 75, 90,
                                25, 50, 75, 90],
                         'rho': [0.254, 0.254, 0.253, 0.252,
                                 1.020, 1.019, 1.018, 1.018,
                                 1.275, 1.274, 1.273, 1.273,
                                 0.230, 0.227, 0.223, 0.221,
                                 0.931, 0.928, 0.924, 0.922,
                                 1.165, 1.161, 1.158, 1.156
                                 ]
                         }
    def test_muvals(self):
        # Generator test: yields one viscosity check per reference temperature.
        print('========= Testing Dynamic Viscocity Calculations =========')
        print('   T        mu   ')
        print('=======  ========')
        for e, i in enumerate(self.mu_vals['T']):
            yield self.check_mu, i, self.mu_vals['mu'][e], 1e-3
    def test_rhos(self):
        # Generator test: yields one density check per (T, P, RH) reference point.
        print('========= Testing Density Calculations =========')
        print('   T        P        RH        rho  ')
        print('=======  =======  ========  =========')
        for e, i in enumerate(self.rho_vals['rho']):
            yield self.check_rho, {'T': self.rho_vals['T'][e],
                                   'P': self.rho_vals['P'][e],
                                   'RH': self.rho_vals['RH'][e]}, i, 0.2
    @staticmethod
    def check_mu(t, val, tol):
        # Relative error of mu at temperature t (fixed 850 hPa) must be under tol.
        a = air.Air(t, 850)
        print(a.t, a.mu(), abs((val - a.mu()) / val))
        assert abs((val - a.mu()) / val) < tol
    @staticmethod
    def check_rho(atm, val, tol):
        # Relative error of rho at the given T/P/RH must be under tol.
        kwargs = {"rh": atm['RH']}
        a = air.Air(atm['T'], atm['P'], **kwargs)
        print(a.t, a.p, atm['RH'], val, a.rho(), abs((val - a.rho()) / val))
        assert abs((val - a.rho()) / val) < tol
    @staticmethod
    def test_mfp():
        """
        Test the calculation of the mean free path against the standard using the
        mean free path at standard conditions (0.066 um; from Hand 1999, p 21)
        """
        p = [100, 500, 800, 1000]
        print('')
        print('========= Testing MEAN FREE PATH Calculations =========')
        print('   P       mfp       mfp       r   ')
        print('=======  =========  =========  =======')
        for i in p:
            # Expected mfp scales inversely with pressure from the 1013.25 hPa value.
            ltest = 0.066 / i * 1013.25 / 1e6
            a = air.Air(20, i)
            print(i, a.l(), ltest, abs(ltest - a.l()) / ltest)
            assert abs(ltest - a.l()) / ltest < 0.01
        print(' ')
|
import os
import subprocess
import sys
from config import LIBCHROMIUMCONTENT_COMMIT, BASE_URL, DIST_ARCH
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
    """Entry point: run gyp from the repository root."""
    os.chdir(SOURCE_ROOT)
    update_gyp()
def update_gyp():
    """Regenerate the ninja build files via brightray's bundled gyp.

    Exits the process with gyp's status code if generation fails.
    """
    gyp_script = os.path.join('vendor', 'brightray', 'vendor', 'gyp', 'gyp_main.py')
    interpreter = sys.executable
    if sys.platform == 'darwin':
        # Only have 64bit build on OS X.
        target_arch = 'x64'
    elif sys.platform in ['cygwin', 'win32']:
        # Only have 32bit build on Windows.
        target_arch = 'ia32'
        if sys.platform == 'cygwin':
            # Force using win32 python on cygwin.
            interpreter = os.path.join('vendor', 'python_26', 'python.exe')
    else:
        target_arch = DIST_ARCH
    status = subprocess.call([
        interpreter, gyp_script,
        '-f', 'ninja', '--depth', '.', 'thrust_shell.gyp',
        '-Icommon.gypi', '-Ivendor/brightray/brightray.gypi',
        '-Dtarget_arch={0}'.format(target_arch),
    ])
    if status != 0:
        sys.exit(status)
# Script entry point; main() returns None, so this exits with status 0 unless
# update_gyp() already exited with gyp's failure code.
if __name__ == '__main__':
    sys.exit(main())
|
import unittest
import os
from pymatgen.analysis.ferroelectricity.polarization import *
from pymatgen.core.structure import Structure
from pymatgen.io.vasp.outputs import Outcar
from pymatgen.io.vasp.inputs import Potcar
from pymatgen.util.testing import PymatgenTest
import numpy as np
# Directory with the BaTiO3 2x2x1 polarization test files, relative to this module.
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
                        'test_files/BTO_221_99_polarization')
# Calculation folders ordered along the nonpolar -> polar interpolation path.
bto_folders = ['nonpolar_polarization']
bto_folders += ['interpolation_{}_polarization'.format(str(i)) for i in range(1,9)][::-1]
bto_folders += ['polar_polarization']
# One structure per interpolation step (POSCAR files read at import time).
structures = [Structure.from_file(test_dir+"/"+folder+"/POSCAR") for folder in bto_folders]
# Reference ionic dipole moments, one row per interpolation step.
ions = np.array([[-44.363484, -44.363484, -44.79259988],
                 [-44.324764, -44.324764, -69.43452043],
                 [-44.286055, -44.286055, -69.8792077 ],
                 [-44.247335, -44.247335, -70.32475473],
                 [-44.208626, -44.208626, -70.77139856],
                 [-44.169906, -44.169906, -71.21889307],
                 [-44.131197, -44.131197, -71.66746921],
                 [-44.092477, -44.092477, -72.1168782 ],
                 [-44.053768, -44.053768, -72.56736141],
                 [-44.015048, -44.015048, -73.01874336]])
class UtilsTest(PymatgenTest):
    """Unit tests for the polarization module's helper functions."""

    def setUp(self):
        # Reference data: POTCAR-derived valences plus the module-level
        # BTO structures and ionic dipoles.
        self.potcar = Potcar.from_file(test_dir + "/POTCAR")
        self.zval_dict = {'Ba': 10.0, 'Ti': 10.0, 'O': 6.0}
        self.ions = ions
        self.structures = structures

    def test_zval_dict_from_potcar(self):
        """ZVAL mapping read from the POTCAR matches the reference dict."""
        zval_dict = zval_dict_from_potcar(self.potcar)
        self.assertDictEqual(self.zval_dict, zval_dict)

    def test_get_total_ionic_dipole(self):
        """Ionic dipole of the polar structure matches the last reference row."""
        p_ion = get_total_ionic_dipole(self.structures[-1], self.zval_dict)
        expected = self.ions[-1].ravel().tolist()
        self.assertArrayAlmostEqual(p_ion, expected)
class PolarizationTest(PymatgenTest):
    """Tests for Polarization: same-branch recovery, lattice quanta, change norms."""

    def setUp(self):
        # Reference ionic dipoles computed via calc_ionic_from_zval (module-level).
        self.p_ions = ions
        # Ionic dipoles as reported directly in the OUTCAR files.
        self.p_ions_outcar = np.array([[0.0, 0.0, 43.93437],
                                       [0.0, 0.0, 19.81697],
                                       [0.0, 0.0, 19.76076],
                                       [0.0, 0.0, 19.70306],
                                       [0.0, 0.0, 19.64372],
                                       [0.0, 0.0, -5.06619],
                                       [0.0, 0.0, -5.18997],
                                       [0.0, 0.0, -5.31457],
                                       [0.0, 0.0, -5.44026],
                                       [0.0, 0.0, -5.56684]])
        # Electronic contributions from the Berry-phase calculations.
        self.p_elecs = np.array([[4.03304, -4.03304, -3.60393],
                                 [4.02958, 4.02958, -3.77177],
                                 [4.02611, 4.02611, -3.93397],
                                 [4.02264, 4.02263, -4.08851],
                                 [4.01916, 4.01914, 3.99662],
                                 [4.01567, 4.01565, 3.90327],
                                 [4.01217, 4.01214, 3.81998],
                                 [4.00867, 4.00863, 3.74561],
                                 [4.00517, 4.00512, 3.67949],
                                 [0.00024, 0.00019, 3.61674]])
        # Expected same-branch polarization (muC/cm^2), per-step quanta.
        self.same_branch = np.array([[ 9.76948106e-05, -9.76948108e-05, 4.59556390e-05],
                                     [ -1.36325612e-03, -1.36325612e-03, 5.99098550e+00],
                                     [ -2.54781559e-03, -2.54781559e-03, 1.18312234e+01],
                                     [ -3.74896442e-03, -3.50709575e-03, 1.74695147e+01],
                                     [ -4.67728039e-03, -4.19508654e-03, 2.28288548e+01],
                                     [ -5.38348125e-03, -4.90281328e-03, 2.79488973e+01],
                                     [ -5.82178137e-03, -5.10304293e-03, 3.28220345e+01],
                                     [ -6.28132190e-03, -5.32598777e-03, 3.74721262e+01],
                                     [ -6.71430111e-03, -5.52382219e-03, 4.19231297e+01],
                                     [ -5.69679257e-03, -4.50996078e-03, 4.62887982e+01]])
        # Same, with every step using the polar structure's quantum.
        self.same_branch_all_in_polar = np.array([[ 9.42008214e-05, -9.42008221e-05, 4.66464162e-05],
                                                  [-1.31996355e-03, -1.31996355e-03, 6.07559443e+00],
                                                  [-2.47709492e-03, -2.47709492e-03, 1.19773649e+01],
                                                  [-3.65986380e-03, -3.42374356e-03, 1.76543900e+01],
                                                  [-4.58474364e-03, -4.11208966e-03, 2.30300987e+01],
                                                  [-5.29836518e-03, -4.82529686e-03, 2.81459084e+01],
                                                  [-5.75282166e-03, -5.04259676e-03, 3.29954877e+01],
                                                  [-6.23177258e-03, -5.28397447e-03, 3.76040802e+01],
                                                  [-6.68784798e-03, -5.50205933e-03, 4.19969216e+01],
                                                  [-5.69679257e-03, -4.50996079e-03, 4.62887981e+01]])
        # Expected polarization lattice quanta per step (muC/cm^2).
        self.quanta = np.array([[ 98.50186747, 98.50186747, 98.50186747],
                                [ 98.09416498, 98.09416498, 98.67403571],
                                [ 97.69065056, 97.69065056, 98.84660662],
                                [ 97.29131054, 97.29131054, 99.01967988],
                                [ 96.89603543, 96.89603543, 99.19315873],
                                [ 96.50481368, 96.50481368, 99.36714337],
                                [ 96.11753848, 96.11753848, 99.54153654],
                                [ 95.7342003 , 95.7342003 , 99.71643897],
                                [ 95.35469487, 95.35469487, 99.89175289],
                                [ 94.97901455, 94.97901455, 100.06757957]])
        self.structures = structures
        # We do not use the p_ions values from Outcar.
        # We calculate using calc_ionic_from_zval because it is more reliable.
        self.polarization = Polarization(self.p_elecs, self.p_ions, self.structures)
        self.outcars = [Outcar(test_dir+"/"+folder+"/OUTCAR") for folder in bto_folders]
        # Expected polar-minus-nonpolar polarization change and its norm.
        self.change = np.array([[ -5.79448738e-03, -4.41226597e-03, 4.62887522e+01]])
        self.change_norm = 46.288752795325244
        # Spline diagnostics for the same-branch data.
        self.max_jumps = [0.00021336004941047062, 0.00016254800426403291, 0.038269946959965086]
        self.smoothness = [0.00017013512377086267, 0.00013467465540412905, 0.034856268571937743]
        self.max_jumps_all_in_polar = [0.0002131725432761777, 0.00016247151626362123, 0.03857992184016461]
        self.smoothness_all_in_polar = [0.00016974252210569685, 0.0001343093739679674, 0.03504919463271141]
        # Decimal places for comparisons that only agree approximately.
        self.decimal_tol = 5

    def test_from_outcars_and_structures(self):
        """Constructing from OUTCARs yields the expected elec/ionic terms."""
        polarization = Polarization.from_outcars_and_structures(self.outcars, self.structures)
        p_elecs, p_ions = polarization.get_pelecs_and_pions(convert_to_muC_per_cm2=False)
        self.assertArrayAlmostEqual(p_elecs[0].ravel().tolist(), self.p_elecs[0].ravel().tolist())
        self.assertArrayAlmostEqual(p_elecs[-1].ravel().tolist(), self.p_elecs[-1].ravel().tolist())
        self.assertArrayAlmostEqual(p_ions[0].ravel().tolist(), self.p_ions_outcar[0].ravel().tolist())
        self.assertArrayAlmostEqual(p_ions[-1].ravel().tolist(), self.p_ions_outcar[-1].ravel().tolist())
        # Test for calc_ionic_from_zval=True
        polarization = Polarization.from_outcars_and_structures(self.outcars, self.structures,
                                                                calc_ionic_from_zval=True)
        p_elecs, p_ions = polarization.get_pelecs_and_pions(convert_to_muC_per_cm2=False)
        self.assertArrayAlmostEqual(p_elecs[0].ravel().tolist(), self.p_elecs[0].ravel().tolist())
        self.assertArrayAlmostEqual(p_elecs[-1].ravel().tolist(), self.p_elecs[-1].ravel().tolist())
        self.assertArrayAlmostEqual(p_ions[0].ravel().tolist(), self.p_ions[0].ravel().tolist())
        self.assertArrayAlmostEqual(p_ions[-1].ravel().tolist(), self.p_ions[-1].ravel().tolist())

    def test_get_same_branch_polarization_data(self):
        """Same-branch adjustment matches references for both quantum choices."""
        same_branch = self.polarization.get_same_branch_polarization_data(convert_to_muC_per_cm2=True, all_in_polar=False)
        self.assertArrayAlmostEqual(same_branch[0].ravel().tolist(), self.same_branch[0].ravel().tolist())
        self.assertArrayAlmostEqual(same_branch[1].ravel().tolist(), self.same_branch[1].ravel().tolist())
        self.assertArrayAlmostEqual(same_branch[3].ravel().tolist(), self.same_branch[3].ravel().tolist())
        self.assertArrayAlmostEqual(same_branch[-1].ravel().tolist(), self.same_branch[-1].ravel().tolist())
        # This will differ only slightly
        same_branch = self.polarization.get_same_branch_polarization_data(convert_to_muC_per_cm2=True, all_in_polar=True)
        self.assertArrayAlmostEqual(same_branch[0].ravel().tolist(), self.same_branch_all_in_polar[0].ravel().tolist())
        self.assertArrayAlmostEqual(same_branch[1].ravel().tolist(), self.same_branch_all_in_polar[1].ravel().tolist())
        self.assertArrayAlmostEqual(same_branch[3].ravel().tolist(), self.same_branch_all_in_polar[3].ravel().tolist())
        self.assertArrayAlmostEqual(same_branch[-1].ravel().tolist(), self.same_branch_all_in_polar[-1].ravel().tolist())

    def test_get_lattice_quanta(self):
        """Per-step lattice quanta match the references."""
        quanta = self.polarization.get_lattice_quanta(convert_to_muC_per_cm2=True, all_in_polar=False)
        self.assertArrayAlmostEqual(quanta[0].ravel().tolist(), self.quanta[0].ravel().tolist())
        self.assertArrayAlmostEqual(quanta[-1].ravel().tolist(), self.quanta[-1].ravel().tolist())
        # For all_in_polar=True, quanta should be identical to polar quantum
        quanta = self.polarization.get_lattice_quanta(convert_to_muC_per_cm2=True, all_in_polar=True)
        self.assertArrayAlmostEqual(quanta[0].ravel().tolist(), self.quanta[-1].ravel().tolist())
        self.assertArrayAlmostEqual(quanta[-1].ravel().tolist(), self.quanta[-1].ravel().tolist())

    def test_get_polarization_change(self):
        """Polarization change matches the reference vector."""
        change = self.polarization.get_polarization_change(convert_to_muC_per_cm2=True, all_in_polar=False)
        self.assertArrayAlmostEqual(change, self.change)
        # Because nonpolar polarization is (0, 0, 0), all_in_polar should have no effect on polarization change
        change = self.polarization.get_polarization_change(convert_to_muC_per_cm2=True, all_in_polar=True)
        # No change up to 5 decimal
        self.assertArrayAlmostEqual(change, self.change, self.decimal_tol)

    def test_get_polarization_change_norm(self):
        """Norm of the polarization change matches the reference scalar."""
        change_norm = self.polarization.get_polarization_change_norm(convert_to_muC_per_cm2=True, all_in_polar=False)
        self.assertAlmostEqual(change_norm, self.change_norm)
        # Because nonpolar polarization is (0, 0, 0), all_in_polar should have no effect on polarization change norm.
        # (The original recomputed get_polarization_change here instead of the
        # norm, so the all_in_polar norm was never actually checked.)
        change_norm = self.polarization.get_polarization_change_norm(convert_to_muC_per_cm2=True, all_in_polar=True)
        # No change up to 5 decimal
        self.assertAlmostEqual(change_norm, self.change_norm, self.decimal_tol)

    def test_max_spline_jumps(self):
        """Max spline jumps match the references for both quantum choices."""
        max_jumps = self.polarization.max_spline_jumps(convert_to_muC_per_cm2=True, all_in_polar=False)
        self.assertArrayAlmostEqual(self.max_jumps, max_jumps)
        # This will differ slightly
        max_jumps = self.polarization.max_spline_jumps(convert_to_muC_per_cm2=True, all_in_polar=True)
        self.assertArrayAlmostEqual(self.max_jumps_all_in_polar, max_jumps)

    def test_smoothness(self):
        """Smoothness metrics match the references for both quantum choices."""
        smoothness = self.polarization.smoothness(convert_to_muC_per_cm2=True, all_in_polar=False)
        self.assertArrayAlmostEqual(self.smoothness, smoothness)
        # This will differ slightly
        smoothness = self.polarization.smoothness(convert_to_muC_per_cm2=True, all_in_polar=True)
        self.assertArrayAlmostEqual(self.smoothness_all_in_polar, smoothness)
class EnergyTrendTest(PymatgenTest):
    """Tests for the EnergyTrend smoothness and spline-jump diagnostics."""

    def setUp(self):
        # Total energies along the nonpolar -> polar interpolation path.
        self.energies = [
            -7.97738049,
            -7.988621176,
            -7.9793246479999995,
            -7.987973192,
            -7.984676138,
            -7.982700144000001,
            -7.986539788,
            -7.980859048000001,
            -7.978240114,
            -7.977637218,
        ]
        self.energy_trend = EnergyTrend(self.energies)
        # Reference diagnostic values for the series above.
        self.smoothness = 0.0029874731764648306
        self.max_jump = 0.0058893082867133018

    def test_max_spline_jump(self):
        """Largest deviation from the fitted spline matches the reference."""
        self.assertAlmostEqual(self.energy_trend.max_spline_jump(), self.max_jump)

    def test_smoothness(self):
        """Spline smoothness metric matches the reference value."""
        self.assertAlmostEqual(self.energy_trend.smoothness(), self.smoothness)

    def test_endpoints_minima(self):
        """Both endpoints sit in energy minima for a 1e-2 slope cutoff."""
        endpoints = self.energy_trend.endpoints_minima(slope_cutoff=1e-2)
        self.assertDictEqual({'polar': True, 'nonpolar': True},
                             endpoints)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
import unittest
from .testcase import BulbsTestCase
from bulbs.model import Node, NodeProxy, Relationship, RelationshipProxy
from bulbs.property import Integer, String, DateTime, Bool
from bulbs.utils import current_datetime
class Knows(Relationship):
    """A "knows" edge between people, stamped with its creation time."""
    label = "knows"
    timestamp = DateTime(default=current_datetime)
class Person(Node):
    """A person vertex; name is required, age and adulthood are optional."""
    element_type = "person"
    name = String(nullable=False)
    age = Integer()
    is_adult = Bool()
class NodeTestCase(BulbsTestCase):
    """Tests for node creation, retrieval and indexing via NodeProxy."""
    def setUp(self):
        # Fresh proxy backed by the "person" vertex index for every test.
        indices = self.vertex_index_proxy(self.index_class,self.client)
        self.people = NodeProxy(Person,self.client)
        self.people.index = indices.get_or_create("person")
        self.james = self.people.create(name="James", age=34, is_adult=True)
    def test_properties(self):
        #assert type(self.james.eid) == int
        assert self.james.element_type == "person"
        assert self.james.name == "James"
        assert self.james.age == 34
        assert self.james.is_adult is True
    def test_get(self):
        person = self.people.get(self.james.eid)
        assert person == self.james
    def test_get_all(self):
        # More than one person because every test's setUp creates another James.
        people = self.people.get_all()
        assert len(list(people)) > 1
    def test_index_name(self):
        index_name = self.people.index.index_name
        assert index_name == "person"
    # Will this work for automatic indices?
    #def test_index_put_and_get(self):
    #    must test put/get together b/c self.james gets reset every time
    #    self.people.index.put(self.james.eid,age=self.james.age)
    #    james = self.people.index.get_unique("age",'34')
    #    assert self.james == james
        #Person.remove(self.james.eid,dict(age="34"))
class RelationshipTestCase(BulbsTestCase):
    """Tests for creating a "knows" edge between two person vertices."""
    def setUp(self):
        # Separate vertex and edge index proxies back the two element proxies.
        indicesV = self.vertex_index_proxy(self.index_class,self.client)
        indicesE = self.edge_index_proxy(self.index_class,self.client)
        self.people = NodeProxy(Person,self.client)
        self.people.index = indicesV.get_or_create("people")
        self.knows = RelationshipProxy(Knows,self.client)
        self.knows.index = indicesE.get_or_create("knows")
        self.james = self.people.create(name="James", age=34)
        self.julie = self.people.create(name="Julie", age=28)
    def test_properties(self):
        # Edge must carry the right label and connect james -> julie.
        self.relationship = self.knows.create(self.james,self.julie)
        assert self.relationship._label == "knows"
        assert self.relationship.outV()._id == self.james.eid
        assert self.relationship.inV()._id == self.julie.eid
def suite():
    """Bundle the node and relationship tests into one unittest suite."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(NodeTestCase))
    tests.addTest(unittest.makeSuite(RelationshipTestCase))
    return tests
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
from rx import Observable
from rx.internal import extensionmethod
@extensionmethod(Observable, name="slice")
def slice_(self, start=None, stop=None, step=1):
    """Slices the given observable. It is basically a wrapper around the
    operators skip(), skip_last(), take(), take_last() and filter().

    This marble diagram helps you remember how slices works with streams.
    Positive numbers is relative to the start of the events, while negative
    numbers are relative to the end (on_completed) of the stream.

    r---e---a---c---t---i---v---e---|
    0   1   2   3   4   5   6   7   8
   -8  -7  -6  -5  -4  -3  -2  -1

    Example:
    result = source.slice(1, 10)
    result = source.slice(1, -2)
    result = source.slice(1, -1, 2)

    Keyword arguments:
    :param Observable self: Observable to slice
    :param int start: Number of elements to skip or take last
    :param int stop: Last element to take or skip last
    :param int step: Takes every step element. Must be larger than zero
    :returns: Returns a sliced observable sequence.
    :rtype: Observable
    """
    source = self
    # Negative start counts from the end (take_last); positive skips from the front.
    if start is not None:
        if start < 0:
            source = source.take_last(abs(start))
        else:
            source = source.skip(start)
    # Positive stop takes up to (stop - start) elements; negative drops from the end.
    if stop is not None:
        if stop > 0:
            # Treat a None/negative-consumed start as 0 for the take count.
            start = start or 0
            source = source.take(stop - start)
        else:
            source = source.skip_last(abs(stop))
    if step is not None:
        if step > 1:
            # Keep every step-th element, counting from the first.
            source = source.filter(lambda x, i: i % step == 0)
        elif step < 0:
            # Reversing events is not supported
            raise TypeError("Negative step not supported.")
    return source
@extensionmethod(Observable)
def __getitem__(self, key):
    """Slices the given observable using Python slice notation. The
    arguments to slice is start, stop and step given within brackets [] and
    separated with the ':' character. It is basically a wrapper around the
    operators skip(), skip_last(), take(), take_last() and filter().

    This marble diagram helps you remember how slices works with streams.
    Positive numbers is relative to the start of the events, while negative
    numbers are relative to the end (on_completed) of the stream.

    r---e---a---c---t---i---v---e---|
    0   1   2   3   4   5   6   7   8
   -8  -7  -6  -5  -4  -3  -2  -1

    Example:
    result = source[1:10]
    result = source[1:-2]
    result = source[1:-1:2]

    Keyword arguments:
    :param Observable self: Observable to slice
    :param slice key: Slice object
    :returns: A sliced observable sequence.
    :rtype: Observable
    :raises TypeError: If key is not of type int or slice
    """
    # Normalize an integer index to the equivalent one-element slice.
    if isinstance(key, int):
        key = slice(key, key + 1, 1)
    if not isinstance(key, slice):
        raise TypeError("Invalid argument type.")
    return self.slice(key.start, key.stop, key.step)
|
"""
Holds the controlfields plugin.
"""
__author__ = 'Jonny Lamb'
__copyright__ = ', '.join([
'Copyright © 2008 Jonny Lamb',
'Copyright © 2010 Jan Dittberner',
'Copyright © 2012 Nicolas Dandrimont',
])
__license__ = 'MIT'
from debian import deb822
import logging
from debexpo.lib import constants
from debexpo.plugins import BasePlugin
log = logging.getLogger(__name__)
fields = ['Homepage', 'Vcs-Browser', 'Vcs-Git', 'Vcs-Svn', 'Vcs-Bzr', 'Vcs-Hg']
class ControlFieldsPlugin(BasePlugin):
    """Debexpo QA plugin that reports optional debian/control fields."""

    def test_control_fields(self):
        """
        Checks whether additional debian/control fields are present.
        """
        log.debug('Checking whether additional debian/control fields are present')
        try:
            # open() instead of the Python 2-only `file` builtin.
            dsc = deb822.Dsc(open(self.changes.get_dsc()))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; any IO/parse failure remains best-effort.
            log.critical('Could not open dsc file; skipping plugin')
            return
        # Collect whichever of the known optional fields are present.
        data = {}
        severity = constants.PLUGIN_SEVERITY_WARNING
        outcome = "No Homepage field present"
        for item in fields:
            if item in dsc:
                data[item] = dsc[item]
        # A Homepage field downgrades the finding to informational.
        if "Homepage" in data:
            severity = constants.PLUGIN_SEVERITY_INFO
            if len(data) > 1:
                outcome = "Homepage and VCS control fields present"
            else:
                outcome = "Homepage control field present"
        self.failed(outcome, data, severity)
# Entry point used by the debexpo plugin loader.
plugin = ControlFieldsPlugin
|
import re
import numpy as np
import netCDF4
import sys
import pdb
outfile = sys.argv[1]

# Parse time/temperature records from the Kenai River data file. The first
# two lines are headers; salinity is fixed at zero (fresh water).
ttime = []
temp_data = []
salt_data = []
with open('../KenaiRiverTemps.dat', 'r') as f:
    f.readline()
    f.readline()
    for line in f:
        # Leading whitespace yields an empty first field from re.split.
        nul, a, b, c = re.split(r'\s+', line)
        ttime.append(float(a))
        temp_data.append(float(b))
        salt_data.append(0.0)

# Append the river tracer variables to the existing NetCDF file.
out = netCDF4.Dataset(outfile, 'a', format='NETCDF3_64BIT')
out.createDimension('river_tracer_time', len(ttime))
times = out.createVariable('river_tracer_time', 'f8', ('river_tracer_time'))
times.units = 'day'
times.cycle_length = 365.25
times.long_name = 'river tracer time'
# Distinct names for the Variable handles: the original rebound `temp` and
# `salt` to the netCDF Variable objects, clobbering the parsed data lists
# before the final assignments below ever saw them.
temp_var = out.createVariable('river_temp', 'f8', ('river_tracer_time'))
temp_var.long_name = 'river runoff potential temperature'
temp_var.units = 'Celsius'
temp_var.time = 'river_tracer_time'
salt_var = out.createVariable('river_salt', 'f8', ('river_tracer_time'))
salt_var.long_name = 'river runoff salinity'
salt_var.time = 'river_tracer_time'
out.variables['river_tracer_time'][:] = ttime
out.variables['river_temp'][:] = temp_data
out.variables['river_salt'][:] = salt_data
out.close()
|
from __future__ import unicode_literals, print_function
import imaplib, poplib
from frappe.utils import cint
def get_port(doc):
	"""Return the incoming mail port for *doc* as an int, filling in the
	protocol's default (IMAP vs POP3, SSL vs plain) when none is set."""
	if not doc.incoming_port:
		if doc.use_imap:
			port = imaplib.IMAP4_SSL_PORT if doc.use_ssl else imaplib.IMAP4_PORT
		else:
			port = poplib.POP3_SSL_PORT if doc.use_ssl else poplib.POP3_PORT
		doc.incoming_port = port
	return cint(doc.incoming_port)
|
__author__ = 'rochelle'
|
"""
Created on Tue Dec 15 09:51:21 2015
@author: dan
"""
import pandas as pd
from helper import *
from scipy.optimize import curve_fit
# Restrict growth conditions to the Schmidt et al. 2015 BW25113 data set.
conditions = pd.DataFrame.from_csv('../data/growth_conditions.csv')
conditions = conditions[conditions['reference']=='Schmidt et al. 2015']
conditions = conditions[conditions['strain']=='BW25113']
# Protein copy numbers per fL, restricted to the selected conditions.
copies_fL = pd.DataFrame.from_csv('../data/meta_abundance[copies_fL].csv')
copies_fL = copies_fL[conditions.index]
# Ribosomal genes and their per-complex stoichiometry (most appear once).
ribosome_genes = genes_by_function('Ribosome')
morethanone = {'b3985':2, 'b3986':4}
ribosome_stoichiometry = {g:morethanone[g] if g in morethanone else 1 for g in ribosome_genes}
ribosome_stoichiometry = pd.DataFrame.from_dict(ribosome_stoichiometry.items()).set_index(0)
del ribosome_stoichiometry.index.name
ribosome_stoichiometry = ribosome_stoichiometry[1]
# Stoichiometry-weighted abundance gives the number of assembled complexes.
ribosomal_genes_abundance = copies_fL.loc[ribosome_genes] # copies_fL
ribosomal_genes_weighted_abundance = ribosomal_genes_abundance.mul(ribosome_stoichiometry,axis=0).dropna()
ribosome_complex_abundance = (ribosomal_genes_weighted_abundance.sum() /
                              ribosome_stoichiometry[ribosomal_genes_weighted_abundance.index].sum()) #copies_fL
x = conditions['growth rate [h-1]']
y = ribosome_complex_abundance/1000
# Gene lengths parsed from fixed-width columns of the gene table:
# chars 0-5 hold the b-number, 75-84 the length -- TODO confirm file layout.
b_to_length = {row[0:5]:int(row[75:84]) for row in open("../data/all_ecoli_genes.txt", 'r')}
gene_length = pd.DataFrame.from_dict(b_to_length.items()).set_index(0)
gene_length = gene_length[1]
# Total amino acids per fL; v is the synthesis flux in aa/fL/s.
aa_abundance = copies_fL.mul(gene_length,axis=0).sum() #aa_fL
v = aa_abundance * x / 3600 #aa_fL_s
# NOTE(review): computed but unused below (name looks like a typo of
# k_ribosome) -- confirm whether it should be plotted or dropped.
k_robosome = v/ribosome_complex_abundance
# Scatter plot of ribosome abundance vs growth rate with a linear fit.
# `plt` presumably comes from helper's wildcard import -- verify.
plt.figure(figsize=(6,6))
fontsize=15
ax = plt.axes()
plt.scatter(x, y,s=50,c='r',edgecolor='')
# curve_fit passes xdata as the first argument, so with f(a, b, x) the fitted
# parameters are (b, x) = (intercept, slope); the unpacking below matches.
popt, pcov = curve_fit(lambda a,b,x: a*x+b, x, y)
intercept, slope = popt
plt.plot(x, x*slope+intercept)
ax.set_xlim(0)
ax.set_ylim(0)
ax.set_xlabel(r'growth rate $\left[ h^{-1} \right]$', size=fontsize)
ax.set_ylabel(r'ribosome abudance $\left[ \frac{copies}{fL} \times 10^3\right]$', size=fontsize)
[tick.label.set_fontsize(fontsize) for tick in ax.xaxis.get_major_ticks()]
[tick.label.set_fontsize(fontsize) for tick in ax.yaxis.get_major_ticks()]
plt.tight_layout()
plt.savefig('../res/ribosome abundance by growth rate.pdf')
|
"""
* Palindrome Index (Python)
* HackerRank Algorithm Challenge
* https://www.hackerrank.com/challenges/two-strings
*
* michael@softwareontheshore.com
*
"""
# Sample inputs: the two newline-separated lines are the strings to compare.
testA = 'hello\nworld'
testB = 'hi\nworld'
def testTwoStrings(input):
input = input.split('\n')
textA = input[0]
textB = input[1]
# remove duplicates
textA = ''.join(set(textA))
textB = ''.join(set(textB))
for char in textA:
if char in textB: return 'YES'
return 'NO'
# Run the sample and show the verdict. print(...) is valid on both
# Python 2 and 3, unlike the original `print result` statement.
result = testTwoStrings(testB)
print(result)
|
from flask import request
from werkzeug.exceptions import NotFound
from indico.modules.attachments.models.attachments import Attachment, AttachmentType
from indico.modules.attachments.models.folders import AttachmentFolder
class SpecificAttachmentMixin:
    """Mixin for RHs that reference a specific attachment."""

    # URL normalization spec; the lambdas are evaluated against the RH
    # instance after `_process_args` has set `self.attachment`.
    normalize_url_spec = {
        'args': {
            'folder_id': lambda self: self.attachment.folder_id,
            # Link attachments have no filename; 'go' is the placeholder segment.
            'filename': lambda self: (self.attachment.file.filename if self.attachment.type == AttachmentType.file
                                      else 'go')
        },
        'locators': {
            lambda self: self.attachment.folder.object
        },
        'preserved_args': {'attachment_id'}
    }

    def _process_args(self):
        # `.one()` raises if no live attachment matches the URL's id.
        self.attachment = Attachment.query.filter_by(id=request.view_args['attachment_id'], is_deleted=False).one()
        # A live attachment inside a deleted folder is treated as missing.
        if self.attachment.folder.is_deleted:
            raise NotFound
class SpecificFolderMixin:
    """Mixin for RHs that reference a specific folder."""

    # URL normalization spec; the lambda is evaluated against the RH instance
    # after `_process_args` has set `self.folder`.
    normalize_url_spec = {
        'locators': {
            lambda self: self.folder.object
        },
        'preserved_args': {'folder_id'}
    }

    def _process_args(self):
        # `.one()` raises if no live folder matches the URL's id.
        self.folder = AttachmentFolder.query.filter_by(id=request.view_args['folder_id'], is_deleted=False).one()
|
"""
Unit tests for easyconfig/parser.py
@author: Stijn De Weirdt (Ghent University)
"""
import os
import sys
from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered
from unittest import TextTestRunner
from vsc.utils.fancylogger import setLogLevelDebug, logToScreen
import easybuild.tools.build_log
from easybuild.framework.easyconfig.format.format import Dependency
from easybuild.framework.easyconfig.format.pyheaderconfigobj import build_easyconfig_constants_dict
from easybuild.framework.easyconfig.format.version import EasyVersion
from easybuild.framework.easyconfig.parser import EasyConfigParser
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import read_file
# Base directory of the test easyconfig files, relative to this test module.
TESTDIRBASE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
class EasyConfigParserTest(EnhancedTestCase):
"""Test the parser"""
    def test_v10(self):
        """Test parsing of an easyconfig in format v1.0."""
        ecp = EasyConfigParser(os.path.join(TESTDIRBASE, 'v1.0', 'g', 'GCC', 'GCC-4.6.3.eb'))
        self.assertEqual(ecp._formatter.VERSION, EasyVersion('1.0'))
        ec = ecp.get_config_dict()
        self.assertEqual(ec['toolchain'], {'name': 'dummy', 'version': 'dummy'})
        self.assertEqual(ec['name'], 'GCC')
        self.assertEqual(ec['version'], '4.6.3')
    def test_v20(self):
        """Test parsing of easyconfig in format v2."""
        # hard enable experimental
        orig_experimental = easybuild.tools.build_log.EXPERIMENTAL
        easybuild.tools.build_log.EXPERIMENTAL = True
        fn = os.path.join(TESTDIRBASE, 'v2.0', 'GCC.eb')
        ecp = EasyConfigParser(fn)
        formatter = ecp._formatter
        self.assertEqual(formatter.VERSION, EasyVersion('2.0'))
        # In format v2, version/toolchain live in dedicated sections rather
        # than the pyheader local variables.
        self.assertTrue('name' in formatter.pyheader_localvars)
        self.assertFalse('version' in formatter.pyheader_localvars)
        self.assertFalse('toolchain' in formatter.pyheader_localvars)
        # this should be ok: ie the default values
        ec = ecp.get_config_dict()
        self.assertEqual(ec['toolchain'], {'name': 'dummy', 'version': 'dummy'})
        self.assertEqual(ec['name'], 'GCC')
        self.assertEqual(ec['version'], '4.6.2')
        # restore
        easybuild.tools.build_log.EXPERIMENTAL = orig_experimental
    def test_v20_extra(self):
        """Test parsing of easyconfig in format v2."""
        # hard enable experimental
        orig_experimental = easybuild.tools.build_log.EXPERIMENTAL
        easybuild.tools.build_log.EXPERIMENTAL = True
        # Software name in the file ('doesnotexist') has no matching easyblock.
        fn = os.path.join(TESTDIRBASE, 'v2.0', 'doesnotexist.eb')
        ecp = EasyConfigParser(fn)
        formatter = ecp._formatter
        self.assertEqual(formatter.VERSION, EasyVersion('2.0'))
        self.assertTrue('name' in formatter.pyheader_localvars)
        self.assertFalse('version' in formatter.pyheader_localvars)
        self.assertFalse('toolchain' in formatter.pyheader_localvars)
        # restore
        easybuild.tools.build_log.EXPERIMENTAL = orig_experimental
def test_v20_deps(self):
"""Test parsing of easyconfig in format v2 that includes dependencies."""
# hard enable experimental
orig_experimental = easybuild.tools.build_log.EXPERIMENTAL
easybuild.tools.build_log.EXPERIMENTAL = True
fn = os.path.join(TESTDIRBASE, 'v2.0', 'libpng.eb')
ecp = EasyConfigParser(fn)
ec = ecp.get_config_dict()
self.assertEqual(ec['name'], 'libpng')
# first version/toolchain listed is default
self.assertEqual(ec['version'], '1.5.10')
self.assertEqual(ec['toolchain'], {'name': 'goolf', 'version': '1.4.10'})
# dependencies should be parsed correctly
deps = ec['dependencies']
self.assertTrue(isinstance(deps[0], Dependency))
self.assertEqual(deps[0].name(), 'zlib')
self.assertEqual(deps[0].version(), '1.2.5')
fn = os.path.join(TESTDIRBASE, 'v2.0', 'goolf.eb')
ecp = EasyConfigParser(fn)
ec = ecp.get_config_dict()
self.assertEqual(ec['name'], 'goolf')
self.assertEqual(ec['version'], '1.4.10')
self.assertEqual(ec['toolchain'], {'name': 'dummy', 'version': 'dummy'})
# dependencies should be parsed correctly
deps = [
# name, version, versionsuffix, toolchain
('GCC', '4.7.2', None, None),
('OpenMPI', '1.6.4', None, {'name': 'GCC', 'version': '4.7.2'}),
('OpenBLAS', '0.2.6', '-LAPACK-3.4.2', {'name': 'gompi', 'version': '1.4.10'}),
('FFTW', '3.3.3', None, {'name': 'gompi', 'version': '1.4.10'}),
('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2', {'name': 'gompi', 'version': '1.4.10'}),
]
for i, (name, version, versionsuffix, toolchain) in enumerate(deps):
self.assertEqual(ec['dependencies'][i].name(), name)
self.assertEqual(ec['dependencies'][i].version(), version)
self.assertEqual(ec['dependencies'][i].versionsuffix(), versionsuffix)
self.assertEqual(ec['dependencies'][i].toolchain(), toolchain)
# restore
easybuild.tools.build_log.EXPERIMENTAL = orig_experimental
def test_raw(self):
"""Test passing of raw contents to EasyConfigParser."""
ec_file1 = os.path.join(TESTDIRBASE, 'v1.0', 'g', 'GCC', 'GCC-4.6.3.eb')
ec_txt1 = read_file(ec_file1)
ec_file2 = os.path.join(TESTDIRBASE, 'v1.0', 'g', 'gzip', 'gzip-1.5-goolf-1.4.10.eb')
ec_txt2 = read_file(ec_file2)
ecparser = EasyConfigParser(ec_file1)
self.assertEqual(ecparser.rawcontent, ec_txt1)
ecparser = EasyConfigParser(rawcontent=ec_txt2)
self.assertEqual(ecparser.rawcontent, ec_txt2)
# rawcontent supersedes passed filepath
ecparser = EasyConfigParser(ec_file1, rawcontent=ec_txt2)
self.assertEqual(ecparser.rawcontent, ec_txt2)
ec = ecparser.get_config_dict()
self.assertEqual(ec['name'], 'gzip')
self.assertEqual(ec['toolchain']['name'], 'goolf')
self.assertErrorRegex(EasyBuildError, "Neither filename nor rawcontent provided", EasyConfigParser)
def test_easyconfig_constants(self):
"""Test available easyconfig constants."""
constants = build_easyconfig_constants_dict()
# make sure both keys and values are only strings
for constant_name in constants:
self.assertTrue(isinstance(constant_name, basestring), "Constant name %s is a string" % constant_name)
val = constants[constant_name]
self.assertTrue(isinstance(val, basestring), "Constant value %s is a string" % val)
# check a couple of randomly picked constant values
self.assertEqual(constants['SOURCE_TAR_GZ'], '%(name)s-%(version)s.tar.gz')
self.assertEqual(constants['PYPI_SOURCE'], 'https://pypi.python.org/packages/source/%(nameletter)s/%(name)s')
self.assertEqual(constants['GPLv2'], 'LicenseGPLv2')
self.assertEqual(constants['EXTERNAL_MODULE'], 'EXTERNAL_MODULE')
def test_check_value_types(self):
"""Test checking of easyconfig parameter value types."""
test_ec = os.path.join(TESTDIRBASE, 'test_ecs', 'g', 'gzip', 'gzip-1.4-broken.eb')
error_msg_pattern = "Type checking of easyconfig parameter values failed: .*'version'.*"
ecp = EasyConfigParser(test_ec, auto_convert_value_types=False)
self.assertErrorRegex(EasyBuildError, error_msg_pattern, ecp.get_config_dict)
# test default behaviour: auto-converting of mismatched value types
ecp = EasyConfigParser(test_ec)
ecdict = ecp.get_config_dict()
self.assertEqual(ecdict['version'], '1.4')
def suite():
    """Return all the test cases defined in this module, filtered by argv."""
    loader = TestLoaderFiltered()
    return loader.loadTestsFromTestCase(EasyConfigParserTest, sys.argv[1:])
if __name__ == '__main__':
    # Uncomment the next two lines for verbose on-screen debug logging:
    # logToScreen(enable=True)
    # setLogLevelDebug()
    TextTestRunner(verbosity=1).run(suite())
|
"""
Python's standard logging isn't super-intelligent and won't expose filehandles,
which we want. So we're not using it.
Copyright 2009, Red Hat, Inc and Others
Michael DeHaan <michael.dehaan AT gmail>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import time
import os
ERROR = "ERROR"
WARNING = "WARNING"
DEBUG = "DEBUG"
INFO = "INFO"
class Logger:
def __init__(self, logfile="/var/log/cobbler/cobbler.log"):
self.logfile = None
# Main logfile is append mode, other logfiles not.
if not os.path.exists(logfile) and os.path.exists(os.path.dirname(logfile)):
self.logfile = open(logfile, "a")
self.logfile.close()
try:
self.logfile = open(logfile, "a")
except IOError:
# You likely don't have write access, this logger will just print
# things to stdout.
pass
def warning(self, msg):
self.__write(WARNING, msg)
def error(self, msg):
self.__write(ERROR, msg)
def debug(self, msg):
self.__write(DEBUG, msg)
def info(self, msg):
self.__write(INFO, msg)
def flat(self, msg):
self.__write(None, msg)
def __write(self, level, msg):
if level is not None:
msg = "%s - %s | %s" % (time.asctime(), level, msg)
if self.logfile is not None:
self.logfile.write(msg)
self.logfile.write("\n")
self.logfile.flush()
else:
print(msg)
def handle(self):
return self.logfile
def close(self):
self.logfile.close()
|
import unittest
import serial
import struct
import sys
import StringIO
import time
from collections import namedtuple
sys.path.append('..')
import esptool, espefuse
# Shared serial connection to the device under test; assigned in __main__.
# (A module-level 'global' statement is a no-op; kept to signal intent.)
global serialport
serialport = None
class EspEfuseArgs(object):
    """Stand-in for the argparse namespace that espefuse commands receive,
    pre-populated with the defaults these tests rely on."""

    def __init__(self):
        # Skip interactive confirmation by default; individual tests flip
        # the remaining flags/values as needed.
        self.do_not_confirm = True
        self.no_protect_key = self.force_write_always = False
        self.voltage = self.block = self.keyfile = None
class EfuseTestCase(unittest.TestCase):
    """Base case: reset the attached chip and verify every efuse reads zero
    before each test runs."""
    def setUp(self):
        # reset and zero efuses
        # DTR/RTS pulse sequence resets the chip; 50 ms assumed sufficient
        # for the attached test rig -- TODO confirm against the hardware setup
        serialport.dtr = False
        serialport.rts = True
        time.sleep(0.05)
        serialport.rts = False
        time.sleep(0.05)
        serialport.dtr = True
        # connect & verify efuses are really zero
        self.esp = esptool.ESP32ROM(serialport)
        self.esp.connect('no_reset') # takes ~7 seconds
        self.efuses = espefuse.EspEfuses(self.esp)
        # Check every efuse is zero (~1 second)
        for efuse in self.efuses:
            val = efuse.get_raw()
            BAD_EFUSE_MSG = "Efuse %s not all zeroes - either this is a real ESP32 chip (VERY BAD, read top of file), or the reset is not erasing all efuses correctly." % efuse.register_name
            try:
                self.assertEqual(b'\x00'*len(val), val, BAD_EFUSE_MSG)
            except TypeError:
                # get_raw() apparently returns an int for some efuses
                # (len() fails) -- compare against 0 in that case
                self.assertEqual(0, val, BAD_EFUSE_MSG)
    def _set_34_coding_scheme(self):
        """Burn CODING_SCHEME=1 (3/4 coding) and reload the efuse table."""
        self.efuses["CODING_SCHEME"].burn(1)
        # EspEfuses constructor needs to re-load CODING_SCHEME
        self.efuses = espefuse.EspEfuses(self.esp)
class TestBurnKey(EfuseTestCase):
    """Tests for espefuse 'burn_key' under both coding schemes."""
    def test_burn_key_no_coding_scheme(self):
        # 256-bit key matches the key-block size with no coding scheme
        key_256bit = b"".join(chr(x+1) for x in range(32))
        self._test_burn_key_common(key_256bit, b"\x00"*32)
    def test_burn_key_34_coding_scheme(self):
        self._set_34_coding_scheme()
        # 3/4 coding scheme: key blocks hold 192 bits
        key_192bit = b"".join(chr(x+0xAA) for x in range(24))
        self._test_burn_key_common(key_192bit, b"\x00"*24)
    def _test_burn_key_common(self, new_key, empty_key):
        # Burning key common routine, works in both coding schemes
        args = EspEfuseArgs()
        args.keyfile = StringIO.StringIO(new_key)
        args.do_not_confirm = True
        burn_params = (self.esp, self.efuses, args)
        # Burn BLK1 with no protection
        args.block = "BLK1"
        args.no_protect_key = True
        espefuse.burn_key(*burn_params)
        key_val = self.efuses["BLK1"].get_key()
        self.assertEqual(new_key, key_val)
        # Burn BLK2 and read/write protect
        args.no_protect_key = False
        args.block = "BLK2"
        espefuse.burn_key(*burn_params)
        key_val = self.efuses["BLK2"].get_key()
        # read-protected block is expected to read back as all zeroes
        self.assertEqual(empty_key, key_val)
        # Try to burn BLK1 again, will fail as not empty
        with self.assertRaises(esptool.FatalError) as fail:
            args.block = "BLK1"
            espefuse.burn_key(*burn_params)
        self.assertIn("already", str(fail.exception))
        # Try to burn BLK2 again, will fail as protected
        with self.assertRaises(esptool.FatalError) as fail:
            args.block = "BLK2"
            espefuse.burn_key(*burn_params)
        self.assertIn("already", str(fail.exception))
        # Force BLK1 to be burned again (and read protect this time)
        args.force_write_always = True
        args.block = "BLK1"
        espefuse.burn_key(*burn_params)
        key_val = self.efuses["BLK1"].get_key()
        self.assertEqual(empty_key, key_val)
        self.assertEqual(0, self.efuses.get_coding_scheme_warnings())
class TestBurnBlockData(EfuseTestCase):
    """Tests for espefuse 'burn_block_data' under both coding schemes."""
    def test_burn_block_data_normal(self):
        word_a = 0x1234
        word_b = 0x789A
        data = struct.pack("<II", word_a, word_b)
        args = EspEfuseArgs()
        args.do_not_confirm = True
        args.block = 'BLK1'
        args.datafile = StringIO.StringIO(data)
        args.offset = 4
        burn_params = (self.esp, self.efuses, args)
        espefuse.burn_block_data(*burn_params)
        words = self.efuses["BLK1"].get_words()
        # 4-byte offset: the two packed words land in words 1 and 2
        self.assertEqual([0, word_a, word_b, 0, 0, 0, 0, 0], words)
        args.offset = 24
        args.force_write_always = True
        args.datafile = StringIO.StringIO(data)
        espefuse.burn_block_data(*burn_params)
        words = self.efuses["BLK1"].get_words()
        self.assertEqual([0, word_a, word_b, 0, 0, 0, word_a, word_b], words)
        self.assertEqual(0, self.efuses.get_coding_scheme_warnings())
    def test_burn_block_data_34_coding(self):
        self._set_34_coding_scheme()
        data = b"1234EA"
        args = EspEfuseArgs()
        args.do_not_confirm = True
        args.force_write_always = True
        args.block = 'BLK3'
        args.datafile = StringIO.StringIO(data)
        args.offset = 6
        burn_params = (self.esp, self.efuses, args)
        espefuse.burn_block_data(*burn_params)
        words = self.efuses["BLK3"].get_words()
        # offset 6 is not word-aligned: the 6 data bytes straddle words 1-2
        self.assertEqual([0,
                          struct.unpack("<H", "12")[0] << 16,
                          struct.unpack("<I", "34EA")[0],
                          0,
                          0,
                          0], words)
        args.offset = 12
        args.datafile = StringIO.StringIO(data)
        espefuse.burn_block_data(*burn_params)
        words = self.efuses["BLK3"].get_words()
        self.assertEqual([0,
                          struct.unpack("<H", "12")[0] << 16,
                          struct.unpack("<I", "34EA")[0],
                          struct.unpack("<I", "1234")[0],
                          struct.unpack("<H", "EA")[0],
                          0], words)
        self.assertEqual(0, self.efuses.get_coding_scheme_warnings())
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Usage: %s <serial port> [optional tests]" % sys.argv[0])
        sys.exit(1)
    serialport = serial.Serial(sys.argv[1], 115200)
    # Hold DTR/RTS low until setUp() performs the reset sequence.
    serialport.dtr = False
    serialport.rts = False
    # unittest also uses argv, so trim the args we used
    sys.argv = [ sys.argv[0] ] + sys.argv[2:]
    print("Running espefuse.py tests...")
    unittest.main(buffer=True)
|
from unidecode import unidecode
from django.template.defaultfilters import slugify as django_slugify
from django.utils import crypto
def slugify(string):
    """Transliterate *string* to ASCII and slugify it, treating underscores
    as word separators."""
    ascii_text = unidecode(unicode(string))
    return django_slugify(ascii_text.replace('_', ' '))
def random_string(length):
    """Return a cryptographically random alphanumeric string of *length* chars."""
    alphanumerics = "1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM"
    return crypto.get_random_string(length, alphanumerics)
def short_string(string, length=16):
    """Shorten *string* to roughly *length* characters on a word boundary.

    Strings of *length* characters or fewer are returned unchanged;
    otherwise the string is truncated (reserving room for the '...'
    suffix), any trailing word fragment shorter than 3 characters is
    dropped, and '...' is appended.
    """
    if len(string) <= length:
        return string
    # Reserve 3 characters for the '...' suffix.
    string = string[0:length - 3]
    bits = string.split()
    # Drop a too-short trailing fragment. Bug fix: guard against an empty
    # split result (e.g. all-whitespace input), which previously raised
    # IndexError on bits[-1].
    if bits and len(bits[-1]) < 3:
        bits.pop()
    return '%s...' % (' '.join(bits))
def html_escape(html):
    """Escape the four HTML-special characters in *html*.

    '&' is replaced first so already-produced entities are not re-escaped.
    """
    replacements = (
        ('&', '&amp;'),
        ('<', '&lt;'),
        ('>', '&gt;'),
        ('"', '&quot;'),
    )
    for char, entity in replacements:
        html = html.replace(char, entity)
    return html
|
"""
Simple shim for mspms development.
"""
import msprime.cli
# Delegate directly to the installed mspms CLI entry point.
if __name__ == "__main__":
    msprime.cli.mspms_main()
|
import numpy as np
from hyperspy.components1d import EELSArctan
def test_function2():
    """Check EELSArctan evaluation below, at, and far above the onset x0."""
    g = EELSArctan()
    g.A.value = 10
    g.k.value = 2
    g.x0.value = 1
    np.testing.assert_allclose(g.function(0), 4.63647609)
    # at x0 the expected value is A*pi/2
    np.testing.assert_allclose(g.function(1), 10*np.pi/2)
    # far above x0 the expected value approaches A*pi; the third positional
    # argument (1e-4) is assert_allclose's rtol
    np.testing.assert_allclose(g.function(1e4), 10*np.pi,1e-4)
|
from django.db import models
from django.contrib.auth.models import User
from django.forms import ValidationError
import datetime
# Year choices: the 8 most recent years, current year included.
YEAR_CHOICES = [
    (y, y)
    for y in range((datetime.datetime.now().year - 7), (datetime.datetime.now().year + 1))
]
# Gender options (Indonesian labels).
SEX_CHOICES = (
    ('Laki-laki', 'Laki-laki'),
    ('Perempuan', 'Perempuan'),
)
# Active / inactive status flags.
STATUS_CHOICES = (
    (0, 'Tidak Aktif'),
    (1, 'Aktif'),
)
# Banner ordering slots.
URUTAN_CHOICES = (
    (1, 1),
    (2, 2),
    (3, 3)
)
class Agama(models.Model):
    """Religion lookup table."""
    nama = models.CharField('Nama', max_length = 100)
    aktif = models.BooleanField('Ya/Tidak', help_text= 'Centang jika ingin mengaktifkan')
    def __unicode__(self):
        return self.nama
    class Meta:
        db_table = 'agama'
        verbose_name_plural = u'Agama'
class Fakultas(models.Model):
    """Faculty master data."""
    kode_fakultas = models.CharField('Kode Fakultas', max_length=10, unique = True)
    nama_fakultas = models.CharField('Nama Fakultas', max_length=100)
    def __unicode__(self):
        return self.nama_fakultas
    class Meta:
        db_table = 'fakultas'
        verbose_name_plural = 'Fakultas'
class Jurusan(models.Model):
    """Department, belonging to a faculty."""
    kode_jurusan = models.CharField('Kode Jurusan', max_length=10, unique = True)
    nama_jurusan = models.CharField('Nama Jurusan', max_length=100)
    fakultas = models.ForeignKey(Fakultas, verbose_name='Fakultas')
    jenjang = models.CharField('Jenjang', max_length=5)
    def __unicode__(self):
        return self.nama_jurusan
    class Meta:
        db_table = 'tbjurusan'
        verbose_name_plural = 'Jurusan'
class RegisterAkademik(models.Model):
    """Academic year record; ordered newest first."""
    tahun_akademik = models.CharField('Tahun Akademik', max_length = 5)
    aktif = models.BooleanField('Aktif', help_text= 'Centang jika ingin mengaktifkan tahun akademik')
    def __unicode__(self):
        return self.tahun_akademik
    class Meta:
        db_table = 'regakademik'
        ordering = ('-tahun_akademik',)
class ProgramStudi(models.Model):
    """Study program master data."""
    kode_prodi = models.CharField('Kode Prodi', max_length=10, unique = True)
    nama_prodi = models.CharField('Nama Prodi', max_length=100)
    def __unicode__(self):
        return self.kode_prodi
    class Meta:
        db_table = 'tbprodi'
        verbose_name_plural = 'Program Studi'
class Dosen(models.Model):
    """Lecturer profile, tied to a Django auth User."""
    nama = models.CharField('Nama', max_length = 100)
    user = models.ForeignKey(User, unique=True)  # unique FK: effectively one-to-one
    tanggal_lahir = models.DateField('Tanggal Lahir', help_text = 'Format Tanggal: YYYY-MM-DD')
    jenis_kelamin = models.CharField('Jenis Kelamin', max_length = 10, choices=SEX_CHOICES, default='Laki-laki')
    agama = models.ForeignKey(Agama, verbose_name='Agama')
    alamat = models.CharField('Alamat', max_length = 100)
    email = models.CharField('Email', max_length = 100)
    telepon = models.CharField('Telepon', max_length = 100)
    keterangan = models.CharField('Keterangan', max_length = 100)
    image = models.ImageField(u'Photo', upload_to = 'dosen-photos')
    aktif = models.BooleanField('Ya/Tidak', help_text= 'Centang jika ingin mengaktifkan')
    def __unicode__(self):
        return self.nama
    class Meta:
        db_table = 'dosen'
        verbose_name_plural = u'Dosen'
class Mahasiswa(models.Model):
    """Student profile, tied to a Django auth User and academic structure."""
    nim = models.CharField('NIM', max_length = 20, unique = True)
    nama = models.CharField('Nama', max_length = 80)
    user = models.ForeignKey(User, unique=True)  # unique FK: effectively one-to-one
    jenis_kelamin = models.CharField('Jenis Kelamin', max_length = 10, choices=SEX_CHOICES, default='Laki-laki')
    agama = models.ForeignKey(Agama, verbose_name='Agama')
    tanggal_lahir = models.DateField('Tanggal Lahir', help_text = 'Format Tanggal: YYYY-MM-DD')
    alamat = models.CharField('Alamat', max_length = 100)
    telepon = models.CharField('Telepon', max_length = 100)
    email = models.CharField('Email', max_length = 100)
    fakultas = models.ForeignKey(Fakultas, verbose_name='Fakultas')
    jurusan = models.ForeignKey(Jurusan, verbose_name='Jurusan')
    prodi = models.ForeignKey(ProgramStudi, verbose_name='Program Studi')
    tahun_akademik = models.ForeignKey(RegisterAkademik, verbose_name='Angkatan')
    dosen = models.ForeignKey(Dosen, verbose_name='Dosen')
    # NOTE(review): max_length is not a valid IntegerField option (Django
    # ignores it) -- confirm whether a DecimalField was intended for IPK
    ipk = models.IntegerField('IPK', max_length = 3)
    #angkatan = models.IntegerField('Angkatan', max_length=4, choices=YEAR_CHOICES, default=datetime.datetime.now().year)
    image = models.ImageField(u'Photo', upload_to = 'mahasiswa-photos')
    aktif = models.BooleanField('Ya/Tidak', help_text= 'Centang jika ingin mengaktifkan')
    def __unicode__(self):
        return ' '.join([
            self.nim,
            self.nama,
        ])
        #return self.nim + " - " + self.nama
    class Meta:
        db_table = 'tbmahasiswa'
        verbose_name_plural = u'Mahasiswa'
class BahanAjar(models.Model):
    """Uploaded course material with download bookkeeping."""
    nama = models.CharField('Nama', max_length = 80)
    # NOTE(review): the verbose names below ('Alamat', 'Tanggal Lahir') look
    # copy-pasted from other models -- confirm the intended labels
    user_upload = models.CharField('Alamat', max_length = 180)
    tanggal_upload = models.DateField('Tanggal Lahir', help_text = 'Format Tanggal: YYYY-MM-DD')
    jumlah_download = models.IntegerField('Jumlah Download')
    status = models.BooleanField('Status', choices=STATUS_CHOICES, default=0)
    image = models.ImageField(u'File', upload_to = 'gallery-upload')
    def __unicode__(self):
        # Bug fix: this model has no 'nim' field, so the previous
        # 'return self.nim' raised AttributeError whenever the object was
        # rendered (e.g. in the admin). Use the name field instead.
        return self.nama
    class Meta:
        db_table = 'tbbahan_ajar'
class MataKuliah(models.Model):
    """Course (mata kuliah) offered within a study program."""
    prodi = models.ForeignKey(ProgramStudi, verbose_name='Program Studi')
    kode_mtk = models.CharField('Kode Mata Kuliah', max_length = 30, unique = True)
    nama_mtk = models.CharField('Nama Mata Kuliah', max_length = 50)
    semester = models.CharField('Semester', max_length = 2)
    # NOTE(review): max_length is not a valid IntegerField option (ignored)
    sks = models.IntegerField('SKS', max_length = 2)
    fakultas = models.ForeignKey(Fakultas, verbose_name='Fakultas')
    jurusan = models.ForeignKey(Jurusan, verbose_name='Jurusan')
    def __unicode__(self):
        return self.nama_mtk
    class Meta:
        db_table = 'tbmatakuliah'
        verbose_name_plural = 'Mata Kuliah'
class NamaHari(models.Model):
    """Weekday name lookup table."""
    nama_hari = models.CharField('Nama Hari', max_length=10, unique = True)
    def __unicode__(self):
        return self.nama_hari
    class Meta:
        db_table = 'tbhari'
        verbose_name_plural = 'Nama Hari'
class Jadwal(models.Model):
    """Weekly class schedule entry, including mid-term (UTS) and final
    (UAS) exam slots."""
    tahun_akademik = models.ForeignKey(RegisterAkademik, verbose_name='Tahun Akademik')
    prodi = models.ForeignKey(ProgramStudi, verbose_name='Program Studi')
    mata_kuliah = models.ForeignKey(MataKuliah, verbose_name='Mata Kuliah')
    jurusan = models.ForeignKey(Jurusan, verbose_name='Jurusan')
    ruang = models.CharField('Ruang', max_length=10)
    kelas = models.CharField('Kelas', max_length=5)
    dosen = models.ForeignKey(Dosen, verbose_name='Dosen')
    hari = models.ForeignKey(NamaHari, verbose_name='Hari')
    jam_mulai = models.TimeField('Jam Mulai')
    jam_selesai = models.TimeField('Jam Selesai')
    # mid-term exam (UTS) slot
    uts_tanggal = models.DateField('Tanggal UTS')
    uts_mulai = models.TimeField('Jam Mulai UTS')
    uts_selesai = models.TimeField('Jam Selesai UTS')
    uts_ruang = models.CharField('Ruangan UTS', max_length=10)
    # final exam (UAS) slot
    uas_tanggal = models.DateField('Tanggal UAS')
    uas_mulai = models.TimeField('Jam Mulai UAS')
    uas_selesai = models.TimeField('Jam Selesai UAS')
    uas_ruang = models.CharField('Ruangan UAS', max_length=10)
    def __unicode__(self):
        return self.mata_kuliah.nama_mtk
    def jadwal_matkul(self):
        # convenience accessor for admin list displays
        return self.mata_kuliah.nama_mtk
    class Meta:
        db_table = 'tbjadwal'
        verbose_name_plural = 'Jadwal'
class RegisterMahasiswa(models.Model):
    """Student enrollment record for an academic year."""
    tahun_akademik = models.ForeignKey(RegisterAkademik, verbose_name='Tahun Akademik')
    nim = models.ForeignKey(Mahasiswa, verbose_name='Mahasiswa')
    tanggal_register = models.DateField('Tanggal Register')
    aktif = models.BooleanField('Ya/Tidak', help_text= 'Centang jika ingin mengaktifkan')
    class Meta:
        db_table = 'tbregister_mahasiswa'
        verbose_name_plural = 'Register Mahasiswa'
class Krs(models.Model):
    """Study plan (KRS) entry holding a student's grades for one scheduled
    course."""
    mahasiswa = models.ForeignKey(Mahasiswa, verbose_name='Mahasiswa', null=True)
    tahun_akademik = models.ForeignKey(RegisterAkademik, verbose_name='Tahun Akademik', null=True)
    jadwal = models.ForeignKey(Jadwal, verbose_name='Mata Kuliah', null=True)
    # NOTE(review): max_length is not a valid IntegerField option (ignored)
    tugas1 = models.IntegerField('Tugas 1', max_length=3, blank=True, null=True)
    tugas2 = models.IntegerField('Tugas 2', max_length=3, blank=True, null=True)
    tugas3 = models.IntegerField('Tugas 3', max_length=3, blank=True, null=True)
    tugas4 = models.IntegerField('Tugas 4', max_length=3, blank=True, null=True)
    nilai_mid = models.IntegerField('Nilai MID', max_length=3, blank=True, null=True)
    nilai_uas = models.IntegerField('Nilai UAS', max_length=3, blank=True, null=True)
    def nama_mahasiswa(self):
        # convenience accessor for admin list displays
        return self.mahasiswa.nama
    def nama_mata_kuliah(self):
        # convenience accessor for admin list displays
        return self.jadwal.mata_kuliah.nama_mtk
    class Meta:
        db_table = 'tbkrs'
        verbose_name_plural = 'KRS'
class Banner(models.Model):
    """Site banner image with a display-order slot."""
    nama = models.CharField('Nama', max_length = 50)
    urutan = models.IntegerField('Urutan', choices=URUTAN_CHOICES)
    banner = models.ImageField(u'Banner', upload_to = 'img-banner')
    def __unicode__(self):
        return self.nama
    class Meta:
        db_table = 'tbbanner'
class ProfileSoftware(models.Model):
    """Application profile (name and address) shown on the site."""
    nama = models.CharField('Nama', max_length = 30)
    alamat = models.CharField('alamat', max_length = 200)
    def __unicode__(self):
        return self.nama
    class Meta:
        db_table = 'tbprofile_software'
|
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import copy, logging
from functools import partial
from collections import defaultdict, namedtuple
from io import BytesIO
from struct import pack
import cssutils
from lxml import etree
from calibre import isbytestring, force_unicode
from calibre.ebooks.mobi.utils import (create_text_record, to_base,
is_guide_ref_start)
from calibre.ebooks.compression.palmdoc import compress_doc
from calibre.ebooks.oeb.base import (OEB_DOCS, OEB_STYLES, SVG_MIME, XPath,
extract, XHTML, urlnormalize)
from calibre.ebooks.oeb.parse_utils import barename
from calibre.ebooks.mobi.writer8.skeleton import Chunker, aid_able_tags, to_href
from calibre.ebooks.mobi.writer8.index import (NCXIndex, SkelIndex,
ChunkIndex, GuideIndex, NonLinearNCXIndex)
from calibre.ebooks.mobi.writer8.mobi import KF8Book
from calibre.ebooks.mobi.writer8.tbs import apply_trailing_byte_sequences
from calibre.ebooks.mobi.writer8.toc import TOCAdder
XML_DOCS = OEB_DOCS | {SVG_MIME}
to_ref = partial(to_base, base=32, min_num_digits=4)
class KF8Writer(object):
def __init__(self, oeb, opts, resources):
self.oeb, self.opts, self.log = oeb, opts, oeb.log
self.compress = not self.opts.dont_compress
self.has_tbs = False
self.log.info('Creating KF8 output')
# Create an inline ToC if one does not already exist
self.toc_adder = TOCAdder(oeb, opts)
self.used_images = set()
self.resources = resources
self.flows = [None] # First flow item is reserved for the text
self.records = [None] # Placeholder for zeroth record
self.log('\tGenerating KF8 markup...')
self.dup_data()
self.cleanup_markup()
self.replace_resource_links()
self.extract_css_into_flows()
self.extract_svg_into_flows()
self.replace_internal_links_with_placeholders()
self.insert_aid_attributes()
self.chunk_it_up()
# Dump the cloned data as it is no longer needed
del self._data_cache
self.create_text_records()
self.log('\tCreating indices...')
self.create_fdst_records()
self.create_indices()
self.create_guide()
# We do not want to use this ToC for MOBI 6, so remove it
self.toc_adder.remove_generated_toc()
def dup_data(self):
''' Duplicate data so that any changes we make to markup/CSS only
affect KF8 output and not MOBI 6 output '''
self._data_cache = {}
# Suppress cssutils logging output as it is duplicated anyway earlier
# in the pipeline
cssutils.log.setLevel(logging.CRITICAL)
for item in self.oeb.manifest:
if item.media_type in XML_DOCS:
self._data_cache[item.href] = copy.deepcopy(item.data)
elif item.media_type in OEB_STYLES:
# I can't figure out how to make an efficient copy of the
# in-memory CSSStylesheet, as deepcopy doesn't work (raises an
# exception)
self._data_cache[item.href] = cssutils.parseString(
item.data.cssText, validate=False)
def data(self, item):
return self._data_cache.get(item.href, item.data)
def cleanup_markup(self):
for item in self.oeb.spine:
root = self.data(item)
# Remove empty script tags as they are pointless
for tag in XPath('//h:script')(root):
if not tag.text and not tag.get('src', False):
tag.getparent().remove(tag)
def replace_resource_links(self):
''' Replace links to resources (raster images/fonts) with pointers to
the MOBI record containing the resource. The pointers are of the form:
kindle:embed:XXXX?mime=image/* The ?mime= is apparently optional and
not used for fonts. '''
def pointer(item, oref):
ref = urlnormalize(item.abshref(oref))
idx = self.resources.item_map.get(ref, None)
if idx is not None:
is_image = self.resources.records[idx-1][:4] not in {b'FONT'}
idx = to_ref(idx)
if is_image:
self.used_images.add(ref)
return 'kindle:embed:%s?mime=%s'%(idx,
self.resources.mime_map[ref])
else:
return 'kindle:embed:%s'%idx
return oref
for item in self.oeb.manifest:
if item.media_type in XML_DOCS:
root = self.data(item)
for tag in XPath('//h:img|//svg:image')(root):
for attr, ref in tag.attrib.iteritems():
if attr.split('}')[-1].lower() in {'src', 'href'}:
tag.attrib[attr] = pointer(item, ref)
for tag in XPath('//h:style')(root):
if tag.text:
sheet = cssutils.parseString(tag.text, validate=False)
replacer = partial(pointer, item)
cssutils.replaceUrls(sheet, replacer,
ignoreImportRules=True)
repl = sheet.cssText
if isbytestring(repl):
repl = repl.decode('utf-8')
tag.text = '\n'+ repl + '\n'
elif item.media_type in OEB_STYLES:
sheet = self.data(item)
replacer = partial(pointer, item)
cssutils.replaceUrls(sheet, replacer, ignoreImportRules=True)
def extract_css_into_flows(self):
inlines = defaultdict(list) # Ensure identical <style>s not repeated
sheets = {}
for item in self.oeb.manifest:
if item.media_type in OEB_STYLES:
data = self.data(item).cssText
sheets[item.href] = len(self.flows)
self.flows.append(force_unicode(data, 'utf-8'))
for item in self.oeb.spine:
root = self.data(item)
for link in XPath('//h:link[@href]')(root):
href = item.abshref(link.get('href'))
idx = sheets.get(href, None)
if idx is not None:
idx = to_ref(idx)
link.set('href', 'kindle:flow:%s?mime=text/css'%idx)
for tag in XPath('//h:style')(root):
p = tag.getparent()
idx = p.index(tag)
raw = tag.text
if not raw or not raw.strip():
extract(tag)
continue
repl = etree.Element(XHTML('link'), type='text/css',
rel='stylesheet')
repl.tail='\n'
p.insert(idx, repl)
extract(tag)
inlines[raw].append(repl)
for raw, elems in inlines.iteritems():
idx = to_ref(len(self.flows))
self.flows.append(raw)
for link in elems:
link.set('href', 'kindle:flow:%s?mime=text/css'%idx)
def extract_svg_into_flows(self):
images = {}
for item in self.oeb.manifest:
if item.media_type == SVG_MIME:
data = self.data(item)
images[item.href] = len(self.flows)
self.flows.append(etree.tostring(data, encoding='UTF-8',
with_tail=True, xml_declaration=True))
for item in self.oeb.spine:
root = self.data(item)
for svg in XPath('//svg:svg')(root):
raw = etree.tostring(svg, encoding=unicode, with_tail=False)
idx = len(self.flows)
self.flows.append(raw)
p = svg.getparent()
pos = p.index(svg)
img = etree.Element(XHTML('img'),
src="kindle:flow:%s?mime=image/svg+xml"%to_ref(idx))
p.insert(pos, img)
extract(svg)
for img in XPath('//h:img[@src]')(root):
src = img.get('src')
abshref = item.abshref(src)
idx = images.get(abshref, None)
if idx is not None:
img.set('src', 'kindle:flow:%s?mime=image/svg+xml'%
to_ref(idx))
def replace_internal_links_with_placeholders(self):
self.link_map = {}
count = 0
hrefs = {item.href for item in self.oeb.spine}
for item in self.oeb.spine:
root = self.data(item)
for a in XPath('//h:a[@href]')(root):
count += 1
ref = item.abshref(a.get('href'))
href, _, frag = ref.partition('#')
href = urlnormalize(href)
if href in hrefs:
placeholder = 'kindle:pos:fid:0000:off:%s'%to_href(count)
self.link_map[placeholder] = (href, frag)
a.set('href', placeholder)
def insert_aid_attributes(self):
self.id_map = {}
for i, item in enumerate(self.oeb.spine):
root = self.data(item)
aidbase = i * int(1e6)
j = 0
for tag in root.iterdescendants(etree.Element):
id_ = tag.attrib.get('id', None)
if id_ is None and tag.tag == XHTML('a'):
# Can happen during tweaking
id_ = tag.attrib.get('name', None)
if id_ is not None:
tag.attrib['id'] = id_
if id_ is not None or barename(tag.tag).lower() in aid_able_tags:
aid = aidbase + j
tag.attrib['aid'] = to_base(aid, base=32)
if tag.tag == XHTML('body'):
self.id_map[(item.href, '')] = tag.attrib['aid']
if id_ is not None:
self.id_map[(item.href, id_)] = tag.attrib['aid']
j += 1
def chunk_it_up(self):
placeholder_map = {}
for placeholder, x in self.link_map.iteritems():
href, frag = x
aid = self.id_map.get(x, None)
if aid is None:
aid = self.id_map.get((href, ''))
placeholder_map[placeholder] = aid
chunker = Chunker(self.oeb, self.data, placeholder_map)
for x in ('skel_table', 'chunk_table', 'aid_offset_map'):
setattr(self, x, getattr(chunker, x))
self.flows[0] = chunker.text
def create_text_records(self):
self.flows = [x.encode('utf-8') if isinstance(x, unicode) else x for x
in self.flows]
text = b''.join(self.flows)
self.text_length = len(text)
text = BytesIO(text)
nrecords = 0
records_size = 0
self.uncompressed_record_lengths = []
if self.compress:
self.oeb.logger.info('\tCompressing markup...')
while text.tell() < self.text_length:
data, overlap = create_text_record(text)
self.uncompressed_record_lengths.append(len(data))
if self.compress:
data = compress_doc(data)
data += overlap
data += pack(b'>B', len(overlap))
self.records.append(data)
records_size += len(data)
nrecords += 1
self.last_text_record_idx = nrecords
self.first_non_text_record_idx = nrecords + 1
# Pad so that the next records starts at a 4 byte boundary
if records_size % 4 != 0:
self.records.append(b'\x00'*(records_size % 4))
self.first_non_text_record_idx += 1
def create_fdst_records(self):
FDST = namedtuple('Flow', 'start end')
entries = []
self.fdst_table = []
for i, flow in enumerate(self.flows):
start = 0 if i == 0 else self.fdst_table[-1].end
self.fdst_table.append(FDST(start, start + len(flow)))
entries.extend(self.fdst_table[-1])
rec = (b'FDST' + pack(b'>LL', 12, len(self.fdst_table)) +
pack(b'>%dL'%len(entries), *entries))
self.fdst_records = [rec]
self.fdst_count = len(self.fdst_table)
def create_indices(self):
self.skel_records = SkelIndex(self.skel_table)()
self.chunk_records = ChunkIndex(self.chunk_table)()
self.ncx_records = []
toc = self.oeb.toc
entries = []
is_periodical = self.opts.mobi_periodical
if toc.count() < 2:
self.log.warn('Document has no ToC, MOBI will have no NCX index')
return
# Flatten the ToC into a depth first list
fl = toc.iterdescendants()
for i, item in enumerate(fl):
entry = {'id': id(item), 'index': i, 'label':(item.title or
_('Unknown')), 'children':[]}
entry['depth'] = getattr(item, 'ncx_hlvl', 0)
p = getattr(item, 'ncx_parent', None)
if p is not None:
entry['parent_id'] = p
for child in item:
child.ncx_parent = entry['id']
child.ncx_hlvl = entry['depth'] + 1
entry['children'].append(id(child))
if is_periodical:
if item.author:
entry['author'] = item.author
if item.description:
entry['description'] = item.description
entries.append(entry)
href = item.href or ''
href, frag = href.partition('#')[0::2]
aid = self.id_map.get((href, frag), None)
if aid is None:
aid = self.id_map.get((href, ''), None)
if aid is None:
pos, fid = 0, 0
chunk = self.chunk_table[pos]
offset = chunk.insert_pos + fid
else:
pos, fid, offset = self.aid_offset_map[aid]
entry['pos_fid'] = (pos, fid)
entry['offset'] = offset
# The Kindle requires entries to be sorted by (depth, playorder)
# However, I cannot figure out how to deal with non linear ToCs, i.e.
# ToCs whose nth entry at depth d has an offset after its n+k entry at
# the same depth, so we sort on (depth, offset) instead. This re-orders
# the ToC to be linear. A non-linear ToC causes section to section
# jumping to not work. kindlegen somehow handles non-linear tocs, but I
# cannot figure out how.
original = sorted(entries,
key=lambda entry: (entry['depth'], entry['index']))
linearized = sorted(entries,
key=lambda entry: (entry['depth'], entry['offset']))
is_non_linear = original != linearized
entries = linearized
is_non_linear = False # False as we are using the linearized entries
if is_non_linear:
for entry in entries:
entry['kind'] = 'chapter'
for i, entry in enumerate(entries):
entry['index'] = i
id_to_index = {entry['id']:entry['index'] for entry in entries}
# Write the hierarchical information
for entry in entries:
children = entry.pop('children')
if children:
entry['first_child'] = id_to_index[children[0]]
entry['last_child'] = id_to_index[children[-1]]
if 'parent_id' in entry:
entry['parent'] = id_to_index[entry.pop('parent_id')]
# Write the lengths
def get_next_start(entry):
enders = [e['offset'] for e in entries if e['depth'] <=
entry['depth'] and e['offset'] > entry['offset']]
if enders:
return min(enders)
return len(self.flows[0])
for entry in entries:
entry['length'] = get_next_start(entry) - entry['offset']
self.has_tbs = apply_trailing_byte_sequences(entries, self.records,
self.uncompressed_record_lengths)
idx_type = NonLinearNCXIndex if is_non_linear else NCXIndex
self.ncx_records = idx_type(entries)()
def create_guide(self):
    """Build the guide table/records mapping OEB guide references to positions."""
    self.start_offset = None
    self.guide_table = []
    self.guide_records = []
    GuideRef = namedtuple('GuideRef', 'title type pos_fid')
    for guide_ref in self.oeb.guide.values():
        base_href, fragment = guide_ref.href.partition('#')[0::2]
        # Resolve the target aid: exact (href, fragment) first, then the
        # bare href; skip references whose target cannot be located.
        target_aid = self.id_map.get((base_href, fragment), None)
        if target_aid is None:
            target_aid = self.id_map.get((base_href, ''))
        if target_aid is None:
            continue
        pos, fid, offset = self.aid_offset_map[target_aid]
        if is_guide_ref_start(guide_ref):
            self.start_offset = offset
        entry_title = guide_ref.title or _('Unknown')
        self.guide_table.append(GuideRef(entry_title, guide_ref.type, (pos, fid)))
    if self.guide_table:
        self.guide_table.sort(key=lambda ref: ref.type)  # Needed by the Kindle
        self.guide_records = GuideIndex(self.guide_table)()
def create_kf8_book(oeb, opts, resources, for_joint=False):
    """Run a KF8Writer over *oeb* and wrap the result in a KF8Book."""
    kf8_writer = KF8Writer(oeb, opts, resources)
    return KF8Book(kf8_writer, for_joint=for_joint)
|
__author__ = 'ketchup'
__version__= '0.1'
__modified_by = 'ketchup'
import sys
import xml.dom.minidom
class Script:
    """Wrapper around an XML ``<script>`` element exposing its id and output.

    When constructed with ``None`` the class-level empty-string defaults
    are kept, so attribute access is always safe.
    """

    # Class-level defaults used when no node is supplied.
    scriptId = ''
    output = ''

    def __init__(self, ScriptNode):
        """Populate ``scriptId``/``output`` from the node's attributes, if any."""
        # Idiomatic identity test (was: `if not (ScriptNode is None)`).
        if ScriptNode is not None:
            self.scriptId = ScriptNode.getAttribute('id')
            self.output = ScriptNode.getAttribute('output')
if __name__ == '__main__':
    # Parse the scan results file and dump each script's id and output.
    # `print(x)` with a single argument is valid on both Python 2 and 3;
    # the old `print x` statement form is Python-2-only syntax.
    dom = xml.dom.minidom.parse('a-full.xml')
    for scriptNode in dom.getElementsByTagName('script'):
        script = Script(scriptNode)
        print(script.scriptId)
        print(script.output)
|
import numpy as np
from sqlalchemy import Column, Integer, ForeignKey, \
String, Unicode, LargeBinary
from sqlalchemy.orm import relationship
from chemiris.models import Base, JSONDict
from chemiris.timeseries.TimeSeries import dumps, loads
import chemiris.peaks.Math as peakmath
class Peak(Base):
    """ORM model for a single detected peak.

    Raw time-series data is persisted serialized in the ``rawdata_``
    LargeBinary column and exposed through the ``rawdata`` property,
    which (de)serializes with the timeseries ``dumps``/``loads`` helpers.
    """
    __tablename__ = 'peaks'
    peak_id = Column(Integer, primary_key=True)
    peak_type = Column(String(8))  # short free-form type label
    name = Column(Unicode(255))
    # Owning palette trace and (optional) peak group.
    pt_id = Column(Integer, ForeignKey('palette_traces.palette_id'))
    peakgroup_id = Column(Integer, ForeignKey('peakgroups.peakgroup_id'))
    model = Column(JSONDict)
    other = Column(JSONDict)  # name, p-create, trace
    rawdata_ = Column(LargeBinary)  # serialized time series
    baseline_ = Column(LargeBinary)  # serialized baseline (not written anywhere visible here)
    # NOTE(review): class-level mutable list, shared by every instance that
    # never rebinds ``children`` — confirm this is intentional.
    children = []

    def __init__(self, name='', data=None, baseline=None, **kwargs):
        # Only ``data`` is stored; ``name``, ``baseline`` and extra kwargs
        # are currently ignored (baseline persistence is commented out).
        self.rawdata = data
        #if 'baseline' in kwargs:
        #    self.baseline = kwargs['baseline']
        #else:
        #    self.baseline = None
        pass

    @property
    def parent(self):
        """The peak's owner: its peakgroup when assigned, else its chromatogram."""
        if self.peakgroup is None:
            return self.cgram
        else:
            return self.peakgroup

    @property
    def rawdata(self):
        """Deserialized raw time series, or None when nothing is stored."""
        if self.rawdata_ is not None:
            return loads(self.rawdata_)

    @rawdata.setter
    def rawdata(self, value):
        # Serialize on assignment; assigning None clears the stored blob.
        if value is not None:
            self.rawdata_ = dumps(value)
        else:
            self.rawdata_ = None

    @property
    def data(self):
        """Alias for ``rawdata``."""
        #FIXME: make this return models too
        return self.rawdata

    def as_poly(self, ion=None, sub_base=False):
        """Return the peak as an (N, 2) array of (time, intensity) rows.

        ``ion`` selects the trace row; when None or not present in the
        data, row 0 is used.  Rows containing NaNs are dropped.
        NOTE(review): ``sub_base`` is accepted but unused — baseline
        subtraction is commented out below; confirm before relying on it.
        """
        # add in the baseline on either side
        if ion is None:
            row = 0
        elif not self.rawdata.has_ion(ion):
            row = 0
        else:
            try:
                row = self.rawdata.ions.index(float(ion))
            except ValueError:
                # the ion label may not be numeric; fall back to a direct match
                row = self.rawdata.ions.index(ion)
        # Pair each time stamp with the selected trace's intensities.
        pk = np.vstack([self.rawdata.times, self.rawdata.data.T[row]]).T
        #base = self.baseline(ion)
        #if sub_base:
        #    # this subtracts out the base line before returning it
        #    # it's useful for numerical fxns that don't take baseline
        #    if base is None:
        #        base_pts = np.interp(pk[:, 0], [pk[1, 0], pk[-1, 0]], \
        #          [pk[0, 1], pk[-1, 1]])
        #    else:
        #        base_pts = np.interp(pk[:, 0], *base)
        #    ply = np.array([pk[:, 0], pk[:, 1] - base_pts]).T
        #if base is None:
        #    ply = pk
        #else:
        #    ply = np.vstack([base[0], pk, base[:0:-1]])
        ply = pk
        # Keep only rows with no NaN in either column.
        return ply[np.logical_not(np.any(np.isnan(ply), axis=1))]

    def contains(self, x, y, ion=None):
        """True when the point (x, y) falls inside this peak's polygon for ``ion``."""
        if not self.data.has_ion(ion):
            return False
        return peakmath.contains(self.as_poly(ion), x, y)
class PeakGroup(Base):
    """A named collection of peaks belonging to one chromatogram."""
    __tablename__ = 'peakgroups'
    peakgroup_id = Column(Integer, primary_key=True)
    cgram_id = Column(Integer, \
        ForeignKey('chromatograms.cgram_id'))
    # NOTE(review): relationship() targets are normally mapped *class*
    # names; 'traces' looks like a table name — confirm the mapped class
    # really is called ``traces``.
    cgram = relationship('traces', backref='pkgps')
    # Plain class attributes (not Columns) — these are not persisted.
    start_time = ''
    end_time = ''
    peaks = relationship('Peak', backref='peakgroup')

    @property
    def children(self):
        """Tree-navigation alias: a group's children are its peaks."""
        return self.peaks

    @property
    def parent(self):
        """Tree-navigation alias: a group's parent is its chromatogram."""
        return self.cgram
|
"""
pythoner.net
Copyright (C) 2013 PYTHONER.ORG
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django import template
from topic.models import Topic,Favorite,Tag
from django.contrib.auth.models import User
register = template.Library()
@register.inclusion_tag('topic_latest.tag.html')
def get_latest_topic(count=10):
    """Render the most recently submitted topics (at most ``count``)."""
    latest = Topic.objects.order_by('-sub_time')[:count]
    return {'entrys': latest}
@register.inclusion_tag('topic_list_by_user.tag.html')
def get_topic_list_by_user(user, count=10):
    """Render up to ``count`` non-deleted topics started by ``user``."""
    started = Topic.objects.filter(author=user, deleted=False)[:count]
    return {'topics': started, 'user': user}
@register.inclusion_tag('topic_mark_list.tag.html')
def get_user_favorite(user_id):
    """Render up to five of the user's favorited topics.

    Bug fix: the original caught ValueError/DoesNotExist with ``pass``,
    leaving ``user`` unbound so the following filter() raised NameError.
    Invalid or unknown ids now render an empty list instead.
    """
    try:
        user = User.objects.get(id=int(user_id))
    except (ValueError, User.DoesNotExist):
        # Bad id or no such user: show no favorites rather than crash.
        return {'entrys': [], 'title': '收藏的'}
    favorites = Favorite.objects.filter(user=user)[0:5]
    return {'entrys': favorites, 'title': '收藏的'}
@register.filter
def mark(user, topic):
    """Return '+ Mark' when ``user`` has not favorited ``topic``, else 'Marked'.

    The previous total-count query (``Favorite.objects.filter(topic=topic)
    .count()``) was never used — removed to save a database hit.
    """
    try:
        Favorite.objects.get(user=user, topic=topic)
    except Favorite.DoesNotExist:
        return '+ Mark'
    return 'Marked'
@register.filter
def click_count(count):
    """Format a click count: the plain number up to 999, then e.g. '1k', '12k'.

    Uses floor division: on Python 3 the original ``count/1000`` is true
    division and would render '1.5k'; ``//`` preserves the intended
    truncating (Python 2) behavior on both versions.
    """
    if count <= 999:
        return count
    return str(count // 1000) + 'k'
@register.inclusion_tag('topic_tag.tag.html')
def get_topic_tag():
    """Expose every tag to the topic-tag template."""
    all_tags = Tag.objects.all()
    return {'tags': all_tags}
|
'''
Constants for GoodCrypto app.
Copyright 2014-2016 GoodCrypto
Last modified: 2016-11-07
'''
import os, os.path
# Master switch for test-only behaviour; must stay False for shipped builds.
WARNING_WARNING_WARNING_TESTING_ONLY_DO_NOT_SHIP = False
if WARNING_WARNING_WARNING_TESTING_ONLY_DO_NOT_SHIP:
    WARNING = 'WARNING! WARNING! WARNING! TESTING ONLY! DO NOT SHIP!'

PROJECT = 'GoodCrypto'
FULL_PROJECT_NAME = '{} Private Server'.format(PROJECT)
ISO_NAME = '{}.iso'.format(FULL_PROJECT_NAME.lower().replace(' ', '_'))
BASE_PROJECT_DIR = '/var/local/projects'
SHORT_NAME = PROJECT.lower()
PROJECT_DIR = os.path.join(BASE_PROJECT_DIR, SHORT_NAME)
GOODCRYPTO_DATA_DIR = os.path.join(PROJECT_DIR, 'server', 'data')

VM_NAME = 'GoodCrypto Private Server'
VM_OWNER = PROJECT.lower()  # set a default
VM_SCRIPT_DIR = os.path.join(os.path.dirname(__file__), 'vms')
HOME_ROOT = '/home'
# Best-effort: find which user's home directory contains the VirtualBox VM.
# Narrowed from a bare `except:` (which also swallowed SystemExit and
# KeyboardInterrupt) to OSError, which covers listdir/path failures.
try:
    home_dirs = os.listdir(HOME_ROOT)
    for home_dir in home_dirs:
        server_filename = os.path.join(HOME_ROOT, home_dir, 'VirtualBox VMs', VM_NAME)
        if os.path.exists(server_filename):
            VM_OWNER = home_dir
            break
except OSError:
    # No HOME_ROOT or unreadable entries: keep the default owner.
    pass

if WARNING_WARNING_WARNING_TESTING_ONLY_DO_NOT_SHIP:
    FORWARDED_APP_SSH_PORT = 8022
    FORWARDED_HOST_SSH_PORT = 8122
    FORWARDED_MAILSERVER_SSH_PORT = 8222

HTTP_PORT = 8080
HTTPS_PORT = 8443

STATUS_GREEN = 'green'
STATUS_RED = 'red'
STATUS_YELLOW = 'yellow'

DEFAULT_DB = 'default'
MAIL_DB = DEFAULT_DB  #'mail'
SYSTEM_DB = 'system'
WEB_DB = 'webfirewall'
DEFAULT_USER_ID = 1371

TIMESTAMP_DIR = '/var/lib/goodcrypto'
TIMESTAMP_FILENAME = '{}.timestamp'.format(ISO_NAME)
TIMESTAMP_PATH = os.path.join(TIMESTAMP_DIR, TIMESTAMP_FILENAME)
|
def dict_to_css(dictionary, name, local):
    """Convert a colour-theme dictionary into a CSS file.

    Writes ``<pardir>/<local>/<name>.css`` built from the theme's author
    header, a fixed body/pre block, a ::selection rule, and one rule per
    scoped settings entry.
    """
    # todo: decide if we need `word-wrap: break-word;` or not?
    KEYS = {
        'fontStyle' : 'font-style',
        'foreground': 'color',
        'background': 'background-color'
    }
    css_chunks = []
    header = '/*\n*{auth}\n*{name} syntax highlight theme\n*\n*{comm}\n*/\n'.format(
        auth=dictionary['author'],
        name=dictionary['name'],
        comm=dictionary['comment'],
    )
    css_chunks.append(header)
    css_chunks.append('body\n{\n\tbackground: #282828;\n}\n')
    base = dictionary['settings'][0]['settings']
    static_rules = (
        '\tmargin: 0px;\n'
        '\tpadding-left: 20px;\n'
        '\tfont-size: 12.5px;\n'
        "\tfont-family: 'Menlo', monospace;\n"
    )
    dynamic_rules = '\n'.join([
        '\tbackground: {};'.format(base['background']),
        '\tcolor: {};'.format(base['foreground']),
    ])
    css_chunks.append('pre, code\n{{\n{default}{dynamic}\n}}\n'.format(
        default=static_rules, dynamic=dynamic_rules))
    css_chunks.append('::selection\n{{\n\tbackground: {};\n}}\n'.format(
        hex_to_rgba(base['selection'])))
    for item in dictionary['settings'][1:]:
        try:
            scope_name = item['scope']
            rules = []
            for key, value in item['settings'].items():
                css_key = KEYS.get(key, key)
                # 8-digit hex colours (#RRGGBBAA) become rgba() values.
                if value.startswith('#') and len(value) == 9:
                    css_value = hex_to_rgba(value)
                else:
                    css_value = value
                rules.append('\t{}: {};'.format(css_key, css_value))
            css_chunks.append('pre .{}\n{{\n{}\n}}\n'.format(scope_name, '\n'.join(rules)))
        except KeyError:
            # Entries without a 'scope' (or 'settings') are skipped.
            pass
    target = os.path.join(os.pardir, local, '{}.css'.format(name))
    with open(target, 'w') as f:
        f.write('\n'.join(css_chunks))
    print(name, 'style dictionary has been converted and placed.')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.