# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
""" This module provides a (functional) API to OpenGL ES 2.0.
There are multiple backend implementations of this API, available as
submodules of this module. One can use one of the backends directly,
or call `gl.use_gl()` to select one. The backend system allows running
visualizations using Angle, WebGL, or other forms of remote rendering.
This is in part possible by the widespread availability of OpenGL ES 2.0.
All functions that this API provides accept and return Python arguments
(no ctypes is required); strings are real strings and you can pass
data as numpy arrays. In general the input arguments are not checked
(for performance reasons). Each function results in exactly one OpenGL
API call, except when using the pyopengl backend.
The functions do not have docstrings, but most IDEs should provide you
with the function signature. For more documentation see
http://www.khronos.org/opengles/sdk/docs/man/
"""
# NOTE: modules in this package that start with one underscore are
# autogenerated, and should not be edited.
from __future__ import division
import os
from ...util import config, logger
from ._constants import * # noqa
from ._proxy import BaseGLProxy
# Variable that will hold the module corresponding to the current backend
# This variable is used in our proxy classes to call the right functions.
current_backend = None
class MainProxy(BaseGLProxy):
""" Main proxy for the GL ES 2.0 API.
The functions in this namespace always call into the correct GL
backend. Therefore these function objects can be safely stored for
    reuse. However, for efficiency it would probably be better to store the
function name and then do ``func = getattr(gloo.gl, funcname)``.
"""
def __call__(self, funcname, returns, *args):
func = getattr(current_backend, funcname)
return func(*args)
# Instantiate proxy objects
proxy = MainProxy()
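# For example, ``proxy('glEnable', None, GL_CULL_FACE)`` dispatches glEnable
# on whatever backend is current at call time (the ``returns`` argument is
# accepted for signature compatibility but unused by this proxy).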
def use_gl(target='gl2'):
""" Let Vispy use the target OpenGL ES 2.0 implementation
Also see ``vispy.use()``.
Parameters
----------
target : str
The target GL backend to use.
Available backends:
* gl2 - Use ES 2.0 subset of desktop (i.e. normal) OpenGL
* gl+ - Use the desktop ES 2.0 subset plus all non-deprecated GL
functions on your system (requires PyOpenGL)
* es2 - Use the ES2 library (Angle/DirectX on Windows)
* pyopengl2 - Use ES 2.0 subset of pyopengl (for fallback and testing)
* dummy - Prevent usage of gloo.gl (for when rendering occurs elsewhere)
    You can use vispy's config option "gl_debug" to check for errors
    on each API call. Alternatively, you can specify it as part of the
    target, e.g. "gl2 debug". (Debug does not apply to 'gl+', since
    PyOpenGL has its own debug mechanism.)
"""
target = target or 'gl2'
target = target.replace('+', 'plus')
# Get options
target, _, options = target.partition(' ')
debug = config['gl_debug'] or 'debug' in options
# Select modules to import names from
try:
mod = __import__(target, globals(), level=1)
except ImportError as err:
msg = 'Could not import gl target "%s":\n%s' % (target, str(err))
raise RuntimeError(msg)
# Apply
global current_backend
current_backend = mod
_clear_namespace()
if 'plus' in target:
# Copy PyOpenGL funcs, extra funcs, constants, no debug
_copy_gl_functions(mod._pyopengl2, globals(), debug=debug)
_copy_gl_functions(mod, globals(), True, debug=debug)
else:
_copy_gl_functions(mod, globals(), debug=debug)
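# A sketch of enabling the debug wrappers explicitly, regardless of the
# "gl_debug" config option:
#
#     use_gl('gl2 debug')   # every GL call is logged and followed by check_error()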
def _clear_namespace():
""" Clear names that are not part of the strict ES API
"""
ok_names = set(default_backend.__dict__)
ok_names.update(['gl2', 'glplus']) # don't remove the module
NS = globals()
for name in list(NS.keys()):
if name.lower().startswith('gl'):
if name not in ok_names:
del NS[name]
def _copy_gl_functions(source, dest, constants=False, debug=False):
""" Inject all objects that start with 'gl' from the source
into the dest. source and dest can be dicts, modules or BaseGLProxy's.
"""
# Get dicts
if isinstance(source, BaseGLProxy):
s = {}
for key in dir(source):
s[key] = getattr(source, key)
source = s
elif not isinstance(source, dict):
source = source.__dict__
if not isinstance(dest, dict):
dest = dest.__dict__
# Copy names
funcnames = [name for name in source.keys() if name.startswith('gl')]
for name in funcnames:
if debug and name != 'glGetError':
dest[name] = make_debug_wrapper(source[name])
else:
dest[name] = source[name]
# Copy constants
if constants:
constnames = [name for name in source.keys() if name.startswith('GL_')]
for name in constnames:
dest[name] = source[name]
def _arg_repr(arg):
""" Get a useful (and not too large) represetation of an argument.
"""
r = repr(arg)
    maxlen = 40
    if len(r) > maxlen:
        if hasattr(arg, 'shape'):
            r = 'array:' + 'x'.join([repr(s) for s in arg.shape])
        else:
            r = r[:maxlen-3] + '...'
return r
def make_debug_wrapper(fn):
def gl_debug_wrapper(*args):
# Log function call
argstr = ', '.join(map(_arg_repr, args))
logger.debug("%s(%s)" % (fn.__name__, argstr))
# Call function
ret = fn(*args)
# Log return value
if ret is not None:
if fn.__name__ == 'glReadPixels':
logger.debug(" <= %s[%s]" % (type(ret), len(ret)))
else:
logger.debug(" <= %s" % repr(ret))
        # Check for errors (raises if an error occurred)
check_error(fn.__name__)
# Return
return ret
gl_debug_wrapper.__name__ = fn.__name__ + '_debug_wrapper'
# Store reference to wrapped function just for introspection
gl_debug_wrapper._wrapped_function = fn
return gl_debug_wrapper
def check_error(when='periodic check'):
""" Check this from time to time to detect GL errors.
Parameters
----------
when : str
Shown in the exception to help the developer determine when
this check was done.
"""
errors = []
while True:
err = glGetError()
if err == GL_NO_ERROR or (errors and err == errors[-1]):
break
errors.append(err)
if errors:
msg = ', '.join([repr(ENUM_MAP.get(e, e)) for e in errors])
err = RuntimeError('OpenGL got errors (%s): %s' % (when, msg))
err.errors = errors
err.err = errors[-1] # pyopengl compat
raise err
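# Example of a periodic check (a sketch): call this once per frame so that a
# GL error surfaces near the calls that caused it:
#
#     def on_draw(event):          # hypothetical event handler
#         ...                      # issue GL calls
#         check_error('on_draw')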
def _fix_osmesa_gl_lib_if_testing():
"""
    This function checks whether we are running tests with the osmesa
    backend and fixes the GL library if needed.
    Since we have to fix the VISPY_GL_LIB *before* any import from the gl
    module, we have to run this here.
    Test discovery utilities (like pytest) will try to import modules
    before running tests, so we have to modify the GL lib really early.
    The other solution would be to set up pre-run hooks for the test
    utility, but there doesn't seem to be a standard way to do that
    (e.g. conftest.py for py.test)
"""
test_name = os.getenv('_VISPY_TESTING_APP', None)
if test_name == 'osmesa':
from ...util.osmesa_gl import fix_osmesa_gl_lib
fix_osmesa_gl_lib()
_fix_osmesa_gl_lib_if_testing()
# Load default gl backend
from . import gl2 as default_backend # noqa
# Call use to start using our default backend
use_gl()
import unittest
from robot.utils.asserts import assert_equal
from robot.model.statistics import Statistics
from robot.result import TestCase, TestSuite
def verify_stat(stat, name, passed, failed, skipped, critical=None, combined=None,
id=None, elapsed=0):
assert_equal(stat.name, name, 'stat.name')
assert_equal(stat.passed, passed)
assert_equal(stat.failed, failed)
assert_equal(stat.skipped, skipped)
assert_equal(stat.total, passed + failed + skipped)
if hasattr(stat, 'critical'):
assert_equal(stat.critical,
False if critical is None else bool(critical))
assert_equal(stat.non_critical,
False if critical is None else not bool(critical))
assert_equal(stat.combined, combined)
if hasattr(stat, 'id'):
assert_equal(stat.id, id)
assert_equal(stat.elapsed, elapsed)
def verify_suite(suite, name, id, passed, failed, skipped):
verify_stat(suite.stat, name, passed, failed, skipped, id=id)
def generate_suite():
suite = TestSuite(name='Root Suite')
suite.set_criticality(critical_tags=['smoke'])
s1 = suite.suites.create(name='First Sub Suite')
s2 = suite.suites.create(name='Second Sub Suite')
s11 = s1.suites.create(name='Sub Suite 1_1')
s12 = s1.suites.create(name='Sub Suite 1_2')
s13 = s1.suites.create(name='Sub Suite 1_3')
s21 = s2.suites.create(name='Sub Suite 2_1')
s11.tests = [TestCase(status='PASS'), TestCase(status='FAIL', tags=['t1'])]
s12.tests = [TestCase(status='PASS', tags=['t_1','t2',]),
TestCase(status='PASS', tags=['t1','smoke']),
TestCase(status='FAIL', tags=['t1','t2','t3','smoke'])]
s13.tests = [TestCase(status='PASS', tags=['t1','t 2','smoke'])]
s21.tests = [TestCase(status='FAIL', tags=['t3','Smoke'])]
return suite
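# The suite above totals 4 passed and 3 failed tests. Robot normalizes tag
# names (case, spaces and underscores are ignored), so 't_1', 't 2' and
# 'Smoke' count as 't1', 't2' and 'smoke'; the four 'smoke' tests split
# 2 passed / 2 failed, which is what the criticality assertions below expect.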
class TestStatisticsSimple(unittest.TestCase):
def setUp(self):
suite = TestSuite(name='Hello')
suite.tests = [TestCase(status='PASS'), TestCase(status='PASS'),
TestCase(status='FAIL')]
self.statistics = Statistics(suite)
def test_total(self):
verify_stat(self.statistics.total.critical, 'Critical Tests', 2, 1, 0)
verify_stat(self.statistics.total.all, 'All Tests', 2, 1, 0)
def test_suite(self):
verify_suite(self.statistics.suite, 'Hello', 's1', 2, 1, 0)
def test_tags(self):
assert_equal(list(self.statistics.tags), [])
class TestStatisticsNotSoSimple(unittest.TestCase):
def setUp(self):
suite = generate_suite()
suite.set_criticality(critical_tags=['smoke'])
self.statistics = Statistics(suite, 2, ['t*','smoke'], ['t3'],
[('t? & smoke', ''), ('none NOT t1', 'a title')])
def test_total(self):
verify_stat(self.statistics.total.all, 'All Tests', 4, 3, 0)
verify_stat(self.statistics.total.critical, 'Critical Tests', 2, 2, 0)
def test_suite(self):
suite = self.statistics.suite
verify_suite(suite, 'Root Suite', 's1', 4, 3, 0)
[s1, s2] = suite.suites
verify_suite(s1, 'Root Suite.First Sub Suite', 's1-s1', 4, 2, 0)
verify_suite(s2, 'Root Suite.Second Sub Suite', 's1-s2', 0, 1, 0)
assert_equal(len(s1.suites), 0)
assert_equal(len(s2.suites), 0)
def test_tags(self):
# Tag stats are tested more thoroughly in test_tagstatistics.py
tags = self.statistics.tags
verify_stat(tags.tags['smoke'], 'smoke', 2, 2, 0)
verify_stat(tags.tags['t1'], 't1', 3, 2, 0)
verify_stat(tags.tags['t2'], 't2', 2, 1, 0)
expected = [(u'smoke', 4), ('a title', 0), ('t? & smoke', 4), (u't1', 5), (u't2', 3)]
assert_equal(len(list(tags)), len(expected))
for t, e in zip(tags, expected):
verify_stat(t, *e)
class TestSuiteStatistics(unittest.TestCase):
def test_all_levels(self):
suite = Statistics(generate_suite()).suite
verify_suite(suite, 'Root Suite', 's1', 4, 3, 0)
[s1, s2] = suite.suites
verify_suite(s1, 'Root Suite.First Sub Suite', 's1-s1', 4, 2, 0)
verify_suite(s2, 'Root Suite.Second Sub Suite', 's1-s2', 0, 1, 0)
[s11, s12, s13] = s1.suites
verify_suite(s11, 'Root Suite.First Sub Suite.Sub Suite 1_1', 's1-s1-s1', 1, 1, 0)
verify_suite(s12, 'Root Suite.First Sub Suite.Sub Suite 1_2', 's1-s1-s2', 2, 1, 0)
verify_suite(s13, 'Root Suite.First Sub Suite.Sub Suite 1_3', 's1-s1-s3', 1, 0, 0)
[s21] = s2.suites
verify_suite(s21, 'Root Suite.Second Sub Suite.Sub Suite 2_1', 's1-s2-s1', 0, 1, 0)
def test_only_root_level(self):
suite = Statistics(generate_suite(), suite_stat_level=1).suite
verify_suite(suite, 'Root Suite', 's1', 4, 3, 0)
assert_equal(len(suite.suites), 0)
def test_deeper_level(self):
PASS = TestCase(status='PASS')
FAIL = TestCase(status='FAIL')
SKIP = TestCase(status='SKIP')
suite = TestSuite(name='1')
suite.suites = [TestSuite(name='1'), TestSuite(name='2'), TestSuite(name='3')]
suite.suites[0].suites = [TestSuite(name='1')]
suite.suites[1].suites = [TestSuite(name='1'), TestSuite(name='2')]
suite.suites[2].tests = [PASS, FAIL]
suite.suites[0].suites[0].suites = [TestSuite(name='1')]
suite.suites[1].suites[0].tests = [PASS, PASS, PASS, FAIL]
suite.suites[1].suites[1].tests = [PASS, PASS, FAIL, FAIL]
suite.suites[0].suites[0].suites[0].tests = [FAIL, FAIL, FAIL]
s1 = Statistics(suite, suite_stat_level=3).suite
verify_suite(s1, '1', 's1', 6, 7, 0)
[s11, s12, s13] = s1.suites
verify_suite(s11, '1.1', 's1-s1', 0, 3, 0)
verify_suite(s12, '1.2', 's1-s2', 5, 3, 0)
verify_suite(s13, '1.3', 's1-s3', 1, 1, 0)
[s111] = s11.suites
verify_suite(s111, '1.1.1', 's1-s1-s1', 0, 3, 0)
[s121, s122] = s12.suites
verify_suite(s121, '1.2.1', 's1-s2-s1', 3, 1, 0)
verify_suite(s122, '1.2.2', 's1-s2-s2', 2, 2, 0)
assert_equal(len(s111.suites), 0)
def test_iter_only_one_level(self):
[stat] = list(Statistics(generate_suite(), suite_stat_level=1).suite)
verify_stat(stat, 'Root Suite', 4, 3, 0, id='s1')
def test_iter_also_sub_suites(self):
stats = list(Statistics(generate_suite()).suite)
verify_stat(stats[0], 'Root Suite', 4, 3, 0, id='s1')
verify_stat(stats[1], 'Root Suite.First Sub Suite', 4, 2, 0, id='s1-s1')
verify_stat(stats[2], 'Root Suite.First Sub Suite.Sub Suite 1_1', 1, 1, 0, id='s1-s1-s1')
verify_stat(stats[3], 'Root Suite.First Sub Suite.Sub Suite 1_2', 2, 1, 0, id='s1-s1-s2')
verify_stat(stats[4], 'Root Suite.First Sub Suite.Sub Suite 1_3', 1, 0, 0, id='s1-s1-s3')
verify_stat(stats[5], 'Root Suite.Second Sub Suite', 0, 1, 0, id='s1-s2')
verify_stat(stats[6], 'Root Suite.Second Sub Suite.Sub Suite 2_1', 0, 1, 0, id='s1-s2-s1')
class TestElapsedTime(unittest.TestCase):
def setUp(self):
ts = '20120816 00:00:'
suite = TestSuite(starttime=ts+'00.000', endtime=ts+'59.999')
suite.suites = [
TestSuite(starttime=ts+'00.000', endtime=ts+'30.000'),
TestSuite(starttime=ts+'30.000', endtime=ts+'42.042')
]
suite.suites[0].tests = [
TestCase(starttime=ts+'00.000', endtime=ts+'00.001', tags=['t1']),
TestCase(starttime=ts+'00.001', endtime=ts+'01.001', tags=['t1', 't2'])
]
suite.suites[1].tests = [
TestCase(starttime=ts+'30.000', endtime=ts+'40.000', tags=['t1', 't2', 't3'])
]
suite.set_criticality(critical_tags=['t2'])
self.stats = Statistics(suite, tag_stat_combine=[('?2', 'combined')])
def test_total_stats(self):
assert_equal(self.stats.total.all.elapsed, 11001)
assert_equal(self.stats.total.critical.elapsed, 11000)
def test_tag_stats(self):
t1, t2, t3 = self.stats.tags.tags.values()
verify_stat(t1, 't1', 0, 3, 0, elapsed=11001)
verify_stat(t2, 't2', 0, 2, 0, elapsed=11000)
verify_stat(t3, 't3', 0, 1, 0, elapsed=10000)
def test_combined_tag_stats(self):
combined = self.stats.tags.combined[0]
verify_stat(combined, 'combined', 0, 2, 0, combined='?2', elapsed=11000)
def test_suite_stats(self):
assert_equal(self.stats.suite.stat.elapsed, 59999)
assert_equal(self.stats.suite.suites[0].stat.elapsed, 30000)
assert_equal(self.stats.suite.suites[1].stat.elapsed, 12042)
def test_suite_stats_when_suite_has_no_times(self):
suite = TestSuite()
assert_equal(Statistics(suite).suite.stat.elapsed, 0)
ts = '20120816 00:00:'
suite.tests = [TestCase(starttime=ts+'00.000', endtime=ts+'00.001'),
TestCase(starttime=ts+'00.001', endtime=ts+'01.001')]
assert_equal(Statistics(suite).suite.stat.elapsed, 1001)
suite.suites = [TestSuite(starttime=ts+'02.000', endtime=ts+'12.000'),
TestSuite()]
assert_equal(Statistics(suite).suite.stat.elapsed, 11001)
def test_elapsed_from_get_attributes(self):
for time, expected in [('00:00:00.000', '00:00:00'),
('00:00:00.001', '00:00:00'),
('00:00:00.500', '00:00:01'),
('00:00:00.999', '00:00:01'),
('00:00:01.000', '00:00:01'),
('00:00:01.001', '00:00:01'),
('00:00:01.499', '00:00:01'),
('00:00:01.500', '00:00:02'),
('01:59:59:499', '01:59:59'),
('01:59:59:500', '02:00:00')]:
suite = TestSuite(starttime='20120817 00:00:00.000',
endtime='20120817 ' + time)
stat = Statistics(suite).suite.stat
elapsed = stat.get_attributes(include_elapsed=True)['elapsed']
assert_equal(elapsed, expected, time)
if __name__ == "__main__":
unittest.main()
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A connection to the VMware vCenter platform.
"""
import re
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import versionutils as v_utils
from oslo_vmware import api
from oslo_vmware import exceptions as vexc
from oslo_vmware import pbm
from oslo_vmware import vim
from oslo_vmware import vim_util
from nova.compute import task_states
import nova.conf
from nova import exception
from nova.i18n import _, _LI, _LE, _LW
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import host
from nova.virt.vmwareapi import vim_util as nova_vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import volumeops
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
TIME_BETWEEN_API_CALL_RETRIES = 1.0
class VMwareVCDriver(driver.ComputeDriver):
"""The VC host connection object."""
capabilities = {
"has_imagecache": True,
"supports_recreate": False,
"supports_migrate_to_same_host": True,
"supports_attach_interface": True
}
# Legacy nodename is of the form: <mo id>(<cluster name>)
# e.g. domain-26(TestCluster)
# We assume <mo id> consists of alphanumeric, _ and -.
# We assume cluster name is everything between the first ( and the last ).
# We pull out <mo id> for re-use.
    LEGACY_NODENAME = re.compile(r'([\w-]+)\(.+\)')
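    # For example (a sketch):
    #   LEGACY_NODENAME.match('domain-26(TestCluster)').group(1) == 'domain-26'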
# The vCenter driver includes API that acts on ESX hosts or groups
# of ESX hosts in clusters or non-cluster logical-groupings.
#
# vCenter is not a hypervisor itself, it works with multiple
# hypervisor host machines and their guests. This fact can
# subtly alter how vSphere and OpenStack interoperate.
def __init__(self, virtapi, scheme="https"):
super(VMwareVCDriver, self).__init__(virtapi)
if (CONF.vmware.host_ip is None or
CONF.vmware.host_username is None or
CONF.vmware.host_password is None):
raise Exception(_("Must specify host_ip, host_username and "
"host_password to use vmwareapi.VMwareVCDriver"))
self._datastore_regex = None
if CONF.vmware.datastore_regex:
try:
self._datastore_regex = re.compile(CONF.vmware.datastore_regex)
except re.error:
                raise exception.InvalidInput(
                    reason=_("Invalid Regular Expression %s") %
                           CONF.vmware.datastore_regex)
self._session = VMwareAPISession(scheme=scheme)
self._check_min_version()
# Update the PBM location if necessary
if CONF.vmware.pbm_enabled:
self._update_pbm_location()
self._validate_configuration()
self._cluster_name = CONF.vmware.cluster_name
self._cluster_ref = vm_util.get_cluster_ref_by_name(self._session,
self._cluster_name)
if self._cluster_ref is None:
raise exception.NotFound(_("The specified cluster '%s' was not "
"found in vCenter")
% self._cluster_name)
self._vcenter_uuid = self._get_vcenter_uuid()
self._nodename = self._create_nodename(self._cluster_ref.value)
self._volumeops = volumeops.VMwareVolumeOps(self._session,
self._cluster_ref)
self._vmops = vmops.VMwareVMOps(self._session,
virtapi,
self._volumeops,
self._cluster_ref,
datastore_regex=self._datastore_regex)
self._vc_state = host.VCState(self._session,
self._nodename,
self._cluster_ref,
self._datastore_regex)
# Register the OpenStack extension
self._register_openstack_extension()
def _check_min_version(self):
min_version = v_utils.convert_version_to_int(constants.MIN_VC_VERSION)
next_min_ver = v_utils.convert_version_to_int(
constants.NEXT_MIN_VC_VERSION)
vc_version = vim_util.get_vc_version(self._session)
LOG.info(_LI("VMware vCenter version: %s"), vc_version)
if v_utils.convert_version_to_int(vc_version) < min_version:
raise exception.NovaException(
_('Detected vCenter version %(version)s. Nova requires VMware '
'vCenter version %(min_version)s or greater.') % {
'version': vc_version,
'min_version': constants.MIN_VC_VERSION})
elif v_utils.convert_version_to_int(vc_version) < next_min_ver:
LOG.warning(_LW('Running Nova with a VMware vCenter version less '
'than %(version)s is deprecated. The required '
'minimum version of vCenter will be raised to '
'%(version)s in the 16.0.0 release.'),
{'version': constants.NEXT_MIN_VC_VERSION})
@property
def need_legacy_block_device_info(self):
return False
def _update_pbm_location(self):
if CONF.vmware.pbm_wsdl_location:
pbm_wsdl_loc = CONF.vmware.pbm_wsdl_location
else:
version = vim_util.get_vc_version(self._session)
pbm_wsdl_loc = pbm.get_pbm_wsdl_location(version)
self._session.pbm_wsdl_loc_set(pbm_wsdl_loc)
def _validate_configuration(self):
if CONF.vmware.pbm_enabled:
if not CONF.vmware.pbm_default_policy:
raise error_util.PbmDefaultPolicyUnspecified()
if not pbm.get_profile_id_by_name(
self._session,
CONF.vmware.pbm_default_policy):
raise error_util.PbmDefaultPolicyDoesNotExist()
if CONF.vmware.datastore_regex:
LOG.warning(_LW(
"datastore_regex is ignored when PBM is enabled"))
self._datastore_regex = None
def init_host(self, host):
vim = self._session.vim
if vim is None:
self._session._create_session()
def cleanup_host(self, host):
self._session.logout()
def _register_openstack_extension(self):
# Register an 'OpenStack' extension in vCenter
LOG.debug('Registering extension %s with vCenter',
constants.EXTENSION_KEY)
os_extension = self._session._call_method(vim_util, 'find_extension',
constants.EXTENSION_KEY)
if os_extension is None:
LOG.debug('Extension does not exist. Registering type %s.',
constants.EXTENSION_TYPE_INSTANCE)
self._session._call_method(vim_util, 'register_extension',
constants.EXTENSION_KEY,
constants.EXTENSION_TYPE_INSTANCE)
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup after instance being destroyed by Hypervisor."""
pass
def resume_state_on_host_boot(self, context, instance, network_info,
block_device_info=None):
"""resume guest state when a host is booted."""
# Check if the instance is running already and avoid doing
# anything if it is.
state = vm_util.get_vm_state(self._session, instance)
ignored_states = ['poweredon', 'suspended']
if state.lower() in ignored_states:
return
# Instance is not up and could be in an unknown state.
# Be as absolute as possible about getting it back into
# a known and running state.
self.reboot(context, instance, network_info, 'hard',
block_device_info)
def list_instance_uuids(self):
"""List VM instance UUIDs."""
return self._vmops.list_instances()
def list_instances(self):
"""List VM instances from the single compute node."""
return self._vmops.list_instances()
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
"""Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
"""
# TODO(PhilDay): Add support for timeout (clean shutdown)
return self._vmops.migrate_disk_and_power_off(context, instance,
dest, flavor)
def confirm_migration(self, context, migration, instance, network_info):
"""Confirms a resize, destroying the source VM."""
self._vmops.confirm_migration(migration, instance, network_info)
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
"""Finish reverting a resize, powering back on the instance."""
self._vmops.finish_revert_migration(context, instance, network_info,
block_device_info, power_on)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
"""Completes a resize, turning on the migrated instance."""
self._vmops.finish_migration(context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info, power_on)
def get_instance_disk_info(self, instance, block_device_info=None):
pass
def get_vnc_console(self, context, instance):
"""Return link to instance's VNC console using vCenter logic."""
# vCenter does not actually run the VNC service
# itself. You must talk to the VNC host underneath vCenter.
return self._vmops.get_vnc_console(instance)
def get_mks_console(self, context, instance):
return self._vmops.get_mks_console(instance)
def _get_vcenter_uuid(self):
"""Retrieves the vCenter UUID."""
about = self._session._call_method(nova_vim_util, 'get_about_info')
return about.instanceUuid
def _create_nodename(self, mo_id):
"""Return a nodename which uniquely describes a cluster.
The name will be of the form:
<mo id>.<vcenter uuid>
e.g.
domain-26.9d51f082-58a4-4449-beed-6fd205a5726b
"""
return '%s.%s' % (mo_id, self._vcenter_uuid)
def _get_available_resources(self, host_stats):
return {'vcpus': host_stats['vcpus'],
'memory_mb': host_stats['host_memory_total'],
'local_gb': host_stats['disk_total'],
'vcpus_used': 0,
'memory_mb_used': host_stats['host_memory_total'] -
host_stats['host_memory_free'],
'local_gb_used': host_stats['disk_used'],
'hypervisor_type': host_stats['hypervisor_type'],
'hypervisor_version': host_stats['hypervisor_version'],
'hypervisor_hostname': host_stats['hypervisor_hostname'],
# The VMWare driver manages multiple hosts, so there are
# likely many different CPU models in use. As such it is
# impossible to provide any meaningful info on the CPU
# model of the "host"
'cpu_info': None,
'supported_instances': host_stats['supported_instances'],
'numa_topology': None,
}
def get_available_resource(self, nodename):
"""Retrieve resource info.
This method is called when nova-compute launches, and
as part of a periodic task.
:returns: dictionary describing resources
"""
host_stats = self._vc_state.get_host_stats(refresh=True)
stats_dict = self._get_available_resources(host_stats)
return stats_dict
def get_available_nodes(self, refresh=False):
"""Returns nodenames of all nodes managed by the compute service.
This driver supports only one compute node.
"""
return [self._nodename]
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
"""Create VM instance."""
self._vmops.spawn(context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info)
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
"""Attach volume storage to VM instance."""
return self._volumeops.attach_volume(connection_info, instance)
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
"""Detach volume storage to VM instance."""
return self._volumeops.detach_volume(connection_info, instance)
def get_volume_connector(self, instance):
"""Return volume connector information."""
return self._volumeops.get_volume_connector(instance)
def get_host_ip_addr(self):
"""Returns the IP address of the vCenter host."""
return CONF.vmware.host_ip
def snapshot(self, context, instance, image_id, update_task_state):
"""Create snapshot from a running VM instance."""
self._vmops.snapshot(context, instance, image_id, update_task_state)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot VM instance."""
self._vmops.reboot(instance, network_info, reboot_type)
def _detach_instance_volumes(self, instance, block_device_info):
# We need to detach attached volumes
block_device_mapping = driver.block_device_info_get_mapping(
block_device_info)
if block_device_mapping:
# Certain disk types, for example 'IDE' do not support hot
# plugging. Hence we need to power off the instance and update
# the instance state.
self._vmops.power_off(instance)
for disk in block_device_mapping:
connection_info = disk['connection_info']
try:
self.detach_volume(connection_info, instance,
disk.get('device_name'))
except exception.DiskNotFound:
LOG.warning(_LW('The volume %s does not exist!'),
disk.get('device_name'),
instance=instance)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to detach %(device_name)s. "
"Exception: %(exc)s"),
{'device_name': disk.get('device_name'),
'exc': e},
instance=instance)
def destroy(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None):
"""Destroy VM instance."""
# Destroy gets triggered when Resource Claim in resource_tracker
# is not successful. When resource claim is not successful,
# node is not set in instance. Perform destroy only if node is set
if not instance.node:
return
# A resize uses the same instance on the VC. We do not delete that
# VM in the event of a revert
if instance.task_state == task_states.RESIZE_REVERTING:
return
# We need to detach attached volumes
if block_device_info is not None:
try:
self._detach_instance_volumes(instance, block_device_info)
except vexc.ManagedObjectNotFoundException:
                LOG.warning(_LW('Instance does not exist. Proceeding to '
                                'delete instance properties on datastore'),
instance=instance)
self._vmops.destroy(instance, destroy_disks)
def pause(self, instance):
"""Pause VM instance."""
self._vmops.pause(instance)
def unpause(self, instance):
"""Unpause paused VM instance."""
self._vmops.unpause(instance)
def suspend(self, context, instance):
"""Suspend the specified instance."""
self._vmops.suspend(instance)
def resume(self, context, instance, network_info, block_device_info=None):
"""Resume the suspended VM instance."""
self._vmops.resume(instance)
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance."""
self._vmops.rescue(context, instance, network_info, image_meta)
def unrescue(self, instance, network_info):
"""Unrescue the specified instance."""
self._vmops.unrescue(instance)
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance."""
# TODO(PhilDay): Add support for timeout (clean shutdown)
self._vmops.power_off(instance)
def power_on(self, context, instance, network_info,
block_device_info=None):
"""Power on the specified instance."""
self._vmops.power_on(instance)
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances."""
self._vmops.poll_rebooting_instances(timeout, instances)
def get_info(self, instance):
"""Return info about the VM instance."""
return self._vmops.get_info(instance)
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
return self._vmops.get_diagnostics(instance)
def get_instance_diagnostics(self, instance):
"""Return data about VM diagnostics."""
return self._vmops.get_instance_diagnostics(instance)
def host_power_action(self, action):
"""Host operations not supported by VC driver.
This needs to override the ESX driver implementation.
"""
raise NotImplementedError()
def host_maintenance_mode(self, host, mode):
"""Host operations not supported by VC driver.
This needs to override the ESX driver implementation.
"""
raise NotImplementedError()
def set_host_enabled(self, enabled):
"""Host operations not supported by VC driver.
This needs to override the ESX driver implementation.
"""
raise NotImplementedError()
def get_host_uptime(self):
"""Host uptime operation not supported by VC driver."""
msg = _("Multiple hosts may be managed by the VMWare "
"vCenter driver; therefore we do not return "
"uptime for just one host.")
raise NotImplementedError(msg)
def inject_network_info(self, instance, nw_info):
"""inject network info for specified instance."""
self._vmops.inject_network_info(instance, nw_info)
def manage_image_cache(self, context, all_instances):
"""Manage the local cache of images."""
self._vmops.manage_image_cache(context, all_instances)
def instance_exists(self, instance):
"""Efficient override of base instance_exists method."""
return self._vmops.instance_exists(instance)
def attach_interface(self, context, instance, image_meta, vif):
"""Attach an interface to the instance."""
self._vmops.attach_interface(context, instance, image_meta, vif)
def detach_interface(self, context, instance, vif):
"""Detach an interface from the instance."""
self._vmops.detach_interface(context, instance, vif)
class VMwareAPISession(api.VMwareAPISession):
"""Sets up a session with the VC/ESX host and handles all
the calls made to the host.
"""
def __init__(self, host_ip=CONF.vmware.host_ip,
host_port=CONF.vmware.host_port,
username=CONF.vmware.host_username,
password=CONF.vmware.host_password,
retry_count=CONF.vmware.api_retry_count,
scheme="https",
cacert=CONF.vmware.ca_file,
insecure=CONF.vmware.insecure):
super(VMwareAPISession, self).__init__(
host=host_ip,
port=host_port,
server_username=username,
server_password=password,
api_retry_count=retry_count,
task_poll_interval=CONF.vmware.task_poll_interval,
scheme=scheme,
create_session=True,
cacert=cacert,
insecure=insecure)
def _is_vim_object(self, module):
"""Check if the module is a VIM Object instance."""
return isinstance(module, vim.Vim)
def _call_method(self, module, method, *args, **kwargs):
"""Calls a method within the module specified with
args provided.
"""
if not self._is_vim_object(module):
return self.invoke_api(module, method, self.vim, *args, **kwargs)
else:
return self.invoke_api(module, method, *args, **kwargs)
def _wait_for_task(self, task_ref):
"""Return a Deferred that will give the result of the given task.
The task is polled until it completes.
"""
return self.wait_for_task(task_ref)
# -*- coding: utf-8 -*-
"""Test object privileges
The majority of other tests exclude access privileges. These
explicitly request it. In addition, the roles 'user1' and 'user2'
are created if they don't exist.
"""
from pyrseas.testutils import DatabaseToMapTestCase
from pyrseas.testutils import InputMapToSqlTestCase
CREATE_TABLE = "CREATE TABLE t1 (c1 integer, c2 text)"
SOURCE1 = "SELECT 'dummy'::text"
CREATE_FUNC = "CREATE FUNCTION f1() RETURNS text LANGUAGE sql IMMUTABLE AS " \
"$_$%s$_$" % SOURCE1
CREATE_FDW = "CREATE FOREIGN DATA WRAPPER fdw1"
CREATE_FS = "CREATE SERVER fs1 FOREIGN DATA WRAPPER fdw1"
GRANT_SELECT = "GRANT SELECT ON TABLE t1 TO %s"
GRANT_INSUPD = "GRANT INSERT, UPDATE ON TABLE t1 TO %s"
def check_extra_users(db):
"Check existence of extra test users"
for user in ['user1', 'user2']:
row = db.fetchone("SELECT 1 FROM pg_roles WHERE rolname = %s", (user,))
if row is None:
db.execute_commit("CREATE ROLE %s" % user)
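# A typical round trip exercised below (a sketch; the to_map()/to_sql()
# helpers come from pyrseas.testutils):
#
#     dbmap = self.to_map([CREATE_TABLE, GRANT_SELECT % 'user1'],
#                         no_privs=False)       # database -> YAML-style map
#     sql = self.to_sql(inmap, [CREATE_TABLE])  # input map -> GRANT/REVOKE SQL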
class PrivilegeToMapTestCase(DatabaseToMapTestCase):
"""Test mapping of object privilege information"""
def setUp(self):
        super(PrivilegeToMapTestCase, self).setUp()
check_extra_users(self.db)
def test_map_schema(self):
"Map a schema with some GRANTs"
stmts = ["CREATE SCHEMA s1", "GRANT USAGE ON SCHEMA s1 TO PUBLIC",
"GRANT CREATE, USAGE ON SCHEMA s1 TO user1"]
dbmap = self.to_map(stmts, no_privs=False)
expmap = {'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['usage']}, {'user1': ['all']}]}
assert dbmap['schema s1'] == expmap
def test_map_table(self):
"Map a table with various GRANTs"
stmts = [CREATE_TABLE, GRANT_SELECT % 'PUBLIC', GRANT_INSUPD % 'user1',
"GRANT REFERENCES, TRIGGER ON t1 TO user2 WITH GRANT OPTION"]
dbmap = self.to_map(stmts, no_privs=False)
expmap = {'columns': [{'c1': {'type': 'integer'}},
{'c2': {'type': 'text'}}],
'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['select']},
{'user1': ['insert', 'update']},
{'user2': [{'trigger': {'grantable': True}},
{'references': {
'grantable': True}}]}]}
assert dbmap['schema public']['table t1'] == expmap
def test_map_column(self):
"Map a table with GRANTs on column"
self.maxDiff = None
stmts = [CREATE_TABLE, GRANT_SELECT % 'PUBLIC',
"GRANT INSERT (c1, c2) ON t1 TO user1",
"GRANT INSERT (c2), UPDATE (c2) ON t1 TO user2"]
dbmap = self.to_map(stmts, no_privs=False)
expmap = {'columns': [
{'c1': {'type': 'integer', 'privileges': [{'user1': ['insert']}]}},
{'c2': {'type': 'text', 'privileges': [
{'user1': ['insert']}, {'user2': ['insert', 'update']}]}}],
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']}]}
assert dbmap['schema public']['table t1'] == expmap
def test_map_sequence(self):
"Map a sequence with various GRANTs"
stmts = ["CREATE SEQUENCE seq1",
"GRANT SELECT ON SEQUENCE seq1 TO PUBLIC",
"GRANT USAGE, UPDATE ON SEQUENCE seq1 TO user1"]
dbmap = self.to_map(stmts, no_privs=False)
expmap = {'start_value': 1, 'increment_by': 1, 'max_value': None,
'min_value': None, 'cache_value': 1,
'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['select']},
{'user1': ['usage', 'update']}]}
assert dbmap['schema public']['sequence seq1'] == expmap
def test_map_view(self):
"Map a view with various GRANTs"
stmts = ["CREATE VIEW v1 AS SELECT now()::date AS today",
"GRANT SELECT ON v1 TO PUBLIC",
"GRANT REFERENCES ON v1 TO user1"]
dbmap = self.to_map(stmts, no_privs=False)
expmap = {'definition': " SELECT now()::date AS today;",
'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['select']},
{'user1': ['references']}]}
assert dbmap['schema public']['view v1'] == expmap
def test_map_function(self):
"Map a function with a GRANT and REVOKE from PUBLIC"
stmts = [CREATE_FUNC, "REVOKE ALL ON FUNCTION f1() FROM PUBLIC",
"GRANT EXECUTE ON FUNCTION f1() TO user1"]
dbmap = self.to_map(stmts, no_privs=False)
expmap = {'language': 'sql', 'returns': 'text',
'source': SOURCE1, 'volatility': 'immutable',
'privileges': [{self.db.user: ['execute']},
{'user1': ['execute']}]}
assert dbmap['schema public']['function f1()'] == expmap
def test_map_language(self):
"Map a language but REVOKE default privilege"
if self.db.version >= 90100:
self.skipTest('Only available before PG 9.1')
stmts = ["DROP LANGUAGE IF EXISTS plperl CASCADE",
"CREATE LANGUAGE plperl",
"REVOKE USAGE ON LANGUAGE plperl FROM PUBLIC"]
dbmap = self.to_map(stmts, no_privs=False)
self.db.execute_commit("DROP LANGUAGE plperl")
expmap = {'trusted': True, 'privileges': [{self.db.user: ['usage']}]}
assert dbmap['language plperl'] == expmap
def test_map_fd_wrapper(self):
"Map a foreign data wrapper with a GRANT"
stmts = [CREATE_FDW,
"GRANT USAGE ON FOREIGN DATA WRAPPER fdw1 TO PUBLIC"]
dbmap = self.to_map(stmts, no_privs=False, superuser=True)
expmap = {'privileges': [{self.db.user: ['usage']},
{'PUBLIC': ['usage']}]}
assert dbmap['foreign data wrapper fdw1'] == expmap
def test_map_server(self):
"Map a foreign server with a GRANT"
stmts = [CREATE_FDW, CREATE_FS,
"GRANT USAGE ON FOREIGN SERVER fs1 TO user1"]
dbmap = self.to_map(stmts, no_privs=False, superuser=True)
expmap = {'privileges': [{self.db.user: ['usage']},
{'user1': ['usage']}]}
assert dbmap['foreign data wrapper fdw1']['server fs1'] == expmap
def test_map_foreign_table(self):
"Map a foreign table with various GRANTs"
if self.db.version < 90100:
self.skipTest('Only available on PG 9.1')
stmts = [CREATE_FDW, CREATE_FS,
"CREATE FOREIGN TABLE ft1 (c1 integer, c2 text) SERVER fs1",
"GRANT SELECT ON ft1 TO PUBLIC",
"GRANT INSERT, UPDATE ON ft1 TO user1"]
dbmap = self.to_map(stmts, no_privs=False, superuser=True)
expmap = {'columns': [{'c1': {'type': 'integer'}},
{'c2': {'type': 'text'}}], 'server': 'fs1',
'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['select']},
{'user1': ['insert', 'update']}]}
assert dbmap['schema public']['foreign table ft1'] == expmap
class PrivilegeToSqlTestCase(InputMapToSqlTestCase):
"""Test SQL generation of privilege information (GRANTs)"""
def setUp(self):
        super(PrivilegeToSqlTestCase, self).setUp()
check_extra_users(self.db)
def test_create_schema(self):
"Create a schema with various privileges"
inmap = self.std_map()
inmap.update({'schema s1': {
'owner': self.db.user, 'privileges': [{
self.db.user: ['all']}, {'PUBLIC': ['usage', 'create']}]}})
sql = self.to_sql(inmap)
# sql[0] = CREATE SCHEMA
# sql[1] = ALTER SCHEMA OWNER
assert sql[2] == "GRANT ALL ON SCHEMA s1 TO %s" % self.db.user
assert sql[3] == "GRANT ALL ON SCHEMA s1 TO PUBLIC"
def test_schema_new_grant(self):
"Grant privileges on an existing schema"
inmap = self.std_map()
inmap.update({'schema s1': {
'owner': self.db.user, 'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['create']}]}})
sql = sorted(self.to_sql(inmap, ["CREATE SCHEMA s1"]))
assert len(sql) == 2
assert sql[0] == "GRANT ALL ON SCHEMA s1 TO %s" % self.db.user
assert sql[1] == "GRANT CREATE ON SCHEMA s1 TO PUBLIC"
def test_create_table(self):
"Create a table with various privileges"
inmap = self.std_map()
inmap['schema public'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}}],
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']},
{'user1': ['insert', 'update']},
{'user2': [{'trigger': {'grantable': True}},
{'references': {'grantable': True}}]}]}})
sql = self.to_sql(inmap)
# sql[0] = CREATE TABLE
# sql[1] = ALTER TABLE OWNER
assert sql[2] == "GRANT ALL ON TABLE t1 TO %s" % self.db.user
assert sql[3] == GRANT_SELECT % 'PUBLIC'
assert sql[4] == GRANT_INSUPD % 'user1'
assert sql[5] == "GRANT TRIGGER, REFERENCES ON TABLE t1 " \
"TO user2 WITH GRANT OPTION"
def test_create_column_grants(self):
"Create a table with colum-level privileges"
inmap = self.std_map()
inmap['schema public'].update({'table t1': {
'columns': [{'c1': {'type': 'integer', 'privileges': [{'user1': [
'insert']}]}}, {'c2': {'type': 'text', 'privileges': [{'user1': [
'insert']}, {'user2': ['insert', 'update']}]}}],
'owner': self.db.user, 'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['select']}]}})
sql = self.to_sql(inmap)
assert len(sql) == 7
# sql[0] = CREATE TABLE
# sql[1] = ALTER TABLE OWNER
assert sql[2] == "GRANT ALL ON TABLE t1 TO %s" % self.db.user
assert sql[3] == GRANT_SELECT % 'PUBLIC'
assert sql[4] == "GRANT INSERT (c1) ON TABLE t1 TO user1"
assert sql[5] == "GRANT INSERT (c2) ON TABLE t1 TO user1"
assert sql[6] == "GRANT INSERT (c2), UPDATE (c2) ON TABLE t1 TO user2"
def test_table_new_grant(self):
"Grant select privileges on an existing table"
inmap = self.std_map()
inmap['schema public'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}}],
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'user1': ['select']}]}})
sql = self.to_sql(inmap, [CREATE_TABLE])
assert len(sql) == 2
sql = sorted(sql)
assert sql[0] == "GRANT ALL ON TABLE t1 TO %s" % self.db.user
assert sql[1] == GRANT_SELECT % 'user1'
def test_table_change_grant(self):
"Grant select privileges on an existing table"
inmap = self.std_map()
inmap['schema public'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}}],
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']},
{'user1': ['insert', 'update']}]}})
sql = self.to_sql(inmap, [CREATE_TABLE, GRANT_SELECT % 'user1'])
assert len(sql) == 3
assert sorted(sql) == [GRANT_INSUPD % 'user1', GRANT_SELECT % 'PUBLIC',
"REVOKE SELECT ON TABLE t1 FROM user1"]
def test_column_change_grants(self):
"Change existing colum-level privileges"
inmap = self.std_map()
inmap['schema public'].update(
{'table t1': {'columns': [{'c1': {
'type': 'integer', 'privileges': [{
'user1': ['insert']}, {'user2': ['insert', 'update']}]}},
{'c2': {'type': 'text', 'privileges': [{'user1': ['insert']}]}}],
'owner': self.db.user, 'privileges': [{self.db.user: ['all']},
{'PUBLIC': ['select']}]}})
stmts = [CREATE_TABLE, GRANT_SELECT % 'PUBLIC',
"GRANT INSERT (c1, c2) ON t1 TO user1",
"GRANT INSERT (c2), UPDATE (c2) ON t1 TO user2"]
sql = self.to_sql(inmap, stmts)
assert len(sql) == 2
assert sql[0] == "GRANT INSERT (c1), UPDATE (c1) ON TABLE t1 TO user2"
assert sql[1] == "REVOKE INSERT (c2), UPDATE (c2) ON TABLE t1 " \
"FROM user2"
def test_table_revoke_all(self):
"Revoke all privileges on an existing table"
inmap = self.std_map()
inmap['schema public'].update({'table t1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}}],
'owner': self.db.user}})
stmts = [CREATE_TABLE, GRANT_SELECT % 'PUBLIC', GRANT_INSUPD % 'user1']
sql = sorted(self.to_sql(inmap, stmts))
assert len(sql) == 3
assert sql[0] == "REVOKE ALL ON TABLE t1 FROM %s" % self.db.user
assert sql[1] == "REVOKE INSERT, UPDATE ON TABLE t1 FROM user1"
assert sql[2] == "REVOKE SELECT ON TABLE t1 FROM PUBLIC"
def test_create_sequence(self):
"Create a sequence with some privileges"
inmap = self.std_map()
inmap['schema public'].update({'sequence seq1': {
'start_value': 1, 'increment_by': 1, 'max_value': None,
'min_value': None, 'cache_value': 1, 'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']}]}})
sql = self.to_sql(inmap)
# sql[0] = CREATE SEQUENCE
# sql[1] = ALTER SEQUENCE OWNER
assert sql[2] == "GRANT ALL ON SEQUENCE seq1 TO %s" % self.db.user
assert sql[3] == "GRANT SELECT ON SEQUENCE seq1 TO PUBLIC"
def test_sequence_new_grant(self):
"Grant privileges on an existing sequence"
inmap = self.std_map()
inmap['schema public'].update({'sequence seq1': {
'start_value': 1, 'increment_by': 1, 'max_value': None,
'min_value': None, 'cache_value': 1, 'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']}]}})
sql = sorted(self.to_sql(inmap, ["CREATE SEQUENCE seq1"]))
assert len(sql) == 2
assert sql[0] == "GRANT ALL ON SEQUENCE seq1 TO %s" % self.db.user
assert sql[1] == "GRANT SELECT ON SEQUENCE seq1 TO PUBLIC"
def test_create_view(self):
"Create a view with some privileges"
inmap = self.std_map()
inmap['schema public'].update({'view v1': {
'definition': " SELECT now()::date AS today;",
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'user1': ['select']}]}})
sql = self.to_sql(inmap)
# sql[0] = CREATE VIEW
# sql[1] = ALTER VIEW OWNER
assert sql[2] == "GRANT ALL ON TABLE v1 TO %s" % self.db.user
assert sql[3] == "GRANT SELECT ON TABLE v1 TO user1"
def test_view_new_grant(self):
"Grant privileges on an existing view"
inmap = self.std_map()
inmap['schema public'].update({'view v1': {
'definition': " SELECT now()::date AS today;",
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'user1': ['select']}]}})
sql = sorted(self.to_sql(inmap, ["CREATE VIEW v1 AS "
"SELECT now()::date AS today"]))
assert len(sql) == 2
assert sql[0] == "GRANT ALL ON TABLE v1 TO %s" % self.db.user
assert sql[1] == "GRANT SELECT ON TABLE v1 TO user1"
def test_create_function(self):
"Create a function with some privileges"
inmap = self.std_map()
inmap['schema public'].update({'function f1()': {
'language': 'sql', 'returns': 'text', 'source': SOURCE1,
'volatility': 'immutable', 'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['execute']}]}})
sql = self.to_sql(inmap)
# sql[0] = SET check_function_bodies
# sql[1] = CREATE FUNCTION
# sql[2] = ALTER FUNCTION OWNER
assert sql[3] == "GRANT EXECUTE ON FUNCTION f1() TO %s" % self.db.user
assert sql[4] == "GRANT EXECUTE ON FUNCTION f1() TO PUBLIC"
def test_function_new_grant(self):
"Grant privileges on an existing function"
inmap = self.std_map()
inmap['schema public'].update({'function f1()': {
'language': 'sql', 'returns': 'text', 'source': SOURCE1,
'volatility': 'immutable', 'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['execute']}]}})
sql = self.to_sql(inmap, [CREATE_FUNC])
assert len(sql) == 2
sql = sorted(sql)
# assumes self.db.user > PUBLIC
assert sql[0] == "GRANT EXECUTE ON FUNCTION f1() TO PUBLIC"
assert sql[1] == "GRANT EXECUTE ON FUNCTION f1() TO %s" % self.db.user
def test_create_fd_wrapper(self):
"Create a foreign data wrapper with some privileges"
inmap = self.std_map()
inmap.update({'foreign data wrapper fdw1': {
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['usage']}]}})
sql = self.to_sql(inmap)
# sql[0] = CREATE FDW
# sql[1] = ALTER FDW OWNER
assert sql[2] == "GRANT USAGE ON FOREIGN DATA WRAPPER fdw1 " \
"TO %s" % self.db.user
assert sql[3] == "GRANT USAGE ON FOREIGN DATA WRAPPER fdw1 TO PUBLIC"
def test_fd_wrapper_new_grant(self):
"Grant privileges on an existing foreign data wrapper"
inmap = self.std_map()
inmap.update({'foreign data wrapper fdw1': {
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['usage']}]}})
sql = sorted(self.to_sql(inmap, [CREATE_FDW], superuser=True))
assert len(sql) == 2
# assumes self.db.user > PUBLIC
assert sql[0] == "GRANT USAGE ON FOREIGN DATA WRAPPER fdw1 TO PUBLIC"
assert sql[1] == "GRANT USAGE ON FOREIGN DATA WRAPPER fdw1 " \
"TO %s" % self.db.user
def test_create_server(self):
"Create a foreign server with some privileges"
inmap = self.std_map()
inmap.update({'foreign data wrapper fdw1': {'server fs1': {
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'user2': ['usage']}]}}})
sql = self.to_sql(inmap, [CREATE_FDW], superuser=True)
# sql[0] = CREATE SERVER
# sql[1] = ALTER SERVER OWNER
assert sql[2] == "GRANT USAGE ON FOREIGN SERVER fs1 TO %s" % \
self.db.user
assert sql[3] == "GRANT USAGE ON FOREIGN SERVER fs1 TO user2"
def test_server_new_grant(self):
"Grant privileges on an existing foreign server"
inmap = self.std_map()
inmap.update({'foreign data wrapper fdw1': {'server fs1': {
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'user2': ['usage']}]}}})
sql = sorted(self.to_sql(inmap, [CREATE_FDW, CREATE_FS],
superuser=True))
assert len(sql) == 2
assert sql[0] == "GRANT USAGE ON FOREIGN SERVER fs1 TO %s" % \
self.db.user
assert sql[1] == "GRANT USAGE ON FOREIGN SERVER fs1 TO user2"
def test_create_foreign_table(self):
"Create a foreign table with some privileges"
if self.db.version < 90100:
self.skipTest('Only available on PG 9.1')
inmap = self.std_map()
inmap.update({'foreign data wrapper fdw1': {'server fs1': {}}})
inmap['schema public'].update({'foreign table ft1': {
'columns': [{'c1': {'type': 'integer'}},
{'c2': {'type': 'text'}}], 'server': 'fs1',
'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']},
{'user1': ['insert', 'update']}]}})
sql = self.to_sql(inmap, [CREATE_FDW, CREATE_FS], superuser=True)
# sql[0] = CREATE TABLE
# sql[1] = ALTER TABLE OWNER
assert sql[2] == "GRANT ALL ON TABLE ft1 TO %s" % self.db.user
assert sql[3] == "GRANT SELECT ON TABLE ft1 TO PUBLIC"
assert sql[4] == "GRANT INSERT, UPDATE ON TABLE ft1 TO user1"
def test_foreign_table_new_grant(self):
"Grant privileges on an existing foreign table"
if self.db.version < 90100:
self.skipTest('Only available on PG 9.1')
inmap = self.std_map()
inmap.update({'foreign data wrapper fdw1': {'server fs1': {}}})
inmap['schema public'].update({'foreign table ft1': {
'columns': [{'c1': {'type': 'integer'}}, {'c2': {'type': 'text'}}],
'server': 'fs1', 'owner': self.db.user,
'privileges': [{self.db.user: ['all']}, {'PUBLIC': ['select']},
{'user1': ['insert', 'update']}]}})
sql = sorted(self.to_sql(inmap, [
CREATE_FDW, CREATE_FS,
"CREATE FOREIGN TABLE ft1 (c1 integer, c2 text) SERVER fs1"],
superuser=True))
assert len(sql) == 3
assert sql[0] == "GRANT ALL ON TABLE ft1 TO %s" % self.db.user
assert sql[1] == "GRANT INSERT, UPDATE ON TABLE ft1 TO user1"
assert sql[2] == "GRANT SELECT ON TABLE ft1 TO PUBLIC"
#!/usr/bin/env python
import sys
import math
barwidth=15
barchars=" -=#"
class StatusWrapper:
def __init__(self, fd):
self.displayed=""
self.fd = fd
def write(self, s):
if self.displayed== "":
self.fd.write(s)
else:
old=self.displayed
self.status("")
self.write(s)
self.status(old)
def status(self, m):
if self.displayed!=m:
sys.__stderr__.write("\r"+m+"".ljust(len(self.displayed)-len(m)))
if m == "":
sys.__stderr__.write("\r")
self.displayed=m
def has_displayed(self):
return self.displayed!=""
def flush(self):
self.fd.flush()
sys.stderr = StatusWrapper(sys.stderr)
setstatusline = lambda s: sys.stderr.status(s)
#setstatusline = lambda s: None
hasdisplayed = sys.stderr.has_displayed
currentline = lambda: 0
replaceline = None
def prettybar(pct):
    p = int(math.floor(pct*barwidth*len(barchars)/100.0))
    box = p // len(barchars)   # number of completely filled cells
    rem = p % len(barchars)    # index of the partial-cell character
    s = barchars[-1]*box
    if box < barwidth:         # guard against overflow at pct == 100
        s += barchars[rem]
    s += barchars[0]*(barwidth-1-box)
    return "[" + s + "]"
class Progress:
ticks=0
parent=None
maxRemaining=-1
curRemaining=0
nextRemaining=0
curMsg=""
def __init__(self, parent=None):
self.parent=parent
def hasParent(self):
return self.parent is not None and self.parent.maxRemaining>=0
def remaining(self, n, nx=None):
n=float(n)
self.maxRemaining=max(self.maxRemaining, n)
self.curRemaining=n
if nx is None:
self.nextRemaining=max(0,n-1.0)
else:
self.nextRemaining=nx
self.update()
def status(self, m):
self.curMsg=m
self.update()
def startPercent(self):
if not self.hasParent():
return 0.0
else:
return self.parent.percent()
def endPercent(self):
if not self.hasParent():
return 100.0
else:
return self.parent.nextPercent()
def scale(self):
return self.endPercent()-self.startPercent()
def localPercent(self):
if self.maxRemaining>0:
return 100.0*(1-self.curRemaining/self.maxRemaining)
else:
return 0.0
def debug(self):
if self.hasParent():
rv=self.parent.debug()
else:
rv=tuple()
if self.maxRemaining>0:
return rv+("%.4f:%.4f"%(self.percent(),self.nextPercent()),)
else:
return rv+('-',)
def percent(self):
if self.maxRemaining>0:
return self.startPercent()+(self.scale()*(1-self.curRemaining/self.maxRemaining))
else:
return self.startPercent()
def nextPercent(self):
if self.maxRemaining>0:
return self.startPercent()+(self.scale()*(1-self.nextRemaining/self.maxRemaining))
else:
return self.endPercent()
def progressStr(self):
if self.parent:
m = self.parent.progressStr()
else:
m = ""
if self.maxRemaining>1:
m += "[%d/%d]" % (self.maxRemaining-self.curRemaining, self.maxRemaining)
return m
def getStatus(self):
        if callable(self.curMsg):
return self.curMsg()
if len(self.curMsg)>0:
return self.curMsg
if self.parent is not None:
return self.parent.getStatus()
return ""
def update(self):
#p = self.percent()
#if p>0:
# m = prettybar(p)
#else:
# m = ""
m = self.progressStr()
m += " "
m += self.getStatus()
if self.hasParent() and self.maxRemaining>=0:
m+=" (%.0f%%)"%self.localPercent()
setstatusline(m)
current=Progress()
remaining=lambda n, nx=None: current.remaining(n,nx)
status=lambda m: current.status(m)
clear=lambda : setstatusline("")
update=lambda : current.update()
class OutputWrapper:
def __init__(self, fd):
self.fd = fd
def write(self, s):
if len(s):
clear()
self.fd.write(s)
if s[-1] == '\n':
update()
def flush(self):
self.fd.flush()
sys.stdout = OutputWrapper(sys.stdout)
def push():
global current
clear()
current=Progress(current)
current.update()
def pop():
global current
clear()
current=current.parent
if hasdisplayed():
current.update()
def subtask(n, fn):
if n>0:
remaining(n)
push()
try:
return fn()
finally:
pop()
def remainingTicks(n):
current.ticks=n
remaining(n)
def tick(n=1):
current.ticks-=n
remaining(current.ticks)
assert current.ticks >= 0
def untick(n=1):
current.ticks+=n
remaining(current.ticks)
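# Typical tick-based usage (a sketch; 'jobs' and 'run' are placeholders):
#
#     remainingTicks(len(jobs))
#     for job in jobs:
#         run(job)
#         tick()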
def curseswrapper(fn):
clear()
import curses
#redirect a stream to curses
class CursesPrinter:
def __init__(self, window):
self.window = window
self.log = [""]
def write(self, s):
lines=str(s).split('\n')
self.log[-1] += lines[0]
for l in lines[1:]:
self.log.append(l)
self.window.erase()
h,w=self.window.getmaxyx()
cnt=map(lambda s: s[:w], self.log[-h:])
for i in xrange(len(cnt)):
self.window.insstr(i,0,cnt[i])
self.window.refresh()
        def dump(self):
            for line in self.log:
                sys.__stdout__.write(line + "\n")
def replace(self, s = ""):
self.log = [""]
self.write(s)
def replaceline(self, n, s):
while len(self.log)<n:
self.log.append("")
if self.log[n] == s:
return
self.log[n] = s
self.flush()
def flush(self):
self.write("")
stdscr = curses.initscr()
curses.curs_set(0)
h,w = stdscr.getmaxyx()
log = CursesPrinter(stdscr.derwin(h-1, w, 0, 0))
status = CursesPrinter(stdscr.derwin(1, w, h-1, 0))
global setstatusline
global replaceline
oldstdout = sys.stdout
oldstderr = sys.stderr
oldstatusline = setstatusline
oldreplaceline = replaceline
sys.stdout = log
sys.stderr = log
setstatusline = lambda s: status.replace(s)
replaceline = lambda n, s: log.replaceline(n, s)
def cleanup():
status.replace()
curses.endwin()
sys.stdout = oldstdout
sys.stderr = oldstderr
setstatusline = oldstatusline
replaceline = oldreplaceline
log.dump()
try:
fn()
except:
cleanup()
raise
cleanup()
def pause(m):
clear()
raw_input(m)
update()
def test():
import time
for i in xrange(100):
print "hello",i
remaining(100-i)
status("foo foo foo foo "+str(i))
time.sleep(0.1)
def disable():
    global setstatusline
    # stdout is wrapped in OutputWrapper, stderr in StatusWrapper (see above)
    while type(sys.stdout) is OutputWrapper:
        sys.stdout = sys.stdout.fd
    while type(sys.stderr) is StatusWrapper:
        sys.stderr = sys.stderr.fd
    setstatusline = lambda s: None
class Scope:
def __init__(self, msg=None, cnt=None):
self.msg=msg
self.cnt=cnt
def __enter__(self):
push()
if self.msg:
status(self.msg)
if self.cnt:
remainingTicks(self.cnt)
return self
def __exit__(self, type, value, traceback):
pop()
def __call__(self):
tick()
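# Usage sketch for Scope ('items' and 'handle' are placeholders); calling the
# scope object consumes one tick:
#
#     with Scope("processing", cnt=len(items)) as step:
#         for item in items:
#             handle(item)
#             step()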
if __name__ == "__main__":
test()
curseswrapper(test)
# Authors: Mainak Jas <mainak@neuro.hut.fi>
# Teon Brooks <teon.brooks@gmail.com>
#
# License: BSD (3-clause)
import os
import os.path as op
import glob
import warnings
import shutil
from nose.tools import assert_true, assert_equal, assert_raises
from mne import Epochs, read_events, pick_types, read_evokeds
from mne.io import Raw
from mne.datasets import testing
from mne.report import Report
from mne.utils import (_TempDir, requires_mayavi, requires_nibabel,
requires_PIL, run_tests_if_main, slow_test)
from mne.viz import plot_trans
import matplotlib
matplotlib.use('Agg') # for testing don't use X server
data_dir = testing.data_path(download=False)
subjects_dir = op.join(data_dir, 'subjects')
report_dir = op.join(data_dir, 'MEG', 'sample')
raw_fname = op.join(report_dir, 'sample_audvis_trunc_raw.fif')
event_fname = op.join(report_dir, 'sample_audvis_trunc_raw-eve.fif')
cov_fname = op.join(report_dir, 'sample_audvis_trunc-cov.fif')
fwd_fname = op.join(report_dir, 'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
trans_fname = op.join(report_dir, 'sample_audvis_trunc-trans.fif')
inv_fname = op.join(report_dir,
'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
mri_fname = op.join(subjects_dir, 'sample', 'mri', 'T1.mgz')
base_dir = op.realpath(op.join(op.dirname(__file__), '..', 'io', 'tests',
'data'))
evoked_fname = op.join(base_dir, 'test-ave.fif')
# Set our plotters to test mode
warnings.simplefilter('always') # enable b/c these tests throw warnings
@slow_test
@testing.requires_testing_data
@requires_PIL
def test_render_report():
"""Test rendering -*.fif files for mne report.
"""
tempdir = _TempDir()
raw_fname_new = op.join(tempdir, 'temp_raw.fif')
event_fname_new = op.join(tempdir, 'temp_raw-eve.fif')
cov_fname_new = op.join(tempdir, 'temp_raw-cov.fif')
fwd_fname_new = op.join(tempdir, 'temp_raw-fwd.fif')
inv_fname_new = op.join(tempdir, 'temp_raw-inv.fif')
for a, b in [[raw_fname, raw_fname_new],
[event_fname, event_fname_new],
[cov_fname, cov_fname_new],
[fwd_fname, fwd_fname_new],
[inv_fname, inv_fname_new]]:
shutil.copyfile(a, b)
# create and add -epo.fif and -ave.fif files
epochs_fname = op.join(tempdir, 'temp-epo.fif')
evoked_fname = op.join(tempdir, 'temp-ave.fif')
raw = Raw(raw_fname_new)
picks = pick_types(raw.info, meg='mag', eeg=False) # faster with one type
epochs = Epochs(raw, read_events(event_fname), 1, -0.2, 0.2, picks=picks)
epochs.save(epochs_fname)
epochs.average().save(evoked_fname)
report = Report(info_fname=raw_fname_new, subjects_dir=subjects_dir)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
report.parse_folder(data_path=tempdir, on_error='raise')
assert_true(len(w) >= 1)
# Check correct paths and filenames
fnames = glob.glob(op.join(tempdir, '*.fif'))
for fname in fnames:
assert_true(op.basename(fname) in
[op.basename(x) for x in report.fnames])
assert_true(''.join(report.html).find(op.basename(fname)) != -1)
assert_equal(len(report.fnames), len(fnames))
assert_equal(len(report.html), len(report.fnames))
# Check saving functionality
report.data_path = tempdir
report.save(fname=op.join(tempdir, 'report.html'), open_browser=False)
assert_true(op.isfile(op.join(tempdir, 'report.html')))
assert_equal(len(report.html), len(fnames))
assert_equal(len(report.html), len(report.fnames))
# Check saving same report to new filename
report.save(fname=op.join(tempdir, 'report2.html'), open_browser=False)
assert_true(op.isfile(op.join(tempdir, 'report2.html')))
# Check overwriting file
report.save(fname=op.join(tempdir, 'report.html'), open_browser=False,
overwrite=True)
assert_true(op.isfile(op.join(tempdir, 'report.html')))
# Check pattern matching with multiple patterns
pattern = ['*raw.fif', '*eve.fif']
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
report.parse_folder(data_path=tempdir, pattern=pattern)
assert_true(len(w) >= 1)
    fnames = glob.glob(op.join(tempdir, '*raw.fif')) + \
        glob.glob(op.join(tempdir, '*eve.fif'))
for fname in fnames:
assert_true(op.basename(fname) in
[op.basename(x) for x in report.fnames])
assert_true(''.join(report.html).find(op.basename(fname)) != -1)
@testing.requires_testing_data
@requires_mayavi
@requires_PIL
def test_render_add_sections():
"""Test adding figures/images to section.
"""
from PIL import Image
tempdir = _TempDir()
import matplotlib.pyplot as plt
report = Report(subjects_dir=subjects_dir)
# Check add_figs_to_section functionality
fig = plt.plot([1, 2], [1, 2])[0].figure
report.add_figs_to_section(figs=fig, # test non-list input
captions=['evoked response'], scale=1.2,
image_format='svg')
assert_raises(ValueError, report.add_figs_to_section, figs=[fig, fig],
captions='H')
assert_raises(ValueError, report.add_figs_to_section, figs=fig,
captions=['foo'], scale=0, image_format='svg')
assert_raises(ValueError, report.add_figs_to_section, figs=fig,
captions=['foo'], scale=1e-10, image_format='svg')
# need to recreate because calls above change size
fig = plt.plot([1, 2], [1, 2])[0].figure
# Check add_images_to_section with png and then gif
img_fname = op.join(tempdir, 'testimage.png')
fig.savefig(img_fname)
report.add_images_to_section(fnames=[img_fname],
captions=['evoked response'])
im = Image.open(img_fname)
    img_fname = op.join(tempdir, 'testimage.gif')
im.save(img_fname) # matplotlib does not support gif
report.add_images_to_section(fnames=[img_fname],
captions=['evoked response'])
assert_raises(ValueError, report.add_images_to_section,
fnames=[img_fname, img_fname], captions='H')
assert_raises(ValueError, report.add_images_to_section,
fnames=['foobar.xxx'], captions='H')
evoked = read_evokeds(evoked_fname, condition='Left Auditory',
baseline=(-0.2, 0.0))
fig = plot_trans(evoked.info, trans_fname, subject='sample',
subjects_dir=subjects_dir)
report.add_figs_to_section(figs=fig, # test non-list input
captions='random image', scale=1.2)
@slow_test
@testing.requires_testing_data
@requires_mayavi
@requires_nibabel()
def test_render_mri():
"""Test rendering MRI for mne report.
"""
tempdir = _TempDir()
trans_fname_new = op.join(tempdir, 'temp-trans.fif')
for a, b in [[trans_fname, trans_fname_new]]:
shutil.copyfile(a, b)
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=subjects_dir)
with warnings.catch_warnings(record=True):
warnings.simplefilter('always')
report.parse_folder(data_path=tempdir, mri_decim=30, pattern='*',
n_jobs=2)
report.save(op.join(tempdir, 'report.html'), open_browser=False)
@testing.requires_testing_data
@requires_nibabel()
def test_render_mri_without_bem():
"""Test rendering MRI without BEM for mne report.
"""
tempdir = _TempDir()
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(op.join(tempdir, 'sample', 'mri'))
shutil.copyfile(mri_fname, op.join(tempdir, 'sample', 'mri', 'T1.mgz'))
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=tempdir)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
report.parse_folder(tempdir)
assert_true(len(w) >= 1)
report.save(op.join(tempdir, 'report.html'), open_browser=False)
@testing.requires_testing_data
@requires_nibabel()
def test_add_htmls_to_section():
"""Test adding html str to mne report.
"""
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=subjects_dir)
html = '<b>MNE-Python is AWESOME</b>'
caption, section = 'html', 'html_section'
report.add_htmls_to_section(html, caption, section)
idx = report._sectionlabels.index('report_' + section)
html_compare = report.html[idx]
assert_true(html in html_compare)
def test_add_slider_to_section():
"""Test adding a slider with a series of images to mne report.
"""
tempdir = _TempDir()
from matplotlib import pyplot as plt
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=subjects_dir)
section = 'slider_section'
figs = list()
figs.append(plt.figure())
plt.plot([1, 2, 3])
plt.close('all')
figs.append(plt.figure())
plt.plot([3, 2, 1])
plt.close('all')
report.add_slider_to_section(figs, section=section)
report.save(op.join(tempdir, 'report.html'), open_browser=False)
assert_raises(NotImplementedError, report.add_slider_to_section,
[figs, figs])
assert_raises(ValueError, report.add_slider_to_section, figs, ['wug'])
assert_raises(TypeError, report.add_slider_to_section, figs, 'wug')
def test_validate_input():
report = Report()
items = ['a', 'b', 'c']
captions = ['Letter A', 'Letter B', 'Letter C']
section = 'ABCs'
comments = ['First letter of the alphabet.',
'Second letter of the alphabet',
'Third letter of the alphabet']
assert_raises(ValueError, report._validate_input, items, captions[:-1],
section, comments=None)
assert_raises(ValueError, report._validate_input, items, captions, section,
comments=comments[:-1])
values = report._validate_input(items, captions, section, comments=None)
items_new, captions_new, comments_new = values
assert_equal(len(comments_new), len(items))
run_tests_if_main()
|
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
CORE_SOURCE_FILES = [
'third_party/address_sorting/address_sorting.c',
'third_party/address_sorting/address_sorting_posix.c',
'third_party/address_sorting/address_sorting_windows.c',
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/arena.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/cpu_iphone.cc',
'src/core/lib/gpr/cpu_linux.cc',
'src/core/lib/gpr/cpu_posix.cc',
'src/core/lib/gpr/cpu_windows.cc',
'src/core/lib/gpr/env_linux.cc',
'src/core/lib/gpr/env_posix.cc',
'src/core/lib/gpr/env_windows.cc',
'src/core/lib/gpr/host_port.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/log_android.cc',
'src/core/lib/gpr/log_linux.cc',
'src/core/lib/gpr/log_posix.cc',
'src/core/lib/gpr/log_windows.cc',
'src/core/lib/gpr/mpscq.cc',
'src/core/lib/gpr/murmur_hash.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/string_posix.cc',
'src/core/lib/gpr/string_util_windows.cc',
'src/core/lib/gpr/string_windows.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_posix.cc',
'src/core/lib/gpr/sync_windows.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_posix.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/time_windows.cc',
'src/core/lib/gpr/tls_pthread.cc',
'src/core/lib/gpr/tmpfile_msys.cc',
'src/core/lib/gpr/tmpfile_posix.cc',
'src/core/lib/gpr/tmpfile_windows.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/fork.cc',
'src/core/lib/gprpp/thd_posix.cc',
'src/core/lib/gprpp/thd_windows.cc',
'src/core/lib/profiling/basic_timers.cc',
'src/core/lib/profiling/stap_timers.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_cv.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/json/json.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_string.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/service_config.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/lib/debug/trace.cc',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/context_list.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/credentials_metadata.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/local/local_credentials.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
'src/core/lib/security/security_connector/alts/alts_security_connector.cc',
'src/core/lib/security/security_connector/fake/fake_security_connector.cc',
'src/core/lib/security/security_connector/load_system_roots_fallback.cc',
'src/core/lib/security/security_connector/load_system_roots_linux.cc',
'src/core/lib/security/security_connector/local/local_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
'src/core/lib/security/security_connector/ssl_utils.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/target_authority_table.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/surface/init_secure.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_shared_resource.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_service_api.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_service_api_util.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/handshaker/altscontext.pb.c',
'src/core/tsi/alts/handshaker/handshaker.pb.c',
'src/core/tsi/alts/handshaker/transport_security_common.pb.c',
'third_party/nanopb/pb_common.c',
'third_party/nanopb/pb_decode.c',
'third_party/nanopb/pb_encode.c',
'src/core/tsi/transport_security.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/authority.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/connector.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/health/health_check_client.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/resolver_result_parsing.cc',
'src/core/ext/filters/client_channel/resolving_lb_policy.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/server_address.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/client_channel/health/health.pb.c',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/local_transport_security.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.c',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.c',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'src/core/ext/filters/client_channel/lb_policy/xds/xds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
'src/boringssl/err_data.c',
'third_party/boringssl/crypto/asn1/a_bitstr.c',
'third_party/boringssl/crypto/asn1/a_bool.c',
'third_party/boringssl/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl/crypto/asn1/a_dup.c',
'third_party/boringssl/crypto/asn1/a_enum.c',
'third_party/boringssl/crypto/asn1/a_gentm.c',
'third_party/boringssl/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl/crypto/asn1/a_int.c',
'third_party/boringssl/crypto/asn1/a_mbstr.c',
'third_party/boringssl/crypto/asn1/a_object.c',
'third_party/boringssl/crypto/asn1/a_octet.c',
'third_party/boringssl/crypto/asn1/a_print.c',
'third_party/boringssl/crypto/asn1/a_strnid.c',
'third_party/boringssl/crypto/asn1/a_time.c',
'third_party/boringssl/crypto/asn1/a_type.c',
'third_party/boringssl/crypto/asn1/a_utctm.c',
'third_party/boringssl/crypto/asn1/a_utf8.c',
'third_party/boringssl/crypto/asn1/asn1_lib.c',
'third_party/boringssl/crypto/asn1/asn1_par.c',
'third_party/boringssl/crypto/asn1/asn_pack.c',
'third_party/boringssl/crypto/asn1/f_enum.c',
'third_party/boringssl/crypto/asn1/f_int.c',
'third_party/boringssl/crypto/asn1/f_string.c',
'third_party/boringssl/crypto/asn1/tasn_dec.c',
'third_party/boringssl/crypto/asn1/tasn_enc.c',
'third_party/boringssl/crypto/asn1/tasn_fre.c',
'third_party/boringssl/crypto/asn1/tasn_new.c',
'third_party/boringssl/crypto/asn1/tasn_typ.c',
'third_party/boringssl/crypto/asn1/tasn_utl.c',
'third_party/boringssl/crypto/asn1/time_support.c',
'third_party/boringssl/crypto/base64/base64.c',
'third_party/boringssl/crypto/bio/bio.c',
'third_party/boringssl/crypto/bio/bio_mem.c',
'third_party/boringssl/crypto/bio/connect.c',
'third_party/boringssl/crypto/bio/fd.c',
'third_party/boringssl/crypto/bio/file.c',
'third_party/boringssl/crypto/bio/hexdump.c',
'third_party/boringssl/crypto/bio/pair.c',
'third_party/boringssl/crypto/bio/printf.c',
'third_party/boringssl/crypto/bio/socket.c',
'third_party/boringssl/crypto/bio/socket_helper.c',
'third_party/boringssl/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl/crypto/bn_extra/convert.c',
'third_party/boringssl/crypto/buf/buf.c',
'third_party/boringssl/crypto/bytestring/asn1_compat.c',
'third_party/boringssl/crypto/bytestring/ber.c',
'third_party/boringssl/crypto/bytestring/cbb.c',
'third_party/boringssl/crypto/bytestring/cbs.c',
'third_party/boringssl/crypto/chacha/chacha.c',
'third_party/boringssl/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl/crypto/cipher_extra/derive_key.c',
'third_party/boringssl/crypto/cipher_extra/e_aesccm.c',
'third_party/boringssl/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl/crypto/cipher_extra/e_null.c',
'third_party/boringssl/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl/crypto/cipher_extra/e_ssl3.c',
'third_party/boringssl/crypto/cipher_extra/e_tls.c',
'third_party/boringssl/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl/crypto/cmac/cmac.c',
'third_party/boringssl/crypto/conf/conf.c',
'third_party/boringssl/crypto/cpu-aarch64-fuchsia.c',
'third_party/boringssl/crypto/cpu-aarch64-linux.c',
'third_party/boringssl/crypto/cpu-arm-linux.c',
'third_party/boringssl/crypto/cpu-arm.c',
'third_party/boringssl/crypto/cpu-intel.c',
'third_party/boringssl/crypto/cpu-ppc64le.c',
'third_party/boringssl/crypto/crypto.c',
'third_party/boringssl/crypto/curve25519/spake25519.c',
'third_party/boringssl/crypto/dh/check.c',
'third_party/boringssl/crypto/dh/dh.c',
'third_party/boringssl/crypto/dh/dh_asn1.c',
'third_party/boringssl/crypto/dh/params.c',
'third_party/boringssl/crypto/digest_extra/digest_extra.c',
'third_party/boringssl/crypto/dsa/dsa.c',
'third_party/boringssl/crypto/dsa/dsa_asn1.c',
'third_party/boringssl/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl/crypto/ecdh/ecdh.c',
'third_party/boringssl/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl/crypto/engine/engine.c',
'third_party/boringssl/crypto/err/err.c',
'third_party/boringssl/crypto/evp/digestsign.c',
'third_party/boringssl/crypto/evp/evp.c',
'third_party/boringssl/crypto/evp/evp_asn1.c',
'third_party/boringssl/crypto/evp/evp_ctx.c',
'third_party/boringssl/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl/crypto/evp/p_ec.c',
'third_party/boringssl/crypto/evp/p_ec_asn1.c',
'third_party/boringssl/crypto/evp/p_ed25519.c',
'third_party/boringssl/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl/crypto/evp/p_rsa.c',
'third_party/boringssl/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl/crypto/evp/pbkdf.c',
'third_party/boringssl/crypto/evp/print.c',
'third_party/boringssl/crypto/evp/scrypt.c',
'third_party/boringssl/crypto/evp/sign.c',
'third_party/boringssl/crypto/ex_data.c',
'third_party/boringssl/crypto/fipsmodule/bcm.c',
'third_party/boringssl/crypto/fipsmodule/is_fips.c',
'third_party/boringssl/crypto/hkdf/hkdf.c',
'third_party/boringssl/crypto/lhash/lhash.c',
'third_party/boringssl/crypto/mem.c',
'third_party/boringssl/crypto/obj/obj.c',
'third_party/boringssl/crypto/obj/obj_xref.c',
'third_party/boringssl/crypto/pem/pem_all.c',
'third_party/boringssl/crypto/pem/pem_info.c',
'third_party/boringssl/crypto/pem/pem_lib.c',
'third_party/boringssl/crypto/pem/pem_oth.c',
'third_party/boringssl/crypto/pem/pem_pk8.c',
'third_party/boringssl/crypto/pem/pem_pkey.c',
'third_party/boringssl/crypto/pem/pem_x509.c',
'third_party/boringssl/crypto/pem/pem_xaux.c',
'third_party/boringssl/crypto/pkcs7/pkcs7.c',
'third_party/boringssl/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl/crypto/pkcs8/pkcs8.c',
'third_party/boringssl/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl/crypto/poly1305/poly1305.c',
'third_party/boringssl/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl/crypto/pool/pool.c',
'third_party/boringssl/crypto/rand_extra/deterministic.c',
'third_party/boringssl/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl/crypto/rand_extra/fuchsia.c',
'third_party/boringssl/crypto/rand_extra/rand_extra.c',
'third_party/boringssl/crypto/rand_extra/windows.c',
'third_party/boringssl/crypto/rc4/rc4.c',
'third_party/boringssl/crypto/refcount_c11.c',
'third_party/boringssl/crypto/refcount_lock.c',
'third_party/boringssl/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl/crypto/stack/stack.c',
'third_party/boringssl/crypto/thread.c',
'third_party/boringssl/crypto/thread_none.c',
'third_party/boringssl/crypto/thread_pthread.c',
'third_party/boringssl/crypto/thread_win.c',
'third_party/boringssl/crypto/x509/a_digest.c',
'third_party/boringssl/crypto/x509/a_sign.c',
'third_party/boringssl/crypto/x509/a_strex.c',
'third_party/boringssl/crypto/x509/a_verify.c',
'third_party/boringssl/crypto/x509/algorithm.c',
'third_party/boringssl/crypto/x509/asn1_gen.c',
'third_party/boringssl/crypto/x509/by_dir.c',
'third_party/boringssl/crypto/x509/by_file.c',
'third_party/boringssl/crypto/x509/i2d_pr.c',
'third_party/boringssl/crypto/x509/rsa_pss.c',
'third_party/boringssl/crypto/x509/t_crl.c',
'third_party/boringssl/crypto/x509/t_req.c',
'third_party/boringssl/crypto/x509/t_x509.c',
'third_party/boringssl/crypto/x509/t_x509a.c',
'third_party/boringssl/crypto/x509/x509.c',
'third_party/boringssl/crypto/x509/x509_att.c',
'third_party/boringssl/crypto/x509/x509_cmp.c',
'third_party/boringssl/crypto/x509/x509_d2.c',
'third_party/boringssl/crypto/x509/x509_def.c',
'third_party/boringssl/crypto/x509/x509_ext.c',
'third_party/boringssl/crypto/x509/x509_lu.c',
'third_party/boringssl/crypto/x509/x509_obj.c',
'third_party/boringssl/crypto/x509/x509_r2x.c',
'third_party/boringssl/crypto/x509/x509_req.c',
'third_party/boringssl/crypto/x509/x509_set.c',
'third_party/boringssl/crypto/x509/x509_trs.c',
'third_party/boringssl/crypto/x509/x509_txt.c',
'third_party/boringssl/crypto/x509/x509_v3.c',
'third_party/boringssl/crypto/x509/x509_vfy.c',
'third_party/boringssl/crypto/x509/x509_vpm.c',
'third_party/boringssl/crypto/x509/x509cset.c',
'third_party/boringssl/crypto/x509/x509name.c',
'third_party/boringssl/crypto/x509/x509rset.c',
'third_party/boringssl/crypto/x509/x509spki.c',
'third_party/boringssl/crypto/x509/x_algor.c',
'third_party/boringssl/crypto/x509/x_all.c',
'third_party/boringssl/crypto/x509/x_attrib.c',
'third_party/boringssl/crypto/x509/x_crl.c',
'third_party/boringssl/crypto/x509/x_exten.c',
'third_party/boringssl/crypto/x509/x_info.c',
'third_party/boringssl/crypto/x509/x_name.c',
'third_party/boringssl/crypto/x509/x_pkey.c',
'third_party/boringssl/crypto/x509/x_pubkey.c',
'third_party/boringssl/crypto/x509/x_req.c',
'third_party/boringssl/crypto/x509/x_sig.c',
'third_party/boringssl/crypto/x509/x_spki.c',
'third_party/boringssl/crypto/x509/x_val.c',
'third_party/boringssl/crypto/x509/x_x509.c',
'third_party/boringssl/crypto/x509/x_x509a.c',
'third_party/boringssl/crypto/x509v3/pcy_cache.c',
'third_party/boringssl/crypto/x509v3/pcy_data.c',
'third_party/boringssl/crypto/x509v3/pcy_lib.c',
'third_party/boringssl/crypto/x509v3/pcy_map.c',
'third_party/boringssl/crypto/x509v3/pcy_node.c',
'third_party/boringssl/crypto/x509v3/pcy_tree.c',
'third_party/boringssl/crypto/x509v3/v3_akey.c',
'third_party/boringssl/crypto/x509v3/v3_akeya.c',
'third_party/boringssl/crypto/x509v3/v3_alt.c',
'third_party/boringssl/crypto/x509v3/v3_bcons.c',
'third_party/boringssl/crypto/x509v3/v3_bitst.c',
'third_party/boringssl/crypto/x509v3/v3_conf.c',
'third_party/boringssl/crypto/x509v3/v3_cpols.c',
'third_party/boringssl/crypto/x509v3/v3_crld.c',
'third_party/boringssl/crypto/x509v3/v3_enum.c',
'third_party/boringssl/crypto/x509v3/v3_extku.c',
'third_party/boringssl/crypto/x509v3/v3_genn.c',
'third_party/boringssl/crypto/x509v3/v3_ia5.c',
'third_party/boringssl/crypto/x509v3/v3_info.c',
'third_party/boringssl/crypto/x509v3/v3_int.c',
'third_party/boringssl/crypto/x509v3/v3_lib.c',
'third_party/boringssl/crypto/x509v3/v3_ncons.c',
'third_party/boringssl/crypto/x509v3/v3_pci.c',
'third_party/boringssl/crypto/x509v3/v3_pcia.c',
'third_party/boringssl/crypto/x509v3/v3_pcons.c',
'third_party/boringssl/crypto/x509v3/v3_pku.c',
'third_party/boringssl/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl/crypto/x509v3/v3_prn.c',
'third_party/boringssl/crypto/x509v3/v3_purp.c',
'third_party/boringssl/crypto/x509v3/v3_skey.c',
'third_party/boringssl/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl/crypto/x509v3/v3_utl.c',
'third_party/boringssl/ssl/bio_ssl.cc',
'third_party/boringssl/ssl/custom_extensions.cc',
'third_party/boringssl/ssl/d1_both.cc',
'third_party/boringssl/ssl/d1_lib.cc',
'third_party/boringssl/ssl/d1_pkt.cc',
'third_party/boringssl/ssl/d1_srtp.cc',
'third_party/boringssl/ssl/dtls_method.cc',
'third_party/boringssl/ssl/dtls_record.cc',
'third_party/boringssl/ssl/handoff.cc',
'third_party/boringssl/ssl/handshake.cc',
'third_party/boringssl/ssl/handshake_client.cc',
'third_party/boringssl/ssl/handshake_server.cc',
'third_party/boringssl/ssl/s3_both.cc',
'third_party/boringssl/ssl/s3_lib.cc',
'third_party/boringssl/ssl/s3_pkt.cc',
'third_party/boringssl/ssl/ssl_aead_ctx.cc',
'third_party/boringssl/ssl/ssl_asn1.cc',
'third_party/boringssl/ssl/ssl_buffer.cc',
'third_party/boringssl/ssl/ssl_cert.cc',
'third_party/boringssl/ssl/ssl_cipher.cc',
'third_party/boringssl/ssl/ssl_file.cc',
'third_party/boringssl/ssl/ssl_key_share.cc',
'third_party/boringssl/ssl/ssl_lib.cc',
'third_party/boringssl/ssl/ssl_privkey.cc',
'third_party/boringssl/ssl/ssl_session.cc',
'third_party/boringssl/ssl/ssl_stat.cc',
'third_party/boringssl/ssl/ssl_transcript.cc',
'third_party/boringssl/ssl/ssl_versions.cc',
'third_party/boringssl/ssl/ssl_x509.cc',
'third_party/boringssl/ssl/t1_enc.cc',
'third_party/boringssl/ssl/t1_lib.cc',
'third_party/boringssl/ssl/tls13_both.cc',
'third_party/boringssl/ssl/tls13_client.cc',
'third_party/boringssl/ssl/tls13_enc.cc',
'third_party/boringssl/ssl/tls13_server.cc',
'third_party/boringssl/ssl/tls_method.cc',
'third_party/boringssl/ssl/tls_record.cc',
'third_party/boringssl/third_party/fiat/curve25519.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
'third_party/cares/cares/ares__close_sockets.c',
'third_party/cares/cares/ares__get_hostent.c',
'third_party/cares/cares/ares__read_line.c',
'third_party/cares/cares/ares__timeval.c',
'third_party/cares/cares/ares_cancel.c',
'third_party/cares/cares/ares_create_query.c',
'third_party/cares/cares/ares_data.c',
'third_party/cares/cares/ares_destroy.c',
'third_party/cares/cares/ares_expand_name.c',
'third_party/cares/cares/ares_expand_string.c',
'third_party/cares/cares/ares_fds.c',
'third_party/cares/cares/ares_free_hostent.c',
'third_party/cares/cares/ares_free_string.c',
'third_party/cares/cares/ares_getenv.c',
'third_party/cares/cares/ares_gethostbyaddr.c',
'third_party/cares/cares/ares_gethostbyname.c',
'third_party/cares/cares/ares_getnameinfo.c',
'third_party/cares/cares/ares_getopt.c',
'third_party/cares/cares/ares_getsock.c',
'third_party/cares/cares/ares_init.c',
'third_party/cares/cares/ares_library_init.c',
'third_party/cares/cares/ares_llist.c',
'third_party/cares/cares/ares_mkquery.c',
'third_party/cares/cares/ares_nowarn.c',
'third_party/cares/cares/ares_options.c',
'third_party/cares/cares/ares_parse_a_reply.c',
'third_party/cares/cares/ares_parse_aaaa_reply.c',
'third_party/cares/cares/ares_parse_mx_reply.c',
'third_party/cares/cares/ares_parse_naptr_reply.c',
'third_party/cares/cares/ares_parse_ns_reply.c',
'third_party/cares/cares/ares_parse_ptr_reply.c',
'third_party/cares/cares/ares_parse_soa_reply.c',
'third_party/cares/cares/ares_parse_srv_reply.c',
'third_party/cares/cares/ares_parse_txt_reply.c',
'third_party/cares/cares/ares_platform.c',
'third_party/cares/cares/ares_process.c',
'third_party/cares/cares/ares_query.c',
'third_party/cares/cares/ares_search.c',
'third_party/cares/cares/ares_send.c',
'third_party/cares/cares/ares_strcasecmp.c',
'third_party/cares/cares/ares_strdup.c',
'third_party/cares/cares/ares_strerror.c',
'third_party/cares/cares/ares_timeout.c',
'third_party/cares/cares/ares_version.c',
'third_party/cares/cares/ares_writev.c',
'third_party/cares/cares/bitncmp.c',
'third_party/cares/cares/inet_net_pton.c',
'third_party/cares/cares/inet_ntop.c',
'third_party/cares/cares/windows_port.c',
]
|
|
import re
import sys
import time
import datetime
import random
from six import iteritems
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
from .PostProcessorBase import PostProcessorBase
from pandajedi.jedirefine import RefinerUtils
from pandaserver.taskbuffer import EventServiceUtils
# post processor for ATLAS analysis
class AtlasAnalPostProcessor (PostProcessorBase):
# constructor
def __init__(self,taskBufferIF,ddmIF):
PostProcessorBase.__init__(self,taskBufferIF,ddmIF)
self.taskParamMap = None
# main
def doPostProcess(self,taskSpec,tmpLog):
# freeze datasets
try:
# get DDM I/F
ddmIF = self.ddmIF.getInterface(taskSpec.vo)
# shuffle datasets
random.shuffle(taskSpec.datasetSpecList)
# loop over all datasets
useLib = False
nOkLib = 0
lockUpdateTime = datetime.datetime.utcnow()
for datasetSpec in taskSpec.datasetSpecList:
# ignore template
if datasetSpec.type.startswith('tmpl_'):
continue
# only output, log or lib datasets
if not datasetSpec.type.endswith('log') and not datasetSpec.type.endswith('output') \
and not datasetSpec.type == 'lib':
continue
                # only user, group, or panda datasets
if not datasetSpec.datasetName.startswith('user') and not datasetSpec.datasetName.startswith('panda') \
and not datasetSpec.datasetName.startswith('group'):
continue
# check if already closed
datasetAttrs = self.taskBufferIF.getDatasetAttributes_JEDI(datasetSpec.jediTaskID,datasetSpec.datasetID,['state'])
if 'state' in datasetAttrs and datasetAttrs['state'] == 'closed':
tmpLog.info('skip freezing closed datasetID={0}:Name={1}'.format(datasetSpec.datasetID,datasetSpec.datasetName))
closedFlag = True
else:
closedFlag = False
# remove wrong files
if not closedFlag and datasetSpec.type in ['output']:
# get successful files
okFiles = self.taskBufferIF.getSuccessfulFiles_JEDI(datasetSpec.jediTaskID,datasetSpec.datasetID)
if okFiles is None:
tmpLog.warning('failed to get successful files for {0}'.format(datasetSpec.datasetName))
return self.SC_FAILED
# get files in dataset
ddmFiles = ddmIF.getFilesInDataset(datasetSpec.datasetName,skipDuplicate=False)
tmpLog.debug('datasetID={0}:Name={1} has {2} files in DB, {3} files in DDM'.format(datasetSpec.datasetID,
datasetSpec.datasetName,
len(okFiles),len(ddmFiles)))
# check all files
toDelete = []
for tmpGUID,attMap in iteritems(ddmFiles):
if attMap['lfn'] not in okFiles:
did = {'scope':attMap['scope'], 'name':attMap['lfn']}
toDelete.append(did)
tmpLog.debug('delete {0} from {1}'.format(attMap['lfn'],datasetSpec.datasetName))
# delete
if toDelete != []:
ddmIF.deleteFilesFromDataset(datasetSpec.datasetName,toDelete)
# freeze datasets
if not closedFlag and not (datasetSpec.type.startswith('trn_') and datasetSpec.type not in ['trn_log']):
tmpLog.debug('freeze datasetID={0}:Name={1}'.format(datasetSpec.datasetID,datasetSpec.datasetName))
ddmIF.freezeDataset(datasetSpec.datasetName,ignoreUnknown=True)
else:
if datasetSpec.type.startswith('trn_') and datasetSpec.type not in ['trn_log']:
tmpLog.debug('skip freezing transient datasetID={0}:Name={1}'.format(datasetSpec.datasetID,datasetSpec.datasetName))
# update dataset
datasetSpec.state = 'closed'
datasetSpec.stateCheckTime = datetime.datetime.utcnow()
                # check if the build step succeeded
if datasetSpec.type == 'lib':
useLib = True
else:
nOkLib += 1
# delete transient or empty datasets
if not closedFlag:
emptyOnly = True
if datasetSpec.type.startswith('trn_') and datasetSpec.type not in ['trn_log']:
emptyOnly = False
retStr = ddmIF.deleteDataset(datasetSpec.datasetName,emptyOnly,ignoreUnknown=True)
tmpLog.debug(retStr)
# extend lifetime
if datasetSpec.type in ['output'] and datasetSpec.datasetName.startswith('user'):
tmpLog.debug('extend lifetime datasetID={0}:Name={1}'.format(datasetSpec.datasetID,datasetSpec.datasetName))
ddmIF.updateReplicationRules(datasetSpec.datasetName,{'type=.+':{'lifetime':14*24*60*60},
'(SCRATCH|USER)DISK':{'lifetime':14*24*60*60}})
# update dataset in DB
self.taskBufferIF.updateDatasetAttributes_JEDI(datasetSpec.jediTaskID,datasetSpec.datasetID,
{'state':datasetSpec.state,
'stateCheckTime':datasetSpec.stateCheckTime})
# update task lock
if datetime.datetime.utcnow()-lockUpdateTime > datetime.timedelta(minutes=5):
lockUpdateTime = datetime.datetime.utcnow()
# update lock
self.taskBufferIF.updateTaskLock_JEDI(taskSpec.jediTaskID)
# dialog
if useLib and nOkLib == 0:
taskSpec.setErrDiag('No build jobs succeeded',True)
except Exception:
errtype,errvalue = sys.exc_info()[:2]
tmpLog.warning('failed to freeze datasets with {0}:{1}'.format(errtype.__name__,errvalue))
retVal = self.SC_SUCCEEDED
try:
self.doBasicPostProcess(taskSpec,tmpLog)
except Exception:
errtype,errvalue = sys.exc_info()[:2]
tmpLog.error('doBasicPostProcess failed with {0}:{1}'.format(errtype.__name__,errvalue))
retVal = self.SC_FATAL
return retVal
# final procedure
def doFinalProcedure(self,taskSpec,tmpLog):
# check email address
toAdd = self.getEmail(taskSpec.userName,taskSpec.vo,tmpLog)
# read task parameters
try:
taskParam = self.taskBufferIF.getTaskParamsWithID_JEDI(taskSpec.jediTaskID)
self.taskParamMap = RefinerUtils.decodeJSON(taskParam)
except Exception:
errtype,errvalue = sys.exc_info()[:2]
tmpLog.error('task param conversion from json failed with {0}:{1}'.format(errtype.__name__,errvalue))
if toAdd is None or \
(self.taskParamMap is not None and 'noEmail' in self.taskParamMap and self.taskParamMap['noEmail'] is True):
tmpLog.debug('email notification is suppressed')
else:
# send email notification
fromAdd = self.senderAddress()
msgBody = self.composeMessage(taskSpec,fromAdd,toAdd)
self.sendMail(taskSpec.jediTaskID,fromAdd,toAdd,msgBody,3,False,tmpLog)
return self.SC_SUCCEEDED
# compose mail message
def composeMessage(self,taskSpec,fromAdd,toAdd):
# get full task parameters
urlData = {}
urlData['job'] = '*'
urlData['jobsetID'] = taskSpec.reqID
urlData['user'] = taskSpec.userName
newUrlData = {}
newUrlData['jobtype'] = 'analysis'
newUrlData['jobsetID'] = taskSpec.reqID
newUrlData['prodUserName'] = taskSpec.userName
newUrlData['hours'] = 71
# summary
listInDS = []
listOutDS = []
listLogDS = []
numTotal = 0
numOK = 0
numNG = 0
numCancel = 0
if not taskSpec.is_hpo_workflow():
inputStr = 'Inputs'
cancelledStr = 'Cancelled '
for datasetSpec in taskSpec.datasetSpecList:
# dataset summary
if datasetSpec.type == 'log':
if datasetSpec.containerName not in listLogDS:
listLogDS.append(datasetSpec.containerName)
elif datasetSpec.type == 'input':
if datasetSpec.containerName not in listInDS:
listInDS.append(datasetSpec.containerName)
elif datasetSpec.type == 'output':
if datasetSpec.containerName not in listOutDS:
listOutDS.append(datasetSpec.containerName)
# process summary
if datasetSpec.isMasterInput():
try:
numTotal += datasetSpec.nFiles
numOK += datasetSpec.nFilesFinished
numNG += datasetSpec.nFilesFailed
except Exception:
pass
else:
inputStr = 'Points'
cancelledStr = 'Unprocessed'
numTotal = taskSpec.get_total_num_jobs()
event_stat = self.taskBufferIF.get_event_statistics(taskSpec.jediTaskID)
if event_stat is not None:
numOK = event_stat.get(EventServiceUtils.ST_finished, 0)
numNG = event_stat.get(EventServiceUtils.ST_failed, 0)
try:
numCancel = numTotal - numOK - numNG
except Exception:
pass
if numOK == numTotal:
msgSucceeded = 'All Succeeded'
else:
msgSucceeded = 'Succeeded'
listInDS.sort()
listOutDS.sort()
listLogDS.sort()
dsSummary = ''
for tmpDS in listInDS:
dsSummary += 'In : {0}\n'.format(tmpDS)
for tmpDS in listOutDS:
dsSummary += 'Out : {0}\n'.format(tmpDS)
for tmpDS in listLogDS:
dsSummary += 'Log : {0}\n'.format(tmpDS)
dsSummary = dsSummary[:-1]
# CLI param
if 'cliParams' in self.taskParamMap:
cliParams = self.taskParamMap['cliParams']
else:
cliParams = None
# make message
message = \
"""Subject: JEDI notification for TaskID:{jediTaskID} ({numOK}/{numTotal} {msgSucceeded})
From: {fromAdd}
To: {toAdd}
Summary of TaskID:{jediTaskID}
Created : {creationDate} (UTC)
Ended : {endTime} (UTC)
Final Status : {status}
Total Number of {strInput} : {numTotal}
Succeeded : {numOK}
Failed : {numNG}
{strCancelled} : {numCancel}
Error Dialog : {errorDialog}
{dsSummary}
Parameters : {params}
PandaMonURL : http://bigpanda.cern.ch/task/{jediTaskID}/""".format(
jediTaskID=taskSpec.jediTaskID,
JobsetID=taskSpec.reqID,
fromAdd=fromAdd,
toAdd=toAdd,
creationDate=taskSpec.creationDate,
endTime=taskSpec.endTime,
status=taskSpec.status,
errorDialog=self.removeTags(taskSpec.errorDialog),
params=cliParams,
taskName=taskSpec.taskName,
oldPandaMon=urlencode(urlData),
newPandaMon=urlencode(newUrlData),
numTotal=numTotal,
numOK=numOK,
numNG=numNG,
numCancel=numCancel,
dsSummary=dsSummary,
msgSucceeded=msgSucceeded,
strInput=inputStr,
strCancelled=cancelledStr,
)
        # trailer
message += \
"""
Report Panda problems of any sort to
the eGroup for help request
hn-atlas-dist-analysis-help@cern.ch
the JIRA portal for software bug
https://its.cern.ch/jira/browse/ATLASPANDA
"""
# return
return message
# get email
def getEmail(self,userName,vo,tmpLog):
# return to suppress mail
retSupp = None
# get DN
tmpLog.debug("getting email for {0}".format(userName))
# get email from MetaDB
mailAddrInDB,dn,dbUptime = self.taskBufferIF.getEmailAddr(userName,withDN=True)
tmpLog.debug("email from MetaDB : {0}".format(mailAddrInDB))
        # email notification is suppressed
notSendMail = False
if mailAddrInDB is not None and mailAddrInDB.startswith('notsend'):
notSendMail = True
        # DN is unavailable
if dn in ['',None]:
tmpLog.debug("DN is empty")
notSendMail = True
else:
            # avoid too-frequent lookups
if dbUptime is not None and datetime.datetime.utcnow()-dbUptime < datetime.timedelta(hours=1):
tmpLog.debug("no lookup")
if notSendMail or mailAddrInDB in [None,'']:
return retSupp
else:
return mailAddrInDB.split(':')[-1]
else:
# get email from DQ2
tmpLog.debug("getting email using dq2Info.finger({0})".format(dn))
nTry = 3
for iDDMTry in range(nTry):
try:
userInfo = self.ddmIF.getInterface(vo).finger(dn)
mailAddr = userInfo['email']
tmpLog.debug("email from DQ2 : {0}".format(mailAddr))
if mailAddr is None:
mailAddr = ''
# make email field to update DB
mailAddrToDB = ''
if notSendMail:
mailAddrToDB += 'notsend:'
mailAddrToDB += mailAddr
# update database
tmpLog.debug("update email to {0}".format(mailAddrToDB))
self.taskBufferIF.setEmailAddr(userName,mailAddrToDB)
# return
if notSendMail or mailAddr == '':
return retSupp
return mailAddr
except Exception:
if iDDMTry+1 < nTry:
tmpLog.debug("sleep for retry {0}/{1}".format(iDDMTry,nTry))
time.sleep(10)
else:
errType,errValue = sys.exc_info()[:2]
tmpLog.error("{0}:{1}".format(errType,errValue))
# not send email
return retSupp
# remove tags
def removeTags(self,tmpStr):
try:
if tmpStr is not None:
tmpStr = re.sub('>[^<]+<','><',tmpStr)
tmpStr = re.sub('<[^<]+>','',tmpStr)
except Exception:
pass
return tmpStr
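    # Example (illustrative only): removeTags('<b>failed</b> jobs') returns
    # ' jobs' -- the first substitution empties the text enclosed between
    # adjacent tags, the second strips the tags themselves.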
|
|
##
# Copyright (c) 2010-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
General utility client code for interfacing with DB-API 2.0 modules.
"""
from twext.enterprise.util import mapOracleOutputType
try:
import os
# In order to encode and decode values going to and from the database,
# cx_Oracle depends on Oracle's NLS support, which in turn relies upon
# libclntsh's reading of environment variables. It doesn't matter what the
# database language is; the database may contain iCalendar data in many
# languages, but we MUST set NLS_LANG to a value that includes an encoding
# (character set?) that includes all of Unicode, so that the connection can
# encode and decode any valid unicode data. This is not to encode and
# decode bytes, but rather, to faithfully relay Python unicode strings to
# the database. The default connection encoding is US-ASCII, which is
# definitely no good. NLS_LANG needs to be set before the first call to
# connect(), not actually before the module gets imported, but this is as
# good a place as any. I am explicitly setting this rather than inheriting
# it, because it's not a configuration value in the sense that multiple
# values may possibly be correct; _only_ UTF-8 is ever correct to work with
# our software, and other values will fail CalDAVTester. (The state is,
# however, process-global; after the first call to connect(), all
# subsequent connections inherit this encoding even if the environment
# variable changes.) -glyph
os.environ['NLS_LANG'] = '.AL32UTF8'
import cx_Oracle
except ImportError:
cx_Oracle = None
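# When cx_Oracle cannot be imported, the name is bound to None so this module
# still imports cleanly; only the Oracle-specific wrappers below will fail if
# they are actually exercised.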
class DiagnosticCursorWrapper(object):
"""
Diagnostic wrapper around a DB-API 2.0 cursor for debugging connection
status.
"""
def __init__(self, realCursor, connectionWrapper):
self.realCursor = realCursor
self.connectionWrapper = connectionWrapper
@property
def rowcount(self):
return self.realCursor.rowcount
@property
def description(self):
return self.realCursor.description
def execute(self, sql, args=()):
self.connectionWrapper.state = 'executing %r' % (sql,)
# Use log.debug
# sys.stdout.write(
# "Really executing SQL %r in thread %r\n" %
# ((sql % tuple(args)), thread.get_ident())
# )
self.realCursor.execute(sql, args)
def close(self):
self.realCursor.close()
def fetchall(self):
results = self.realCursor.fetchall()
# Use log.debug
# sys.stdout.write(
# "Really fetching results %r thread %r\n" %
# (results, thread.get_ident())
# )
return results
class OracleCursorWrapper(DiagnosticCursorWrapper):
"""
    Wrapper for cx_Oracle DB-API cursors which implements fetchall() to read
all CLOB objects into strings.
"""
def fetchall(self):
accum = []
for row in self.realCursor:
newRow = []
for column in row:
newRow.append(mapOracleOutputType(column))
accum.append(newRow)
return accum
def var(self, *args):
"""
Create a cx_Oracle variable bound to this cursor. (Forwarded in
addition to the standard methods so that implementors of
L{IDerivedParameter} do not need to be specifically aware of this
layer.)
"""
return self.realCursor.var(*args)
def execute(self, sql, args=()):
realArgs = []
for arg in args:
if isinstance(arg, str):
# We use NCLOB everywhere, so cx_Oracle requires a unicode-type
# input. But we mostly pass around utf-8 encoded bytes at the
# application layer as they consume less memory, so do the
# conversion here.
arg = arg.decode('utf-8')
if isinstance(arg, unicode) and len(arg) > 1024:
# This *may* cause a type mismatch, but none of the non-CLOB
# strings that we're passing would allow a value this large
# anyway. Smaller strings will be automatically converted by
# the bindings; larger ones will generate an error. I'm not
# sure why cx_Oracle itself doesn't just do the following hack
# automatically and internally for larger values too, but, here
# it is:
v = self.var(cx_Oracle.NCLOB, len(arg) + 1)
v.setvalue(0, arg)
else:
v = arg
realArgs.append(v)
return super(OracleCursorWrapper, self).execute(sql, realArgs)
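# Illustrative sketch (an assumption, not part of the original module): strings
# longer than 1024 characters are bound through an explicit cx_Oracle.NCLOB
# variable by execute() above; shorter ones are converted by the bindings.
# The table and bind below are hypothetical.
#   cursor = connectionWrapper.cursor()   # an OracleCursorWrapper
#   big = u'x' * 4000                     # > 1024 chars -> bound as NCLOB
#   cursor.execute("insert into t (c) values (:1)", [big])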
class DiagnosticConnectionWrapper(object):
"""
Diagnostic wrapper around a DB-API 2.0 connection for debugging connection
status.
"""
wrapper = DiagnosticCursorWrapper
def __init__(self, realConnection, label):
self.realConnection = realConnection
self.label = label
self.state = 'idle (start)'
def cursor(self):
return self.wrapper(self.realConnection.cursor(), self)
def close(self):
self.realConnection.close()
self.state = 'closed'
def commit(self):
self.realConnection.commit()
self.state = 'idle (after commit)'
def rollback(self):
self.realConnection.rollback()
self.state = 'idle (after rollback)'
class DBAPIConnector(object):
"""
A simple wrapper for DB-API connectors.
@ivar dbModule: the DB-API module to use.
"""
wrapper = DiagnosticConnectionWrapper
def __init__(self, dbModule, preflight, *connectArgs, **connectKw):
self.dbModule = dbModule
self.connectArgs = connectArgs
self.connectKw = connectKw
self.preflight = preflight
def connect(self, label="<unlabeled>"):
connection = self.dbModule.connect(*self.connectArgs, **self.connectKw)
w = self.wrapper(connection, label)
self.preflight(w)
return w
class OracleConnectionWrapper(DiagnosticConnectionWrapper):
wrapper = OracleCursorWrapper
class OracleConnector(DBAPIConnector):
"""
A connector for cx_Oracle connections, with some special-cased behavior to
make it work more like other DB-API bindings.
Note: this is currently necessary to make our usage of twext.enterprise.dal
work with cx_Oracle, and should be factored somewhere higher-level.
"""
wrapper = OracleConnectionWrapper
def __init__(self, dsn):
super(OracleConnector, self).__init__(
cx_Oracle, oraclePreflight, dsn, threaded=True)
def oraclePreflight(connection):
"""
Pre-flight function for Oracle connections: set the timestamp format to be
something closely resembling our default assumption from Postgres.
"""
c = connection.cursor()
c.execute(
"alter session set NLS_TIMESTAMP_FORMAT = "
"'YYYY-MM-DD HH24:MI:SS.FF'"
)
c.execute(
"alter session set NLS_TIMESTAMP_TZ_FORMAT = "
"'YYYY-MM-DD HH:MI:SS.FF+TZH:TZM'"
)
connection.commit()
c.close()
def postgresPreflight(connection):
"""
Pre-flight function for PostgreSQL connections: enable standard conforming
strings, and set a non-infinite statement timeout.
"""
c = connection.cursor()
# Turn on standard conforming strings. This option is _required_ if
# you want to get correct behavior out of parameter-passing with the
# pgdb module. If it is not set then the server is potentially
# vulnerable to certain types of SQL injection.
c.execute("set standard_conforming_strings=on")
# Abort any statement that takes more than 30 seconds (30000ms) to
# execute. This is necessary as a temporary workaround since it's
# hypothetically possible that different database operations could
# block each other, while executing SQL in the same process (in the
# same thread, since SQL executes in the main thread now). It's
# preferable to see some exceptions while we're in this state than to
# have the entire worker process hang.
c.execute("set statement_timeout=30000")
# pgdb (as per DB-API 2.0) automatically puts the connection into a
# 'executing a transaction' state when _any_ statement is executed on
# it (even these not-touching-any-data statements); make sure to commit
# first so that the application sees a fresh transaction, and the
# connection can safely be pooled without executing anything on it.
connection.commit()
c.close()
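def _exampleConnect():
    """
    Illustrative sketch, not part of the original module: wiring a
    DBAPIConnector up to a DB-API 2.0 module. The ``pgdb`` module and the
    connection arguments here are assumptions for the example only.
    """
    import pgdb
    connector = DBAPIConnector(pgdb, postgresPreflight, database="caldav")
    connection = connector.connect(label="example")
    cursor = connection.cursor()
    cursor.execute("select 1")
    rows = cursor.fetchall()
    connection.commit()  # state -> 'idle (after commit)'
    connection.close()   # state -> 'closed'
    return rows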
|
|
import sys
import warnings
try:
from itertools import izip as zip
except ImportError:
pass
from itertools import product
import numpy as np
from .. import util
from ..dimension import dimension_name
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
from .interface import DataError, Interface
from .pandas import PandasInterface
from .util import finite_range
class cuDFInterface(PandasInterface):
"""
The cuDFInterface allows a Dataset objects to wrap a cuDF
DataFrame object. Using cuDF allows working with columnar
data on a GPU. Most operations leave the data in GPU memory,
however to plot the data it has to be loaded into memory.
The cuDFInterface covers almost the complete API exposed
by the PandasInterface with two notable exceptions:
1) Aggregation and groupby do not have a consistent sort order
(see https://github.com/rapidsai/cudf/issues/4237)
3) Not all functions can be easily applied to a cuDF so
some functions applied with aggregate and reduce will not work.
"""
datatype = 'cuDF'
types = ()
@classmethod
def loaded(cls):
return 'cudf' in sys.modules
@classmethod
def applies(cls, obj):
if not cls.loaded():
return False
import cudf
return isinstance(obj, (cudf.DataFrame, cudf.Series))
@classmethod
def init(cls, eltype, data, kdims, vdims):
import cudf
import pandas as pd
element_params = eltype.param.objects()
kdim_param = element_params['kdims']
vdim_param = element_params['vdims']
if isinstance(data, (cudf.Series, pd.Series)):
data = data.to_frame()
if not isinstance(data, cudf.DataFrame):
data, _, _ = PandasInterface.init(eltype, data, kdims, vdims)
data = cudf.from_pandas(data)
columns = list(data.columns)
ncols = len(columns)
index_names = [data.index.name]
if index_names == [None]:
index_names = ['index']
if eltype._auto_indexable_1d and ncols == 1 and kdims is None:
kdims = list(index_names)
if isinstance(kdim_param.bounds[1], int):
ndim = min([kdim_param.bounds[1], len(kdim_param.default)])
else:
ndim = None
nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None
if kdims and vdims is None:
vdims = [c for c in columns if c not in kdims]
elif vdims and kdims is None:
kdims = [c for c in columns if c not in vdims][:ndim]
elif kdims is None:
kdims = list(columns[:ndim])
if vdims is None:
vdims = [d for d in columns[ndim:((ndim+nvdim) if nvdim else None)]
if d not in kdims]
elif kdims == [] and vdims is None:
vdims = list(columns[:nvdim if nvdim else None])
# Handle reset of index if kdims reference index by name
for kd in kdims:
kd = dimension_name(kd)
if kd in columns:
continue
if any(kd == ('index' if name is None else name)
for name in index_names):
data = data.reset_index()
break
if any(isinstance(d, (np.int64, int)) for d in kdims+vdims):
raise DataError("cudf DataFrame column names used as dimensions "
"must be strings not integers.", cls)
if kdims:
kdim = dimension_name(kdims[0])
if eltype._auto_indexable_1d and ncols == 1 and kdim not in columns:
data = data.copy()
data.insert(0, kdim, np.arange(len(data)))
for d in kdims+vdims:
d = dimension_name(d)
if len([c for c in columns if c == d]) > 1:
raise DataError('Dimensions may not reference duplicated DataFrame '
'columns (found duplicate %r columns). If you want to plot '
'a column against itself simply declare two dimensions '
'with the same name. '% d, cls)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def range(cls, dataset, dimension):
dimension = dataset.get_dimension(dimension, strict=True)
column = dataset.data[dimension.name]
if dimension.nodata is not None:
column = cls.replace_value(column, dimension.nodata)
if column.dtype.kind == 'O':
return np.NaN, np.NaN
else:
return finite_range(column, column.min(), column.max())
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True, compute=True,
keep_index=False):
dim = dataset.get_dimension(dim, strict=True)
data = dataset.data[dim.name]
if not expanded:
data = data.unique()
return data.values_host if compute else data.values
elif keep_index:
return data
elif compute:
return data.values_host
try:
return data.values
except Exception:
return data.values_host
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
# Get dimensions information
dimensions = [dataset.get_dimension(d).name for d in dimensions]
kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]
# Update the kwargs appropriately for Element group types
group_kwargs = {}
group_type = dict if group_type == 'raw' else group_type
if issubclass(group_type, Element):
group_kwargs.update(util.get_param_values(dataset))
group_kwargs['kdims'] = kdims
group_kwargs.update(kwargs)
# Propagate dataset
group_kwargs['dataset'] = dataset.dataset
# Find all the keys along supplied dimensions
keys = product(*(dataset.data[d].unique().values_host for d in dimensions))
# Iterate over the unique entries applying selection masks
grouped_data = []
for unique_key in util.unique_iterator(keys):
group_data = dataset.select(**dict(zip(dimensions, unique_key)))
if not len(group_data):
continue
group_data = group_type(group_data, **group_kwargs)
grouped_data.append((unique_key, group_data))
if issubclass(container_type, NdMapping):
with item_check(False), sorted_context(False):
kdims = [dataset.get_dimension(d) for d in dimensions]
return container_type(grouped_data, kdims=kdims)
else:
return container_type(grouped_data)
@classmethod
def select_mask(cls, dataset, selection):
"""
Given a Dataset object and a dictionary with dimension keys and
selection keys (i.e. tuple ranges, slices, sets, lists, or literals)
return a boolean mask over the rows in the Dataset object that
have been selected.
"""
mask = None
for dim, sel in selection.items():
if isinstance(sel, tuple):
sel = slice(*sel)
arr = cls.values(dataset, dim, keep_index=True)
if util.isdatetime(arr) and util.pd:
try:
sel = util.parse_datetime_selection(sel)
except:
pass
new_masks = []
if isinstance(sel, slice):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'invalid value encountered')
if sel.start is not None:
new_masks.append(sel.start <= arr)
if sel.stop is not None:
new_masks.append(arr < sel.stop)
if not new_masks:
continue
new_mask = new_masks[0]
for imask in new_masks[1:]:
new_mask &= imask
elif isinstance(sel, (set, list)):
for v in sel:
new_masks.append(arr==v)
if not new_masks:
continue
new_mask = new_masks[0]
for imask in new_masks[1:]:
new_mask |= imask
elif callable(sel):
new_mask = sel(arr)
else:
new_mask = arr == sel
if mask is None:
mask = new_mask
else:
mask &= new_mask
return mask
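# Illustrative sketch (an assumption, not part of the original module): the
# selection dict maps dimension names to tuple ranges, slices, lists,
# callables or scalars, and the per-dimension masks are ANDed together, e.g.
#   mask = cuDFInterface.select_mask(dataset, {'x': (0, 10), 'y': [1, 2]})
#   # rows where (0 <= x < 10) & ((y == 1) | (y == 2))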
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
df = dataset.data
if selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
if selection_mask is not None:
df = df.loc[selection_mask]
if indexed and len(df) == 1 and len(dataset.vdims) == 1:
return df[dataset.vdims[0].name].iloc[0]
return df
@classmethod
def concat_fn(cls, dataframes, **kwargs):
import cudf
return cudf.concat(dataframes, **kwargs)
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
data = dataset.data.copy()
if dimension.name not in data:
data[dimension.name] = values
return data
@classmethod
def aggregate(cls, dataset, dimensions, function, **kwargs):
data = dataset.data
cols = [d.name for d in dataset.kdims if d in dimensions]
vdims = dataset.dimensions('value', label='name')
reindexed = data[cols+vdims]
agg = function.__name__
if len(dimensions):
agg_map = {'amin': 'min', 'amax': 'max'}
agg = agg_map.get(agg, agg)
grouped = reindexed.groupby(cols, sort=False)
if not hasattr(grouped, agg):
raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
df = getattr(grouped, agg)().reset_index()
else:
agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'}
agg = agg_map.get(agg, agg)
if not hasattr(reindexed, agg):
raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
agg = getattr(reindexed, agg)()
data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_array())))
df = util.pd.DataFrame(data, columns=list(agg.index.values_host))
dropped = []
for vd in vdims:
if vd not in df.columns:
dropped.append(vd)
return df, dropped
@classmethod
def iloc(cls, dataset, index):
import cudf
rows, cols = index
scalar = False
columns = list(dataset.data.columns)
if isinstance(cols, slice):
cols = [d.name for d in dataset.dimensions()][cols]
elif np.isscalar(cols):
scalar = np.isscalar(rows)
cols = [dataset.get_dimension(cols).name]
else:
cols = [dataset.get_dimension(d).name for d in index[1]]
col_index = [columns.index(c) for c in cols]
if np.isscalar(rows):
rows = [rows]
if scalar:
return dataset.data[cols[0]].iloc[rows[0]]
result = dataset.data.iloc[rows, col_index]
# cuDF does not handle single rows and cols indexing correctly
# as of cudf=0.10.0 so we have to convert Series back to DataFrame
if isinstance(result, cudf.Series):
if len(cols) == 1:
result = result.to_frame(cols[0])
else:
result = result.to_frame().T
return result
@classmethod
def sort(cls, dataset, by=[], reverse=False):
cols = [dataset.get_dimension(d, strict=True).name for d in by]
return dataset.data.sort_values(by=cols, ascending=not reverse)
@classmethod
def dframe(cls, dataset, dimensions):
if dimensions:
return dataset.data[dimensions].to_pandas()
else:
return dataset.data.to_pandas()
Interface.register(cuDFInterface)
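# Illustrative sketch (an assumption, not part of the original module): once
# registered, the interface is picked up automatically whenever a Dataset
# wraps a cuDF object. The names below are for the example only.
#   import cudf
#   import holoviews as hv
#   df = cudf.DataFrame({'x': [0, 1, 2], 'y': [0.0, 1.0, 4.0]})
#   ds = hv.Dataset(df, kdims=['x'], vdims=['y'])  # uses cuDFInterface
#   ds.range('y')                                  # -> (0.0, 4.0)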
|
|
#!/usr/bin/env python
"""
Tkinter GUI for editing Compass projects.
"""
try:
# Python 3
from tkinter import *
import tkinter.ttk as ttk
import tkinter.filedialog as tkFileDialog
except ImportError as e:
# Python 2
from Tkinter import *
import ttk
import tkFileDialog
import sys
import logging
from davies import compass
from davies.event import event
class OffsetShotEditor(ttk.Frame):
"""Offset survey paper style editor for Survey shots."""
NUM_SHOTS = 20
def __init__(self, parent, **kwargs):
ttk.Frame.__init__(self, parent, **kwargs)
self.stations = []
self.distances = []
self.azm_fss, self.azm_bss = [], []
self.clino_fss, self.clino_bss = [], []
self.lefts, self.rights, self.ups, self.downs = [], [], [], []
self.comments = []
#for i in range(22):
# self.rowconfigure(i, weight=1)
#for i in range(11):
# self.columnconfigure(i, weight=1)
self.columnconfigure(0, weight=1)
self.columnconfigure(10, weight=10)
ttk.Label(self, text='STA').grid(row=0, column=0, rowspan=2, sticky=S)
ttk.Label(self, text=' ').grid(row=0, column=1, columnspan=5)
ttk.Label(self, text='DIST').grid(row=1, column=1, rowspan=2, sticky=S)
ttk.Label(self, text='AZM').grid(row=1, column=2, rowspan=2, sticky=S)
ttk.Label(self, text='INC').grid(row=1, column=4, rowspan=2, sticky=S)
ttk.Label(self, text='L').grid(row=0, column=6, rowspan=2, sticky=S)
ttk.Label(self, text='R').grid(row=0, column=7, rowspan=2, sticky=S)
ttk.Label(self, text='U').grid(row=0, column=8, rowspan=2, sticky=S)
ttk.Label(self, text='D').grid(row=0, column=9, rowspan=2, sticky=S)
ttk.Label(self, text='NOTE').grid(row=0, column=10, rowspan=2, sticky=S)
ro = 2 # frame row offset
for i in range(2 * OffsetShotEditor.NUM_SHOTS + 1):
if i % 2 == 0:
# station-oriented
station = StringVar()
self.stations.append(station)
ttk.Entry(self, width=6, textvariable=station).grid(row=i+ro, column=0, rowspan=2)
l = IntVar()
self.lefts.append(l)
ttk.Entry(self, width=3, textvariable=l, justify=RIGHT).grid(row=i+ro, column=6, rowspan=2)
r = IntVar()
self.rights.append(r)
ttk.Entry(self, width=3, textvariable=r, justify=RIGHT).grid(row=i+ro, column=7, rowspan=2)
u = IntVar()
self.ups.append(u)
ttk.Entry(self, width=3, textvariable=u, justify=RIGHT).grid(row=i+ro, column=8, rowspan=2)
d = IntVar()
self.downs.append(d)
ttk.Entry(self, width=3, textvariable=d, justify=RIGHT).grid(row=i+ro, column=9, rowspan=2)
comment = StringVar()
self.comments.append(comment)
ttk.Entry(self, width=10, textvariable=comment).grid(row=i+ro, column=10, rowspan=2, sticky=E+W)
else:
# shot-oriented
dist = StringVar() # DoubleVar()
self.distances.append(dist)
ttk.Entry(self, width=6, textvariable=dist, justify=RIGHT).grid(row=i+ro, column=1, rowspan=2)
azm_fs = StringVar() # DoubleVar()
self.azm_fss.append(azm_fs)
ttk.Entry(self, width=6, textvariable=azm_fs, justify=RIGHT).grid(row=i+ro, column=2, rowspan=2)
clino_fs = StringVar() # DoubleVar()
self.clino_fss.append(clino_fs)
ttk.Entry(self, width=5, textvariable=clino_fs, justify=RIGHT).grid(row=i+ro, column=4, rowspan=2)
def populate(self, shots):
# TODO: some surveys may have LRUD associated with the 'TO' station rather than 'FROM' station
vars = self.stations, self.distances, self.azm_fss, self.clino_fss, self.lefts, self.rights, self.ups, self.downs, self.comments
keys = 'FROM', 'LENGTH', 'BEARING', 'INC', 'LEFT', 'RIGHT', 'UP', 'DOWN', 'COMMENTS'
offset_keys = 'LENGTH', 'BEARING', 'INC'
prev_station = None
ro = 0 # row offset
# Cap at the number of shot rows in the form: (2*NUM_SHOTS + 1) // 2 == NUM_SHOTS
for i, shot in enumerate(shots[:(2 * OffsetShotEditor.NUM_SHOTS + 1) // 2]):
#print shot
if prev_station and shot['FROM'] != prev_station:
ro += 1
for var, key in zip(vars, keys):
row = i+ro if key in offset_keys else i
var[row].set(shot.get(key, ''))
prev_station = shot['TO']
self.stations[len(shots) + ro].set(prev_station)
class SurveyEditor(ttk.Frame):
"""Compass Survey editor Frame."""
def __init__(self, parent, **kwargs):
ttk.Frame.__init__(self, parent, **kwargs) # 2x1
self.grid(row=0, column=0, sticky=N+S+E+W)
self.columnconfigure(0, weight=1)
self.rowconfigure(0, weight=0)
self.rowconfigure(1, weight=1)
self.header_frame = header_frame = ttk.Frame(self) # 4x3
header_frame.grid(row=0, column=0, sticky=E+W)
header_frame.columnconfigure(0, weight=1)
header_frame.columnconfigure(1, weight=10)
header_frame.columnconfigure(2, weight=1)
header_frame.columnconfigure(3, weight=10)
self.name = StringVar()
self.date = StringVar()
self.comment = StringVar()
self.team = StringVar()
ttk.Label(header_frame, text='Name:', anchor=E).grid(row=0, column=0)
ttk.Entry(header_frame, textvariable=self.name, width=9).grid(row=0, column=1, sticky=E+W)
ttk.Label(header_frame, text='Date:', anchor=E).grid(row=0, column=2)
ttk.Entry(header_frame, textvariable=self.date, width=9).grid(row=0, column=3, sticky=E+W)
ttk.Label(header_frame, text='Comment:', anchor=E).grid(row=1, column=0)
ttk.Entry(header_frame, textvariable=self.comment, width=40).grid(row=1, column=1, columnspan=3, sticky=E+W)
ttk.Label(header_frame, text='Team:', anchor=E).grid(row=2, column=0)
ttk.Entry(header_frame, textvariable=self.team, width=40).grid(row=2, column=1, columnspan=3, sticky=E+W)
self.shot_frame = shot_frame = OffsetShotEditor(self)
shot_frame.grid(row=1, column=0, sticky=N+S+E+W)
def populate(self, survey):
self.name.set(survey.name)
self.date.set(survey.date)
self.comment.set(survey.comment)
self.team.set(', '.join(survey.team))
self.shot_frame.populate(survey.shots)
class ProjectTreeview(ttk.Treeview):
"""Treeview widget for rendering and navigating Compass Project/DatFile/Survey hierarchy."""
COLUMNS = ('date', 'footage', 'comment')
def __init__(self, parent, **kwargs):
ttk.Treeview.__init__(self, parent, selectmode='browse', columns=ProjectTreeview.COLUMNS, **kwargs)
self.heading('#0', text='Name')
self.heading('date', text='Date')
self.column('date', stretch=FALSE, width=100)
self.heading('footage', text='Length')
self.column('footage', stretch=FALSE, width=100, anchor=E)
self.heading('comment', text='Comment')
#self.tag_configure('project', font='* 14 bold') # Python 3
#self.tag_configure('datfile', font='* 12 bold') # Python 3
self.bind('<<TreeviewSelect>>', self.onTreeSelect)
#self.tag_bind('survey', '<1>', self.onSelectSurvey)
self.columnconfigure(0, weight=1)
self.rowconfigure(0, weight=1)
self.grid(row=0, column=0, sticky=N+S+E+W)
self.current_project_iid = self.current_datfile_iid = self.current_survey_iid = ''
def reset(self):
for child_iid in self.get_children():
self.delete(child_iid)
self.current_project_iid = self.current_datfile_iid = self.current_survey_iid = ''
def doSetProject(self, project):
self.reset()
self.current_project_iid = self.insert('', 'end', '', text=project.name, values=('', '', ''), open=TRUE, tags=('project',))
def doAddDatfile(self, datfile):
values = '', '%0.1f' % datfile.length, ''
self.current_datfile_iid = self.insert(self.current_project_iid, 'end', text=datfile.name, values=values, tags=('datfile',))
def doAddSurvey(self, survey):
values = str(survey.date), '%0.1f' % survey.length, survey.comment
self.current_survey_iid = self.insert(self.current_datfile_iid, 'end', text=survey.name, values=values, tags=('survey',))
@event
def survey_selected(self, datfilename, surveyname):
"""Event fired when a Survey node has been selected"""
@event
def datfile_selected(self, datfilename):
"""Event fired when a DatFile node has been selected"""
@event
def project_selected(self):
"""Event fired when the Project top-level node has been selected"""
def onTreeSelect(self, e):
node_iid = self.focus()
node = self.item(node_iid)
if 'survey' in node['tags']:
surveyname = node['text']
datfilename = self.item(self.parent(node_iid))['text']
self.survey_selected(datfilename, surveyname)
# TODO: datfile, project
class AppGui(ttk.Frame):
"""Main application GUI."""
def __init__(self, parent, **kwargs):
ttk.Frame.__init__(self, parent, **kwargs)
self.parent = parent
menubar = Menu(parent)
parent['menu'] = menubar
menu_file = Menu(menubar)
menu_file.add_command(label='Open', command=self.openFile)
menubar.add_cascade(menu=menu_file, label='File')
mainframe = ttk.Frame(parent, padding=5) # encapsulates entire main application, 1x2
mainframe.grid(row=0, column=0, sticky=N+S+E+W)
mainframe.columnconfigure(0, weight=1)
mainframe.rowconfigure(0, weight=1)
pane = ttk.Panedwindow(mainframe, orient=HORIZONTAL) # encapsulates project tree and edit form
pane.grid(row=0, column=0, sticky=N+S+E+W)
#pane.grid_propagate(False)
treeframe = ttk.Frame(pane, relief='sunken', padding=5) # encapsulates project tree, 1x1
pane.add(treeframe, weight=1)
#treeframe.grid(row=0, column=0, sticky=N+S+E+W)
treeframe.columnconfigure(0, weight=1)
treeframe.rowconfigure(0, weight=1)
self.tree = tree = ProjectTreeview(treeframe)
self.tree.survey_selected += self.OnSurveySelected
self.editframe = editframe = ttk.Frame(pane, relief='sunken', padding=5) # encapsulates edit form, 1x1
pane.add(editframe, weight=1)
#editframe.grid(row=0, column=1, sticky=N+S+E+W)
editframe.columnconfigure(0, weight=1)
editframe.rowconfigure(0, weight=1)
self.editor = None
def openFile(self):
filetypes = [('Compass Project Files', ('*.MAK', '*.mak'))]
makfilename = tkFileDialog.askopenfilename(title='Choose a Compass Project file', defaultextension='.MAK', filetypes=filetypes)
if makfilename:
self.OnProjectOpen(makfilename)
@event
def OnProjectOpen(self, makfilename):
"""Event fired when user has selected a new Project file to open"""
@event
def OnSurveySelected(self, datfilename, surveyname):
"""Event fired when user clicks a Survey node"""
def doSetProject(self, project):
self.parent.title(project.name)
self.tree.doSetProject(project)
def doAddDatfile(self, datfile):
self.tree.doAddDatfile(datfile)
def doAddSurvey(self, survey):
self.tree.doAddSurvey(survey)
def doSurveySelected(self, survey):
if self.editor:
self.editor.grid_remove()
self.editor = editor = SurveyEditor(self.editframe)
editor.grid(row=0, column=0, sticky=N+S+E+W)
editor.populate(survey)
class AppController(object):
def __init__(self, parent):
self.project = None
self.ui = AppGui(parent, height=600, width=800)
self.wire_model()
self.wire_ui()
def wire_model(self):
pass
def wire_ui(self):
self.ui.OnProjectOpen += self.doOpenProject
self.ui.OnSurveySelected += self.doSurveySelected
def doOpenProject(self, makfilepath):
self.project = compass.CompassProjectParser(makfilepath).parse()
self.ui.doSetProject(self.project)
for datfile in self.project:
self.ui.doAddDatfile(datfile)
for survey in datfile:
self.ui.doAddSurvey(survey)
def doSurveySelected(self, datfilename, surveyname):
survey = self.project[datfilename][surveyname]
self.ui.doSurveySelected(survey)
def main(parent):
app = AppController(parent)
if __name__ == '__main__':
log_level = logging.DEBUG if '--verbose' in sys.argv else logging.INFO
logging.basicConfig(level=log_level)
root = Tk()
root.option_add('*tearOff', FALSE)
root.columnconfigure(0, weight=1)
root.rowconfigure(0, weight=1)
main(root)
root.mainloop()
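def _exampleEvent():
    """
    Illustrative sketch, not part of the original module: the ``@event``
    decorator from davies.event implements a simple observer pattern, used
    above via ``+=`` in AppController.wire_ui().
    """
    class Button(object):
        @event
        def clicked(self):
            """Fired when the button is clicked."""
    b = Button()
    b.clicked += lambda: sys.stdout.write('clicked!\n')
    b.clicked()  # invokes every handler subscribed with +=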
|
|
import datetime
import math
import os
from functools import partial
import numpy
import pyproj
import rasterio
from django.conf import settings
from ncdjango.models import Service
from netCDF4 import Dataset
from rasterio.crs import CRS
from rasterio.enums import Resampling
from rasterio.features import rasterize
from rasterio.vrt import WarpedVRT
from shapely.geometry import Point
from shapely.ops import transform
from trefoil.netcdf.variable import SpatialCoordinateVariables
class Constraint(object):
def __init__(self, data, region):
self.data = data
self.region = region
self.mask = None
self.slice = None
@staticmethod
def by_name(constraint):
return {
'elevation': ElevationConstraint,
'photoperiod': PhotoperiodConstraint,
'latitude': LatitudeConstraint,
'longitude': LongitudeConstraint,
'distance': DistanceConstraint,
'shapefile': GeometryConstraint,
'raster': RasterConstraint
}[constraint]
def apply_constraint(self, **kwargs):
if self.mask is None:
self.mask = self.get_mask(**kwargs)
crop = numpy.argwhere(self.mask == False)
if crop.any():
(y_start, x_start), (y_stop, x_stop) = crop.min(0), crop.max(0) + 1
self.slice = (slice(x_start, x_stop), slice(y_start, y_stop))
self.mask = self.mask[self.slice[1], self.slice[0]]
mask = self.data.mask[self.slice[1], self.slice[0]] if self.slice else self.data.mask
data = self.data[self.slice[1], self.slice[0]] if self.slice else self.data
return numpy.ma.masked_where(self.mask | mask, data)
def get_mask(self, **kwargs):
raise NotImplementedError
class ElevationConstraint(Constraint):
def get_mask(self, **kwargs):
try:
min_elevation = kwargs['min']
max_elevation = kwargs['max']
except KeyError:
raise ValueError('Missing constraint arguments')
service = Service.objects.get(name='{}_dem'.format(self.region))
with Dataset(os.path.join(settings.NC_SERVICE_DATA_ROOT, service.data_path)) as ds:
v = service.variable_set.first()
coords = SpatialCoordinateVariables.from_bbox(
v.full_extent, ds.variables[v.x_dimension].size, ds.variables[v.y_dimension].size, dtype='float64'
)
window = coords.get_window_for_bbox(self.data.extent)
elevation = ds.variables['elevation'][window.y_slice, window.x_slice]
mask = elevation < min_elevation
mask |= elevation > max_elevation
return mask
class PhotoperiodConstraint(Constraint):
def get_julian_day(self, date):
a = (14 - date.month) // 12
y = date.year + 4800 - a
m = date.month + 12 * a - 3
julian_date = date.day + (153 * m + 2) // 5 + 365 * y + y // 4 - y // 100 + y // 400 - 32045
return julian_date - 2451545 + .0008
def daylight(self, date, lat, lon):
""" Returns daylight hours for a single lat/lon point """
julian_day = self.get_julian_day(date)
solar_noon = julian_day - lon//360
solar_anomaly = (357.5291 + 0.98560028*solar_noon) % 360
equation_of_center = (
1.9148*math.sin(math.radians(solar_anomaly)) +
0.0200*math.sin(math.radians(2*solar_anomaly)) +
0.0003*math.sin(math.radians(3*solar_anomaly))
)
ecliptic_longitude = (solar_anomaly + equation_of_center + 180 + 102.9372) % 360
solar_transit = (
2451545.5 + solar_noon + 0.0053*math.sin(math.radians(solar_anomaly)) -
0.0069*math.sin(math.radians(2*ecliptic_longitude))
)
declination = math.asin(math.sin(math.radians(ecliptic_longitude))*math.sin(math.radians(23.44)))
hour_angle = math.acos(
(math.sin(math.radians(-.83)) - math.sin(math.radians(lat))*math.sin(declination)) /
(math.cos(math.radians(lat))*math.cos(declination))
)
sunrise = solar_transit - math.degrees(hour_angle)/360
sunset = solar_transit + math.degrees(hour_angle)/360
return (sunset - sunrise) * 24
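# Worked example (approximate, an assumption for illustration): near an
# equinox this formula yields close to 12 hours at any latitude, slightly
# more because of the -0.83 degree refraction correction, e.g.
#   self.daylight(datetime.date(2020, 3, 20), lat=45.0, lon=-120.0)
#   # -> roughly 12.2 hours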
def daylight_array(self, date, lat, lon):
""" Returns daylight hours for arrays of lat/lon points """
julian_day = self.get_julian_day(date)
lat_arr = numpy.tile(lat.reshape(len(lat), 1), (1, len(lon)))
lon_arr = numpy.tile(lon, (len(lat), 1))
# solar_noon = julian_day - lon//360
solar_noon = lon_arr
del lon_arr
solar_noon //= 360
solar_noon -= julian_day
solar_noon *= -1
# solar_anomaly = (357.5291 + 0.98560028 * solar_noon) % 360
solar_anomaly = solar_noon * 0.98560028
solar_anomaly += 357.5291
solar_anomaly %= 360
# equation_of_center = (
# 1.9148 * math.sin(math.radians(solar_anomaly)) +
# 0.0200 * math.sin(math.radians(2 * solar_anomaly)) +
# 0.0003 * math.sin(math.radians(3 * solar_anomaly))
# )
equation_of_center = numpy.radians(solar_anomaly)
numpy.sin(equation_of_center, equation_of_center)
equation_of_center *= 1.9148
equation_of_center_2 = solar_anomaly * 2
numpy.radians(equation_of_center_2, equation_of_center_2)
numpy.sin(equation_of_center_2, equation_of_center_2)
equation_of_center_2 *= 0.0200
equation_of_center += equation_of_center_2
equation_of_center_2 = solar_anomaly * 3
numpy.radians(equation_of_center_2, equation_of_center_2)
numpy.sin(equation_of_center_2, equation_of_center_2)
equation_of_center_2 *= 0.0003
equation_of_center += equation_of_center_2
del equation_of_center_2
# ecliptic_longitude = (solar_anomaly + equation_of_center + 180 + 102.9372) % 360
ecliptic_longitude = equation_of_center
del equation_of_center
ecliptic_longitude += solar_anomaly
ecliptic_longitude += 282.9372 # 180 + 102.9372
ecliptic_longitude %= 360
# solar_transit = (
# 2451545.5 + solar_noon + 0.0053*math.sin(math.radians(solar_anomaly)) -
# 0.0069*math.sin(math.radians(2*ecliptic_longitude))
# )
solar_transit = solar_noon
del solar_noon
solar_transit += 2451545.5
numpy.radians(solar_anomaly, solar_anomaly)
numpy.sin(solar_anomaly, solar_anomaly)
solar_anomaly *= 0.0053
solar_transit += solar_anomaly
del solar_anomaly
solar_transit_2 = ecliptic_longitude * 2
numpy.radians(solar_transit_2, solar_transit_2)
numpy.sin(solar_transit_2, solar_transit_2)
solar_transit_2 *= 0.0069
solar_transit -= solar_transit_2
del solar_transit_2
# declination = math.asin(math.sin(math.radians(ecliptic_longitude))*math.sin(math.radians(23.44)))
declination = ecliptic_longitude
del ecliptic_longitude
numpy.radians(declination, declination)
numpy.sin(declination, declination)
declination *= math.sin(math.radians(23.44))
numpy.arcsin(declination, declination)
# hour_angle = math.acos(
#     (math.sin(math.radians(-.83)) - math.sin(math.radians(lat)) * math.sin(declination)) /
#     (math.cos(math.radians(lat)) * math.cos(declination))
# )
hour_angle = numpy.radians(lat_arr)
numpy.sin(hour_angle, hour_angle)
hour_angle *= numpy.sin(declination)
hour_angle -= math.sin(math.radians(-.83))
hour_angle *= -1
numpy.radians(lat_arr, lat_arr)
numpy.cos(lat_arr, lat_arr)
numpy.cos(declination, declination)
lat_arr *= declination
del declination
hour_angle /= lat_arr
del lat_arr
numpy.arccos(hour_angle, hour_angle)
# sunrise = solar_transit - math.degrees(hour_angle) / 360
# sunset = solar_transit + math.degrees(hour_angle) / 360
# return (sunset - sunrise) * 24
numpy.degrees(hour_angle, hour_angle)
hour_angle /= 360
solar_transit = solar_transit.astype('float64')
days = (solar_transit + hour_angle) - (solar_transit - hour_angle)
days *= 24
return days
def get_mask(self, hours, lat, lon, year, month, day):
date = datetime.date(year, month, day)
daylight = self.daylight(date, lat, lon)
service = Service.objects.get(name='{}_dem'.format(self.region))
with Dataset(os.path.join(settings.NC_SERVICE_DATA_ROOT, service.data_path)) as ds:
lat_arr = ds['lat'][:]
lon_arr = ds['lon'][:]
coords = SpatialCoordinateVariables.from_bbox(
service.full_extent, ds.variables['lon'].size, ds.variables['lat'].size, dtype='float64'
)
window = coords.get_window_for_bbox(self.data.extent)
daylight_arr = self.daylight_array(date, lat_arr[window.y_slice], lon_arr[window.x_slice])
mask = daylight_arr < (daylight - hours)
mask |= daylight_arr > (daylight + hours)
return mask
class LatitudeConstraint(Constraint):
def get_mask(self, **kwargs):
try:
min_lat = kwargs['min']
max_lat = kwargs['max']
except KeyError:
raise ValueError('Missing constraint arguments')
min_lat, max_lat = sorted((min_lat, max_lat))
coords = SpatialCoordinateVariables.from_bbox(self.data.extent, *reversed(self.data.shape))
half_pixel_size = float(coords.y.pixel_size) / 2
start, stop = coords.y.indices_for_range(min_lat + half_pixel_size, max_lat - half_pixel_size)
mask = numpy.zeros_like(self.data, 'bool')
mask[:start] = True
mask[stop+1:] = True
return mask
class LongitudeConstraint(Constraint):
def get_mask(self, **kwargs):
try:
min_lon = kwargs['min']
max_lon = kwargs['max']
except KeyError:
raise ValueError('Missing constraint arguments')
min_lon, max_lon = sorted((min_lon, max_lon))
coords = SpatialCoordinateVariables.from_bbox(self.data.extent, *reversed(self.data.shape))
half_pixel_size = float(coords.x.pixel_size) / 2
start, stop = coords.x.indices_for_range(min_lon + half_pixel_size, max_lon - half_pixel_size)
mask = numpy.zeros_like(self.data, 'bool')
mask[:,:start] = True
mask[:,stop+1:] = True
return mask
class DistanceConstraint(Constraint):
def get_mask(self, lat, lon, distance):
wgs84 = pyproj.Proj('+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
p = pyproj.Proj({
'proj': 'tmerc',
'lat_0': lat,
'lon_0': lon,
'k': 1,
'x_0': 0,
'y_0': 0,
'ellps': 'WGS84',
'towgs84': '0,0,0,0,0,0,0',
'units': 'm'
})
# Snap point to nearest cell center
coords = SpatialCoordinateVariables.from_bbox(self.data.extent, *reversed(self.data.shape))
lat_pixel_size = coords.y.pixel_size if coords.y.is_ascending_order() else -1 * coords.y.pixel_size
lat = (
sorted(coords.y.values)[
int((lat - coords.y.pixel_size * 1.5 - self.data.extent.ymin) / coords.y.pixel_size)
] - lat_pixel_size/2
)
lon = (
sorted(coords.x.values)[
int((lon - coords.x.pixel_size * 1.5 - self.data.extent.xmin) / coords.x.pixel_size)
] - coords.x.pixel_size/2
)
project_to_custom = partial(pyproj.transform, wgs84, p)
project_to_data = partial(pyproj.transform, p, self.data.extent.projection)
shape = transform(
project_to_data, transform(project_to_custom, Point(lon, lat)).buffer(distance * 1000, resolution=64)
)
return rasterize(
[shape], out_shape=self.data.shape, fill=1, transform=coords.affine, all_touched=True, default_value=0,
dtype=numpy.uint8
)
class GeometryConstraint(Constraint):
def get_mask(self, **kwargs):
try:
geoJSON = kwargs['geoJSON']
except KeyError:
raise ValueError('Missing constraint arguments')
features = geoJSON['features']
geometries = [f['geometry'] for f in features]
coords = SpatialCoordinateVariables.from_bbox(self.data.extent, *reversed(self.data.shape))
return rasterize(
geometries, out_shape=self.data.shape, fill=1, transform=coords.affine, all_touched=True, default_value=0,
dtype=numpy.uint8
)
class RasterConstraint(Constraint):
def warp_to_grid(self, path):
with rasterio.open(path) as dataset:
bbox = self.data.extent
vrt_options = {
'resampling': Resampling.nearest,
'dst_crs': CRS.from_string(bbox.projection.srs),
'dst_transform': SpatialCoordinateVariables.from_bbox(
bbox, self.data.shape[1], self.data.shape[0]
).affine,
'dst_height': self.data.shape[self.data.y_dim],
'dst_width': self.data.shape[self.data.x_dim]
}
with WarpedVRT(dataset, **vrt_options) as vrt:
return vrt.read(1, masked=True)
def get_mask(self, **kwargs):
try:
service_name = kwargs['service']
except KeyError:
raise ValueError('Missing constraint arguments')
try:
service = Service.objects.get(name=service_name)
except Service.DoesNotExist:
raise ValueError('Service {} does not exist'.format(service_name))
raster = self.warp_to_grid(os.path.join(settings.NC_SERVICE_DATA_ROOT, service.data_path))
return raster < 1
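# Illustrative sketch (an assumption, not part of the original module):
# constraints are looked up by name and applied to masked-array data; the
# region name and bounds below are hypothetical.
#   cls = Constraint.by_name('elevation')  # -> ElevationConstraint
#   constraint = cls(data, region='west2')
#   data = constraint.apply_constraint(min=500, max=2000)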
|
|
# This file contains various useful constants for py3status
GENERAL_DEFAULTS = {
"color_bad": "#FF0000",
"color_degraded": "#FFFF00",
"color_good": "#00FF00",
"color_separator": "#333333",
"colors": True,
"interval": 5,
"output_format": "i3bar",
}
MAX_NESTING_LEVELS = 4
TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
TZTIME_FORMAT = "%Y-%m-%d %H:%M:%S %Z"
TIME_MODULES = ["time", "tztime"]
I3S_INSTANCE_MODULES = [
"battery",
"cpu_temperature",
"disk",
"ethernet",
"memory",
"path_exists",
"read_file",
"run_watch",
"tztime",
"volume",
"wireless",
]
I3S_SINGLE_NAMES = ["cpu_usage", "ddate", "ipv6", "load", "time"]
I3S_ALLOWED_COLORS = ["color_bad", "color_good", "color_degraded"]
# i3status modules that allow colors to be passed.
# general section also allows colors so is included.
I3S_COLOR_MODULES = ["general", "battery", "cpu_temperature", "disk", "load"]
I3S_MODULE_NAMES = I3S_SINGLE_NAMES + I3S_INSTANCE_MODULES
CONFIG_FILE_SPECIAL_SECTIONS = ["general", "py3status"]
ERROR_CONFIG = """
general {colors = true interval = 60}
order += "static_string py3status"
order += "tztime local"
order += "group error"
static_string py3status {format = "py3status"}
tztime local {format = "%c"}
group error{
button_next = 1
button_prev = 0
fixed_width = False
format = "{output}"
static_string error_min {format = "CONFIG ERROR" color = "#FF0000"}
static_string error {format = "$error" color = "#FF0000"}
}
"""
COLOR_NAMES_EXCLUDED = ["good", "bad", "degraded", "separator", "threshold", "None"]
COLOR_NAMES = {
"aliceblue": "#F0F8FF",
"antiquewhite": "#FAEBD7",
"aqua": "#00FFFF",
"aquamarine": "#7FFFD4",
"azure": "#F0FFFF",
"beige": "#F5F5DC",
"bisque": "#FFE4C4",
"black": "#000000",
"blanchedalmond": "#FFEBCD",
"blue": "#0000FF",
"blueviolet": "#8A2BE2",
"brown": "#A52A2A",
"burlywood": "#DEB887",
"cadetblue": "#5F9EA0",
"chartreuse": "#7FFF00",
"chocolate": "#D2691E",
"coral": "#FF7F50",
"cornflowerblue": "#6495ED",
"cornsilk": "#FFF8DC",
"crimson": "#DC143C",
"cyan": "#00FFFF",
"darkblue": "#00008B",
"darkcyan": "#008B8B",
"darkgoldenrod": "#B8860B",
"darkgray": "#A9A9A9",
"darkgrey": "#A9A9A9",
"darkgreen": "#006400",
"darkkhaki": "#BDB76B",
"darkmagenta": "#8B008B",
"darkolivegreen": "#556B2F",
"darkorange": "#FF8C00",
"darkorchid": "#9932CC",
"darkred": "#8B0000",
"darksalmon": "#E9967A",
"darkseagreen": "#8FBC8F",
"darkslateblue": "#483D8B",
"darkslategray": "#2F4F4F",
"darkslategrey": "#2F4F4F",
"darkturquoise": "#00CED1",
"darkviolet": "#9400D3",
"deeppink": "#FF1493",
"deepskyblue": "#00BFFF",
"dimgray": "#696969",
"dimgrey": "#696969",
"dodgerblue": "#1E90FF",
"firebrick": "#B22222",
"floralwhite": "#FFFAF0",
"forestgreen": "#228B22",
"fuchsia": "#FF00FF",
"gainsboro": "#DCDCDC",
"ghostwhite": "#F8F8FF",
"gold": "#FFD700",
"goldenrod": "#DAA520",
"gray": "#808080",
"grey": "#808080",
"green": "#008000",
"greenyellow": "#ADFF2F",
"honeydew": "#F0FFF0",
"hotpink": "#FF69B4",
"indianred": "#CD5C5C",
"indigo": "#4B0082",
"ivory": "#FFFFF0",
"khaki": "#F0E68C",
"lavender": "#E6E6FA",
"lavenderblush": "#FFF0F5",
"lawngreen": "#7CFC00",
"lemonchiffon": "#FFFACD",
"lightblue": "#ADD8E6",
"lightcoral": "#F08080",
"lightcyan": "#E0FFFF",
"lightgoldenrodyellow": "#FAFAD2",
"lightgray": "#D3D3D3",
"lightgrey": "#D3D3D3",
"lightgreen": "#90EE90",
"lightpink": "#FFB6C1",
"lightsalmon": "#FFA07A",
"lightseagreen": "#20B2AA",
"lightskyblue": "#87CEFA",
"lightslategray": "#778899",
"lightslategrey": "#778899",
"lightsteelblue": "#B0C4DE",
"lightyellow": "#FFFFE0",
"lime": "#00FF00",
"limegreen": "#32CD32",
"linen": "#FAF0E6",
"magenta": "#FF00FF",
"maroon": "#800000",
"mediumaquamarine": "#66CDAA",
"mediumblue": "#0000CD",
"mediumorchid": "#BA55D3",
"mediumpurple": "#9370DB",
"mediumseagreen": "#3CB371",
"mediumslateblue": "#7B68EE",
"mediumspringgreen": "#00FA9A",
"mediumturquoise": "#48D1CC",
"mediumvioletred": "#C71585",
"midnightblue": "#191970",
"mintcream": "#F5FFFA",
"mistyrose": "#FFE4E1",
"moccasin": "#FFE4B5",
"navajowhite": "#FFDEAD",
"navy": "#000080",
"oldlace": "#FDF5E6",
"olive": "#808000",
"olivedrab": "#6B8E23",
"orange": "#FFA500",
"orangered": "#FF4500",
"orchid": "#DA70D6",
"palegoldenrod": "#EEE8AA",
"palegreen": "#98FB98",
"paleturquoise": "#AFEEEE",
"palevioletred": "#DB7093",
"papayawhip": "#FFEFD5",
"peachpuff": "#FFDAB9",
"peru": "#CD853F",
"pink": "#FFC0CB",
"plum": "#DDA0DD",
"powderblue": "#B0E0E6",
"purple": "#800080",
"rebeccapurple": "#663399",
"red": "#FF0000",
"rosybrown": "#BC8F8F",
"royalblue": "#4169E1",
"saddlebrown": "#8B4513",
"salmon": "#FA8072",
"sandybrown": "#F4A460",
"seagreen": "#2E8B57",
"seashell": "#FFF5EE",
"sienna": "#A0522D",
"silver": "#C0C0C0",
"skyblue": "#87CEEB",
"slateblue": "#6A5ACD",
"slategray": "#708090",
"slategrey": "#708090",
"snow": "#FFFAFA",
"springgreen": "#00FF7F",
"steelblue": "#4682B4",
"tan": "#D2B48C",
"teal": "#008080",
"thistle": "#D8BFD8",
"tomato": "#FF6347",
"turquoise": "#40E0D0",
"violet": "#EE82EE",
"wheat": "#F5DEB3",
"white": "#FFFFFF",
"whitesmoke": "#F5F5F5",
"yellow": "#FFFF00",
"yellowgreen": "#9ACD32",
}
ON_TRIGGER_ACTIONS = ["refresh", "refresh_and_freeze"]
POSITIONS = ["left", "center", "right"]
RETIRED_MODULES = {
"bitcoin_price": {
"new": ["coin_market"],
"msg": "Module {old} is no longer available due to unmaintained APIs. You can try a different module {new}.",
},
"nvidia_temp": {
"new": ["nvidia_smi"],
"msg": "Module {old} has been replaced with a module {new}.",
},
"scratchpad_async": {
"new": ["scratchpad"],
"msg": "Module {old} has been replaced with a consolidated module {new}.",
},
"scratchpad_counter": {
"new": ["scratchpad"],
"msg": "Module {old} has been replaced with a consolidated module {new}.",
},
"window_title": {
"new": ["window"],
"msg": "Module {old} has been replaced with a consolidated module {new}.",
},
"window_title_async": {
"new": ["window"],
"msg": "Module {old} has been replaced with a consolidated module {new}.",
},
"weather_yahoo": {
"new": ["weather_owm"],
"msg": "Module {old} is no longer available due to retired Yahoo Weather APIs and new Oath requirements. You can try a different module {new}.",
},
"xkb_layouts": {
"new": ["xkb_input"],
"msg": "Module {old} has been replaced with a module {new} to support sway too.",
},
}
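def _example_retirement_notice():
    """Illustrative sketch, not part of py3status itself: rendering a
    retirement notice from the RETIRED_MODULES table above."""
    info = RETIRED_MODULES["window_title"]
    # -> 'Module window_title has been replaced with a consolidated module window.'
    return info["msg"].format(old="window_title", new=", ".join(info["new"]))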
MARKUP_LANGUAGES = ["pango", "none"]
ON_ERROR_VALUES = ["hide", "show"]
|
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# TODO Move this to a package shared by CLI and SDK
from enum import Enum
from functools import total_ordering
from importlib import import_module
from knack.log import get_logger
logger = get_logger(__name__)
class APIVersionException(Exception):
def __init__(self, type_name, api_profile):
super(APIVersionException, self).__init__(type_name, api_profile)
self.type_name = type_name
self.api_profile = api_profile
def __str__(self):
return "Unable to get API version for type '{}' in profile '{}'".format(
self.type_name, self.api_profile)
# Sentinel value for profile
PROFILE_TYPE = object()
class CustomResourceType: # pylint: disable=too-few-public-methods
def __init__(self, import_prefix, client_name):
self.import_prefix = import_prefix
self.client_name = client_name
class ResourceType(Enum): # pylint: disable=too-few-public-methods
MGMT_APIMANAGEMENT = ('azure.mgmt.apimanagement', 'ApiManagementClient')
MGMT_KUSTO = ('azure.mgmt.kusto', 'KustoManagementClient')
MGMT_KEYVAULT = ('azure.mgmt.keyvault', 'KeyVaultManagementClient')
MGMT_STORAGE = ('azure.mgmt.storage', 'StorageManagementClient')
MGMT_COMPUTE = ('azure.mgmt.compute', 'ComputeManagementClient')
MGMT_NETWORK = ('azure.mgmt.network', 'NetworkManagementClient')
MGMT_NETWORK_DNS = ('azure.mgmt.dns', 'DnsManagementClient')
MGMT_AUTHORIZATION = ('azure.mgmt.authorization', 'AuthorizationManagementClient')
MGMT_CONTAINERREGISTRY = ('azure.mgmt.containerregistry', 'ContainerRegistryManagementClient')
MGMT_RESOURCE_FEATURES = ('azure.mgmt.resource.features', 'FeatureClient')
MGMT_RESOURCE_LINKS = ('azure.mgmt.resource.links', 'ManagementLinkClient')
MGMT_RESOURCE_LOCKS = ('azure.mgmt.resource.locks', 'ManagementLockClient')
MGMT_RESOURCE_POLICY = ('azure.mgmt.resource.policy', 'PolicyClient')
MGMT_RESOURCE_RESOURCES = ('azure.mgmt.resource.resources', 'ResourceManagementClient')
MGMT_RESOURCE_SUBSCRIPTIONS = ('azure.mgmt.resource.subscriptions', 'SubscriptionClient')
MGMT_RESOURCE_DEPLOYMENTSCRIPTS = ('azure.mgmt.resource.deploymentscripts', 'DeploymentScriptsClient')
MGMT_RESOURCE_TEMPLATESPECS = ('azure.mgmt.resource.templatespecs', 'TemplateSpecsClient')
MGMT_MONITOR = ('azure.mgmt.monitor', 'MonitorManagementClient')
DATA_KEYVAULT = ('azure.keyvault', 'KeyVaultClient')
DATA_KEYVAULT_KEYS = ('azure.keyvault.keys', 'KeyClient')
DATA_PRIVATE_KEYVAULT = ('azure.cli.command_modules.keyvault.vendored_sdks.azure_keyvault_t1', 'KeyVaultClient')
DATA_KEYVAULT_ADMINISTRATION_BACKUP = ('azure.keyvault.administration', 'KeyVaultBackupClient')
DATA_KEYVAULT_ADMINISTRATION_ACCESS_CONTROL = ('azure.keyvault.administration', 'KeyVaultAccessControlClient')
MGMT_EVENTHUB = ('azure.mgmt.eventhub', 'EventHubManagementClient')
MGMT_APPSERVICE = ('azure.mgmt.web', 'WebSiteManagementClient')
MGMT_IOTCENTRAL = ('azure.mgmt.iotcentral', 'IotCentralClient')
MGMT_IOTHUB = ('azure.mgmt.iothub', 'IotHubClient')
MGMT_IOTDPS = ('azure.mgmt.iothubprovisioningservices', 'IotDpsClient')
MGMT_ARO = ('azure.mgmt.redhatopenshift', 'AzureRedHatOpenShift4Client')
MGMT_DATABOXEDGE = ('azure.mgmt.databoxedge', 'DataBoxEdgeManagementClient')
MGMT_CUSTOMLOCATION = ('azure.mgmt.extendedlocation', 'CustomLocations')
MGMT_CONTAINERSERVICE = ('azure.mgmt.containerservice', 'ContainerServiceClient')
# the "None" below will stay till a command module fills in the type so "get_mgmt_service_client"
# can be provided with "ResourceType.XXX" to initialize the client object. This usually happens
# when related commands start to support Multi-API
DATA_COSMOS_TABLE = ('azure.multiapi.cosmosdb', None)
MGMT_ADVISOR = ('azure.mgmt.advisor', None)
MGMT_MEDIA = ('azure.mgmt.media', None)
MGMT_BACKUP = ('azure.mgmt.recoveryservicesbackup', None)
MGMT_BATCH = ('azure.mgmt.batch', None)
MGMT_BATCHAI = ('azure.mgmt.batchai', None)
MGMT_BILLING = ('azure.mgmt.billing', None)
MGMT_BOTSERVICE = ('azure.mgmt.botservice', None)
MGMT_CDN = ('azure.mgmt.cdn', None)
MGMT_COGNITIVESERVICES = ('azure.mgmt.cognitiveservices', None)
MGMT_CONSUMPTION = ('azure.mgmt.consumption', None)
MGMT_CONTAINERINSTANCE = ('azure.mgmt.containerinstance', None)
MGMT_COSMOSDB = ('azure.mgmt.cosmosdb', None)
MGMT_DEPLOYMENTMANAGER = ('azure.mgmt.deploymentmanager', None)
MGMT_DATALAKE_ANALYTICS = ('azure.mgmt.datalake.analytics', None)
MGMT_DATALAKE_STORE = ('azure.mgmt.datalake.store', None)
MGMT_DATAMIGRATION = ('azure.mgmt.datamigration', None)
MGMT_EVENTGRID = ('azure.mgmt.eventgrid', None)
MGMT_DEVTESTLABS = ('azure.mgmt.devtestlabs', None)
MGMT_MAPS = ('azure.mgmt.maps', None)
MGMT_POLICYINSIGHTS = ('azure.mgmt.policyinsights', None)
MGMT_RDBMS = ('azure.mgmt.rdbms', None)
MGMT_REDIS = ('azure.mgmt.redis', None)
MGMT_RELAY = ('azure.mgmt.relay', None)
MGMT_RESERVATIONS = ('azure.mgmt.reservations', None)
MGMT_SEARCH = ('azure.mgmt.search', None)
MGMT_SERVICEBUS = ('azure.mgmt.servicebus', None)
MGMT_SERVICEFABRIC = ('azure.mgmt.servicefabric', None)
MGMT_SIGNALR = ('azure.mgmt.signalr', None)
MGMT_SQL = ('azure.mgmt.sql', None)
MGMT_SQLVM = ('azure.mgmt.sqlvirtualmachine', None)
MGMT_MANAGEDSERVICES = ('azure.mgmt.managedservices', None)
MGMT_NETAPPFILES = ('azure.mgmt.netappfiles', None)
DATA_STORAGE = ('azure.multiapi.storage', None)
DATA_STORAGE_BLOB = ('azure.multiapi.storagev2.blob', None)
DATA_STORAGE_FILEDATALAKE = ('azure.multiapi.storagev2.filedatalake', None)
DATA_STORAGE_FILESHARE = ('azure.multiapi.storagev2.fileshare', None)
DATA_STORAGE_QUEUE = ('azure.multiapi.storagev2.queue', None)
def __init__(self, import_prefix, client_name):
"""Constructor.
:param import_prefix: Path to the (unversioned) module.
:type import_prefix: str.
:param client_name: Name of the client for this resource type.
:type client_name: str.
"""
self.import_prefix = import_prefix
self.client_name = client_name
class SDKProfile: # pylint: disable=too-few-public-methods
def __init__(self, default_api_version, profile=None):
"""Constructor.
:param str default_api_version: Default API version if not overridden by a profile. Nullable.
:param profile: A dict mapping operation group name to API version.
:type profile: dict[str, str]
"""
self.profile = profile if profile is not None else {}
self.profile[None] = default_api_version
@property
def default_api_version(self):
return self.profile[None]
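def _example_sdk_profile():
    """Illustrative sketch, not part of the original module: an SDKProfile
    stores the default version under the key ``None`` and per-operation-group
    overrides under their group names."""
    p = SDKProfile('2021-07-01', {'disks': '2021-04-01'})
    assert p.default_api_version == '2021-07-01'
    assert p.profile.get('disks', p.default_api_version) == '2021-04-01'
    # Groups without an explicit entry fall back to the default:
    assert p.profile.get('snapshots', p.default_api_version) == '2021-07-01'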
AZURE_API_PROFILES = {
'latest': {
ResourceType.MGMT_STORAGE: '2021-06-01',
ResourceType.MGMT_NETWORK: '2021-05-01',
ResourceType.MGMT_COMPUTE: SDKProfile('2021-07-01', {
'resource_skus': '2019-04-01',
'disks': '2021-04-01',
'disk_encryption_sets': '2020-12-01',
'disk_accesses': '2020-05-01',
'snapshots': '2021-04-01',
'galleries': '2021-07-01',
'gallery_images': '2020-09-30',
'gallery_image_versions': '2021-07-01',
'shared_galleries': '2020-09-30',
'virtual_machine_scale_sets': '2021-07-01',
}),
ResourceType.MGMT_RESOURCE_FEATURES: '2021-07-01',
ResourceType.MGMT_RESOURCE_LINKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_LOCKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_POLICY: '2021-06-01',
ResourceType.MGMT_RESOURCE_RESOURCES: '2021-04-01',
ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS: '2019-11-01',
ResourceType.MGMT_RESOURCE_DEPLOYMENTSCRIPTS: '2020-10-01',
ResourceType.MGMT_RESOURCE_TEMPLATESPECS: '2021-05-01',
ResourceType.MGMT_NETWORK_DNS: '2018-05-01',
ResourceType.MGMT_KEYVAULT: SDKProfile('2021-04-01-preview', {
'vaults': '2021-06-01-preview'
}),
ResourceType.MGMT_AUTHORIZATION: SDKProfile('2020-04-01-preview', {
'classic_administrators': '2015-06-01',
'role_definitions': '2018-01-01-preview',
'provider_operations_metadata': '2018-01-01-preview'
}),
ResourceType.MGMT_CONTAINERREGISTRY: SDKProfile('2021-08-01-preview', {
'agent_pools': '2019-06-01-preview',
'tasks': '2019-06-01-preview',
'task_runs': '2019-06-01-preview',
'runs': '2019-06-01-preview',
}),
# The order does make things different.
# Please keep ResourceType.DATA_KEYVAULT_KEYS before ResourceType.DATA_KEYVAULT
ResourceType.DATA_KEYVAULT_KEYS: None,
ResourceType.DATA_KEYVAULT: '7.0',
ResourceType.DATA_PRIVATE_KEYVAULT: '7.2',
ResourceType.DATA_KEYVAULT_ADMINISTRATION_BACKUP: '7.2-preview',
ResourceType.DATA_KEYVAULT_ADMINISTRATION_ACCESS_CONTROL: '7.2-preview',
ResourceType.DATA_STORAGE: '2018-11-09',
ResourceType.DATA_STORAGE_BLOB: '2020-10-02',
ResourceType.DATA_STORAGE_FILEDATALAKE: '2020-02-10',
ResourceType.DATA_STORAGE_FILESHARE: '2019-07-07',
ResourceType.DATA_STORAGE_QUEUE: '2018-03-28',
ResourceType.DATA_COSMOS_TABLE: '2017-04-17',
ResourceType.MGMT_EVENTHUB: '2021-06-01-preview',
ResourceType.MGMT_MONITOR: SDKProfile('2019-06-01', {
'action_groups': '2021-09-01',
'activity_log_alerts': '2017-04-01',
'activity_logs': '2015-04-01',
'alert_rule_incidents': '2016-03-01',
'alert_rules': '2016-03-01',
'autoscale_settings': '2015-04-01',
'baseline': '2018-09-01',
'baselines': '2019-03-01',
'diagnostic_settings': '2017-05-01-preview',
'diagnostic_settings_category': '2017-05-01-preview',
'event_categories': '2015-04-01',
'guest_diagnostics_settings': '2018-06-01-preview',
'guest_diagnostics_settings_association': '2018-06-01-preview',
'log_profiles': '2016-03-01',
'metric_alerts': '2018-03-01',
'metric_alerts_status': '2018-03-01',
'metric_baseline': '2018-09-01',
'metric_definitions': '2018-01-01',
'metric_namespaces': '2017-12-01-preview',
'metrics': '2018-01-01',
'operations': '2015-04-01',
'scheduled_query_rules': '2018-04-16',
'service_diagnostic_settings': '2016-09-01',
'tenant_activity_logs': '2015-04-01',
'vm_insights': '2018-11-27-preview',
'private_link_resources': '2019-10-17-preview',
'private_link_scoped_resources': '2019-10-17-preview',
'private_link_scope_operation_status': '2019-10-17-preview',
'private_link_scopes': '2019-10-17-preview',
'private_endpoint_connections': '2019-10-17-preview',
'subscription_diagnostic_settings': '2017-05-01-preview'
}),
ResourceType.MGMT_APPSERVICE: '2020-09-01',
ResourceType.MGMT_IOTHUB: '2021-07-01',
ResourceType.MGMT_IOTDPS: '2020-03-01',
ResourceType.MGMT_IOTCENTRAL: '2018-09-01',
ResourceType.MGMT_ARO: '2020-04-30',
ResourceType.MGMT_DATABOXEDGE: '2021-02-01-preview',
ResourceType.MGMT_CUSTOMLOCATION: '2021-03-15-preview',
ResourceType.MGMT_CONTAINERSERVICE: SDKProfile('2021-07-01', {
'container_services': '2017-07-01',
'open_shift_managed_clusters': '2019-09-30-preview'
})
},
'2020-09-01-hybrid': {
ResourceType.MGMT_STORAGE: '2019-06-01',
ResourceType.MGMT_NETWORK: '2018-11-01',
ResourceType.MGMT_COMPUTE: SDKProfile('2020-06-01', {
'resource_skus': '2019-04-01',
'disks': '2019-07-01',
'disk_encryption_sets': '2019-07-01',
'disk_accesses': '2020-05-01',
'snapshots': '2019-07-01',
'galleries': '2019-12-01',
'gallery_images': '2019-12-01',
'gallery_image_versions': '2019-12-01',
'virtual_machine_scale_sets': '2020-06-01'
}),
ResourceType.MGMT_KEYVAULT: '2016-10-01',
ResourceType.MGMT_RESOURCE_FEATURES: '2021-07-01',
ResourceType.MGMT_RESOURCE_LINKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_LOCKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_POLICY: '2016-12-01',
ResourceType.MGMT_RESOURCE_RESOURCES: '2019-10-01',
ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS: '2016-06-01',
ResourceType.MGMT_RESOURCE_TEMPLATESPECS: '2015-01-01',
ResourceType.MGMT_NETWORK_DNS: '2016-04-01',
ResourceType.MGMT_AUTHORIZATION: SDKProfile('2015-07-01', {
'classic_administrators': '2015-06-01',
'policy_assignments': '2016-12-01',
'policy_definitions': '2016-12-01'
}),
# The order does make things different.
# Please keep ResourceType.DATA_KEYVAULT_KEYS before ResourceType.DATA_KEYVAULT
ResourceType.DATA_KEYVAULT_KEYS: None,
ResourceType.DATA_KEYVAULT: '2016-10-01',
ResourceType.DATA_STORAGE: '2018-11-09',
ResourceType.DATA_STORAGE_BLOB: '2019-07-07',
ResourceType.DATA_STORAGE_FILEDATALAKE: '2019-07-07',
ResourceType.DATA_STORAGE_FILESHARE: '2019-07-07',
ResourceType.DATA_STORAGE_QUEUE: '2019-07-07',
ResourceType.DATA_COSMOS_TABLE: '2017-04-17',
ResourceType.MGMT_APPSERVICE: '2018-02-01',
ResourceType.MGMT_EVENTHUB: '2021-06-01-preview',
ResourceType.MGMT_IOTHUB: '2019-07-01-preview',
ResourceType.MGMT_DATABOXEDGE: '2019-08-01',
ResourceType.MGMT_CONTAINERREGISTRY: '2019-05-01',
ResourceType.MGMT_CONTAINERSERVICE: SDKProfile('2020-11-01', {
'container_services': '2017-07-01',
'open_shift_managed_clusters': '2019-09-30-preview'
})
},
'2019-03-01-hybrid': {
ResourceType.MGMT_STORAGE: '2017-10-01',
ResourceType.MGMT_NETWORK: '2017-10-01',
ResourceType.MGMT_COMPUTE: SDKProfile('2017-12-01', {
'resource_skus': '2017-09-01',
'disks': '2017-03-30',
'snapshots': '2017-03-30'
}),
ResourceType.MGMT_RESOURCE_LINKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_LOCKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_POLICY: '2016-12-01',
ResourceType.MGMT_RESOURCE_RESOURCES: '2018-05-01',
ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS: '2016-06-01',
ResourceType.MGMT_RESOURCE_TEMPLATESPECS: '2015-01-01',
ResourceType.MGMT_NETWORK_DNS: '2016-04-01',
ResourceType.MGMT_KEYVAULT: '2016-10-01',
ResourceType.MGMT_AUTHORIZATION: SDKProfile('2015-07-01', {
'classic_administrators': '2015-06-01',
'policy_assignments': '2016-12-01',
'policy_definitions': '2016-12-01'
}),
# The order does make things different.
# Please keep ResourceType.DATA_KEYVAULT_KEYS before ResourceType.DATA_KEYVAULT
ResourceType.DATA_KEYVAULT_KEYS: None,
ResourceType.DATA_KEYVAULT: '2016-10-01',
ResourceType.DATA_STORAGE: '2017-11-09',
ResourceType.DATA_STORAGE_BLOB: '2017-11-09',
ResourceType.DATA_STORAGE_FILEDATALAKE: '2017-11-09',
ResourceType.DATA_STORAGE_FILESHARE: '2017-11-09',
ResourceType.DATA_STORAGE_QUEUE: '2017-11-09',
ResourceType.DATA_COSMOS_TABLE: '2017-04-17',
# Full MultiAPI support is not done in AppService, the line below is merely
# to have commands show up in the hybrid profile which happens to have the latest
# API versions
ResourceType.MGMT_APPSERVICE: '2018-02-01',
ResourceType.MGMT_EVENTHUB: '2021-06-01-preview',
ResourceType.MGMT_IOTHUB: '2019-03-22',
ResourceType.MGMT_DATABOXEDGE: '2019-08-01'
},
'2018-03-01-hybrid': {
ResourceType.MGMT_STORAGE: '2016-01-01',
ResourceType.MGMT_NETWORK: '2017-10-01',
ResourceType.MGMT_COMPUTE: SDKProfile('2017-03-30'),
ResourceType.MGMT_RESOURCE_LINKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_LOCKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_POLICY: '2016-12-01',
ResourceType.MGMT_RESOURCE_RESOURCES: '2018-02-01',
ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS: '2016-06-01',
ResourceType.MGMT_RESOURCE_TEMPLATESPECS: '2015-01-01',
ResourceType.MGMT_NETWORK_DNS: '2016-04-01',
ResourceType.MGMT_KEYVAULT: '2016-10-01',
ResourceType.MGMT_AUTHORIZATION: SDKProfile('2015-07-01', {
'classic_administrators': '2015-06-01'
}),
        # The order matters here:
        # keep ResourceType.DATA_KEYVAULT_KEYS before ResourceType.DATA_KEYVAULT.
ResourceType.DATA_KEYVAULT_KEYS: None,
ResourceType.DATA_KEYVAULT: '2016-10-01',
ResourceType.DATA_STORAGE: '2017-04-17',
ResourceType.DATA_STORAGE_BLOB: '2017-04-17',
ResourceType.DATA_STORAGE_FILEDATALAKE: '2017-04-17',
ResourceType.DATA_STORAGE_FILESHARE: '2017-04-17',
ResourceType.DATA_STORAGE_QUEUE: '2017-04-17',
ResourceType.DATA_COSMOS_TABLE: '2017-04-17'
},
'2017-03-09-profile': {
ResourceType.MGMT_STORAGE: '2016-01-01',
ResourceType.MGMT_NETWORK: '2015-06-15',
ResourceType.MGMT_COMPUTE: SDKProfile('2016-03-30'),
ResourceType.MGMT_RESOURCE_LINKS: '2016-09-01',
ResourceType.MGMT_RESOURCE_LOCKS: '2015-01-01',
ResourceType.MGMT_RESOURCE_POLICY: '2015-10-01-preview',
ResourceType.MGMT_RESOURCE_RESOURCES: '2016-02-01',
ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS: '2016-06-01',
ResourceType.MGMT_RESOURCE_TEMPLATESPECS: '2015-01-01',
ResourceType.MGMT_NETWORK_DNS: '2016-04-01',
ResourceType.MGMT_KEYVAULT: '2016-10-01',
ResourceType.MGMT_AUTHORIZATION: SDKProfile('2015-07-01', {
'classic_administrators': '2015-06-01'
}),
        # The order matters here:
        # keep ResourceType.DATA_KEYVAULT_KEYS before ResourceType.DATA_KEYVAULT.
ResourceType.DATA_KEYVAULT_KEYS: None,
ResourceType.DATA_KEYVAULT: '2016-10-01',
ResourceType.DATA_STORAGE: '2015-04-05',
ResourceType.DATA_STORAGE_BLOB: '2015-04-05',
ResourceType.DATA_STORAGE_FILEDATALAKE: '2015-04-05',
ResourceType.DATA_STORAGE_FILESHARE: '2015-04-05',
ResourceType.DATA_STORAGE_QUEUE: '2015-04-05'
}
}
# Avoid using ad hoc API versions;
# use the version from a profile whenever possible.
AD_HOC_API_VERSIONS = {
ResourceType.MGMT_NETWORK: {
'vm_default_target_network': '2018-01-01',
'nw_connection_monitor': '2019-06-01',
'container_network': '2018-08-01',
'appservice_network': '2020-04-01',
'appservice_ensure_subnet': '2019-02-01'
}
}
class _ApiVersions: # pylint: disable=too-few-public-methods
def __init__(self, client_type, sdk_profile, post_process):
self._client_type = client_type
self._sdk_profile = sdk_profile
self._post_process = post_process
self._operations_groups_value = None
self._resolved = False
def _resolve(self):
if self._resolved:
return
self._operations_groups_value = {}
for operation_group_name, operation_type in self._client_type.__dict__.items():
if not isinstance(operation_type, property):
continue
value_to_save = self._sdk_profile.profile.get(
operation_group_name,
self._sdk_profile.default_api_version
)
self._operations_groups_value[operation_group_name] = self._post_process(value_to_save)
self._resolved = True
def __getattr__(self, item):
try:
self._resolve()
return self._operations_groups_value[item]
except KeyError:
raise AttributeError('Attribute {} does not exist.'.format(item))
def _get_api_version_tuple(resource_type, sdk_profile, post_process=lambda x: x):
"""Return a _ApiVersion instance where key are operation group and value are api version."""
return _ApiVersions(client_type=get_client_class(resource_type),
sdk_profile=sdk_profile,
post_process=post_process)
def get_api_version(api_profile, resource_type, as_sdk_profile=False):
    """Get the API version of a resource type given an API profile.
    :param api_profile: The name of the API profile.
    :type api_profile: str.
    :param resource_type: The resource type.
    :type resource_type: ResourceType.
    :param as_sdk_profile: Return the raw profile entry (possibly an SDKProfile)
        instead of resolving it to an _ApiVersions instance.
    :type as_sdk_profile: bool.
    :returns: str or _ApiVersions -- the API version(s).
    :raises: APIVersionException
    """
try:
api_version = AZURE_API_PROFILES[api_profile][resource_type]
if as_sdk_profile:
return api_version # Could be SDKProfile or string
if isinstance(api_version, SDKProfile):
api_version = _get_api_version_tuple(resource_type, api_version)
return api_version
except KeyError:
raise APIVersionException(resource_type, api_profile)
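# Example (illustrative sketch; the values follow the profile tables above, and the
# 'disks' attribute assumes the compute client exposes that operation group as a property):
#
#     get_api_version('2019-03-01-hybrid', ResourceType.MGMT_STORAGE)
#     # -> '2017-10-01'
#
#     versions = get_api_version('2019-03-01-hybrid', ResourceType.MGMT_COMPUTE)
#     # -> an _ApiVersions instance; versions.disks resolves to '2017-03-30'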
@total_ordering
class _SemVerAPIFormat:
"""Basic semver x.y.z API format.
Supports x, or x.y, or x.y.z
"""
def __init__(self, api_version_str):
try:
            parts = api_version_str.split('.')
            parts += [0, 0]  # Pad so a missing minor/patch component defaults to 0
self.major = int(parts[0])
self.minor = int(parts[1])
self.patch = int(parts[2])
except (ValueError, TypeError):
raise ValueError('The API version {} is not in a '
'semver format'.format(api_version_str))
def __eq__(self, other):
return (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch)
def __lt__(self, other):
return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
@total_ordering # pylint: disable=too-few-public-methods
class _DateAPIFormat:
""" Class to support comparisons for API versions in
YYYY-MM-DD, YYYY-MM-DD-preview, YYYY-MM-DD-profile, YYYY-MM-DD-profile-preview
or any string that starts with YYYY-MM-DD format. A special case is made for 'latest'.
"""
def __init__(self, api_version_str):
try:
self.latest = self.preview = False
self.yyyy = self.mm = self.dd = None
if api_version_str == 'latest':
self.latest = True
else:
if 'preview' in api_version_str:
self.preview = True
parts = api_version_str.split('-')
self.yyyy = int(parts[0])
self.mm = int(parts[1])
self.dd = int(parts[2])
except (ValueError, TypeError):
raise ValueError('The API version {} is not in a '
'supported format'.format(api_version_str))
def __eq__(self, other):
return self.latest == other.latest and self.yyyy == other.yyyy and self.mm == other.mm and \
self.dd == other.dd and self.preview == other.preview
    def __lt__(self, other):  # pylint: disable=too-many-return-statements
        if self.latest or other.latest:
            # 'latest' compares greater than any dated version.
            return not self.latest and other.latest
if self.yyyy < other.yyyy:
return True
if self.yyyy == other.yyyy:
if self.mm < other.mm:
return True
if self.mm == other.mm:
if self.dd < other.dd:
return True
if self.dd == other.dd:
if self.preview and not other.preview:
return True
return False
def _parse_api_version(api_version):
    """Try to parse the version as a date; if that fails, try semver,
    and if that also fails, raise.
    """
try:
return _DateAPIFormat(api_version)
except ValueError:
return _SemVerAPIFormat(api_version)
def _cross_api_format_less_than(api_version, other):
    """Less-than comparison that also works when the two versions use different formats.
    For now, assume that any semver version is higher than any date-based version.
    This fits KeyVault; if a counter-example turns up later, we'll update this.
    """
api_version = _parse_api_version(api_version)
other = _parse_api_version(other)
if type(api_version) is type(other):
return api_version < other
return isinstance(api_version, _DateAPIFormat) and isinstance(other, _SemVerAPIFormat)
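# Example (illustrative sketch of the comparison rules above):
#
#     _cross_api_format_less_than('2016-01-01', '2017-10-01')          # True  (date < date)
#     _cross_api_format_less_than('2017-10-01', '2017-10-01-preview')  # False (preview < GA)
#     _cross_api_format_less_than('2017-10-01', '7.0')                 # True  (date < semver)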
def _validate_api_version(api_version_str, min_api=None, max_api=None):
"""Validate if api_version is inside the interval min_api/max_api.
"""
if min_api and _cross_api_format_less_than(api_version_str, min_api):
return False
if max_api and _cross_api_format_less_than(max_api, api_version_str):
return False
return True
def supported_api_version(api_profile, resource_type, min_api=None, max_api=None, operation_group=None):
    """
    Returns True if the current API version for the resource type satisfies the min/max range.
    To compare profile versions themselves, pass PROFILE_TYPE as the resource type.
    If the resource type resolves to an SDKProfile, the API version of the given
    operation group (or the profile default) is compared.
    note: Currently supports YYYY-MM-DD, YYYY-MM-DD-preview, YYYY-MM-DD-profile
    or YYYY-MM-DD-profile-preview formatted strings, as well as semver (x.y.z) strings.
    """
if not isinstance(resource_type, (ResourceType, CustomResourceType)) and resource_type != PROFILE_TYPE:
raise ValueError("'resource_type' is required.")
if min_api is None and max_api is None:
raise ValueError('At least a min or max version must be specified')
api_version_obj = get_api_version(api_profile, resource_type, as_sdk_profile=True) \
if isinstance(resource_type, (ResourceType, CustomResourceType)) else api_profile
if isinstance(api_version_obj, SDKProfile):
api_version_obj = api_version_obj.profile.get(operation_group or '', api_version_obj.default_api_version)
return _validate_api_version(api_version_obj, min_api, max_api)
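# Example (illustrative sketch; values follow the profile tables above):
#
#     supported_api_version('2019-03-01-hybrid', ResourceType.MGMT_STORAGE,
#                           min_api='2017-06-01')     # True:  '2017-10-01' >= '2017-06-01'
#     supported_api_version('2019-03-01-hybrid', ResourceType.MGMT_COMPUTE,
#                           min_api='2017-10-01',
#                           operation_group='disks')  # False: '2017-03-30' < '2017-10-01'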
def supported_resource_type(api_profile, resource_type):
if api_profile == 'latest' or resource_type is None:
return True
try:
return bool(AZURE_API_PROFILES[api_profile][resource_type])
except KeyError:
return False
def _get_attr(sdk_path, mod_attr_path, checked=True):
try:
attr_mod, attr_path = mod_attr_path.split('#') \
if '#' in mod_attr_path else (mod_attr_path, '')
full_mod_path = '{}.{}'.format(sdk_path, attr_mod) if attr_mod else sdk_path
op = import_module(full_mod_path)
if attr_path:
# Only load attributes if needed
for part in attr_path.split('.'):
op = getattr(op, part)
return op
except (ImportError, AttributeError) as ex:
import traceback
logger.debug(traceback.format_exc())
if checked:
return None
raise ex
def get_client_class(resource_type):
return _get_attr(resource_type.import_prefix, '#' + resource_type.client_name)
def get_versioned_sdk_path(api_profile, resource_type, operation_group=None):
    """ Patch the unversioned SDK path to include the appropriate API version for the
    resource type in question.
    e.g. Converts azure.mgmt.storage.operations.storage_accounts_operations to
    azure.mgmt.storage.v2016_12_01.operations.storage_accounts_operations,
    or (for semver-style versions) azure.keyvault.models to azure.keyvault.v7_0.models.
    """
api_version = get_api_version(api_profile, resource_type)
if api_version is None:
return resource_type.import_prefix
if isinstance(api_version, _ApiVersions):
if operation_group is None:
raise ValueError("operation_group is required for resource type '{}'".format(resource_type))
api_version = getattr(api_version, operation_group)
return '{}.v{}'.format(resource_type.import_prefix, api_version.replace('-', '_').replace('.', '_'))
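# Example (illustrative sketch, assuming MGMT_STORAGE has import_prefix
# 'azure.mgmt.storage' and resolves to API version '2017-10-01' in this profile):
#
#     get_versioned_sdk_path('2019-03-01-hybrid', ResourceType.MGMT_STORAGE)
#     # -> 'azure.mgmt.storage.v2017_10_01'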
def get_versioned_sdk(api_profile, resource_type, *attr_args, **kwargs):
checked = kwargs.get('checked', True)
sub_mod_prefix = kwargs.get('mod', None)
operation_group = kwargs.get('operation_group', None)
sdk_path = get_versioned_sdk_path(api_profile, resource_type, operation_group)
if not attr_args:
# No attributes to load. Return the versioned sdk
return import_module(sdk_path)
results = []
for mod_attr_path in attr_args:
if sub_mod_prefix and '#' not in mod_attr_path:
mod_attr_path = '{}#{}'.format(sub_mod_prefix, mod_attr_path)
loaded_obj = _get_attr(sdk_path, mod_attr_path, checked)
results.append(loaded_obj)
return results[0] if len(results) == 1 else results
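# Example (illustrative sketch; the 'StorageAccount' model attribute is hypothetical):
#
#     StorageAccount = get_versioned_sdk(
#         '2019-03-01-hybrid', ResourceType.MGMT_STORAGE,
#         'StorageAccount', mod='models')
#     # Equivalent to importing StorageAccount from
#     # azure.mgmt.storage.v2017_10_01.models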
|
|
"""
===================================
The :mod:`mpi_array.globale` Module
===================================
Defines :obj:`gndarray` class and factory functions for
creating multi-dimensional distributed arrays (Partitioned Global Address Space).
Classes
=======
.. autosummary::
:toctree: generated/
gndarray - A :obj:`numpy.ndarray` like distributed array.
PerAxisRmaHaloUpdater - Helper class for performing ghost element updates.
RmaRedistributeUpdater - Helper class for redistributing elements between distributions.
Functions
=========
.. autosummary::
:toctree: generated/
copyto - Copy elements of one array to another array.
"""
from __future__ import absolute_import
import mpi4py.MPI as _mpi
import numpy as _np
from numpy.lib.mixins import NDArrayOperatorsMixin as _NDArrayOperatorsMixin
from .license import license as _license, copyright as _copyright, version as _version
from .update import UpdatesForRedistribute as _UpdatesForRedistribute
from .update import MpiUpdatesForGet as _MpiUpdatesForGet
from .update import MpiHalosUpdate as _MpiHalosUpdate
from .update import MpiPairExtentUpdate as _MpiPairExtentUpdate
from .update import MpiPairExtentUpdateDifferentDtypes as _MpiPairExtentUpdateDifferentDtypes
from .update import RmaUpdateExecutor as _RmaUpdateExecutor
from .locale import win_lndarray as _win_lndarray
from .distribution import LocaleExtent as _LocaleExtent
from .indexing import HaloIndexingExtent as _HaloIndexingExtent
__author__ = "Shane J. Latham"
__license__ = _license()
__copyright__ = _copyright()
__version__ = _version()
_builtin_slice = slice
class CommLogger:
    """
    Mixin which holds a per-rank logger and a root (rank 0) logger.
    """
    def __init__(self, rank_logger=None, root_logger=None):
        self._rank_logger = rank_logger
        self._root_logger = root_logger
@property
def rank_logger(self):
return self._rank_logger
@rank_logger.setter
def rank_logger(self, logger):
self._rank_logger = logger
@property
def root_logger(self):
return self._root_logger
@root_logger.setter
def root_logger(self, logger):
self._root_logger = logger
class PerAxisRmaHaloUpdater(CommLogger):
"""
Helper class for performing halo data transfer using RMA
via MPI windows (:obj:`mpi4py.MPI.Win` objects).
"""
#: Halo "low index" indices.
LO = _HaloIndexingExtent.LO
#: Halo "high index" indices.
HI = _HaloIndexingExtent.HI
def __init__(self, locale_extents, dtype, order, inter_locale_win, dst_buffer):
"""
Initialise.
        :type locale_extents: sequence of :obj:`mpi_array.distribution.LocaleExtent`
:param locale_extents: :samp:`locale_extents[r]` is the extent of the array
elements which reside on rank :samp:`r` of the :samp:`inter_locale_comm`
communicator.
:type dtype: :obj:`numpy.dtype`
:param dtype: Data type of elements in array.
:type order: :obj:`str`
:param order: The array order, :samp:`'C'` for C memory layout.
:type inter_locale_win: :obj:`mpi4py.MPI.Win`
:param inter_locale_win: The window used to exchange halo element data.
:type dst_buffer: :obj:`memoryview`
:param dst_buffer: The buffer into which the halo elements are written.
"""
CommLogger.__init__(self)
self._locale_extents = locale_extents
self._dtype = dtype
self._order = order
self._inter_locale_win = inter_locale_win
self._dst_buffer = dst_buffer
self._halo_updates = None
self._have_axis_updates = None
@property
def locale_extents(self):
"""
Sequence of :obj:`mpi_array.distribution.LocaleExtent` objects which
define the partitioning of the array.
"""
return self._locale_extents
@property
def dtype(self):
"""
The :obj:`numpy.dtype` of the data to be exchanged in the halo update.
"""
return self._dtype
@property
def order(self):
"""
Array order :obj:`str`, :samp:`'C'` for C memory layout.
"""
return self._order
def calc_halo_updates(self):
"""
Calculates the per-axis halo-region updates for
all inter-locale ranks (of the :samp:`inter_locale_comm`).
:rtype: :obj:`tuple` pair
:return: A :samp:`(rank_2_updates_dict, bool_sequence)` pair
           where :samp:`rank_2_updates_dict` is a :obj:`dict` of :samp:`{inter_locale_rank: halos_update}` entries,
where :samp:`inter_locale_rank` is an :obj:`int` indicating the
rank of the process in :samp:`inter_locale_comm` and :samp:`halos_update`
is a :obj:`mpi_array.update.MpiHalosUpdate` containing the description
of regions which are required to be fetched from remote processes.
           The :samp:`bool_sequence` is of length :attr:`ndim`; :samp:`bool_sequence[a] is True`
           indicates that halo updates are required on axis :samp:`a`.
"""
halo_updates_dict = dict()
ndim = self.locale_extents[0].ndim
have_axis_updates = _np.zeros((ndim, ), dtype="bool")
for inter_locale_rank in range(len(self.locale_extents)):
rank_inter_locale_updates = \
_MpiHalosUpdate(
inter_locale_rank,
self.locale_extents
)
halo_updates_dict[inter_locale_rank] = rank_inter_locale_updates
have_axis_updates = \
_np.logical_or(
have_axis_updates,
_np.array(
[
(rank_inter_locale_updates.updates_per_axis[a] is not None)
and
(
(len(rank_inter_locale_updates.updates_per_axis[a][self.LO]) > 0)
or
(len(rank_inter_locale_updates.updates_per_axis[a][self.HI]) > 0)
)
for a in range(ndim)
],
dtype="bool"
)
)
if _np.any(have_axis_updates):
for a in range(ndim):
if not have_axis_updates[a]:
for inter_locale_rank in range(len(self.locale_extents)):
halo_updates_dict[inter_locale_rank].updates_per_axis[a] = None
else:
halo_updates_dict = None
return halo_updates_dict, have_axis_updates
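    # Illustrative shape of the calc_halo_updates() result (hypothetical 2-locale,
    # 2D case where only axis 0 has non-empty halo regions):
    #
    #     ({0: <MpiHalosUpdate for rank 0>, 1: <MpiHalosUpdate for rank 1>},
    #      array([ True, False]))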
@property
def dst_buffer(self):
"""
A :obj:`memoryview` which provides the buffer
into which the halo data is written.
"""
return self._dst_buffer
@property
def halo_updates(self):
"""
The :samp:`(rank_2_updates_dict, bool_sequence)` pair calculated
by :meth:`calc_halo_updates`.
"""
if self._halo_updates is None:
self._halo_updates, self._have_axis_updates = self.calc_halo_updates()
return self._halo_updates
def update_halos(self):
"""
Performs the data exchange required to update the halo (ghost)
elements of the array buffer :attr:`dst_buffer`:samp:`.buffer`.
Can be called :samp:`peer_comm` collectively.
"""
self.do_update_halos(self.halo_updates)
def do_update_halos(self, halo_updates):
"""
Performs the data exchange required to update the halo (ghost)
elements of the array buffer :attr:`dst_buffer`:samp:`.buffer`.
Can be called :samp:`peer_comm` collectively.
        :type halo_updates: :obj:`dict` of :obj:`mpi_array.update.MpiHalosUpdate`
        :param halo_updates: A :obj:`dict` of per :samp:`inter_locale_rank`
           halo region updates. See :meth:`calc_halo_updates`.
"""
if halo_updates is not None:
# Get the halo updates for this rank
rank_inter_locale_updates = halo_updates[self._inter_locale_win.group.rank]
# Get the updates separated into per-axis (hyper-slab) updates
rank_updates_per_axis = rank_inter_locale_updates.updates_per_axis
# rank_updates_per_axis is None, on *all* inter_locale_win.group ranks,
# when there are no halos on any axis.
if rank_updates_per_axis is not None:
for a in range(len(rank_updates_per_axis)):
lo_hi_updates_pair = rank_updates_per_axis[a]
                    # When an axis doesn't have a halo, lo_hi_updates_pair is
                    # None on all inter_locale_comm ranks, so we avoid calling
                    # Fence in that case.
if lo_hi_updates_pair is not None:
axis_inter_locale_rank_updates = \
lo_hi_updates_pair[rank_inter_locale_updates.LO] + \
lo_hi_updates_pair[rank_inter_locale_updates.HI]
self.rank_logger.debug(
"BEG: Fence(_mpi.MODE_NOPUT | _mpi.MODE_NOPRECEDE)..."
)
self._inter_locale_win.Fence(
_mpi.MODE_NOPUT | _mpi.MODE_NOPRECEDE)
self.rank_logger.debug(
"END: Fence(_mpi.MODE_NOPUT | _mpi.MODE_NOPRECEDE)..."
)
for single_update in axis_inter_locale_rank_updates:
single_update.initialise_data_types(self.dtype, self.order)
self.rank_logger.debug(
"BEG: Getting update:\n%s\n%s",
single_update._header_str,
single_update
)
self._inter_locale_win.Get(
[self._dst_buffer, 1, single_update.dst_data_type],
single_update.src_extent.cart_rank,
[0, 1, single_update.src_data_type]
)
self.rank_logger.debug(
"END: Getting update:\n%s\n%s",
single_update._header_str,
single_update
)
self.rank_logger.debug(
"BEG: Fence(_mpi.MODE_NOSUCCEED)."
)
self._inter_locale_win.Fence(_mpi.MODE_NOSUCCEED)
self.rank_logger.debug(
"END: Fence(_mpi.MODE_NOSUCCEED)."
)
class RankTranslator(object):
"""
Translate ranks between two `mpi4py.MPI.Group` objects.
"""
    def __init__(self, dst_group, src_group):
        """
        Initialise with destination and source :obj:`mpi4py.MPI.Group` objects.
        """
object.__init__(self)
self._dst_group = dst_group
self._src_group = src_group
def dst_to_src(self, ranks):
"""
Returns :samp:`mpi4py.MPI.Group.Translate_ranks(self.dst_group, ranks, self.src_group)`.
"""
r = _np.array(ranks, copy=True)
r.ravel()[...] = _mpi.Group.Translate_ranks(self.dst_group, r.ravel(), self.src_group)
return r
def src_to_dst(self, ranks):
"""
Returns :samp:`mpi4py.MPI.Group.Translate_ranks(self.src_group, ranks, self.dst_group)`.
"""
r = _np.array(ranks, copy=True)
r.ravel()[...] = _mpi.Group.Translate_ranks(self.src_group, r.ravel(), self.dst_group)
return r
@property
def dst_group(self):
"""
A :obj:`mpi4py.MPI.Group`.
"""
return self._dst_group
@property
def src_group(self):
"""
A :obj:`mpi4py.MPI.Group`.
"""
return self._src_group
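# Illustrative sketch (hypothetical communicators comm_a and comm_b):
#
#     translator = RankTranslator(dst_group=comm_a.group, src_group=comm_b.group)
#     translator.src_to_dst([0, 1])  # -> comm_a.group ranks for comm_b.group ranks 0 and 1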
class RmaRedistributeUpdater(_UpdatesForRedistribute):
"""
Helper class for redistributing array to new distribution.
Calculates sequence of :obj:`mpi_array.distribution.ExtentUpdate`
objects which are used to copy elements from
remote :samp:`{src}` locales to local :samp:`{dst}` locales.
"""
    def __init__(self, dst, src, casting="same_kind"):
        """
        Initialise with destination and source :obj:`gndarray` instances.
        """
self._dst = dst
self._src = src
self._casting = casting
self._mpi_pair_extent_update_type = _MpiPairExtentUpdate
self._max_outstanding_requests = 32 * 32
self._min_outstanding_requests_per_proc = 2
self._max_ranks_per_inter_locale_sub_group = 128
if self._dst.dtype != self._src.dtype:
self._mpi_pair_extent_update_type = _MpiPairExtentUpdateDifferentDtypes
_UpdatesForRedistribute.__init__(
self,
dst.comms_and_distrib.distribution,
src.comms_and_distrib.distribution,
peer_rank_translator=RankTranslator(
self._dst.locale_comms.peer_comm.group,
self._src.locale_comms.peer_comm.group
)
)
self._inter_win = self._src.rma_window_buffer.peer_win
self._max_outstanding_requests_per_proc = \
_np.max(
(
self._min_outstanding_requests_per_proc,
self._max_outstanding_requests // self._max_ranks_per_inter_locale_sub_group
)
)
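        # Derive a per-rank RNG seed: splice this inter-locale rank's digits into
        # the digit string of 2**31 so that seeds differ across ranks.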
seed_str = str(2 ** 31)[1:]
rank_str = str(self._inter_win.group.rank + 1)
seed_str = rank_str + seed_str[len(rank_str):]
seed_str = seed_str[0:-len(rank_str)] + rank_str[::-1]
self._random_state = _np.random.RandomState(seed=int(seed_str))
def calc_can_use_existing_src_peer_comm(self):
"""
Returns :samp:`True` if :samp:`self._src.locale_comms.peer_comm`
can be used to redistribute to the distribution of the :samp:`self._dst` array.
:rtype: :obj:`bool`
:return: :samp:`True` if :samp:`self._src.locale_comms.peer_comm` is a super-set
of the processes of :samp:`self._dst.locale_comms.peer_comm`
"""
can_use_existing_src_peer_comm = self._src.locale_comms.peer_comm is not None
if self._dst.locale_comms.have_valid_inter_locale_comm:
if self._src.locale_comms.peer_comm != _mpi.COMM_NULL:
can_use_existing_src_peer_comm = \
(
(
_mpi.Group.Intersection(
self._dst.locale_comms.inter_locale_comm.group,
self._src.locale_comms.peer_comm.group
).size
==
self._dst.locale_comms.inter_locale_comm.group.size
)
)
        self._dst.rank_logger.debug(
            "BEG: self._dst.locale_comms.intra_locale_comm.allreduce...")
can_use_existing_src_peer_comm = \
self._dst.locale_comms.intra_locale_comm.allreduce(
can_use_existing_src_peer_comm,
_mpi.BAND
)
self._dst.rank_logger.debug("END: self._dst_cad.locale_comms.intra_locale_comm.allreduce.")
self._dst.rank_logger.debug(
"can_use_existing_src_peer_comm = %s",
can_use_existing_src_peer_comm
)
return can_use_existing_src_peer_comm
def create_pair_extent_update(
self,
dst_extent,
src_extent,
intersection_extent
):
"""
Factory method which creates sequence of
of :obj:`mpi_array.distribution.MpiPairExtentUpdate` objects.
"""
updates = \
[
self._mpi_pair_extent_update_type(
self._dst.distribution.locale_extents[dst_extent.inter_locale_rank],
self._src.distribution.locale_extents[src_extent.inter_locale_rank],
intersection_extent,
intersection_extent
),
]
for update in updates:
update.initialise_data_types(
dst_dtype=self._dst.dtype,
src_dtype=self._src.dtype,
dst_order=self._dst.lndarray_proxy.md.order,
src_order=self._src.lndarray_proxy.md.order
)
update.casting = self._casting
return updates
    def wait_all(self, req_list):
        """
        Blocks until all MPI requests in :samp:`{req_list}` have completed.
        """
self._dst.rank_logger.debug(
"BEG: Waiting for outstanding rget requests, len(req_list)=%s...",
len(req_list)
)
_mpi.Request.Waitall(req_list)
self._dst.rank_logger.debug(
"END: Waiting for outstanding rget requests, len(req_list)=%s.",
len(req_list)
)
def do_locale_cpy2_update(self):
"""
Performs direct copy updates.
"""
updates = self._dst_cpy2_updates[self._dst.this_locale.inter_locale_rank]
my_dst_peer_rank = self._dst.locale_comms.peer_comm.rank
my_src_peer_rank = self._src.locale_comms.peer_comm.rank
src_lndarray = self._src.lndarray_proxy.lndarray
dst_lndarray = self._dst.lndarray_proxy.lndarray
for update in updates:
src_translated_peer_ranks = \
self._src_translated_peer_ranks[update.src_extent.inter_locale_rank]
dst_translated_peer_ranks = \
self._dst_translated_peer_ranks[update.dst_extent.inter_locale_rank]
if (
(
(my_src_peer_rank == update.src_extent.peer_rank)
and
(update.src_extent.peer_rank in dst_translated_peer_ranks)
)
or
(
(my_dst_peer_rank == update.dst_extent.peer_rank)
and
(update.dst_extent.peer_rank in src_translated_peer_ranks)
)
):
self._dst.rank_logger.debug(
"Copying update: mdpr=%s, mspr=%s\nsrc_t_ranks=%s\ndst_t_ranks=%s\n%s\n%s",
my_dst_peer_rank,
my_src_peer_rank,
src_translated_peer_ranks,
dst_translated_peer_ranks,
update._header_str,
update
)
update.copyto(dst_lndarray, src_lndarray, casting=self._casting)
def do_locale_rma_update(self):
"""
Performs RMA to get elements from remote locales to
update the locale extent array.
"""
can_use_existing_src_peer_comm = self.calc_can_use_existing_src_peer_comm()
self._dst.rank_logger.debug(
"%s.%s: "
+
"can_use_existing_src_peer_comm=%s",
self.__class__.__name__,
"do_locale_rma_update",
can_use_existing_src_peer_comm
)
if can_use_existing_src_peer_comm:
inter_win = _mpi.WIN_NULL
if self._dst.locale_comms.have_valid_inter_locale_comm:
inter_win = self._inter_win
update_executor = \
_RmaUpdateExecutor(
inter_win=inter_win,
dst_lndarray=self._dst.lndarray_proxy.lndarray,
src_inter_win_rank_attr="peer_rank",
rank_logger=self._dst.rank_logger
)
# Fetch remote data.
updates = self._dst_rget_updates[self._dst.this_locale.inter_locale_rank]
update_executor.do_locale_rma_update(updates)
else:
raise RuntimeError(
(
"can_use_existing_src_peer_comm=%s: "
+
"incompatible dst inter_locale_comma and src peer_comm."
)
%
(can_use_existing_src_peer_comm,)
)
self._dst.locale_comms.intra_locale_comm.barrier()
def do_locale_update(self):
self.do_locale_rma_update()
self.do_locale_cpy2_update()
def do_update(self):
self.barrier()
self._dst.locale_comms.rank_logger.debug(
"%s: BEG: do_locale_cpy2_update()...", self.__class__.__name__
)
self.do_locale_cpy2_update()
self._dst.locale_comms.rank_logger.debug(
"%s: END: do_locale_cpy2_update().", self.__class__.__name__
)
self.barrier()
self._dst.locale_comms.rank_logger.debug(
"%s: BEG: do_locale_rma_update()...", self.__class__.__name__
)
self.do_locale_rma_update()
self._dst.locale_comms.rank_logger.debug(
"%s: END: do_locale_rma_update().", self.__class__.__name__
)
self.barrier()
def barrier(self):
"""
MPI barrier.
"""
self._dst.locale_comms.rank_logger.debug(
"%s: BEG: self._src.locale_comms.peer_comm.barrier()...", self.__class__.__name__
)
self._src.locale_comms.peer_comm.barrier()
self._dst.locale_comms.rank_logger.debug(
"%s: END: self._src.locale_comms.peer_comm.barrier().", self.__class__.__name__
)
class gndarray(_NDArrayOperatorsMixin):
"""
A Partitioned Global Address Space array with :obj:`numpy.ndarray` API.
"""
def __new__(
cls,
comms_and_distrib,
rma_window_buffer,
lndarray_proxy
):
"""
Construct, at least one of :samp:{shape} or :samp:`comms_and_distrib` should
be specified (i.e. at least one should not be :samp:`None`).
:type comms_and_distrib: :obj:`mpi_array.distribution.Decomposition`
:param comms_and_distrib: Array distribution info and used to allocate (possibly)
shared memory.
"""
self = _NDArrayOperatorsMixin.__new__(cls)
self._comms_and_distrib = comms_and_distrib
self._rma_window_buffer = rma_window_buffer
self._lndarray_proxy = lndarray_proxy
self._halo_updater = None
return self
def free(self):
"""
        Collective (all :samp:`peer_comm` processes) free of MPI windows (and locale array memory).
"""
self._halo_updater = None
if self._comms_and_distrib is not None:
self._comms_and_distrib = None
if self._lndarray_proxy is not None:
self._lndarray_proxy.free()
self._lndarray_proxy = None
if self._rma_window_buffer is not None:
self._rma_window_buffer.free()
self._rma_window_buffer = None
def __del__(self):
"""
Calls :meth:`free`.
"""
self.free()
def __enter__(self):
"""
For use with :samp:`with` contexts.
"""
return self
def __exit__(self, type, value, traceback):
"""
For use with :samp:`with` contexts.
"""
self.free()
return False
    def __getitem__(self, i):
        """
        Not implemented yet; logs the requested index and returns :samp:`None`.
        """
        self.rank_logger.debug("__getitem__: i=%s", i)
        return None
    def __setitem__(self, i, v):
        """
        Not implemented yet; logs the requested index and value.
        """
        self.rank_logger.debug("__setitem__: i=%s, v=%s", i, v)
    def __array_ufunc__(self, *args, **kwargs):
        """
        Delegates ufunc evaluation to :func:`mpi_array.globale_ufunc.gndarray_array_ufunc`.
        """
from . import globale_ufunc as _globale_ufunc
return _globale_ufunc.gndarray_array_ufunc(self, *args, **kwargs)
@property
def this_locale(self):
return self._comms_and_distrib.this_locale
@property
def locale_comms(self):
return self._comms_and_distrib.locale_comms
@property
def distribution(self):
return self._comms_and_distrib.distribution
@property
def comms_and_distrib(self):
return self._comms_and_distrib
@property
def rma_window_buffer(self):
return self._rma_window_buffer
@property
def lndarray_proxy(self):
return self._lndarray_proxy
@property
def ndim(self):
return len(self.shape)
@property
def num_locales(self):
"""
"""
return self._comms_and_distrib.locale_comms.num_locales
@property
def shape(self):
return self._comms_and_distrib.distribution.globale_extent.shape_n
@property
def dtype(self):
return self._lndarray_proxy.dtype
@property
def order(self):
return self._lndarray_proxy.md.order
@property
def view_n(self):
return self._lndarray_proxy.view_n
@property
def view_h(self):
return self._lndarray_proxy.view_h
@property
def rank_view_n(self):
return self._lndarray_proxy.rank_view_n
@property
def rank_view_h(self):
return self._lndarray_proxy.rank_view_h
@property
def rank_logger(self):
"""
"""
return self._comms_and_distrib.locale_comms.rank_logger
@property
def root_logger(self):
"""
"""
return self._comms_and_distrib.locale_comms.root_logger
def initialise_windows(self):
"""
Creates the RMA windows required for inter-locale (and peer) one-sided RMA comms.
"""
self.rma_window_buffer.initialise_windows()
def intra_locale_barrier(self):
"""
"""
self.rank_logger.debug(
"BEG: self.comms_and_distrib.locale_comms.intra_locale_comm.barrier()..."
)
self.comms_and_distrib.locale_comms.intra_locale_comm.barrier()
self.rank_logger.debug(
"END: self.comms_and_distrib.locale_comms.intra_locale_comm.barrier()."
)
def inter_locale_barrier(self):
"""
"""
if self.comms_and_distrib.locale_comms.have_valid_inter_locale_comm:
self.rank_logger.debug(
"BEG: self.comms_and_distrib.locale_comms.inter_locale_comm.barrier()..."
)
self.comms_and_distrib.locale_comms.inter_locale_comm.barrier()
self.rank_logger.debug(
"END: self.comms_and_distrib.locale_comms.inter_locale_comm.barrier()."
)
@property
def halo_updater(self):
if self._halo_updater is None:
self._halo_updater = \
PerAxisRmaHaloUpdater(
locale_extents=self.distribution.locale_extents,
dtype=self.dtype,
order=self.order,
inter_locale_win=self.rma_window_buffer.inter_locale_win,
dst_buffer=self.lndarray_proxy.lndarray
)
self._halo_updater.rank_logger = self.rank_logger
self._halo_updater.root_logger = self.root_logger
return self._halo_updater
def update(self):
"""
"""
# If running on single locale then there are no halos to update.
if self.comms_and_distrib.locale_comms.num_locales > 1:
rank_logger = self.comms_and_distrib.locale_comms.rank_logger
# Only communicate data between the ranks
# of self.comms_and_distrib.locale_comms.inter_locale_comm
self.comms_and_distrib.locale_comms.peer_comm.barrier()
if (
self.comms_and_distrib.locale_comms.have_valid_inter_locale_comm
):
rank_logger.debug(
"BEG: update_halos..."
)
self.halo_updater.update_halos()
rank_logger.debug(
"END: update_halos."
)
self.intra_locale_barrier()
def calculate_copyfrom_updates(self, src, casting="same_kind"):
return \
RmaRedistributeUpdater(
self,
src,
casting
)
def copyfrom(self, src, casting="same_kind"):
"""
Copy the elements of the :samp:`{src}` array to corresponding elements of
the :samp:`{self}` array.
:type src: :obj:`gndarray`
:param src: Global array from which elements are copied.
:type casting: :obj:`str`
:param casting: See :samp:`{casting}` parameter in :func:`numpy.copyto`.
"""
if not isinstance(src, gndarray):
raise ValueError(
"Got type(src)=%s, expected %s." % (type(src), gndarray)
)
redistribute_updater = self.calculate_copyfrom_updates(src, casting)
redistribute_updater.do_update()
def all(self, **unused_kwargs):
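        """
        Returns :samp:`True` if all (non-ghost) elements of the array evaluate
        to :samp:`True`. Collective over :samp:`peer_comm`.
        """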
return \
self.locale_comms.peer_comm.allreduce(
bool(self.lndarray_proxy.rank_view_n.all()),
op=_mpi.BAND
)
def fill(self, value):
"""
Fill the array (excluding ghost elements) with a scalar value.
:type value: scalar
:param value: All non-ghost elements will be assigned this value.
"""
self.lndarray_proxy.fill(value)
self.intra_locale_barrier()
def fill_h(self, value):
"""
Fill all array elements (including ghost elements) with a scalar value.
:type value: scalar
:param value: All elements will be assigned this value.
"""
self.lndarray_proxy.fill_h(value)
self.intra_locale_barrier()
def copy(self, order='C'):
from . import globale_creation as _globale_creation
ary_out = _globale_creation.empty_like(self, order=order)
ary_out.lndarray_proxy.rank_view_partition_h[...] = \
self.lndarray_proxy.rank_view_partition_h[...]
self.intra_locale_barrier()
return ary_out
def get_view(self, slice=None, start=None, stop=None, halo=0):
"""
Returns :samp:`(ary, extent)` pair, where :samp:`ary` is a
view from the locale extent array corresponding to the
        specified extent arguments. If any part of the globale slice
        lies outside the locale extent, then :samp:`ary` is :samp:`None`.
The :samp:`extent` element is a :obj:`mpi_array.distribution.LocaleExtent`
instance which corresponds to the specified extent arguments.
"""
if slice is not None:
tmp = _np.array(list([s.start, s.stop] for s in slice))
start = tmp[:, 0]
stop = tmp[:, 1]
# Create an extent object equivalent to the argument slice.
locale_extent = self.lndarray_proxy.locale_extent
dst_extent =\
_LocaleExtent(
peer_rank=locale_extent.peer_rank,
inter_locale_rank=locale_extent.inter_locale_rank,
start=start,
stop=stop,
slice=slice,
globale_extent=self.distribution.globale_extent,
halo=halo,
)
locale_ary = None
if _np.all(
_np.logical_and(
dst_extent.start_h >= locale_extent.start_n,
dst_extent.stop_h <= locale_extent.stop_n
)
):
# Can return a view of the locale array data
shape = dst_extent.shape_h
lstart = locale_extent.globale_to_locale_h(dst_extent.start_h)
lstop = lstart + shape
slc = tuple(_builtin_slice(lstart[a], lstop[a]) for a in range(locale_extent.ndim))
locale_ary = self.lndarray_proxy.lndarray[slc]
return locale_ary, dst_extent
def reshape(self, shape):
"""
Returns an array containing the same data with a new shape equal to :samp:`{shape}`.
"""
raise NotImplementedError()
def locale_get(self, slice=None, start=None, stop=None, halo=0):
"""
        Collective over :samp:`{self}.locale_comms.intra_locale_comm` to
get a portion of the globale array. Returns a view from the
locale extent of the array if possible, otherwise allocates
shared memory and performs one-sided RMA to fetch data from
remote locales.
"""
locale_ary, dst_extent = self.get_view(slice=slice, start=start, stop=stop, halo=halo)
if locale_ary is None:
# Need to fetch remote data
if not self.rma_window_buffer.inter_locale_win_initialised:
raise ValueError(
"Attempting inter-locale one-sided RMA without having created"
+
" the inter-locale window, call the initialise_windows method"
+
" (all *peer* ranks)"
+
" to create windows before performing one-sided RMA."
)
# Allocate (shared) memory for the data to be returned.
locale_ary = \
_win_lndarray(
shape=dst_extent.shape_h,
dtype=self.dtype,
comm=self.locale_comms.intra_locale_comm
)
if self.locale_comms.have_valid_inter_locale_comm:
# Calculate the update objects which indicate where to fetch the data.
update_calculator = \
_MpiUpdatesForGet(
dst_extent=dst_extent,
src_distrib=self.distribution,
dtype=self.dtype,
order=self.order,
update_dst_halo=True
)
update_executor = \
_RmaUpdateExecutor(
inter_win=self.rma_window_buffer.inter_locale_win,
dst_lndarray=locale_ary,
src_inter_win_rank_attr="inter_locale_rank",
rank_logger=self.rank_logger
)
# Perform the updates, copy locale array data to locale_ary first.
updates = update_calculator._dst_cpy2_updates[dst_extent.inter_locale_rank]
update_executor.do_direct_cpy2_update(updates, self.lndarray_proxy.lndarray)
# Fetch remote data.
updates = update_calculator._dst_rget_updates[dst_extent.inter_locale_rank]
update_executor.do_locale_rma_update(updates)
# All locale processes wait for data fetch to conclude
self.intra_locale_barrier()
return locale_ary
def peer_rank_get(self, slice=None, start=None, stop=None, halo=0):
"""
Non-collective, one-sided fetch of data to this peer rank process.
Returns a view from the locale extent of the array if possible,
otherwise allocates non-shared memory and performs one-sided RMA
to fetch data from remote locales.
"""
locale_ary, dst_extent = self.get_view(slice=slice, start=start, stop=stop, halo=halo)
if locale_ary is None:
# Need to fetch remote data
if not self.rma_window_buffer.peer_win_initialised:
raise ValueError(
"Attempting peer one-sided RMA without having created"
+
" the peer window, call the initialise_windows method (all *peer* ranks)"
+
" to create windows before performing one-sided RMA."
)
# Allocate memory for the data to be returned.
locale_ary = \
_win_lndarray(
shape=dst_extent.shape_h,
dtype=self.dtype,
comm=_mpi.COMM_SELF
)
update_calculator = \
_MpiUpdatesForGet(
dst_extent=dst_extent,
src_distrib=self.distribution,
dtype=self.dtype,
order=self.order,
update_dst_halo=True
)
update_executor = \
_RmaUpdateExecutor(
inter_win=self.rma_window_buffer.peer_win,
dst_lndarray=locale_ary,
src_inter_win_rank_attr="peer_rank",
rank_logger=self.rank_logger
)
# Perform the updates, copy locale array data to locale_ary first.
updates = update_calculator._dst_cpy2_updates[dst_extent.inter_locale_rank]
update_executor.do_direct_cpy2_update(updates, self.lndarray_proxy.lndarray)
# Fetch remote data.
updates = update_calculator._dst_rget_updates[dst_extent.inter_locale_rank]
update_executor.do_locale_rma_update(updates)
return locale_ary
def free_all(objects):
"""
Call the :samp:`free` attribute on all arguments.
:type objects: sequence of :obj:`object`
:param objects: Call the :samp:`free` attribute for all objects in this
sequence (if it exists and it is :obj:`callable`).
"""
for obj in objects:
if hasattr(obj, "free") and hasattr(obj.free, "__call__"):
obj.free()
def copyto(dst, src, casting="same_kind", **kwargs):
"""
Copy the elements of the :samp:`{src}` array to corresponding elements of
the :samp:`dst` array.
:type dst: :obj:`gndarray`
:param dst: Global array which receives elements.
:type src: :obj:`gndarray`
:param src: Global array from which elements are copied.
:type casting: :obj:`str`
:param casting: See :samp:`{casting}` parameter in :func:`numpy.copyto`.
"""
if not isinstance(dst, gndarray):
raise ValueError(
"Got type(dst)=%s, expected %s." % (type(dst), gndarray)
)
dst.copyfrom(src, casting=casting)
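# Example (illustrative sketch; assumes :mod:`mpi_array.globale_creation`
# provides zeros/ones factories, which is hypothetical here):
#
#     import mpi_array.globale_creation as creation
#     src = creation.ones((1024, 1024), dtype="int32")
#     dst = creation.zeros((1024, 1024), dtype="int64")
#     copyto(dst, src, casting="same_kind")
#     free_all([src, dst])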
__all__ = [s for s in dir() if not s.startswith('_')]
|
|
# -*- coding: utf-8 -*-
from nose.tools import * # flake8: noqa
from urlparse import urlparse
from framework.auth.core import Auth
from website.models import NodeLog
from api.base.settings.defaults import API_BASE
from tests.base import ApiTestCase
from tests.factories import (
ProjectFactory,
RegistrationFactory,
AuthUserFactory
)
from tests.utils import assert_logs
def node_url_for(n_id):
    return '/{}nodes/{}/'.format(API_BASE, n_id)
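# The JSON:API payload shape exercised throughout these tests (illustrative):
#
#     {
#         'data': {
#             'type': 'node_links',
#             'relationships': {
#                 'nodes': {'data': {'id': '<target node id>', 'type': 'nodes'}}
#             }
#         }
#     }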
class TestNodeLinksList(ApiTestCase):
def setUp(self):
super(TestNodeLinksList, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=False, creator=self.user)
self.pointer_project = ProjectFactory(is_public=False, creator=self.user)
self.project.add_pointer(self.pointer_project, auth=Auth(self.user))
self.private_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.project._id)
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer_project = ProjectFactory(is_public=True, creator=self.user)
self.public_project.add_pointer(self.public_pointer_project, auth=Auth(self.user))
self.public_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.public_project._id)
self.user_two = AuthUserFactory()
def test_return_public_node_pointers_logged_out(self):
res = self.app.get(self.public_url)
res_json = res.json['data']
assert_equal(len(res_json), 1)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
expected_path = node_url_for(self.public_pointer_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_return_public_node_pointers_logged_in(self):
res = self.app.get(self.public_url, auth=self.user_two.auth)
res_json = res.json['data']
assert_equal(len(res_json), 1)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
expected_path = node_url_for(self.public_pointer_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_return_private_node_pointers_logged_out(self):
res = self.app.get(self.private_url, expect_errors=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
def test_return_private_node_pointers_logged_in_contributor(self):
res = self.app.get(self.private_url, auth=self.user.auth)
res_json = res.json['data']
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
assert_equal(len(res_json), 1)
expected_path = node_url_for(self.pointer_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_return_private_node_pointers_logged_in_non_contributor(self):
res = self.app.get(self.private_url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
def test_deleted_links_not_returned(self):
res = self.app.get(self.public_url, expect_errors=True)
res_json = res.json['data']
original_length = len(res_json)
self.public_pointer_project.is_deleted = True
self.public_pointer_project.save()
res = self.app.get(self.public_url)
res_json = res.json['data']
assert_equal(len(res_json), original_length - 1)
class TestNodeLinkCreate(ApiTestCase):
def setUp(self):
super(TestNodeLinkCreate, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=False, creator=self.user)
self.pointer_project = ProjectFactory(is_public=False, creator=self.user)
self.private_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.project._id)
self.private_payload = {
'data': {
"type": "node_links",
"relationships": {
'nodes': {
'data': {
'id': self.pointer_project._id,
'type': 'nodes'
}
}
}
}
}
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer_project = ProjectFactory(is_public=True, creator=self.user)
self.public_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.public_project._id)
self.public_payload = {
'data': {
"type": "node_links",
"relationships": {
'nodes': {
'data': {
'id': self.public_pointer_project._id,
'type': 'nodes'
}
}
}
}
}
self.fake_url = '/{}nodes/{}/node_links/'.format(API_BASE, 'fdxlq')
self.fake_payload = {
'data': {
"type": "node_links",
"relationships": {
'nodes': {
'data': {
'id': 'fdxlq',
'type': 'nodes'
}
}
}
}
}
self.point_to_itself_payload = {
'data': {
"type": "node_links",
"relationships": {
'nodes': {
'data': {
'id': self.public_project._id,
'type': 'nodes'
}
}
}
}
}
self.user_two = AuthUserFactory()
self.user_two_project = ProjectFactory(is_public=True, creator=self.user_two)
self.user_two_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.user_two_project._id)
self.user_two_payload = {
'data': {
'type': 'node_links',
'relationships': {
'nodes': {
'data': {
'id': self.user_two_project._id,
'type': 'nodes'
}
}
}
}
}
def test_add_node_link_relationships_is_a_list(self):
data = {
'data': {
'type': 'node_links',
'relationships': [{'target_node_id': self.public_pointer_project._id}]
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], "Malformed request.")
def test_create_node_link_invalid_data(self):
res = self.app.post_json_api(self.public_url, "Incorrect data", auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], "Malformed request.")
def test_add_node_link_no_relationships(self):
data = {
'data': {
'type': 'node_links',
'attributes': {
'id': self.public_pointer_project._id
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/relationships')
def test_add_node_links_empty_relationships(self):
data = {
'data': {
'type': 'node_links',
'relationships': {}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/relationships')
def test_add_node_links_no_nodes_key_in_relationships(self):
data = {
'data': {
'type': 'node_links',
'relationships': {
'data': {
'id': self.public_pointer_project._id,
'type': 'nodes'
}
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Malformed request.')
def test_add_node_links_no_data_in_relationships(self):
data = {
'data': {
'type': 'node_links',
'relationships': {
'nodes': {
'id': self.public_pointer_project._id,
'type': 'nodes'
}
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Request must include /data.')
def test_add_node_links_no_target_type_in_relationships(self):
data = {
'data': {
'type': 'node_links',
'relationships': {
'nodes': {
'data': {
'id': self.public_pointer_project._id
}
}
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Request must include /type.')
def test_add_node_links_no_target_id_in_relationships(self):
data = {
'data': {
'type': 'node_links',
'relationships': {
'nodes': {
'data': {
'type': 'nodes'
}
}
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/id')
def test_add_node_links_incorrect_target_id_in_relationships(self):
data = {
'data': {
'type': 'node_links',
'relationships': {
'nodes': {
'data': {
'type': 'nodes',
'id': '12345'
}
}
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_add_node_links_incorrect_target_type_in_relationships(self):
data = {
'data': {
'type': 'nodes',
'relationships': {
'nodes': {
'data': {
'type': 'Incorrect!',
'id': self.public_pointer_project._id
}
}
}
}
}
res = self.app.post_json_api(self.public_url, data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 409)
def test_creates_node_link_target_not_nested(self):
payload = {
'data': {
'type': 'node_links',
'id': self.pointer_project._id
}
}
res = self.app.post_json_api(self.public_url, payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/relationships')
assert_equal(res.json['errors'][0]['detail'], 'Request must include /data/relationships.')
def test_creates_public_node_pointer_logged_out(self):
res = self.app.post_json_api(self.public_url, self.public_payload, expect_errors=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'public_project')
def test_creates_public_node_pointer_logged_in(self):
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user.auth)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.public_pointer_project._id)
actual_path = urlparse(res_json['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_creates_private_node_pointer_logged_out(self):
res = self.app.post_json_api(self.private_url, self.private_payload, expect_errors=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
def test_creates_private_node_pointer_logged_in_contributor(self):
res = self.app.post_json_api(self.private_url, self.private_payload, auth=self.user.auth)
assert_equal(res.status_code, 201)
res_json = res.json['data']
expected_path = node_url_for(self.pointer_project._id)
actual_path = urlparse(res_json['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_creates_private_node_pointer_logged_in_non_contributor(self):
res = self.app.post_json_api(self.private_url, self.private_payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
def test_create_node_pointer_non_contributing_node_to_contributing_node(self):
res = self.app.post_json_api(self.private_url, self.user_two_payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'project')
def test_create_node_pointer_contributing_node_to_non_contributing_node(self):
res = self.app.post_json_api(self.private_url, self.user_two_payload, auth=self.user.auth)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.user_two_project._id)
actual_path = urlparse(res_json['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_create_pointer_non_contributing_node_to_fake_node(self):
res = self.app.post_json_api(self.private_url, self.fake_payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
def test_create_pointer_contributing_node_to_fake_node(self):
res = self.app.post_json_api(self.private_url, self.fake_payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('detail', res.json['errors'][0])
def test_create_fake_node_pointing_to_contributing_node(self):
res = self.app.post_json_api(self.fake_url, self.private_payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 404)
assert_in('detail', res.json['errors'][0])
res = self.app.post_json_api(self.fake_url, self.private_payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'public_project')
def test_create_node_pointer_to_itself(self):
res = self.app.post_json_api(self.public_url, self.point_to_itself_payload, auth=self.user.auth)
res_json = res.json['data']
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
expected_path = node_url_for(self.public_project._id)
actual_path = urlparse(res_json['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_create_node_pointer_to_itself_unauthorized(self):
res = self.app.post_json_api(self.public_url, self.point_to_itself_payload, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'public_project')
def test_create_node_pointer_already_connected(self):
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user.auth)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.public_pointer_project._id)
actual_path = urlparse(res_json['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('detail', res.json['errors'][0])
def test_cannot_add_link_to_registration(self):
registration = RegistrationFactory(creator=self.user)
url = '/{}nodes/{}/node_links/'.format(API_BASE, registration._id)
payload = {
'data': {
'type': 'node_links',
'relationships': {
'nodes': {
'data': {
'id': self.public_pointer_project._id,
'type': 'nodes'
}
}
}
}
}
res = self.app.post_json_api(url, payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_create_node_pointer_no_type(self):
payload = {
'data': {
'relationships': {
'nodes': {
'data': {
'id': self.user_two_project._id,
'type': 'nodes'
}
}
}
}
}
res = self.app.post_json_api(self.private_url, payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'This field may not be null.')
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/type')
def test_create_node_pointer_incorrect_type(self):
payload = {
'data': {
'type': 'Wrong type.',
'relationships': {
'nodes': {
'data': {
'id': self.user_two_project._id,
'type': 'nodes'
}
}
}
}
}
res = self.app.post_json_api(self.private_url, payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 409)
assert_equal(res.json['errors'][0]['detail'], 'Resource identifier does not match server endpoint.')
class TestNodeLinksBulkCreate(ApiTestCase):
def setUp(self):
super(TestNodeLinksBulkCreate, self).setUp()
self.user = AuthUserFactory()
self.private_project = ProjectFactory(is_public=False, creator=self.user)
self.private_pointer_project = ProjectFactory(is_public=False, creator=self.user)
self.private_pointer_project_two = ProjectFactory(is_public=False, creator=self.user)
self.private_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.private_project._id)
self.private_payload = {
'data': [{
"type": "node_links",
"relationships": {
'nodes': {
'data': {
"id": self.private_pointer_project._id,
"type": 'nodes'
}
}
}
},
{
"type": "node_links",
"relationships": {
'nodes': {
'data': {
"id": self.private_pointer_project_two._id,
"type": 'nodes'
}
}
}
}]
}
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer_project = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer_project_two = ProjectFactory(is_public=True, creator=self.user)
self.public_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.public_project._id)
self.public_payload = {
'data': [{
"type": "node_links",
"relationships": {
'nodes': {
'data': {
"id": self.public_pointer_project._id,
"type": 'nodes'
}
}
}
},
{
"type": "node_links",
"relationships": {
'nodes': {
'data': {
"id": self.public_pointer_project_two._id,
"type": 'nodes'
}
}
}
}]
}
self.user_two = AuthUserFactory()
self.user_two_project = ProjectFactory(is_public=True, creator=self.user_two)
self.user_two_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.user_two_project._id)
self.user_two_payload = {'data': [{
'type': 'node_links',
'relationships': {
'nodes': {
'data': {
'id': self.user_two_project._id,
'type': 'nodes'
}
}
}
}
]}
def test_bulk_create_node_links_blank_request(self):
res = self.app.post_json_api(self.public_url, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
def test_bulk_creates_pointers_limits(self):
payload = {'data': [self.public_payload['data'][0]] * 11}
res = self.app.post_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Bulk operation limit is 10, got 11.')
assert_equal(res.json['errors'][0]['source']['pointer'], '/data')
res = self.app.get(self.public_url)
assert_equal(res.json['data'], [])
def test_bulk_creates_project_target_not_nested(self):
payload = {'data': [{'type': 'node_links', 'target_node_id': self.private_pointer_project._id}]}
res = self.app.post_json_api(self.public_url, payload, auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/relationships')
assert_equal(res.json['errors'][0]['detail'], 'Request must include /data/relationships.')
def test_bulk_creates_public_node_pointers_logged_out(self):
res = self.app.post_json_api(self.public_url, self.public_payload, expect_errors=True, bulk=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
res = self.app.get(self.public_url)
assert_equal(res.json['data'], [])
def test_bulk_creates_public_node_pointer_logged_in_non_contrib(self):
res = self.app.post_json_api(self.public_url, self.public_payload,
auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
@assert_logs(NodeLog.POINTER_CREATED, 'public_project')
def test_bulk_creates_public_node_pointer_logged_in_contrib(self):
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user.auth, bulk=True)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.public_pointer_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
expected_path = node_url_for(self.public_pointer_project_two._id)
actual_path = urlparse(res_json[1]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_bulk_creates_private_node_pointers_logged_out(self):
res = self.app.post_json_api(self.private_url, self.private_payload, expect_errors=True, bulk=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
res = self.app.get(self.private_url, auth=self.user.auth)
assert_equal(res.json['data'], [])
@assert_logs(NodeLog.POINTER_CREATED, 'private_project', index=-1)
@assert_logs(NodeLog.POINTER_CREATED, 'private_project')
def test_bulk_creates_private_node_pointer_logged_in_contributor(self):
res = self.app.post_json_api(self.private_url, self.private_payload, auth=self.user.auth, bulk=True)
assert_equal(res.status_code, 201)
res_json = res.json['data']
expected_path = node_url_for(self.private_pointer_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
expected_path = node_url_for(self.private_pointer_project_two._id)
actual_path = urlparse(res_json[1]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_bulk_creates_private_node_pointers_logged_in_non_contributor(self):
res = self.app.post_json_api(self.private_url, self.private_payload,
auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
res = self.app.get(self.private_url, auth=self.user.auth)
assert_equal(res.json['data'], [])
def test_bulk_creates_node_pointers_non_contributing_node_to_contributing_node(self):
res = self.app.post_json_api(self.private_url, self.user_two_payload,
auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'private_project')
def test_bulk_creates_node_pointers_contributing_node_to_non_contributing_node(self):
res = self.app.post_json_api(self.private_url, self.user_two_payload, auth=self.user.auth, bulk=True)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.user_two_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
res = self.app.get(self.private_url, auth=self.user.auth)
res_json = res.json['data']
expected_path = node_url_for(self.user_two_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_bulk_creates_pointers_non_contributing_node_to_fake_node(self):
fake_payload = {'data': [{'type': 'node_links', 'relationships': {'nodes': {'data': {'id': 'fdxlq', 'type': 'nodes'}}}}]}
res = self.app.post_json_api(self.private_url, fake_payload,
auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
def test_bulk_creates_pointers_contributing_node_to_fake_node(self):
fake_payload = {'data': [{'type': 'node_links', 'relationships': {'nodes': {'data': {'id': 'fdxlq', 'type': 'nodes'}}}}]}
res = self.app.post_json_api(self.private_url, fake_payload,
auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_in('detail', res.json['errors'][0])
def test_bulk_creates_fake_nodes_pointing_to_contributing_node(self):
fake_url = '/{}nodes/{}/node_links/'.format(API_BASE, 'fdxlq')
res = self.app.post_json_api(fake_url, self.private_payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 404)
assert_in('detail', res.json['errors'][0])
res = self.app.post_json_api(fake_url, self.private_payload, auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 404)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'public_project')
def test_bulk_creates_node_pointer_to_itself(self):
point_to_itself_payload = {'data': [{'type': 'node_links', 'relationships': {'nodes': {'data': {'type': 'nodes', 'id': self.public_project._id}}}}]}
res = self.app.post_json_api(self.public_url, point_to_itself_payload, auth=self.user.auth, bulk=True)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.public_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
def test_bulk_creates_node_pointer_to_itself_unauthorized(self):
point_to_itself_payload = {'data': [{'type': 'node_links', 'relationships': {'nodes': {'data': {'type': 'nodes', 'id': self.public_project._id}}}}]}
res = self.app.post_json_api(self.public_url, point_to_itself_payload, bulk=True, auth=self.user_two.auth,
expect_errors=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_CREATED, 'public_project')
@assert_logs(NodeLog.POINTER_CREATED, 'public_project', index=-1)
def test_bulk_creates_node_pointer_already_connected(self):
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user.auth, bulk=True)
assert_equal(res.status_code, 201)
assert_equal(res.content_type, 'application/vnd.api+json')
res_json = res.json['data']
expected_path = node_url_for(self.public_pointer_project._id)
actual_path = urlparse(res_json[0]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
expected_path = node_url_for(self.public_pointer_project_two._id)
actual_path = urlparse(res_json[1]['relationships']['target_node']['links']['related']['href']).path
assert_equal(expected_path, actual_path)
res = self.app.post_json_api(self.public_url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_in("Target Node '{}' already pointed to by '{}'.".format(self.public_pointer_project._id, self.public_project._id), res.json['errors'][0]['detail'])
def test_bulk_cannot_add_link_to_registration(self):
registration = RegistrationFactory(creator=self.user)
url = '/{}nodes/{}/node_links/'.format(API_BASE, registration._id)
payload = {'data': [{'type': 'node_links', 'relationships': {'nodes': {'data': {'type': 'nodes', 'id': self.public_pointer_project._id}}}}]}
res = self.app.post_json_api(url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
def test_bulk_creates_node_pointer_no_type(self):
payload = {'data': [{'relationships': {'nodes': {'data': {'type': 'nodes', 'id': self.user_two_project._id}}}}]}
res = self.app.post_json_api(self.private_url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'This field may not be null.')
assert_equal(res.json['errors'][0]['source']['pointer'], '/data/0/type')
def test_bulk_creates_node_pointer_incorrect_type(self):
payload = {'data': [{'type': 'Wrong type.', 'relationships': {'nodes': {'data': {'type': 'nodes', 'id': self.user_two_project._id}}}}]}
res = self.app.post_json_api(self.private_url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 409)
assert_equal(res.json['errors'][0]['detail'], 'Resource identifier does not match server endpoint.')
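# A minimal sketch (not used by the tests above) of the JSON API payload shape
# the bulk endpoints expect: each item is a `node_links` resource whose target
# node is supplied through the `nodes` relationship.
def make_node_links_payload(node_ids):
    return {
        'data': [{
            'type': 'node_links',
            'relationships': {
                'nodes': {
                    'data': {'id': node_id, 'type': 'nodes'}
                }
            }
        } for node_id in node_ids]
    }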
class TestBulkDeleteNodeLinks(ApiTestCase):
def setUp(self):
super(TestBulkDeleteNodeLinks, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user, is_public=False)
self.pointer_project = ProjectFactory(creator=self.user, is_public=True)
self.pointer_project_two = ProjectFactory(creator=self.user, is_public=True)
self.pointer = self.project.add_pointer(self.pointer_project, auth=Auth(self.user), save=True)
self.pointer_two = self.project.add_pointer(self.pointer_project_two, auth=Auth(self.user), save=True)
self.private_payload = {
"data": [
{"type": "node_links", "id": self.pointer._id},
{"type": "node_links", "id": self.pointer_two._id}
]
}
self.private_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.project._id)
self.user_two = AuthUserFactory()
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer_project = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer_project_two = ProjectFactory(is_public=True, creator=self.user)
self.public_pointer = self.public_project.add_pointer(self.public_pointer_project,
auth=Auth(self.user),
save=True)
self.public_pointer_two = self.public_project.add_pointer(self.public_pointer_project_two,
auth=Auth(self.user),
save=True)
self.public_payload = {
'data': [
{'type': 'node_links', 'id': self.public_pointer._id},
{'type': 'node_links', 'id': self.public_pointer_two._id}
]
}
self.public_url = '/{}nodes/{}/node_links/'.format(API_BASE, self.public_project._id)
def test_bulk_delete_node_links_blank_request(self):
res = self.app.delete_json_api(self.public_url, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
def test_bulk_delete_pointer_limits(self):
res = self.app.delete_json_api(self.public_url, {'data': [self.public_payload['data'][0]] * 11},
auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Bulk operation limit is 10, got 11.')
assert_equal(res.json['errors'][0]['source']['pointer'], '/data')
def test_bulk_delete_dict_inside_data(self):
res = self.app.delete_json_api(self.public_url, {'data': {'id': self.public_project._id, 'type': 'node_links'}},
auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Expected a list of items but got type "dict".')
def test_bulk_delete_pointers_no_type(self):
payload = {'data': [
{'id': self.public_pointer._id},
{'id': self.public_pointer_two._id}
]}
res = self.app.delete_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['source']['pointer'], "/data/type")
def test_bulk_delete_pointers_incorrect_type(self):
payload = {'data': [
{'id': self.public_pointer._id, 'type': 'Incorrect type.'},
{'id': self.public_pointer_two._id, 'type': 'Incorrect type.'}
]}
res = self.app.delete_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 409)
def test_bulk_delete_pointers_no_id(self):
payload = {'data': [
{'type': 'node_links'},
{'type': 'node_links'}
]}
res = self.app.delete_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['source']['pointer'], "/data/id")
def test_bulk_delete_pointers_no_data(self):
res = self.app.delete_json_api(self.public_url, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Request must contain array of resource identifier objects.')
def test_bulk_delete_pointers_payload_is_empty_dict(self):
res = self.app.delete_json_api(self.public_url, {}, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Request must include /data.')
def test_cannot_delete_if_registration(self):
registration = RegistrationFactory(project=self.public_project)
url = '/{}nodes/{}/node_links/'.format(API_BASE, registration._id)
res = self.app.delete_json_api(url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
def test_bulk_deletes_public_node_pointers_logged_out(self):
res = self.app.delete_json_api(self.public_url, self.public_payload, expect_errors=True, bulk=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
def test_bulk_deletes_public_node_pointers_fails_if_bad_auth(self):
node_count_before = len(self.public_project.nodes_pointer)
res = self.app.delete_json_api(self.public_url, self.public_payload,
auth=self.user_two.auth, expect_errors=True, bulk=True)
# This could arguably be a 405, but we don't need to go crazy with status codes
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
self.public_project.reload()
assert_equal(node_count_before, len(self.public_project.nodes_pointer))
@assert_logs(NodeLog.POINTER_REMOVED, 'public_project')
@assert_logs(NodeLog.POINTER_REMOVED, 'public_project', index=-1)
def test_bulk_deletes_public_node_pointers_succeeds_as_owner(self):
node_count_before = len(self.public_project.nodes_pointer)
res = self.app.delete_json_api(self.public_url, self.public_payload, auth=self.user.auth, bulk=True)
self.public_project.reload()
assert_equal(res.status_code, 204)
assert_equal(node_count_before - 2, len(self.public_project.nodes_pointer))
def test_bulk_deletes_private_node_pointers_logged_out(self):
res = self.app.delete_json_api(self.private_url, self.private_payload, expect_errors=True, bulk=True)
assert_equal(res.status_code, 401)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_REMOVED, 'project', index=-1)
@assert_logs(NodeLog.POINTER_REMOVED, 'project')
def test_bulk_deletes_private_node_pointers_logged_in_contributor(self):
res = self.app.delete_json_api(self.private_url, self.private_payload, auth=self.user.auth, bulk=True)
self.project.reload()  # Update the model to reflect changes made by the delete request
assert_equal(res.status_code, 204)
assert_equal(len(self.project.nodes_pointer), 0)
def test_bulk_deletes_private_node_pointers_logged_in_non_contributor(self):
res = self.app.delete_json_api(self.private_url, self.private_payload,
auth=self.user_two.auth, expect_errors=True, bulk=True)
assert_equal(res.status_code, 403)
assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.POINTER_REMOVED, 'public_project', index=-1)
@assert_logs(NodeLog.POINTER_REMOVED, 'public_project')
def test_return_bulk_deleted_public_node_pointer(self):
res = self.app.delete_json_api(self.public_url, self.public_payload, auth=self.user.auth, bulk=True)
self.public_project.reload()  # Update the model to reflect changes made by the delete request
assert_equal(res.status_code, 204)
pointer_url = '/{}nodes/{}/node_links/{}/'.format(API_BASE, self.public_project._id, self.public_pointer._id)
# Check that the deleted pointer cannot be returned
res = self.app.get(pointer_url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 404)
@assert_logs(NodeLog.POINTER_REMOVED, 'project', index=-1)
@assert_logs(NodeLog.POINTER_REMOVED, 'project')
def test_return_bulk_deleted_private_node_pointer(self):
res = self.app.delete_json_api(self.private_url, self.private_payload, auth=self.user.auth, bulk=True)
self.project.reload()  # Update the model to reflect changes made by the delete request
assert_equal(res.status_code, 204)
pointer_url = '/{}nodes/{}/node_links/{}/'.format(API_BASE, self.project._id, self.pointer._id)
# Check that the deleted pointer cannot be returned
res = self.app.get(pointer_url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 404)
# Regression test for https://openscience.atlassian.net/browse/OSF-4322
def test_bulk_delete_link_that_is_not_linked_to_correct_node(self):
# The node links in public_payload belong to public_project, not to the
# project behind private_url
res = self.app.delete_json_api(
self.private_url, self.public_payload,
auth=self.user.auth,
expect_errors=True,
bulk=True
)
assert_equal(res.status_code, 400)
errors = res.json['errors']
assert_equal(len(errors), 1)
assert_equal(errors[0]['detail'], 'Node link does not belong to the requested node.')
|
|
from planar import Vec2
from typing import List
class UnknownPlayerIdException(Exception):
def __init__(self, player_id):
super().__init__("Unknown Player ID %d" % player_id)
class Game:
RANKED_GAME_ID = -1
def __init__(self, id_: int, tick: int, time_left: float, player_id: int, players: List['Player'],
resources: 'Resources', map_: 'Map', viruses: List['Virus']):
self.id = id_
self.tick = tick
self.time_left = time_left
self.players = players
self.resources = resources
self.map = map_
self.viruses = viruses
try:
self.me = next(player for player in players
if player.id == player_id)
except StopIteration:
raise UnknownPlayerIdException(player_id)
self.enemies = [player for player in players if player.id != player_id]
@staticmethod
def parse(obj, player_id) -> 'Game':
return Game(
obj["id"],
obj["tick"],
obj["timeLeft"],
player_id,
[Player.parse(player) for player in obj["players"]],
Resources.parse(obj["resources"]),
Map.parse(obj["map"]),
[Virus.parse(virus) for virus in obj["viruses"]]
)
def __str__(self):
return ("""
Tick: %d
Map: %s
Players:
%s
Viruses: [%s]
Resources:
%s""" % (self.tick,
self.map,
"\n ".join([str(player) for player in self.players]),
", ".join([str(virus) for virus in self.viruses]),
self.resources))
class Map:
def __init__(self, width: float, height: float):
self.width = width
self.height = height
@staticmethod
def parse(obj) -> 'Map':
return Map(obj["width"], obj["height"])
def __str__(self):
return "%d x %d" % (self.width, self.height)
class Player:
def __init__(self, id_: int, name: str, total_mass: int, active: bool, cells: List['Cell']):
self.id = id_
self.name = name
self.total_mass = total_mass
self.active = active
self.cells = cells
@staticmethod
def parse(obj) -> 'Player':
return Player(
obj["id"],
obj["name"],
obj["total_mass"],
obj["isActive"],
[Cell.parse(cell) for cell in obj["cells"]]
)
def __str__(self):
return ("%d '%s' %s mass=%d cells=[%s]" %
(self.id, self.name,
"active" if self.active else "inactive",
self.total_mass,
", ".join([str(cell) for cell in self.cells])))
class Cell:
def __init__(self, id_: int, mass: int, radius: float, position: Vec2, target: Vec2):
self.id = id_
self.mass = mass
self.radius = radius
self.position = position
self.target = target
self._actions = CellActions(self.id)
def move(self, target: Vec2):
self._actions.target = target
def split(self):
self._actions.split = True
def burst(self):
self._actions.burst = True
def trade(self, quantity: int):
self._actions.trade = quantity
@staticmethod
def parse(obj) -> 'Cell':
return Cell(
obj["id"],
obj["mass"],
obj["radius"],
parse_vec2(obj["position"]),
parse_vec2(obj["target"])
)
def __str__(self):
return ("#%d (%d) %s -> %s" %
(self.id, self.mass,
format_vec2(self.position), format_vec2(self.target)))
def actions(self):
actions = self._actions.export()
if actions is not None and actions.target is None:
actions.target = self.target
return actions
class Resources:
def __init__(self, regular_positions: List[Vec2], silver_positions: List[Vec2], gold_positions: List[Vec2]):
self.regular = regular_positions
self.silver = silver_positions
self.gold = gold_positions
self.allResources = regular_positions + silver_positions + gold_positions
@staticmethod
def parse(obj) -> 'Resources':
return Resources(
[parse_vec2(pos) for pos in obj["regular"]],
[parse_vec2(pos) for pos in obj["silver"]],
[parse_vec2(pos) for pos in obj["gold"]]
)
def __str__(self):
return ("regular: %d, silver: %d, gold: %d" %
(len(self.regular), len(self.silver), len(self.gold)))
class Virus:
def __init__(self, mass: int, position: Vec2):
self.mass = mass
self.position = position
@staticmethod
def parse(obj) -> 'Virus':
return Virus(
obj["mass"],
parse_vec2(obj["position"])
)
def __str__(self):
return format_vec2(self.position)
class CellActions:
def __init__(self, cell_id: int):
self._cell_id = cell_id
self._target = None
self._burst = False
self._split = False
self._trade = 0
self._changed = False
@property
def cell_id(self) -> int:
return self._cell_id
@cell_id.setter
def cell_id(self, c):
self._cell_id = c
self._changed = True
@property
def target(self) -> Vec2:
return self._target
@target.setter
def target(self, t):
self._target = t
self._changed = True
@property
def burst(self):
return self._burst
@burst.setter
def burst(self, b):
self._burst = b
self._changed = True
@property
def split(self):
return self._split
@split.setter
def split(self, s):
self._split = s
self._changed = True
@property
def trade(self):
return self._trade
@trade.setter
def trade(self, t):
self._trade = t
self._changed = True
def export(self) -> 'CellActions':
if not self._changed:
return None
return self
def parse_vec2(obj):
if obj["x"] is None or obj["y"] is None:
return Vec2(0, 0)
return Vec2(obj["x"], obj["y"])
def format_vec2(vec2):
return "(%d,%d)" % (vec2.x, vec2.y)
|
|
# cython: infer_types=True
#
# Tree visitor and transform framework
#
import cython
import inspect
import Nodes
import ExprNodes
import Naming
import Errors
import DebugFlags
class TreeVisitor(object):
"""
Base class for writing visitors for a Cython tree, contains utilities for
recursing such trees using visitors. Each node is
expected to have a child_attrs iterable containing the names of attributes
containing child nodes or lists of child nodes. Lists are not considered
part of the tree structure (i.e. contained nodes are considered direct
children of the parent node).
visitchildren visits each of the children of a given node (see the visitchildren
documentation). When recursing the tree using visitchildren, an attribute
access_path is maintained which gives information about the current location
in the tree as a stack of tuples: (parent_node, attrname, index), representing
the node, attribute and optional list index that was taken in each step in the path to
the current node.
Example:
>>> class SampleNode(object):
... child_attrs = ["head", "body"]
... def __init__(self, value, head=None, body=None):
... self.value = value
... self.head = head
... self.body = body
... def __repr__(self): return "SampleNode(%s)" % self.value
...
>>> tree = SampleNode(0, SampleNode(1), [SampleNode(2), SampleNode(3)])
>>> class MyVisitor(TreeVisitor):
... def visit_SampleNode(self, node):
... print "in", node.value, self.access_path
... self.visitchildren(node)
... print "out", node.value
...
>>> MyVisitor().visit(tree)
in 0 []
in 1 [(SampleNode(0), 'head', None)]
out 1
in 2 [(SampleNode(0), 'body', 0)]
out 2
in 3 [(SampleNode(0), 'body', 1)]
out 3
out 0
"""
def __init__(self):
super(TreeVisitor, self).__init__()
self.dispatch_table = {}
self.access_path = []
def dump_node(self, node, indent=0):
ignored = list(node.child_attrs) + [u'child_attrs', u'pos',
u'gil_message', u'cpp_message',
u'subexprs']
values = []
pos = node.pos
if pos:
source = pos[0]
if source:
import os.path
source = os.path.basename(source.get_description())
values.append(u'%s:%s:%s' % (source, pos[1], pos[2]))
attribute_names = dir(node)
attribute_names.sort()
for attr in attribute_names:
if attr in ignored:
continue
if attr.startswith(u'_') or attr.endswith(u'_'):
continue
try:
value = getattr(node, attr)
except AttributeError:
continue
if value is None or value == 0:
continue
elif isinstance(value, list):
value = u'[...]/%d' % len(value)
elif not isinstance(value, (str, unicode, long, int, float)):
continue
else:
value = repr(value)
values.append(u'%s = %s' % (attr, value))
return u'%s(%s)' % (node.__class__.__name__,
u',\n '.join(values))
def _find_node_path(self, stacktrace):
import os.path
last_traceback = stacktrace
nodes = []
while hasattr(stacktrace, 'tb_frame'):
frame = stacktrace.tb_frame
node = frame.f_locals.get(u'self')
if isinstance(node, Nodes.Node):
code = frame.f_code
method_name = code.co_name
pos = (os.path.basename(code.co_filename),
frame.f_lineno)
nodes.append((node, method_name, pos))
last_traceback = stacktrace
stacktrace = stacktrace.tb_next
return (last_traceback, nodes)
def _raise_compiler_error(self, child, e):
import sys
trace = ['']
for parent, attribute, index in self.access_path:
node = getattr(parent, attribute)
if index is None:
index = ''
else:
node = node[index]
index = u'[%d]' % index
trace.append(u'%s.%s%s = %s' % (
parent.__class__.__name__, attribute, index,
self.dump_node(node)))
stacktrace, called_nodes = self._find_node_path(sys.exc_info()[2])
last_node = child
for node, method_name, pos in called_nodes:
last_node = node
trace.append(u"File '%s', line %d, in %s: %s" % (
pos[0], pos[1], method_name, self.dump_node(node)))
raise Errors.CompilerCrash(
last_node.pos, self.__class__.__name__,
u'\n'.join(trace), e, stacktrace)
def find_handler(self, obj):
# to resolve, try entire hierarchy
cls = type(obj)
pattern = "visit_%s"
mro = inspect.getmro(cls)
handler_method = None
for mro_cls in mro:
handler_method = getattr(self, pattern % mro_cls.__name__, None)
if handler_method is not None:
return handler_method
print type(self), cls
if self.access_path:
print self.access_path
print self.access_path[-1][0].pos
print self.access_path[-1][0].__dict__
raise RuntimeError("Visitor %r does not accept object: %s" % (self, obj))
def visit(self, obj):
return self._visit(obj)
def _visit(self, obj):
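# Dispatch is memoized per concrete node type: the first visit of a type
# walks the class MRO in find_handler(); subsequent visits hit the
# dispatch_table cache directly.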
try:
handler_method = self.dispatch_table[type(obj)]
except KeyError:
handler_method = self.find_handler(obj)
self.dispatch_table[type(obj)] = handler_method
return handler_method(obj)
def _visitchild(self, child, parent, attrname, idx):
self.access_path.append((parent, attrname, idx))
try:
try:
handler_method = self.dispatch_table[type(child)]
except KeyError:
handler_method = self.find_handler(child)
self.dispatch_table[type(child)] = handler_method
result = handler_method(child)
except Errors.CompileError:
raise
except Errors.AbortError:
raise
except Exception, e:
if DebugFlags.debug_no_exception_intercept:
raise
self._raise_compiler_error(child, e)
self.access_path.pop()
return result
def visitchildren(self, parent, attrs=None):
return self._visitchildren(parent, attrs)
def _visitchildren(self, parent, attrs):
"""
Visits the children of the given parent. If parent is None, returns
immediately (returning None).
The return value is a dictionary giving the results for each
child (mapping the attribute name to either the return value
or a list of return values (in the case of multiple children
in an attribute)).
"""
if parent is None: return None
result = {}
for attr in parent.child_attrs:
if attrs is not None and attr not in attrs: continue
child = getattr(parent, attr)
if child is not None:
if type(child) is list:
childretval = [self._visitchild(x, parent, attr, idx) for idx, x in enumerate(child)]
else:
childretval = self._visitchild(child, parent, attr, None)
assert not isinstance(childretval, list), 'Cannot insert list here: %s in %r' % (attr, parent)
result[attr] = childretval
return result
class VisitorTransform(TreeVisitor):
"""
A tree transform is a base class for visitors that want to do stream
processing of the structure (rather than attributes etc.) of a tree.
It implements __call__ to simply visit the argument node.
It requires the visitor methods to return the nodes which should take
the place of the visited node in the result tree (which can be the same
node, or one or more replacement nodes). Specifically, if the return value
from a visitor method is:
- [] or None; the visited node will be removed (set to None if attached via
  an attribute, removed if in a list)
- A single node; the visited node will be replaced by the returned node.
- A list of nodes; the visited nodes will be replaced by all the nodes in the
list. This will only work if the node was already a member of a list; if it
was not, an exception will be raised. (Typically you want to ensure that you
are within a StatListNode or similar before doing this.)
"""
def visitchildren(self, parent, attrs=None):
result = self._visitchildren(parent, attrs)
for attr, newnode in result.iteritems():
if not type(newnode) is list:
setattr(parent, attr, newnode)
else:
# Flatten the list one level and remove any None
newlist = []
for x in newnode:
if x is not None:
if type(x) is list:
newlist += x
else:
newlist.append(x)
setattr(parent, attr, newlist)
return result
def recurse_to_children(self, node):
self.visitchildren(node)
return node
def __call__(self, root):
return self._visit(root)
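# A minimal sketch (not part of Cython itself) of a transform using the
# semantics documented above: returning [] removes the visited node from the
# enclosing list attribute. It assumes the tree model defines a
# Nodes.PassStatNode for `pass` statements.
class StripPassStatements(VisitorTransform):
    def visit_PassStatNode(self, node):
        return []  # drop it from the enclosing StatListNode
    def visit_Node(self, node):
        self.visitchildren(node)
        return node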
class CythonTransform(VisitorTransform):
"""
Certain common conventions and utilities for Cython transforms.
- Sets up the context of the pipeline in self.context
- Tracks directives in effect in self.current_directives
"""
def __init__(self, context):
super(CythonTransform, self).__init__()
self.context = context
def __call__(self, node):
import ModuleNode
if isinstance(node, ModuleNode.ModuleNode):
self.current_directives = node.directives
return super(CythonTransform, self).__call__(node)
def visit_CompilerDirectivesNode(self, node):
old = self.current_directives
self.current_directives = node.directives
self.visitchildren(node)
self.current_directives = old
return node
def visit_Node(self, node):
self.visitchildren(node)
return node
class ScopeTrackingTransform(CythonTransform):
# Keeps track of the type of scope.
# scope_type: one of 'module', 'function', 'cclass', 'pyclass', 'struct'
# scope_node: the node that owns the current scope
def visit_ModuleNode(self, node):
self.scope_type = 'module'
self.scope_node = node
self.visitchildren(node)
return node
def visit_scope(self, node, scope_type):
prev = self.scope_type, self.scope_node
self.scope_type = scope_type
self.scope_node = node
self.visitchildren(node)
self.scope_type, self.scope_node = prev
return node
def visit_CClassDefNode(self, node):
return self.visit_scope(node, 'cclass')
def visit_PyClassDefNode(self, node):
return self.visit_scope(node, 'pyclass')
def visit_FuncDefNode(self, node):
return self.visit_scope(node, 'function')
def visit_CStructOrUnionDefNode(self, node):
return self.visit_scope(node, 'struct')
class EnvTransform(CythonTransform):
"""
This transformation keeps a stack of the environments.
"""
def __call__(self, root):
self.env_stack = [root.scope]
return super(EnvTransform, self).__call__(root)
def current_env(self):
return self.env_stack[-1]
def visit_FuncDefNode(self, node):
self.env_stack.append(node.local_scope)
self.visitchildren(node)
self.env_stack.pop()
return node
class RecursiveNodeReplacer(VisitorTransform):
"""
Recursively replace all occurrences of a node in a subtree by
another node.
"""
def __init__(self, orig_node, new_node):
super(RecursiveNodeReplacer, self).__init__()
self.orig_node, self.new_node = orig_node, new_node
def visit_Node(self, node):
self.visitchildren(node)
if node is self.orig_node:
return self.new_node
else:
return node
def recursively_replace_node(tree, old_node, new_node):
replace_in = RecursiveNodeReplacer(old_node, new_node)
replace_in(tree)
# Utils
def ensure_statlist(node):
if not isinstance(node, Nodes.StatListNode):
node = Nodes.StatListNode(pos=node.pos, stats=[node])
return node
def replace_node(ptr, value):
"""Replaces a node. ptr is of the form used on the access path stack
(parent, attrname, listidx|None)
"""
parent, attrname, listidx = ptr
if listidx is None:
setattr(parent, attrname, value)
else:
getattr(parent, attrname)[listidx] = value
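# Example (sketch): an access-path entry can be fed straight to replace_node,
# e.g.
#   replace_node((func_node, 'body', None), new_body)   # attribute slot
#   replace_node((stat_list, 'stats', 1), new_stat)     # list slot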
class PrintTree(TreeVisitor):
"""Prints a representation of the tree to standard output.
Subclass and override repr_of to provide more information
about nodes. """
def __init__(self):
TreeVisitor.__init__(self)
self._indent = ""
def indent(self):
self._indent += " "
def unindent(self):
self._indent = self._indent[:-2]
def __call__(self, tree, phase=None):
print("Parse tree dump at phase '%s'" % phase)
self._visit(tree)
return tree
# Don't do anything about process_list; the default gives
# nice-looking name[idx] nodes which will visually appear
# under the parent node, not displaying the list itself in
# the hierarchy.
def visit_Node(self, node):
if len(self.access_path) == 0:
name = "(root)"
else:
parent, attr, idx = self.access_path[-1]
if idx is not None:
name = "%s[%d]" % (attr, idx)
else:
name = attr
print("%s- %s: %s" % (self._indent, name, self.repr_of(node)))
self.indent()
self.visitchildren(node)
self.unindent()
return node
def repr_of(self, node):
if node is None:
return "(none)"
else:
result = node.__class__.__name__
if isinstance(node, ExprNodes.NameNode):
result += "(type=%s, name=\"%s\")" % (repr(node.type), node.name)
elif isinstance(node, Nodes.DefNode):
result += "(name=\"%s\")" % node.name
elif isinstance(node, ExprNodes.ExprNode):
t = node.type
result += "(type=%s)" % repr(t)
elif node.pos:
pos = node.pos
path = pos[0].get_description()
if '/' in path:
path = path.split('/')[-1]
if '\\' in path:
path = path.split('\\')[-1]
result += "(pos=(%s:%s:%s))" % (path, pos[1], pos[2])
return result
if __name__ == "__main__":
import doctest
doctest.testmod()
|
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.libraries.script import Script
# from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
stack_name = default("/hostLevelParams/stack_name", None)
version = default("/commandParams/version", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
# ranger kms pid
user_group = config['configurations']['cluster-env']['user_group']
ranger_kms_pid_dir = default("/configurations/kms-env/ranger_kms_pid_dir", "/var/run/ranger_kms")
ranger_kms_pid_file = format('{ranger_kms_pid_dir}/rangerkms.pid')
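# Note: resource_management's format() interpolates {name} placeholders from
# the caller's scope, so the line above resolves ranger_kms_pid_dir defined
# just before it (e.g. '/var/run/ranger_kms/rangerkms.pid' with the default).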
kms_home = '/usr/lib/ranger-kms'
kms_conf_dir = '/etc/ranger-kms/conf'
kms_log_dir = default("/configurations/kms-env/kms_log_dir", "/var/log/ranger/kms")
java_home = config['hostLevelParams']['java_home']
kms_user = default("/configurations/kms-env/kms_user", "kms")
kms_group = default("/configurations/kms-env/kms_group", "kms")
jdk_location = config['hostLevelParams']['jdk_location']
kms_log4j = config['configurations']['kms-log4j']['content']
# ranger host
ranger_admin_hosts = config['clusterHostInfo']['ranger_admin_hosts']
has_ranger_admin = len(ranger_admin_hosts) > 0
kms_host = config['clusterHostInfo']['ranger_kms_server_hosts'][0]
kms_port = config['configurations']['kms-env']['kms_port']
create_db_user = config['configurations']['kms-env']['create_db_user']
# kms properties
db_flavor = (config['configurations']['kms-properties']['DB_FLAVOR']).lower()
db_host = config['configurations']['kms-properties']['db_host']
db_name = config['configurations']['kms-properties']['db_name']
db_user = config['configurations']['kms-properties']['db_user']
db_password = unicode(config['configurations']['kms-properties']['db_password'])
kms_master_key_password = unicode(config['configurations']['kms-properties']['KMS_MASTER_KEY_PASSWD'])
credential_provider_path = config['configurations']['dbks-site']['ranger.ks.jpa.jdbc.credential.provider.path']
jdbc_alias = config['configurations']['dbks-site']['ranger.ks.jpa.jdbc.credential.alias']
masterkey_alias = config['configurations']['dbks-site']['ranger.ks.masterkey.credential.alias']
repo_name = str(config['clusterName']) + '_kms'
cred_lib_path = os.path.join(kms_home,"cred","lib","*")
cred_setup_prefix = (format('{kms_home}/ranger_credential_helper.py'), '-l', cred_lib_path)
credential_file = format('/etc/ranger/{repo_name}/cred.jceks')
if has_ranger_admin:
policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
xa_audit_db_flavor = (config['configurations']['admin-properties']['DB_FLAVOR']).lower()
xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name']
xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user']
xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password']
xa_db_host = config['configurations']['admin-properties']['db_host']
admin_uname = config['configurations']['ranger-env']['admin_username']
admin_password = config['configurations']['ranger-env']['admin_password']
ambari_ranger_admin = config['configurations']['ranger-env']['ranger_admin_username']
ambari_ranger_password = config['configurations']['ranger-env']['ranger_admin_password']
admin_uname_password = format("{admin_uname}:{admin_password}")
ranger_audit_solr_urls = config['configurations']['ranger-site']['ranger.audit.solr.urls']
java_share_dir = '/usr/share/java'
if db_flavor == 'mysql':
jdbc_symlink_name = "mysql-connector-java.jar"
jdbc_jar_name = "mysql-connector-java.jar"
db_jdbc_url = format('jdbc:log4jdbc:mysql://{db_host}/{db_name}')
db_jdbc_driver = "net.sf.log4jdbc.DriverSpy"
jdbc_dialect = "org.eclipse.persistence.platform.database.MySQLPlatform"
elif db_flavor == 'oracle':
jdbc_jar_name = "ojdbc6.jar"
jdbc_symlink_name = "oracle-jdbc-driver.jar"
colon_count = db_host.count(':')
if colon_count == 2 or colon_count == 0:
db_jdbc_url = format('jdbc:oracle:thin:@{db_host}')
else:
db_jdbc_url = format('jdbc:oracle:thin:@//{db_host}')
db_jdbc_driver = "oracle.jdbc.OracleDriver"
jdbc_dialect = "org.eclipse.persistence.platform.database.OraclePlatform"
elif db_flavor == 'postgres':
jdbc_jar_name = "postgresql.jar"
jdbc_symlink_name = "postgres-jdbc-driver.jar"
db_jdbc_url = format('jdbc:postgresql://{db_host}/{db_name}')
db_jdbc_driver = "org.postgresql.Driver"
jdbc_dialect = "org.eclipse.persistence.platform.database.PostgreSQLPlatform"
elif db_flavor == 'mssql':
jdbc_jar_name = "sqljdbc4.jar"
jdbc_symlink_name = "mssql-jdbc-driver.jar"
db_jdbc_url = format('jdbc:sqlserver://{db_host};databaseName={db_name}')
db_jdbc_driver = "com.microsoft.sqlserver.jdbc.SQLServerDriver"
jdbc_dialect = "org.eclipse.persistence.platform.database.SQLServerPlatform"
elif db_flavor == 'sqla':
jdbc_jar_name = "sajdbc4.jar"
jdbc_symlink_name = "sqlanywhere-jdbc-driver.tar.gz"
db_jdbc_url = format('jdbc:sqlanywhere:database={db_name};host={db_host}')
db_jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
jdbc_dialect = "org.eclipse.persistence.platform.database.SQLAnywherePlatform"
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
ews_lib_jar_path = format("{kms_home}/ews/webapp/lib/{jdbc_jar_name}")
# TODO: remove this hard-coded path
ews_classes_conf = format("{kms_home}/ews/webapp/WEB-INF/classes/conf")
if db_flavor == 'sqla':
downloaded_custom_connector = format("{tmp_dir}/sqla-client-jdbc.tar.gz")
jar_path_in_archive = format("{tmp_dir}/sqla-client-jdbc/java/{jdbc_jar_name}")
libs_path_in_archive = format("{tmp_dir}/sqla-client-jdbc/native/lib64/*")
jdbc_libs_dir = format("{kms_home}/native/lib64")
ld_library_path = format("{jdbc_libs_dir}")
if has_ranger_admin:
if xa_audit_db_flavor == 'mysql':
jdbc_symlink = "mysql-connector-java.jar"
jdbc_jar = "mysql-connector-java.jar"
audit_jdbc_url = format('jdbc:mysql://{xa_db_host}/{xa_audit_db_name}')
jdbc_driver = "com.mysql.jdbc.Driver"
elif xa_audit_db_flavor == 'oracle':
jdbc_jar = "ojdbc6.jar"
jdbc_symlink = "oracle-jdbc-driver.jar"
colon_count = xa_db_host.count(':')
if colon_count == 2 or colon_count == 0:
audit_jdbc_url = format('jdbc:oracle:thin:@{xa_db_host}')
else:
audit_jdbc_url = format('jdbc:oracle:thin:@//{xa_db_host}')
jdbc_driver = "oracle.jdbc.OracleDriver"
elif xa_audit_db_flavor == 'postgres':
jdbc_jar = "postgresql.jar"
jdbc_symlink = "postgres-jdbc-driver.jar"
audit_jdbc_url = format('jdbc:postgresql://{xa_db_host}/{xa_audit_db_name}')
jdbc_driver = "org.postgresql.Driver"
elif xa_audit_db_flavor == 'mssql':
jdbc_jar = "sqljdbc4.jar"
jdbc_symlink = "mssql-jdbc-driver.jar"
audit_jdbc_url = format('jdbc:sqlserver://{xa_db_host};databaseName={xa_audit_db_name}')
jdbc_driver = "com.microsoft.sqlserver.jdbc.SQLServerDriver"
elif xa_audit_db_flavor == 'sqla':
jdbc_jar = "sajdbc4.jar"
jdbc_symlink = "sqlanywhere-jdbc-driver.tar.gz"
audit_jdbc_url = format('jdbc:sqlanywhere:database={xa_audit_db_name};host={xa_db_host}')
jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
downloaded_connector_path = format("{tmp_dir}/{jdbc_jar}")
driver_source = format("{jdk_location}/{jdbc_symlink}")
driver_target = format("{java_share_dir}/{jdbc_jar}")
repo_config_username = config['configurations']['kms-properties']['REPOSITORY_CONFIG_USERNAME']
repo_config_password = unicode(config['configurations']['kms-properties']['REPOSITORY_CONFIG_PASSWORD'])
kms_plugin_config = {
'username' : repo_config_username,
'password' : repo_config_password,
'provider' : format('kms://http@{kms_host}:{kms_port}/kms')
}
kms_ranger_plugin_repo = {
'isEnabled' : 'true',
'configs' : kms_plugin_config,
'description' : 'kms repo',
'name' : repo_name,
'type' : 'kms'
}
xa_audit_db_is_enabled = config['configurations']['ranger-kms-audit']['xasecure.audit.destination.db']
ssl_keystore_password = unicode(config['configurations']['ranger-kms-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password'])
ssl_truststore_password = unicode(config['configurations']['ranger-kms-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password'])
# For SQLA, explicitly disable audit to DB for Ranger
if xa_audit_db_flavor == 'sqla':
xa_audit_db_is_enabled = False
current_host = config['hostname']
ranger_kms_hosts = config['clusterHostInfo']['ranger_kms_server_hosts']
if current_host in ranger_kms_hosts:
kms_host = current_host
check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
ranger_kms_jdbc_connection_url = config['configurations']['dbks-site']['ranger.ks.jpa.jdbc.url']
ranger_kms_jdbc_driver = config['configurations']['dbks-site']['ranger.ks.jpa.jdbc.driver']
|
|
# Copyright (c) The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test cases for the barbican key manager.
"""
import calendar
from unittest import mock
from barbicanclient import exceptions as barbican_exceptions
from keystoneauth1 import identity
from keystoneauth1 import service_token
from oslo_context import context
from oslo_utils import timeutils
from castellan.common import exception
from castellan.common.objects import symmetric_key as sym_key
from castellan.key_manager import barbican_key_manager
from castellan.tests.unit.key_manager import test_key_manager
class BarbicanKeyManagerTestCase(test_key_manager.KeyManagerTestCase):
def _create_key_manager(self):
return barbican_key_manager.BarbicanKeyManager(self.conf)
def setUp(self):
super(BarbicanKeyManagerTestCase, self).setUp()
# Create fake auth_token
self.ctxt = mock.Mock(spec=context.RequestContext)
self.ctxt.auth_token = "fake_token"
self.ctxt.project_name = "foo"
self.ctxt.project_domain_name = "foo"
# Create mock barbican client
self._build_mock_barbican()
# Create a key_id, secret_ref, pre_hex, and hex to use
self.key_id = "d152fa13-2b41-42ca-a934-6c21566c0f40"
self.secret_ref = ("http://host:9311/v1/secrets/" + self.key_id)
self.pre_hex = "AIDxQp2++uAbKaTVDMXFYIu8PIugJGqkK0JLqkU0rhY="
self.hex = ("0080f1429dbefae01b29a4d50cc5c5608bbc3c8ba0246aa42b424baa4"
"534ae16")
self.key_mgr._base_url = "http://host:9311/v1/"
self.key_mgr.conf.barbican.number_of_retries = 3
self.key_mgr.conf.barbican.retry_delay = 1
self.addCleanup(self._restore)
def _restore(self):
    # Restore sym_key.SymmetricKey if a test replaced it.
    original_key = getattr(self, 'original_key', None)
    if original_key is not None:
        sym_key.SymmetricKey = original_key
def _build_mock_barbican(self):
self.mock_barbican = mock.MagicMock(name='mock_barbican')
# Set commonly used methods
self.get = self.mock_barbican.secrets.get
self.delete = self.mock_barbican.secrets.delete
self.store = self.mock_barbican.secrets.store
self.create = self.mock_barbican.secrets.create
self.list = self.mock_barbican.secrets.list
self.key_mgr._barbican_client = self.mock_barbican
self.key_mgr._current_context = self.ctxt
def test_barbican_endpoint(self):
endpoint_data = mock.Mock()
endpoint_data.url = 'http://localhost:9311'
auth = mock.Mock(spec=['service_catalog'])
auth.service_catalog.endpoint_data_for.return_value = endpoint_data
endpoint = self.key_mgr._get_barbican_endpoint(auth, mock.Mock())
self.assertEqual(endpoint, 'http://localhost:9311')
auth.service_catalog.endpoint_data_for.assert_called_once_with(
service_type='key-manager', interface='public',
region_name=None)
def test_barbican_endpoint_with_endpoint_type(self):
self.key_mgr.conf.barbican.barbican_endpoint_type = 'internal'
endpoint_data = mock.Mock()
endpoint_data.url = 'http://localhost:9311'
auth = mock.Mock(spec=['service_catalog'])
auth.service_catalog.endpoint_data_for.return_value = endpoint_data
endpoint = self.key_mgr._get_barbican_endpoint(auth, mock.Mock())
self.assertEqual(endpoint, 'http://localhost:9311')
auth.service_catalog.endpoint_data_for.assert_called_once_with(
service_type='key-manager', interface='internal',
region_name=None)
def test_barbican_endpoint_with_region_name(self):
self.key_mgr.conf.barbican.barbican_region_name = 'regionOne'
endpoint_data = mock.Mock()
endpoint_data.url = 'http://localhost:9311'
auth = mock.Mock(spec=['service_catalog'])
auth.service_catalog.endpoint_data_for.return_value = endpoint_data
endpoint = self.key_mgr._get_barbican_endpoint(auth, mock.Mock())
self.assertEqual(endpoint, 'http://localhost:9311')
auth.service_catalog.endpoint_data_for.assert_called_once_with(
service_type='key-manager', interface='public',
region_name='regionOne')
def test_barbican_endpoint_from_config(self):
self.key_mgr.conf.barbican.barbican_endpoint = 'http://localhost:9311'
endpoint = self.key_mgr._get_barbican_endpoint(
mock.Mock(), mock.Mock())
self.assertEqual(endpoint, 'http://localhost:9311')
def test_barbican_endpoint_by_get_endpoint(self):
auth = mock.Mock(spec=['get_endpoint'])
sess = mock.Mock()
auth.get_endpoint = mock.Mock(return_value='http://localhost:9311')
endpoint = self.key_mgr._get_barbican_endpoint(auth, sess)
self.assertEqual(endpoint, 'http://localhost:9311')
auth.get_endpoint.assert_called_once_with(
sess, service_type='key-manager', interface='public',
region_name=None)
def test_barbican_endpoint_by_get_endpoint_with_endpoint_type(self):
self.key_mgr.conf.barbican.barbican_endpoint_type = 'internal'
auth = mock.Mock(spec=['get_endpoint'])
sess = mock.Mock()
auth.get_endpoint = mock.Mock(return_value='http://localhost:9311')
endpoint = self.key_mgr._get_barbican_endpoint(auth, sess)
self.assertEqual(endpoint, 'http://localhost:9311')
auth.get_endpoint.assert_called_once_with(
sess, service_type='key-manager', interface='internal',
region_name=None)
def test_barbican_endpoint_by_get_endpoint_with_region_name(self):
self.key_mgr.conf.barbican.barbican_region_name = 'regionOne'
auth = mock.Mock(spec=['get_endpoint'])
sess = mock.Mock()
auth.get_endpoint = mock.Mock(return_value='http://localhost:9311')
endpoint = self.key_mgr._get_barbican_endpoint(auth, sess)
self.assertEqual(endpoint, 'http://localhost:9311')
auth.get_endpoint.assert_called_once_with(
sess, service_type='key-manager', interface='public',
region_name='regionOne')
def test__get_keystone_auth(self):
auth = self.key_mgr._get_keystone_auth(self.ctxt)
self.assertIsInstance(auth, identity.Token)
def test__get_keystone_auth_service_user(self):
self.key_mgr.conf.barbican.send_service_user_token = True
auth = self.key_mgr._get_keystone_auth(self.ctxt)
self.assertIsInstance(auth, service_token.ServiceTokenAuthWrapper)
def test_base_url_old_version(self):
version = "v1"
self.key_mgr.conf.barbican.barbican_api_version = version
endpoint = "http://localhost:9311"
base_url = self.key_mgr._create_base_url(mock.Mock(),
mock.Mock(),
endpoint)
self.assertEqual(endpoint + "/" + version, base_url)
def test_base_url_new_version(self):
version = "v1"
self.key_mgr.conf.barbican.barbican_api_version = version
endpoint = "http://localhost/key_manager"
base_url = self.key_mgr._create_base_url(mock.Mock(),
mock.Mock(),
endpoint)
self.assertEqual(endpoint + "/" + version, base_url)
def test_base_url_service_catalog(self):
endpoint_data = mock.Mock()
endpoint_data.api_version = 'v321'
auth = mock.Mock(spec=['service_catalog'])
auth.service_catalog.endpoint_data_for.return_value = endpoint_data
endpoint = "http://localhost/key_manager"
base_url = self.key_mgr._create_base_url(auth,
mock.Mock(),
endpoint)
self.assertEqual(endpoint + "/" + endpoint_data.api_version, base_url)
auth.service_catalog.endpoint_data_for.assert_called_once_with(
service_type='key-manager', interface='public',
region_name=None)
def test_base_url_service_catalog_with_endpoint_type(self):
self.key_mgr.conf.barbican.barbican_endpoint_type = 'internal'
endpoint_data = mock.Mock()
endpoint_data.api_version = 'v321'
auth = mock.Mock(spec=['service_catalog'])
auth.service_catalog.endpoint_data_for.return_value = endpoint_data
endpoint = "http://localhost/key_manager"
base_url = self.key_mgr._create_base_url(auth,
mock.Mock(),
endpoint)
self.assertEqual(endpoint + "/" + endpoint_data.api_version, base_url)
auth.service_catalog.endpoint_data_for.assert_called_once_with(
service_type='key-manager', interface='internal',
region_name=None)
def test_base_url_service_catalog_with_region_name(self):
self.key_mgr.conf.barbican.barbican_region_name = 'regionOne'
endpoint_data = mock.Mock()
endpoint_data.api_version = 'v321'
auth = mock.Mock(spec=['service_catalog'])
auth.service_catalog.endpoint_data_for.return_value = endpoint_data
endpoint = "http://localhost/key_manager"
base_url = self.key_mgr._create_base_url(auth,
mock.Mock(),
endpoint)
self.assertEqual(endpoint + "/" + endpoint_data.api_version, base_url)
auth.service_catalog.endpoint_data_for.assert_called_once_with(
service_type='key-manager', interface='public',
region_name='regionOne')
def test_base_url_raise_exception(self):
auth = mock.Mock(spec=['get_discovery'])
sess = mock.Mock()
discovery = mock.Mock()
discovery.raw_version_data = mock.Mock(return_value=[])
auth.get_discovery = mock.Mock(return_value=discovery)
endpoint = "http://localhost/key_manager"
self.assertRaises(exception.KeyManagerError,
self.key_mgr._create_base_url,
auth, sess, endpoint)
auth.get_discovery.assert_called_once_with(sess, url=endpoint)
self.assertEqual(1, discovery.raw_version_data.call_count)
def test_base_url_get_discovery(self):
version = 'v100500'
auth = mock.Mock(spec=['get_discovery'])
sess = mock.Mock()
discovery = mock.Mock()
auth.get_discovery = mock.Mock(return_value=discovery)
discovery.raw_version_data = mock.Mock(return_value=[{'id': version}])
endpoint = "http://localhost/key_manager"
base_url = self.key_mgr._create_base_url(auth,
sess,
endpoint)
self.assertEqual(endpoint + "/" + version, base_url)
auth.get_discovery.assert_called_once_with(sess, url=endpoint)
self.assertEqual(1, discovery.raw_version_data.call_count)
def test_create_key(self):
# Create order_ref_url and assign return value
order_ref_url = ("http://localhost:9311/v1/orders/"
"4fe939b7-72bc-49aa-bd1e-e979589858af")
key_order = mock.Mock()
self.mock_barbican.orders.create_key.return_value = key_order
key_order.submit.return_value = order_ref_url
# Create order and assign return value
order = mock.Mock()
order.secret_ref = self.secret_ref
order.status = 'ACTIVE'
self.mock_barbican.orders.get.return_value = order
# Create the key, get the UUID
returned_uuid = self.key_mgr.create_key(self.ctxt,
algorithm='AES',
length=256)
self.mock_barbican.orders.get.assert_called_once_with(order_ref_url)
self.assertEqual(self.key_id, returned_uuid)
def test_create_key_null_context(self):
self.key_mgr._barbican_client = None
self.assertRaises(exception.Forbidden,
self.key_mgr.create_key, None, 'AES', 256)
def test_create_key_with_error(self):
key_order = mock.Mock()
self.mock_barbican.orders.create_key.return_value = key_order
key_order.submit = mock.Mock(
side_effect=barbican_exceptions.HTTPClientError('test error'))
self.assertRaises(exception.KeyManagerError,
self.key_mgr.create_key, self.ctxt, 'AES', 256)
def test_create_key_pair(self):
# Create order_ref_url and assign return value
order_ref_url = ("http://localhost:9311/v1/orders/"
"f45bf211-a917-4ead-9aec-1c91e52609df")
asym_order = mock.Mock()
self.mock_barbican.orders.create_asymmetric.return_value = asym_order
asym_order.submit.return_value = order_ref_url
# Create order and assign return value
order = mock.Mock()
container_id = "16caa8f4-dd34-4fb3-bf67-6c20533a30e4"
container_ref = ("http://localhost:9311/v1/containers/" + container_id)
order.container_ref = container_ref
order.status = 'ACTIVE'
self.mock_barbican.orders.get.return_value = order
# Create container and assign return value
container = mock.Mock()
public_key_id = "43ed09c3-e551-4c24-b612-e619abe9b534"
pub_key_ref = ("http://localhost:9311/v1/secrets/" + public_key_id)
private_key_id = "32a0bc60-4e10-4269-9f17-f49767e99586"
priv_key_ref = ("http://localhost:9311/v1/secrets/" + private_key_id)
container.secret_refs = {'public_key': pub_key_ref,
'private_key': priv_key_ref}
self.mock_barbican.containers.get.return_value = container
# Create the keys, get the UUIDs
returned_private_uuid, returned_public_uuid = (
self.key_mgr.create_key_pair(self.ctxt,
algorithm='RSA',
length=2048))
self.mock_barbican.orders.get.assert_called_once_with(order_ref_url)
self.mock_barbican.containers.get.assert_called_once_with(
container_ref)
self.assertEqual(private_key_id, returned_private_uuid)
self.assertEqual(public_key_id, returned_public_uuid)
def test_create_key_pair_null_context(self):
self.key_mgr._barbican_client = None
self.assertRaises(exception.Forbidden,
self.key_mgr.create_key_pair, None, 'RSA', 2048)
def test_create_key_pair_with_error(self):
asym_order = mock.Mock()
self.mock_barbican.orders.create_asymmetric.return_value = asym_order
asym_order.submit = mock.Mock(
side_effect=barbican_exceptions.HTTPClientError('test error'))
self.assertRaises(exception.KeyManagerError,
self.key_mgr.create_key_pair, self.ctxt, 'RSA', 2048)
def test_delete_null_context(self):
self.key_mgr._barbican_client = None
self.assertRaises(exception.Forbidden,
self.key_mgr.delete, None, self.key_id)
def test_delete_key(self):
self.key_mgr.delete(self.ctxt, self.key_id)
self.delete.assert_called_once_with(self.secret_ref)
def test_delete_unknown_key(self):
self.assertRaises(exception.KeyManagerError,
self.key_mgr.delete, self.ctxt, None)
def test_delete_with_error(self):
self.mock_barbican.secrets.delete = mock.Mock(
side_effect=barbican_exceptions.HTTPClientError('test error'))
self.assertRaises(exception.KeyManagerError,
self.key_mgr.delete, self.ctxt, self.key_id)
def test_get_key(self):
original_secret_metadata = mock.Mock()
original_secret_metadata.algorithm = mock.sentinel.alg
original_secret_metadata.bit_length = mock.sentinel.bit
original_secret_metadata.secret_type = 'symmetric'
key_id = "43ed09c3-e551-4c24-b612-e619abe9b534"
key_ref = ("http://localhost:9311/v1/secrets/" + key_id)
original_secret_metadata.secret_ref = key_ref
created = timeutils.parse_isotime('2015-10-20 18:51:17+00:00')
original_secret_metadata.created = created
created_formatted = timeutils.parse_isotime(str(created))
created_posix = calendar.timegm(created_formatted.timetuple())
key_name = 'my key'
original_secret_metadata.name = key_name
original_secret_data = b'test key'
original_secret_metadata.payload = original_secret_data
self.mock_barbican.secrets.get.return_value = original_secret_metadata
key = self.key_mgr.get(self.ctxt, self.key_id)
self.get.assert_called_once_with(self.secret_ref)
self.assertEqual(key_id, key.id)
self.assertEqual(key_name, key.name)
self.assertEqual(original_secret_data, key.get_encoded())
self.assertEqual(created_posix, key.created)
def test_get_null_context(self):
self.key_mgr._barbican_client = None
self.assertRaises(exception.Forbidden,
self.key_mgr.get, None, self.key_id)
def test_get_unknown_key(self):
self.assertRaises(exception.KeyManagerError,
self.key_mgr.get, self.ctxt, None)
def test_get_with_error(self):
self.mock_barbican.secrets.get = mock.Mock(
side_effect=barbican_exceptions.HTTPClientError('test error'))
self.assertRaises(exception.KeyManagerError,
self.key_mgr.get, self.ctxt, self.key_id)
def test_store_key(self):
# Create Key to store
secret_key = b'\x01\x02\xA0\xB3'
key_length = len(secret_key) * 8
_key = sym_key.SymmetricKey('AES',
key_length,
secret_key)
# Define the return values
secret = mock.Mock()
self.create.return_value = secret
secret.store.return_value = self.secret_ref
# Store the Key
returned_uuid = self.key_mgr.store(self.ctxt, _key)
self.create.assert_called_once_with(algorithm='AES',
bit_length=key_length,
name=None,
payload=secret_key,
secret_type='symmetric')
self.assertEqual(self.key_id, returned_uuid)
def test_store_key_with_name(self):
# Create Key to store
secret_key = b'\x01\x02\xA0\xB3'
key_length = len(secret_key) * 8
secret_name = 'My Secret'
_key = sym_key.SymmetricKey('AES',
key_length,
secret_key,
secret_name)
# Define the return values
secret = mock.Mock()
self.create.return_value = secret
secret.store.return_value = self.secret_ref
# Store the Key
returned_uuid = self.key_mgr.store(self.ctxt, _key)
self.create.assert_called_once_with(algorithm='AES',
bit_length=key_length,
payload=secret_key,
name=secret_name,
secret_type='symmetric')
self.assertEqual(self.key_id, returned_uuid)
def test_store_null_context(self):
self.key_mgr._barbican_client = None
self.assertRaises(exception.Forbidden,
self.key_mgr.store, None, None)
def test_store_with_error(self):
self.mock_barbican.secrets.create = mock.Mock(
side_effect=barbican_exceptions.HTTPClientError('test error'))
secret_key = b'\x01\x02\xA0\xB3'
key_length = len(secret_key) * 8
_key = sym_key.SymmetricKey('AES',
key_length,
secret_key)
self.assertRaises(exception.KeyManagerError,
self.key_mgr.store, self.ctxt, _key)
def test_get_active_order(self):
order_ref_url = ("http://localhost:9311/v1/orders/"
"4fe939b7-72bc-49aa-bd1e-e979589858af")
pending_order = mock.Mock()
pending_order.status = 'PENDING'
pending_order.order_ref = order_ref_url
active_order = mock.Mock()
active_order.secret_ref = self.secret_ref
active_order.status = 'ACTIVE'
active_order.order_ref = order_ref_url
self.mock_barbican.orders.get.side_effect = [pending_order,
active_order]
self.key_mgr._get_active_order(self.mock_barbican, order_ref_url)
self.assertEqual(2, self.mock_barbican.orders.get.call_count)
calls = [mock.call(order_ref_url), mock.call(order_ref_url)]
self.mock_barbican.orders.get.assert_has_calls(calls)
def test_get_active_order_timeout(self):
order_ref_url = ("http://localhost:9311/v1/orders/"
"4fe939b7-72bc-49aa-bd1e-e979589858af")
number_of_retries = self.key_mgr.conf.barbican.number_of_retries
pending_order = mock.Mock()
pending_order.status = 'PENDING'
pending_order.order_ref = order_ref_url
self.mock_barbican.orders.get.return_value = pending_order
self.assertRaises(exception.KeyManagerError,
self.key_mgr._get_active_order,
self.mock_barbican,
order_ref_url)
self.assertEqual(number_of_retries + 1,
self.mock_barbican.orders.get.call_count)
def test_get_active_order_error(self):
order_ref_url = ("http://localhost:9311/v1/orders/"
"4fe939b7-72bc-49aa-bd1e-e979589858af")
error_order = mock.Mock()
error_order.status = 'ERROR'
error_order.order_ref = order_ref_url
error_order.error_status_code = u"500"
error_order.error_reason = u"Test Error"
self.mock_barbican.orders.get.return_value = error_order
self.assertRaises(exception.KeyManagerError,
self.key_mgr._get_active_order,
self.mock_barbican,
order_ref_url)
self.assertEqual(1, self.mock_barbican.orders.get.call_count)
def test_list_null_context(self):
self.key_mgr._barbican_client = None
self.assertRaises(exception.Forbidden,
self.key_mgr.list, None)
def test_list(self):
original_secret_metadata = mock.Mock()
original_secret_metadata.algorithm = mock.sentinel.alg
original_secret_metadata.bit_length = mock.sentinel.bit
original_secret_metadata.secret_type = 'symmetric'
key_id = "43ed09c3-e551-4c24-b612-e619abe9b534"
key_ref = ("http://localhost:9311/v1/secrets/" + key_id)
original_secret_metadata.secret_ref = key_ref
created = timeutils.parse_isotime('2015-10-20 18:51:17+00:00')
original_secret_metadata.created = created
created_formatted = timeutils.parse_isotime(str(created))
created_posix = calendar.timegm(created_formatted.timetuple())
key_name = 'my key'
original_secret_metadata.name = key_name
original_secret_data = b'test key'
original_secret_metadata.payload = original_secret_data
self.mock_barbican.secrets.list.return_value = (
[original_secret_metadata])
# check metadata_only = False
key_list = self.key_mgr.list(self.ctxt)
self.assertEqual(1, len(key_list))
key = key_list[0]
self.list.assert_called_once()
self.assertEqual(key_id, key.id)
self.assertEqual(key_name, key.name)
self.assertEqual(original_secret_data, key.get_encoded())
self.assertEqual(created_posix, key.created)
self.list.reset_mock()
# check metadata_only = True
key_list = self.key_mgr.list(self.ctxt, metadata_only=True)
self.assertEqual(1, len(key_list))
key = key_list[0]
self.list.assert_called_once()
self.assertEqual(key_name, key.name)
self.assertIsNone(key.get_encoded())
self.assertEqual(created_posix, key.created)
def test_list_with_error(self):
self.mock_barbican.secrets.list = mock.Mock(
side_effect=barbican_exceptions.HTTPClientError('test error'))
self.assertRaises(exception.KeyManagerError,
self.key_mgr.list, self.ctxt)
def test_list_with_invalid_object_type(self):
self.assertRaises(exception.KeyManagerError,
self.key_mgr.list, self.ctxt, "invalid_type")
def test_list_options_for_discovery(self):
opts = self.key_mgr.list_options_for_discovery()
expected_sections = ['barbican', 'barbican_service_user']
self.assertEqual(expected_sections, [section[0] for section in opts])
barbican_opts = [opt.name for opt in opts[0][1]]
# From Castellan opts.
self.assertIn('barbican_endpoint', barbican_opts)
barbican_service_user_opts = [opt.name for opt in opts[1][1]]
# From session opts.
self.assertIn('cafile', barbican_service_user_opts)
# From auth common opts.
self.assertIn('auth_section', barbican_service_user_opts)
|
|
"""Utilities for parsing content from MWS responses.
XML-to-dict code borrowed from https://github.com/timotheus/ebaysdk-python
"""
from io import BytesIO
from zipfile import ZipFile
import re
import warnings
import xml.etree.ElementTree as ET
# Removed top-level import to correct circular imports
# (we're in backport territory, these things happen)
# from mws.mws import MWSError
from mws.future_utils.crypto import calc_md5
from mws.future_utils.deprecation import RemovedInPAM11Warning
from mws.future_utils.xml import remove_xml_namespaces
### DEPRECATED - REMOVE IN 1.1 ###
class ObjectDict(dict):
"""Extension of dict to allow accessing keys as attributes.
Example:
>>> a = ObjectDict()
>>> a.fish = 'fish'
>>> a['fish']
'fish'
>>> a['water'] = 'water'
>>> a.water
'water'
"""
def __init__(self, initd=None):
warnings.warn(
(
"'ObjectDict' is deprecated. "
"Use 'mws.utils.parsers.DotDict' instead. "
),
RemovedInPAM11Warning,
)
if initd is None:
initd = {}
dict.__init__(self, initd)
def __getattr__(self, item):
"""Allow access to dict keys as though they were attributes."""
return self.__getitem__(item)
def __setattr__(self, item, value):
"""Allows setting dict keys like attributes, opposite of `__getattr__`."""
self.__setitem__(item, value)
def _value_or_node(self, node):
"""If `node` contains only a single 'value' key, returns the raw value.
Otherwise, returns the node unchanged.
"""
if isinstance(node, self.__class__) and "value" in node and len(node) == 1:
return node["value"]
return node
def __getitem__(self, key):
"""Returns single-value nodes as the raw value, and all else unchanged."""
node = super().__getitem__(key)
return self._value_or_node(node)
def __setstate__(self, item):
return False
def __iter__(self):
"""Nodes are iterable be default, even with just one child node.
Returns non-list nodes wrapped in an iterator, so they can be iterated
and return the child node.
"""
# If the parser finds multiple sibling nodes with the same name
# (under the same parent node), that key will hold a list of DotDicts.
# However, if the same node comes back with only one child in other
# responses, downstream code expecting that list would otherwise iterate
# the single node's keys instead of its children.
# So, when iteration is required, we return single nodes as an iterator
# wrapping that single instance.
if not isinstance(self, list):
return iter([self])
return self
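# A minimal sketch of the iteration behavior described above (hypothetical data):
#     node = ObjectDict({'Order': ObjectDict({'value': '123'})})
#     [child for child in node]    # -> [node]; a single node iterates exactly once,
#                                  # matching code written against a list of siblings.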
def get(self, key, default=None):
"""Access a node like `dict.get`, including default values."""
try:
return self.__getitem__(key)
except KeyError:
return default
### DEPRECATED - REMOVE IN 1.1 ###
class XML2Dict(object):
def __init__(self):
warnings.warn(
(
"'XML2Dict' is deprecated. "
"XML parsing is now performed by dependency 'xmltodict', "
"using 'xmltodict.parse' "
"(See module 'mws.utils.xml' for details). "
),
RemovedInPAM11Warning,
)
def _parse_node(self, node):
node_tree = ObjectDict()
# Save attributes and text; assume no child element shares the same name
if node.text and node.text.strip():
# Only assign a value when both the text and its stripped version are
# truthy (a falsey .strip() excludes whitespace-only values like "\n  ")
node_tree.value = node.text
for key, val in node.attrib.items():
# if val.strip():
key, val = self._namespace_split(key, ObjectDict({"value": val}))
node_tree[key] = val
# Save children
for child in node:
tag, tree = self._namespace_split(child.tag, self._parse_node(child))
if tag not in node_tree: # the first time, so store it in dict
node_tree[tag] = tree
continue
old = node_tree[tag]
if not isinstance(old, list):
node_tree.pop(tag)
node_tree[tag] = [old] # multi times, so change old dict to a list
node_tree[tag].append(tree) # add the new one
return node_tree
def _namespace_split(self, tag, value):
"""Split the tag '{http://cs.sfsu.edu/csc867/myscheduler}patients'
ns = http://cs.sfsu.edu/csc867/myscheduler
name = patients
"""
result = re.compile(r"\{(.*)\}(.*)").search(tag)
if result:
value.namespace, tag = result.groups()
return (tag, value)
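# Illustrative sketch of the split (hypothetical namespaced tag):
#     tag, val = XML2Dict()._namespace_split(
#         '{http://example.com/ns}patients', ObjectDict({'value': 'x'}))
#     # tag == 'patients'; val.namespace == 'http://example.com/ns'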
def parse(self, filename):
"""Parse XML file to a dict."""
with open(filename, "r") as file_:
return self.fromstring(file_.read())
def fromstring(self, str_):
"""Convert XML-formatted string to an DotDict."""
text = ET.fromstring(str_)
root_tag, root_tree = self._namespace_split(text.tag, self._parse_node(text))
return ObjectDict({root_tag: root_tree})
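# Hedged usage sketch (hypothetical XML; instantiation emits the
# deprecation warning above):
#     tree = XML2Dict().fromstring("<Order><Id>123</Id></Order>")
#     tree['Order']['Id']    # -> '123' (single-'value' nodes collapse on access)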
### DEPRECATED - REMOVE IN 1.1 ###
class DictWrapper(object):
"""Converts XML data to a parsed response object as a tree of `DotDict`s.
Use `.parsed` for direct access to those contents, and `.original` for
the original XML document string.
"""
# TODO create a base class for DictWrapper and DataWrapper with all the keys we expect in responses.
# This will make it easier to use either class in place of the other.
# Either this, or pile everything into DataWrapper and make it able to handle all cases.
def __init__(self, xml, result_key=None):
warnings.warn(
(
"DictWrapper is deprecated. "
"For parsing 'request.Response' objects, "
"use 'mws.utils.parsers.MWSResponse'. "
"For parsing raw XML content, "
"see module 'mws.utils.xml'. "
),
RemovedInPAM11Warning,
)
if isinstance(xml, bytes):
try:
xml = xml.decode(encoding="iso-8859-1")
except UnicodeDecodeError as exc:
# In the very rare occurrence of a decode error, attach the original
# XML bytes to the raised MWSError via its `.response` attribute.
from mws.mws import MWSError
error = MWSError(str(exc))
error.response = xml
raise error
self.response = None
self._original = xml
self._result_key = result_key
# TODO try this with xmltodict library?
self._mydict = XML2Dict().fromstring(remove_xml_namespaces(self.original))
self._response_dict = self._mydict.get(
list(self._mydict.keys())[0], self._mydict
)
@property
def parsed(self):
"""Returns parsed XML contents as a tree of `DotDict`s."""
if self._result_key:
return self._response_dict.get(self._result_key, self._response_dict)
return self._response_dict
@property
def original(self):
"""Returns original XML content."""
return self._original
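# Hedged usage sketch (hypothetical response body and result key):
#     wrapper = DictWrapper(raw_xml, result_key='ListOrdersResult')
#     wrapper.parsed      # DotDict-style access to the 'ListOrdersResult' subtree
#     wrapper.original    # the original XML document string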
### DEPRECATED - REMOVE IN 1.1 ###
class DataWrapper(object):
"""Text wrapper in charge of validating the hash sent by Amazon."""
def __init__(self, data, headers):
warnings.warn(
(
"DataWrapper is deprecated. "
"For parsing request.Response objects, "
"use 'mws.utils.parsers.MWSResponse'. "
),
RemovedInPAM11Warning,
)
self.original = data
self.response = None
self.headers = headers
if "content-md5" in self.headers:
hash_ = calc_md5(self.original)
if self.headers["content-md5"].encode() != hash_:
from mws.mws import MWSError
raise MWSError("Wrong Content length, maybe amazon error...")
@property
def parsed(self):
"""Returns original content.
Used to provide an interface identical to `DictWrapper`, even if
content could not be parsed as XML.
"""
return self.original
@property
def unzipped(self):
"""Returns a `ZipFile` of file contents if response contains zip file bytes.
Otherwise, returns None.
"""
if self.headers["content-type"] == "application/zip":
try:
with ZipFile(BytesIO(self.original)) as unzipped_fileobj:
# extract the archive contents into the current working directory
unzipped_fileobj.extractall()
# return the ZipFile object to the caller (closed once the 'with' block exits)
return unzipped_fileobj
except Exception as exc:
from mws.mws import MWSError
raise MWSError(str(exc))
return None # 'The response is not a zipped file.'
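# Hedged usage sketch (assuming a requests.Response-like object):
#     wrapped = DataWrapper(response.content, response.headers)
#     wrapped.parsed      # the raw bytes, mirroring DictWrapper's interface
#     wrapped.unzipped    # a ZipFile when content-type is 'application/zip'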
|
|
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
import csv
from flask import abort
from flask import Blueprint
from flask import render_template
from flask import jsonify
from flask import send_file
from flask_login import login_required
from flask_login import current_user
from flask import request
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy import func
from wtforms import Form
from wtforms import BooleanField
from wtforms import HiddenField
from wtforms import SelectField
from wtforms import StringField
from wtforms import TextAreaField
from wtforms.widgets import TextArea
from wtforms.validators import Length
from wtforms.validators import required
from common import get_last_successful_inventory_elapsed_time, can_create
from constants import UNKNOWN
from constants import ExportInformationFormat
from constants import UserPrivilege
from database import DBSession
from models import HostInventory
from models import Inventory
from models import Host
from models import Region
from models import SystemOption
from models import EmailJob
from models import logger
from report_writer import ExportInventoryInfoCSVWriter
from report_writer import ExportInventoryInfoHTMLWriter
from report_writer import ExportInventoryInfoExcelWriter
from report_writer import ExportInventoryDashboardHTMLWriter
from report_writer import ExportInventoryDashboardExcelWriter
from report_writer import ExportInventoryDashboardCSVWriter
from report_writer import get_search_filter_in_html
inventory = Blueprint('inventory', __name__, url_prefix='/inventory')
HEADER_FIELD_SERIAL_NUMBER = 'serial_number'
HEADER_FIELD_MODEL_NAME = 'model_name'
HEADER_FIELD_NOTES = 'notes'
@inventory.route('/dashboard')
@login_required
def dashboard():
export_dashboard_form = ExportInventoryDashboardForm(request.form)
export_dashboard_form.user_email.data = current_user.email if current_user.email else ""
return render_template('inventory/dashboard.html', current_user=current_user,
export_dashboard_form=export_dashboard_form)
@inventory.route('/query_add_inventory', methods=['GET', 'POST'])
@login_required
def query_add_inventory():
"""
Provide service for user to:
1. Query an inventory based on serial number.
2. Add/Edit the inventory with input serial number, model name(optional), and notes(optional).
3. Delete the inventory with input serial number.
Any user can query the inventory, but only an Admin user can add, update or delete inventory.
"""
sn_form = QueryInventoryBySerialNumberForm(request.form)
update_inventory_form = UpdateInventoryForm(request.form)
inventory_data_fields = {'serial_number_submitted': None, 'new_inventory': False,
'inventory_name': None, 'description': None,
'hardware_revision': None, 'hostname': '', 'region_name': None,
'chassis': None, 'platform': None, 'software': None,
'last_successful_retrieval': None, 'inventory_retrieval_status': None}
error_msg = None
success_msg = None
if request.method == 'GET':
init_query_add_inventory_forms(sn_form, update_inventory_form)
elif request.method == 'POST':
db_session = DBSession()
# if user submitted the form with the serial number
# then we display back all the information for the inventory with this serial number
if sn_form.hidden_submit_sn.data == 'True':
get_inventory_data_by_serial_number(db_session, sn_form, update_inventory_form, inventory_data_fields)
# if user submitted the form with the updated inventory info
# we update/create or delete the inventory
else:
if not can_create(current_user):
error_msg = "User not authorized to create, update or delete inventory."
# the front end prevents an empty serial_number from being submitted; double-check here
elif update_inventory_form.hidden_serial_number.data:
inventory_entry = db_session.query(Inventory).filter(
Inventory.serial_number == update_inventory_form.hidden_serial_number.data).first()
if update_inventory_form.hidden_action.data == "Update":
update_or_add_inventory(db_session, inventory_entry, update_inventory_form.hidden_serial_number.data,
update_inventory_form.model_name.data, update_inventory_form.notes.data)
elif update_inventory_form.hidden_action.data == "Delete":
success_msg, error_msg = delete_inventory(db_session, inventory_entry)
else:
error_msg = "Unknown request is received."
else:
error_msg = "Failed to create inventory because serial number submitted is empty."
if not error_msg:
# clear the forms to indicate a successful inventory update/add action!
init_query_add_inventory_forms(sn_form, update_inventory_form)
else:
sn_form.serial_number.data = update_inventory_form.hidden_serial_number.data
get_inventory_data_by_serial_number(db_session, sn_form, update_inventory_form, inventory_data_fields)
return render_template('inventory/query_add_inventory.html', sn_form=sn_form,
update_inventory_form=update_inventory_form, success_msg=success_msg,
error_msg=error_msg, current_user=current_user, **inventory_data_fields)
def init_query_add_inventory_forms(sn_form, update_inventory_form):
sn_form.serial_number.data = None
sn_form.hidden_submit_sn.data = ''
update_inventory_form.model_name.data = None
update_inventory_form.notes.data = None
update_inventory_form.hidden_serial_number.data = ''
update_inventory_form.hidden_action.data = ''
return
def get_inventory_data_by_serial_number(db_session, sn_form, update_inventory_form, inventory_data_fields):
"""
Get the data fields for the queried inventory for the purpose of displaying to user
:param db_session: session of database transactions
:param sn_form: the form with the serial number input and etc.
:param update_inventory_form: the form with model name, notes and etc.
:param inventory_data_fields: the extra data fields that will be displayed to the user
:return: None
"""
inventory_data_fields['new_inventory'] = True
existing_inventory = db_session.query(Inventory).filter(
Inventory.serial_number == sn_form.serial_number.data).first()
inventory_data_fields['serial_number_submitted'] = sn_form.serial_number.data
if existing_inventory:
inventory_data_fields['new_inventory'] = False
update_inventory_form.model_name.data = existing_inventory.model_name
update_inventory_form.notes.data = existing_inventory.notes
inventory_data_fields['inventory_name'] = existing_inventory.name
inventory_data_fields['description'] = existing_inventory.description
inventory_data_fields['hardware_revision'] = existing_inventory.hardware_revision
# if this inventory has been discovered in a host - host_id is not None
if isinstance(existing_inventory.host_id, (int, long)):
host = db_session.query(Host).filter(Host.id == existing_inventory.host_id).first()
if host:
inventory_data_fields['hostname'] = host.hostname
inventory_data_fields['region_name'] = host.region.name if host.region is not None else UNKNOWN
inventory_data_fields['chassis'] = UNKNOWN if host.platform is None else host.platform
inventory_data_fields['platform'] = UNKNOWN if host.software_platform is None else host.software_platform
inventory_data_fields['software'] = UNKNOWN if host.software_version is None else host.software_version
inventory_job = host.inventory_job[0]
if inventory_job and inventory_job.last_successful_time:
inventory_data_fields['last_successful_retrieval'] = get_last_successful_inventory_elapsed_time(host)
inventory_data_fields['inventory_retrieval_status'] = inventory_job.status
return
def update_or_add_inventory(db_session, inventory_obj, serial_number, model_name, notes, commit=True):
"""
Either update or add the inventory given serial number, model name(optional), and notes(optional)
:param db_session: session of database transactions
:param inventory_obj: the inventory row that we got from querying with the provided serial_number
:param serial_number: input serial number string
:param model_name: input model name string
:param notes: input notes string
:param commit: if true, commit the db transaction in the end, else, don't commit
:return: None.
"""
if inventory_obj:
# if this inventory has been discovered in a host, model_name cannot be updated by user
# the front end enforces this too, but double-check it here
if isinstance(inventory_obj.host_id, (int, long)):
inventory_obj.update(db_session, notes=notes)
# if this inventory is not found in a host, user can define/update the model_name
else:
inventory_obj.update(db_session, model_name=model_name, notes=notes)
if commit:
db_session.commit()
else:
inv = Inventory(serial_number=serial_number,
model_name=model_name,
notes=notes)
db_session.add(inv)
if commit:
db_session.commit()
return
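# Hedged example of the update-vs-add flow above (hypothetical values):
#     existing = db_session.query(Inventory).filter(
#         Inventory.serial_number == 'FOC12345678').first()
#     update_or_add_inventory(db_session, existing, 'FOC12345678',
#                             model_name='ASR-9001', notes='spare part')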
def delete_inventory(db_session, inventory_obj):
"""
Delete the inventory with the input serial number from the inventory table
:param db_session: session of database transactions
:param inventory_obj: the inventory row that we got from querying with the provided serial_number
:return: success_msg, error_msg. Can either be None or strings.
"""
success_msg = None
error_msg = None
if inventory_obj:
if isinstance(inventory_obj.host_id, (int, long)):
error_msg = "Cannot delete this inventory because CSM inventory retrieval " + \
"indicates that it is currently used by a device."
else:
db_session.delete(inventory_obj)
db_session.commit()
success_msg = "Inventory with serial number '{}' ".format(inventory_obj.serial_number) + \
"has been successfully deleted."
else:
error_msg = "Cannot delete this inventory because it is not " + \
"saved in CSM inventory database yet."
return success_msg, error_msg
@inventory.route('/search_inventory', methods=['GET', 'POST'])
@login_required
def search_inventory():
search_inventory_form = SearchInventoryForm(request.form)
export_results_form = ExportInventoryInformationForm(request.form)
export_results_form.user_email.data = current_user.email if current_user.email else ""
return render_template('inventory/search_inventory.html', search_inventory_form=search_inventory_form,
export_results_form=export_results_form, current_user=current_user)
def query_available_inventory(db_session, serial_number, model_names, partial_model_names, vid):
"""
Search for the available inventories matching the selected filters. Only serial number, model name(s),
or partial model name(s) apply to this search.
:param db_session: db transaction session
:param serial_number: a string containing serial number
:param model_names: an array of strings - selected model names - to filter inventories with
:param partial_model_names: an array of strings - partial model names - to filter inventories with
:param vid: a string containing vid or hardware revision
:return: query object/iterator that contains the result of the available inventories matching
the search criteria.
"""
filter_clauses = [Inventory.host_id == None]
get_filter_clauses_for_sn_pid_vid(serial_number,
model_names,
partial_model_names,
vid,
Inventory, filter_clauses)
return db_session.query(Inventory).filter(*filter_clauses)
def query_in_use_inventory(db_session, json_data):
"""
Search for the in-use inventories matching the selected filters.
:param db_session: db transaction session
:param json_data: a dictionary containing all search criteria.
:return: query object/iterator that contains the result of the in-use inventories matching
the search criteria.
"""
filter_clauses = []
filter_clauses = get_filter_clauses_for_sn_pid_vid(json_data.get('serial_number'),
json_data.get('model_names'),
json_data.get('partial_model_names'),
json_data.get('vid'),
HostInventory, filter_clauses)
if len(filter_clauses) > 0:
results = db_session.query(HostInventory).filter(*filter_clauses)
else:
results = db_session.query(HostInventory)
join_filter_clauses = []
if json_data.get('hostname') is not None:
join_filter_clauses.append(Host.hostname == json_data.get('hostname'))
if json_data.get('region_ids'):
join_filter_clauses.append(Host.region_id.in_(map(int, json_data.get('region_ids'))))
if json_data.get('chassis_types'):
join_filter_clauses.append(Host.platform.in_(json_data.get('chassis_types')))
if json_data.get('software_versions'):
join_filter_clauses.append(Host.software_version.in_(json_data.get('software_versions')))
return results.join(Host, HostInventory.host_id == Host.id).filter(*join_filter_clauses)
def get_filter_clauses_for_sn_pid_vid(serial_number, model_names, partial_model_names,
vid, filter_table, filter_clauses):
"""
Helper function for api_search_inventory: create filter clauses for the input
serial number, an array of selected model names or an array of entered
partial model names, and the input vid.
"""
if serial_number is not None:
filter_clauses.append(filter_table.serial_number == serial_number)
if model_names:
filter_clauses.append(filter_table.model_name.in_(model_names))
elif partial_model_names:
filter_clause = get_filter_clauses_for_partial_field_match(filter_table.model_name,
partial_model_names)
# cannot do 'if filter_clause' because ClauseElement
# raises an exception if called in a boolean context
if filter_clause is not None:
filter_clauses.append(filter_clause)
if vid is not None:
filter_clauses.append(filter_table.hardware_revision == vid)
return filter_clauses
def get_filter_clauses_for_partial_field_match(field, partial_strings_array):
"""
Create filter clauses for partially matching the field to any of the
strings in an array using LIKE
"""
clauses = []
for partial_string in partial_strings_array:
stripped_partial_string = partial_string.strip()
if stripped_partial_string:
clauses.append(field.like('%' + stripped_partial_string + '%'))
if len(clauses) == 0:
return None
elif len(clauses) == 1:
return clauses[0]
return or_(*clauses)
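# Illustrative sketch (hypothetical inputs):
#     clause = get_filter_clauses_for_partial_field_match(
#         Inventory.model_name, ['ASR', ' NCS '])
#     # -> Inventory.model_name LIKE '%ASR%' OR Inventory.model_name LIKE '%NCS%'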
@inventory.route('/export', methods=['POST'])
@login_required
def export_inventory_information():
"""export the inventory search result to cvs, html or excel format."""
db_session = DBSession()
export_results_form = ExportInventoryInformationForm(request.form)
export_data = dict()
export_data['export_format'] = export_results_form.export_format.data
export_data['serial_number'] = export_results_form.hidden_serial_number.data \
if export_results_form.hidden_serial_number.data != "" else None
export_data['region_ids'] = export_results_form.hidden_region_ids.data.split(',') \
if export_results_form.hidden_region_ids.data else []
export_data['chassis_types'] = export_results_form.hidden_chassis_types.data.split(',') \
if export_results_form.hidden_chassis_types.data else []
export_data['software_versions'] = export_results_form.hidden_software_versions.data.split(',') \
if export_results_form.hidden_software_versions.data else []
export_data['model_names'] = export_results_form.hidden_model_names.data.split(',') \
if export_results_form.hidden_model_names.data else []
export_data['partial_model_names'] = export_results_form.hidden_partial_model_names.data.split(',') \
if export_results_form.hidden_partial_model_names.data else []
export_data['vid'] = export_results_form.hidden_vid.data \
if export_results_form.hidden_vid.data != "" else None
if export_data['region_ids']:
region_names = db_session.query(Region.name).filter(
Region.id.in_(map(int, export_data['region_ids']))).order_by(Region.name.asc()).all()
export_data['region_names'] = [query_tuple[0] for query_tuple in region_names]
else:
export_data['region_names'] = []
export_data['available_inventory_iter'] = query_available_inventory(db_session,
export_data.get('serial_number'),
export_data.get('model_names'),
export_data.get('partial_model_names'),
export_data.get('vid'))
export_data['in_use_inventory_iter'] = query_in_use_inventory(db_session, export_data)
export_data['user'] = current_user
writer = None
if export_data.get('export_format') == ExportInformationFormat.HTML:
writer = ExportInventoryInfoHTMLWriter(**export_data)
elif export_data.get('export_format') == ExportInformationFormat.MICROSOFT_EXCEL:
writer = ExportInventoryInfoExcelWriter(**export_data)
elif export_data.get('export_format') == ExportInformationFormat.CSV:
writer = ExportInventoryInfoCSVWriter(**export_data)
if writer:
file_path = writer.write_report()
if export_results_form.send_email.data:
email_message = "<html><head></head><body>Please find in the attachment the inventory search results " \
"matching the following search criteria: "
search_criteria_in_html = get_search_filter_in_html(export_data)
if search_criteria_in_html:
email_message += search_criteria_in_html + '</body></html>'
else:
email_message += ' None</body></html>'
create_email_job_with_attachment_files(db_session, email_message, file_path,
export_results_form.user_email.data)
return send_file(file_path, as_attachment=True)
logger.error('inventory: invalid export format "%s" chosen.' % export_data.get('export_format'))
abort(400)
@inventory.route('/api/check_if_email_notify_enabled/')
@login_required
def check_if_email_notify_enabled():
db_session = DBSession()
system_option = SystemOption.get(db_session)
db_session.close()
return jsonify({'email_notify_enabled': system_option.enable_email_notify})
def create_email_job_with_attachment_files(db_session, message, file_paths, recipients):
system_option = SystemOption.get(db_session)
if not system_option.enable_email_notify:
return
email_job = EmailJob(recipients=recipients, message=message, created_by=current_user.username,
attachment_file_paths=file_paths)
db_session.add(email_job)
db_session.commit()
return
@inventory.route('/dashboard/export', methods=['POST'])
@login_required
def export_inventory_dashboard():
"""export the inventory dashboard to cvs, html or excel format."""
db_session = DBSession()
export_dashboard_form = ExportInventoryDashboardForm(request.form)
export_data = dict()
export_data['export_format'] = export_dashboard_form.export_format.data
try:
export_data['region_id'] = int(export_dashboard_form.hidden_region_id.data) \
if export_dashboard_form.hidden_region_id.data else 0
except ValueError:
export_data['region_id'] = 0
if export_data['region_id'] == 0:
export_data['region_name'] = "ALL"
else:
export_data['region_name'] = db_session.query(Region.name).filter(
Region.id == export_data['region_id']).first()[0]
export_data['chassis_summary_iter'] = get_chassis_summary_query(db_session, export_data['region_id'])
export_data['model_name_summary_iter'] = get_model_name_summary_query_results(db_session,
export_data['region_id']).__iter__()
export_data['inventory_without_serial_number_iter'] = \
get_inventory_without_serial_number_query(db_session, export_data['region_id'])
export_data['inventory_with_duplicate_serial_number_iter'] = \
get_inventory_with_duplicate_serial_number_query(db_session, export_data['region_id'])
export_data['user'] = current_user
writer = None
if export_data.get('export_format') == ExportInformationFormat.MICROSOFT_EXCEL:
writer = ExportInventoryDashboardExcelWriter(**export_data)
elif export_data.get('export_format') == ExportInformationFormat.HTML:
writer = ExportInventoryDashboardHTMLWriter(**export_data)
elif export_data.get('export_format') == ExportInformationFormat.CSV:
writer = ExportInventoryDashboardCSVWriter(**export_data)
if writer:
file_path = writer.write_report()
if export_dashboard_form.send_email.data:
email_message = "<html><head></head><body>Please find in the attachment the inventory dashboard summary " \
"in region: " + export_data['region_name'] + '</body></html>'
create_email_job_with_attachment_files(db_session, email_message, file_path,
export_dashboard_form.user_email.data)
return send_file(file_path, as_attachment=True)
logger.error('inventory: invalid export format "%s" chosen.' % export_data.get('export_format'))
abort(400)
@inventory.route('/import_inventory')
@login_required
def import_inventory():
if not can_create(current_user):
abort(401)
form = ImportInventoryForm(request.form)
return render_template('inventory/import_inventory.html', form=form)
@inventory.route('/api/import_inventory', methods=['POST'])
@login_required
def api_import_inventory():
"""
API for importing inventory
Note: 1. If the inventory already exists in the db and its model name has been
discovered by CSM, we will not overwrite the model name with data from here.
2. For data with duplicate serial numbers, only the first entry will be created
in the db or used to update an existing inventory.
Returns either status: 'OK' with unimported_inventory: a list of the skipped
data rows (noting the duplicated serial numbers),
or status: the errors found in the imported data, separated by commas.
"""
if not can_create(current_user):
abort(401)
importable_header = [HEADER_FIELD_SERIAL_NUMBER, HEADER_FIELD_MODEL_NAME, HEADER_FIELD_NOTES]
general_notes = request.form['general_notes']
data_list = request.form['data_list']
db_session = DBSession()
reader = csv.reader(data_list.splitlines(), delimiter=',')
header_row = next(reader)
# Check mandatory data fields
error = []
if HEADER_FIELD_SERIAL_NUMBER not in header_row:
error.append('"serial_number" is missing in the header.')
for header_field in header_row:
if header_field not in importable_header:
error.append('"' + header_field + '" is not a valid header field.')
if error:
return jsonify({'status': ','.join(error)})
row = 2
# already checked that HEADER_FIELD_SERIAL_NUMBER is in header
serial_number_idx = header_row.index(HEADER_FIELD_SERIAL_NUMBER)
data_list = list(reader)
# Check if each row has the same number of data fields as the header
for row_data in data_list:
if len(row_data) > 0:
if len(row_data) != len(header_row):
error.append('line %d has wrong number of data fields.' % row)
else:
if not row_data[serial_number_idx]:
error.append('line %d missing serial number value.' % row)
row += 1
if error:
return jsonify({'status': ','.join(error)})
# Import the data
unique_serial_numbers = set()
unimported_inventory = []
row = 1
for data in data_list:
row += 1
if len(data) == 0:
continue
serial_number = ''
model_name = ''
notes = general_notes
for column in range(len(header_row)):
header_field = header_row[column]
data_field = data[column].strip()
if header_field == HEADER_FIELD_SERIAL_NUMBER:
serial_number = data_field
elif header_field == HEADER_FIELD_MODEL_NAME:
model_name = data_field
elif header_field == HEADER_FIELD_NOTES and data_field:
notes = data_field
if serial_number:
inventory_obj = db_session.query(Inventory).filter(Inventory.serial_number == serial_number).first()
# only create/update inventory data if the serial number is unique among the imported data
if serial_number not in unique_serial_numbers:
update_or_add_inventory(db_session, inventory_obj, serial_number, model_name, notes, commit=False)
unique_serial_numbers.add(serial_number)
else:
unimported_inventory.append('line %d: ' % row + ','.join(data))
else:
return jsonify({'status': 'Serial number data field cannot be empty.'})
db_session.commit()
db_session.close()
if unimported_inventory:
return jsonify({'status': 'OK', 'unimported_inventory': unimported_inventory})
return jsonify({'status': 'OK', 'unimported_inventory': []})
@inventory.route('/api/get_chassis_summary/<int:region_id>')
@login_required
def api_get_chassis_summary(region_id):
"""
Return the chassis, count summary datatable json data
"""
db_session = DBSession()
chassis_summary_query = get_chassis_summary_query(db_session, region_id)
rows = []
for chassis_type, count in chassis_summary_query:
rows.append({'chassis': chassis_type, 'count': count})
db_session.close()
return jsonify(**{'data': rows})
def get_chassis_summary_query(db_session, region_id):
if region_id == 0:
chassis_summary_query = db_session.query(Host.platform, func.count(Host.platform)).group_by(Host.platform.asc())
else:
chassis_summary_query = db_session.query(Host.platform, func.count(Host.platform))\
.filter(Host.region_id == region_id).group_by(Host.platform.asc())
return chassis_summary_query
@inventory.route('/api/get_model_name_summary/<int:region_id>')
@login_required
def api_get_model_name_summary(region_id):
"""
Return the model name, in use (count), available (count) summary datatable json data
"""
db_session = DBSession()
rows = get_model_name_summary_query_results(db_session, region_id)
db_session.close()
return jsonify(**{'data': rows})
def get_model_name_summary_query_results(db_session, region_id):
available_model_name_summary_iter = db_session.query(Inventory.model_name, func.count(Inventory.model_name))\
.filter(Inventory.host_id == None)\
.group_by(Inventory.model_name.asc()).__iter__()
if region_id == 0:
in_use_model_name_summary_iter = db_session.query(HostInventory.model_name,
func.count(HostInventory.model_name))\
.group_by(HostInventory.model_name.asc()).__iter__()
else:
in_use_model_name_summary_iter = db_session.query(HostInventory.model_name,
func.count(HostInventory.model_name)).join(Host)\
.filter(Host.region_id == region_id).group_by(HostInventory.model_name.asc()).__iter__()
rows = []
# Merge the two model-name-sorted iterators of (model_name, count) tuples into
# 'rows', a list of dictionaries sorted by their 'model_name' values.
available_model_name, available_count = next(available_model_name_summary_iter, (None, None))
in_use_model_name, in_use_count = next(in_use_model_name_summary_iter, (None, None))
# available_model_name and in_use_model_name may be ''
while available_model_name is not None and in_use_model_name is not None:
if available_model_name == in_use_model_name:
rows.append({'model_name': available_model_name, 'in_use_count': in_use_count,
'available_count': available_count})
available_model_name, available_count = next(available_model_name_summary_iter, (None, None))
in_use_model_name, in_use_count = next(in_use_model_name_summary_iter, (None, None))
elif available_model_name < in_use_model_name:
rows.append({'model_name': available_model_name, 'in_use_count': 0, 'available_count': available_count})
available_model_name, available_count = next(available_model_name_summary_iter, (None, None))
else:
rows.append({'model_name': in_use_model_name, 'in_use_count': in_use_count, 'available_count': 0})
in_use_model_name, in_use_count = next(in_use_model_name_summary_iter, (None, None))
while available_model_name is not None:
rows.append({'model_name': available_model_name, 'in_use_count': 0, 'available_count': available_count})
available_model_name, available_count = next(available_model_name_summary_iter, (None, None))
while in_use_model_name is not None:
rows.append({'model_name': in_use_model_name, 'in_use_count': in_use_count, 'available_count': 0})
in_use_model_name, in_use_count = next(in_use_model_name_summary_iter, (None, None))
return rows
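# Hedged illustration of the merge above (hypothetical counts):
#     available: [('A', 2), ('C', 1)]    in use: [('B', 3), ('C', 4)]
#     rows -> [{'model_name': 'A', 'in_use_count': 0, 'available_count': 2},
#              {'model_name': 'B', 'in_use_count': 3, 'available_count': 0},
#              {'model_name': 'C', 'in_use_count': 4, 'available_count': 1}]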
def get_inventory_without_serial_number_query(db_session, region_id):
""" Return query of hostname and count of the inventories without serial numbers in that host. """
if region_id == 0:
host_with_count_query = db_session.query(Host.hostname, func.count(HostInventory.name)) \
.group_by(Host.hostname.asc()) \
.filter(and_(Host.id == HostInventory.host_id, HostInventory.serial_number == ""))
else:
host_with_count_query = db_session.query(Host.hostname, func.count(HostInventory.name)) \
.group_by(Host.hostname.asc()) \
.filter(and_(Host.region_id == region_id, Host.id == HostInventory.host_id,
HostInventory.serial_number == ""))
return host_with_count_query
def get_inventory_with_duplicate_serial_number_query(db_session, region_id):
""" Return query of serial number and count of inventories with that serial number. """
if region_id == 0:
serial_number_with_count_query = db_session.query(HostInventory.serial_number,
func.count(HostInventory.serial_number))\
.filter(HostInventory.serial_number != "")\
.group_by(HostInventory.serial_number.asc()) \
.having(func.count(HostInventory.serial_number) > 1)
else:
serial_number_with_count_query = db_session.query(HostInventory.serial_number,
func.count(HostInventory.serial_number))\
.join(Host)\
.filter(and_(Host.region_id == region_id, HostInventory.serial_number != ""))\
.group_by(HostInventory.serial_number.asc())\
.having(func.count(HostInventory.serial_number) > 1)
return serial_number_with_count_query
@inventory.route('/api/get_chassis_types/')
@login_required
def api_get_chassis():
"""
This method is called by ajax attached to Select2.
The returned JSON contains the predefined tags.
"""
return update_select2_options(request.args, Host.platform)
@inventory.route('/api/get_software_versions/')
@login_required
def api_get_software_versions():
"""
This method is called by ajax attached to Select2.
The returned JSON contains the predefined tags.
"""
return update_select2_options(request.args, Host.software_version)
@inventory.route('/api/get_model_names/')
@login_required
def api_get_model_names():
"""
This method is called by ajax attached to Select2.
The returned JSON contains the predefined tags.
"""
return update_select2_options(request.args, Inventory.model_name)
def update_select2_options(request_args, data_field):
"""
This method helps populate the options used by ajax attached to Select2.
The returned JSON contains the predefined tags.
"""
db_session = DBSession()
rows = []
criteria = '%'
if request_args.get('q'):
criteria += request_args.get('q') + '%'
item_iter = db_session.query(data_field).filter(data_field.like(criteria)).distinct().order_by(data_field.asc())
for item in item_iter:
if item[0]:
rows.append({'id': item[0], 'text': item[0]})
db_session.close()
return jsonify(**{'data': rows})
@inventory.route('/api/get_regions/')
@login_required
def api_get_regions():
"""
This method is called by ajax attached to Select2 in home page.
The returned JSON contains the predefined tags.
"""
db_session = DBSession()
rows = []
criteria = '%'
if request.args and request.args.get('q'):
criteria += request.args.get('q') + '%'
else:
criteria += '%'
regions = db_session.query(Region).filter(Region.name.like(criteria)).order_by(Region.id.asc()).all()
if len(regions) > 0:
if request.args.get('show_all'):
rows.append({'id': 1, 'text': 'ALL'})
for region in regions:
rows.append({'id': region.id + 1, 'text': region.name})
return jsonify(**{'data': rows})
class BasicExportInventoryInformationForm(Form):
export_format = SelectField('Export Format', coerce=str,
choices=[(ExportInformationFormat.MICROSOFT_EXCEL,
ExportInformationFormat.MICROSOFT_EXCEL),
(ExportInformationFormat.HTML,
ExportInformationFormat.HTML),
(ExportInformationFormat.CSV,
ExportInformationFormat.CSV)])
send_email = BooleanField('Email Export Data')
user_email = StringField('User Email')
class ExportInventoryInformationForm(BasicExportInventoryInformationForm):
hidden_serial_number = HiddenField('')
hidden_region_ids = HiddenField('')
hidden_chassis_types = HiddenField('')
hidden_software_versions = HiddenField('')
hidden_model_names = HiddenField('')
hidden_partial_model_names = HiddenField('')
hidden_vid = HiddenField('')
class ExportInventoryDashboardForm(BasicExportInventoryInformationForm):
# include_failed_hosts_table = BooleanField('Include "Hosts that Failed the Inventory Retrieval"')
hidden_region_id = HiddenField('')
class QueryInventoryBySerialNumberForm(Form):
serial_number = StringField('Serial Number', [required(), Length(max=50)])
hidden_submit_sn = HiddenField('')
class UpdateInventoryForm(Form):
model_name = StringField('Model Name (PID)', [Length(max=50)])
notes = StringField('Notes', widget=TextArea())
hidden_serial_number = HiddenField('')
hidden_action = HiddenField('')
class SearchInventoryForm(Form):
serial_number = StringField('Serial Number', [Length(max=50)])
partial_model_names = StringField('Model Name (PID)')
vid = StringField('VID', [Length(max=50)])
class ImportInventoryForm(Form):
general_notes = TextAreaField('')
data_list = TextAreaField('')
|
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import atexit
import datetime
import functools
import logging
import os
import re
import shutil
import socket
import sys
import uuid
import warnings
import fixtures
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_log import fixture as log_fixture
from oslo_log import log
from oslo_utils import timeutils
import oslotest.base as oslotest
from oslotest import mockpatch
from paste.deploy import loadwsgi
import six
from sqlalchemy import exc
from testtools import testcase
# NOTE(ayoung)
# environment.use_eventlet must run before any of the code that will
# call the eventlet monkeypatching.
from keystone.common import environment # noqa
environment.use_eventlet()
from keystone import auth
from keystone.common import config as common_cfg
from keystone.common import dependency
from keystone.common import kvs
from keystone.common.kvs import core as kvs_core
from keystone.common import sql
from keystone import config
from keystone import controllers
from keystone import exception
from keystone import notifications
from keystone.policy.backends import rules
from keystone.server import common
from keystone import service
from keystone.tests.unit import ksfixtures
config.configure()
LOG = log.getLogger(__name__)
PID = six.text_type(os.getpid())
TESTSDIR = os.path.dirname(os.path.abspath(__file__))
TESTCONF = os.path.join(TESTSDIR, 'config_files')
ROOTDIR = os.path.normpath(os.path.join(TESTSDIR, '..', '..', '..'))
VENDOR = os.path.join(ROOTDIR, 'vendor')
ETCDIR = os.path.join(ROOTDIR, 'etc')
def _calc_tmpdir():
env_val = os.environ.get('KEYSTONE_TEST_TEMP_DIR')
if not env_val:
return os.path.join(TESTSDIR, 'tmp', PID)
return os.path.join(env_val, PID)
TMPDIR = _calc_tmpdir()
CONF = cfg.CONF
log.register_options(CONF)
rules.init()
IN_MEM_DB_CONN_STRING = 'sqlite://'
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
exception._FATAL_EXCEPTION_FORMAT_ERRORS = True
os.makedirs(TMPDIR)
atexit.register(shutil.rmtree, TMPDIR)
class dirs(object):
@staticmethod
def root(*p):
return os.path.join(ROOTDIR, *p)
@staticmethod
def etc(*p):
return os.path.join(ETCDIR, *p)
@staticmethod
def tests(*p):
return os.path.join(TESTSDIR, *p)
@staticmethod
def tmp(*p):
return os.path.join(TMPDIR, *p)
@staticmethod
def tests_conf(*p):
return os.path.join(TESTCONF, *p)
# keystone.common.sql.initialize() for testing.
DEFAULT_TEST_DB_FILE = dirs.tmp('test.db')
class EggLoader(loadwsgi.EggLoader):
_basket = {}
def find_egg_entry_point(self, object_type, name=None):
egg_key = '%s:%s' % (object_type, name)
egg_ep = self._basket.get(egg_key)
if not egg_ep:
egg_ep = super(EggLoader, self).find_egg_entry_point(
object_type, name=name)
self._basket[egg_key] = egg_ep
return egg_ep
# NOTE(dstanek): class paths were removed from the keystone-paste.ini in
# favor of using entry points. This caused tests to slow to a crawl
# since we reload the application object for each RESTful test. This
# monkey-patching adds caching to paste deploy's egg lookup.
loadwsgi.EggLoader = EggLoader
@atexit.register
def remove_test_databases():
db = dirs.tmp('test.db')
if os.path.exists(db):
os.unlink(db)
pristine = dirs.tmp('test.db.pristine')
if os.path.exists(pristine):
os.unlink(pristine)
def generate_paste_config(extension_name):
# Generate a file, based on keystone-paste.ini, that is named:
# extension_name.ini, and includes extension_name in the pipeline
with open(dirs.etc('keystone-paste.ini'), 'r') as f:
contents = f.read()
new_contents = contents.replace(' service_v3',
' %s service_v3' % (extension_name))
new_paste_file = dirs.tmp(extension_name + '.ini')
with open(new_paste_file, 'w') as f:
f.write(new_contents)
return new_paste_file
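# For example (hypothetical extension name), generate_paste_config('my_ext')
# rewrites each ' service_v3' pipeline entry as ' my_ext service_v3' and writes
# the result to dirs.tmp('my_ext.ini'); pair it with
# remove_generated_paste_config('my_ext') during cleanup.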
def remove_generated_paste_config(extension_name):
# Remove the generated paste config file, named extension_name.ini
paste_file_to_remove = dirs.tmp(extension_name + '.ini')
os.remove(paste_file_to_remove)
def skip_if_cache_disabled(*sections):
"""This decorator is used to skip a test if caching is disabled.
Caching can be disabled either globally or for a specific section.
In the code fragment::
@skip_if_cache_disabled('assignment', 'token')
def test_method(*args):
...
The method test_method would be skipped if caching is disabled globally via
the `enabled` option in the `cache` section of the configuration or if
the `caching` option is set to false in either `assignment` or `token`
sections of the configuration. This decorator can be used with no
arguments to only check global caching.
If a specified configuration section does not define the `caching` option,
this decorator makes the same assumption as the `should_cache_fn` in
keystone.common.cache that caching should be enabled.
"""
def wrapper(f):
@functools.wraps(f)
def inner(*args, **kwargs):
if not CONF.cache.enabled:
raise testcase.TestSkipped('Cache globally disabled.')
for s in sections:
conf_sec = getattr(CONF, s, None)
if conf_sec is not None:
if not getattr(conf_sec, 'caching', True):
raise testcase.TestSkipped('%s caching disabled.' % s)
return f(*args, **kwargs)
return inner
return wrapper
def skip_if_no_multiple_domains_support(f):
"""Decorator to skip tests for identity drivers limited to one domain."""
@functools.wraps(f)
def wrapper(*args, **kwargs):
test_obj = args[0]
if not test_obj.identity_api.multiple_domains_supported:
raise testcase.TestSkipped('No multiple domains support')
return f(*args, **kwargs)
return wrapper
class UnexpectedExit(Exception):
pass
def new_ref():
"""Populates a ref with attributes common to some API entities."""
return {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'enabled': True}
def new_region_ref():
ref = new_ref()
# Region doesn't have name or enabled.
del ref['name']
del ref['enabled']
ref['parent_region_id'] = None
return ref
def new_service_ref():
ref = new_ref()
ref['type'] = uuid.uuid4().hex
return ref
def new_endpoint_ref(service_id, interface='public', default_region_id=None,
**kwargs):
ref = new_ref()
del ref['enabled'] # enabled is optional
ref['interface'] = interface
ref['service_id'] = service_id
ref['url'] = 'https://' + uuid.uuid4().hex + '.com'
ref['region_id'] = default_region_id
ref.update(kwargs)
return ref
def new_domain_ref():
ref = new_ref()
return ref
def new_project_ref(domain_id=None, parent_id=None, is_domain=False):
ref = new_ref()
ref['domain_id'] = domain_id
ref['parent_id'] = parent_id
ref['is_domain'] = is_domain
return ref
def new_user_ref(domain_id, project_id=None):
ref = new_ref()
ref['domain_id'] = domain_id
ref['email'] = uuid.uuid4().hex
ref['password'] = uuid.uuid4().hex
if project_id:
ref['default_project_id'] = project_id
return ref
def new_group_ref(domain_id):
ref = new_ref()
ref['domain_id'] = domain_id
return ref
def new_credential_ref(user_id, project_id=None, cred_type=None):
ref = dict()
ref['id'] = uuid.uuid4().hex
ref['user_id'] = user_id
if cred_type == 'ec2':
ref['type'] = 'ec2'
ref['blob'] = uuid.uuid4().hex
else:
ref['type'] = 'cert'
ref['blob'] = uuid.uuid4().hex
if project_id:
ref['project_id'] = project_id
return ref
def new_role_ref():
ref = new_ref()
# Roles don't have a description or the enabled flag
del ref['description']
del ref['enabled']
return ref
def new_policy_ref():
ref = new_ref()
ref['blob'] = uuid.uuid4().hex
ref['type'] = uuid.uuid4().hex
return ref
def new_trust_ref(trustor_user_id, trustee_user_id, project_id=None,
impersonation=None, expires=None, role_ids=None,
role_names=None, remaining_uses=None,
allow_redelegation=False):
ref = dict()
ref['id'] = uuid.uuid4().hex
ref['trustor_user_id'] = trustor_user_id
ref['trustee_user_id'] = trustee_user_id
ref['impersonation'] = impersonation or False
ref['project_id'] = project_id
ref['remaining_uses'] = remaining_uses
ref['allow_redelegation'] = allow_redelegation
if isinstance(expires, six.string_types):
ref['expires_at'] = expires
elif isinstance(expires, dict):
ref['expires_at'] = (
timeutils.utcnow() + datetime.timedelta(**expires)
).strftime(TIME_FORMAT)
elif expires is None:
pass
else:
raise NotImplementedError('Unexpected value for "expires"')
role_ids = role_ids or []
role_names = role_names or []
if role_ids or role_names:
ref['roles'] = []
for role_id in role_ids:
ref['roles'].append({'id': role_id})
for role_name in role_names:
ref['roles'].append({'name': role_name})
return ref
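# Example (hypothetical IDs): a trust expiring in one hour that carries one
# role by id and one by name. "expires" may be a preformatted string, a dict
# of datetime.timedelta kwargs, or None:
#
#     trust = new_trust_ref(trustor_user_id=uuid.uuid4().hex,
#                           trustee_user_id=uuid.uuid4().hex,
#                           expires={'hours': 1},
#                           role_ids=[uuid.uuid4().hex],
#                           role_names=['admin'])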
class BaseTestCase(oslotest.BaseTestCase):
"""Light weight base test class.
This is a placeholder that will eventually go away once the
setup/teardown in TestCase is properly trimmed down to the bare
essentials. This is really just a play to speed up the tests by
eliminating unnecessary work.
"""
def setUp(self):
super(BaseTestCase, self).setUp()
self.useFixture(mockpatch.PatchObject(sys, 'exit',
side_effect=UnexpectedExit))
self.useFixture(log_fixture.get_logging_handle_error_fixture())
warnings.filterwarnings('error', category=DeprecationWarning,
module='^keystone\\.')
warnings.simplefilter('error', exc.SAWarning)
self.addCleanup(warnings.resetwarnings)
def cleanup_instance(self, *names):
"""Create a function suitable for use with self.addCleanup.
:returns: a callable that uses a closure to delete instance attributes
"""
def cleanup():
for name in names:
# TODO(dstanek): remove this 'if' statement once
# load_backend in test_backend_ldap is only called once
# per test
if hasattr(self, name):
delattr(self, name)
return cleanup
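    # Example (attribute names are illustrative): schedule removal of
    # attributes created during a test:
    #
    #     self.addCleanup(self.cleanup_instance('identity_api', 'user_foo'))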
class TestCase(BaseTestCase):
def config_files(self):
return []
def config_overrides(self):
# NOTE(morganfainberg): enforce config_overrides can only ever be
# called a single time.
assert self.__config_overrides_called is False
self.__config_overrides_called = True
signing_certfile = 'examples/pki/certs/signing_cert.pem'
signing_keyfile = 'examples/pki/private/signing_key.pem'
self.config_fixture.config(group='oslo_policy',
policy_file=dirs.etc('policy.json'))
self.config_fixture.config(
# TODO(morganfainberg): Make Cache Testing a separate test case
# in tempest, and move it out of the base unit tests.
group='cache',
backend='dogpile.cache.memory',
enabled=True,
proxies=['keystone.tests.unit.test_cache.CacheIsolatingProxy'])
self.config_fixture.config(
group='catalog',
driver='templated',
template_file=dirs.tests('default_catalog.templates'))
self.config_fixture.config(
group='kvs',
backends=[
('keystone.tests.unit.test_kvs.'
'KVSBackendForcedKeyMangleFixture'),
'keystone.tests.unit.test_kvs.KVSBackendFixture'])
self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='signing', certfile=signing_certfile,
keyfile=signing_keyfile,
ca_certs='examples/pki/certs/cacert.pem')
self.config_fixture.config(group='token', driver='kvs')
self.config_fixture.config(
group='saml', certfile=signing_certfile, keyfile=signing_keyfile)
self.config_fixture.config(
default_log_levels=[
'amqp=WARN',
'amqplib=WARN',
'boto=WARN',
'qpid=WARN',
'sqlalchemy=WARN',
'suds=INFO',
'oslo.messaging=INFO',
'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN',
'routes.middleware=INFO',
'stevedore.extension=INFO',
'keystone.notifications=INFO',
'keystone.common._memcache_pool=INFO',
'keystone.common.ldap=INFO',
])
self.auth_plugin_config_override()
def auth_plugin_config_override(self, methods=None, **method_classes):
if methods is not None:
self.config_fixture.config(group='auth', methods=methods)
common_cfg.setup_authentication()
if method_classes:
self.config_fixture.config(group='auth', **method_classes)
def _assert_config_overrides_called(self):
assert self.__config_overrides_called is True
def setUp(self):
super(TestCase, self).setUp()
self.__config_overrides_called = False
self.addCleanup(CONF.reset)
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
self.addCleanup(delattr, self, 'config_fixture')
self.config(self.config_files())
# NOTE(morganfainberg): mock the auth plugin setup to use the config
# fixture which automatically unregisters options when performing
# cleanup.
def mocked_register_auth_plugin_opt(conf, opt):
self.config_fixture.register_opt(opt, group='auth')
self.useFixture(mockpatch.PatchObject(
common_cfg, '_register_auth_plugin_opt',
new=mocked_register_auth_plugin_opt))
self.config_overrides()
# NOTE(morganfainberg): ensure config_overrides has been called.
self.addCleanup(self._assert_config_overrides_called)
self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
# NOTE(morganfainberg): This code is a copy from the oslo-incubator
# log module. This is not in a function or otherwise available to use
# without having a CONF object to setup logging. This should help to
# reduce the log size by limiting what we log (similar to how Keystone
# would run under mod_wsgi or eventlet).
for pair in CONF.default_log_levels:
mod, _sep, level_name = pair.partition('=')
logger = logging.getLogger(mod)
logger.setLevel(level_name)
self.useFixture(ksfixtures.Cache())
# Clear the registry of providers so that providers from previous
# tests aren't used.
self.addCleanup(dependency.reset)
self.addCleanup(kvs.INMEMDB.clear)
# Ensure Notification subscriptions and resource types are empty
self.addCleanup(notifications.clear_subscribers)
self.addCleanup(notifications.reset_notifier)
# Reset the auth-plugin registry
self.addCleanup(self.clear_auth_plugin_registry)
self.addCleanup(setattr, controllers, '_VERSIONS', [])
def config(self, config_files):
sql.initialize()
CONF(args=[], project='keystone', default_config_files=config_files)
def load_backends(self):
"""Initializes each manager and assigns them to an attribute."""
# TODO(blk-u): Shouldn't need to clear the registry here, but some
# tests call load_backends multiple times. These should be fixed to
# only call load_backends once.
dependency.reset()
# TODO(morganfainberg): Shouldn't need to clear the registry here, but
# some tests call load_backends multiple times. Since it is not
# possible to re-configure a backend, we need to clear the list. This
# should eventually be removed once testing has been cleaned up.
kvs_core.KEY_VALUE_STORE_REGISTRY.clear()
self.clear_auth_plugin_registry()
drivers, _unused = common.setup_backends(
load_extra_backends_fn=self.load_extra_backends)
for manager_name, manager in drivers.items():
setattr(self, manager_name, manager)
self.addCleanup(self.cleanup_instance(*list(drivers.keys())))
def load_extra_backends(self):
"""Override to load managers that aren't loaded by default.
This is useful to load managers initialized by extensions. No extra
backends are loaded by default.
:return: dict of name -> manager
"""
return {}
def load_fixtures(self, fixtures):
"""Hacky basic and naive fixture loading based on a python module.
Expects that the various APIs into the various services are already
defined on `self`.
"""
# NOTE(dstanek): create a list of attribute names to be removed
# from this instance during cleanup
fixtures_to_cleanup = []
        # TODO(termie): loading something from JSON, probably based on
        # Django's loaddata, would be much preferred.
if (hasattr(self, 'identity_api') and
hasattr(self, 'assignment_api') and
hasattr(self, 'resource_api')):
for domain in fixtures.DOMAINS:
try:
rv = self.resource_api.create_domain(domain['id'], domain)
except exception.Conflict:
rv = self.resource_api.get_domain(domain['id'])
except exception.NotImplemented:
rv = domain
attrname = 'domain_%s' % domain['id']
setattr(self, attrname, rv)
fixtures_to_cleanup.append(attrname)
for tenant in fixtures.TENANTS:
if hasattr(self, 'tenant_%s' % tenant['id']):
try:
# This will clear out any roles on the project as well
self.resource_api.delete_project(tenant['id'])
except exception.ProjectNotFound:
pass
rv = self.resource_api.create_project(
tenant['id'], tenant)
attrname = 'tenant_%s' % tenant['id']
setattr(self, attrname, rv)
fixtures_to_cleanup.append(attrname)
for role in fixtures.ROLES:
try:
rv = self.role_api.create_role(role['id'], role)
except exception.Conflict:
rv = self.role_api.get_role(role['id'])
attrname = 'role_%s' % role['id']
setattr(self, attrname, rv)
fixtures_to_cleanup.append(attrname)
for user in fixtures.USERS:
user_copy = user.copy()
tenants = user_copy.pop('tenants')
try:
existing_user = getattr(self, 'user_%s' % user['id'], None)
if existing_user is not None:
self.identity_api.delete_user(existing_user['id'])
except exception.UserNotFound:
pass
# For users, the manager layer will generate the ID
user_copy = self.identity_api.create_user(user_copy)
# Our tests expect that the password is still in the user
# record so that they can reference it, so put it back into
# the dict returned.
user_copy['password'] = user['password']
for tenant_id in tenants:
try:
self.assignment_api.add_user_to_project(
tenant_id, user_copy['id'])
except exception.Conflict:
pass
# Use the ID from the fixture as the attribute name, so
# that our tests can easily reference each user dict, while
# the ID in the dict will be the real public ID.
attrname = 'user_%s' % user['id']
setattr(self, attrname, user_copy)
fixtures_to_cleanup.append(attrname)
self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup))
def _paste_config(self, config):
if not config.startswith('config:'):
test_path = os.path.join(TESTSDIR, config)
etc_path = os.path.join(ROOTDIR, 'etc', config)
for path in [test_path, etc_path]:
if os.path.exists('%s-paste.ini' % path):
return 'config:%s-paste.ini' % path
return config
def loadapp(self, config, name='main'):
return service.loadapp(self._paste_config(config), name=name)
def clear_auth_plugin_registry(self):
auth.controllers.AUTH_METHODS.clear()
auth.controllers.AUTH_PLUGINS_LOADED = False
    def assertCloseEnoughForGovernmentWork(self, a, b, delta=3):
        """Assert that two datetimes are nearly equal within a small delta.
        :param delta: Maximum allowable time delta, defined in seconds.
        """
        msg = '%s != %s within %s delta' % (a, b, delta)
        # Use total_seconds() so differences spanning whole days are not
        # silently ignored (timedelta.seconds excludes the days component).
        self.assertTrue(abs(a - b).total_seconds() <= delta, msg)
    def assertNotEmpty(self, iterable):
        self.assertTrue(len(iterable))
def assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regexp."""
try:
callable_obj(*args, **kwargs)
except expected_exception as exc_value:
if isinstance(expected_regexp, six.string_types):
expected_regexp = re.compile(expected_regexp)
            if isinstance(exc_value.args[0], six.text_type):
                if not expected_regexp.search(six.text_type(exc_value)):
                    raise self.failureException(
                        '"%s" does not match "%s"' %
                        (expected_regexp.pattern, six.text_type(exc_value)))
else:
if not expected_regexp.search(str(exc_value)):
raise self.failureException(
'"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
else:
if hasattr(expected_exception, '__name__'):
excName = expected_exception.__name__
else:
excName = str(expected_exception)
raise self.failureException("%s not raised" % excName)
@property
def ipv6_enabled(self):
if socket.has_ipv6:
sock = None
try:
sock = socket.socket(socket.AF_INET6)
# NOTE(Mouad): Try to bind to IPv6 loopback ip address.
sock.bind(("::1", 0))
return True
except socket.error:
pass
finally:
if sock:
sock.close()
return False
def skip_if_no_ipv6(self):
if not self.ipv6_enabled:
raise self.skipTest("IPv6 is not enabled in the system")
def skip_if_env_not_set(self, env_var):
if not os.environ.get(env_var):
self.skipTest('Env variable %s is not set.' % env_var)
class SQLDriverOverrides(object):
"""A mixin for consolidating sql-specific test overrides."""
def config_overrides(self):
super(SQLDriverOverrides, self).config_overrides()
# SQL specific driver overrides
self.config_fixture.config(group='catalog', driver='sql')
self.config_fixture.config(group='identity', driver='sql')
self.config_fixture.config(group='policy', driver='sql')
self.config_fixture.config(group='revoke', driver='sql')
self.config_fixture.config(group='token', driver='sql')
self.config_fixture.config(group='trust', driver='sql')
|
|
"""Contains wrappers to quickly work with bioinformatics tools
"""
import re
import os
import glob
import h5py
import numpy as np
import pandas as pd
from ggr.util.utils import run_shell_cmd
def run_homer(
bed_file,
background_bed_file,
out_dir,
mknown=None,
parallel=24):
"""Generic wrapper to run homer on a set of regions (vs background)
"""
assert bed_file.endswith(".gz")
assert background_bed_file.endswith(".gz")
run_homer_cmd = (
"findMotifsGenome.pl <(zcat {0}) hg19 {2} "
"-bg <(zcat {1}) "
"-p {3} "
"-nomotif").format(
bed_file,
background_bed_file,
out_dir,
parallel)
if mknown is not None:
run_homer_cmd += " -mknown {}".format(mknown)
    print(run_homer_cmd)
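    # findMotifsGenome.pl is fed "<(zcat ...)" process substitutions, which
    # are a bash feature; os.system runs /bin/sh, so stash the command in a
    # variable and re-execute it explicitly under /bin/bash.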
os.system('GREPDB="{}"; /bin/bash -c "$GREPDB"'.format(run_homer_cmd))
return None
def run_great(
bed_file,
background_bed_file,
out_dir):
"""Generic wrapper to run GREAT from R (rGREAT package)
"""
assert bed_file.endswith(".gz")
assert background_bed_file.endswith(".gz")
prefix = os.path.basename(bed_file).split(".bed")[0].split(".narrowPeak")[0]
run_rgreat = (
"bioinformatics.go.rgreat.R {0} {1} {2}").format(
bed_file,
background_bed_file,
"{}/{}".format(out_dir, prefix))
run_shell_cmd(run_rgreat)
return None
def run_gprofiler(
gene_set_file,
background_gene_set_file,
out_dir,
ordered=False,
header=True):
"""
"""
if ordered:
ordered_val = 1
else:
ordered_val = 0
if header:
header_val = 1
else:
header_val = 0
#gprofiler_cmd = "bioinformatics.go.gProfileR.R {} {} {} {}".format(
gprofiler_cmd = "Rscript /users/dskim89/git/ggr-project/R/bioinformatics.go.gProfileR.R {} {} {} {} {}".format(
gene_set_file,
background_gene_set_file,
out_dir,
header_val,
ordered_val)
run_shell_cmd(gprofiler_cmd)
return
def run_gsea(rank_file, gene_sets=None):
    """Take a ranked gene list and run GSEA.
    Make sure the file has HGNC ids and rank values.
    """
    # TODO: not implemented yet; intentionally a no-op for now.
    return
def run_bioinformatics_on_bed(bed_file, background_bed_file, out_dir,
mknown=None, mknown_name="CUSTOM"):
"""Given a bed file and background bed file,
run HOMER/GREAT
mknown - homer style motifs file to run with different motifs
"""
assert bed_file.endswith(".gz")
assert background_bed_file.endswith(".gz")
prefix = os.path.basename(bed_file).split(".bed")[0].split(".narrowPeak")[0]
# run homer
homer_dir = "{}/homer/{}".format(out_dir, prefix)
run_shell_cmd("mkdir -p {}".format(homer_dir))
run_homer(
bed_file,
background_bed_file,
homer_dir)
# run homer with custom motifs if needed
if mknown is not None:
homer_dir = "{}/homer_{}/{}".format(out_dir, mknown_name, prefix)
run_shell_cmd("mkdir -p {}".format(homer_dir))
run_homer(
bed_file,
background_bed_file,
homer_dir,
mknown=mknown)
# run GREAT
great_dir = "{}/great/{}".format(out_dir, prefix)
run_shell_cmd("mkdir -p {}".format(great_dir))
run_great(
bed_file,
background_bed_file,
great_dir)
return homer_dir, great_dir
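# Example usage (a sketch; the paths are hypothetical):
#
#     homer_dir, great_dir = run_bioinformatics_on_bed(
#         "results/dynamic.cluster_1.bed.gz",
#         "results/all_regions.bed.gz",
#         "results/enrichment")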
def aggregate_homer_results_h5(
pwm_file, work_dir, out_file, qval_thresh=0.10):
"""take the homer results and aggregate back into
a pvals file for easy use in downstream analyses with tronn
"""
# read in pwm file to get index ordering
pwm_idx = 0
name_to_idx = {}
pwm_names = []
with open(pwm_file, "r") as fp:
for line in fp:
if line.startswith(">"):
pwm_name = line.strip()
pwm_name = re.sub(">HCLUST-\d+_", "", pwm_name)
pwm_name = re.sub(".UNK.+", "", pwm_name)
name_to_idx[pwm_name] = pwm_idx
pwm_names.append(pwm_name)
pwm_idx += 1
# get results files
results_files = sorted(
glob.glob("{}/*/knownResults.txt".format(work_dir)))
# GGR - drop 15
results_files = [filename for filename in results_files
if "cluster_15" not in filename]
# for each results file, pull in results, filter, and save out
for results_idx in range(len(results_files)):
results_file = results_files[results_idx]
# read in results file
results = pd.read_csv(results_file, sep="\t")
# filter for those with appropriate q value
results = results[results["q-value (Benjamini)"] < qval_thresh]
# then take these and put into appropriate vector
        pvals = np.ones((1, len(name_to_idx)))
for pwm_row_idx in range(results.shape[0]):
pwm_name = results.iloc[pwm_row_idx]["Motif Name"]
pwm_vector_idx = name_to_idx[pwm_name]
# insert
pvals[0, pwm_vector_idx] = results.iloc[
pwm_row_idx]["q-value (Benjamini)"]
# sig vector - just the bool version
sig = np.any(pvals < qval_thresh, axis=0).astype(int)
# save to h5 file
with h5py.File(out_file, "a") as hf:
pvals_key = "pvals/TRAJ_LABELS-{}/pvals".format(results_idx)
sig_key = "pvals/TRAJ_LABELS-{}/sig".format(results_idx)
hf.create_dataset(pvals_key, data=pvals)
hf.create_dataset(sig_key, data=sig)
hf[pvals_key].attrs["pwm_names"] = pwm_names
hf[sig_key].attrs["pwm_names"] = pwm_names
# sanity check
print "file had {} sig at qvalue {}, saved out {}".format(
results.shape[0], qval_thresh, np.sum(pvals < qval_thresh))
# make foregrounds metadata and save to h5 file
traj_order = [0, 7, 8, 10, 11, 9, 12, 13, 1, 2, 3, 4, 5]
foregrounds = ["TRAJ_LABELS={}".format(traj) for traj in traj_order]
foregrounds_keys = ["TRAJ_LABELS-{}".format(traj) for traj in traj_order]
with h5py.File(out_file, "a") as hf:
hf["pvals"].attrs["foregrounds"] = foregrounds
hf["pvals"].attrs["foregrounds.keys"] = foregrounds_keys
return
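# Example: reading the aggregated results back out (a sketch; the keys follow
# the "pvals/TRAJ_LABELS-{i}" layout written above, and the file name is
# hypothetical):
#
#     with h5py.File("homer.pvals.h5", "r") as hf:
#         pvals = hf["pvals/TRAJ_LABELS-0/pvals"][:]
#         pwm_names = hf["pvals/TRAJ_LABELS-0/pvals"].attrs["pwm_names"]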
def make_deeptools_heatmap(
point_file,
bigwig_files,
prefix,
sort=False,
kval=4,
referencepoint='TSS',
extend_dist=2000, # 1000
bin_total=100,
color="Blues"):
"""Uses deeptools to make a profile heatmap
"""
    # set up bin size (integer division; deeptools expects a whole number)
    bin_size = extend_dist * 2 // bin_total
# compute matrix - extract from bigwigs
point_matrix = '{}.point.mat.gz'.format(prefix)
deeptools_compute_matrix = (
"computeMatrix reference-point "
"--referencePoint {0} "
"-b {1} -a {1} -bs {2} "
"-R {3} "
"-S {4} "
#"--skipZeros "
"-o {5} ").format(
referencepoint,
extend_dist,
bin_size,
point_file,
' '.join(bigwig_files),
point_matrix)
if not os.path.isfile(point_matrix):
        print(deeptools_compute_matrix)
os.system(deeptools_compute_matrix)
# set up sample labels as needed
sample_labels = []
for bigwig_file in bigwig_files:
fields = os.path.basename(bigwig_file).split('.')
sample_labels.append('{0}_{1}_{2}'.format(fields[3].split('-')[0],
fields[0].split('-')[1],
fields[4]))
# make plot
point_plot = '{}.heatmap.profile.pdf'.format(prefix)
point_sorted_file = '{}.point.sorted.bed'.format(prefix)
    if not sort:
sorting = '--sortRegions=no'
elif kval == 0:
sorting = '--sortRegions=descend'
else:
sorting = '--kmeans {0} --regionsLabel {1}'.format(kval, ' '.join([str(i) for i in range(kval)]))
deeptools_plot_heatmap = (
"plotHeatmap -m {0} "
"-out {1} "
"--outFileSortedRegions {2} "
"--colorMap {3} "
"{4} "
"--samplesLabel {5} "
#"--zMax 450 " # TESTING
"--xAxisLabel '' "
"--refPointLabel Summit "
"--legendLocation none "
"--heatmapHeight 50 "
"--boxAroundHeatmaps no "
"--whatToShow 'heatmap and colorbar'").format(
point_matrix,
point_plot,
point_sorted_file,
color,
sorting,
' '.join(sample_labels))
if not os.path.isfile(point_plot):
        print(deeptools_plot_heatmap)
os.system(deeptools_plot_heatmap)
return None
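# Example usage (a sketch; file names are hypothetical, and the bigwig
# basenames must follow the dot-delimited naming that the sample-label
# parsing above expects):
#
#     make_deeptools_heatmap(
#         "peaks/summits.bed",
#         ["signal/GGR-b1.d00.pooled.fc-raw.rep1.bigwig"],
#         "results/summits",
#         referencepoint="center",
#         color="Reds")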
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
BlueButtonFHIR_API
FILE: views
Created: 3/8/16 1:45 AM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
import datetime
import json
import random
import requests
import string
from collections import OrderedDict
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.core.urlresolvers import reverse_lazy
from django.http import (HttpResponse,
HttpResponseRedirect)
from django.shortcuts import render_to_response
from django.template import RequestContext
from bbapi.utils import FhirServerUrl, notNone
from fhir.utils import (kickout_301,
kickout_400,
kickout_401,
kickout_403,
kickout_404,
kickout_500,
kickout_502,
kickout_504)
from fhir_io_hapi.utils import (error_status)
from accounts.models import User
from apps.v1api.models import Crosswalk
from apps.v1api.views.patient import re_write_url
@staff_member_required
def getpatient(request):
if settings.DEBUG:
print("in getpatient")
od = OrderedDict()
# Compile call to Patient search on FHIR Server
od['fhir_server'] = FhirServerUrl() + "/" + "Patient"
od['count'] = 10
od['parameters'] = "?_format=json&_count=" + str(od['count'])
# set count to 10 to return 10 items per page
j = get_page_content(od['fhir_server'] + od['parameters'])
if 'total' in j:
od['total'] = j['total']
else:
od['total'] = 0
then = datetime.datetime.now()
od['start'] = str(then)
if 'entry' in j:
od['entries'] = len(j['entry'])
else:
od['entries'] = 0
od['entry'] = []
if settings.DEBUG:
print("od:", od)
# print("j:", j)
if settings.DEBUG:
print("Entries:", od['entries'])
# Now we have the number of entries
# and the contents of the entries
# pass the content, current count and total to function to get patient info
bundle_count = od['entries']
rt = 0
while rt < od['total']:
x = 0
while x < notNone(bundle_count,0):
if settings.DEBUG:
# Print every 100 lines
if x % 100 == 0:
print("running for:", datetime.datetime.now()-then)
print("x:", rt+x)
print("OD:", od)
# print("entries:", len(j['entry']))
if 'entry' in j:
result = extract_info(j['entry'][x])
od['entry'].append(result['id'])
x += 1
if x >= notNone(bundle_count,0):
# We need to request the next page
next_page = get_next_page(j)
if next_page != "":
j = get_page_content(next_page)
if 'entry' in j:
bundle_count = len(j['entry'])
rt += x
# if settings.DEBUG:
# print("rt:", rt)
od['result'] = str(j)
now = datetime.datetime.now()
    od['end'] = str(now)
od['elapsed'] = str(now - then)
od['processed'] = rt
# Check total
# while x <= 10 and y <= total
# get Patient from entity
# get fhir url id
# get name
# get identifier
# update crosswalk
# update user account
# Increment count while count less than total
if settings.DEBUG:
print("OD -result:",od['result'])
return HttpResponse(json.dumps(od, indent=4),
content_type="application/json")
def geteob(request):
"""
Process each crosswalk record.
Get the fhir_url_id
Construct an ExplanationOfBenefit call using patient=Patient/{xwalk.fhir_url_id}
Get the count
Write to xwalk.eob_cont
"""
server_call = FhirServerUrl() + "/ExplanationOfBenefit/?_format=json&patient=Patient/"
ctr = 0
od = OrderedDict()
od['server'] = FhirServerUrl()
od['api_call'] = "/setup/geteob"
then = datetime.datetime.now()
od['start'] = str(then)
for x in Crosswalk.objects.all():
patient_id = x.fhir_url_id
u = server_call + patient_id
j = get_page_content(u)
if 'total' in j:
x.eob_count = notNone(j["total"],0)
x.save()
ctr += 1
if ctr % 100 == 0:
print("processed ", ctr)
print("elapsed ", str(datetime.datetime.now()-then))
od['processed'] = ctr
now = datetime.datetime.now()
od['elapsed'] = str(now - then)
od['end'] = str(now)
return HttpResponse(json.dumps(od, indent=4),
content_type="application/json")
def get_next_page(j):
# Get the next page
next_page = ""
# print("Get Next Page from link:", json.dumps(j, indent=4))
for l in j['link']:
if l['relation'] == "next":
next_page = FhirServerUrl() + "?" + l['url'].split("?")[1]
return next_page
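# For reference, the "link" array of a FHIR search bundle typically looks
# like this (values illustrative), which is what the loop above scans:
#
#     "link": [
#         {"relation": "self", "url": "http://fhir.example.com/Patient?_format=json"},
#         {"relation": "next", "url": "http://fhir.example.com?_getpages=abc123&_getpagesoffset=10"}
#     ]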
def get_page_content(u):
# Call the page and return the result
try:
r = requests.get(u)
except requests.ConnectionError:
if settings.DEBUG:
print("Problem connecting to FHIR Server")
print("called:", u)
return HttpResponseRedirect(reverse_lazy('api:v1:home'))
# test for errors:
if r.status_code in [301, 302, 400, 403, 404, 500, 502, 504]:
return error_status(r, r.status_code)
pre_text = re_write_url(r.text)
try:
        j = json.loads(pre_text, object_pairs_hook=OrderedDict)
except ValueError:
if settings.DEBUG:
print("Problem with:", u)
print("returned:", pre_text)
j = {}
return j
def extract_info(item):
# Extract the Patient Entry
e = OrderedDict()
this = item
# print("this item:", this)
resource = this['resource']
e['id'] = resource['id']
e['identifier'] = resource['identifier'][0]['value']
if 'name' in resource:
e['first_name'] = resource['name'][0]['given'][0]
e['last_name'] = resource['name'][0]['family'][0]
if 'telecom' in resource:
e['phone'] = resource['telecom'][0]['value']
e['email'] = resource['telecom'][1]['value']
e['user'] = write_user_account(e)
return e
def write_user_account(e):
# Write user record
# user-name = "U" + e['id']
# password = "P" + e['id']
# ## u.set_password('new password')
# email_address = e['email']
# first_name
# last_name
# is_active = True
# is_user = True
rand_str_first = ''.join(random.sample(string.ascii_lowercase, 6))
rand_str_last = ''.join(random.sample(string.ascii_lowercase, 8))
# print(rand_str)
try:
u = User.objects.get(username="u"+e['id'])
# if settings.DEBUG:
# print("Updating:", "u"+e['id'])
        if 'email' in e:
            u.email = rand_str_first + "." + rand_str_last + "." + e['email']
if 'first_name' in e:
u.first_name = e['first_name']
else:
u.first_name = ""
if 'last_name' in e:
u.last_name = e['last_name']
else:
u.last_name = ""
u.set_password('p'+e['id'])
except User.DoesNotExist:
if 'first_name' in e:
first_name = e['first_name']
else:
first_name = ""
if 'last_name' in e:
last_name = e['last_name']
else:
last_name = ""
if 'email' in e:
rand_email = rand_str_first +"."+ rand_str_first + "." +e['email']
else:
rand_email = rand_str_first + rand_str_last+ "@example.com"
u = User.objects.create_user(username="u"+e['id'],
email=rand_email,
first_name=first_name,
last_name=last_name,
password='p'+e['id'])
if 'email' in e:
u.email = rand_str_first +"."+ rand_str_first + "." +e['email']
else:
u.email = rand_str_first + rand_str_last +"@example.com"
u.is_active = True
u.is_user = True
u.is_developer = False
u.save()
# write Crosswalk
try:
c = Crosswalk.objects.get(user=u)
c.fhir_url_id = e['id']
# c.eob_count = get_eob_count(e)
c.save()
# if settings.DEBUG:
# print("Updating Crosswalk:",c.user)
except Crosswalk.DoesNotExist:
c = Crosswalk.objects.create(user=u, fhir_url_id=e['id'])
# if settings.DEBUG:
# print("Creating Crosswalk:", c.user)
return u.username+",p" + e['id']+","+u.email
def get_eob_count(e):
"""
Do EOB Search for patient=Patient/{e['id']}
Get count
"""
pass_to = FhirServerUrl() + "/" +"ExplanationOfBenefit"
pass_to += "?_format=json&patient=Patient/"
pass_to += e['id']
eob = get_page_content(pass_to)
    # eob search bundle returned in json format
eob_count = notNone(eob['total'], 0)
return eob_count
def user_list(request, fmt="html"):
"""
Print a userlist
"""
xwalk = Crosswalk.objects.all()
od = OrderedDict()
if fmt.lower() == "html":
od['userlist'] = "<table><tr><td>User</td><td>Password</td><td>FHIR Id</td><td>EOB Count</td></tr>"
else:
od['userlist'] = []
    for x in xwalk:
        # Skip staff accounts and users without EOB records.
        if x.eob_count <= 0 or x.user.is_staff:
            continue
        if x.user.username == "u" + x.fhir_url_id:
            if fmt.lower() == "html":
                od['userlist'] += "<tr>"
                od['userlist'] += "<td>" + x.user.username + "</td>"
                od['userlist'] += "<td>p" + x.user.username[1:] + "</td>"
                od['userlist'] += "<td>" + x.fhir_url_id + "</td>"
                od['userlist'] += "<td>" + str(x.eob_count) + "</td>"
                od['userlist'] += "</tr>"
            else:
                od['userlist'].append({'user': x.user.username,
                                       'password': "p" + x.user.username[1:],
                                       'fhir_url_id': x.fhir_url_id,
                                       'eob_count': x.eob_count})
if fmt.lower() == "html":
od['userlist'] += "</table>"
context = {'display': 'User List',
'name':'UserList',
'key': fmt,
'get_fmt': fmt,
'text': od['userlist'],
}
return render_to_response('v1api/userlist.html',
RequestContext(request, context ))
else:
return HttpResponse(json.dumps(od, indent=4),
content_type="application/json")
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
from openfermion.hamiltonians.jellium import (
dual_basis_jellium_model,
dual_basis_kinetic,
dual_basis_potential,
hypercube_grid_with_given_wigner_seitz_radius_and_filling,
jellium_model,
jordan_wigner_dual_basis_jellium,
plane_wave_kinetic,
plane_wave_potential,
wigner_seitz_length_scale,
)
from openfermion.ops.operators import FermionOperator, QubitOperator
from openfermion.transforms.opconversions import jordan_wigner
from openfermion.linalg import get_sparse_operator, eigenspectrum
from openfermion.utils import count_qubits, Grid, is_hermitian
class WignerSeitzRadiusTest(unittest.TestCase):
def test_wigner_seitz_radius_1d(self):
wigner_seitz_radius = 3.17
n_particles = 20
one_d_test = wigner_seitz_length_scale(wigner_seitz_radius, n_particles,
1)
self.assertAlmostEqual(one_d_test,
n_particles * 2. * wigner_seitz_radius)
def test_wigner_seitz_radius_2d(self):
wigner_seitz_radius = 0.5
n_particles = 3
two_d_test = wigner_seitz_length_scale(wigner_seitz_radius, n_particles,
2)**2.
self.assertAlmostEqual(two_d_test,
n_particles * numpy.pi * wigner_seitz_radius**2.)
def test_wigner_seitz_radius_3d(self):
wigner_seitz_radius = 4.6
n_particles = 37
three_d_test = wigner_seitz_length_scale(wigner_seitz_radius,
n_particles, 3)**3.
self.assertAlmostEqual(
three_d_test,
n_particles * (4. * numpy.pi / 3. * wigner_seitz_radius**3.))
def test_wigner_seitz_radius_6d(self):
wigner_seitz_radius = 5.
n_particles = 42
six_d_test = wigner_seitz_length_scale(wigner_seitz_radius, n_particles,
6)**6
self.assertAlmostEqual(
six_d_test,
n_particles * (numpy.pi**3 / 6 * wigner_seitz_radius**6))
def test_wigner_seitz_radius_bad_dimension_not_integer(self):
with self.assertRaises(ValueError):
_ = wigner_seitz_length_scale(3, 2, dimension=4.2)
def test_wigner_seitz_radius_bad_dimension_not_positive(self):
with self.assertRaises(ValueError):
_ = wigner_seitz_length_scale(3, 2, dimension=0)
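# The expected values in the tests above all follow from the volume of a
# d-dimensional ball, V_d(r) = pi**(d / 2) / gamma(d / 2 + 1) * r**d, so the
# length scale L satisfies L**d = n_particles * V_d(wigner_seitz_radius);
# in 1D this reduces to L = 2 * wigner_seitz_radius * n_particles.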
class HypercubeGridTest(unittest.TestCase):
def test_1d_generation(self):
dim = 1
orbitals = 4
wigner_seitz_radius = 7.
grid = hypercube_grid_with_given_wigner_seitz_radius_and_filling(
dim, orbitals, wigner_seitz_radius)
self.assertEqual(grid.dimensions, 1)
self.assertEqual(grid.length, (4,))
self.assertEqual(grid.volume_scale(), orbitals * wigner_seitz_radius)
def test_generation_away_from_half_filling(self):
dim = 1
orbitals = 100
wigner_seitz_radius = 7.
filling = 0.2
grid = hypercube_grid_with_given_wigner_seitz_radius_and_filling(
dim, orbitals, wigner_seitz_radius, filling_fraction=filling)
self.assertEqual(grid.dimensions, 1)
self.assertEqual(grid.length, (100,))
self.assertAlmostEqual(grid.volume_scale(),
orbitals * wigner_seitz_radius / 2.5)
def test_generation_with_spin(self):
dim = 2
orbitals = 4
wigner_seitz_radius = 10.
spinless = False
grid = hypercube_grid_with_given_wigner_seitz_radius_and_filling(
dim, orbitals, wigner_seitz_radius, spinless=spinless)
self.assertEqual(grid.dimensions, 2)
self.assertEqual(grid.length, (4, 4))
self.assertAlmostEqual(grid.volume_scale(), numpy.pi * 16 * 100.)
def test_3d_generation_with_rounding(self):
filling = 0.42
grid = hypercube_grid_with_given_wigner_seitz_radius_and_filling(
3, 5, 1., filling_fraction=filling)
self.assertEqual(grid.dimensions, 3)
self.assertEqual(grid.length, (5, 5, 5))
# There are floor(125 * .42) = 52 particles.
# The volume scale should be 4/3 pi r^3 * the "true" filling fraction.
self.assertAlmostEqual(grid.volume_scale(),
(4. / 3.) * numpy.pi * (5.**3) * (52. / 125))
def test_raise_ValueError_filling_fraction_too_low(self):
with self.assertRaises(ValueError):
_ = hypercube_grid_with_given_wigner_seitz_radius_and_filling(
3, 5, wigner_seitz_radius=10., filling_fraction=0.005)
def test_raise_ValueError_filling_fraction_too_high(self):
with self.assertRaises(ValueError):
_ = hypercube_grid_with_given_wigner_seitz_radius_and_filling(
1, 4, wigner_seitz_radius=1., filling_fraction=2.)
class JelliumTest(unittest.TestCase):
def test_kinetic_integration(self):
# Compute kinetic energy operator in both momentum and position space.
grid = Grid(dimensions=2, length=2, scale=3.)
spinless = False
momentum_kinetic = plane_wave_kinetic(grid, spinless)
position_kinetic = dual_basis_kinetic(grid, spinless)
# Confirm they are Hermitian
momentum_kinetic_operator = get_sparse_operator(momentum_kinetic)
self.assertTrue(is_hermitian(momentum_kinetic_operator))
position_kinetic_operator = get_sparse_operator(position_kinetic)
self.assertTrue(is_hermitian(position_kinetic_operator))
# Confirm spectral match and hermiticity
for length in [2, 3, 4]:
grid = Grid(dimensions=1, length=length, scale=2.1)
spinless = False
momentum_kinetic = plane_wave_kinetic(grid, spinless)
position_kinetic = dual_basis_kinetic(grid, spinless)
# Confirm they are Hermitian
momentum_kinetic_operator = get_sparse_operator(momentum_kinetic)
self.assertTrue(is_hermitian(momentum_kinetic_operator))
position_kinetic_operator = get_sparse_operator(position_kinetic)
self.assertTrue(is_hermitian(position_kinetic_operator))
# Diagonalize and confirm the same energy.
jw_momentum = jordan_wigner(momentum_kinetic)
jw_position = jordan_wigner(position_kinetic)
momentum_spectrum = eigenspectrum(jw_momentum, 2 * length)
position_spectrum = eigenspectrum(jw_position, 2 * length)
# Confirm spectra are the same.
difference = numpy.amax(
numpy.absolute(momentum_spectrum - position_spectrum))
self.assertAlmostEqual(difference, 0.)
def test_potential_integration(self):
# Compute potential energy operator in momentum and position space.
for length in [2, 3]:
grid = Grid(dimensions=2, length=length, scale=2.)
spinless = True
momentum_potential = plane_wave_potential(grid, spinless)
position_potential = dual_basis_potential(grid, spinless)
# Confirm they are Hermitian
momentum_potential_operator = (
get_sparse_operator(momentum_potential))
self.assertTrue(is_hermitian(momentum_potential_operator))
position_potential_operator = (
get_sparse_operator(position_potential))
self.assertTrue(is_hermitian(position_potential_operator))
# Diagonalize and confirm the same energy.
jw_momentum = jordan_wigner(momentum_potential)
jw_position = jordan_wigner(position_potential)
momentum_spectrum = eigenspectrum(jw_momentum)
position_spectrum = eigenspectrum(jw_position)
# Confirm spectra are the same.
difference = numpy.amax(
numpy.absolute(momentum_spectrum - position_spectrum))
self.assertAlmostEqual(difference, 0.)
def test_model_integration(self):
# Compute Hamiltonian in both momentum and position space.
for length in [2, 3]:
grid = Grid(dimensions=2, length=length, scale=1.0)
spinless = True
momentum_hamiltonian = jellium_model(grid, spinless, True)
position_hamiltonian = jellium_model(grid, spinless, False)
# Confirm they are Hermitian
momentum_hamiltonian_operator = (
get_sparse_operator(momentum_hamiltonian))
self.assertTrue(is_hermitian(momentum_hamiltonian_operator))
position_hamiltonian_operator = (
get_sparse_operator(position_hamiltonian))
self.assertTrue(is_hermitian(position_hamiltonian_operator))
# Diagonalize and confirm the same energy.
jw_momentum = jordan_wigner(momentum_hamiltonian)
jw_position = jordan_wigner(position_hamiltonian)
momentum_spectrum = eigenspectrum(jw_momentum)
position_spectrum = eigenspectrum(jw_position)
# Confirm spectra are the same.
difference = numpy.amax(
numpy.absolute(momentum_spectrum - position_spectrum))
self.assertAlmostEqual(difference, 0.)
def test_model_integration_with_constant(self):
# Compute Hamiltonian in both momentum and position space.
length_scale = 0.7
for length in [2, 3]:
grid = Grid(dimensions=2, length=length, scale=length_scale)
spinless = True
# Include Madelung constant in the momentum but not the position
# Hamiltonian.
momentum_hamiltonian = jellium_model(grid,
spinless,
True,
include_constant=True)
position_hamiltonian = jellium_model(grid, spinless, False)
# Confirm they are Hermitian
momentum_hamiltonian_operator = (
get_sparse_operator(momentum_hamiltonian))
self.assertTrue(is_hermitian(momentum_hamiltonian_operator))
position_hamiltonian_operator = (
get_sparse_operator(position_hamiltonian))
self.assertTrue(is_hermitian(position_hamiltonian_operator))
# Diagonalize and confirm the same energy.
jw_momentum = jordan_wigner(momentum_hamiltonian)
jw_position = jordan_wigner(position_hamiltonian)
momentum_spectrum = eigenspectrum(jw_momentum)
position_spectrum = eigenspectrum(jw_position)
# Confirm momentum spectrum is shifted 2.8372/length_scale higher.
max_difference = numpy.amax(momentum_spectrum - position_spectrum)
            min_difference = numpy.amin(momentum_spectrum - position_spectrum)
self.assertAlmostEqual(max_difference, 2.8372 / length_scale)
self.assertAlmostEqual(min_difference, 2.8372 / length_scale)
def test_coefficients(self):
# Test that the coefficients post-JW transform are as claimed in paper.
grid = Grid(dimensions=2, length=3, scale=2.)
spinless = 1
n_orbitals = grid.num_points
n_qubits = (2**(1 - spinless)) * n_orbitals
volume = grid.volume_scale()
# Kinetic operator.
kinetic = dual_basis_kinetic(grid, spinless)
qubit_kinetic = jordan_wigner(kinetic)
# Potential operator.
potential = dual_basis_potential(grid, spinless)
qubit_potential = jordan_wigner(potential)
# Check identity.
identity = tuple()
kinetic_coefficient = qubit_kinetic.terms[identity]
potential_coefficient = qubit_potential.terms[identity]
paper_kinetic_coefficient = 0.
paper_potential_coefficient = 0.
for indices in grid.all_points_indices():
momenta = grid.momentum_vector(indices)
paper_kinetic_coefficient += float(n_qubits) * momenta.dot(
momenta) / float(4. * n_orbitals)
if momenta.any():
potential_contribution = -numpy.pi * float(n_qubits) / float(
2. * momenta.dot(momenta) * volume)
paper_potential_coefficient += potential_contribution
self.assertAlmostEqual(kinetic_coefficient, paper_kinetic_coefficient)
self.assertAlmostEqual(potential_coefficient,
paper_potential_coefficient)
# Check Zp.
for p in range(n_qubits):
zp = ((p, 'Z'),)
kinetic_coefficient = qubit_kinetic.terms[zp]
potential_coefficient = qubit_potential.terms[zp]
paper_kinetic_coefficient = 0.
paper_potential_coefficient = 0.
for indices in grid.all_points_indices():
momenta = grid.momentum_vector(indices)
paper_kinetic_coefficient -= momenta.dot(momenta) / float(
4. * n_orbitals)
if momenta.any():
potential_contribution = numpy.pi / float(
momenta.dot(momenta) * volume)
paper_potential_coefficient += potential_contribution
self.assertAlmostEqual(kinetic_coefficient,
paper_kinetic_coefficient)
self.assertAlmostEqual(potential_coefficient,
paper_potential_coefficient)
# Check Zp Zq.
if spinless:
spins = [None]
for indices_a in grid.all_points_indices():
for indices_b in grid.all_points_indices():
potential_coefficient = 0.
paper_kinetic_coefficient = 0.
paper_potential_coefficient = 0.
position_a = grid.position_vector(indices_a)
position_b = grid.position_vector(indices_b)
differences = position_b - position_a
for spin_a in spins:
for spin_b in spins:
p = grid.orbital_id(indices_a, spin_a)
q = grid.orbital_id(indices_b, spin_b)
if p == q:
continue
zpzq = ((min(p, q), 'Z'), (max(p, q), 'Z'))
if zpzq in qubit_potential.terms:
potential_coefficient = qubit_potential.terms[zpzq]
for indices_c in grid.all_points_indices():
momenta = grid.momentum_vector(indices_c)
if momenta.any():
potential_contribution = numpy.pi * numpy.cos(
differences.dot(momenta)) / float(
momenta.dot(momenta) * volume)
paper_potential_coefficient += (
potential_contribution)
self.assertAlmostEqual(potential_coefficient,
paper_potential_coefficient)
def test_jordan_wigner_dual_basis_jellium(self):
# Parameters.
grid = Grid(dimensions=2, length=3, scale=1.)
spinless = True
# Compute fermionic Hamiltonian. Include then subtract constant.
fermion_hamiltonian = dual_basis_jellium_model(grid,
spinless,
include_constant=True)
qubit_hamiltonian = jordan_wigner(fermion_hamiltonian)
qubit_hamiltonian -= QubitOperator((), 2.8372)
# Compute Jordan-Wigner Hamiltonian.
test_hamiltonian = jordan_wigner_dual_basis_jellium(grid, spinless)
# Make sure Hamiltonians are the same.
        self.assertEqual(test_hamiltonian, qubit_hamiltonian)
# Check number of terms.
n_qubits = count_qubits(qubit_hamiltonian)
if spinless:
paper_n_terms = 1 - .5 * n_qubits + 1.5 * (n_qubits**2)
num_nonzeros = sum(
1 for coeff in qubit_hamiltonian.terms.values() if coeff != 0.0)
self.assertTrue(num_nonzeros <= paper_n_terms)
def test_jordan_wigner_dual_basis_jellium_constant_shift(self):
length_scale = 0.6
grid = Grid(dimensions=2, length=3, scale=length_scale)
spinless = True
hamiltonian_without_constant = jordan_wigner_dual_basis_jellium(
grid, spinless, include_constant=False)
hamiltonian_with_constant = jordan_wigner_dual_basis_jellium(
grid, spinless, include_constant=True)
difference = hamiltonian_with_constant - hamiltonian_without_constant
expected = QubitOperator('') * (2.8372 / length_scale)
        self.assertEqual(expected, difference)
def test_plane_wave_energy_cutoff(self):
grid = Grid(dimensions=1, length=5, scale=1.0)
spinless = True
e_cutoff = 20.0
hamiltonian_1 = jellium_model(grid, spinless, True, False)
jw_1 = jordan_wigner(hamiltonian_1)
spectrum_1 = eigenspectrum(jw_1)
hamiltonian_2 = jellium_model(grid, spinless, True, False, e_cutoff)
jw_2 = jordan_wigner(hamiltonian_2)
spectrum_2 = eigenspectrum(jw_2)
max_diff = numpy.amax(numpy.absolute(spectrum_1 - spectrum_2))
self.assertGreater(max_diff, 0.)
def test_plane_wave_period_cutoff(self):
# TODO: After figuring out the correct formula for period cutoff for
# dual basis, change period_cutoff to default, and change
# hamiltonian_1 to a real jellium_model for real integration test.
grid = Grid(dimensions=2, length=2, scale=1.0)
spinless = True
period_cutoff = 0.
hamiltonian_1 = FermionOperator()
jw_1 = jordan_wigner(hamiltonian_1)
spectrum_1 = eigenspectrum(jw_1)
hamiltonian_2 = jellium_model(grid, spinless, True, False, None, True,
period_cutoff)
jw_2 = jordan_wigner(hamiltonian_2)
spectrum_2 = eigenspectrum(jw_2)
max_diff = numpy.amax(numpy.absolute(spectrum_1 - spectrum_2))
self.assertGreater(max_diff, 0.)
# TODO: This is only for code coverage. Remove after having real
# integration test.
jellium_model(grid, spinless, True, False, None, True)
jellium_model(grid, spinless, False, False, None, True)
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class StorageAccountCredentialsOperations(object):
"""StorageAccountCredentialsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.databoxedge.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_by_data_box_edge_device(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.StorageAccountCredentialList"]
"""Gets all the storage account credentials in a Data Box Edge/Data Box Gateway device.
Gets all the storage account credentials in a Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageAccountCredentialList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databoxedge.v2019_07_01.models.StorageAccountCredentialList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountCredentialList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_data_box_edge_device.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('StorageAccountCredentialList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_data_box_edge_device.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/storageAccountCredentials'} # type: ignore
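    # Example usage (a sketch; the management client construction and the
    # resource names are hypothetical):
    #
    #     creds = client.storage_account_credentials.list_by_data_box_edge_device(
    #         device_name="testedgedevice",
    #         resource_group_name="GroupForEdgeAutomation")
    #     for cred in creds:
    #         print(cred.name)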
def get(
self,
device_name, # type: str
name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.StorageAccountCredential"
"""Gets the properties of the specified storage account credential.
:param device_name: The device name.
:type device_name: str
:param name: The storage account credential name.
:type name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountCredential, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.StorageAccountCredential
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountCredential"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('StorageAccountCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/storageAccountCredentials/{name}'} # type: ignore
def _create_or_update_initial(
self,
device_name, # type: str
name, # type: str
resource_group_name, # type: str
storage_account_credential, # type: "_models.StorageAccountCredential"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.StorageAccountCredential"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccountCredential"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(storage_account_credential, 'StorageAccountCredential')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('StorageAccountCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/storageAccountCredentials/{name}'} # type: ignore
def begin_create_or_update(
self,
device_name, # type: str
name, # type: str
resource_group_name, # type: str
storage_account_credential, # type: "_models.StorageAccountCredential"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.StorageAccountCredential"]
"""Creates or updates the storage account credential.
:param device_name: The device name.
:type device_name: str
:param name: The storage account credential name.
:type name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param storage_account_credential: The storage account credential.
:type storage_account_credential: ~azure.mgmt.databoxedge.v2019_07_01.models.StorageAccountCredential
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either StorageAccountCredential or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databoxedge.v2019_07_01.models.StorageAccountCredential]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountCredential"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
device_name=device_name,
name=name,
resource_group_name=resource_group_name,
storage_account_credential=storage_account_credential,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('StorageAccountCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/storageAccountCredentials/{name}'} # type: ignore
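    # Example usage (a sketch; the client, model construction, and names are
    # hypothetical):
    #
    #     poller = client.storage_account_credentials.begin_create_or_update(
    #         device_name="testedgedevice",
    #         name="sac1",
    #         resource_group_name="GroupForEdgeAutomation",
    #         storage_account_credential=sac_model)
    #     result = poller.result()  # blocks until the LRO completes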
def _delete_initial(
self,
device_name, # type: str
name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/storageAccountCredentials/{name}'} # type: ignore
def begin_delete(
self,
device_name, # type: str
name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the storage account credential.
:param device_name: The device name.
:type device_name: str
:param name: The storage account credential name.
:type name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
device_name=device_name,
name=name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/storageAccountCredentials/{name}'} # type: ignore
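# A hypothetical usage sketch (not part of the generated client): it assumes
# a DataBoxEdgeManagementClient instance named ``client`` whose operations
# group attribute is ``storage_account_credentials``, plus illustrative
# device and resource-group names. It shows how the two pollers above are
# typically driven to completion.
#
#     poller = client.storage_account_credentials.begin_create_or_update(
#         device_name="testedgedevice",
#         name="sac1",
#         resource_group_name="GroupForEdgeAutomation",
#         storage_account_credential=credential_model,
#     )
#     credential = poller.result()   # block until the LRO completes
#
#     client.storage_account_credentials.begin_delete(
#         device_name="testedgedevice",
#         name="sac1",
#         resource_group_name="GroupForEdgeAutomation",
#     ).wait()                       # returns None on success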
|
|
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for haiku._src.pool."""
import functools
from absl.testing import absltest
from haiku._src import pool
from haiku._src import test_utils
import jax
import jax.numpy as jnp
import numpy as np
class MaxPoolTest(absltest.TestCase):
def test_max_pool_basic(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
x = np.broadcast_to(x, (2, 10, 6, 2))
window_shape = [1, 2, 2, 1]
result = pool.max_pool(
x, window_shape=window_shape, strides=window_shape, padding="VALID")
ground_truth = np.asarray([1., 3., 5.]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, (2, 5, 3, 2))
np.testing.assert_equal(result, ground_truth)
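# Worked check of the expectation above: along the length-6 axis the input is
# [0, 1, 2, 3, 4, 5]; a window of 2 with stride 2 covers the pairs (0, 1),
# (2, 3), (4, 5), whose maxima are [1, 3, 5]. The length-10 axis shrinks from
# 10 to 5 the same way, while the size-1 window entries leave the broadcast
# batch and channel dimensions untouched.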
def test_max_pool_unbatched(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
leading_dims = (2, 3)
x = np.broadcast_to(x, leading_dims + (10, 6, 2))
window_shape = [2, 2, 1]
result = pool.max_pool(
x, window_shape=window_shape, strides=window_shape, padding="VALID")
ground_truth = np.asarray([1., 3., 5.]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, leading_dims + (5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_max_pool_unbatched_vmapped(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
leading_dims = (2, 3)
x = np.broadcast_to(x, leading_dims + (10, 6, 2))
window_shape = [2, 2, 1]
max_pool_fn = functools.partial(
pool.max_pool,
window_shape=window_shape,
strides=window_shape,
padding="VALID")
result = jax.vmap(jax.vmap(max_pool_fn))(x)
ground_truth = np.asarray([1., 3., 5.]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, leading_dims + (5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_max_pool_batch_vs_vmap(self):
key = jax.random.PRNGKey(42)
batch = jax.random.normal(key, [8, 28, 28, 3])
p = functools.partial(pool.max_pool, window_shape=(4, 4, 1),
strides=(2, 2, 1), padding="VALID")
np.testing.assert_allclose(p(batch), jax.vmap(p)(batch))
def test_max_pool_overlapping_windows(self):
x = np.arange(12, dtype=jnp.float32).reshape([6, 2])
x = np.broadcast_to(x, (2, 10, 6, 2))
window_shape = [1, 5, 3, 2]
strides = [1, 1, 3, 2]
result = pool.max_pool(
x, window_shape=window_shape, strides=strides, padding="VALID")
ground_truth = np.asarray([5., 11.,]).reshape([2, 1])
ground_truth = np.broadcast_to(ground_truth, (2, 6, 2, 1))
np.testing.assert_equal(result, ground_truth)
def test_max_pool_same_padding(self):
x = np.arange(6, dtype=jnp.float32)
x = np.broadcast_to(x, (2, 3, 6))
window_shape = [1, 3, 3]
strides = [1, 1, 1]
result = pool.max_pool(
x, window_shape=window_shape, strides=strides, padding="SAME")
np.testing.assert_equal(result.shape, x.shape)
@test_utils.transform_and_run
def test_max_pool_same_padding_class(self):
x = np.arange(6, dtype=jnp.float32)
x = np.broadcast_to(x, (2, 3, 6))
window_shape = [1, 3, 3]
strides = [1, 1, 1]
max_pool = pool.MaxPool(
window_shape=window_shape, strides=strides, padding="SAME")
result = max_pool(x)
np.testing.assert_equal(result.shape, x.shape)
def test_max_pool_basic_with_inferred_shapes(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
x = np.broadcast_to(x, (2, 10, 6, 2))
result = pool.max_pool(x, 2, 2, padding="VALID")
ground_truth = np.asarray([1., 3., 5.]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, (2, 5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_max_pool_same_padding_with_inferred_shapes(self):
x = np.arange(6, dtype=jnp.float32)
x = np.broadcast_to(x, (2, 3, 6))
result = pool.max_pool(x, 3, 1, padding="SAME", channel_axis=None)
np.testing.assert_equal(result.shape, x.shape)
@test_utils.transform_and_run
def test_max_pool_same_padding_class_with_inferred_shapes(self):
x = np.arange(6, dtype=jnp.float32)
x = np.broadcast_to(x, (2, 3, 6))
max_pool = pool.MaxPool(3, 1, padding="SAME", channel_axis=None)
result = max_pool(x)
np.testing.assert_equal(result.shape, x.shape)
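# Shape arithmetic behind the tests above and below (standard reduce_window
# rules): for input length n, window w and stride s,
#   VALID: out = floor((n - w) / s) + 1, e.g. n=6, w=2, s=2 -> out = 3
#   SAME:  out = ceil(n / s),            e.g. n=6, w=3, s=1 -> out = 6
# so SAME padding with stride 1 always preserves the input shape, which is
# exactly what the "same_padding" tests assert.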
class AvgPoolTest(absltest.TestCase):
def test_avg_pool_basic(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
x = np.broadcast_to(x, (2, 10, 6, 2))
window_shape = [1, 2, 2, 1]
result = pool.avg_pool(
x, window_shape=window_shape, strides=window_shape, padding="VALID")
ground_truth = np.asarray([0.5, 2.5, 4.5]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, (2, 5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_avg_pool_unbatched(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
leading_dims = (2, 3)
x = np.broadcast_to(x, leading_dims + (10, 6, 2))
window_shape = [2, 2, 1]
result = pool.avg_pool(
x, window_shape=window_shape, strides=window_shape, padding="VALID")
ground_truth = np.asarray([0.5, 2.5, 4.5]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, leading_dims + (5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_avg_pool_unbatched_vmapped(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
leading_dims = (2, 3)
x = np.broadcast_to(x, leading_dims + (10, 6, 2))
window_shape = [2, 2, 1]
avg_pool_fn = functools.partial(
pool.avg_pool,
window_shape=window_shape,
strides=window_shape,
padding="VALID")
result = jax.vmap(jax.vmap(avg_pool_fn))(x)
ground_truth = np.asarray([0.5, 2.5, 4.5]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, leading_dims + (5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_avg_pool_batch_vs_vmap(self):
key = jax.random.PRNGKey(42)
batch = jax.random.normal(key, [8, 28, 28, 3])
p = functools.partial(pool.avg_pool, window_shape=(4, 4, 1),
strides=(2, 2, 1), padding="VALID")
np.testing.assert_allclose(p(batch), jax.vmap(p)(batch))
def test_avg_pool_overlapping_windows(self):
x = np.arange(12, dtype=jnp.float32).reshape([6, 2])
x = np.broadcast_to(x, (2, 10, 6, 2))
window_shape = [1, 5, 3, 2]
strides = [1, 1, 3, 2]
result = pool.avg_pool(
x, window_shape=window_shape, strides=strides, padding="VALID")
ground_truth = np.asarray([
2.5,
8.5,
]).reshape([2, 1])
ground_truth = np.broadcast_to(ground_truth, (2, 6, 2, 1))
np.testing.assert_almost_equal(result, ground_truth, decimal=5)
def test_avg_pool_same_padding(self):
x = np.ones((2, 3, 6))
window_shape = [1, 3, 3]
strides = [1, 1, 1]
result = pool.avg_pool(
x, window_shape=window_shape, strides=strides, padding="SAME")
np.testing.assert_equal(result.shape, x.shape)
# Since x is constant, its avg value should be itself.
np.testing.assert_equal(result, x)
@test_utils.transform_and_run
def test_avg_pool_same_padding_class(self):
x = np.ones((2, 3, 6))
window_shape = [1, 3, 3]
strides = [1, 1, 1]
avg_pool = pool.AvgPool(
window_shape=window_shape, strides=strides, padding="SAME")
result = avg_pool(x)
np.testing.assert_equal(result.shape, x.shape)
# Since x is constant, its avg value should be itself.
np.testing.assert_equal(result, x)
def test_avg_pool_basic_with_inferred_shapes(self):
x = np.arange(6, dtype=jnp.float32).reshape([6, 1])
x = np.broadcast_to(x, (2, 10, 6, 2))
result = pool.avg_pool(x, 2, 2, padding="VALID")
ground_truth = np.asarray([0.5, 2.5, 4.5]).reshape([3, 1])
ground_truth = np.broadcast_to(ground_truth, (2, 5, 3, 2))
np.testing.assert_equal(result, ground_truth)
def test_avg_pool_same_padding_with_inferred_shapes(self):
x = np.ones((2, 3, 6))
result = pool.avg_pool(x, 3, 1, padding="SAME", channel_axis=None)
np.testing.assert_equal(result.shape, x.shape)
# Since x is constant, its avg value should be itself.
np.testing.assert_equal(result, x)
@test_utils.transform_and_run
def test_avg_pool_same_padding_class_with_inferred_shapes(self):
x = np.ones((2, 3, 6))
result = pool.AvgPool(3, 1, padding="SAME", channel_axis=None)(x)
np.testing.assert_equal(result.shape, x.shape)
# Since x is constant, its avg value should be itself.
np.testing.assert_equal(result, x)
if __name__ == "__main__":
absltest.main()
|
|
import os
import json
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib2tikz import save as tikz_save
from matplotlib2tikz import get_tikz_code
from .. import tensorflow
class Analyzer:
"""DeepOBS analyzer class to generate result plots or get other summaries.
Args:
path (str): Path to the results folder. This folder should contain one
or multiple testproblem folders.
Attributes:
testproblems: Dictionary of test problems where the key is the
name of a test problem (e.g. ``cifar10_3c3d``) and the value is an
instance of the TestProblemAnalyzer class (see below).
"""
def __init__(self, path):
"""Initializes a new Analyzer instance.
Args:
path (str): Path to the results folder. This folder should contain one
or multiple testproblem folders.
"""
self.path = path
self.testproblems = self._read_testproblems()
def _read_testproblems(self):
"""Read all test problems (folders) in this results folder.
Returns:
dict: Dictionary of test problems, where the key is the test
problem's name and the value is an instance of the
TestProblemAnalyzer class.
"""
testproblems = dict()
for tp in os.listdir(self.path):
if os.path.isdir(os.path.join(self.path, tp)):
testproblems[tp] = TestProblemAnalyzer(self.path, tp)
return testproblems
class TestProblemAnalyzer:
"""DeepOBS analyzer class for a specific test problem.
This class will store all relevant information regarding a test problem,
such as the convergence performance of this problem.
Args:
path (str): Path to the parent folder of the test problem (i.e. the
results folder).
tp (str): Name of the test problem (same as the folder name).
Attributes:
name: Name of the test problem in DeepOBS format
(e.g. ``cifar10_3c3d``).
conv_perf: Convergence performance for this test problem.
metric: Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
optimizers: Dictionary of optimizers for this test problem where
the key is the name of the optimizer (e.g.
``GradientDescentOptimizer``) and the value is an instance of the
OptimizerAnalyzer class (see below).
"""
def __init__(self, path, tp):
"""Initializes a new TestProblemAnalyzer instance.
Args:
path (str): Path to the parent folder of the test problem (i.e. the
results folder).
tp (str): Name of the test problem (same as the folder name).
"""
self._path = os.path.join(path, tp)
self.name = tp
print("Setting up", self.name)
self.conv_perf = self._get_conv_perf()
if tp == 'quadratic_deep' or tp == 'mnist_vae' or tp == 'fmnist_vae':
self.metric = "test_losses"
else:
self.metric = "test_accuracies"
self.optimizers = self._read_optimizer()
def _read_optimizer(self):
"""Read all optimizer (folders) in a test problem (folder).
Returns:
dict: Dictionary of optimizers, where the key is the optimizer's name
and the value is an instance of the OptimizerAnalyzer class.
"""
optimizers = dict()
for opt in os.listdir(self._path):
optimizers[opt] = OptimizerAnalyzer(self._path, opt, self.metric,
self.name, self.conv_perf)
return optimizers
def _get_conv_perf(self):
"""Read the convergence performance for this test problem from a
dictionary in the baseline folder.
Returns:
float: Convergence performance for this test problem
"""
try:
with open(os.path.join(tensorflow.config.get_baseline_dir(),
"convergence_performance.json"), "r") as f:
return json.load(f)[self.name]
except IOError:
print("Warning: Could not find a convergence performance file.")
return 0.0
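# For reference, a minimal sketch of the file read above (keys and values
# purely illustrative): ``convergence_performance.json`` is assumed to map
# test problem names to a single float threshold, e.g.
#
#     {"cifar10_3c3d": 0.83, "mnist_vae": 30.0}
#
# so ``json.load(f)[self.name]`` looks up this problem's threshold directly.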
class OptimizerAnalyzer:
"""DeepOBS analyzer class for an optimizer (and a specific test problem).
This class will give access to all relevant information regarding this
optimizer such as the best performing hyperparameter setting or the number
of settings.
Args:
path (str): Path to the parent folder of the optimizer folder (i.e. the
test problem folder).
opt (str): Name of the optimizer (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this optimizer (folder)
belongs to.
conv_perf (float): Convergence performance of the test problem this
optimizer (folder) belongs to.
Attributes:
name: Name of the optimizer (folder).
metric: Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem: Name of the test problem this optimizer (folder)
belongs to.
conv_perf: Convergence performance for this test problem.
settings: Dictionary of hyperparameter settings for this
optimizer (on this test problem) where the key is the name of the
setting (folder) and the value is an instance of the
SettingAnalyzer class (see below).
num_settings: Total number of settings for this optimizer
(and test problem)
"""
def __init__(self, path, opt, metric, testproblem, conv_perf):
"""Initializes a new OptimizerAnalyzer instance.
Args:
path (str): Path to the parent folder of the optimizer folder (i.e.
the test problem folder).
opt (str): Name of the optimizer (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this optimizer (folder)
belongs to.
conv_perf (float): Convergence performance of the test problem this
optimizer (folder) belongs to.
"""
self._path = os.path.join(path, opt)
self.name = opt
self.metric = metric
self.testproblem = testproblem
self.conv_perf = conv_perf
self.settings = self._read_settings()
self.num_settings = len(self.settings)
self._best_setting_final = None
self._best_setting_best = None
def _read_settings(self):
"""Read all settings (folders) in a optimizer (folder).
Returns:
dict: Dictionary of settings, where the key is the setting's name
and the value is an instance of the SettingAnalyzer class.
"""
settings = dict()
for sett in os.listdir(self._path):
settings[sett] = SettingAnalyzer(self._path, sett, self.metric,
self.testproblem, self.conv_perf)
return settings
def get_best_setting_final(self):
"""Returns the setting for this optimizer that has the best final
performance using the metric (``test_losses`` or ``test_accuracies``)
defined for this test problem.
Returns:
SettingAnalyzer: Instance of the SettingAnalyzer class with the best
final performance
"""
if self._best_setting_final is not None:
return self._best_setting_final
else:
if self.metric == 'test_losses' or self.metric == 'train_losses':
current_best = np.inf
better = lambda x, y: x < y
elif self.metric == 'test_accuracies' or self.metric == 'train_accuracies':
current_best = -np.inf
better = lambda x, y: x > y
else:
raise RuntimeError("Metric unknown")
best_sett = None
for _, sett in self.settings.items():
val = sett.aggregate.final_value
if better(val, current_best):
current_best = val
best_sett = sett
self._best_setting_final = best_sett
return best_sett
def get_best_setting_best(self):
"""Returns the setting for this optimizer that has the best overall
performance using the metric (``test_losses`` or ``test_accuracies``)
defined for this test problem. In contrast to ``get_best_setting_final``,
it does not only look at the final performance per setting, but at the best
performance per setting.
Returns:
SettingAnalyzer: Instance of the SettingAnalyzer class with the best
overall performance
"""
if self._best_setting_best is not None:
return self._best_setting_best
else:
if self.metric == 'test_losses' or self.metric == 'train_losses':
current_best = np.inf
better = lambda x, y: x < y
elif self.metric == 'test_accuracies' or self.metric == 'train_accuracies':
current_best = -np.inf
better = lambda x, y: x > y
else:
raise RuntimeError("Metric unknown")
best_sett = None
for _, sett in self.settings.items():
val = sett.aggregate.best_value
if better(val, current_best):
current_best = val
best_sett = sett
self._best_setting_best = best_sett
return best_sett
def get_setting_most_runs(self):
"""Returns the setting with the most repeated runs (with the same
setting, but probably different seeds).
Returns:
SettingAnalyzer: Instance of the SettingAnalyzer class with the most
repeated runs.
"""
most_runs = 0
for _, sett in self.settings.items():
if sett.aggregate.num_runs > most_runs:
most_runs = sett.aggregate.num_runs
most_run_setting = sett
return most_run_setting
def plot_lr_sensitivity(self, ax, mode='final'):
"""Generates the ``learning rate`` sensitivity plot for this optimizer.
This plots the relative performance (relative to the best setting for
this optimizer) against the ``learning rate`` used in this setting.
This assumes that all settings are otherwise equal and differ only in
the ``learning rate``.
Args:
ax (matplotlib.axes): Handle to a matplotlib axis to plot the
``learning rate`` sensitivity onto.
mode (str): Whether to use the final (``final``) performance or the
best (``best``) when evaluating each setting.
Defaults to ``final``.
"""
rel_perf = []
lr = []
for _, sett in self.settings.items():
if mode == 'final':
val = sett.aggregate.final_value
best = self.get_best_setting_final().aggregate.final_value
elif mode == 'best':
val = sett.aggregate.best_value
best = self.get_best_setting_best().aggregate.best_value
else:
raise RuntimeError("Mode unknown")
if self.metric == 'test_losses' or self.metric == 'train_losses':
rel_perf.append(best / val)
elif self.metric == 'test_accuracies' or self.metric == 'train_accuracies':
rel_perf.append(val / best)
else:
raise RuntimeError("Metric unknown")
lr.append(sett.aggregate.output['learning_rate'])
rel_perf = np.nan_to_num(rel_perf) # replace NaN with zero
rel_perf = np.array(np.vstack((rel_perf, lr))).transpose()
rel_perf = rel_perf[rel_perf[:, 1].argsort()]
ax.plot(rel_perf[:, 1], rel_perf[:, 0], label=self.name)
ax.set_xscale('log')
ax.set_ylim([0.0, 1.0])
def plot_performance(self, ax, mode='most'):
"""Generates a performance plot for this optimzer using one
hyperparameter setting.
Can either use the setting with the best final performance, the best
overall performance or the setting with the most runs.
This function will plot all four possible performance metrics
(``test_losses``, ``train_losses``, ``test_accuracies`` and
``train_accuracies``).
Args:
ax (list): List of four matplotlib axes to plot the performance
metrics onto.
mode (str): Whether to use the setting with the best final
(``final``) performance, the best overall (``best``) performance
or the setting with the most runs (``most``) when plotting.
Defaults to ``most``.
"""
if mode == 'final':
run = self.get_best_setting_final()
elif mode == 'best':
run = self.get_best_setting_best()
elif mode == 'most':
run = self.get_setting_most_runs()
print("Plotting", run.aggregate.num_runs, "runs for ", self.name,
"on", run.aggregate.output['testproblem'])
else:
raise RuntimeError("Mode unknown")
for idx, metric in enumerate([
'test_losses', 'train_losses', 'test_accuracies',
'train_accuracies'
]):
ax[idx].plot(
run.aggregate.output[metric]['mean'],
label=run.aggregate.output['optimizer'])
ax[idx].fill_between(
range(run.aggregate.output[metric]['mean'].size),
run.aggregate.output[metric]['mean'] -
run.aggregate.output[metric]['std'],
run.aggregate.output[metric]['mean'] +
run.aggregate.output[metric]['std'],
color=ax[idx].get_lines()[-1].get_color(),
alpha=0.2)
def get_bm_table(self, perf_table, mode='most'):
"""Generates the overall performance table for this optimizer.
This includes metrics for the performance, speed and tuneability of this
optimizer (on this test problem).
Args:
perf_table (dict): A dictionary with three keys: ``Performance``,
``Speed`` and ``Tuneability``.
mode (str): Whether to use the setting with the best final
(``final``) performance, the best overall (``best``) performance
or the setting with the most runs (``most``).
Defaults to ``most``.
Returns:
dict: Dictionary holding the performance, speed and tuneability
measures for this optimizer.
"""
if mode == 'final':
run = self.get_best_setting_final()
elif mode == 'best':
run = self.get_best_setting_best()
elif mode == 'most':
run = self.get_setting_most_runs()
else:
raise RuntimeError("Mode unknown")
perf_table['Performance'][self.name] = run.aggregate.output[
self.metric]['mean'][-1]
perf_table['Speed'][self.name] = run.aggregate.output['speed']
perf_table['Tuneability'][self.name] = {
**{
'lr': '{:0.2e}'.format(run.aggregate.output['learning_rate'])
},
**run.aggregate.output['hyperparams']
}
return perf_table
class SettingAnalyzer:
"""DeepOBS analyzer class for a setting (a hyperparameter setting).
Args:
path (str): Path to the parent folder of the setting folder (i.e. the
optimizer folder).
sett (str): Name of the setting (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this setting (folder)
belongs to.
conv_perf (float): Convergence performance of the test problem this
setting (folder) belongs to.
Attributes:
name (str): Name of the setting (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this setting (folder)
belongs to.
conv_perf (float): Convergence performance for this test problem.
aggregate (AggregateRun): Instance of the AggregateRun class for all
runs with this setting.
"""
def __init__(self, path, sett, metric, testproblem, conv_perf):
"""Initializes a new SettingAnalyzer instance.
Args:
path (str): Path to the parent folder of the setting folder (i.e. the
optimizer folder).
sett (str): Name of the setting (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this setting (folder)
belongs to.
conv_perf (float): Convergence performance of the test problem this
setting (folder) belongs to.
"""
self._path = os.path.join(path, sett)
self.name = sett
self.metric = metric
self.testproblem = testproblem
self.conv_perf = conv_perf
self.aggregate = self._get_aggregate()
def _get_aggregate(self):
"""Create aggregate run for all runs in this setting folder.
Returns:
AggregateRun: Instance of the AggregateRun class holding the
aggregate information of all runs with these settings.
"""
runs = []
for r in os.listdir(self._path):
if r.endswith(".json"):
runs.append(r)
return AggregateRun(self._path, runs, self.name, self.metric,
self.testproblem, self.conv_perf)
class AggregateRun:
"""DeepOBS class for a group of runs witht the same settings (but possibly
different seeds).
Args:
path (str): Path to the parent folder of the aggregate run folder (i.e.
the settings folder).
runs (list): List of run names all with the same setting.
name (str): Name of the aggregate run (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this aggregate run (folder)
belongs to.
conv_perf (float): Convergence performance of the test problem this
aggregate run (folder) belongs to.
Attributes:
name: Name of the aggregate run (folder).
testproblem: Name of the test problem this aggregate run (folder)
belongs to.
conv_perf: Convergence performance for this test problem.
runs: List of run names all with the same setting.
num_runs: Number of runs (with the same setting).
metric: Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
output: Dictionary including all aggregate information of the
runs with this setting. All performance metrics have a mean and a
standard deviation (can be zero if there is only one run with this
setting).
final_value: Final (mean) value of the test problem's metric
best_value: Best (mean) value of the test problem's metric
"""
def __init__(self, path, runs, name, metric, testproblem, conv_perf):
"""Initializes a new AggregateRun class.
Args:
path (str): Path to the parent folder of the aggregate run folder (i.e.
the settings folder).
runs (list): List of run names all with the same setting.
name (str): Name of the aggregate run (folder).
metric (str): Metric to use for this test problem. If available this
will be ``test_accuracies``, otherwise ``test_losses``.
testproblem (str): Name of the test problem this aggregate run (folder)
belongs to.
conv_perf (float): Convergence performance of the test problem this
aggregate run (folder) belongs to.
"""
self._path = path
self.name = name
self.testproblem = testproblem
self.conv_perf = conv_perf
self.runs = runs
self.num_runs = len(runs)
self.metric = metric
self.output = self._aggregate()
self.final_value = self._get_final_value()
self.best_value = self._get_best_value()
def _aggregate(self):
"""Aggregate performance data over all runs.
Returns:
dict: Dictionary including all aggregate information of the
runs with this setting. All performance metrics have a mean and a
standard deviation (can be zero if there is only one run with this
setting).
"""
train_losses = []
train_accuracies = []
test_losses = []
test_accuracies = []
meta_loaded = False
for run in self.runs:
output = self._load_json(os.path.join(self._path, run))
# Get meta data from first run
if not meta_loaded:
meta = output
meta_loaded = True
train_losses.append(output['train_losses'])
test_losses.append(output['test_losses'])
if 'train_accuracies' in output:
train_accuracies.append(output['train_accuracies'])
test_accuracies.append(output['test_accuracies'])
aggregate = dict()
# compute speed
perf = np.array(eval(self.metric))
if self.metric == "test_losses" or self.metric == "train_losses":
# average over first time they reach conv perf (use num_epochs if conv perf is not reached)
aggregate['speed'] = np.mean(
np.argmax(perf <= self.conv_perf, axis=1) +
np.invert(np.max(perf <= self.conv_perf, axis=1)) *
perf.shape[1])
elif self.metric == "test_accuracies" or self.metric == "train_accuracies":
aggregate['speed'] = np.mean(
np.argmax(perf >= self.conv_perf, axis=1) +
np.invert(np.max(perf >= self.conv_perf, axis=1)) *
perf.shape[1])
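# Worked example of the speed formula above (values illustrative): with
# metric "test_accuracies" and conv_perf = 0.5, a run with
# perf = [[0.2, 0.6, 0.7]] gives perf >= 0.5 -> [False, True, True], so
# np.argmax(..., axis=1) = 1 (the first epoch reaching the target) and
# np.invert(np.max(..., axis=1)) = False contributes nothing. A run that
# never reaches conv_perf has argmax 0, and the inverted max adds
# perf.shape[1], i.e. the full number of epochs, as a penalty. Note that
# ``eval(self.metric)`` simply selects one of the four local lists above
# (e.g. ``test_accuracies``) by name.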
# build dict
for m in [
'train_losses', 'test_losses', 'train_accuracies',
'test_accuracies'
]:
aggregate[m] = {
'mean': np.mean(eval(m), axis=0),
'std': np.std(eval(m), axis=0)
}
# merge meta and aggregate (aggregate replaces)
aggregate = {**meta, **aggregate}
aggregate.pop('minibatch_train_losses', None)
return aggregate
def _load_json(self, path):
"""Load the ``JSON`` file of the given path.
Args:
path (str): Path to a ``JSON`` file.
Returns:
dict: Dictionary from the ``JSON`` file.
"""
with open(path, "r") as f:
return json.load(f)
def _get_final_value(self):
"""Get final (mean) value of the metric used in this test problem.
Returns:
float: Final (mean) value of the test problem's metric.
"""
return self.output[self.metric]['mean'][-1]
def _get_best_value(self):
"""Get best (mean) value of the metric used in this test problem.
Returns:
float: Best (mean) value of the test problem's metric.
"""
if self.metric == 'test_losses' or self.metric == 'train_losses':
return min(self.output[self.metric]['mean'])
elif self.metric == 'test_accuracies' or self.metric == 'train_accuracies':
return max(self.output[self.metric]['mean'])
else:
raise RuntimeError("Metric unknown")
def beautify_lr_sensitivity(fig, ax):
"""Beautify a learning rate sensitivity plot.
This function adds axis labels and removes spines to create a nicer learning
rate sensitivity plot.
Args:
fig (matplotlib.figure): Handle to the matplotlib figure of the learning
rate sensitivity plot.
ax (list): List of lists of matplotlib axis of the learning rate
sensitivity plots.
Returns:
matplotlib.figure: Handle to the beautified matplotlib figure of the
learning rate sensitivity plot.
list: List of lists of the beautified matplotlib axis of the learning
rate sensitivity plots.
"""
fig.suptitle("Learning rate sensitivity", fontsize=20)
for i in range(ax.shape[0]):
for j in range(ax.shape[1]):
ax[i][j].get_yaxis().set_visible(False)
ax[i][j].spines['top'].set_visible(False)
ax[i][j].spines['right'].set_visible(False)
# ax[i][j].spines['bottom'].set_visible(False)
ax[i][j].spines['left'].set_visible(False)
if i == 0:
ax[i][j].get_xaxis().set_visible(False)
if i == 1:
ax[i][j].set_xlabel('Learning Rate')
return fig, ax
def texify_lr_sensitivity(fig, ax):
"""Write a ``.tex`` file with the learning rate sensitivity plot.
The function will create a file named `tuning_plot.tex` with the latex code
for the learning rate sensitivity plot.
Args:
fig (matplotlib.figure): Handle to the matplotlib figure of the learning
rate sensitivity plot.
ax (list): List of lists of matplotlib axis of the learning rate
sensitivity plots.
Returns:
str: String of the latex code for the learning rate sensitivity plot.
"""
tikz_code = get_tikz_code(
'tuning_plot_new.tex',
figureheight='\\figureheight',
figurewidth='0.33\\figurewidth')
tikz_code = tikz_code.replace(
'\\begin{groupplot}[group style={group size=4 by 2}]',
'\\begin{groupplot}[group style={group size=4 by 2, horizontal sep=0.02\\figurewidth, vertical sep=0.15cm}]'
)
tikz_code = r"\pgfplotsset{every axis/.append style={label style={font=\tiny}, tick label style={font=\tiny}, legend style={font=\tiny, line width=1pt}}}" + tikz_code
tikz_code = tikz_code.replace('minor', '%minor') # comment minor tick
tikz_code = tikz_code.replace('x grid',
'%x grid') # remove grid xmajorticks=false,
tikz_code = tikz_code.replace('y grid', '%y grid') # remove grid
tikz_code = tikz_code.replace('tick align',
'%tick align') # ugly outside ticks
tikz_code = tikz_code.replace(
'nextgroupplot[', 'nextgroupplot[axis x line*=bottom,\nhide y axis,'
) # ugly outside ticks
tikz_code = tikz_code.replace(
'(current bounding box.south west)!0.98!(current bounding box.north west)',
'(current bounding box.south west)!1.05!(current bounding box.north west)'
) # position title higher
tikz_code = tikz_code.replace('title={',
r'title={\small ') # shrink title size
# Write the file out again
with open('tuning_plot.tex', 'w') as file:
file.write(tikz_code)
return tikz_code
def rescale_ax(ax):
"""Rescale an axis to include the most important data.
Args:
ax (matplotlib.axis): Handle to a matplotlib axis.
"""
lines = ax.lines
y_data = []
y_limits = []
for line in lines:
if line.get_label() != "convergence_performance":
y_data.append(line.get_ydata())
else:
y_limits.append(line.get_ydata()[0])
if y_data:
y_limits.append(np.percentile(np.array(y_data), 20))
y_limits.append(np.percentile(np.array(y_data), 80))
y_limits = y_limits + (np.array(y_data)[:, -1].tolist())
y_limits = [np.min(y_limits), np.max(y_limits)]
y_limits = [y_limits[0] * 0.9, y_limits[1] * 1.1]
if y_limits[0] != y_limits[1]:
ax.set_ylim([max(1e-10, y_limits[0]), y_limits[1]])
ax.margins(x=0)
else:
ax.set_ylim([1.0, 2.0])
def beautify_plot_performance(fig, ax, folder_parser, problem_set):
"""Beautify a performance plot.
This function adds axis labels, sets titles and more to create a nicer
performance plot.
Args:
fig (matplotlib.figure): Handle to the matplotlib figure of the
performance plot.
ax (list): List of lists of matplotlib axis of the performance plot.
folder_parser (Analyzer): An instance of the DeepOBS Analyzer class
to plot the performance from.
problem_set (str): Can either be ``small`` or ``large`` to switch
between which benchmark set is being plotted.
Returns:
matplotlib.figure: Handle to the beautified matplotlib figure of the
performance plot.
list: List of lists of the beautified matplotlib axis of the performance
plots.
"""
fig.subplots_adjust(hspace=0.4)
if problem_set == "small":
fig.suptitle("Benchmark Set Small", fontsize=20)
titles = [
"P1 Quadratic Deep", "P2 MNIST - VAE", "P3 F-MNIST - CNN",
"P4 CIFAR-10 - CNN"
]
# clear axis (needed for matplotlib2tikz)
plt.sca(ax[2][0])
plt.cla()
plt.sca(ax[2][1])
plt.cla()
plt.sca(ax[3][1])
plt.cla()
ax[2][1].axis('off')
ax[3][1].axis('off')
ax[1][0].set_xlabel("Epochs")
ax[1][1].set_xlabel("Epochs")
ax[2][2].set_ylabel("Test Accuracy")
ax[3][2].set_ylabel("Train Accuracy")
ax[1][1].tick_params(
axis='x', which='major', bottom=False,
labelbottom=True) # show x axis
# Add convergence performance line
for idx, tp in enumerate(
["quadratic_deep", "mnist_vae", "fmnist_2c2d", "cifar10_3c3d"]):
if tp in folder_parser.testproblems:
metric = folder_parser.testproblems[tp].metric
conv_perf = folder_parser.testproblems[tp].conv_perf
if metric == "test_losses":
ax_row = 0
elif metric == "test_accuracies":
ax_row = 2
ax[ax_row][idx].axhline(
conv_perf, color='#AFB3B7', label="convergence_performance")
elif problem_set == "large":
fig.suptitle("Benchmark Set Large", fontsize=20)
ax[1][0].set_xlabel("Epochs")
ax[3][1].set_xlabel("Epochs")
ax[2][1].set_ylabel("Test Accuracy")
ax[3][1].set_ylabel("Train Accuracy")
titles = [
"P5 F-MNIST - VAE", "P6 CIFAR 100 - All CNN C",
"P7 SVHN - Wide ResNet 16-4", "P8 Tolstoi - Char RNN"
]
# Add convergence performance line
for idx, tp in enumerate([
"fmnist_vae", "cifar100_allcnnc", "svhn_wrn164",
"tolstoi_char_rnn"
]):
if tp in folder_parser.testproblems:
metric = folder_parser.testproblems[tp].metric
conv_perf = folder_parser.testproblems[tp].conv_perf
if metric == "test_losses":
ax_row = 0
elif metric == "test_accuracies":
ax_row = 2
ax[ax_row][idx].axhline(
conv_perf, color='#AFB3B7', label="convergence_performance")
# clear axis (needed for matplotlib2tikz)
plt.sca(ax[2][0])
plt.cla()
plt.sca(ax[3][0])
plt.cla()
ax[2][0].axis('off')
ax[3][0].axis('off')
ax[3][2].set_xlabel("Epochs")
ax[3][3].set_xlabel("Epochs")
ax[0][0].set_ylabel("Test Loss")
ax[1][0].set_ylabel("Train Loss")
ax[1][0].tick_params(
axis='x', which='major', bottom=False, labelbottom=True) # show x axis
# automatic rescaling
for axlist in ax:
for a in axlist:
a = rescale_ax(a)
# Legend
handles, labels = ax[0][3].get_legend_handles_labels()
# labels_tex = [tfobs.plot_utils.texify(l) for l in labels]
ax[3][0].legend(
handles,
labels,
loc='upper right',
bbox_to_anchor=(0.2, 1.1, 0.5, 0.5))
for idx, title in enumerate(titles):
ax[0, idx].set_title(title)
return fig, ax
def texify_plot_performance(fig, ax, problem_set):
"""Write a ``.tex`` file with the performance plot.
The function will create a file named `benchmark_small.tex` or
`benchmark_large.tex` with the latex code for the performance plot.
Args:
fig (matplotlib.figure): Handle to the matplotlib figure of the
performance plot.
ax (list): List of lists of matplotlib axis of the performance plot.
problem_set (str): Can either be ``small`` or ``large`` to switch
between which benchmark set is being plotted.
Returns:
str: String of the latex code for the learning rate sensitivity plot.
"""
file_name = 'benchmark_' + str(problem_set) + '.tex'
tikz_code = get_tikz_code(
file_name, figureheight='\\figureheight', figurewidth='\\figurewidth')
tikz_code = r"\pgfplotsset{every axis/.append style={label style={font=\tiny}, tick label style={font=\tiny}, legend style={font=\tiny, line width=1pt}}}" + tikz_code
tikz_code = tikz_code.replace('minor', '%minor') # comment minor tick
tikz_code = tikz_code.replace('x grid', '%x grid') # remove grid
tikz_code = tikz_code.replace('y grid', '%y grid') # remove grid
tikz_code = tikz_code.replace('tick align',
'%tick align') # ugly outside ticks
tikz_code = tikz_code.replace(
'nextgroupplot[',
'nextgroupplot[axis x line*=bottom,\naxis y line*=left,'
) # ugly outside ticks
tikz_code = tikz_code.replace('xlabel={Epochs},\nxmajorticks=false,',
'xlabel={Epochs},\nxmajorticks=true,'
) # if x label is epoch, show ticks
tikz_code = tikz_code.replace('ymajorticks=false,',
'ymajorticks=true,') # show y labels
tikz_code = tikz_code.replace(r'\mathdefault',
'') # remove mathdefault in labels
tikz_code = tikz_code.replace(
r'\path [draw=white!80.0!black, fill opacity=0]',
r'%\path [draw=white!80.0!black, fill opacity=0]'
) # remove lines that are created for some reason
tikz_code = tikz_code.replace(
'(current bounding box.south west)!0.98!(current bounding box.north west)',
'(current bounding box.south west)!1.05!(current bounding box.north west)'
) # position title higher
tikz_code = tikz_code.replace('title={',
r'title={\small ') # shrink title size
tikz_code = tikz_code.replace(
'group style={group size=4 by 4',
'group style={group size=4 by 4, horizontal sep=1cm, vertical sep=0.4cm '
) # reduce separation between plots
tikz_code = tikz_code.replace(
'ylabel={Test Loss}', r'ylabel style={align=left}, ylabel=Test\\Loss'
) # y label in two lines
tikz_code = tikz_code.replace(
'ylabel={Test Accuracy}',
r'ylabel style={align=left}, ylabel=Test\\Accuracy'
) # y label in two lines
tikz_code = tikz_code.replace(
'ylabel={Train Loss}', r'ylabel style={align=left}, ylabel=Train\\Loss'
) # y label in two lines
tikz_code = tikz_code.replace(
'ylabel={Train Accuracy}',
r'ylabel style={align=left}, ylabel=Train\\Accuracy'
) # y label in two lines
# Write the file out again
with open(file_name, 'w') as file:
file.write(tikz_code)
return tikz_code
def beautify_plot_table(bm_table):
"""Beautify a performance table.
This function makes a few changes to the performance table to make it nicer.
Args:
bm_table (dict): Dictionary holding all the information for the
performance table.
Returns:
pandas.dataframe: A pandas data frame for the performance table.
"""
bm_table_pd = pd.DataFrame.from_dict({(i, j): bm_table[i][j]
for i in bm_table.keys()
for j in bm_table[i].keys()}).T
cols = list(bm_table_pd.columns.values)
if 'AdamOptimizer' in cols:
cols.insert(0, cols.pop(cols.index('AdamOptimizer')))
if 'MomentumOptimizer' in cols:
cols.insert(0, cols.pop(cols.index('MomentumOptimizer')))
if 'GradientDescentOptimizer' in cols:
cols.insert(0, cols.pop(cols.index('GradientDescentOptimizer')))
bm_table_pd = bm_table_pd.reindex(columns=cols)
print(bm_table_pd)
return bm_table_pd
def texify_plot_table(perf_table_pd, problem_set):
"""Write a ``.tex`` file with the performance table.
The function will create a file named `performance_table_small.tex` or
`performance_table_large.tex` with the latex code for the performance table.
Args:
perf_table_pd (pandas.dataframe): Pandas data frame for the performance
table.
problem_set (str): Can either be ``small`` or ``large`` to switch
between which benchmark set is being plotted.
Returns:
str: String of the latex code for the performance table.
"""
if not perf_table_pd.empty:
# Postprocessing for Latex Output
pd.set_option('display.max_colwidth', -1)
perf_table_pd_n = perf_table_pd.apply(
norm, axis=1) # normalize between 0 and 100
perf_table_pd_n_str = perf_table_pd_n.applymap(
add_color_coding_tex) + perf_table_pd.applymap(
latex) # combine normalise version with latex color code command
perf_table_pd_n_str.columns = perf_table_pd_n_str.columns.str.replace(
'_', r'\_') # Texify the column headers
tikz_code = r"\def\cca#1#2{\cellcolor{green!#1!red}\ifnum #1<50\color{white}\fi{#2}}" +\
"\n" + r"\resizebox{\textwidth}{!}{%" + "\n" +\
perf_table_pd_n_str.to_latex(escape=False) + r"}"
with open('performance_table_' + problem_set + '.tex', 'w') as tex_file:
tex_file.write(tikz_code)
return tikz_code
def norm(x):
"""Normalize the input of x, depending on the name (higher is better if
test_acc is used, otherwise lower is better)"""
if x.name[1] == 'Tuneability':
return x
if x.min() == x.max():
return x - x.min() + 50.0
if x.name[1] == 'Performance':
if x.name[0] == "quadratic_deep" or x.name[0] == "mnist_vae" or x.name[
0] == "fmnist_vae":
return np.abs((x - x.max()) / (x.min() - x.max()) * 100)
else:
return np.abs((x - x.min()) / (x.max() - x.min()) * 100)
else:
return np.abs((x - x.max()) / (x.min() - x.max()) * 100)
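# Worked example for norm (values illustrative): an accuracy row
# x = [0.60, 0.75, 0.90] is mapped by (x - min) / (max - min) * 100 to
# [0, 50, 100] (higher is better), while a loss row such as quadratic_deep
# uses (x - max) / (min - max) * 100, so the smallest loss receives 100.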
def latex(input):
"""Create the latex output version of the input."""
if isinstance(input, float):
input = "%.4f" % input
return "{" + str(input) + "}"
elif isinstance(input, int):
return "{" + str(input) + "}"
elif isinstance(input, dict):
return str(input).replace('{', '').replace('}', '').replace(
"'", '').replace('_', '')
else:
return ""
def add_color_coding_tex(input):
"""Adds the latex command for color coding to the input"""
if isinstance(input, (str, int)) or (
isinstance(input, float) and not np.isnan(input)):
return r"\cca{" + str(int(input)) + "}"
else:
return ""
|
|
#!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Token utility functions."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import copy
import StringIO
from closure_linter.common import tokens
from closure_linter.javascripttokens import JavaScriptToken
from closure_linter.javascripttokens import JavaScriptTokenType
# Shorthand
Type = tokens.TokenType
def GetFirstTokenInSameLine(token):
"""Returns the first token in the same line as token.
Args:
token: Any token in the line.
Returns:
The first token in the same line as token.
"""
while not token.IsFirstInLine():
token = token.previous
return token
def GetFirstTokenInPreviousLine(token):
"""Returns the first token in the previous line as token.
Args:
token: Any token in the line.
Returns:
The first token in the previous line, or None if token is on the
first line.
"""
first_in_line = GetFirstTokenInSameLine(token)
if first_in_line.previous:
return GetFirstTokenInSameLine(first_in_line.previous)
return None
def GetLastTokenInSameLine(token):
"""Returns the last token in the same line as token.
Args:
token: Any token in the line.
Returns:
The last token in the same line as token.
"""
while not token.IsLastInLine():
token = token.next
return token
def GetAllTokensInSameLine(token):
"""Returns all tokens in the same line as the given token.
Args:
token: Any token in the line.
Returns:
All tokens on the same line as the given token.
"""
first_token = GetFirstTokenInSameLine(token)
last_token = GetLastTokenInSameLine(token)
tokens_in_line = []
while first_token != last_token:
tokens_in_line.append(first_token)
first_token = first_token.next
tokens_in_line.append(last_token)
return tokens_in_line
def CustomSearch(start_token, func, end_func=None, distance=None,
reverse=False):
"""Returns the first token where func is True within distance of this token.
Args:
start_token: The token to start searching from
func: The function to call to test a token for applicability
end_func: The function to call to test a token to determine whether to abort
the search.
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token matching func within distance of this token, or None if no
such token is found.
"""
token = start_token
if reverse:
while token and (distance is None or distance > 0):
previous = token.previous
if previous:
if func(previous):
return previous
if end_func and end_func(previous):
return None
token = previous
if distance is not None:
distance -= 1
else:
while token and (distance is None or distance > 0):
next_token = token.next
if next_token:
if func(next_token):
return next_token
if end_func and end_func(next_token):
return None
token = next_token
if distance is not None:
distance -= 1
return None
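# Illustration of how the wrappers below reduce to CustomSearch (the token
# variable is assumed to come from a parsed stream):
#
#     semicolon = Search(token, JavaScriptTokenType.SEMICOLON)
#     # is equivalent to:
#     semicolon = CustomSearch(
#         token, lambda t: t.IsAnyType(JavaScriptTokenType.SEMICOLON))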
def Search(start_token, token_types, distance=None, reverse=False):
"""Returns the first token of type in token_types within distance.
Args:
start_token: The token to start searching from
token_types: The allowable types of the token being searched for
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token of any type in token_types within distance of this token, or
None if no such token is found.
"""
return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
None, distance, reverse)
def SearchExcept(start_token, token_types, distance=None, reverse=False):
"""Returns the first token not of any type in token_types within distance.
Args:
start_token: The token to start searching from
token_types: The unallowable types of the token being searched for
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token not of any type in token_types within distance of this
token, or None if no such token is found.
"""
return CustomSearch(start_token,
lambda token: not token.IsAnyType(token_types),
None, distance, reverse)
def SearchUntil(start_token, token_types, end_types, distance=None,
reverse=False):
"""Returns the first token of type in token_types before a token of end_type.
Args:
start_token: The token to start searching from.
token_types: The allowable types of the token being searched for.
end_types: Types of tokens to abort search if we find.
distance: The number of tokens to look through before failing search. Must
be positive. If unspecified, will search until the end of the token
chain
reverse: When true, search the tokens before this one instead of the tokens
after it
Returns:
The first token of any type in token_types within distance of this token
before any tokens of type in end_type, or None if no such token is found.
"""
return CustomSearch(start_token, lambda token: token.IsAnyType(token_types),
lambda token: token.IsAnyType(end_types),
distance, reverse)
def DeleteToken(token):
"""Deletes the given token from the linked list.
Args:
token: The token to delete
"""
# When deleting a token, we do not update the deleted token itself to make
# sure the previous and next pointers are still pointing to tokens which are
# not deleted. Also it is very hard to keep track of all previously deleted
# tokens to update them when their pointers become invalid. So we add this
flag so that any token linked-list iteration logic can safely skip deleted
nodes when its current token is deleted.
token.is_deleted = True
if token.previous:
token.previous.next = token.next
if token.next:
token.next.previous = token.previous
following_token = token.next
while following_token and following_token.metadata.last_code == token:
following_token.metadata.last_code = token.metadata.last_code
following_token = following_token.next
def DeleteTokens(token, token_count):
"""Deletes the given number of tokens starting with the given token.
Args:
token: The token to start deleting at.
token_count: The total number of tokens to delete.
"""
for i in xrange(1, token_count):
DeleteToken(token.next)
DeleteToken(token)
def InsertTokenBefore(new_token, token):
"""Insert new_token before token.
Args:
new_token: A token to be added to the stream
token: A token already in the stream
"""
new_token.next = token
new_token.previous = token.previous
new_token.metadata = copy.copy(token.metadata)
if new_token.IsCode():
old_last_code = token.metadata.last_code
following_token = token
while (following_token and
following_token.metadata.last_code == old_last_code):
following_token.metadata.last_code = new_token
following_token = following_token.next
token.previous = new_token
if new_token.previous:
new_token.previous.next = new_token
if new_token.start_index is None:
if new_token.line_number == token.line_number:
new_token.start_index = token.start_index
else:
previous_token = new_token.previous
if previous_token:
new_token.start_index = (previous_token.start_index +
len(previous_token.string))
else:
new_token.start_index = 0
iterator = new_token.next
while iterator and iterator.line_number == new_token.line_number:
iterator.start_index += len(new_token.string)
iterator = iterator.next
def InsertTokenAfter(new_token, token):
"""Insert new_token after token.
Args:
new_token: A token to be added to the stream
token: A token already in the stream
"""
new_token.previous = token
new_token.next = token.next
new_token.metadata = copy.copy(token.metadata)
if token.IsCode():
new_token.metadata.last_code = token
if new_token.IsCode():
following_token = token.next
while following_token and following_token.metadata.last_code == token:
following_token.metadata.last_code = new_token
following_token = following_token.next
token.next = new_token
if new_token.next:
new_token.next.previous = new_token
if new_token.start_index is None:
if new_token.line_number == token.line_number:
new_token.start_index = token.start_index + len(token.string)
else:
new_token.start_index = 0
iterator = new_token.next
while iterator and iterator.line_number == new_token.line_number:
iterator.start_index += len(new_token.string)
iterator = iterator.next
def InsertTokensAfter(new_tokens, token):
"""Insert multiple tokens after token.
Args:
new_tokens: An array of tokens to be added to the stream
token: A token already in the stream
"""
# TODO(user): It would be nicer to have InsertTokenAfter defer to here
# instead of vice-versa.
current_token = token
for new_token in new_tokens:
InsertTokenAfter(new_token, current_token)
current_token = new_token
def InsertSpaceTokenAfter(token):
"""Inserts a space token after the given token.
Args:
token: The token to insert a space token after
"""
space_token = JavaScriptToken(' ', Type.WHITESPACE, token.line,
token.line_number)
InsertTokenAfter(space_token, token)
def InsertBlankLineAfter(token):
"""Inserts a blank line after the given token.
Args:
token: The token to insert a blank line after
"""
blank_token = JavaScriptToken('', Type.BLANK_LINE, '',
token.line_number + 1)
InsertLineAfter(token, [blank_token])
def InsertLineAfter(token, new_tokens):
"""Inserts a new line consisting of new_tokens after the given token.
Args:
token: The token to insert after.
new_tokens: The tokens that will make up the new line.
"""
insert_location = token
for new_token in new_tokens:
InsertTokenAfter(new_token, insert_location)
insert_location = new_token
# Update all subsequent line numbers.
next_token = new_tokens[-1].next
while next_token:
next_token.line_number += 1
next_token = next_token.next
def SplitToken(token, position):
"""Splits the token into two tokens at position.
Args:
token: The token to split
position: The position to split at. Will be the beginning of second token.
Returns:
The new second token.
"""
new_string = token.string[position:]
token.string = token.string[:position]
new_token = JavaScriptToken(new_string, token.type, token.line,
token.line_number)
InsertTokenAfter(new_token, token)
return new_token
def Compare(token1, token2):
"""Compares two tokens and determines their relative order.
Args:
token1: The first token to compare.
token2: The second token to compare.
Returns:
A negative integer, zero, or a positive integer as the first token is
before, equal, or after the second in the token stream.
"""
if token2.line_number != token1.line_number:
return token1.line_number - token2.line_number
else:
return token1.start_index - token2.start_index
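# Because Compare follows the classic cmp contract (negative/zero/positive),
# it can drive a Python 2 sort directly, e.g.
#
#     ordered = sorted(unordered_tokens, cmp=Compare)
#
# which yields stream order: primarily by line_number, then by start_index.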
def GoogScopeOrNoneFromStartBlock(token):
"""Determines if the given START_BLOCK is part of a goog.scope statement.
Args:
token: A token of type START_BLOCK.
Returns:
The goog.scope function call token, or None if such call doesn't exist.
"""
if token.type != JavaScriptTokenType.START_BLOCK:
return None
# Search for a goog.scope statement, which will be 5 tokens before the
# block. Illustration of the tokens found prior to the start block:
# goog.scope(function() {
# 5 4 3 21 ^
maybe_goog_scope = token
for unused_i in xrange(5):
maybe_goog_scope = (maybe_goog_scope.previous if maybe_goog_scope and
maybe_goog_scope.previous else None)
if maybe_goog_scope and maybe_goog_scope.string == 'goog.scope':
return maybe_goog_scope
def GetTokenRange(start_token, end_token):
"""Returns a list of tokens between the two given, inclusive.
Args:
start_token: Start token in the range.
end_token: End token in the range.
Returns:
A list of tokens, in order, from start_token to end_token (including start
and end). Returns None if the tokens do not describe a valid range.
"""
token_range = []
token = start_token
while token:
token_range.append(token)
if token == end_token:
return token_range
token = token.next
def TokensToString(token_iterable):
"""Convert a number of tokens into a string.
Newlines will be inserted whenever the line_number of two neighboring
tokens differs.
Args:
token_iterable: The tokens to turn to a string.
Returns:
A string representation of the given tokens.
"""
buf = StringIO.StringIO()
token_list = list(token_iterable)
if not token_list:
return ''
line_number = token_list[0].line_number
for token in token_list:
while line_number < token.line_number:
line_number += 1
buf.write('\n')
if line_number > token.line_number:
line_number = token.line_number
buf.write('\n')
buf.write(token.string)
return buf.getvalue()
def GetPreviousCodeToken(token):
"""Returns the code token before the specified token.
Args:
token: A token.
Returns:
The code token before the specified token or None if no such token
exists.
"""
return CustomSearch(
token,
lambda t: t and t.type not in JavaScriptTokenType.NON_CODE_TYPES,
reverse=True)
def GetNextCodeToken(token):
"""Returns the next code token after the specified token.
Args:
token: A token.
Returns:
The next code token after the specified token or None if no such token
exists.
"""
return CustomSearch(
token,
lambda t: t and t.type not in JavaScriptTokenType.NON_CODE_TYPES,
reverse=False)
def GetIdentifierStart(token):
"""Returns the first token in an identifier.
Given a token which is part of an identifier, returns the token at the start
of the identifier.
Args:
token: A token which is part of an identifier.
Returns:
The token at the start of the identifier or None if the identifier was not
of the form 'a.b.c' (e.g. "['a']['b'].c").
"""
start_token = token
previous_code_token = GetPreviousCodeToken(token)
while (previous_code_token and (
previous_code_token.IsType(JavaScriptTokenType.IDENTIFIER) or
_IsDot(previous_code_token))):
start_token = previous_code_token
previous_code_token = GetPreviousCodeToken(previous_code_token)
if _IsDot(start_token):
return None
return start_token
def GetIdentifierForToken(token):
"""Get the symbol specified by a token.
Given a token, this function concatenates any parts of the identifier that
are split across whitespace or newlines.
The function will return None if the token is not the first token of an
identifier.
Args:
token: The first token of a symbol.
Returns:
The whole symbol, as a string.
"""
# Search backward to determine if this token is the first token of the
# identifier. If it is not the first token, return None to signal that this
# token should be ignored.
prev_token = token.previous
while prev_token:
if (prev_token.IsType(JavaScriptTokenType.IDENTIFIER) or
_IsDot(prev_token)):
return None
if (prev_token.IsType(tokens.TokenType.WHITESPACE) or
prev_token.IsAnyType(JavaScriptTokenType.COMMENT_TYPES)):
prev_token = prev_token.previous
else:
break
# A "function foo()" declaration.
if token.type is JavaScriptTokenType.FUNCTION_NAME:
return token.string
# A "var foo" declaration (if the previous token is 'var')
previous_code_token = GetPreviousCodeToken(token)
if previous_code_token and previous_code_token.IsKeyword('var'):
return token.string
# Otherwise, this is potentially a namespaced (goog.foo.bar) identifier that
# could span multiple lines or be broken up by whitespace. We need
# to concatenate.
identifier_types = set([
JavaScriptTokenType.IDENTIFIER,
JavaScriptTokenType.SIMPLE_LVALUE
])
assert token.type in identifier_types
# Start with the first token
symbol_tokens = [token]
if token.next:
for t in token.next:
last_symbol_token = symbol_tokens[-1]
# An identifier is part of the previous symbol if it has a trailing
# dot.
if t.type in identifier_types:
if last_symbol_token.string.endswith('.'):
symbol_tokens.append(t)
continue
else:
break
# A dot is part of the previous symbol if it does not have a trailing
# dot.
if _IsDot(t):
if not last_symbol_token.string.endswith('.'):
symbol_tokens.append(t)
continue
else:
break
# Skip any whitespace
if t.type in JavaScriptTokenType.NON_CODE_TYPES:
continue
# This is the end of the identifier. Stop iterating.
break
if symbol_tokens:
return ''.join([t.string for t in symbol_tokens])
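# Illustrative sketch (hypothetical input): given the first token of
# "goog.foo\n    .bar", GetIdentifierForToken returns 'goog.foo.bar',
# joining the identifier pieces across the newline and whitespace.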
def _IsDot(token):
"""Whether the token represents a "dot" operator (foo.bar)."""
return token.type is tokens.TokenType.NORMAL and token.string == '.'
|
|
"""
homeassistant.components.light
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to interact with lights.
It offers the following services:
TURN_OFF - Turns one or multiple lights off.
Supports the following parameters:
- transition
Integer that represents the time (in seconds) the light should take to
transition to the new state.
- entity_id
String or list of strings that point at entity_ids of lights.
TURN_ON - Turns one or multiple lights on and change attributes.
Supports the following parameters:
- transition
Integer that represents the time (in seconds) the light should take to
transition to the new state.
- entity_id
String or list of strings that point at entity_ids of lights.
- profile
String with the name of one of the built-in profiles (relax, energize,
concentrate, reading) or one of the custom profiles defined in
light_profiles.csv in the current working directory.
Light profiles define an xy color and a brightness.
If a profile is given together with a brightness or xy color, the explicit
values overwrite the profile values.
- xy_color
A list containing two floats representing the xy color you want the light
to be.
- rgb_color
A list containing three integers representing the RGB color you want the
light to be.
- brightness
Integer between 0 and 255 representing how bright you want the light to be.
"""
import logging
import os
import csv
from homeassistant.helpers.device_component import DeviceComponent
import homeassistant.util as util
from homeassistant.const import (
STATE_ON, SERVICE_TURN_ON, SERVICE_TURN_OFF, ATTR_ENTITY_ID)
from homeassistant.helpers import extract_entity_ids
from homeassistant.components import group, discovery, wink
DOMAIN = "light"
DEPENDENCIES = []
SCAN_INTERVAL = 30
GROUP_NAME_ALL_LIGHTS = 'all lights'
ENTITY_ID_ALL_LIGHTS = group.ENTITY_ID_FORMAT.format('all_lights')
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Integer that represents the transition time in seconds for the change
ATTR_TRANSITION = "transition"
# lists holding color values
ATTR_RGB_COLOR = "rgb_color"
ATTR_XY_COLOR = "xy_color"
# int with value 0 .. 255 representing brightness of the light
ATTR_BRIGHTNESS = "brightness"
# String representing a profile (built-in or externally defined)
ATTR_PROFILE = "profile"
# If the light should flash, can be FLASH_SHORT or FLASH_LONG
ATTR_FLASH = "flash"
FLASH_SHORT = "short"
FLASH_LONG = "long"
LIGHT_PROFILES_FILE = "light_profiles.csv"
# Maps discovered services to their platforms
DISCOVERY_PLATFORMS = {
wink.DISCOVER_LIGHTS: 'wink',
discovery.services.PHILIPS_HUE: 'hue',
}
_LOGGER = logging.getLogger(__name__)
def is_on(hass, entity_id=None):
""" Returns if the lights are on based on the statemachine. """
entity_id = entity_id or ENTITY_ID_ALL_LIGHTS
return hass.states.is_state(entity_id, STATE_ON)
# pylint: disable=too-many-arguments
def turn_on(hass, entity_id=None, transition=None, brightness=None,
rgb_color=None, xy_color=None, profile=None, flash=None):
""" Turns all or specified light on. """
data = {
key: value for key, value in [
(ATTR_ENTITY_ID, entity_id),
(ATTR_PROFILE, profile),
(ATTR_TRANSITION, transition),
(ATTR_BRIGHTNESS, brightness),
(ATTR_RGB_COLOR, rgb_color),
(ATTR_XY_COLOR, xy_color),
(ATTR_FLASH, flash),
] if value is not None
}
hass.services.call(DOMAIN, SERVICE_TURN_ON, data)
def turn_off(hass, entity_id=None, transition=None):
""" Turns all or specified light off. """
data = {
key: value for key, value in [
(ATTR_ENTITY_ID, entity_id),
(ATTR_TRANSITION, transition),
] if value is not None
}
hass.services.call(DOMAIN, SERVICE_TURN_OFF, data)
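# Illustrative use of the helpers above (the entity id is hypothetical):
#
#   turn_on(hass, entity_id='light.kitchen', brightness=180, transition=2)
#   turn_off(hass, entity_id='light.kitchen', transition=2)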
# pylint: disable=too-many-branches, too-many-locals
def setup(hass, config):
""" Exposes light control via statemachine and services. """
component = DeviceComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL, DISCOVERY_PLATFORMS,
GROUP_NAME_ALL_LIGHTS)
component.setup(config)
lights = component.devices
# Load built-in profiles and custom profiles
profile_paths = [os.path.join(os.path.dirname(__file__),
LIGHT_PROFILES_FILE),
hass.get_config_path(LIGHT_PROFILES_FILE)]
profiles = {}
for profile_path in profile_paths:
if os.path.isfile(profile_path):
with open(profile_path) as inp:
reader = csv.reader(inp)
# Skip the header
next(reader, None)
try:
for profile_id, color_x, color_y, brightness in reader:
profiles[profile_id] = (float(color_x), float(color_y),
int(brightness))
except ValueError:
# ValueError if a row does not have exactly 4 values
# ValueError if conversion to float/int failed
_LOGGER.error(
"Error parsing light profiles from %s", profile_path)
return False
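# Illustrative light_profiles.csv contents matching the parser above (the
# header row is skipped; the profile name and values are hypothetical):
#
#   id,x,y,brightness
#   relax,0.51,0.41,144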
def handle_light_service(service):
""" Hande a turn light on or off service call. """
# Get and validate data
dat = service.data
# Convert the entity ids to valid light ids
target_lights = [lights[entity_id] for entity_id
in extract_entity_ids(hass, service)
if entity_id in lights]
if not target_lights:
target_lights = lights.values()
params = {}
transition = util.convert(dat.get(ATTR_TRANSITION), int)
if transition is not None:
params[ATTR_TRANSITION] = transition
if service.service == SERVICE_TURN_OFF:
for light in target_lights:
# pylint: disable=star-args
light.turn_off(**params)
else:
# Processing extra data for turn light on request
# We process the profile first so that we get the desired
# behavior that extra service data attributes overwrite
# profile values
profile = profiles.get(dat.get(ATTR_PROFILE))
if profile:
*params[ATTR_XY_COLOR], params[ATTR_BRIGHTNESS] = profile
if ATTR_BRIGHTNESS in dat:
# We pass in the old value as the default parameter if parsing
# of the new one goes wrong.
params[ATTR_BRIGHTNESS] = util.convert(
dat.get(ATTR_BRIGHTNESS), int, params.get(ATTR_BRIGHTNESS))
if ATTR_XY_COLOR in dat:
try:
# xy_color should be a list containing 2 floats
xycolor = dat.get(ATTR_XY_COLOR)
# Without this check, an xycolor given as the string '99' would pass,
# because strings are iterable
if not isinstance(xycolor, str):
params[ATTR_XY_COLOR] = [float(val) for val in xycolor]
except (TypeError, ValueError):
# TypeError if xy_color is not iterable
# ValueError if value could not be converted to float
pass
if ATTR_RGB_COLOR in dat:
try:
# rgb_color should be a list containing 3 ints
rgb_color = dat.get(ATTR_RGB_COLOR)
if len(rgb_color) == 3:
params[ATTR_XY_COLOR] = \
util.color_RGB_to_xy(int(rgb_color[0]),
int(rgb_color[1]),
int(rgb_color[2]))
except (TypeError, ValueError):
# TypeError if rgb_color is not iterable
# ValueError if not all values can be converted to int
pass
if ATTR_FLASH in dat:
if dat[ATTR_FLASH] == FLASH_SHORT:
params[ATTR_FLASH] = FLASH_SHORT
elif dat[ATTR_FLASH] == FLASH_LONG:
params[ATTR_FLASH] = FLASH_LONG
for light in target_lights:
# pylint: disable=star-args
light.turn_on(**params)
for light in target_lights:
light.update_ha_state(True)
# Listen for light on and light off service calls
hass.services.register(DOMAIN, SERVICE_TURN_ON,
handle_light_service)
hass.services.register(DOMAIN, SERVICE_TURN_OFF,
handle_light_service)
return True
|
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test sonnet.python.modules.nets.convnet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from functools import partial
import itertools
# Dependency imports
from absl.testing import parameterized
import numpy as np
import sonnet as snt
from sonnet.python.modules.conv import _fill_shape as fill_shape
import tensorflow as tf
from tensorflow.python.ops import variables
class SharedConvNets2DTest(parameterized.TestCase, tf.test.TestCase):
def setUp(self):
super(SharedConvNets2DTest, self).setUp()
self.output_channels = [2, 3, 4]
self.kernel_shapes = [[3, 3]]
self.strides = [1]
self.paddings = [snt.SAME]
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testName(self, module):
unique_name = "unique_name"
with tf.variable_scope("scope"):
net = module(name=unique_name,
output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
self.assertEqual(net.scope_name, "scope/" + unique_name)
self.assertEqual(net.module_name, unique_name)
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testConstructor(self, module):
with self.assertRaisesRegexp(ValueError,
"output_channels must not be empty"):
module(output_channels=[],
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
with self.assertRaisesRegexp(ValueError,
"kernel_shapes must be of length 1 or *"):
module(output_channels=self.output_channels,
kernel_shapes=[],
strides=self.strides,
paddings=self.paddings)
with self.assertRaisesRegexp(ValueError,
"kernel_shapes must be of length 1 or *"):
module(output_channels=self.output_channels,
kernel_shapes=[1, 2],
strides=self.strides,
paddings=self.paddings)
with self.assertRaisesRegexp(ValueError,
"strides must be of length 1 or *"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=[],
paddings=self.paddings)
with self.assertRaisesRegexp(ValueError,
"strides must be of length 1 or *"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=[1, 1],
paddings=self.paddings)
with self.assertRaisesRegexp(ValueError,
"paddings must be of length 1 or *"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.paddings,
paddings=[])
with self.assertRaisesRegexp(ValueError,
"paddings must be of length 1 or *"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=[snt.SAME, snt.SAME])
with self.assertRaisesRegexp(KeyError,
"Invalid initializer keys.*"):
module(
output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
initializers={"not_w": tf.truncated_normal_initializer(stddev=1.0)})
with self.assertRaisesRegexp(TypeError,
"Initializer for 'w' is not a callable "
"function or dictionary"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
initializers={"w": tf.zeros([1, 2, 3])})
with self.assertRaisesRegexp(KeyError,
"Invalid regularizer keys.*"):
module(
output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
regularizers={"not_w": tf.contrib.layers.l1_regularizer(scale=0.5)})
with self.assertRaisesRegexp(TypeError,
"Regularizer for 'w' is not a callable "
"function or dictionary"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
regularizers={"w": tf.zeros([1, 2, 3])})
with self.assertRaisesRegexp(TypeError,
"Input 'activation' must be callable"):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
activation="not_a_function")
err = "output_channels must be iterable"
with self.assertRaisesRegexp(TypeError, err):
module(output_channels=42,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
err = "kernel_shapes must be iterable"
with self.assertRaisesRegexp(TypeError, err):
module(output_channels=self.output_channels,
kernel_shapes=None,
strides=self.strides,
paddings=self.paddings)
err = "strides must be iterable"
with self.assertRaisesRegexp(TypeError, err):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=True,
paddings=self.paddings)
err = "paddings must be iterable"
with self.assertRaisesRegexp(TypeError, err):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=lambda x: x + 42)
err = "use_bias must be either a bool or an iterable"
with self.assertRaisesRegexp(TypeError, err):
module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_bias=2)
err = "Invalid data_format"
# Also checks that the error works with non-string types
for data_format in ["NHCW", 3]:
with self.assertRaisesRegexp(ValueError, err):
module(
output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
data_format=data_format)
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose",
partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testBatchNormBuildFlag(self, module):
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_batch_norm=True)
self.assertTrue(model.use_batch_norm)
input_to_net = tf.placeholder(tf.float32, shape=(1, 100, 100, 3))
# Check that an error is raised if we don't specify the is_training flag
err = "is_training flag must be explicitly specified"
with self.assertRaisesRegexp(ValueError, err):
model(input_to_net)
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose",
partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testBatchNorm(self, module):
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_batch_norm=True)
self.assertTrue(model.use_batch_norm)
input_to_net = tf.placeholder(tf.float32, shape=(1, 100, 100, 3))
# Check Tensorflow flags work
is_training = tf.placeholder(tf.bool)
test_local_stats = tf.placeholder(tf.bool)
model(input_to_net,
is_training=is_training,
test_local_stats=test_local_stats)
# Check Python is_training flag works
model(input_to_net, is_training=False, test_local_stats=False)
model_variables = model.get_variables()
self.assertEqual(
len(model_variables),
len(self.output_channels) * 3 - 1)
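# Reading of the expected count (an assumption about the layout, not an
# API guarantee): each of the n layers contributes w and b, and every
# layer except the final, unactivated one adds a batch-norm beta, so
# n * 2 + (n - 1) = 3n - 1 variables.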
# Check that the appropriate moving statistics variables have been created.
self.assertTrue(
any("moving_variance" in var.name
for var in tf.global_variables()))
self.assertTrue(
any("moving_mean" in var.name
for var in tf.global_variables()))
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testBatchNormConfig(self, module):
batch_norm_config = {
"scale": True,
}
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_batch_norm=True,
batch_norm_config=batch_norm_config)
input_to_net = tf.placeholder(tf.float32, shape=(1, 100, 100, 3))
model(input_to_net, is_training=True)
model_variables = model.get_variables()
self.assertEqual(
len(model_variables),
len(self.output_channels) * 4 - 2)
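# With "scale": True each batch-norm layer has gamma as well as beta, so
# (assuming, as above, no batch norm on the final layer) the count is
# n * 2 weights/biases plus (n - 1) * 2 batch-norm variables = 4n - 2.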
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testNoBias(self, module):
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_bias=False)
self.assertEqual(model.use_bias, (False,) * len(self.output_channels))
input_to_net = tf.placeholder(tf.float32, shape=(1, 100, 100, 3))
model(input_to_net)
model_variables = model.get_variables()
self.assertEqual(
len(model_variables),
len(self.output_channels))
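# Without biases or batch norm, each layer contributes only its w
# variable, hence exactly n variables in total.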
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D),
("ConvNet2DTranspose", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]])))
def testNoBiasIterable(self, module):
use_bias = (True,) * (len(self.output_channels) - 1) + (False,)
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_bias=use_bias)
actual_use_biases = tuple(layer.has_bias for layer in model.layers)
self.assertEqual(model.use_bias, actual_use_biases)
self.assertEqual(use_bias, actual_use_biases)
model_transpose = model.transpose()
actual_use_biases = tuple(layer.has_bias
for layer in model_transpose.layers)
self.assertEqual(model_transpose.use_bias, actual_use_biases)
self.assertEqual(tuple(reversed(use_bias)), actual_use_biases)
@parameterized.named_parameters(
("ConvNet2DNoBias", snt.nets.ConvNet2D, False),
("ConvNet2DBias", snt.nets.ConvNet2D, True),
("ConvNet2DTransposeNoBias", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]]), False),
("ConvNet2DTransposeBias", partial(snt.nets.ConvNet2DTranspose,
output_shapes=[[100, 100]]), True))
def testRegularizersInRegularizationLosses(self, module, use_bias):
if use_bias:
regularizers = {"w": tf.contrib.layers.l1_regularizer(scale=0.5),
"b": tf.contrib.layers.l2_regularizer(scale=0.5)}
else:
regularizers = {"w": tf.contrib.layers.l1_regularizer(scale=0.5)}
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_bias=use_bias,
regularizers=regularizers)
input_to_net = tf.placeholder(tf.float32, shape=(1, 100, 100, 3))
model(input_to_net)
graph_regularizers = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
self.assertRegexpMatches(graph_regularizers[0].name, ".*l1_regularizer.*")
if use_bias:
self.assertRegexpMatches(graph_regularizers[1].name, ".*l2_regularizer.*")
@parameterized.named_parameters(
("ConvNet2D", snt.nets.ConvNet2D, False),
("ConvNet2DFinal", snt.nets.ConvNet2D, True),
("ConvNet2DTranspose",
partial(snt.nets.ConvNet2DTranspose, output_shapes=[[100, 100]]),
False),
("ConvNet2DTransposeFinal",
partial(snt.nets.ConvNet2DTranspose, output_shapes=[[100, 100]]),
True))
def testActivateFinal(self, module, activate_final):
model = module(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
activate_final=activate_final,
use_batch_norm=True,
use_bias=False)
self.assertEqual(activate_final, model.activate_final)
input_to_net = tf.placeholder(tf.float32, shape=(1, 100, 100, 3))
model(input_to_net, is_training=True)
model_variables = model.get_variables()
# The final layer only gets batch norm (and its beta variable) when
# activate_final is set.
if activate_final:
self.assertEqual(len(model_variables), len(self.output_channels) * 2)
else:
self.assertEqual(len(model_variables), len(self.output_channels) * 2 - 1)
# Test transpose method's activate_final arg.
transposed_model_activate_final = model.transpose(activate_final=True)
transposed_model_no_activate_final = model.transpose(activate_final=False)
transposed_model_inherit_activate_final = model.transpose()
self.assertEqual(True, transposed_model_activate_final.activate_final)
self.assertEqual(False, transposed_model_no_activate_final.activate_final)
self.assertEqual(model.activate_final,
transposed_model_inherit_activate_final.activate_final)
@parameterized.parameters(
*itertools.product(
[snt.nets.ConvNet2D,
partial(snt.nets.ConvNet2DTranspose, output_shapes=[[100, 100]])],
["kernel_shapes", "strides", "paddings", "activation", "initializers",
"partitioners", "regularizers", "use_bias", "batch_norm_config"]))
def testTransposeDefaultParameter(self, module, param_name):
"""Tests if .transpose correctly chooses the default parameters.
Args:
module: The conv net class.
param_name: The name of the parameter to test.
"""
# For these parameters, the expected values are their reversed values
expected_reversed = ["kernel_shapes", "strides", "paddings", "use_bias"]
# We have to choose asymmetric parameter values here in order for the test
# to be effective. This is why we don't take the default ones.
model = module(output_channels=[2, 3, 4],
kernel_shapes=[[3, 3], [5, 5], [7, 7]],
strides=[[1, 1], [2, 2], [3, 3]],
paddings=[snt.SAME, snt.SAME, snt.VALID],
use_batch_norm=[True, True, False],
use_bias=[True, True, False])
# We don't pass the parameter on to .transpose, so None should be the default
transpose_model = model.transpose()
if param_name in expected_reversed:
self.assertItemsEqual(reversed(getattr(model, param_name)),
getattr(transpose_model, param_name))
else:
self.assertEqual(getattr(model, param_name),
getattr(transpose_model, param_name))
@parameterized.parameters(
*itertools.product(
[snt.nets.ConvNet2D,
partial(snt.nets.ConvNet2DTranspose, output_shapes=[[100, 100]])],
[("kernel_shapes", [[3, 3], [3, 3], [3, 3]]),
("strides", [[1, 1], [1, 1], [1, 1]]),
("paddings", [snt.SAME, snt.SAME, snt.SAME]),
("activation", tf.nn.tanh),
("initializers", {}),
("partitioners", {}),
("regularizers", {}),
("use_bias", [True, True, True]),
("batch_norm_config", {"scale": True})]))
def testTransposePassThroughParameter(self, module, param_name_and_value):
"""Tests if .transpose correctly passes through the given parameters.
Args:
module: The conv net class.
param_name_and_value: Tuple consisting of the parameter name and value.
"""
param_name, param_value = param_name_and_value
# The given parameter values are all for three-layer networks. Changing
# the default parameters would therefore break this test. Thus, we choose
# fixed/independent parameters.
model = module(output_channels=[2, 3, 4],
kernel_shapes=[[3, 3], [5, 5], [7, 7]],
strides=[[1, 1], [2, 2], [3, 3]],
paddings=[snt.SAME, snt.SAME, snt.VALID],
use_batch_norm=[True, True, False],
use_bias=[True, True, False])
transpose_model = model.transpose(**{param_name: param_value})
if isinstance(param_value, collections.Mapping):
self.assertDictEqual(param_value, getattr(transpose_model, param_name))
elif isinstance(param_value, collections.Iterable):
self.assertItemsEqual(param_value, getattr(transpose_model, param_name))
else:
self.assertEqual(param_value, getattr(transpose_model, param_name))
@parameterized.named_parameters(
("ConvNet2DNHWC", snt.nets.ConvNet2D, "NHWC"),
("ConvNet2DNCHW", snt.nets.ConvNet2D, "NCHW"),
("ConvNet2DTransposeNHWC", partial(
snt.nets.ConvNet2DTranspose, output_shapes=[[100, 100]]), "NHWC"),
("ConvNet2DTransposeNCHW", partial(
snt.nets.ConvNet2DTranspose, output_shapes=[[100, 100]]), "NCHW"),)
def testDataFormat(self, module, data_format):
net = module(
output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
data_format=data_format)
input_height, input_width, input_channels = 100, 100, 3
batch_size = 10
final_channel = self.output_channels[-1]
if data_format == "NHWC":
input_shape = [batch_size, input_height, input_width, input_channels]
expected_output_shape = [
batch_size, input_height, input_width, final_channel
]
else:
input_shape = [batch_size, input_channels, input_height, input_width]
expected_output_shape = [
batch_size, final_channel, input_height, input_width
]
input_to_net = tf.placeholder(tf.float32, shape=input_shape)
output = net(input_to_net)
self.assertEqual(output.get_shape().as_list(), expected_output_shape)
class ConvNet2DTest(tf.test.TestCase):
def setUp(self):
super(ConvNet2DTest, self).setUp()
self.output_channels = [2, 3, 4]
self.kernel_shapes = [[3, 3]]
self.strides = [1]
self.paddings = [snt.SAME]
def testConstructor(self):
net = snt.nets.ConvNet2D(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
self.assertEqual(len(net.layers), len(self.output_channels))
for i, layer in enumerate(net.layers):
self.assertEqual(layer.output_channels, self.output_channels[i])
self.assertEqual(layer.stride,
(1,) + fill_shape(self.strides[0], 2) + (1,))
self.assertEqual(layer.kernel_shape, fill_shape(self.kernel_shapes[0], 2))
self.assertEqual(layer.padding, self.paddings[0])
self.assertEqual(layer.output_channels, net.output_channels[i])
self.assertEqual(layer.stride,
(1,) + fill_shape(net.strides[i], 2) + (1,))
self.assertEqual(layer.kernel_shape, fill_shape(net.kernel_shapes[i], 2))
self.assertEqual(layer.padding, net.paddings[i])
def testTranspose(self):
with tf.variable_scope("scope1"):
net = snt.nets.ConvNet2D(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
name="conv_net_2d")
err = "Iterable output_channels length must match the number of layers"
with self.assertRaisesRegexp(ValueError, err):
net.transpose(output_channels=[42] * 18)
with tf.variable_scope("scope2"):
net_transpose = net.transpose()
self.assertEqual("scope1/conv_net_2d", net.scope_name)
self.assertEqual("conv_net_2d", net.module_name)
self.assertEqual("scope2/conv_net_2d_transpose", net_transpose.scope_name)
self.assertEqual("conv_net_2d_transpose", net_transpose.module_name)
input_shape = [10, 100, 100, 3]
input_to_net = tf.placeholder(tf.float32, shape=input_shape)
# Tests that trying to connect the transposed network before connecting the
# original net raises an error. The reason is that the output_shapes and
# output_channels are lazily evaluated and not yet known.
with self.assertRaisesRegexp(snt.Error,
"Variables in {} not instantiated yet, "
"__call__ the module first.".format(
net.layers[-1].scope_name)):
net_transpose(input_to_net)
net_transpose = net.transpose(name="another_net_transpose")
net_out = net(input_to_net, is_training=True)
self.assertEqual(net.input_shape, tuple(input_shape))
net_transposed_output = net_transpose(net_out)
self.assertEqual(net_transposed_output.get_shape(),
input_to_net.get_shape())
for i in range(len(net.layers)):
self.assertEqual(net_transpose.layers[i].output_shape,
net.layers[-1 - i].input_shape[1:-1])
self.assertEqual(net_transpose.layers[i].output_channels,
net.layers[-1 - i].input_shape[-1])
data = np.random.rand(*input_shape)
init = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init)
sess.run(net_transposed_output, feed_dict={input_to_net: data})
def testVariableMap(self):
"""Tests for regressions in variable names."""
use_bias = True
use_batch_norm = True
var_names_w = [
u"conv_net_2d/conv_2d_0/w:0",
u"conv_net_2d/conv_2d_1/w:0",
u"conv_net_2d/conv_2d_2/w:0",
]
var_names_b = [
u"conv_net_2d/conv_2d_0/b:0",
u"conv_net_2d/conv_2d_1/b:0",
u"conv_net_2d/conv_2d_2/b:0",
]
var_names_bn = [
u"conv_net_2d/batch_norm_0/beta:0",
u"conv_net_2d/batch_norm_1/beta:0",
]
correct_variable_names = set(var_names_w + var_names_b + var_names_bn)
module = snt.nets.ConvNet2D(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
use_bias=use_bias,
use_batch_norm=use_batch_norm)
input_shape = [10, 100, 100, 3]
input_to_net = tf.placeholder(tf.float32, shape=input_shape)
_ = module(input_to_net, is_training=True)
variable_names = [var.name for var in module.get_variables()]
self.assertEqual(set(variable_names), correct_variable_names)
def testPartitioners(self):
partitioners = {
"w": tf.variable_axis_size_partitioner(10),
"b": tf.variable_axis_size_partitioner(8),
}
module = snt.nets.ConvNet2D(output_channels=self.output_channels,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
partitioners=partitioners)
input_shape = [10, 100, 100, 3]
input_to_net = tf.placeholder(tf.float32, shape=input_shape)
_ = module(input_to_net)
for layer in module._layers:
self.assertEqual(type(layer.w), variables.PartitionedVariable)
self.assertEqual(type(layer.b), variables.PartitionedVariable)
class ConvNet2DTransposeTest(tf.test.TestCase):
def setUp(self):
super(ConvNet2DTransposeTest, self).setUp()
self.output_channels = [2, 3, 4]
self.output_shapes = [[100, 100]]
self.kernel_shapes = [[3, 3]]
self.strides = [1]
self.paddings = [snt.SAME]
def testConstructor(self):
with self.assertRaisesRegexp(ValueError,
"output_shapes must be of length 1 or *"):
snt.nets.ConvNet2DTranspose(output_channels=self.output_channels,
output_shapes=[],
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
with self.assertRaisesRegexp(ValueError,
"output_shapes must be of length 1 or *"):
snt.nets.ConvNet2DTranspose(output_channels=self.output_channels,
output_shapes=[[1, 2], [1, 2]],
kernel_shapes=self.kernel_shapes,
strides=[],
paddings=self.paddings)
with self.assertRaisesRegexp(KeyError,
"Invalid initializer keys.*"):
snt.nets.ConvNet2DTranspose(
output_channels=self.output_channels,
output_shapes=self.output_shapes,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
initializers={"not_w": tf.truncated_normal_initializer(stddev=1.0)})
net = snt.nets.ConvNet2DTranspose(output_channels=self.output_channels,
output_shapes=self.output_shapes,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
self.assertEqual(net.output_shapes,
tuple(self.output_shapes) * len(self.output_channels))
self.assertEqual(len(net.layers), len(self.output_channels))
for i, layer in enumerate(net.layers):
self.assertEqual(layer.output_channels, self.output_channels[i])
self.assertEqual(layer.stride,
(1,) + fill_shape(self.strides[0], 2) + (1,))
self.assertEqual(layer.kernel_shape, fill_shape(self.kernel_shapes[0], 2))
self.assertEqual(layer.padding, self.paddings[0])
self.assertEqual(layer.output_channels, net.output_channels[i])
self.assertEqual(layer.stride,
(1,) + fill_shape(net.strides[i], 2) + (1,))
self.assertEqual(layer.kernel_shape, fill_shape(net.kernel_shapes[i], 2))
self.assertEqual(layer.padding, net.paddings[i])
with self.assertRaisesRegexp(TypeError, "output_shapes must be iterable"):
snt.nets.ConvNet2DTranspose(output_channels=self.output_channels,
output_shapes=False,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
def testTranspose(self):
net = snt.nets.ConvNet2DTranspose(output_channels=self.output_channels,
output_shapes=self.output_shapes,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings)
err = "Iterable output_channels length must match the number of layers"
with self.assertRaisesRegexp(ValueError, err):
net.transpose(output_channels=[42] * 18)
net_transpose = net.transpose()
input_shape = [10, 100, 100, 3]
input_to_net = tf.placeholder(tf.float32, shape=input_shape)
# Tests that trying to connect the transposed network before connecting the
# original net raises an error. The reason is that the output_shapes and
# output_channels are lazily evaluated and not yet known.
with self.assertRaisesRegexp(snt.Error,
"Variables in {} not instantiated yet, "
"__call__ the module first.".format(
net.layers[-1].scope_name)):
net_transpose(input_to_net)
net_transpose = net.transpose(name="another_net_transpose")
net_out = net(input_to_net, is_training=True)
net_transposed_output = net_transpose(net_out)
self.assertEqual(net_transposed_output.get_shape(),
input_to_net.get_shape())
for i in range(len(net.layers)):
self.assertEqual(net_transpose.layers[i].input_shape[1:-1],
net.layers[-1 - i].output_shape)
self.assertEqual(net_transpose.layers[i].output_channels,
net.layers[-1 - i].input_shape[-1])
data = np.random.rand(*input_shape)
init = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init)
sess.run(net_transposed_output, feed_dict={input_to_net: data})
def testPartitioners(self):
partitioners = {
"w": tf.variable_axis_size_partitioner(10),
"b": tf.variable_axis_size_partitioner(8),
}
module = snt.nets.ConvNet2DTranspose(output_channels=self.output_channels,
output_shapes=self.output_shapes,
kernel_shapes=self.kernel_shapes,
strides=self.strides,
paddings=self.paddings,
partitioners=partitioners)
input_shape = [10, 100, 100, 3]
input_to_net = tf.placeholder(tf.float32, shape=input_shape)
_ = module(input_to_net)
for layer in module._layers:
self.assertEqual(type(layer.w), variables.PartitionedVariable)
self.assertEqual(type(layer.b), variables.PartitionedVariable)
if __name__ == "__main__":
tf.test.main()
|
|
from __future__ import unicode_literals
import pytest
import boto
import sure # noqa
from boto.exception import EC2ResponseError
from moto import mock_ec2_deprecated
from .helpers import rsa_check_private_key
RSA_PUBLIC_KEY_OPENSSH = b"""\
ssh-rsa \
AAAAB3NzaC1yc2EAAAADAQABAAABAQDusXfgTE4eBP50NglSzCSEGnIL6+cr6m3H\
6cZANOQ+P1o/W4BdtcAL3sor4iGi7SOeJgo8kweyMQrhrt6HaKGgromRiz37LQx\
4YIAcBi4Zd023mO/V7Rc2Chh18mWgLSmA6ng+j37ip6452zxtv0jHAz9pJolbKBp\
JzbZlPN45ZCTk9ck0fSVHRl6VRSSPQcpqi65XpRf+35zNOCGCc1mAOOTmw59Q2a6\
A3t8mL7r91aM5q6QOQm219lctFM8O7HRJnDgmhGpnjRwE1LyKktWTbgFZ4SNWU2X\
qusUO07jKuSxzPumXBeU+JEtx0J1tqZwJlpGt2R+0qN7nKnPl2+hx \
moto@github.com"""
RSA_PUBLIC_KEY_RFC4716 = b"""\
---- BEGIN SSH2 PUBLIC KEY ----
AAAAB3NzaC1yc2EAAAADAQABAAABAQDusXfgTE4eBP50NglSzCSEGnIL6+cr6m3H6cZANO
Q+P1o/W4BdtcAL3sor4iGi7SOeJgo8kweyMQrhrt6HaKGgromRiz37LQx4YIAcBi4Zd023
mO/V7Rc2Chh18mWgLSmA6ng+j37ip6452zxtv0jHAz9pJolbKBpJzbZlPN45ZCTk9ck0fS
VHRl6VRSSPQcpqi65XpRf+35zNOCGCc1mAOOTmw59Q2a6A3t8mL7r91aM5q6QOQm219lct
FM8O7HRJnDgmhGpnjRwE1LyKktWTbgFZ4SNWU2XqusUO07jKuSxzPumXBeU+JEtx0J1tqZ
wJlpGt2R+0qN7nKnPl2+hx
---- END SSH2 PUBLIC KEY ----
"""
RSA_PUBLIC_KEY_FINGERPRINT = "6a:49:07:1c:7e:bd:d2:bd:96:25:fe:b5:74:83:ae:fd"
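# The OpenSSH and RFC4716 blocks above encode the same RSA key, so importing
# either representation must yield this fingerprint (asserted in
# test_key_pairs_import below).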
DSA_PUBLIC_KEY_OPENSSH = b"""ssh-dss \
AAAAB3NzaC1kc3MAAACBAJ0aXctVwbN6VB81gpo8R7DUk8zXRjZvrkg8Y8vEGt63gklpNJNsLXtEUXkl5D4c0nD2FZO1rJNqFoe\
OQOCoGSfclHvt9w4yPl/lUEtb3Qtj1j80MInETHr19vaSunRk5R+M+8YH+LLcdYdz7MijuGey02mbi0H9K5nUIcuLMArVAAAAFQ\
D0RDvsObRWBlnaW8645obZBM86jwAAAIBNZwf3B4krIzAwVfkMHLDSdAvs7lOWE7o8SJLzr9t4a9HhYp9SLbMzJ815KWfidEYV2\
+s4ZaPCfcZ1GENFRbE8rixz5eMAjEUXEPMJkblDZTHzMsH96z2cOCQZ0vfOmgznsf18Uf725pqo9OqAioEsTJjX8jtI2qNPEBU0\
uhMSZQAAAIBBMGhDu5CWPUlS2QG7vzmzw81XasmHE/s2YPDRbolkriwlunpgwZhCscoQP8HFHY+DLUVvUb+GZwBmFt4l1uHl03b\
ffsm7UIHtCBYERr9Nx0u20ldfhkgB1lhaJb5o0ZJ3pmJ38KChfyHe5EUcqRdEFo89Mp72VI2Z6UHyL175RA== \
moto@github.com"""
@mock_ec2_deprecated
def test_key_pairs_empty():
conn = boto.connect_ec2("the_key", "the_secret")
assert len(conn.get_all_key_pairs()) == 0
@mock_ec2_deprecated
def test_key_pairs_invalid_id():
conn = boto.connect_ec2("the_key", "the_secret")
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_key_pairs("foo")
cm.value.code.should.equal("InvalidKeyPair.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2_deprecated
def test_key_pairs_create():
conn = boto.connect_ec2("the_key", "the_secret")
with pytest.raises(EC2ResponseError) as ex:
conn.create_key_pair("foo", dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateKeyPair operation: Request would have succeeded, but DryRun flag is set"
)
kp = conn.create_key_pair("foo")
rsa_check_private_key(kp.material)
kps = conn.get_all_key_pairs()
assert len(kps) == 1
assert kps[0].name == "foo"
@mock_ec2_deprecated
def test_key_pairs_create_two():
conn = boto.connect_ec2("the_key", "the_secret")
kp1 = conn.create_key_pair("foo")
rsa_check_private_key(kp1.material)
kp2 = conn.create_key_pair("bar")
rsa_check_private_key(kp2.material)
assert kp1.material != kp2.material
kps = conn.get_all_key_pairs()
kps.should.have.length_of(2)
assert {i.name for i in kps} == {"foo", "bar"}
kps = conn.get_all_key_pairs("foo")
kps.should.have.length_of(1)
kps[0].name.should.equal("foo")
@mock_ec2_deprecated
def test_key_pairs_create_exist():
conn = boto.connect_ec2("the_key", "the_secret")
conn.create_key_pair("foo")
assert len(conn.get_all_key_pairs()) == 1
with pytest.raises(EC2ResponseError) as cm:
conn.create_key_pair("foo")
cm.value.code.should.equal("InvalidKeyPair.Duplicate")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2_deprecated
def test_key_pairs_delete_no_exist():
conn = boto.connect_ec2("the_key", "the_secret")
assert len(conn.get_all_key_pairs()) == 0
r = conn.delete_key_pair("foo")
r.should.be.ok
@mock_ec2_deprecated
def test_key_pairs_delete_exist():
conn = boto.connect_ec2("the_key", "the_secret")
conn.create_key_pair("foo")
with pytest.raises(EC2ResponseError) as ex:
r = conn.delete_key_pair("foo", dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DeleteKeyPair operation: Request would have succeeded, but DryRun flag is set"
)
r = conn.delete_key_pair("foo")
r.should.be.ok
assert len(conn.get_all_key_pairs()) == 0
@mock_ec2_deprecated
def test_key_pairs_import():
conn = boto.connect_ec2("the_key", "the_secret")
with pytest.raises(EC2ResponseError) as ex:
conn.import_key_pair("foo", RSA_PUBLIC_KEY_OPENSSH, dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ImportKeyPair operation: Request would have succeeded, but DryRun flag is set"
)
kp1 = conn.import_key_pair("foo", RSA_PUBLIC_KEY_OPENSSH)
assert kp1.name == "foo"
assert kp1.fingerprint == RSA_PUBLIC_KEY_FINGERPRINT
kp2 = conn.import_key_pair("foo2", RSA_PUBLIC_KEY_RFC4716)
assert kp2.name == "foo2"
assert kp2.fingerprint == RSA_PUBLIC_KEY_FINGERPRINT
kps = conn.get_all_key_pairs()
assert len(kps) == 2
assert kps[0].name == kp1.name
assert kps[1].name == kp2.name
@mock_ec2_deprecated
def test_key_pairs_import_exist():
conn = boto.connect_ec2("the_key", "the_secret")
kp = conn.import_key_pair("foo", RSA_PUBLIC_KEY_OPENSSH)
assert kp.name == "foo"
assert len(conn.get_all_key_pairs()) == 1
with pytest.raises(EC2ResponseError) as cm:
conn.create_key_pair("foo")
cm.value.code.should.equal("InvalidKeyPair.Duplicate")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2_deprecated
def test_key_pairs_invalid():
conn = boto.connect_ec2("the_key", "the_secret")
with pytest.raises(EC2ResponseError) as ex:
conn.import_key_pair("foo", b"")
ex.value.error_code.should.equal("InvalidKeyPair.Format")
ex.value.status.should.equal(400)
ex.value.message.should.equal("Key is not in valid OpenSSH public key format")
with pytest.raises(EC2ResponseError) as ex:
conn.import_key_pair("foo", b"garbage")
ex.value.error_code.should.equal("InvalidKeyPair.Format")
ex.value.status.should.equal(400)
ex.value.message.should.equal("Key is not in valid OpenSSH public key format")
with pytest.raises(EC2ResponseError) as ex:
conn.import_key_pair("foo", DSA_PUBLIC_KEY_OPENSSH)
ex.value.error_code.should.equal("InvalidKeyPair.Format")
ex.value.status.should.equal(400)
ex.value.message.should.equal("Key is not in valid OpenSSH public key format")
@mock_ec2_deprecated
def test_key_pair_filters():
conn = boto.connect_ec2("the_key", "the_secret")
_ = conn.create_key_pair("kpfltr1")
kp2 = conn.create_key_pair("kpfltr2")
kp3 = conn.create_key_pair("kpfltr3")
kp_by_name = conn.get_all_key_pairs(filters={"key-name": "kpfltr2"})
set([kp.name for kp in kp_by_name]).should.equal(set([kp2.name]))
kp_by_name = conn.get_all_key_pairs(filters={"fingerprint": kp3.fingerprint})
set([kp.name for kp in kp_by_name]).should.equal(set([kp3.name]))
|
|
# Copyright 2017, Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/google/googleapis/blob/master/google/spanner/admin/instance/v1/spanner_instance_admin.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google engineers.
#
# The only allowed edits are to method and file documentation. A 3-way
# merge preserves those additions if the generated source changes.
"""Accesses the google.spanner.admin.instance.v1 InstanceAdmin API."""
import collections
import json
import os
import pkg_resources
import platform
from google.gapic.longrunning import operations_client
from google.gax import api_callable
from google.gax import config
from google.gax import path_template
import google.gax
from google.cloud.gapic.spanner_admin_instance.v1 import enums
from google.cloud.proto.spanner.admin.instance.v1 import spanner_instance_admin_pb2
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from google.protobuf import field_mask_pb2
_PageDesc = google.gax.PageDescriptor
class InstanceAdminClient(object):
"""
Cloud Spanner Instance Admin API
The Cloud Spanner Instance Admin API can be used to create, delete,
modify and list instances. Instances are dedicated Cloud Spanner serving
and storage resources to be used by Cloud Spanner databases.
Each instance has a \"configuration\", which dictates where the
serving resources for the Cloud Spanner instance are located (e.g.,
US-central, Europe). Configurations are created by Google based on
resource availability.
Cloud Spanner billing is based on the instances that exist and their
sizes. After an instance exists, there are no additional
per-database or per-operation charges for use of the instance
(though there may be additional network bandwidth charges).
Instances offer isolation: problems with databases in one instance
will not affect other instances. However, within an instance
databases can affect each other. For example, if one database in an
instance receives a lot of requests and consumes most of the
instance resources, fewer resources are available for other
databases in that instance, and their performance may suffer.
"""
SERVICE_ADDRESS = 'spanner.googleapis.com'
"""The default address of the service."""
DEFAULT_SERVICE_PORT = 443
"""The default port of the service."""
_PAGE_DESCRIPTORS = {
'list_instance_configs': _PageDesc('page_token', 'next_page_token',
'instance_configs'),
'list_instances': _PageDesc('page_token', 'next_page_token',
'instances')
}
# The scopes needed to make gRPC calls to all of the methods defined in
# this service
_ALL_SCOPES = (
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/spanner.admin', )
_PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}')
_INSTANCE_CONFIG_PATH_TEMPLATE = path_template.PathTemplate(
'projects/{project}/instanceConfigs/{instance_config}')
_INSTANCE_PATH_TEMPLATE = path_template.PathTemplate(
'projects/{project}/instances/{instance}')
@classmethod
def project_path(cls, project):
"""Returns a fully-qualified project resource name string."""
return cls._PROJECT_PATH_TEMPLATE.render({'project': project, })
@classmethod
def instance_config_path(cls, project, instance_config):
"""Returns a fully-qualified instance_config resource name string."""
return cls._INSTANCE_CONFIG_PATH_TEMPLATE.render({
'project': project,
'instance_config': instance_config,
})
@classmethod
def instance_path(cls, project, instance):
"""Returns a fully-qualified instance resource name string."""
return cls._INSTANCE_PATH_TEMPLATE.render({
'project': project,
'instance': instance,
})
@classmethod
def match_project_from_project_name(cls, project_name):
"""Parses the project from a project resource.
Args:
project_name (string): A fully-qualified path representing a project
resource.
Returns:
A string representing the project.
"""
return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project')
@classmethod
def match_project_from_instance_config_name(cls, instance_config_name):
"""Parses the project from a instance_config resource.
Args:
instance_config_name (string): A fully-qualified path representing a instance_config
resource.
Returns:
A string representing the project.
"""
return cls._INSTANCE_CONFIG_PATH_TEMPLATE.match(
instance_config_name).get('project')
@classmethod
def match_instance_config_from_instance_config_name(cls,
instance_config_name):
"""Parses the instance_config from a instance_config resource.
Args:
instance_config_name (string): A fully-qualified path representing a instance_config
resource.
Returns:
A string representing the instance_config.
"""
return cls._INSTANCE_CONFIG_PATH_TEMPLATE.match(
instance_config_name).get('instance_config')
@classmethod
def match_project_from_instance_name(cls, instance_name):
"""Parses the project from a instance resource.
Args:
instance_name (string): A fully-qualified path representing a instance
resource.
Returns:
A string representing the project.
"""
return cls._INSTANCE_PATH_TEMPLATE.match(instance_name).get('project')
@classmethod
def match_instance_from_instance_name(cls, instance_name):
"""Parses the instance from a instance resource.
Args:
instance_name (string): A fully-qualified path representing a instance
resource.
Returns:
A string representing the instance.
"""
return cls._INSTANCE_PATH_TEMPLATE.match(instance_name).get('instance')
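# Illustrative round trip through the helpers above (ids are hypothetical):
#
#   name = InstanceAdminClient.instance_path('my-proj', 'my-instance')
#   # -> 'projects/my-proj/instances/my-instance'
#   InstanceAdminClient.match_instance_from_instance_name(name)
#   # -> 'my-instance'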
def __init__(self,
service_path=SERVICE_ADDRESS,
port=DEFAULT_SERVICE_PORT,
channel=None,
credentials=None,
ssl_credentials=None,
scopes=None,
client_config=None,
app_name=None,
app_version='',
lib_name=None,
lib_version='',
metrics_headers=()):
"""Constructor.
Args:
service_path (string): The domain name of the API remote host.
port (int): The port on which to connect to the remote host.
channel (:class:`grpc.Channel`): A ``Channel`` instance through
which to make calls.
credentials (object): The authorization credentials to attach to
requests. These credentials identify this application to the
service.
ssl_credentials (:class:`grpc.ChannelCredentials`): A
``ChannelCredentials`` instance for use with an SSL-enabled
channel.
scopes (list[string]): A list of OAuth2 scopes to attach to requests.
client_config (dict):
A dictionary for call options for each method. See
:func:`google.gax.construct_settings` for the structure of
this data. Falls back to the default config if not specified
or the specified config is missing data points.
app_name (string): The name of the application calling
the service. Recommended for analytics purposes.
app_version (string): The version of the application calling
the service. Recommended for analytics purposes.
lib_name (string): The API library software used for calling
the service. (Unless you are writing an API client itself,
leave this as default.)
lib_version (string): The API library software version used
for calling the service. (Unless you are writing an API client
itself, leave this as default.)
metrics_headers (dict): A dictionary of values for tracking
client library metrics. Ultimately serializes to a string
(e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be
considered private.
Returns:
An InstanceAdminClient object.
"""
# Unless the calling application specifically requested
# OAuth scopes, request everything.
if scopes is None:
scopes = self._ALL_SCOPES
# Initialize an empty client config, if none is set.
if client_config is None:
client_config = {}
# Initialize metrics_headers as an ordered dictionary
# (cuts down on cardinality of the resulting string slightly).
metrics_headers = collections.OrderedDict(metrics_headers)
metrics_headers['gl-python'] = platform.python_version()
# The library may or may not be set, depending on what is
# calling this client. Newer client libraries set the library name
# and version.
if lib_name:
metrics_headers[lib_name] = lib_version
# Finally, track the GAPIC package version.
metrics_headers['gapic'] = pkg_resources.get_distribution(
'gapic-google-cloud-spanner-admin-instance-v1', ).version
# Load the configuration defaults.
default_client_config = json.loads(
pkg_resources.resource_string(
__name__, 'instance_admin_client_config.json').decode())
defaults = api_callable.construct_settings(
'google.spanner.admin.instance.v1.InstanceAdmin',
default_client_config,
client_config,
config.STATUS_CODE_NAMES,
metrics_headers=metrics_headers,
page_descriptors=self._PAGE_DESCRIPTORS, )
self.instance_admin_stub = config.create_stub(
spanner_instance_admin_pb2.InstanceAdminStub,
channel=channel,
service_path=service_path,
service_port=port,
credentials=credentials,
scopes=scopes,
ssl_credentials=ssl_credentials)
self.operations_client = operations_client.OperationsClient(
service_path=service_path,
port=port,
channel=channel,
credentials=credentials,
ssl_credentials=ssl_credentials,
scopes=scopes,
client_config=client_config,
metrics_headers=metrics_headers, )
self._list_instance_configs = api_callable.create_api_call(
self.instance_admin_stub.ListInstanceConfigs,
settings=defaults['list_instance_configs'])
self._get_instance_config = api_callable.create_api_call(
self.instance_admin_stub.GetInstanceConfig,
settings=defaults['get_instance_config'])
self._list_instances = api_callable.create_api_call(
self.instance_admin_stub.ListInstances,
settings=defaults['list_instances'])
self._get_instance = api_callable.create_api_call(
self.instance_admin_stub.GetInstance,
settings=defaults['get_instance'])
self._create_instance = api_callable.create_api_call(
self.instance_admin_stub.CreateInstance,
settings=defaults['create_instance'])
self._update_instance = api_callable.create_api_call(
self.instance_admin_stub.UpdateInstance,
settings=defaults['update_instance'])
self._delete_instance = api_callable.create_api_call(
self.instance_admin_stub.DeleteInstance,
settings=defaults['delete_instance'])
self._set_iam_policy = api_callable.create_api_call(
self.instance_admin_stub.SetIamPolicy,
settings=defaults['set_iam_policy'])
self._get_iam_policy = api_callable.create_api_call(
self.instance_admin_stub.GetIamPolicy,
settings=defaults['get_iam_policy'])
self._test_iam_permissions = api_callable.create_api_call(
self.instance_admin_stub.TestIamPermissions,
settings=defaults['test_iam_permissions'])
# Service calls
def list_instance_configs(self, parent, page_size=0, options=None):
"""
Lists the supported instance configurations for a given project.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> from google.gax import CallOptions, INITIAL_PAGE
>>> api = instance_admin_client.InstanceAdminClient()
>>> parent = api.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in api.list_instance_configs(parent):
>>> # process element
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in api.list_instance_configs(parent, options=CallOptions(page_token=INITIAL_PAGE)):
>>> for element in page:
>>> # process element
>>> pass
Args:
parent (string): Required. The name of the project for which a list of supported instance
configurations is requested. Values are of the form
``projects/<project>``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g., timeout, retries etc.
Returns:
A :class:`google.gax.PageIterator` instance. By default, this
is an iterable of :class:`google.cloud.proto.spanner.admin.instance.v1.spanner_instance_admin_pb2.InstanceConfig` instances.
This object can also be configured to iterate over the pages
of the response through the `CallOptions` parameter.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.ListInstanceConfigsRequest(
parent=parent, page_size=page_size)
return self._list_instance_configs(request, options)
def get_instance_config(self, name, options=None):
"""
Gets information about a particular instance configuration.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> api = instance_admin_client.InstanceAdminClient()
>>> name = api.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]')
>>> response = api.get_instance_config(name)
Args:
name (string): Required. The name of the requested instance configuration. Values are of
the form ``projects/<project>/instanceConfigs/<config>``.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g., timeout, retries etc.
Returns:
A :class:`google.cloud.proto.spanner.admin.instance.v1.spanner_instance_admin_pb2.InstanceConfig` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.GetInstanceConfigRequest(
name=name)
return self._get_instance_config(request, options)
def list_instances(self, parent, page_size=0, filter_='', options=None):
"""
Lists all instances in the given project.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> from google.gax import CallOptions, INITIAL_PAGE
>>> api = instance_admin_client.InstanceAdminClient()
>>> parent = api.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in api.list_instances(parent):
>>> # process element
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in api.list_instances(parent, options=CallOptions(page_token=INITIAL_PAGE)):
>>> for element in page:
>>> # process element
>>> pass
Args:
parent (string): Required. The name of the project for which a list of instances is
requested. Values are of the form ``projects/<project>``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
          filter_ (string): An expression for filtering the results of the request. Filter rules are
            case insensitive. The fields eligible for filtering are:
            * name
            * display_name
            * labels.key where key is the name of a label
            Some examples of using filters are:
            * name:* --> The instance has a name.
            * name:Howl --> The instance's name contains the string "howl".
            * name:HOWL --> Equivalent to above.
            * NAME:howl --> Equivalent to above.
            * labels.env:* --> The instance has the label "env".
            * labels.env:dev --> The instance has the label "env" and the value of
              the label contains the string "dev".
            * name:howl labels.env:dev --> The instance's name contains "howl" and
              it has the label "env" with its value containing "dev".
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.gax.PageIterator` instance. By default, this
is an iterable of :class:`google.cloud.proto.spanner.admin.instance.v1.spanner_instance_admin_pb2.Instance` instances.
This object can also be configured to iterate over the pages
of the response through the `CallOptions` parameter.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.ListInstancesRequest(
parent=parent, page_size=page_size, filter=filter_)
return self._list_instances(request, options)
def get_instance(self, name, options=None):
"""
Gets information about a particular instance.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> api = instance_admin_client.InstanceAdminClient()
>>> name = api.instance_path('[PROJECT]', '[INSTANCE]')
>>> response = api.get_instance(name)
Args:
name (string): Required. The name of the requested instance. Values are of the form
``projects/<project>/instances/<instance>``.
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.cloud.proto.spanner.admin.instance.v1.spanner_instance_admin_pb2.Instance` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.GetInstanceRequest(name=name)
return self._get_instance(request, options)
def create_instance(self, parent, instance_id, instance, options=None):
"""
        Creates an instance and begins preparing it to serve. The
returned ``long-running operation``
can be used to track the progress of preparing the new
instance. The instance name is assigned by the caller. If the
named instance already exists, ``CreateInstance`` returns
``ALREADY_EXISTS``.
Immediately upon completion of this request:
* The instance is readable via the API, with all requested attributes
but no allocated resources. Its state is `CREATING`.
Until completion of the returned operation:
* Cancelling the operation renders the instance immediately unreadable
via the API.
* The instance can be deleted.
* All other attempts to modify the instance are rejected.
Upon completion of the returned operation:
        * Billing for all successfully-allocated resources begins (some
          resource types may be allocated at lower than the requested levels).
* Databases can be created in the instance.
* The instance's allocated resource levels are readable via the API.
* The instance's state becomes ``READY``.
The returned ``long-running operation`` will
have a name of the format ``<instance_name>/operations/<operation_id>`` and
can be used to track creation of the instance. The
``metadata`` field type is
``CreateInstanceMetadata``.
The ``response`` field type is
``Instance``, if successful.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> from google.cloud.proto.spanner.admin.instance.v1 import spanner_instance_admin_pb2
>>> api = instance_admin_client.InstanceAdminClient()
>>> parent = api.project_path('[PROJECT]')
>>> instance_id = ''
>>> instance = spanner_instance_admin_pb2.Instance()
>>> response = api.create_instance(parent, instance_id, instance)
>>>
>>> def callback(operation_future):
>>> # Handle result.
>>> result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (string): Required. The name of the project in which to create the instance. Values
are of the form ``projects/<project>``.
instance_id (string): Required. The ID of the instance to create. Valid identifiers are of the
form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 6 and 30 characters in
length.
instance (:class:`google.cloud.proto.spanner.admin.instance.v1.spanner_instance_admin_pb2.Instance`): Required. The instance to create. The name may be omitted, but if
specified must be ``<parent>/instances/<instance_id>``.
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.gax._OperationFuture` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.CreateInstanceRequest(
parent=parent, instance_id=instance_id, instance=instance)
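        # Wrap the raw long-running operation in an _OperationFuture so the
        # caller can poll for the Instance result and the
        # CreateInstanceMetadata metadata.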
return google.gax._OperationFuture(
self._create_instance(request, options), self.operations_client,
spanner_instance_admin_pb2.Instance,
spanner_instance_admin_pb2.CreateInstanceMetadata, options)
def update_instance(self, instance, field_mask, options=None):
"""
Updates an instance, and begins allocating or releasing resources
        as requested. The returned ``long-running operation`` can be
        used to track the progress of updating the instance. If the
        named instance does not exist, returns ``NOT_FOUND``.
Immediately upon completion of this request:
* For resource types for which a decrease in the instance's allocation
has been requested, billing is based on the newly-requested level.
Until completion of the returned operation:
* Cancelling the operation sets its metadata's
          ``cancel_time``, and begins
restoring resources to their pre-request values. The operation
is guaranteed to succeed at undoing all resource changes,
after which point it terminates with a `CANCELLED` status.
* All other attempts to modify the instance are rejected.
* Reading the instance via the API continues to give the pre-request
resource levels.
Upon completion of the returned operation:
        * Billing begins for all successfully-allocated resources (some
          resource types may be allocated at lower than the requested levels).
* All newly-reserved resources are available for serving the instance's
tables.
* The instance's new resource levels are readable via the API.
The returned ``long-running operation`` will
have a name of the format ``<instance_name>/operations/<operation_id>`` and
can be used to track the instance modification. The
``metadata`` field type is
``UpdateInstanceMetadata``.
The ``response`` field type is
``Instance``, if successful.
Authorization requires ``spanner.instances.update`` permission on
resource ``name``.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> from google.cloud.proto.spanner.admin.instance.v1 import spanner_instance_admin_pb2
>>> from google.protobuf import field_mask_pb2
>>> api = instance_admin_client.InstanceAdminClient()
>>> instance = spanner_instance_admin_pb2.Instance()
>>> field_mask = field_mask_pb2.FieldMask()
>>> response = api.update_instance(instance, field_mask)
>>>
>>> def callback(operation_future):
>>> # Handle result.
>>> result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
          instance (:class:`google.cloud.proto.spanner.admin.instance.v1.spanner_instance_admin_pb2.Instance`): Required. The instance to update, which must always include the instance
            name. Otherwise, only fields mentioned in ``field_mask`` need be included.
          field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. A mask specifying which fields in ``instance`` should be updated.
            The field mask must always be specified; this prevents any future fields in
            ``Instance`` from being erased accidentally by clients that do not know
            about them.
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.gax._OperationFuture` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.UpdateInstanceRequest(
instance=instance, field_mask=field_mask)
return google.gax._OperationFuture(
self._update_instance(request, options), self.operations_client,
spanner_instance_admin_pb2.Instance,
spanner_instance_admin_pb2.UpdateInstanceMetadata, options)
def delete_instance(self, name, options=None):
"""
Deletes an instance.
Immediately upon completion of the request:
* Billing ceases for all of the instance's reserved resources.
Soon afterward:
* The instance and *all of its databases* immediately and
irrevocably disappear from the API. All data in the databases
is permanently deleted.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> api = instance_admin_client.InstanceAdminClient()
>>> name = api.instance_path('[PROJECT]', '[INSTANCE]')
>>> api.delete_instance(name)
Args:
name (string): Required. The name of the instance to be deleted. Values are of the form
``projects/<project>/instances/<instance>``
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name)
self._delete_instance(request, options)
def set_iam_policy(self, resource, policy, options=None):
"""
Sets the access control policy on an instance resource. Replaces any
existing policy.
Authorization requires ``spanner.instances.setIamPolicy`` on
``resource``.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> from google.iam.v1 import policy_pb2
>>> api = instance_admin_client.InstanceAdminClient()
>>> resource = api.instance_path('[PROJECT]', '[INSTANCE]')
>>> policy = policy_pb2.Policy()
>>> response = api.set_iam_policy(resource, policy)
Args:
resource (string): REQUIRED: The resource for which the policy is being specified.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of
the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.iam.v1.policy_pb2.Policy` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = iam_policy_pb2.SetIamPolicyRequest(
resource=resource, policy=policy)
return self._set_iam_policy(request, options)
def get_iam_policy(self, resource, options=None):
"""
Gets the access control policy for an instance resource. Returns an empty
policy if an instance exists but does not have a policy set.
Authorization requires ``spanner.instances.getIamPolicy`` on
``resource``.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> api = instance_admin_client.InstanceAdminClient()
>>> resource = api.instance_path('[PROJECT]', '[INSTANCE]')
>>> response = api.get_iam_policy(resource)
Args:
resource (string): REQUIRED: The resource for which the policy is being requested.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.iam.v1.policy_pb2.Policy` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
return self._get_iam_policy(request, options)
def test_iam_permissions(self, resource, permissions, options=None):
"""
Returns permissions that the caller has on the specified instance resource.
Attempting this RPC on a non-existent Cloud Spanner instance resource will
result in a NOT_FOUND error if the user has ``spanner.instances.list``
permission on the containing Google Cloud Project. Otherwise returns an
empty set of permissions.
Example:
>>> from google.cloud.gapic.spanner_admin_instance.v1 import instance_admin_client
>>> api = instance_admin_client.InstanceAdminClient()
>>> resource = api.instance_path('[PROJECT]', '[INSTANCE]')
>>> permissions = []
>>> response = api.test_iam_permissions(resource, permissions)
Args:
resource (string): REQUIRED: The resource for which the policy detail is being requested.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed. For more
information see
`IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`_.
options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g., timeout, retries, etc.
Returns:
A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions)
return self._test_iam_permissions(request, options)
|
|
"""The test for the Template sensor platform."""
from datetime import timedelta
from os import path
from unittest.mock import patch
from homeassistant import config
from homeassistant.components.template import DOMAIN
from homeassistant.helpers.reload import SERVICE_RELOAD
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import async_fire_time_changed
async def test_reloadable(hass):
"""Test that we can reload."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
},
"template": [
{
"trigger": {"platform": "event", "event_type": "event_1"},
"sensor": {
"name": "top level",
"state": "{{ trigger.event.data.source }}",
},
},
{
"sensor": {
"name": "top level state",
"state": "{{ states.sensor.top_level.state }} + 2",
},
"binary_sensor": {
"name": "top level state",
"state": "{{ states.sensor.top_level.state == 'init' }}",
},
},
],
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.top_level_state").state == "unknown + 2"
assert hass.states.get("binary_sensor.top_level_state").state == "off"
hass.bus.async_fire("event_1", {"source": "init"})
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 5
assert hass.states.get("sensor.state").state == "mytest"
assert hass.states.get("sensor.top_level").state == "init"
await hass.async_block_till_done()
assert hass.states.get("sensor.top_level_state").state == "init + 2"
assert hass.states.get("binary_sensor.top_level_state").state == "on"
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/sensor_configuration.yaml",
)
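    # Point the core config at a fixture YAML file and call the template
    # domain's reload service; the same reload pattern recurs in the tests below.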
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 4
hass.bus.async_fire("event_2", {"source": "reload"})
await hass.async_block_till_done()
assert hass.states.get("sensor.state") is None
assert hass.states.get("sensor.top_level") is None
assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
assert hass.states.get("sensor.top_level_2").state == "reload"
async def test_reloadable_can_remove(hass):
"""Test that we can reload and remove all template sensors."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
},
"template": {
"trigger": {"platform": "event", "event_type": "event_1"},
"sensor": {
"name": "top level",
"state": "{{ trigger.event.data.source }}",
},
},
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
hass.bus.async_fire("event_1", {"source": "init"})
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.state").state == "mytest"
assert hass.states.get("sensor.top_level").state == "init"
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/empty_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
async def test_reloadable_stops_on_invalid_config(hass):
"""Test we stop the reload if configuration.yaml is completely broken."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/configuration.yaml.corrupt",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
async def test_reloadable_handles_partial_valid_config(hass):
"""Test we can still setup valid sensors when configuration.yaml has a broken entry."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/broken_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.state") is None
assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
async def test_reloadable_multiple_platforms(hass):
"""Test that we can reload."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert hass.states.get("binary_sensor.state").state == "off"
assert len(hass.states.async_all()) == 3
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/sensor_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 4
assert hass.states.get("sensor.state") is None
assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
assert hass.states.get("sensor.top_level_2") is not None
async def test_reload_sensors_that_reference_other_template_sensors(hass):
"""Test that we can reload sensor that reference other template sensors."""
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {"value_template": "{{ 1 }}"},
},
}
},
)
await hass.async_block_till_done()
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/ref_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
await hass.async_block_till_done()
next_time = dt_util.utcnow() + timedelta(seconds=1.2)
with patch(
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
):
async_fire_time_changed(hass, next_time)
await hass.async_block_till_done()
assert hass.states.get("sensor.test1").state == "3"
assert hass.states.get("sensor.test2").state == "1"
assert hass.states.get("sensor.test3").state == "2"
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from mock import patch
from mock import MagicMock as Mock
from pyos.clouddatabases import CloudDatabaseBackupManager
from pyos.clouddatabases import CloudDatabaseDatabase
from pyos.clouddatabases import CloudDatabaseFlavor
from pyos.clouddatabases import CloudDatabaseInstance
from pyos.clouddatabases import CloudDatabaseUser
from pyos.clouddatabases import CloudDatabaseVolume
from pyos.clouddatabases import assure_instance
import pyos.exceptions as exc
from pyos.resource import BaseResource
import pyos.utils as utils
from pyos import fakes
example_uri = "http://example.com"
class CloudDatabasesTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CloudDatabasesTest, self).__init__(*args, **kwargs)
def setUp(self):
self.instance = fakes.FakeDatabaseInstance()
self.client = fakes.FakeDatabaseClient()
def tearDown(self):
pass
def test_assure_instance(self):
class TestClient(object):
_manager = fakes.FakeManager()
@assure_instance
def test_method(self, instance):
return instance
client = TestClient()
client._manager.get = Mock(return_value=self.instance)
# Pass the instance
ret = client.test_method(self.instance)
self.assertTrue(ret is self.instance)
# Pass the ID
ret = client.test_method(self.instance.id)
self.assertTrue(ret is self.instance)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_instantiate_instance(self):
inst = CloudDatabaseInstance(fakes.FakeManager(), {"id": 42,
"volume": {"size": 1, "used": 0.2}})
self.assertTrue(isinstance(inst, CloudDatabaseInstance))
self.assertTrue(isinstance(inst.volume, CloudDatabaseVolume))
def test_list_databases(self):
inst = self.instance
inst._database_manager.list = Mock()
limit = utils.random_unicode()
marker = utils.random_unicode()
inst.list_databases(limit=limit, marker=marker)
inst._database_manager.list.assert_called_once_with(limit=limit,
marker=marker)
def test_list_users(self):
inst = self.instance
inst._user_manager.list = Mock()
limit = utils.random_unicode()
marker = utils.random_unicode()
inst.list_users(limit=limit, marker=marker)
inst._user_manager.list.assert_called_once_with(limit=limit,
marker=marker)
def test_get_database(self):
inst = self.instance
db1 = fakes.FakeEntity()
db1.name = "a"
db2 = fakes.FakeEntity()
db2.name = "b"
inst.list_databases = Mock(return_value=[db1, db2])
ret = inst.get_database("a")
self.assertEqual(ret, db1)
def test_get_database_bad(self):
inst = self.instance
db1 = fakes.FakeEntity()
db1.name = "a"
db2 = fakes.FakeEntity()
db2.name = "b"
inst.list_databases = Mock(return_value=[db1, db2])
self.assertRaises(exc.NoSuchDatabase, inst.get_database, "z")
def test_dbmgr_get(self):
mgr = fakes.FakeDatabaseManager()
rsrc = fakes.FakeDatabaseInstance()
rsrc.volume = {}
mgr._get = Mock(return_value=rsrc)
ret = mgr.get("fake")
self.assertTrue(isinstance(ret, CloudDatabaseInstance))
self.assertTrue(isinstance(ret.volume, CloudDatabaseVolume))
def test_dbmgr_create_backup(self):
inst = self.instance
mgr = inst.manager
name = utils.random_unicode()
description = utils.random_unicode()
mgr.api.method_post = Mock(return_value=(None, {"backup": {}}))
expected_uri = "/backups"
expected_body = {"backup": {"instance": inst.id, "name": name,
"description": description}}
mgr.create_backup(inst, name, description=description)
mgr.api.method_post.assert_called_once_with(expected_uri,
body=expected_body)
@patch('pyos.clouddatabases.CloudDatabaseInstance',
new=fakes.FakeDatabaseInstance)
def test_mgr_restore_backup(self):
inst = self.instance
mgr = inst.manager
name = utils.random_unicode()
flavor = utils.random_unicode()
fref = utils.random_unicode()
volume = utils.random_unicode()
backup = utils.random_unicode()
mgr.api.method_post = Mock(return_value=(None, {"instance": {}}))
mgr.api._get_flavor_ref = Mock(return_value=fref)
expected_uri = "/%s" % mgr.uri_base
expected_body = {"instance": {"name": name, "flavorRef": fref,
"volume": {"size": volume}, "restorePoint":
{"backupRef": backup}}}
mgr.restore_backup(backup, name, flavor, volume)
mgr.api.method_post.assert_called_once_with(expected_uri,
body=expected_body)
def test_mgr_list_backups(self):
inst = self.instance
mgr = inst.manager
mgr.api._backup_manager.list = Mock(return_value=(None, None))
mgr.list_backups(inst)
mgr.api._backup_manager.list.assert_called_once_with(instance=inst)
def test_mgr_list_backups_for_instance(self):
inst = self.instance
mgr = inst.manager
mgr.api.method_get = Mock(return_value=(None, {"backups": []}))
expected_uri = "/%s/%s/backups" % (mgr.uri_base, inst.id)
mgr._list_backups_for_instance(inst)
mgr.api.method_get.assert_called_once_with(expected_uri)
def test_create_database(self):
inst = self.instance
inst._database_manager.create = Mock()
inst._database_manager.find = Mock()
db = inst.create_database(name="test")
inst._database_manager.create.assert_called_once_with(name="test",
character_set="utf8", collate="utf8_general_ci",
return_none=True)
def test_create_user(self):
inst = self.instance
inst._user_manager.create = Mock()
inst._user_manager.find = Mock()
name = utils.random_unicode()
password = utils.random_unicode()
database_names = utils.random_unicode()
host = utils.random_unicode()
inst.create_user(name=name, password=password,
database_names=database_names, host=host)
inst._user_manager.create.assert_called_once_with(name=name,
password=password, database_names=[database_names], host=host,
return_none=True)
def test_delete_database(self):
inst = self.instance
inst._database_manager.delete = Mock()
inst.delete_database("dbname")
inst._database_manager.delete.assert_called_once_with("dbname")
def test_delete_user(self):
inst = self.instance
inst._user_manager.delete = Mock()
inst.delete_user("username")
inst._user_manager.delete.assert_called_once_with("username")
def test_delete_database_direct(self):
inst = self.instance
mgr = inst.manager
name = utils.random_unicode()
db = CloudDatabaseDatabase(mgr, info={"name": name})
mgr.delete = Mock()
db.delete()
mgr.delete.assert_called_once_with(name)
def test_delete_user_direct(self):
inst = self.instance
mgr = inst.manager
name = utils.random_unicode()
user = CloudDatabaseUser(mgr, info={"name": name})
mgr.delete = Mock()
user.delete()
mgr.delete.assert_called_once_with(name)
def test_enable_root_user(self):
inst = self.instance
pw = utils.random_unicode()
fake_body = {"user": {"password": pw}}
inst.manager.api.method_post = Mock(return_value=(None, fake_body))
ret = inst.enable_root_user()
call_uri = "/instances/%s/root" % inst.id
inst.manager.api.method_post.assert_called_once_with(call_uri)
self.assertEqual(ret, pw)
def test_root_user_status(self):
inst = self.instance
fake_body = {"rootEnabled": True}
inst.manager.api.method_get = Mock(return_value=(None, fake_body))
ret = inst.root_user_status()
call_uri = "/instances/%s/root" % inst.id
inst.manager.api.method_get.assert_called_once_with(call_uri)
self.assertTrue(ret)
def test_restart(self):
inst = self.instance
inst.manager.action = Mock()
ret = inst.restart()
inst.manager.action.assert_called_once_with(inst, "restart")
def test_resize(self):
inst = self.instance
flavor_ref = utils.random_unicode()
inst.manager.api._get_flavor_ref = Mock(return_value=flavor_ref)
fake_body = {"flavorRef": flavor_ref}
inst.manager.action = Mock()
ret = inst.resize(42)
call_uri = "/instances/%s/action" % inst.id
inst.manager.action.assert_called_once_with(inst, "resize",
body=fake_body)
def test_resize_volume_too_small(self):
inst = self.instance
inst.volume.get = Mock(return_value=2)
self.assertRaises(exc.InvalidVolumeResize, inst.resize_volume, 1)
def test_resize_volume(self):
inst = self.instance
fake_body = {"volume": {"size": 2}}
inst.manager.action = Mock()
ret = inst.resize_volume(2)
inst.manager.action.assert_called_once_with(inst, "resize",
body=fake_body)
def test_resize_volume_direct(self):
inst = self.instance
vol = inst.volume
fake_body = {"volume": {"size": 2}}
inst.manager.action = Mock()
ret = vol.resize(2)
inst.manager.action.assert_called_once_with(inst, "resize",
body=fake_body)
def test_volume_get(self):
inst = self.instance
vol = inst.volume
att = vol.size
using_get = vol.get("size")
self.assertEqual(att, using_get)
def test_volume_get_fail(self):
inst = self.instance
vol = inst.volume
self.assertRaises(AttributeError, vol.get, "fake")
def test_inst_list_backups(self):
inst = self.instance
mgr = inst.manager
mgr._list_backups_for_instance = Mock()
inst.list_backups()
mgr._list_backups_for_instance.assert_called_once_with(inst)
def test_inst_create_backup(self):
inst = self.instance
mgr = inst.manager
name = utils.random_unicode()
description = utils.random_unicode()
mgr.create_backup = Mock()
inst.create_backup(name, description=description)
mgr.create_backup.assert_called_once_with(inst, name,
description=description)
def test_get_flavor_property(self):
inst = self.instance
inst._loaded = True
flavor = inst.flavor
self.assertTrue(isinstance(flavor, CloudDatabaseFlavor))
def test_set_flavor_property_dict(self):
inst = self.instance
inst._loaded = True
inst.flavor = {"name": "test"}
self.assertTrue(isinstance(inst.flavor, CloudDatabaseFlavor))
def test_set_flavor_property_instance(self):
inst = self.instance
inst._loaded = True
flavor = CloudDatabaseFlavor(inst.manager, {"name": "test"})
inst.flavor = flavor
self.assertTrue(isinstance(inst.flavor, CloudDatabaseFlavor))
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_list_databases_for_instance(self):
clt = self.client
inst = self.instance
limit = utils.random_unicode()
marker = utils.random_unicode()
inst.list_databases = Mock(return_value=["db"])
ret = clt.list_databases(inst, limit=limit, marker=marker)
self.assertEqual(ret, ["db"])
inst.list_databases.assert_called_once_with(limit=limit, marker=marker)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_create_database_for_instance(self):
clt = self.client
inst = self.instance
inst.create_database = Mock(return_value=["db"])
nm = utils.random_unicode()
ret = clt.create_database(inst, nm)
self.assertEqual(ret, ["db"])
inst.create_database.assert_called_once_with(nm,
character_set=None, collate=None)
def test_clt_get_database(self):
clt = self.client
inst = self.instance
inst.get_database = Mock()
nm = utils.random_unicode()
clt.get_database(inst, nm)
inst.get_database.assert_called_once_with(nm)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_delete_database_for_instance(self):
clt = self.client
inst = self.instance
inst.delete_database = Mock()
nm = utils.random_unicode()
clt.delete_database(inst, nm)
inst.delete_database.assert_called_once_with(nm)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_list_users_for_instance(self):
clt = self.client
inst = self.instance
limit = utils.random_unicode()
marker = utils.random_unicode()
inst.list_users = Mock(return_value=["user"])
ret = clt.list_users(inst, limit=limit, marker=marker)
self.assertEqual(ret, ["user"])
inst.list_users.assert_called_once_with(limit=limit, marker=marker)
def test_create_user_for_instance(self):
clt = self.client
inst = self.instance
inst.create_user = Mock()
nm = utils.random_unicode()
pw = utils.random_unicode()
host = utils.random_unicode()
ret = clt.create_user(inst, nm, pw, ["db"], host=host)
inst.create_user.assert_called_once_with(name=nm, password=pw,
database_names=["db"], host=host)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_delete_user_for_instance(self):
clt = self.client
inst = self.instance
inst.delete_user = Mock()
nm = utils.random_unicode()
clt.delete_user(inst, nm)
inst.delete_user.assert_called_once_with(nm)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_enable_root_user_for_instance(self):
clt = self.client
inst = self.instance
inst.enable_root_user = Mock()
clt.enable_root_user(inst)
inst.enable_root_user.assert_called_once_with()
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_root_user_status_for_instance(self):
clt = self.client
inst = self.instance
inst.root_user_status = Mock()
clt.root_user_status(inst)
inst.root_user_status.assert_called_once_with()
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_user_by_client(self):
clt = self.client
inst = self.instance
inst.get_user = Mock()
fakeuser = utils.random_unicode()
clt.get_user(inst, fakeuser)
inst.get_user.assert_called_once_with(fakeuser)
def test_get_user(self):
inst = self.instance
good_name = utils.random_unicode()
user = fakes.FakeDatabaseUser(manager=None, info={"name": good_name})
inst._user_manager.get = Mock(return_value=user)
returned = inst.get_user(good_name)
self.assertEqual(returned, user)
def test_get_user_fail(self):
inst = self.instance
bad_name = utils.random_unicode()
inst._user_manager.get = Mock(side_effect=exc.NotFound(""))
self.assertRaises(exc.NoSuchDatabaseUser, inst.get_user, bad_name)
def test_get_db_names(self):
inst = self.instance
mgr = inst._user_manager
mgr.instance = inst
dbname1 = utils.random_ascii()
dbname2 = utils.random_ascii()
inst.list_databases = Mock(return_value=((dbname1, dbname2)))
resp = mgr._get_db_names(dbname1)
self.assertEqual(resp, [dbname1])
def test_get_db_names_not_strict(self):
inst = self.instance
mgr = inst._user_manager
mgr.instance = inst
dbname1 = utils.random_ascii()
dbname2 = utils.random_ascii()
inst.list_databases = Mock(return_value=((dbname1, dbname2)))
resp = mgr._get_db_names("BAD", strict=False)
self.assertEqual(resp, ["BAD"])
def test_get_db_names_fail(self):
inst = self.instance
mgr = inst._user_manager
mgr.instance = inst
dbname1 = utils.random_ascii()
dbname2 = utils.random_ascii()
inst.list_databases = Mock(return_value=((dbname1, dbname2)))
self.assertRaises(exc.NoSuchDatabase, mgr._get_db_names, "BAD")
def test_change_user_password(self):
inst = self.instance
fakename = utils.random_ascii()
newpass = utils.random_ascii()
resp = fakes.FakeResponse()
resp.status_code = 202
inst._user_manager.api.method_put = Mock(return_value=(resp, {}))
fakeuser = fakes.FakeDatabaseUser(inst._user_manager, {"name": fakename})
inst._user_manager.get = Mock(return_value=fakeuser)
inst.change_user_password(fakename, newpass)
inst._user_manager.api.method_put.assert_called_once_with(
"/None/%s" % fakename, body={"user": {"password": newpass}})
def test_update_user(self):
inst = self.instance
mgr = inst._user_manager
user = utils.random_unicode()
name = utils.random_unicode()
password = utils.random_unicode()
host = utils.random_unicode()
mgr.update = Mock()
inst.update_user(user, name=name, password=password, host=host)
mgr.update.assert_called_once_with(user, name=name, password=password,
host=host)
def test_user_manager_update(self):
inst = self.instance
mgr = inst._user_manager
username = utils.random_unicode()
user = fakes.FakeDatabaseUser(mgr, info={"name": username})
name = utils.random_unicode()
host = utils.random_unicode()
password = utils.random_unicode()
mgr.api.method_put = Mock(return_value=(None, None))
expected_uri = "/%s/%s" % (mgr.uri_base, username)
expected_body = {"user": {"name": name, "host": host,
"password": password}}
mgr.update(user, name=name, host=host, password=password)
mgr.api.method_put.assert_called_once_with(expected_uri,
body=expected_body)
def test_user_manager_update_missing(self):
inst = self.instance
mgr = inst._user_manager
username = utils.random_unicode()
user = fakes.FakeDatabaseUser(mgr, info={"name": username})
self.assertRaises(exc.MissingDBUserParameters, mgr.update, user)
def test_user_manager_update_unchanged(self):
inst = self.instance
mgr = inst._user_manager
username = utils.random_unicode()
user = fakes.FakeDatabaseUser(mgr, info={"name": username})
self.assertRaises(exc.DBUpdateUnchanged, mgr.update, user,
name=username)
def test_list_user_access(self):
inst = self.instance
dbname1 = utils.random_ascii()
dbname2 = utils.random_ascii()
acc = {"databases": [{"name": dbname1}, {"name": dbname2}]}
inst._user_manager.api.method_get = Mock(return_value=(None, acc))
db_list = inst.list_user_access("fakeuser")
self.assertEqual(len(db_list), 2)
self.assertTrue(db_list[0].name in (dbname1, dbname2))
def test_list_user_access_not_found(self):
inst = self.instance
mgr = inst._user_manager
mgr.api.method_get = Mock(side_effect=exc.NotFound(""))
username = utils.random_unicode()
user = fakes.FakeDatabaseUser(mgr, info={"name": username})
self.assertRaises(exc.NoSuchDatabaseUser, mgr.list_user_access, user)
def test_grant_user_access(self):
inst = self.instance
fakeuser = utils.random_ascii()
dbname1 = utils.random_ascii()
inst._user_manager.api.method_put = Mock(return_value=(None, None))
inst.grant_user_access(fakeuser, dbname1, strict=False)
inst._user_manager.api.method_put.assert_called_once_with(
"/None/%s/databases" % fakeuser, body={"databases": [{"name":
dbname1}]})
def test_grant_user_access_not_found(self):
inst = self.instance
mgr = inst._user_manager
mgr.api.method_put = Mock(side_effect=exc.NotFound(""))
username = utils.random_unicode()
user = fakes.FakeDatabaseUser(mgr, info={"name": username})
db_names = utils.random_unicode()
mgr._get_db_names = Mock(return_value=[])
self.assertRaises(exc.NoSuchDatabaseUser, mgr.grant_user_access, user,
db_names)
def test_revoke_user_access(self):
inst = self.instance
fakeuser = utils.random_ascii()
dbname1 = utils.random_ascii()
inst._user_manager.api.method_delete = Mock(return_value=(None, None))
inst.revoke_user_access(fakeuser, dbname1, strict=False)
inst._user_manager.api.method_delete.assert_called_once_with(
"/None/%s/databases/%s" % (fakeuser, dbname1))
def test_backup_mgr_create_body(self):
inst = self.instance
mgr = inst.manager
bu_mgr = mgr.api._backup_manager
name = utils.random_unicode()
description = utils.random_unicode()
expected_body = {"backup": {"instance": inst.id, "name": name,
"description": description}}
ret = bu_mgr._create_body(name, inst, description=description)
self.assertEqual(ret, expected_body)
def test_backup_mgr_list(self):
inst = self.instance
mgr = inst.manager
bu_mgr = mgr.api._backup_manager
fake_val = utils.random_unicode()
bu_mgr._list = Mock(return_value=fake_val)
ret = bu_mgr.list()
self.assertEqual(ret, fake_val)
def test_backup_mgr_list_instance(self):
inst = self.instance
mgr = inst.manager
bu_mgr = mgr.api._backup_manager
db_mgr = mgr.api._manager
db_mgr._list_backups_for_instance = Mock()
bu_mgr.list(instance=inst)
db_mgr._list_backups_for_instance.assert_called_once_with(inst)
def test_clt_change_user_password(self):
clt = self.client
inst = self.instance
inst.change_user_password = Mock()
user = utils.random_unicode()
pw = utils.random_unicode()
clt.change_user_password(inst, user, pw)
inst.change_user_password.assert_called_once_with(user, pw)
def test_user_change_password(self):
inst = self.instance
mgr = inst.manager
password = utils.random_unicode()
user = CloudDatabaseUser(mgr, info={"name": "fake"})
mgr.change_user_password = Mock()
user.change_password(password)
mgr.change_user_password.assert_called_once_with(user, password)
def test_clt_update_user(self):
clt = self.client
inst = self.instance
inst.update_user = Mock()
user = utils.random_unicode()
name = utils.random_unicode()
password = utils.random_unicode()
host = utils.random_unicode()
clt.update_user(inst, user, name=name, password=password, host=host)
inst.update_user.assert_called_once_with(user, name=name,
password=password, host=host)
def test_user_update(self):
inst = self.instance
mgr = inst.manager
name = utils.random_unicode()
password = utils.random_unicode()
host = utils.random_unicode()
user = CloudDatabaseUser(mgr, info={"name": "fake"})
mgr.update = Mock()
user.update(name=name, password=password, host=host)
mgr.update.assert_called_once_with(user, name=name, password=password,
host=host)
def test_clt_list_user_access(self):
clt = self.client
inst = self.instance
inst.list_user_access = Mock()
user = utils.random_unicode()
clt.list_user_access(inst, user)
inst.list_user_access.assert_called_once_with(user)
def test_user_list_user_access(self):
inst = self.instance
mgr = inst.manager
user = CloudDatabaseUser(mgr, info={"name": "fake"})
mgr.list_user_access = Mock()
user.list_user_access()
mgr.list_user_access.assert_called_once_with(user)
def test_clt_grant_user_access(self):
clt = self.client
inst = self.instance
inst.grant_user_access = Mock()
user = utils.random_unicode()
db_names = utils.random_unicode()
clt.grant_user_access(inst, user, db_names)
inst.grant_user_access.assert_called_once_with(user, db_names,
strict=True)
def test_user_grant_user_access(self):
inst = self.instance
mgr = inst.manager
user = CloudDatabaseUser(mgr, info={"name": "fake"})
db_names = utils.random_unicode()
strict = utils.random_unicode()
mgr.grant_user_access = Mock()
user.grant_user_access(db_names, strict=strict)
mgr.grant_user_access.assert_called_once_with(user, db_names,
strict=strict)
def test_clt_revoke_user_access(self):
clt = self.client
inst = self.instance
inst.revoke_user_access = Mock()
user = utils.random_unicode()
db_names = utils.random_unicode()
clt.revoke_user_access(inst, user, db_names)
inst.revoke_user_access.assert_called_once_with(user, db_names,
strict=True)
def test_user_revoke_user_access(self):
inst = self.instance
mgr = inst.manager
user = CloudDatabaseUser(mgr, info={"name": "fake"})
db_names = utils.random_unicode()
strict = utils.random_unicode()
mgr.revoke_user_access = Mock()
user.revoke_user_access(db_names, strict=strict)
mgr.revoke_user_access.assert_called_once_with(user, db_names,
strict=strict)
def test_clt_restart(self):
clt = self.client
inst = self.instance
inst.restart = Mock()
clt.restart(inst)
inst.restart.assert_called_once_with()
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_inst_resize(self):
clt = self.client
inst = self.instance
inst.resize = Mock()
clt.resize(inst, "flavor")
inst.resize.assert_called_once_with("flavor")
def test_get_limits(self):
self.assertRaises(NotImplementedError, self.client.get_limits)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_list_flavors(self):
clt = self.client
clt._flavor_manager.list = Mock()
limit = utils.random_unicode()
marker = utils.random_unicode()
clt.list_flavors(limit=limit, marker=marker)
clt._flavor_manager.list.assert_called_once_with(limit=limit,
marker=marker)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_flavor(self):
clt = self.client
clt._flavor_manager.get = Mock()
clt.get_flavor("flavorid")
clt._flavor_manager.get.assert_called_once_with("flavorid")
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_flavor_ref_for_obj(self):
clt = self.client
info = {"id": 1,
"name": "test_flavor",
"ram": 42,
"links": [{
"href": example_uri,
"rel": "self"}]}
flavor_obj = CloudDatabaseFlavor(clt._manager, info)
ret = clt._get_flavor_ref(flavor_obj)
self.assertEqual(ret, example_uri)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_flavor_ref_for_id(self):
clt = self.client
info = {"id": 1,
"name": "test_flavor",
"ram": 42,
"links": [{
"href": example_uri,
"rel": "self"}]}
flavor_obj = CloudDatabaseFlavor(clt._manager, info)
clt.get_flavor = Mock(return_value=flavor_obj)
ret = clt._get_flavor_ref(1)
self.assertEqual(ret, example_uri)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_flavor_ref_for_name(self):
clt = self.client
info = {"id": 1,
"name": "test_flavor",
"ram": 42,
"links": [{
"href": example_uri,
"rel": "self"}]}
flavor_obj = CloudDatabaseFlavor(clt._manager, info)
clt.get_flavor = Mock(side_effect=exc.NotFound(""))
clt.list_flavors = Mock(return_value=[flavor_obj])
ret = clt._get_flavor_ref("test_flavor")
self.assertEqual(ret, example_uri)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_flavor_ref_for_ram(self):
clt = self.client
info = {"id": 1,
"name": "test_flavor",
"ram": 42,
"links": [{
"href": example_uri,
"rel": "self"}]}
flavor_obj = CloudDatabaseFlavor(clt._manager, info)
clt.get_flavor = Mock(side_effect=exc.NotFound(""))
clt.list_flavors = Mock(return_value=[flavor_obj])
ret = clt._get_flavor_ref(42)
self.assertEqual(ret, example_uri)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_get_flavor_ref_not_found(self):
clt = self.client
info = {"id": 1,
"name": "test_flavor",
"ram": 42,
"links": [{
"href": example_uri,
"rel": "self"}]}
flavor_obj = CloudDatabaseFlavor(clt._manager, info)
clt.get_flavor = Mock(side_effect=exc.NotFound(""))
clt.list_flavors = Mock(return_value=[flavor_obj])
self.assertRaises(exc.FlavorNotFound, clt._get_flavor_ref, "nonsense")
def test_clt_list_backups(self):
clt = self.client
mgr = clt._backup_manager
mgr.list = Mock()
clt.list_backups()
mgr.list.assert_called_once_with(instance=None)
def test_clt_list_backups_for_instance(self):
clt = self.client
mgr = clt._backup_manager
mgr.list = Mock()
inst = utils.random_unicode()
clt.list_backups(instance=inst)
mgr.list.assert_called_once_with(instance=inst)
def test_clt_get_backup(self):
clt = self.client
mgr = clt._backup_manager
mgr.get = Mock()
backup = utils.random_unicode()
clt.get_backup(backup)
mgr.get.assert_called_once_with(backup)
def test_clt_delete_backup(self):
clt = self.client
mgr = clt._backup_manager
mgr.delete = Mock()
backup = utils.random_unicode()
clt.delete_backup(backup)
mgr.delete.assert_called_once_with(backup)
def test_clt_create_backup(self):
clt = self.client
inst = self.instance
name = utils.random_unicode()
description = utils.random_unicode()
inst.create_backup = Mock()
clt.create_backup(inst, name, description=description)
inst.create_backup.assert_called_once_with(name,
description=description)
def test_clt_restore_backup(self):
clt = self.client
mgr = clt._manager
backup = utils.random_unicode()
name = utils.random_unicode()
flavor = utils.random_unicode()
volume = utils.random_unicode()
mgr.restore_backup = Mock()
clt.restore_backup(backup, name, flavor, volume)
mgr.restore_backup.assert_called_once_with(backup, name, flavor, volume)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_create_body_db(self):
mgr = self.instance._database_manager
nm = utils.random_unicode()
ret = mgr._create_body(nm, character_set="CS", collate="CO")
expected = {"databases": [
{"name": nm,
"character_set": "CS",
"collate": "CO"}]}
self.assertEqual(ret, expected)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_create_body_user(self):
inst = self.instance
mgr = inst._user_manager
nm = utils.random_unicode()
pw = utils.random_unicode()
dbnames = [utils.random_unicode(), utils.random_unicode()]
ret = mgr._create_body(nm, password=pw, database_names=dbnames)
expected = {"users": [
{"name": nm,
"password": pw,
"databases": [{"name": dbnames[0]}, {"name": dbnames[1]}]}]}
self.assertEqual(ret, expected)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_create_body_user_host(self):
inst = self.instance
mgr = inst._user_manager
nm = utils.random_unicode()
host = utils.random_unicode()
pw = utils.random_unicode()
dbnames = [utils.random_unicode(), utils.random_unicode()]
ret = mgr._create_body(nm, host=host, password=pw,
database_names=dbnames)
expected = {"users": [
{"name": nm,
"password": pw,
"host": host,
"databases": [{"name": dbnames[0]}, {"name": dbnames[1]}]}]}
self.assertEqual(ret, expected)
@patch("pyos.manager.BaseManager", new=fakes.FakeManager)
def test_create_body_flavor(self):
clt = self.client
nm = utils.random_unicode()
clt._get_flavor_ref = Mock(return_value=example_uri)
ret = clt._manager._create_body(nm)
expected = {"instance": {
"name": nm,
"flavorRef": example_uri,
"volume": {"size": 1},
"databases": [],
"users": []}}
self.assertEqual(ret, expected)
if __name__ == "__main__":
unittest.main()
|
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.topology import breadth_first_traverse
__all__ = [
'mesh_face_adjacency',
'mesh_unify_cycles',
'mesh_flip_cycles',
]
def _mesh_face_adjacency(mesh, nmax=10, radius=10.0):
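    # Compare halfedges only between faces whose centroids are spatially close:
    # a KD-tree over the face centroids limits each face's candidate neighbours
    # to its k = min(number_of_faces, nmax) nearest centroids.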
fkey_index = {fkey: index for index, fkey in enumerate(mesh.faces())}
index_fkey = {index: fkey for index, fkey in enumerate(mesh.faces())}
points = [mesh.face_centroid(fkey) for fkey in mesh.faces()]
k = min(mesh.number_of_faces(), nmax)
try:
from scipy.spatial import cKDTree
tree = cKDTree(points)
_, closest = tree.query(points, k=k, n_jobs=-1)
except Exception:
# try:
# from Rhino.Geometry import RTree
# from Rhino.Geometry import Sphere
# from Rhino.Geometry import Point3d
# except Exception:
from compas.geometry import KDTree
tree = KDTree(points)
closest = [tree.nearest_neighbors(point, k) for point in points]
closest = [[index for xyz, index, d in nnbrs] for nnbrs in closest]
# else:
# tree = RTree()
# for i, point in enumerate(points):
# tree.Insert(Point3d(* point), i)
# def callback(sender, e):
# data = e.Tag
# data.append(e.Id)
# closest = []
# for i, point in enumerate(points):
# sphere = Sphere(Point3d(* point), radius)
# data = []
# tree.Search(sphere, callback, data)
# closest.append(data)
adjacency = {}
for fkey in mesh.faces():
nbrs = []
index = fkey_index[fkey]
found = set()
nnbrs = set(closest[index])
for u, v in mesh.face_halfedges(fkey):
for index in nnbrs:
nbr = index_fkey[index]
if nbr == fkey:
continue
if nbr in found:
continue
for a, b in mesh.face_halfedges(nbr):
if v == a and u == b:
nbrs.append(nbr)
found.add(nbr)
break
for a, b in mesh.face_halfedges(nbr):
if u == a and v == b:
nbrs.append(nbr)
found.add(nbr)
break
adjacency[fkey] = nbrs
return adjacency
def mesh_face_adjacency(mesh):
"""Build a face adjacency dict.
Parameters
----------
mesh : :class:`~compas.datastructures.Mesh`
A mesh object.
Returns
-------
dict[int, list[int]]
A dictionary mapping face identifiers (keys) to lists of neighboring faces.
Notes
-----
This algorithm is used primarily to unify the cycle directions of a given mesh.
Therefore, the premise is that the topological information of the mesh is corrupt
and cannot be used to construct the adjacency structure. The algorithm is thus
purely geometrical, but uses a spatial indexing tree to speed up the search.
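    Examples
    --------
    A minimal sketch, assuming ``mesh`` is an already constructed mesh object:
    >>> adjacency = mesh_face_adjacency(mesh)  # doctest: +SKIP
    >>> set(adjacency) == set(mesh.faces())  # doctest: +SKIP
    True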
"""
f = mesh.number_of_faces()
if f > 100:
return _mesh_face_adjacency(mesh)
adjacency = {}
faces = list(mesh.faces())
for fkey in mesh.faces():
# faces = []
# for key in mesh.face_vertices(fkey):
# for nbr in mesh.halfedge[key]:
# fnbr = mesh.halfedge[key][nbr]
# if fnbr is not None:
# faces.append(fnbr)
nbrs = []
found = set()
for u, v in mesh.face_halfedges(fkey):
for nbr in faces:
if nbr == fkey:
continue
if nbr in found:
continue
for a, b in mesh.face_halfedges(nbr):
if v == a and u == b:
nbrs.append(nbr)
found.add(nbr)
break
for a, b in mesh.face_halfedges(nbr):
if u == a and v == b:
nbrs.append(nbr)
found.add(nbr)
break
adjacency[fkey] = nbrs
return adjacency
def mesh_unify_cycles(mesh, root=None):
"""Unify the cycle directions of all faces.
    Unified cycle directions are a necessary condition for the data structure to
work properly. When in doubt, run this function on your mesh.
Parameters
----------
mesh : :class:`~compas.datastructures.Mesh`
A mesh object.
root : str, optional
The key of the root face.
Returns
-------
None
The mesh is modified in place.
Raises
------
AssertionError
        If not all faces are included in the unification process.
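    Examples
    --------
    A minimal sketch, assuming ``mesh`` was constructed from face data with
    inconsistent winding directions:
    >>> mesh_unify_cycles(mesh)  # doctest: +SKIP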
"""
def unify(node, nbr):
# find the common edge
for u, v in mesh.face_halfedges(nbr):
if u in mesh.face[node] and v in mesh.face[node]:
# node and nbr have edge u-v in common
i = mesh.face[node].index(u)
j = mesh.face[node].index(v)
if i == j - 1 or (j == 0 and u == mesh.face[node][-1]):
# if the traversal of a neighboring halfedge
# is in the same direction
# flip the neighbor
mesh.face[nbr][:] = mesh.face[nbr][::-1]
return
if root is None:
root = mesh.get_any_face()
adj = mesh_face_adjacency(mesh)
visited = breadth_first_traverse(adj, root, unify)
assert len(list(visited)) == mesh.number_of_faces(), 'Not all faces were visited'
mesh.halfedge = {key: {} for key in mesh.vertices()}
for fkey in mesh.faces():
for u, v in mesh.face_halfedges(fkey):
mesh.halfedge[u][v] = fkey
if u not in mesh.halfedge[v]:
mesh.halfedge[v][u] = None
def mesh_flip_cycles(mesh):
"""Flip the cycle directions of all faces.
Parameters
----------
mesh : :class:`~compas.datastructures.Mesh`
A mesh object.
Returns
-------
None
The mesh is modified in place.
Notes
-----
This function does not care about the directions being unified or not. It
just reverses whatever direction it finds.
"""
mesh.halfedge = {key: {} for key in mesh.vertices()}
for fkey in mesh.faces():
mesh.face[fkey][:] = mesh.face[fkey][::-1]
for u, v in mesh.face_halfedges(fkey):
mesh.halfedge[u][v] = fkey
if u not in mesh.halfedge[v]:
mesh.halfedge[v][u] = None
|
|
"""
dropbox.session.DropboxSession is responsible for holding OAuth authentication
info (app key/secret, request key/secret, access key/secret). It knows how to
use all of this information to craft properly constructed requests to Dropbox.
A DropboxSession object must be passed to a dropbox.client.DropboxClient object upon
initialization.
"""
import random
import sys
import time
import urllib.request, urllib.parse, urllib.error
try:
from urllib.parse import parse_qs
except ImportError:
# fall back for Python 2.5
from cgi import parse_qs
from . import rest
class OAuthToken(object):
"""
A class representing an OAuth token. Contains two fields: ``key`` and
``secret``.
"""
def __init__(self, key, secret):
self.key = key
self.secret = secret
class BaseSession(object):
API_VERSION = 1
API_HOST = "api.dropbox.com"
WEB_HOST = "www.dropbox.com"
API_CONTENT_HOST = "api-content.dropbox.com"
API_NOTIFICATION_HOST = "api-notify.dropbox.com"
def __init__(self, consumer_key, consumer_secret, access_type="auto", locale=None, rest_client=rest.RESTClient):
"""Initialize a DropboxSession object.
Your consumer key and secret are available
at https://www.dropbox.com/developers/apps
Args:
- ``access_type``: Either 'auto' (the default), 'dropbox', or
'app_folder'. You probably don't need to specify this and should
just use the default.
- ``locale``: A locale string ('en', 'pt_PT', etc.) [optional]
The locale setting will be used to translate any user-facing error
messages that the server generates. At this time Dropbox supports
'en', 'es', 'fr', 'de', and 'ja', though we will be supporting more
languages in the future. If you send a language the server doesn't
support, messages will remain in English. Look for these translated
messages in rest.ErrorResponse exceptions as e.user_error_msg.
"""
        assert access_type in ['dropbox', 'app_folder', 'auto'], "expected access_type of 'dropbox', 'app_folder', or 'auto'"
self.consumer_creds = OAuthToken(consumer_key, consumer_secret)
self.token = None
self.request_token = None
self.root = 'sandbox' if access_type == 'app_folder' else access_type
self.locale = locale
self.rest_client = rest_client
def is_linked(self):
"""Return whether the DropboxSession has an access token attached."""
return bool(self.token)
def unlink(self):
"""Remove any attached access token from the DropboxSession."""
self.token = None
def build_path(self, target, params=None):
"""Build the path component for an API URL.
This method urlencodes the parameters, adds them
to the end of the target url, and puts a marker for the API
version in front.
Args:
- ``target``: A target url (e.g. '/files') to build upon.
- ``params``: A dictionary of parameters (name to value). [optional]
Returns:
- The path and parameters components of an API URL.
"""
if sys.version_info < (3,) and type(target) == str:
target = target.encode("utf8")
target_path = urllib.parse.quote(target)
params = params or {}
params = params.copy()
if self.locale:
params['locale'] = self.locale
if params:
return "/%s%s?%s" % (self.API_VERSION, target_path, urllib.parse.urlencode(params))
else:
return "/%s%s" % (self.API_VERSION, target_path)
def build_url(self, host, target, params=None):
"""Build an API URL.
This method adds scheme and hostname to the path
returned from build_path.
Args:
- ``target``: A target url (e.g. '/files') to build upon.
- ``params``: A dictionary of parameters (name to value). [optional]
Returns:
- The full API URL.
"""
return "https://%s%s" % (host, self.build_path(target, params))
class DropboxSession(BaseSession):
def set_token(self, access_token, access_token_secret):
"""Attach an access token to the DropboxSession.
Note that the access 'token' is made up of both a token string
and a secret string.
"""
self.token = OAuthToken(access_token, access_token_secret)
def set_request_token(self, request_token, request_token_secret):
"""Attach an request token to the DropboxSession.
Note that the request 'token' is made up of both a token string
and a secret string.
"""
self.request_token = OAuthToken(request_token, request_token_secret)
def build_authorize_url(self, request_token, oauth_callback=None):
"""Build a request token authorization URL.
After obtaining a request token, you'll need to send the user to
the URL returned from this function so that they can confirm that
they want to connect their account to your app.
Args:
- ``request_token``: A request token from obtain_request_token.
- ``oauth_callback``: A url to redirect back to with the authorized
request token.
Returns:
            - An authorization URL for the given request token.
"""
        params = {'oauth_token': request_token.key}
if oauth_callback:
params['oauth_callback'] = oauth_callback
return self.build_url(self.WEB_HOST, '/oauth/authorize', params)
def obtain_request_token(self):
"""Obtain a request token from the Dropbox API.
This is your first step in the OAuth process. You call this to get a
request_token from the Dropbox server that you can then use with
DropboxSession.build_authorize_url() to get the user to authorize it.
After it's authorized you use this token with
DropboxSession.obtain_access_token() to get an access token.
NOTE: You should only need to do this once for each user, and then you
can store the access token for that user for later operations.
Returns:
- An :py:class:`OAuthToken` object representing the
request token Dropbox assigned to this app. Also attaches the
request token as self.request_token.
"""
self.token = None # clear any token currently on the request
url = self.build_url(self.API_HOST, '/oauth/request_token')
headers, params = self.build_access_headers('POST', url)
response = self.rest_client.POST(url, headers=headers, params=params, raw_response=True)
self.request_token = self._parse_token(response.read())
return self.request_token
def obtain_access_token(self, request_token=None):
"""Obtain an access token for a user.
After you get a request token, and then send the user to the authorize
URL, you can use the authorized request token with this method to get the
access token to use for future operations. The access token is stored on
the session object.
Args:
- ``request_token``: A request token from obtain_request_token. [optional]
The request_token should have been authorized via the
authorization url from build_authorize_url. If you don't pass
a request_token, the fallback is self.request_token, which
will exist if you previously called obtain_request_token on this
DropboxSession instance.
Returns:
- An :py:class:`OAuthToken` object with fields ``key`` and ``secret``
representing the access token Dropbox assigned to this app and
user. Also attaches the access token as self.token.
"""
request_token = request_token or self.request_token
assert request_token, "No request_token available on the session. Please pass one."
url = self.build_url(self.API_HOST, '/oauth/access_token')
headers, params = self.build_access_headers('POST', url, request_token=request_token)
response = self.rest_client.POST(url, headers=headers, params=params, raw_response=True)
self.token = self._parse_token(response.read())
return self.token
def build_access_headers(self, method, resource_url, params=None, request_token=None):
"""Build OAuth access headers for a future request.
Args:
- ``method``: The HTTP method being used (e.g. 'GET' or 'POST').
- ``resource_url``: The full url the request will be made to.
- ``params``: A dictionary of parameters to add to what's already on the url.
Typically, this would consist of POST parameters.
Returns:
- A tuple of (header_dict, params) where header_dict is a dictionary
of header names and values appropriate for passing into dropbox.rest.RESTClient
and params is a dictionary like the one that was passed in, but augmented with
oauth-related parameters as appropriate.
"""
if params is None:
params = {}
else:
params = params.copy()
oauth_params = {
'oauth_consumer_key' : self.consumer_creds.key,
'oauth_timestamp' : self._generate_oauth_timestamp(),
'oauth_nonce' : self._generate_oauth_nonce(),
'oauth_version' : self._oauth_version(),
}
token = request_token if request_token is not None else self.token
if token:
oauth_params['oauth_token'] = token.key
self._oauth_sign_request(oauth_params, self.consumer_creds, token)
params.update(oauth_params)
return {}, params
@classmethod
def _oauth_sign_request(cls, params, consumer_pair, token_pair):
params.update({'oauth_signature_method' : 'PLAINTEXT',
'oauth_signature' : ('%s&%s' % (consumer_pair.secret, token_pair.secret)
if token_pair is not None else
'%s&' % (consumer_pair.secret,))})
@classmethod
def _generate_oauth_timestamp(cls):
return int(time.time())
@classmethod
def _generate_oauth_nonce(cls, length=8):
return ''.join([str(random.randint(0, 9)) for i in range(length)])
@classmethod
def _oauth_version(cls):
return '1.0'
@classmethod
def _parse_token(cls, s):
if not s:
raise ValueError("Invalid parameter string.")
params = parse_qs(s, keep_blank_values=False)
if not params:
raise ValueError("Invalid parameter string: %r" % s)
try:
key = params['oauth_token'][0]
except Exception:
raise ValueError("'oauth_token' not found in OAuth request.")
try:
secret = params['oauth_token_secret'][0]
except Exception:
raise ValueError("'oauth_token_secret' not found in "
"OAuth request.")
return OAuthToken(key, secret)
# Don't use this class directly.
class DropboxOAuth2Session(BaseSession):
def __init__(self, oauth2_access_token, locale, rest_client=rest.RESTClient):
super(DropboxOAuth2Session, self).__init__("", "", "auto", locale=locale, rest_client=rest_client)
self.access_token = oauth2_access_token
def build_access_headers(self, method, resource_url, params=None, token=None):
assert token is None
headers = {"Authorization": "Bearer " + self.access_token}
return headers, params
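# A minimal end-to-end sketch of the OAuth 1.0 flow implemented above.
# 'MY_APP_KEY' and 'MY_APP_SECRET' are placeholders for real consumer
# credentials; the calls below hit the live Dropbox API.
if __name__ == '__main__':
    sess = DropboxSession('MY_APP_KEY', 'MY_APP_SECRET', access_type='auto')
    request_token = sess.obtain_request_token()
    # Step 1: send the user here to approve the request token.
    print(sess.build_authorize_url(request_token))
    # Step 2 (after the user has authorized in the browser): trade the
    # request token for an access token; it is also stored as sess.token.
    # access_token = sess.obtain_access_token(request_token)
    # Every signed request then carries the PLAINTEXT signature
    # 'consumer_secret&token_secret' built by _oauth_sign_request above.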
|
|
from __future__ import print_function
import os, sys
import re
import webbrowser
import datetime
import logging
from tempfile import NamedTemporaryFile
from storage import RemoteStorage
import flickr_api
from flickr_api.api import flickr
from file_info import FileInfo
from folder_info import FolderInfo
from local_storage import mkdirp
from config import __packagename__
TOKEN_FILENAME = __packagename__ + '.token'
"""
About Tags
----------
Normal Tags
With normal tags, the accepted characters for uniqueness checking are [A-Za-z0-9] (in regex form) - so all other
characters are stripped out (and uppercase characters are downcased eg: A->a) *but* they're maintained for
"viewing" (reading) - a way of maintaining ease of use for users and ease of indexing/searching etc for the system.
Machine Tags
Have been developed to support intra-application tagging (eg: delicious, upcoming, last.fm, dopplr and more).
They have the following structure:
namespace:predicate=value
a namespace, i.e. upcoming [who is going to care about this tag]
a predicate, i.e. event [what does this apply to]
a value, i.e. 123456 [which one is this]
"""
CHECKSUM_PREFIX = 'checksum:md5'
EXTENSION_PREFIX = 'flickrrsync:extn'
OAUTH_PERMISSIONS = 'write'
logger = logging.getLogger(__name__)
class FlickrStorage(RemoteStorage):
def __init__(self, config, resiliently):
self._config = config
self._resiliently = resiliently
self._is_authenticated = False
self._user = None
self._photosets = {}
self._photos = {}
def list_folders(self):
"""
Lists all photosets in Flickr
Returns:
A lazy loaded generator function of FolderInfo objects
"""
self._authenticate()
walker = self._resiliently.call(flickr_api.objects.Walker, self._user.getPhotosets)
for photoset in walker:
self._photosets[photoset.id] = photoset
folder = FolderInfo(id=photoset.id, name=photoset.title.encode('utf-8'))
if self._should_include(folder.name, self._config.include_dir, self._config.exclude_dir):
yield folder
def list_files(self, folder):
"""
Lists all photos within a photoset
Args:
folder: The FolderInfo object of the folder to list (from list_folders), or None to list all photos not
in a photoset
Returns:
A lazy loaded generator function of FileInfo objects
Raises:
KeyError: If folder.id is unrecognised
"""
self._authenticate()
if not folder.is_root:
walker = self._resiliently.call(
flickr_api.objects.Walker,
self._photosets[folder.id].getPhotos,
extras='original_format,tags')
else:
walker = self._resiliently.call(
flickr_api.objects.Walker,
self._user.getNotInSetPhotos,
extras='original_format,tags')
for photo in walker:
self._photos[photo.id] = photo
file_info = self._get_file_info(photo)
if self._should_include(file_info.name, self._config.include, self._config.exclude):
yield file_info
def download(self, file_info, dest_path):
"""
Downloads a photo from Flickr to local file system
Args:
file_info: The file info object (as returned by list_files) of the file to download
dest_path: The file system path to save the file to
Raises:
KeyError: If the file_info.id is unrecognised
"""
mkdirp(dest_path)
photo = self._photos[file_info.id]
is_video = photo.media == 'video'
size = 'Video Original' if is_video else 'Original'
self._resiliently.call(photo.save, dest_path, size_label=size)
def upload(self, src_path, folder_name, file_name, checksum):
"""
Uploads a photo to Flickr from local file system
Args:
src_path: The file system path to upload the photo from
            folder_name: The photoset name to add the photo to
            file_name: The name of the photo, any extension will be removed
            checksum: The md5 checksum of the file, recorded as a machine tag [optional]
"""
extension = os.path.splitext(file_name)[1][1:]
tags = '{} "{}={}"'.format(self._config.tags, EXTENSION_PREFIX, extension)
if checksum:
tags = '{} {}={}'.format(tags, CHECKSUM_PREFIX, checksum)
photo = self._resiliently.call(
flickr_api.upload,
photo_file=src_path,
title=os.path.splitext(file_name)[0],
tags=tags.strip(),
is_public=self._config.is_public,
is_friend=self._config.is_friend,
is_family=self._config.is_family,
async=0)
if folder_name:
photoset = self._get_folder_by_name(folder_name)
if not photoset:
photoset = self._resiliently.call(flickr_api.Photoset.create, title=folder_name, primary_photo=photo)
self._photosets[photoset.id] = photoset
else:
self._resiliently.call(photoset.addPhoto, photo=photo)
def copy_file(self, file_info, folder_name, dest_storage):
if isinstance(dest_storage, RemoteStorage):
temp_file = NamedTemporaryFile()
self.download(file_info, temp_file.name)
dest_storage.upload(temp_file.name, folder_name, file_info.name, file_info.checksum)
temp_file.close()
else:
dest = os.path.join(dest_storage.path, folder_name, file_info.name)
self.download(file_info, dest)
def _get_folder_by_name(self, name):
return next((x for x in self._photosets.values() if x.title.encode('utf-8').lower() == name.lower()), None)
def _get_file_info(self, photo):
name = photo.title.encode('utf-8') if photo.title else photo.id
checksum = None
extension = None
if photo.tags:
# If we've just pulled the photo, tags is a string, if we've inspected any properties like 'media', it becomes a list
tags = photo.tags.split() if isinstance(photo.tags, basestring) else [tag.text for tag in photo.tags]
checksum = next((parts[1] for parts in (tag.split('=') for tag in tags) if parts[0] == CHECKSUM_PREFIX), None)
extension = next((parts[1] for parts in (tag.split('=') for tag in tags) if parts[0] == EXTENSION_PREFIX), None)
if not extension:
extension = photo.originalformat
if extension:
name += "." + extension
return FileInfo(id=photo.id, name=name, checksum=checksum)
def _should_include(self, name, include_pattern, exclude_pattern):
return ((not include_pattern or re.search(include_pattern, name, flags=re.IGNORECASE)) and
(not exclude_pattern or not re.search(exclude_pattern, name, flags=re.IGNORECASE)))
def _authenticate(self):
if self._is_authenticated:
return
flickr_api.set_keys(api_key = self._config.api_key, api_secret = self._config.api_secret)
token_path = self._config.locate_datafile(TOKEN_FILENAME)
if token_path:
auth_handler = flickr_api.auth.AuthHandler.load(token_path)
else:
token_path = self._config.default_datafile(TOKEN_FILENAME)
auth_handler = flickr_api.auth.AuthHandler()
permissions_requested = OAUTH_PERMISSIONS
url = auth_handler.get_authorization_url(permissions_requested)
webbrowser.open(url)
print("Please enter the OAuth verifier tag once logged in:")
verifier_code = raw_input("> ")
auth_handler.set_verifier(verifier_code)
auth_handler.save(token_path)
try:
flickr_api.set_auth_handler(auth_handler)
self._user = flickr_api.test.login()
self._is_authenticated = True
except flickr_api.flickrerrors.FlickrError as e:
print(e.message)
if e.message == 'The Flickr API keys have not been set':
print("Go to http://www.flickr.com/services/apps/create/apply and apply for an API key")
sys.exit(1);
|
|
from __future__ import division
import csv
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.db.models import Q
from django.http import HttpResponse
from django.utils.encoding import smart_str
from django.views.decorators.csrf import csrf_exempt
from django.core import serializers
from open_elections.models import *
def emit_county_csv_results(request):
"""Returns a CSV file to be parsed for use by KPBS.org"""
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename=county_election_results.csv'
csv.register_dialect('county_csv', delimiter=',', quoting=csv.QUOTE_ALL)
writer = csv.writer(response, 'county_csv')
writer.writerow(['contest_id', 'title', 'reporting', 'numprec', 'pctrpt', 'name', 'party', 'vote', 'pct', 'last_update'])
contests = CountyContest.objects.values_list('contest_id', 'title', 'reporting',
'numprec', 'pctrpt', 'countycandidate__name', 'countycandidate__party',
'countycandidate__vote', 'countycandidate__pct', 'countycandidate__last_update').order_by('contest_key',
'-countycandidate__vote')
for c in contests:
writer.writerow([c[0], c[1], c[2], c[3], c[4], c[5], c[6], c[7], c[8], c[9].strftime("%A %B %e, %Y %I:%M %p")])
return response
def emit_state_csv_results(request):
"""Returns a CSV file to be parsed for use by KPBS.org"""
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename=state_election_results.csv'
writer = csv.writer(response)
    writer.writerow(['contest_identifier', 'contest_name', 'precincts_reporting', 'total_precincts', 'candidate_name', 'affiliation', 'total_votes', 'percent_votes', 'referendum_option_identifier', 'last_update'])
candidates = StateCandidate.objects.select_related().order_by('state_contest', '-valid_votes').filter(state_contest__show_on_web=True)
candidate_name = ''
for c in candidates:
if c.candidate_name:
candidate_name = c.candidate_name
elif c.proposal_identifier:
candidate_name = c.referendum_option_identifier
writer.writerow([c.state_contest.contest_identifier,
c.state_contest.contest_name,
c.state_contest.precincts_reporting,
c.state_contest.total_precincts,
smart_str(candidate_name),
c.affiliation,
c.valid_votes,
c.pct_votes_race,
c.referendum_option_identifier,
c.last_update.strftime("%A %B %e, %Y %I:%M %p")])
return response
def emit_jason_results(request):
"""docstring for emit_jason_results"""
pass
def get_candidate_json(request):
"""Returns a the information about each candidate (person or prop) as JSON"""
candidate_json = serializers.serialize('json', Candidate.objects.all())
return HttpResponse(candidate_json, mimetype="application/json")
def get_candidates(request):
"""Returns a the information about each candidate (person or prop)"""
candidates = Candidate.objects.all()
return render_to_response('election_guide.html',
{'candidates': candidates},
context_instance=RequestContext(request))
def get_candidate_contributions(request):
"""Returns a the information about each candidate (person or prop)"""
contributions = Contribution.objects.select_related().order_by('candidate',
'tran_naml', 'tran_namf')
return render_to_response('contributions.html',
{'contributions': contributions},
context_instance=RequestContext(request))
def emit_contribution_csv(request, filer_id=''):
"""Returns a CSV file to be parsed for use by KPBS.org"""
if filer_id == '':
pass
else:
response = HttpResponse(mimetype='text/csv')
        response['Content-Disposition'] = 'attachment; filename=%s.csv' % filer_id
csv.register_dialect('contrib_csv', delimiter=',', quoting=csv.QUOTE_ALL)
writer = csv.writer(response, 'contrib_csv')
writer.writerow(['tran_amt', 'zip_code'])
contribs = Contribution.objects.filter()
for c in contribs:
            writer.writerow([c.tran_amt, c.zip_code])
return response
def get_contests_for_voter_guide(request):
"""Returns contest and candidate data. Can be called using a number of
different identifiers including contest ID, contest name, contest type or
district"""
# from open_elections.models import Contest
# if request.GET:
# if request.GET.get('contest_id'):
# identifier = request.GET['contest_id']
# elif request.GET.get('contest_name'):
# identifier = request.GET['contest_name']
# elif request.GET.get('contest_type'):
# identifier = request.GET['contest_type']
# elif request.GET.get('contest_district'):
# identifier = request.GET['contest_district']
# key = request.GET.items()[0][0]
# value = request.GET.items()[0][1]
# else:
# identifier = 'nada' #TODO: return a 404 here get_object_or_404
return render_to_response('election_guide.html',
{}, context_instance=RequestContext(request))
def get_contests_for_voter_guide_dev(request):
"""Returns contest and candidate data. Can be called using a number of
different identifiers including contest ID, contest name, contest type or
district"""
# from open_elections.models import Contest
# if request.GET:
# if request.GET.get('contest_id'):
# identifier = request.GET['contest_id']
# elif request.GET.get('contest_name'):
# identifier = request.GET['contest_name']
# elif request.GET.get('contest_type'):
# identifier = request.GET['contest_type']
# elif request.GET.get('contest_district'):
# identifier = request.GET['contest_district']
# key = request.GET.items()[0][0]
# value = request.GET.items()[0][1]
# else:
# identifier = 'nada' #TODO: return a 404 here get_object_or_404
return render_to_response('election_guide_dev.html',
{}, context_instance=RequestContext(request))
def get_county_pres_results_for_chart(request):
results = CountyCandidate.objects.filter(cankey__in=[7, 10])
pct_rpt = results[0].contest.pctrpt
return render_to_response('chart_data.html', {'results': results, 'pct_rpt': pct_rpt}, context_instance=RequestContext(request))
def get_map(request):
return render_to_response('map.html',
{},
context_instance=RequestContext(request))
### It shouldn't need to be dependent on this other application
### I figure people can install it if they want to...
# from endless_pagination.decorators import page_template
@csrf_exempt
# @page_template("demaio_contributors.html")
# @page_template("filner_contributors.html", key="filner_contributors")
def searchable_map(request, template="searchable_map.html",
extra_context=None):
from django.db.models import Sum
if request.REQUEST.get('d_search_submit'):
params = {}
form = ContributionForm(request.POST)
if request.method == 'POST' and form.is_valid():
if form.cleaned_data['tran_namf']:
params['tran_namf__icontains'] = form.cleaned_data['tran_namf']
if form.cleaned_data['tran_naml']:
params['tran_naml__icontains'] = form.cleaned_data['tran_naml']
if form.cleaned_data['tran_emp']:
params['tran_emp__icontains'] = form.cleaned_data['tran_emp']
if form.cleaned_data['tran_zip']:
params['tran_zip'] = form.cleaned_data['tran_zip']
params['candidate'] = 2
elif request.method == 'GET' and request.GET['d_search_submit']:
if request.GET.get('tran_namf', ''):
params['tran_namf__icontains'] = request.GET.get('tran_namf', '')
if request.GET.get('tran_naml', ''):
params['tran_naml__icontains'] = request.GET.get('tran_naml', '')
if request.GET.get('tran_emp', ''):
params['tran_emp__icontains'] = request.GET.get('tran_emp', '')
if request.GET.get('tran_zip', ''):
params['tran_zip'] = request.GET.get('tran_zip', '')
params['candidate'] = 2
demaio_contribs = Contribution.objects.values('tran_namf', 'tran_naml', 'tran_emp', 'tran_zip').filter(**params).annotate(amount=Sum('tran_amt1')).order_by('-amount', 'tran_naml')
filner_contribs = Contribution.objects.values('tran_namf', 'tran_naml', 'tran_emp', 'tran_zip').filter(candidate_id=3).annotate(amount=Sum('tran_amt1')).order_by('-amount', 'tran_naml')
context = {'demaio_contribs': demaio_contribs, 'filner_contribs': filner_contribs, 'form': form, 'search_submit': 'd_search_submit'}
elif request.REQUEST.get('f_search_submit'):
params = {}
params['candidate'] = 3
form = ContributionForm(request.POST)
if request.method == 'POST' and form.is_valid():
if form.cleaned_data['tran_namf']:
params['tran_namf__icontains'] = form.cleaned_data['tran_namf']
if form.cleaned_data['tran_naml']:
params['tran_naml__icontains'] = form.cleaned_data['tran_naml']
if form.cleaned_data['tran_emp']:
params['tran_emp__icontains'] = form.cleaned_data['tran_emp']
if form.cleaned_data['tran_zip']:
params['tran_zip'] = form.cleaned_data['tran_zip']
elif request.method == 'GET' and request.GET['f_search_submit']:
if request.GET.get('tran_namf', ''):
params['tran_namf__icontains'] = request.GET.get('tran_namf', '')
if request.GET.get('tran_naml', ''):
params['tran_naml__icontains'] = request.GET.get('tran_naml', '')
if request.GET.get('tran_emp', ''):
params['tran_emp__icontains'] = request.GET.get('tran_emp', '')
if request.GET.get('tran_zip', ''):
params['tran_zip'] = request.GET.get('tran_zip', '')
filner_contribs = Contribution.objects.values('tran_namf', 'tran_naml', 'tran_emp', 'tran_zip').filter(**params).annotate(amount=Sum('tran_amt1')).order_by('-amount', 'tran_naml')
demaio_contribs = Contribution.objects.values('tran_namf', 'tran_naml', 'tran_emp', 'tran_zip').filter(candidate_id=2).annotate(amount=Sum('tran_amt1')).order_by('-amount', 'tran_naml')
context = {'demaio_contribs': demaio_contribs, 'filner_contribs': filner_contribs, 'form': form, 'search_submit': 'f_search_submit'}
else:
demaio_contribs = Contribution.objects.values('tran_namf', 'tran_naml', 'tran_emp', 'tran_zip').filter(candidate_id=2).annotate(amount=Sum('tran_amt1')).order_by('-amount', 'tran_naml')
filner_contribs = Contribution.objects.values('tran_namf', 'tran_naml', 'tran_emp', 'tran_zip').filter(candidate_id=3).annotate(amount=Sum('tran_amt1')).order_by('-amount', 'tran_naml')
form = ContributionForm()
context = {
'demaio_contribs': demaio_contribs,
'demaio_grid_title': 'All Contributors',
'filner_contribs': filner_contribs,
'filner_grid_title': 'All Contributors',
'form': form
}
if extra_context is not None:
context.update(extra_context)
return render_to_response(template, context, context_instance=RequestContext(request))
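# Note on the queryset pattern above (a sketch of the roughly equivalent SQL,
# for orientation; the table name assumes Django's default app_model naming):
# calling values(...) before annotate(...) makes Django group by the listed
# columns, so each row is one contributor with their summed amount.
#
#     SELECT tran_namf, tran_naml, tran_emp, tran_zip,
#            SUM(tran_amt1) AS amount
#     FROM open_elections_contribution
#     WHERE candidate_id = 2
#     GROUP BY tran_namf, tran_naml, tran_emp, tran_zip
#     ORDER BY amount DESC, tran_naml;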
## --- Our Chyron may not be so useful for others...
def emit_chyron_xml_results(request):
"""Returns an XML file to be parsed for use by KPBS TV"""
    # Excluded kpbs_elections_countycontest.contest_id values (presidential & congressional contests)
    county_excluded = [5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75,
139, 375, 380, 395, 400, 445, 450, 455, 460, 465, 470, 475, 480, 485, 490,
495, 500, 505, 510, 515, 520, 525, 530, 545, 550, 555, 560, 565, 570, 575,
580, 585, 590, 595, 600, 605, 610, 615, 620, 625, 630, 635, 640, 645, 650,
655, 660, 665, 670, 675, 680]
county_candidates = CountyCandidate.objects.all().order_by('contest__contest_key', '-pct').exclude(contest__contest_id__in=county_excluded)
state_candidates = StateCandidate.objects.select_related().order_by('state_contest', '-valid_votes').filter(state_contest__show_on_web=True)
pres_candidates = ApCandidate.objects.select_related().filter(show_on_web=True).order_by('-popular_vote')
return render_to_response('chyron.xml', {'county_candidates': county_candidates, 'state_candidates': state_candidates, 'pres_candidates': pres_candidates}, context_instance=RequestContext(request), mimetype="text/xml")
def emit_chyron_county_html_table(request):
"""Returns an HTML table to be parsed for use by KPBS TV"""
county_candidates = CountyCandidate.objects.all().filter(show_on_web=True).order_by('contest__contest_key', '-pct')
state_candidates = StateCandidate.objects.select_related().order_by('state_contest', '-valid_votes').filter(state_contest__show_on_web=True)
pres_candidates = ApCandidate.objects.select_related().filter(show_on_web=True).order_by('-popular_vote')
return render_to_response('chyron_table.html', {'county_candidates': county_candidates, 'state_candidates': state_candidates, 'pres_candidates': pres_candidates}, context_instance=RequestContext(request))
|
|
# Copyright (C) 2016 Antoine Carme <Antoine.Carme@Laposte.net>
# All rights reserved.
# This file is part of the Python Automatic Forecasting (PyAF) library and is made available under
# the terms of the 3 Clause BSD license
import pandas as pd
import numpy as np
from enum import IntEnum
from . import Utils as tsutil
from . import TimeSeries_Cutting as tscut
from . import DateTime_Functions as dtfunc
class cTimeInfo:
# class data
def __init__(self):
self.mSignalFrame = pd.DataFrame()
self.mTimeMin = None;
self.mTimeMax = None;
self.mTimeMinMaxDiff = None;
self.mTimeDelta = None;
self.mHorizon = None;
self.mResolution = dtfunc.eTimeResolution.NONE
self.mSplit = None
def info(self):
lStr2 = "TimeVariable='" + self.mTime +"'";
lStr2 += " TimeMin=" + str(self.mTimeMin) +"";
lStr2 += " TimeMax=" + str(self.mTimeMax) +"";
lStr2 += " TimeDelta=" + str(self.mTimeDelta) +"";
lStr2 += " Horizon=" + str(self.mHorizon) +"";
return lStr2;
def to_dict(self):
dict1 = {};
dict1["TimeVariable"] = self.mTime;
dict1["TimeMinMax"] = [str(self.mSignalFrame[self.mTime].min()) ,
str(self.mSignalFrame[self.mTime].max())];
dict1["Horizon"] = self.mHorizon;
return dict1;
def addVars(self, df):
df[self.mRowNumberColumn] = self.mSignalFrame[self.mRowNumberColumn]
df[self.mTime] = self.mSignalFrame[self.mTime]
df[self.mNormalizedTimeColumn] = self.mSignalFrame[self.mNormalizedTimeColumn]
df[self.mSignal] = self.mSignalFrame[self.mSignal]
df[self.mOriginalSignal] = self.mSignalFrame[self.mOriginalSignal]
def get_time_dtype(self):
# print(self.mTimeMax, type(self.mTimeMax))
lType = self.mSignalFrame[self.mTime].dtype;
return lType;
def checkDateTypesForNewDataset(self, df):
if(self.mTimeMax is not None):
lType1 = self.get_time_dtype();
lType2 = df[self.mTime].dtype
if(lType1.kind != lType2.kind):
                raise tsutil.PyAF_Error('Incompatible Time Column Type: expected=' + str(lType1) + ' got=' + str(lType2));
pass
def transformDataset(self, df):
self.checkDateTypesForNewDataset(df);
# new row
lLastRow = df.tail(1).copy();
lNextTime = self.nextTime(df, 1)
lLastRow[self.mTime] = lNextTime
lLastRow[self.mSignal] = np.nan
if(self.mNormalizedTimeColumn in df.columns):
lLastRow[self.mNormalizedTimeColumn] = self.normalizeTime(lNextTime)
lLastRow[self.mRowNumberColumn] = lLastRow[self.mRowNumberColumn].max() + 1
# print(lLastRow.columns , df.columns)
assert(str(lLastRow.columns) == str(df.columns))
df = pd.concat([df, lLastRow], ignore_index=True, verify_integrity = True, sort=False);
if(self.mNormalizedTimeColumn not in df.columns):
df[self.mRowNumberColumn] = np.arange(0, df.shape[0]);
df[self.mNormalizedTimeColumn] = self.compute_normalized_date_column(df[self.mTime])
# print(df.tail());
return df;
def isPhysicalTime(self):
lHelper = dtfunc.cDateTime_Helper()
return lHelper.isPhysicalTime(self.mSignalFrame[self.mTime])
def analyzeSeasonals(self):
if(not self.isPhysicalTime()):
return;
lEstim = self.mSplit.getEstimPart(self.mSignalFrame);
lEstimTime = lEstim[self.mTime]
lHelper = dtfunc.cDateTime_Helper()
self.mResolution = lHelper.guess_time_resolution(lEstimTime);
def checkDateTypes(self):
# print(self.mSignalFrame.info());
type1 = self.mSignalFrame[self.mTime].dtype
if(type1.kind == 'O'):
raise tsutil.PyAF_Error('Invalid Time Column Type ' + self.mTime + '[' + str(type1) + ']');
def adaptTimeDeltaToTimeResolution(self):
if(not self.isPhysicalTime()):
return;
lHelper = dtfunc.cDateTime_Helper()
self.mTimeDelta = lHelper.adaptTimeDeltaToTimeResolution(self.mResolution , self.mTimeDelta);
def computeTimeDelta(self):
#print(self.mSignalFrame.columns);
# print(self.mSignalFrame[self.mTime].head());
lEstim = self.mSplit.getEstimPart(self.mSignalFrame)
lTimeBefore = lEstim[self.mTime].shift(1);
# lTimeBefore.fillna(self.mTimeMin, inplace=True)
N = lEstim.shape[0];
if(N == 1):
if(self.isPhysicalTime()):
self.mTimeDelta = np.timedelta64(1,'D');
else:
self.mTimeDelta = 1
return
#print(self.mSignal, self.mTime, N);
#print(lEstim[self.mTime].head());
#print(lTimeBefore.head());
lDiffs = lEstim[self.mTime][1:N] - lTimeBefore[1:N]
if(self.mOptions.mTimeDeltaComputationMethod == "USER"):
self.mTimeDelta = self.mOptions.mUserTimeDelta;
if(self.mOptions.mTimeDeltaComputationMethod == "AVG"):
self.mTimeDelta = np.mean(lDiffs);
type1 = self.mSignalFrame[self.mTime].dtype
if(type1.kind == 'i' or type1.kind == 'u'):
self.mTimeDelta = int(self.mTimeDelta)
if(self.mOptions.mTimeDeltaComputationMethod == "MODE"):
delta_counts = pd.DataFrame(lDiffs.value_counts());
self.mTimeDelta = delta_counts[self.mTime].argmax();
self.adaptTimeDeltaToTimeResolution();
def estimate(self):
#print(self.mSignalFrame.columns);
#print(self.mSignalFrame[self.mTime].head());
self.checkDateTypes();
self.mRowNumberColumn = "row_number"
self.mNormalizedTimeColumn = self.mTime + "_Normalized";
self.analyzeSeasonals();
lEstim = self.mSplit.getEstimPart(self.mSignalFrame)
self.mTimeMin = lEstim[self.mTime].min();
self.mTimeMax = lEstim[self.mTime].max();
if(self.isPhysicalTime()):
self.mTimeMin = np.datetime64(self.mTimeMin.to_pydatetime());
self.mTimeMax = np.datetime64(self.mTimeMax.to_pydatetime());
self.mTimeMinMaxDiff = self.mTimeMax - self.mTimeMin;
self.mEstimCount = lEstim.shape[0]
# print(self.mTimeMin, self.mTimeMax , self.mTimeMinMaxDiff , (self.mTimeMax - self.mTimeMin)/self.mTimeMinMaxDiff)
self.computeTimeDelta();
self.mSignalFrame[self.mNormalizedTimeColumn] = self.compute_normalized_date_column(self.mSignalFrame[self.mTime])
self.dump();
def dump(self):
time_info = self.info();
def compute_normalized_date_column(self, idate_column):
if(self.mEstimCount == 1):
return 0.0;
vf = np.vectorize(self.normalizeTime)
return vf(idate_column)
@tsutil.cMemoize
def normalizeTime(self , iTime):
if(self.mEstimCount == 1):
return 0.0;
output = ( iTime- self.mTimeMin) / self.mTimeMinMaxDiff
return output
def cast_to_time_dtype(self, iTimeValue):
lType1 = self.get_time_dtype();
lTimeValue = np.array([iTimeValue]).astype(lType1)[0];
return lTimeValue;
def nextTime(self, df, iSteps):
#print(df.tail(1)[self.mTime]);
lLastTime = df[self.mTime].values[-1]
if(self.isPhysicalTime()):
lLastTime = pd.Timestamp(lLastTime)
# print("NEXT_TIME" , lLastTime, iSteps, self.mTimeDelta);
lNextTime = lLastTime + iSteps * self.mTimeDelta;
lNextTime = self.cast_to_time_dtype(lNextTime.to_datetime64())
else:
lNextTime = lLastTime + iSteps * self.mTimeDelta;
lNextTime = self.cast_to_time_dtype(lNextTime)
return lNextTime;
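# Illustrative sketch of the normalization above (standalone, no cTimeInfo
# state): normalizeTime maps the estimation window linearly onto [0, 1], and
# rows appended by transformDataset for the forecast horizon land above 1.0.
#
#     import numpy as np
#     t_min = np.datetime64('2020-01-01')
#     t_max = np.datetime64('2020-12-31')
#     span = t_max - t_min                      # 365 days
#     t = np.datetime64('2020-07-01')
#     print((t - t_min) / span)                 # ~0.4986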
|
|
# Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
from . import util
import loxi.generic_util
import sys
ofp = sys.modules['loxi.of13']
class action(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = action.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = action()
obj.type = reader.read("!H")[0]
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("action {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class experimenter(action):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, data=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if data != None:
self.data = data
else:
self.data = ''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(self.data)
length = sum([len(x) for x in packed])
packed.append(loxi.generic_util.pad_to(8, length))
length += len(packed[-1])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.experimenter = reader.read("!L")[0]
obj.data = str(reader.read_all())
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.data != other.data: return False
return True
def pretty_print(self, q):
q.text("experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("data = ");
q.pp(self.data)
q.breakable()
q.text('}')
action.subtypes[65535] = experimenter
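# Standalone sketch (not generated code) of the length-backfill pattern used
# by every pack() method above: reserve a 2-byte placeholder at index 1, then
# overwrite it once the total message length is known. Python 2 byte-string
# semantics, matching this module.
def _tlv_pack_sketch(type_, payload):
    packed = []
    packed.append(struct.pack("!H", type_))
    packed.append(struct.pack("!H", 0))  # length placeholder at index 1
    packed.append(payload)
    length = sum([len(x) for x in packed])
    packed[1] = struct.pack("!H", length)  # backfill the real length
    return ''.join(packed)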
class bsn(experimenter):
subtypes = {}
type = 65535
experimenter = 6035143
def __init__(self, subtype=None):
if subtype != None:
self.subtype = subtype
else:
self.subtype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 8)
subclass = bsn.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = bsn()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
obj.subtype = reader.read("!L")[0]
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.subtype != other.subtype: return False
return True
def pretty_print(self, q):
q.text("bsn {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
experimenter.subtypes[6035143] = bsn
class bsn_checksum(bsn):
type = 65535
experimenter = 6035143
subtype = 4
def __init__(self, checksum=None):
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(util.pack_checksum_128(self.checksum))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_checksum()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 4)
obj.checksum = util.unpack_checksum_128(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.checksum != other.checksum: return False
return True
def pretty_print(self, q):
q.text("bsn_checksum {")
with q.group():
with q.indent(2):
q.breakable()
q.text("checksum = ");
q.pp(self.checksum)
q.breakable()
q.text('}')
bsn.subtypes[4] = bsn_checksum
class bsn_gentable(bsn):
type = 65535
experimenter = 6035143
subtype = 5
def __init__(self, table_id=None, key=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if key != None:
self.key = key
else:
self.key = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(struct.pack("!L", self.table_id))
packed.append(loxi.generic_util.pack_list(self.key))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_gentable()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 5)
obj.table_id = reader.read("!L")[0]
obj.key = loxi.generic_util.unpack_list(reader, ofp.bsn_tlv.bsn_tlv.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.key != other.key: return False
return True
def pretty_print(self, q):
q.text("bsn_gentable {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("key = ");
q.pp(self.key)
q.breakable()
q.text('}')
bsn.subtypes[5] = bsn_gentable
class bsn_mirror(bsn):
type = 65535
experimenter = 6035143
subtype = 1
def __init__(self, dest_port=None, vlan_tag=None, copy_stage=None):
if dest_port != None:
self.dest_port = dest_port
else:
self.dest_port = 0
if vlan_tag != None:
self.vlan_tag = vlan_tag
else:
self.vlan_tag = 0
if copy_stage != None:
self.copy_stage = copy_stage
else:
self.copy_stage = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(struct.pack("!L", self.dest_port))
packed.append(struct.pack("!L", self.vlan_tag))
packed.append(struct.pack("!B", self.copy_stage))
packed.append('\x00' * 3)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_mirror()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 1)
obj.dest_port = reader.read("!L")[0]
obj.vlan_tag = reader.read("!L")[0]
obj.copy_stage = reader.read("!B")[0]
reader.skip(3)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.dest_port != other.dest_port: return False
if self.vlan_tag != other.vlan_tag: return False
if self.copy_stage != other.copy_stage: return False
return True
def pretty_print(self, q):
q.text("bsn_mirror {")
with q.group():
with q.indent(2):
q.breakable()
q.text("dest_port = ");
q.text("%#x" % self.dest_port)
q.text(","); q.breakable()
q.text("vlan_tag = ");
q.text("%#x" % self.vlan_tag)
q.text(","); q.breakable()
q.text("copy_stage = ");
q.text("%#x" % self.copy_stage)
q.breakable()
q.text('}')
bsn.subtypes[1] = bsn_mirror
class bsn_set_tunnel_dst(bsn):
type = 65535
experimenter = 6035143
subtype = 2
def __init__(self, dst=None):
if dst != None:
self.dst = dst
else:
self.dst = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(struct.pack("!L", self.dst))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_set_tunnel_dst()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 2)
obj.dst = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.dst != other.dst: return False
return True
def pretty_print(self, q):
q.text("bsn_set_tunnel_dst {")
with q.group():
with q.indent(2):
q.breakable()
q.text("dst = ");
q.text("%#x" % self.dst)
q.breakable()
q.text('}')
bsn.subtypes[2] = bsn_set_tunnel_dst
class copy_ttl_in(action):
type = 12
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = copy_ttl_in()
_type = reader.read("!H")[0]
assert(_type == 12)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("copy_ttl_in {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[12] = copy_ttl_in
class copy_ttl_out(action):
type = 11
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = copy_ttl_out()
_type = reader.read("!H")[0]
assert(_type == 11)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("copy_ttl_out {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[11] = copy_ttl_out
class dec_mpls_ttl(action):
type = 16
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = dec_mpls_ttl()
_type = reader.read("!H")[0]
assert(_type == 16)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("dec_mpls_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[16] = dec_mpls_ttl
class dec_nw_ttl(action):
type = 24
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = dec_nw_ttl()
_type = reader.read("!H")[0]
assert(_type == 24)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("dec_nw_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[24] = dec_nw_ttl
class group(action):
type = 22
def __init__(self, group_id=None):
if group_id != None:
self.group_id = group_id
else:
self.group_id = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.group_id))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = group()
_type = reader.read("!H")[0]
assert(_type == 22)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.group_id = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.group_id != other.group_id: return False
return True
def pretty_print(self, q):
q.text("group {")
with q.group():
with q.indent(2):
q.breakable()
q.text("group_id = ");
q.text("%#x" % self.group_id)
q.breakable()
q.text('}')
action.subtypes[22] = group
class nicira(experimenter):
subtypes = {}
type = 65535
experimenter = 8992
def __init__(self, subtype=None):
if subtype != None:
self.subtype = subtype
else:
self.subtype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!H", self.subtype))
packed.append('\x00' * 2)
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 8)
subclass = nicira.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = nicira()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 8992)
obj.subtype = reader.read("!H")[0]
reader.skip(2)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.subtype != other.subtype: return False
return True
def pretty_print(self, q):
q.text("nicira {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
experimenter.subtypes[8992] = nicira
class nicira_dec_ttl(nicira):
type = 65535
experimenter = 8992
subtype = 18
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!H", self.subtype))
packed.append('\x00' * 2)
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = nicira_dec_ttl()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 8992)
_subtype = reader.read("!H")[0]
assert(_subtype == 18)
reader.skip(2)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("nicira_dec_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
nicira.subtypes[18] = nicira_dec_ttl
class output(action):
type = 0
def __init__(self, port=None, max_len=None):
if port != None:
self.port = port
else:
self.port = 0
if max_len != None:
self.max_len = max_len
else:
self.max_len = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(util.pack_port_no(self.port))
packed.append(struct.pack("!H", self.max_len))
packed.append('\x00' * 6)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = output()
_type = reader.read("!H")[0]
assert(_type == 0)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.port = util.unpack_port_no(reader)
obj.max_len = reader.read("!H")[0]
reader.skip(6)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port != other.port: return False
if self.max_len != other.max_len: return False
return True
def pretty_print(self, q):
q.text("output {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port = ");
q.text(util.pretty_port(self.port))
q.text(","); q.breakable()
q.text("max_len = ");
q.text("%#x" % self.max_len)
q.breakable()
q.text('}')
action.subtypes[0] = output
class pop_mpls(action):
type = 20
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = pop_mpls()
_type = reader.read("!H")[0]
assert(_type == 20)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("pop_mpls {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[20] = pop_mpls
class pop_pbb(action):
type = 27
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = pop_pbb()
_type = reader.read("!H")[0]
assert(_type == 27)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("pop_pbb {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[27] = pop_pbb
class pop_vlan(action):
type = 18
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = pop_vlan()
_type = reader.read("!H")[0]
assert(_type == 18)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("pop_vlan {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[18] = pop_vlan
class push_mpls(action):
type = 19
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = push_mpls()
_type = reader.read("!H")[0]
assert(_type == 19)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("push_mpls {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[19] = push_mpls
class push_pbb(action):
type = 26
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = push_pbb()
_type = reader.read("!H")[0]
assert(_type == 26)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("push_pbb {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[26] = push_pbb
class push_vlan(action):
type = 17
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = push_vlan()
_type = reader.read("!H")[0]
assert(_type == 17)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("push_vlan {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[17] = push_vlan
class set_field(action):
type = 25
def __init__(self, field=None):
if field != None:
self.field = field
else:
self.field = None
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(self.field.pack())
length = sum([len(x) for x in packed])
packed.append(loxi.generic_util.pad_to(8, length))
length += len(packed[-1])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_field()
_type = reader.read("!H")[0]
assert(_type == 25)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.field = ofp.oxm.oxm.unpack(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.field != other.field: return False
return True
def pretty_print(self, q):
q.text("set_field {")
with q.group():
with q.indent(2):
q.breakable()
q.text("field = ");
q.pp(self.field)
q.breakable()
q.text('}')
action.subtypes[25] = set_field
class set_mpls_ttl(action):
type = 15
def __init__(self, mpls_ttl=None):
if mpls_ttl != None:
self.mpls_ttl = mpls_ttl
else:
self.mpls_ttl = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!B", self.mpls_ttl))
packed.append('\x00' * 3)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_mpls_ttl()
_type = reader.read("!H")[0]
assert(_type == 15)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.mpls_ttl = reader.read("!B")[0]
reader.skip(3)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mpls_ttl != other.mpls_ttl: return False
return True
def pretty_print(self, q):
q.text("set_mpls_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mpls_ttl = ");
q.text("%#x" % self.mpls_ttl)
q.breakable()
q.text('}')
action.subtypes[15] = set_mpls_ttl
class set_nw_ttl(action):
type = 23
def __init__(self, nw_ttl=None):
if nw_ttl != None:
self.nw_ttl = nw_ttl
else:
self.nw_ttl = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!B", self.nw_ttl))
packed.append('\x00' * 3)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_nw_ttl()
_type = reader.read("!H")[0]
assert(_type == 23)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.nw_ttl = reader.read("!B")[0]
reader.skip(3)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.nw_ttl != other.nw_ttl: return False
return True
def pretty_print(self, q):
q.text("set_nw_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.text("nw_ttl = ");
q.text("%#x" % self.nw_ttl)
q.breakable()
q.text('}')
action.subtypes[23] = set_nw_ttl
class set_queue(action):
type = 21
def __init__(self, queue_id=None):
if queue_id != None:
self.queue_id = queue_id
else:
self.queue_id = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.queue_id))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_queue()
_type = reader.read("!H")[0]
assert(_type == 21)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.queue_id = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.queue_id != other.queue_id: return False
return True
def pretty_print(self, q):
q.text("set_queue {")
with q.group():
with q.indent(2):
q.breakable()
q.text("queue_id = ");
q.text("%#x" % self.queue_id)
q.breakable()
q.text('}')
action.subtypes[21] = set_queue
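# Illustrative round-trip sketch, not part of the generated module: each
# concrete action registers itself in its parent's `subtypes` dict, so
# action.unpack() can peek at the leading 2-byte type field and dispatch to
# the matching subclass. Assumes the pyloxi runtime's
# loxi.generic_util.OFReader and the Python 2 string semantics used above.
#
#   buf = output(port=1, max_len=0xffff).pack()
#   reader = loxi.generic_util.OFReader(buf)
#   act = action.unpack(reader)   # peeks type 0, dispatches to output
#   assert isinstance(act, output) and act.port == 1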
|
|
"""
Copyright 2010 Rusty Klophaus <rusty@basho.com>
Copyright 2010 Justin Sheehy <justin@basho.com>
Copyright 2009 Jay Baird <jay@mochimedia.com>
This file is provided to you under the Apache License,
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import mimetypes
from riak.util import deprecateQuorumAccessors, deprecated
def deprecateBucketQuorumAccessors(klass):
return deprecateQuorumAccessors(klass, parent='_client')
def bucket_property(name, doc=None):
def _prop_getter(self):
return self.get_property(name)
def _prop_setter(self, value):
return self.set_property(name, value)
return property(_prop_getter, _prop_setter, doc=doc)
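# Minimal sketch of what bucket_property generates (illustrative only): the
# returned property proxies attribute access to get_property/set_property,
# so for a definition like ``n_val = bucket_property('n_val')``:
#
#   bucket.n_val          # -> bucket.get_property('n_val')
#   bucket.n_val = 3      # -> bucket.set_property('n_val', 3)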
@deprecateBucketQuorumAccessors
class RiakBucket(object):
"""
The ``RiakBucket`` object allows you to access and change information
about a Riak bucket, and provides methods to create or retrieve
objects within the bucket.
"""
def __init__(self, client, name):
"""
Returns a new ``RiakBucket`` instance.
:param client: A :class:`RiakClient <riak.client.RiakClient>` instance
:type client: :class:`RiakClient <riak.client.RiakClient>`
:param name: The bucket name
:type name: string
"""
try:
if isinstance(name, basestring):
name = name.encode('ascii')
else:
raise TypeError('Bucket name must be a string')
except UnicodeError:
raise TypeError('Unicode bucket names are not supported.')
self._client = client
self.name = name
self._encoders = {}
self._decoders = {}
self._resolver = None
def __hash__(self):
return hash((self.name, self._client))
def __eq__(self, other):
if isinstance(other, self.__class__):
return hash(self) == hash(other)
else:
return False
def __ne__(self, other):
if isinstance(other, self.__class__):
return hash(self) != hash(other)
else:
return True
def get_encoder(self, content_type):
"""
Get the encoding function for the provided content type for
this bucket.
:param content_type: the requested media type
:type content_type: str
        :rtype: function
"""
if content_type in self._encoders:
return self._encoders[content_type]
else:
return self._client.get_encoder(content_type)
def set_encoder(self, content_type, encoder):
"""
Set the encoding function for the provided content type for
this bucket.
:param content_type: the requested media type
:type content_type: str
        :param encoder: an encoding function; takes a single object
            argument and returns the encoded data as a string.
:type encoder: function
"""
self._encoders[content_type] = encoder
return self
def get_decoder(self, content_type):
"""
Get the decoding function for the provided content type for
this bucket.
:param content_type: the requested media type
:type content_type: str
:rtype: function
"""
if content_type in self._decoders:
return self._decoders[content_type]
else:
return self._client.get_decoder(content_type)
def set_decoder(self, content_type, decoder):
"""
Set the decoding function for the provided content type for
this bucket.
:param content_type: the requested media type
:type content_type: str
:param decoder: a decoding function, takes a string and
returns a Python type
:type decoder: function
"""
self._decoders[content_type] = decoder
return self
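    # Illustrative usage sketch, not part of the class: registering a codec
    # pair for a custom content type on this bucket. ``yaml`` is a
    # hypothetical stand-in for any serializer with dump/load functions.
    #
    #   bucket.set_encoder('text/yaml', yaml.safe_dump)
    #   bucket.set_decoder('text/yaml', yaml.safe_load)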
def new(self, key=None, data=None, content_type='application/json',
encoded_data=None):
"""
Create a new :class:`RiakObject <riak.riak_object.RiakObject>`
that will be stored as JSON. A shortcut for manually
instantiating a :class:`RiakObject
<riak.riak_object.RiakObject>`.
:param key: Name of the key. Leaving this to be None (default)
will make Riak generate the key on store.
:type key: string
:param data: The data to store.
:type data: object
:rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
"""
try:
if isinstance(data, basestring):
data = data.encode('ascii')
except UnicodeError:
raise TypeError('Unicode data values are not supported.')
obj = RiakObject(self._client, self, key)
obj.content_type = content_type
if data is not None:
obj.data = data
if encoded_data is not None:
obj.encoded_data = encoded_data
return obj
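    # Illustrative usage sketch (assumes the usual RiakObject.store() API;
    # raw_bytes is a placeholder):
    #
    #   obj = bucket.new('user:1', data={'name': 'ana'})   # JSON by default
    #   obj.store()
    #   blob = bucket.new('img:1', encoded_data=raw_bytes,
    #                     content_type='application/octet-stream')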
def new_binary(self, key=None, data=None,
content_type='application/octet-stream'):
"""
Create a new :class:`RiakObject <riak.riak_object.RiakObject>`
that will be stored as plain text/binary. A shortcut for
manually instantiating a :class:`RiakObject
<riak.riak_object.RiakObject>`.
.. deprecated:: 2.0.0
Use :meth:`new` instead.
:param key: Name of the key.
:type key: string
:param data: The data to store.
:type data: object
:param content_type: The content type of the object.
:type content_type: string
:rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
"""
deprecated('RiakBucket.new_binary is deprecated, '
'use RiakBucket.new with the encoded_data '
'param instead of data')
return self.new(key, encoded_data=data, content_type=content_type)
def get(self, key, r=None, pr=None, timeout=None):
"""
Retrieve an object from Riak.
:param key: Name of the key.
:type key: string
:param r: R-Value of the request (defaults to bucket's R)
:type r: integer
:param pr: PR-Value of the request (defaults to bucket's PR)
:type pr: integer
:param timeout: a timeout value in milliseconds
:type timeout: int
:rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
"""
obj = RiakObject(self._client, self, key)
return obj.reload(r=r, pr=pr, timeout=timeout)
def get_binary(self, key, r=None, pr=None, timeout=None):
"""
Retrieve a binary/string object from Riak.
.. deprecated:: 2.0.0
Use :meth:`get` instead.
:param key: Name of the key.
:type key: string
:param r: R-Value of the request (defaults to bucket's R)
:type r: integer
:param pr: PR-Value of the request (defaults to bucket's PR)
:type pr: integer
:param timeout: a timeout value in milliseconds
:type timeout: int
:rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
"""
deprecated('RiakBucket.get_binary is deprecated, '
'use RiakBucket.get')
return self.get(key, r=r, pr=pr, timeout=timeout)
def multiget(self, keys, r=None, pr=None):
"""
        Retrieves the objects for the given keys from this bucket in parallel.
:param keys: the keys to fetch
:type keys: list
:param r: R-Value for the requests (defaults to bucket's R)
:type r: integer
:param pr: PR-Value for the requests (defaults to bucket's PR)
:type pr: integer
:rtype: list of :class:`RiakObject <riak.riak_object.RiakObject>`
"""
bkeys = [(self.name, key) for key in keys]
return self._client.multiget(bkeys, r=r, pr=pr)
def _get_resolver(self):
if callable(self._resolver):
return self._resolver
elif self._resolver is None:
return self._client.resolver
else:
raise TypeError("resolver is not a function")
def _set_resolver(self, value):
if value is None or callable(value):
self._resolver = value
else:
raise TypeError("resolver is not a function")
resolver = property(_get_resolver, _set_resolver, doc=
"""The sibling-resolution function for this
bucket. If the resolver is not set, the
client's resolver will be used.""")
n_val = bucket_property('n_val', doc="""
N-value for this bucket, which is the number of replicas
that will be written of each object in the bucket.
.. warning:: Set this once before you write any data to the
bucket, and never change it again, otherwise unpredictable
things could happen. This should only be used if you know what
you are doing.
""")
allow_mult = bucket_property('allow_mult', doc="""
If set to True, then writes with conflicting data will be stored
and returned to the client.
""")
r = bucket_property('r', doc="""
The default 'read' quorum for this bucket (how many replicas must
reply for a successful read). This should be an integer less than
the 'n_val' property, or a string of 'one', 'quorum', 'all', or
'default'""")
pr = bucket_property('pr', doc="""
The default 'primary read' quorum for this bucket (how many
primary replicas are required for a successful read). This should
be an integer less than the 'n_val' property, or a string of
'one', 'quorum', 'all', or 'default'""")
rw = bucket_property('rw', doc="""
The default 'read' and 'write' quorum for this bucket (equivalent
to 'r' and 'w' but for deletes). This should be an integer less
than the 'n_val' property, or a string of 'one', 'quorum', 'all',
or 'default'""")
w = bucket_property('w', doc="""
The default 'write' quorum for this bucket (how many replicas must
acknowledge receipt of a write). This should be an integer less
than the 'n_val' property, or a string of 'one', 'quorum', 'all',
or 'default'""")
dw = bucket_property('dw', doc="""
The default 'durable write' quorum for this bucket (how many
replicas must commit the write). This should be an integer less
than the 'n_val' property, or a string of 'one', 'quorum', 'all',
or 'default'""")
pw = bucket_property('pw', doc="""
The default 'primary write' quorum for this bucket (how many
primary replicas are required for a successful write). This should
be an integer less than the 'n_val' property, or a string of
'one', 'quorum', 'all', or 'default'""")
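    # Illustrative sketch of the quorum properties above, not original code:
    # each round-trips through get_property/set_property and accepts either
    # an integer below n_val or one of the symbolic strings.
    #
    #   bucket.r = 'quorum'   # a majority of replicas must answer reads
    #   bucket.w = 2          # two replicas must acknowledge writes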
def set_property(self, key, value):
"""
Set a bucket property.
:param key: Property to set.
:type key: string
:param value: Property value.
:type value: mixed
"""
return self.set_properties({key: value})
def get_property(self, key):
"""
Retrieve a bucket property.
:param key: The property to retrieve.
:type key: string
:rtype: mixed
"""
return self.get_properties()[key]
def set_properties(self, props):
"""
Set multiple bucket properties in one call.
:param props: A dictionary of properties
:type props: dict
"""
self._client.set_bucket_props(self, props)
def get_properties(self):
"""
Retrieve a dict of all bucket properties.
:rtype: dict
"""
return self._client.get_bucket_props(self)
def clear_properties(self):
"""
Reset all bucket properties to their defaults.
"""
return self._client.clear_bucket_props(self)
def get_keys(self):
"""
Return all keys within the bucket.
:rtype: list of keys
"""
return self._client.get_keys(self)
def stream_keys(self):
"""
Streams all keys within the bucket through an iterator.
:rtype: iterator
"""
return self._client.stream_keys(self)
def new_from_file(self, key, filename):
"""
Create a new Riak object in the bucket, using the contents of
the specified file. This is a shortcut for :meth:`new`, where the
``encoded_data`` and ``content_type`` are set for you.
:param key: the key of the new object
:type key: string
:param filename: the file to read the contents from
:type filename: string
:rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
"""
        with open(filename, "rb") as data_file:
            binary_data = data_file.read()
mimetype, encoding = mimetypes.guess_type(filename)
if encoding:
binary_data = bytearray(binary_data, encoding)
else:
binary_data = bytearray(binary_data)
if not mimetype:
mimetype = 'application/octet-stream'
return self.new(key, encoded_data=binary_data, content_type=mimetype)
def new_binary_from_file(self, key, filename):
"""
Create a new Riak object in the bucket, using the contents of
the specified file. This is a shortcut for :meth:`new`, where the
``encoded_data`` and ``content_type`` are set for you.
.. deprecated:: 2.0.0
Use :meth:`new_from_file` instead.
:param key: the key of the new object
:type key: string
:param filename: the file to read the contents from
:type filename: string
:rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
"""
deprecated('RiakBucket.new_binary_from_file is deprecated, use '
'RiakBucket.new_from_file')
return self.new_from_file(key, filename)
def search_enabled(self):
"""
Returns True if search indexing is enabled for this
bucket.
"""
return self.get_properties().get('search', False)
def enable_search(self):
"""
Enable search indexing for this bucket.
"""
if not self.search_enabled():
self.set_property('search', True)
return True
def disable_search(self):
"""
Disable search indexing for this bucket.
"""
if self.search_enabled():
self.set_property('search', False)
return True
def search(self, query, **params):
"""
Queries a search index over objects in this bucket/index. See
:meth:`RiakClient.fulltext_search()
<riak.client.RiakClient.fulltext_search>` for more details.
"""
return self._client.fulltext_search(self.name, query, **params)
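    # Illustrative full-text search sketch (assumes search indexing has been
    # enabled for this bucket, e.g. via enable_search(), and that the client
    # accepts Solr-style keyword params such as ``rows``):
    #
    #   results = bucket.search('name_s:ana', rows=10)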
def get_index(self, index, startkey, endkey=None, return_terms=None,
max_results=None, continuation=None):
"""
Queries a secondary index over objects in this bucket,
returning keys or index/key pairs. See
:meth:`RiakClient.get_index()
<riak.client.RiakClient.get_index>` for more details.
"""
return self._client.get_index(self.name, index, startkey, endkey,
return_terms=return_terms,
max_results=max_results,
continuation=continuation)
def stream_index(self, index, startkey, endkey=None, return_terms=None,
max_results=None, continuation=None):
"""
Queries a secondary index over objects in this bucket,
streaming keys or index/key pairs via an iterator. See
:meth:`RiakClient.stream_index()
<riak.client.RiakClient.stream_index>` for more details.
"""
return self._client.stream_index(self.name, index, startkey, endkey,
return_terms=return_terms,
max_results=max_results,
continuation=continuation)
def delete(self, key, **kwargs):
"""Deletes an object from riak. Short hand for
bucket.new(key).delete(). See :meth:`RiakClient.delete()
<riak.client.RiakClient.delete>` for options.
:param key: The key for the object
:type key: string
:rtype: RiakObject
"""
return self.new(key).delete(**kwargs)
def get_counter(self, key, **kwargs):
"""
Gets the value of a counter stored in this bucket. See
:meth:`RiakClient.get_counter()
<riak.client.RiakClient.get_counter>` for options.
:param key: the key of the counter
:type key: string
:rtype: int
"""
return self._client.get_counter(self, key, **kwargs)
def update_counter(self, key, value, **kwargs):
"""
Updates the value of a counter stored in this bucket. Positive
values increment the counter, negative values decrement. See
:meth:`RiakClient.update_counter()
<riak.client.RiakClient.update_counter>` for options.
:param key: the key of the counter
:type key: string
:param value: the amount to increment or decrement
:type value: integer
"""
return self._client.update_counter(self, key, value, **kwargs)
increment_counter = update_counter
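    # Illustrative counter sketch (assumes the bucket has allow_mult enabled,
    # which Riak counters require):
    #
    #   bucket.update_counter('hits', 1)    # increment
    #   bucket.update_counter('hits', -1)   # decrement
    #   bucket.get_counter('hits')          # -> current value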
def __str__(self):
return '<RiakBucket "{0}">'.format(self.name)
from riak_object import RiakObject
|
|
import unittest
from unittest.mock import patch, call
from mistletoe import block_token, span_token
from mistletoe.block_tokenizer import FileWrapper
class TestToken(unittest.TestCase):
def setUp(self):
self.addCleanup(lambda: span_token._token_types.__setitem__(-1, span_token.RawText))
patcher = patch('mistletoe.span_token.RawText')
self.mock = patcher.start()
span_token._token_types[-1] = self.mock
self.addCleanup(patcher.stop)
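        # Patching RawText (and swapping the mock into _token_types) lets each
        # test assert on the raw string a block token hands to its span-level
        # children, without running the real span tokenizer.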
def _test_match(self, token_cls, lines, arg, **kwargs):
token = next(iter(block_token.tokenize(lines)))
self.assertIsInstance(token, token_cls)
self._test_token(token, arg, **kwargs)
def _test_token(self, token, arg, **kwargs):
for attr, value in kwargs.items():
self.assertEqual(getattr(token, attr), value)
self.mock.assert_any_call(arg)
class TestATXHeading(TestToken):
def test_match(self):
lines = ['### heading 3\n']
arg = 'heading 3'
self._test_match(block_token.Heading, lines, arg, level=3)
def test_children_with_enclosing_hashes(self):
lines = ['# heading 3 ##### \n']
arg = 'heading 3'
self._test_match(block_token.Heading, lines, arg, level=1)
def test_not_heading(self):
lines = ['####### paragraph\n']
arg = '####### paragraph'
self._test_match(block_token.Paragraph, lines, arg)
def test_heading_in_paragraph(self):
lines = ['foo\n', '# heading\n', 'bar\n']
token1, token2, token3 = block_token.tokenize(lines)
self.assertIsInstance(token1, block_token.Paragraph)
self.assertIsInstance(token2, block_token.Heading)
self.assertIsInstance(token3, block_token.Paragraph)
class TestSetextHeading(TestToken):
def test_match(self):
lines = ['some heading\n', '---\n']
arg = 'some heading'
self._test_match(block_token.SetextHeading, lines, arg, level=2)
def test_next(self):
lines = ['some\n', 'heading\n', '---\n', '\n', 'foobar\n']
tokens = iter(block_token.tokenize(lines))
self.assertIsInstance(next(tokens), block_token.SetextHeading)
self.assertIsInstance(next(tokens), block_token.Paragraph)
self.mock.assert_has_calls([call('some'), call('heading'), call('foobar')])
with self.assertRaises(StopIteration) as e:
token = next(tokens)
class TestQuote(unittest.TestCase):
def test_match(self):
with patch('mistletoe.block_token.Paragraph') as mock:
token = next(iter(block_token.tokenize(['> line 1\n', '> line 2\n'])))
self.assertIsInstance(token, block_token.Quote)
def test_lazy_continuation(self):
with patch('mistletoe.block_token.Paragraph') as mock:
token = next(iter(block_token.tokenize(['> line 1\n', 'line 2\n'])))
self.assertIsInstance(token, block_token.Quote)
class TestCodeFence(TestToken):
def test_match_fenced_code(self):
lines = ['```sh\n', 'rm dir\n', 'mkdir test\n', '```\n']
arg = 'rm dir\nmkdir test\n'
self._test_match(block_token.CodeFence, lines, arg, language='sh')
def test_match_fenced_code_with_tilde(self):
lines = ['~~~sh\n', 'rm dir\n', 'mkdir test\n', '~~~\n']
arg = 'rm dir\nmkdir test\n'
self._test_match(block_token.CodeFence, lines, arg, language='sh')
def test_not_match_fenced_code_when_only_inline_code(self):
lines = ['`~` is called tilde']
token = next(iter(block_token.tokenize(lines)))
self.assertIsInstance(token, block_token.Paragraph)
token1 = token.children[0]
self.assertIsInstance(token1, span_token.InlineCode)
self.mock.assert_has_calls([call('~'), call(' is called tilde')])
def test_mixed_code_fence(self):
lines = ['~~~markdown\n', '```sh\n', 'some code\n', '```\n', '~~~\n']
arg = '```sh\nsome code\n```\n'
self._test_match(block_token.CodeFence, lines, arg, language='markdown')
def test_fence_code_lazy_continuation(self):
lines = ['```sh\n', 'rm dir\n', '\n', 'mkdir test\n', '```\n']
arg = 'rm dir\n\nmkdir test\n'
self._test_match(block_token.CodeFence, lines, arg, language='sh')
def test_no_wrapping_newlines_code_fence(self):
lines = ['```\n', 'hey', '```\n', 'paragraph\n']
arg = 'hey'
self._test_match(block_token.CodeFence, lines, arg, language='')
def test_unclosed_code_fence(self):
lines = ['```\n', 'hey']
arg = 'hey'
self._test_match(block_token.CodeFence, lines, arg, language='')
class TestBlockCode(TestToken):
def test_parse_indented_code(self):
lines = [' rm dir\n', ' mkdir test\n']
arg = 'rm dir\nmkdir test\n'
self._test_match(block_token.BlockCode, lines, arg, language='')
class TestParagraph(TestToken):
def test_parse(self):
lines = ['some\n', 'continuous\n', 'lines\n']
arg = 'some'
self._test_match(block_token.Paragraph, lines, arg)
def test_read(self):
lines = ['this\n', '```\n', 'is some\n', '```\n', 'code\n']
try:
token1, token2, token3 = block_token.tokenize(lines)
except ValueError as e:
raise AssertionError("Token number mismatch.") from e
self.assertIsInstance(token1, block_token.Paragraph)
self.assertIsInstance(token2, block_token.CodeFence)
self.assertIsInstance(token3, block_token.Paragraph)
class TestListItem(unittest.TestCase):
def test_parse_marker(self):
lines = ['- foo\n',
'* bar\n',
' + baz\n',
'1. item 1\n',
'2) item 2\n',
'123456789. item x\n']
for line in lines:
self.assertTrue(block_token.ListItem.parse_marker(line))
bad_lines = ['> foo\n',
'1item 1\n',
'2| item 2\n',
'1234567890. item x\n']
for line in bad_lines:
self.assertFalse(block_token.ListItem.parse_marker(line))
def test_tokenize(self):
lines = [' - foo\n',
' bar\n',
'\n',
' baz\n']
token1, token2 = next(iter(block_token.tokenize(lines))).children[0].children
self.assertIsInstance(token1, block_token.Paragraph)
self.assertTrue('foo' in token1)
self.assertIsInstance(token2, block_token.BlockCode)
def test_sublist(self):
lines = ['- foo\n',
' - bar\n']
token1, token2 = block_token.tokenize(lines)[0].children[0].children
self.assertIsInstance(token1, block_token.Paragraph)
self.assertIsInstance(token2, block_token.List)
def test_deep_list(self):
lines = ['- foo\n',
' - bar\n',
' - baz\n']
f = FileWrapper(lines)
ptoken, ltoken = block_token.tokenize(lines)[0].children[0].children
self.assertIsInstance(ptoken, block_token.Paragraph)
self.assertIsInstance(ltoken, block_token.List)
self.assertTrue('foo' in ptoken)
ptoken, ltoken = ltoken.children[0].children
self.assertIsInstance(ptoken, block_token.Paragraph)
self.assertTrue('bar' in ptoken)
self.assertIsInstance(ltoken, block_token.List)
self.assertTrue('baz' in ltoken)
def test_loose_list(self):
lines = ['- foo\n',
' ~~~\n',
' bar\n',
' \n',
                 '  baz\n',
' ~~~\n']
f = FileWrapper(lines)
list_item = block_token.tokenize(lines)[0].children[0]
self.assertEqual(list_item.loose, False)
def test_tight_list(self):
lines = ['- foo\n',
'\n',
'# bar\n']
f = FileWrapper(lines)
list_item = block_token.tokenize(lines)[0].children[0]
self.assertEqual(list_item.loose, False)
class TestList(unittest.TestCase):
def test_different_markers(self):
lines = ['- foo\n',
'* bar\n',
'1. baz\n',
'2) spam\n']
l1, l2, l3, l4 = block_token.tokenize(lines)
self.assertIsInstance(l1, block_token.List)
self.assertTrue('foo' in l1)
self.assertIsInstance(l2, block_token.List)
self.assertTrue('bar' in l2)
self.assertIsInstance(l3, block_token.List)
self.assertTrue('baz' in l3)
self.assertIsInstance(l4, block_token.List)
self.assertTrue('spam' in l4)
def test_sublist(self):
lines = ['- foo\n',
' + bar\n']
token, = block_token.tokenize(lines)
self.assertIsInstance(token, block_token.List)
class TestTable(unittest.TestCase):
def test_parse_align(self):
test_func = block_token.Table.parse_align
self.assertEqual(test_func(':------'), None)
self.assertEqual(test_func(':-----:'), 0)
self.assertEqual(test_func('------:'), 1)
def test_parse_delimiter(self):
test_func = block_token.Table.split_delimiter
self.assertEqual(list(test_func('| :--- | :---: | ---:|\n')),
[':---', ':---:', '---:'])
def test_match(self):
lines = ['| header 1 | header 2 | header 3 |\n',
'| --- | --- | --- |\n',
'| cell 1 | cell 2 | cell 3 |\n',
'| more 1 | more 2 | more 3 |\n']
with patch('mistletoe.block_token.TableRow') as mock:
token = next(iter(block_token.tokenize(lines)))
self.assertIsInstance(token, block_token.Table)
self.assertTrue(hasattr(token, 'header'))
self.assertEqual(token.column_align, [None, None, None])
token.children
calls = [call(line, [None, None, None]) for line in lines[:1]+lines[2:]]
mock.assert_has_calls(calls)
def test_easy_table(self):
lines = ['header 1 | header 2\n',
' ---: | :---\n',
' cell 1 | cell 2\n']
with patch('mistletoe.block_token.TableRow') as mock:
token, = block_token.tokenize(lines)
self.assertIsInstance(token, block_token.Table)
self.assertTrue(hasattr(token, 'header'))
self.assertEqual(token.column_align, [1, None])
token.children
calls = [call(line, [1, None]) for line in lines[:1] + lines[2:]]
mock.assert_has_calls(calls)
def test_not_easy_table(self):
lines = ['not header 1 | not header 2\n',
'foo | bar\n']
token, = block_token.tokenize(lines)
self.assertIsInstance(token, block_token.Paragraph)
class TestTableRow(unittest.TestCase):
def test_match(self):
with patch('mistletoe.block_token.TableCell') as mock:
line = '| cell 1 | cell 2 |\n'
token = block_token.TableRow(line)
self.assertEqual(token.row_align, [None])
mock.assert_has_calls([call('cell 1', None), call('cell 2', None)])
def test_easy_table_row(self):
with patch('mistletoe.block_token.TableCell') as mock:
line = 'cell 1 | cell 2\n'
token = block_token.TableRow(line)
self.assertEqual(token.row_align, [None])
mock.assert_has_calls([call('cell 1', None), call('cell 2', None)])
def test_short_row(self):
with patch('mistletoe.block_token.TableCell') as mock:
line = '| cell 1 |\n'
token = block_token.TableRow(line, [None, None])
self.assertEqual(token.row_align, [None, None])
mock.assert_has_calls([call('cell 1', None), call('', None)])
def test_escaped_pipe_in_cell(self):
with patch('mistletoe.block_token.TableCell') as mock:
line = '| pipe: `\\|` | cell 2\n'
token = block_token.TableRow(line, [None, None])
self.assertEqual(token.row_align, [None, None])
mock.assert_has_calls([call('pipe: `|`', None), call('cell 2', None)])
    @unittest.skip('Even GitHub fails here; workaround: always put a space before `|`')
def test_not_really_escaped_pipe_in_cell(self):
with patch('mistletoe.block_token.TableCell') as mock:
line = '|ending with a \\\\|cell 2\n'
token = block_token.TableRow(line, [None, None])
self.assertEqual(token.row_align, [None, None])
mock.assert_has_calls([call('ending with a \\\\', None), call('cell 2', None)])
class TestTableCell(TestToken):
def test_match(self):
token = block_token.TableCell('cell 2')
self._test_token(token, 'cell 2', align=None)
class TestFootnote(unittest.TestCase):
def test_parse_simple(self):
lines = ['[key 1]: value1\n',
'[key 2]: value2\n']
token = block_token.Document(lines)
self.assertEqual(token.footnotes, {"key 1": ("value1", ""),
"key 2": ("value2", "")})
def test_parse_with_title(self):
lines = ['[key 1]: value1 "title1"\n',
'[key 2]: value2\n',
'"title2"\n']
token = block_token.Document(lines)
self.assertEqual(token.footnotes, {"key 1": ("value1", "title1"),
"key 2": ("value2", "title2")})
    # this tests an edge case; it shouldn't occur in normal documents
def test_parse_with_para_right_after(self):
lines = ['[key 1]: value1\n',
# 'something1\n', # if uncommented,
# this and the next line should be treated as a paragraph
# - this line gets skipped instead now
'[key 2]: value2\n',
'something2\n',
'\n',
'[key 3]: value3\r\n', # '\r', or any other whitespace
'something3\n']
token = block_token.Document(lines)
self.assertEqual(token.footnotes, {"key 1": ("value1", ""),
"key 2": ("value2", ""),
"key 3": ("value3", "")})
self.assertEqual(len(token.children), 2)
self.assertIsInstance(token.children[0], block_token.Paragraph)
self.assertEqual(token.children[0].children[0].content, "something2")
self.assertEqual(token.children[1].children[0].content, "something3")
def test_parse_opening_bracket_as_paragraph(self): # ... and no error is raised
lines = ['[\n']
token = block_token.Document(lines)
self.assertEqual(len(token.footnotes), 0)
self.assertEqual(len(token.children), 1)
self.assertIsInstance(token.children[0], block_token.Paragraph)
self.assertEqual(token.children[0].children[0].content, '[')
def test_parse_opening_brackets_as_paragraph(self): # ... and no lines are skipped
lines = ['[\n',
'[ \n',
']\n']
token = block_token.Document(lines)
self.assertEqual(len(token.footnotes), 0)
self.assertEqual(len(token.children), 1)
para = token.children[0]
self.assertIsInstance(para, block_token.Paragraph)
self.assertEqual(len(para.children), 5,
'expected: RawText, LineBreak, RawText, LineBreak, RawText')
self.assertEqual(para.children[0].content, '[')
class TestDocument(unittest.TestCase):
def test_store_footnote(self):
lines = ['[key 1]: value1\n',
'[key 2]: value2\n']
document = block_token.Document(lines)
self.assertEqual(document.footnotes['key 1'], ('value1', ''))
self.assertEqual(document.footnotes['key 2'], ('value2', ''))
def test_auto_splitlines(self):
lines = "some\ncontinual\nlines\n"
document = block_token.Document(lines)
self.assertIsInstance(document.children[0], block_token.Paragraph)
self.assertEqual(len(document.children), 1)
class TestThematicBreak(unittest.TestCase):
def test_match(self):
def test_case(line):
token = next(iter(block_token.tokenize([line])))
self.assertIsInstance(token, block_token.ThematicBreak)
cases = ['---\n', '* * *\n', '_ _ _\n']
for case in cases:
test_case(case)
class TestContains(unittest.TestCase):
def test_contains(self):
lines = ['# heading\n', '\n', 'paragraph\n', 'with\n', '`code`\n']
token = block_token.Document(lines)
self.assertTrue('heading' in token)
self.assertTrue('code' in token)
self.assertFalse('foo' in token)
|
|
"""
Support for MQTT Template lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.mqtt_template/
"""
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components import mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH,
ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_WHITE_VALUE, Light,
SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH,
SUPPORT_COLOR, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE)
from homeassistant.const import (
CONF_DEVICE, CONF_NAME, CONF_OPTIMISTIC, STATE_ON, STATE_OFF)
from homeassistant.components.mqtt import (
CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC,
CONF_UNIQUE_ID, MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
MqttEntityDeviceInfo, subscription)
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
from homeassistant.helpers.restore_state import RestoreEntity
from . import MQTT_LIGHT_SCHEMA_SCHEMA
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'mqtt_template'
DEPENDENCIES = ['mqtt']
DEFAULT_NAME = 'MQTT Template Light'
DEFAULT_OPTIMISTIC = False
CONF_BLUE_TEMPLATE = 'blue_template'
CONF_BRIGHTNESS_TEMPLATE = 'brightness_template'
CONF_COLOR_TEMP_TEMPLATE = 'color_temp_template'
CONF_COMMAND_OFF_TEMPLATE = 'command_off_template'
CONF_COMMAND_ON_TEMPLATE = 'command_on_template'
CONF_EFFECT_LIST = 'effect_list'
CONF_EFFECT_TEMPLATE = 'effect_template'
CONF_GREEN_TEMPLATE = 'green_template'
CONF_RED_TEMPLATE = 'red_template'
CONF_STATE_TEMPLATE = 'state_template'
CONF_WHITE_VALUE_TEMPLATE = 'white_value_template'
PLATFORM_SCHEMA_TEMPLATE = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_BLUE_TEMPLATE): cv.template,
vol.Optional(CONF_BRIGHTNESS_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_TEMPLATE): cv.template,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EFFECT_TEMPLATE): cv.template,
vol.Optional(CONF_GREEN_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_RED_TEMPLATE): cv.template,
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_WHITE_VALUE_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_OFF_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_ON_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS):
vol.All(vol.Coerce(int), vol.In([0, 1, 2])),
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema).extend(
mqtt.MQTT_JSON_ATTRS_SCHEMA.schema).extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
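# Illustrative configuration.yaml snippet accepted by the schema above; the
# topics and templates are made-up examples:
#
#   light:
#     - platform: mqtt
#       schema: template
#       name: "Bedroom light"
#       command_topic: "home/bedroom/light/set"
#       state_topic: "home/bedroom/light/state"
#       command_on_template: "on,{{ brightness|d }}"
#       command_off_template: "off"
#       state_template: "{{ value.split(',')[0] }}"
#       brightness_template: "{{ value.split(',')[1] }}"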
async def async_setup_entity_template(config, async_add_entities, config_entry,
discovery_hash):
"""Set up a MQTT Template light."""
async_add_entities([MqttTemplate(config, config_entry, discovery_hash)])
# pylint: disable=too-many-ancestors
class MqttTemplate(MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
MqttEntityDeviceInfo, Light, RestoreEntity):
"""Representation of a MQTT Template light."""
def __init__(self, config, config_entry, discovery_hash):
"""Initialize a MQTT Template light."""
self._state = False
self._sub_state = None
self._topics = None
self._templates = None
self._optimistic = False
# features
self._brightness = None
self._color_temp = None
self._white_value = None
self._hs = None
self._effect = None
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_hash,
self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA_TEMPLATE(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._config = config
self._topics = {
key: config.get(key) for key in (
CONF_STATE_TOPIC,
CONF_COMMAND_TOPIC
)
}
self._templates = {
key: config.get(key) for key in (
CONF_BLUE_TEMPLATE,
CONF_BRIGHTNESS_TEMPLATE,
CONF_COLOR_TEMP_TEMPLATE,
CONF_COMMAND_OFF_TEMPLATE,
CONF_COMMAND_ON_TEMPLATE,
CONF_EFFECT_TEMPLATE,
CONF_GREEN_TEMPLATE,
CONF_RED_TEMPLATE,
CONF_STATE_TEMPLATE,
CONF_WHITE_VALUE_TEMPLATE,
)
}
optimistic = config.get(CONF_OPTIMISTIC)
self._optimistic = optimistic \
or self._topics[CONF_STATE_TOPIC] is None \
or self._templates[CONF_STATE_TEMPLATE] is None
# features
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
self._brightness = 255
else:
self._brightness = None
if self._templates[CONF_COLOR_TEMP_TEMPLATE] is not None:
self._color_temp = 255
else:
self._color_temp = None
if self._templates[CONF_WHITE_VALUE_TEMPLATE] is not None:
self._white_value = 255
else:
self._white_value = None
if (self._templates[CONF_RED_TEMPLATE] is not None and
self._templates[CONF_GREEN_TEMPLATE] is not None and
self._templates[CONF_BLUE_TEMPLATE] is not None):
self._hs = [0, 0]
else:
self._hs = None
self._effect = None
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
for tpl in self._templates.values():
if tpl is not None:
tpl.hass = self.hass
last_state = await self.async_get_last_state()
@callback
def state_received(msg):
"""Handle new MQTT messages."""
state = self._templates[CONF_STATE_TEMPLATE].\
async_render_with_possible_json_value(msg.payload)
if state == STATE_ON:
self._state = True
elif state == STATE_OFF:
self._state = False
else:
_LOGGER.warning("Invalid state value received")
if self._brightness is not None:
try:
self._brightness = int(
self._templates[CONF_BRIGHTNESS_TEMPLATE].
async_render_with_possible_json_value(msg.payload)
)
except ValueError:
_LOGGER.warning("Invalid brightness value received")
if self._color_temp is not None:
try:
self._color_temp = int(
self._templates[CONF_COLOR_TEMP_TEMPLATE].
async_render_with_possible_json_value(msg.payload)
)
except ValueError:
_LOGGER.warning("Invalid color temperature value received")
if self._hs is not None:
try:
red = int(
self._templates[CONF_RED_TEMPLATE].
async_render_with_possible_json_value(msg.payload))
green = int(
self._templates[CONF_GREEN_TEMPLATE].
async_render_with_possible_json_value(msg.payload))
blue = int(
self._templates[CONF_BLUE_TEMPLATE].
async_render_with_possible_json_value(msg.payload))
self._hs = color_util.color_RGB_to_hs(red, green, blue)
except ValueError:
_LOGGER.warning("Invalid color value received")
if self._white_value is not None:
try:
self._white_value = int(
self._templates[CONF_WHITE_VALUE_TEMPLATE].
async_render_with_possible_json_value(msg.payload)
)
except ValueError:
                    _LOGGER.warning("Invalid white value received")
if self._templates[CONF_EFFECT_TEMPLATE] is not None:
effect = self._templates[CONF_EFFECT_TEMPLATE].\
async_render_with_possible_json_value(msg.payload)
if effect in self._config.get(CONF_EFFECT_LIST):
self._effect = effect
else:
_LOGGER.warning("Unsupported effect value received")
self.async_write_ha_state()
if self._topics[CONF_STATE_TOPIC] is not None:
self._sub_state = await subscription.async_subscribe_topics(
self.hass, self._sub_state,
{'state_topic': {'topic': self._topics[CONF_STATE_TOPIC],
'msg_callback': state_received,
'qos': self._config.get(CONF_QOS)}})
if self._optimistic and last_state:
self._state = last_state.state == STATE_ON
if last_state.attributes.get(ATTR_BRIGHTNESS):
self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
if last_state.attributes.get(ATTR_HS_COLOR):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
if last_state.attributes.get(ATTR_COLOR_TEMP):
self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
if last_state.attributes.get(ATTR_EFFECT):
self._effect = last_state.attributes.get(ATTR_EFFECT)
if last_state.attributes.get(ATTR_WHITE_VALUE):
self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def color_temp(self):
"""Return the color temperature in mired."""
return self._color_temp
@property
def hs_color(self):
"""Return the hs color value [int, int]."""
return self._hs
@property
def white_value(self):
"""Return the white property."""
return self._white_value
@property
def should_poll(self):
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
@property
def name(self):
"""Return the name of the entity."""
return self._config.get(CONF_NAME)
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def is_on(self):
"""Return True if entity is on."""
return self._state
@property
def assumed_state(self):
"""Return True if unable to access real state of the entity."""
return self._optimistic
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST)
@property
def effect(self):
"""Return the current effect."""
return self._effect
async def async_turn_on(self, **kwargs):
"""Turn the entity on.
This method is a coroutine.
"""
values = {'state': True}
if self._optimistic:
self._state = True
if ATTR_BRIGHTNESS in kwargs:
values['brightness'] = int(kwargs[ATTR_BRIGHTNESS])
if self._optimistic:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_COLOR_TEMP in kwargs:
values['color_temp'] = int(kwargs[ATTR_COLOR_TEMP])
if self._optimistic:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
if ATTR_HS_COLOR in kwargs:
hs_color = kwargs[ATTR_HS_COLOR]
# If there's a brightness topic set, we don't want to scale the RGB
# values given using the brightness.
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
brightness = 255
else:
brightness = kwargs.get(
ATTR_BRIGHTNESS, self._brightness if self._brightness else
255)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], brightness / 255 * 100)
values['red'] = rgb[0]
values['green'] = rgb[1]
values['blue'] = rgb[2]
if self._optimistic:
self._hs = kwargs[ATTR_HS_COLOR]
if ATTR_WHITE_VALUE in kwargs:
values['white_value'] = int(kwargs[ATTR_WHITE_VALUE])
if self._optimistic:
self._white_value = kwargs[ATTR_WHITE_VALUE]
if ATTR_EFFECT in kwargs:
values['effect'] = kwargs.get(ATTR_EFFECT)
if ATTR_FLASH in kwargs:
values['flash'] = kwargs.get(ATTR_FLASH)
if ATTR_TRANSITION in kwargs:
values['transition'] = int(kwargs[ATTR_TRANSITION])
mqtt.async_publish(
self.hass, self._topics[CONF_COMMAND_TOPIC],
self._templates[CONF_COMMAND_ON_TEMPLATE].async_render(**values),
self._config.get(CONF_QOS), self._config.get(CONF_RETAIN)
)
if self._optimistic:
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off.
This method is a coroutine.
"""
values = {'state': False}
if self._optimistic:
self._state = False
if ATTR_TRANSITION in kwargs:
values['transition'] = int(kwargs[ATTR_TRANSITION])
mqtt.async_publish(
self.hass, self._topics[CONF_COMMAND_TOPIC],
self._templates[CONF_COMMAND_OFF_TEMPLATE].async_render(**values),
self._config.get(CONF_QOS), self._config.get(CONF_RETAIN)
)
if self._optimistic:
self.async_write_ha_state()
@property
def supported_features(self):
"""Flag supported features."""
features = (SUPPORT_FLASH | SUPPORT_TRANSITION)
if self._brightness is not None:
features = features | SUPPORT_BRIGHTNESS
if self._hs is not None:
features = features | SUPPORT_COLOR
if self._config.get(CONF_EFFECT_LIST) is not None:
features = features | SUPPORT_EFFECT
if self._color_temp is not None:
features = features | SUPPORT_COLOR_TEMP
if self._white_value is not None:
features = features | SUPPORT_WHITE_VALUE
return features
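# Feature-mask sketch: supported_features composes bitmask constants, so a
# config with brightness and red/green/blue templates (but no effect list,
# color-temp, or white-value template) reports
# SUPPORT_FLASH | SUPPORT_TRANSITION | SUPPORT_BRIGHTNESS | SUPPORT_COLOR.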
|
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
To run this template just do:
python generative_adversarial_net.py
After a few epochs, launch TensorBoard to see the images being generated at every batch:
tensorboard --logdir default
"""
import os
from argparse import ArgumentParser, Namespace
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from pl_examples import cli_lightning_logo
from pl_examples.basic_examples.mnist_datamodule import MNIST
from pytorch_lightning.core import LightningDataModule, LightningModule
from pytorch_lightning.trainer import Trainer
from pytorch_lightning.utilities.imports import _TORCHVISION_AVAILABLE
if _TORCHVISION_AVAILABLE:
import torchvision
from torchvision import transforms
class Generator(nn.Module):
"""
>>> Generator(img_shape=(1, 8, 8)) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
Generator(
(model): Sequential(...)
)
"""
def __init__(self, latent_dim: int = 100, img_shape: tuple = (1, 28, 28)):
super().__init__()
self.img_shape = img_shape
def block(in_feat, out_feat, normalize=True):
layers = [nn.Linear(in_feat, out_feat)]
if normalize:
layers.append(nn.BatchNorm1d(out_feat, 0.8))
layers.append(nn.LeakyReLU(0.2, inplace=True))
return layers
self.model = nn.Sequential(
*block(latent_dim, 128, normalize=False),
*block(128, 256),
*block(256, 512),
*block(512, 1024),
nn.Linear(1024, int(np.prod(img_shape))),
nn.Tanh(),
)
def forward(self, z):
img = self.model(z)
img = img.view(img.size(0), *self.img_shape)
return img
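# Shape sketch (assumes the defaults above): a batch of latent vectors of
# shape (N, latent_dim) maps to images of shape (N, *img_shape), e.g.
#
#   z = torch.randn(4, 100)
#   imgs = Generator()(z)        # imgs.shape == torch.Size([4, 1, 28, 28])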
class Discriminator(nn.Module):
"""
>>> Discriminator(img_shape=(1, 28, 28)) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
Discriminator(
(model): Sequential(...)
)
"""
def __init__(self, img_shape):
super().__init__()
self.model = nn.Sequential(
nn.Linear(int(np.prod(img_shape)), 512),
nn.LeakyReLU(0.2, inplace=True),
nn.Linear(512, 256),
nn.LeakyReLU(0.2, inplace=True),
nn.Linear(256, 1),
)
def forward(self, img):
img_flat = img.view(img.size(0), -1)
validity = self.model(img_flat)
return validity
class GAN(LightningModule):
"""
>>> GAN(img_shape=(1, 8, 8)) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
GAN(
(generator): Generator(
(model): Sequential(...)
)
(discriminator): Discriminator(
(model): Sequential(...)
)
)
"""
def __init__(
self,
img_shape: tuple = (1, 28, 28),
lr: float = 0.0002,
b1: float = 0.5,
b2: float = 0.999,
latent_dim: int = 100,
):
super().__init__()
self.save_hyperparameters()
# networks
self.generator = Generator(latent_dim=self.hparams.latent_dim, img_shape=img_shape)
self.discriminator = Discriminator(img_shape=img_shape)
self.validation_z = torch.randn(8, self.hparams.latent_dim)
self.example_input_array = torch.zeros(2, self.hparams.latent_dim)
@staticmethod
def add_argparse_args(parent_parser: ArgumentParser, *, use_argument_group=True):
if use_argument_group:
parser = parent_parser.add_argument_group("pl.GAN")
parser_out = parent_parser
else:
parser = ArgumentParser(parents=[parent_parser], add_help=False)
parser_out = parser
parser.add_argument("--lr", type=float, default=0.0002, help="adam: learning rate")
parser.add_argument("--b1", type=float, default=0.5, help="adam: decay of first order momentum of gradient")
parser.add_argument("--b2", type=float, default=0.999, help="adam: decay of second order momentum of gradient")
parser.add_argument("--latent_dim", type=int, default=100, help="dimensionality of the latent space")
return parser_out
def forward(self, z):
return self.generator(z)
@staticmethod
def adversarial_loss(y_hat, y):
return F.binary_cross_entropy_with_logits(y_hat, y)
def training_step(self, batch, batch_idx, optimizer_idx):
imgs, _ = batch
# sample noise
z = torch.randn(imgs.shape[0], self.hparams.latent_dim)
z = z.type_as(imgs)
# train generator
if optimizer_idx == 0:
            # ground truth labels (all ones): the generator wants its fakes
            # classified as real by the discriminator
            # put on GPU because we created this tensor inside training_loop
valid = torch.ones(imgs.size(0), 1)
valid = valid.type_as(imgs)
# adversarial loss is binary cross-entropy
g_loss = self.adversarial_loss(self.discriminator(self(z)), valid)
tqdm_dict = {"g_loss": g_loss}
self.log_dict(tqdm_dict)
return g_loss
# train discriminator
if optimizer_idx == 1:
# Measure discriminator's ability to classify real from generated samples
# how well can it label as real?
valid = torch.ones(imgs.size(0), 1)
valid = valid.type_as(imgs)
real_loss = self.adversarial_loss(self.discriminator(imgs), valid)
# how well can it label as fake?
fake = torch.zeros(imgs.size(0), 1)
fake = fake.type_as(imgs)
fake_loss = self.adversarial_loss(self.discriminator(self(z).detach()), fake)
# discriminator loss is the average of these
d_loss = (real_loss + fake_loss) / 2
tqdm_dict = {"d_loss": d_loss}
self.log_dict(tqdm_dict)
return d_loss
def configure_optimizers(self):
lr = self.hparams.lr
b1 = self.hparams.b1
b2 = self.hparams.b2
opt_g = torch.optim.Adam(self.generator.parameters(), lr=lr, betas=(b1, b2))
opt_d = torch.optim.Adam(self.discriminator.parameters(), lr=lr, betas=(b1, b2))
return [opt_g, opt_d], []
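    # Note: returning two optimizers and no schedulers makes Lightning call
    # training_step() once per optimizer for each batch, passing
    # optimizer_idx 0 (generator) and 1 (discriminator) as handled above.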
def on_epoch_end(self):
z = self.validation_z.type_as(self.generator.model[0].weight)
# log sampled images
sample_imgs = self(z)
grid = torchvision.utils.make_grid(sample_imgs)
self.logger.experiment.add_image("generated_images", grid, self.current_epoch)
class MNISTDataModule(LightningDataModule):
"""
>>> MNISTDataModule() # doctest: +ELLIPSIS
<...generative_adversarial_net.MNISTDataModule object at ...>
"""
def __init__(self, batch_size: int = 64, data_path: str = os.getcwd(), num_workers: int = 4):
super().__init__()
self.batch_size = batch_size
self.data_path = data_path
self.num_workers = num_workers
self.transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize([0.5], [0.5])])
self.dims = (1, 28, 28)
    def prepare_data(self):
        # Use this method to do things that might write to disk or that need
        # to be done only from a single GPU in distributed settings, like
        # downloading the dataset for the first time.
        MNIST(self.data_path, train=True, download=True, transform=transforms.ToTensor())
def setup(self, stage=None):
# There are also data operations you might want to perform on every GPU, such as applying transforms
# defined explicitly in your datamodule or assigned in init.
self.mnist_train = MNIST(self.data_path, train=True, transform=self.transform)
def train_dataloader(self):
return DataLoader(self.mnist_train, batch_size=self.batch_size, num_workers=self.num_workers)
def main(args: Namespace) -> None:
# ------------------------
# 1 INIT LIGHTNING MODEL
# ------------------------
model = GAN(lr=args.lr, b1=args.b1, b2=args.b2, latent_dim=args.latent_dim)
# ------------------------
# 2 INIT TRAINER
# ------------------------
    # If using distributed training, PyTorch recommends DistributedDataParallel.
# See: https://pytorch.org/docs/stable/nn.html#torch.nn.DataParallel
dm = MNISTDataModule.from_argparse_args(args)
trainer = Trainer.from_argparse_args(args)
# ------------------------
# 3 START TRAINING
# ------------------------
trainer.fit(model, dm)
if __name__ == "__main__":
cli_lightning_logo()
parser = ArgumentParser()
# Add program level args, if any.
# ------------------------
# Add LightningDataLoader args
parser = MNISTDataModule.add_argparse_args(parser)
# Add model specific args
parser = GAN.add_argparse_args(parser)
# Add trainer args
parser = Trainer.add_argparse_args(parser)
# Parse all arguments
args = parser.parse_args()
main(args)
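# Usage sketch (flags contributed by the add_argparse_args calls above;
# values are illustrative):
#
#   python generative_adversarial_net.py --max_epochs 5 --lr 0.0002 --batch_size 64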
|
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from warnings import warn
from email.Utils import formatdate
from twisted.python import log
from twisted.internet import defer
from zope.interface import implements
from buildbot.process.buildstep import RemoteCommand
from buildbot.interfaces import BuildSlaveTooOldError, IRenderable
from buildbot.steps.source.base import Source
class _ComputeRepositoryURL(object):
implements(IRenderable)
def __init__(self, step, repository):
self.step = step
self.repository = repository
def getRenderingFor(self, props):
'''
        Helper function that computes the repository URL based on the
        parameter the source step took and the Change 'repository' property.
'''
build = props.getBuild()
assert build is not None, "Build should be available *during* a build?"
s = build.getSourceStamp(self.step.codebase)
repository = self.repository
if not repository:
return str(s.repository)
else:
if callable(repository):
d = props.render(repository(s.repository))
elif isinstance(repository, dict):
d = props.render(repository.get(s.repository))
            elif isinstance(repository, (str, unicode)):
try:
return str(repository % s.repository)
except TypeError:
# that's the backward compatibility case
d = props.render(repository)
else:
d = props.render(repository)
d.addCallback(str)
return d
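# Usage sketch (hypothetical values): the repository argument passed to a
# source step may take several forms, each resolved by getRenderingFor()
# against the sourcestamp's repository string:
#
#   SVN(svnurl='svn://svn.example.org/proj')                  # plain string, used as-is
#   SVN(svnurl='svn://svn.example.org/%s')                    # '%' substitution
#   SVN(svnurl={'proj': 'svn://svn.example.org/proj'})        # dict lookup
#   SVN(svnurl=lambda repo: 'svn://svn.example.org/' + repo)  # callable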
class SlaveSource(Source):
def __init__(self, mode='update', retry=None, **kwargs):
"""
@type mode: string
@param mode: the kind of VC operation that is desired:
- 'update': specifies that the checkout/update should be
performed directly into the workdir. Each build is performed
in the same directory, allowing for incremental builds. This
minimizes disk space, bandwidth, and CPU time. However, it
may encounter problems if the build process does not handle
dependencies properly (if you must sometimes do a 'clean
build' to make sure everything gets compiled), or if source
files are deleted but generated files can influence test
behavior (e.g. python's .pyc files), or when source
directories are deleted but generated files prevent CVS from
removing them. When used with a patched checkout, from a
previous buildbot try for instance, it will try to "revert"
the changes first and will do a clobber if it is unable to
get a clean checkout. The behavior is SCM-dependent.
- 'copy': specifies that the source-controlled workspace
should be maintained in a separate directory (called the
'copydir'), using checkout or update as necessary. For each
build, a new workdir is created with a copy of the source
tree (rm -rf workdir; cp -R -P -p copydir workdir). This
doubles the disk space required, but keeps the bandwidth low
(update instead of a full checkout). A full 'clean' build
is performed each time. This avoids any generated-file
build problems, but is still occasionally vulnerable to
problems such as a CVS repository being manually rearranged
(causing CVS errors on update) which are not an issue with
a full checkout.
- 'clobber': specifies that the working directory should be
deleted each time, necessitating a full checkout for each
           build. This ensures a clean build off a complete checkout,
avoiding any of the problems described above, but is
bandwidth intensive, as the whole source tree must be
pulled down for each build.
- 'export': is like 'clobber', except that e.g. the 'cvs
export' command is used to create the working directory.
This command removes all VC metadata files (the
CVS/.svn/{arch} directories) from the tree, which is
sometimes useful for creating source tarballs (to avoid
including the metadata in the tar file). Not all VC systems
support export.
@type retry: tuple of ints (delay, repeats) (or None)
@param retry: if provided, VC update failures are re-attempted up
to REPEATS times, with DELAY seconds between each
attempt. Some users have slaves with poor connectivity
to their VC repository, and they say that up to 80% of
their build failures are due to transient network
failures that could be handled by simply retrying a
couple times.
"""
Source.__init__(self, **kwargs)
assert mode in ("update", "copy", "clobber", "export")
if retry:
delay, repeats = retry
assert isinstance(repeats, int)
assert repeats > 0
self.args = {'mode': mode,
'retry': retry,
}
def start(self):
self.args['workdir'] = self.workdir
self.args['logEnviron'] = self.logEnviron
self.args['env'] = self.env
self.args['timeout'] = self.timeout
Source.start(self)
def commandComplete(self, cmd):
if not cmd.updates.has_key("got_revision"):
return
got_revision = cmd.updates["got_revision"][-1]
if got_revision is None:
return
self.updateSourceProperty('got_revision', str(got_revision))
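# Configuration sketch (hypothetical master.cfg fragment): mode and retry are
# per-step options; e.g. a clobbering CVS checkout retried up to twice with a
# 10 second delay between attempts:
#
#   factory.addStep(CVS(cvsroot=':pserver:anon@cvs.example.org:/cvs',
#                       cvsmodule='proj', mode='clobber', retry=(10, 2)))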
class BK(SlaveSource):
"""I perform BitKeeper checkout/update operations."""
name = 'bk'
renderables = [ 'bkurl', 'baseURL' ]
def __init__(self, bkurl=None, baseURL=None,
directory=None, extra_args=None, **kwargs):
"""
@type bkurl: string
@param bkurl: the URL which points to the BitKeeper server.
@type baseURL: string
@param baseURL: if branches are enabled, this is the base URL to
which a branch name will be appended. It should
probably end in a slash. Use exactly one of
C{bkurl} and C{baseURL}.
"""
        self.bkurl = _ComputeRepositoryURL(self, bkurl)
        self.baseURL = _ComputeRepositoryURL(self, baseURL)
self.extra_args = extra_args
        SlaveSource.__init__(self, **kwargs)
if bkurl and baseURL:
raise ValueError("you must use exactly one of bkurl and baseURL")
def computeSourceRevision(self, changes):
return changes.revision
def startVC(self, branch, revision, patch):
warnings = []
slavever = self.slaveVersion("bk")
if not slavever:
m = "slave does not have the 'bk' command"
raise BuildSlaveTooOldError(m)
if self.bkurl:
assert not branch # we need baseURL= to use branches
self.args['bkurl'] = self.bkurl
else:
self.args['bkurl'] = self.baseURL + branch
self.args['revision'] = revision
self.args['patch'] = patch
self.args['branch'] = branch
if self.extra_args is not None:
self.args['extra_args'] = self.extra_args
revstuff = []
revstuff.append("[branch]")
if revision is not None:
revstuff.append("r%s" % revision)
if patch is not None:
revstuff.append("[patch]")
self.description.extend(revstuff)
self.descriptionDone.extend(revstuff)
cmd = RemoteCommand("bk", self.args)
self.startCommand(cmd, warnings)
class CVS(SlaveSource):
"""I do CVS checkout/update operations.
Note: if you are doing anonymous/pserver CVS operations, you will need
to manually do a 'cvs login' on each buildslave before the slave has any
    hope of success. XXX: fix this: take a cvs password as an argument and
figure out how to do a 'cvs login' on each build
"""
name = "cvs"
renderables = [ "cvsroot" ]
#progressMetrics = ('output',)
#
# additional things to track: update gives one stderr line per directory
# (starting with 'cvs server: Updating ') (and is fairly stable if files
# is empty), export gives one line per directory (starting with 'cvs
# export: Updating ') and another line per file (starting with U). Would
# be nice to track these, requires grepping LogFile data for lines,
# parsing each line. Might be handy to have a hook in LogFile that gets
# called with each complete line.
def __init__(self, cvsroot=None, cvsmodule="",
global_options=[], branch=None, checkoutDelay=None,
checkout_options=[], export_options=[], extra_options=[],
login=None,
**kwargs):
"""
@type cvsroot: string
@param cvsroot: CVS Repository from which the source tree should
be obtained. '/home/warner/Repository' for local
or NFS-reachable repositories,
':pserver:anon@foo.com:/cvs' for anonymous CVS,
'user@host.com:/cvs' for non-anonymous CVS or
CVS over ssh. Lots of possibilities, check the
CVS documentation for more.
@type cvsmodule: string
@param cvsmodule: subdirectory of CVS repository that should be
retrieved
@type login: string or None
@param login: if not None, a string which will be provided as a
password to the 'cvs login' command, used when a
:pserver: method is used to access the repository.
This login is only needed once, but must be run
each time (just before the CVS operation) because
there is no way for the buildslave to tell whether
it was previously performed or not.
@type branch: string
@param branch: the default branch name, will be used in a '-r'
argument to specify which branch of the source tree
should be used for this checkout. Defaults to None,
which means to use 'HEAD'.
@type checkoutDelay: int or None
@param checkoutDelay: if not None, the number of seconds to put
between the last known Change and the
timestamp given to the -D argument. This
defaults to exactly half of the parent
Build's .treeStableTimer, but it could be
set to something else if your CVS change
notification has particularly weird
latency characteristics.
@type global_options: list of strings
@param global_options: these arguments are inserted in the cvs
command line, before the
'checkout'/'update' command word. See
'cvs --help-options' for a list of what
may be accepted here. ['-r'] will make
the checked out files read only. ['-r',
'-R'] will also assume the repository is
read-only (I assume this means it won't
                               use locks to ensure atomic access to the
,v files).
@type checkout_options: list of strings
@param checkout_options: these arguments are inserted in the cvs
command line, after 'checkout' but before
branch or revision specifiers.
@type export_options: list of strings
@param export_options: these arguments are inserted in the cvs
command line, after 'export' but before
branch or revision specifiers.
@type extra_options: list of strings
@param extra_options: these arguments are inserted in the cvs
command line, after 'checkout' or 'export' but before
branch or revision specifiers.
"""
self.checkoutDelay = checkoutDelay
self.branch = branch
self.cvsroot = _ComputeRepositoryURL(self, cvsroot)
SlaveSource.__init__(self, **kwargs)
self.args.update({'cvsmodule': cvsmodule,
'global_options': global_options,
'checkout_options':checkout_options,
'export_options':export_options,
'extra_options':extra_options,
'login': login,
})
def computeSourceRevision(self, changes):
if not changes:
return None
lastChange = max([c.when for c in changes])
if self.checkoutDelay is not None:
when = lastChange + self.checkoutDelay
else:
lastSubmit = max([br.submittedAt for br in self.build.requests])
when = (lastChange + lastSubmit) / 2
return formatdate(when)
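    # Worked example: with checkoutDelay=60 and the last change at t=1000, the
    # -D timestamp is formatdate(1060); with checkoutDelay=None it is the
    # midpoint of the last change and the last submit, e.g. (1000 + 1200) / 2.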
def startVC(self, branch, revision, patch):
if self.slaveVersionIsOlderThan("cvs", "1.39"):
# the slave doesn't know to avoid re-using the same sourcedir
# when the branch changes. We have no way of knowing which branch
# the last build used, so if we're using a non-default branch and
# either 'update' or 'copy' modes, it is safer to refuse to
# build, and tell the user they need to upgrade the buildslave.
if (branch != self.branch
and self.args['mode'] in ("update", "copy")):
m = ("This buildslave (%s) does not know about multiple "
"branches, and using mode=%s would probably build the "
"wrong tree. "
"Refusing to build. Please upgrade the buildslave to "
"buildbot-0.7.0 or newer." % (self.build.slavename,
self.args['mode']))
log.msg(m)
raise BuildSlaveTooOldError(m)
if self.slaveVersionIsOlderThan("cvs", "2.10"):
if self.args['extra_options'] or self.args['export_options']:
m = ("This buildslave (%s) does not support export_options "
"or extra_options arguments to the CVS step."
% (self.build.slavename))
log.msg(m)
raise BuildSlaveTooOldError(m)
# the unwanted args are empty, and will probably be ignored by
# the slave, but delete them just to be safe
del self.args['export_options']
del self.args['extra_options']
if branch is None:
branch = "HEAD"
self.args['cvsroot'] = self.cvsroot
self.args['branch'] = branch
self.args['revision'] = revision
self.args['patch'] = patch
if self.args['branch'] == "HEAD" and self.args['revision']:
# special case. 'cvs update -r HEAD -D today' gives no files
# TODO: figure out why, see if it applies to -r BRANCH
self.args['branch'] = None
# deal with old slaves
warnings = []
slavever = self.slaveVersion("cvs", "old")
if slavever == "old":
# 0.5.0
if self.args['mode'] == "export":
self.args['export'] = 1
elif self.args['mode'] == "clobber":
self.args['clobber'] = 1
elif self.args['mode'] == "copy":
self.args['copydir'] = "source"
self.args['tag'] = self.args['branch']
assert not self.args['patch'] # 0.5.0 slave can't do patch
cmd = RemoteCommand("cvs", self.args)
self.startCommand(cmd, warnings)
class SVN(SlaveSource):
"""I perform Subversion checkout/update operations."""
name = 'svn'
branch_placeholder = '%%BRANCH%%'
renderables = [ 'svnurl', 'baseURL' ]
def __init__(self, svnurl=None, baseURL=None, defaultBranch=None,
directory=None, username=None, password=None,
extra_args=None, keep_on_purge=None, ignore_ignores=None,
always_purge=None, depth=None, **kwargs):
"""
@type svnurl: string
@param svnurl: the URL which points to the Subversion server,
combining the access method (HTTP, ssh, local file),
the repository host/port, the repository path, the
sub-tree within the repository, and the branch to
check out. Use exactly one of C{svnurl} and C{baseURL}.
@param baseURL: if branches are enabled, this is the base URL to
which a branch name will be appended. It should
probably end in a slash. Use exactly one of
C{svnurl} and C{baseURL}.
@param defaultBranch: if branches are enabled, this is the branch
to use if the Build does not specify one
explicitly. It will simply be appended
to C{baseURL} and the result handed to
the SVN command.
@type username: string
@param username: username to pass to svn's --username
@type password: string
@param password: password to pass to svn's --password
"""
        if 'workdir' not in kwargs and directory is not None:
# deal with old configs
warn("Please use workdir=, not directory=", DeprecationWarning)
kwargs['workdir'] = directory
self.svnurl = svnurl and _ComputeRepositoryURL(self, svnurl)
self.baseURL = _ComputeRepositoryURL(self, baseURL)
self.branch = defaultBranch
self.username = username
self.password = password
self.extra_args = extra_args
self.keep_on_purge = keep_on_purge
self.ignore_ignores = ignore_ignores
self.always_purge = always_purge
self.depth = depth
SlaveSource.__init__(self, **kwargs)
if svnurl and baseURL:
raise ValueError("you must use either svnurl OR baseURL")
def computeSourceRevision(self, changes):
if not changes or None in [c.revision for c in changes]:
return None
lastChange = max([int(c.revision) for c in changes])
return lastChange
def checkCompatibility(self):
''' Handle compatibility between old slaves/svn clients '''
slavever = self.slaveVersion("svn", "old")
if not slavever:
m = "slave does not have the 'svn' command"
raise BuildSlaveTooOldError(m)
if self.slaveVersionIsOlderThan("svn", "1.39"):
# the slave doesn't know to avoid re-using the same sourcedir
# when the branch changes. We have no way of knowing which branch
# the last build used, so if we're using a non-default branch and
# either 'update' or 'copy' modes, it is safer to refuse to
# build, and tell the user they need to upgrade the buildslave.
if (self.args['branch'] != self.branch
and self.args['mode'] in ("update", "copy")):
m = ("This buildslave (%s) does not know about multiple "
"branches, and using mode=%s would probably build the "
"wrong tree. "
"Refusing to build. Please upgrade the buildslave to "
"buildbot-0.7.0 or newer." % (self.build.slavename,
self.args['mode']))
raise BuildSlaveTooOldError(m)
        if (self.depth is not None) and self.slaveVersionIsOlderThan("svn", "2.9"):
m = ("This buildslave (%s) does not support svn depth "
"arguments. Refusing to build. "
"Please upgrade the buildslave." % (self.build.slavename))
raise BuildSlaveTooOldError(m)
if (self.username is not None or self.password is not None) \
and self.slaveVersionIsOlderThan("svn", "2.8"):
m = ("This buildslave (%s) does not support svn usernames "
"and passwords. "
"Refusing to build. Please upgrade the buildslave to "
"buildbot-0.7.10 or newer." % (self.build.slavename,))
raise BuildSlaveTooOldError(m)
def getSvnUrl(self, branch):
''' Compute the svn url that will be passed to the svn remote command '''
if self.svnurl:
return self.svnurl
else:
if branch is None:
m = ("The SVN source step belonging to builder '%s' does not know "
"which branch to work with. This means that the change source "
"did not specify a branch and that defaultBranch is None." \
% self.build.builder.name)
raise RuntimeError(m)
computed = self.baseURL
if self.branch_placeholder in self.baseURL:
return computed.replace(self.branch_placeholder, branch)
else:
return computed + branch
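    # Sketch (hypothetical URLs): with
    # baseURL='svn://svn.example.org/%%BRANCH%%/src' the placeholder is
    # substituted, so branch 'trunk' yields 'svn://svn.example.org/trunk/src';
    # without a placeholder the branch name is simply appended to baseURL.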
def startVC(self, branch, revision, patch):
warnings = []
self.checkCompatibility()
self.args['svnurl'] = self.getSvnUrl(branch)
self.args['revision'] = revision
self.args['patch'] = patch
self.args['always_purge'] = self.always_purge
        # Set up depth if specified
if self.depth is not None:
self.args['depth'] = self.depth
if self.username is not None:
self.args['username'] = self.username
if self.password is not None:
self.args['password'] = self.password
if self.extra_args is not None:
self.args['extra_args'] = self.extra_args
revstuff = []
#revstuff.append(self.args['svnurl'])
if self.args['svnurl'].find('trunk') == -1:
revstuff.append("[branch]")
if revision is not None:
revstuff.append("r%s" % revision)
if patch is not None:
revstuff.append("[patch]")
self.description.extend(revstuff)
self.descriptionDone.extend(revstuff)
cmd = RemoteCommand("svn", self.args)
self.startCommand(cmd, warnings)
class Darcs(SlaveSource):
"""Check out a source tree from a Darcs repository at 'repourl'.
Darcs has no concept of file modes. This means the eXecute-bit will be
cleared on all source files. As a result, you may need to invoke
configuration scripts with something like:
C{s(step.Configure, command=['/bin/sh', './configure'])}
"""
name = "darcs"
renderables = [ 'repourl', 'baseURL' ]
def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
**kwargs):
"""
@type repourl: string
@param repourl: the URL which points at the Darcs repository. This
is used as the default branch. Using C{repourl} does
not enable builds of alternate branches: use
C{baseURL} to enable this. Use either C{repourl} or
C{baseURL}, not both.
@param baseURL: if branches are enabled, this is the base URL to
which a branch name will be appended. It should
probably end in a slash. Use exactly one of
C{repourl} and C{baseURL}.
@param defaultBranch: if branches are enabled, this is the branch
to use if the Build does not specify one
explicitly. It will simply be appended to
C{baseURL} and the result handed to the
'darcs pull' command.
"""
self.repourl = _ComputeRepositoryURL(self, repourl)
self.baseURL = _ComputeRepositoryURL(self, baseURL)
self.branch = defaultBranch
SlaveSource.__init__(self, **kwargs)
assert self.args['mode'] != "export", \
"Darcs does not have an 'export' mode"
if repourl and baseURL:
raise ValueError("you must provide exactly one of repourl and"
" baseURL")
def startVC(self, branch, revision, patch):
slavever = self.slaveVersion("darcs")
if not slavever:
m = "slave is too old, does not know about darcs"
raise BuildSlaveTooOldError(m)
if self.slaveVersionIsOlderThan("darcs", "1.39"):
if revision:
# TODO: revisit this once we implement computeSourceRevision
m = "0.6.6 slaves can't handle args['revision']"
raise BuildSlaveTooOldError(m)
# the slave doesn't know to avoid re-using the same sourcedir
# when the branch changes. We have no way of knowing which branch
# the last build used, so if we're using a non-default branch and
# either 'update' or 'copy' modes, it is safer to refuse to
# build, and tell the user they need to upgrade the buildslave.
if (branch != self.branch
and self.args['mode'] in ("update", "copy")):
m = ("This buildslave (%s) does not know about multiple "
"branches, and using mode=%s would probably build the "
"wrong tree. "
"Refusing to build. Please upgrade the buildslave to "
"buildbot-0.7.0 or newer." % (self.build.slavename,
self.args['mode']))
raise BuildSlaveTooOldError(m)
if self.repourl:
assert not branch # we need baseURL= to use branches
self.args['repourl'] = self.repourl
else:
self.args['repourl'] = self.baseURL + branch
self.args['revision'] = revision
self.args['patch'] = patch
revstuff = []
if branch is not None and branch != self.branch:
revstuff.append("[branch]")
self.description.extend(revstuff)
self.descriptionDone.extend(revstuff)
cmd = RemoteCommand("darcs", self.args)
self.startCommand(cmd)
class Git(SlaveSource):
"""Check out a source tree from a git repository 'repourl'."""
name = "git"
renderables = [ 'repourl' ]
def __init__(self, repourl=None,
branch="master",
submodules=False,
ignore_ignores=None,
reference=None,
shallow=False,
progress=False,
**kwargs):
"""
@type repourl: string
@param repourl: the URL which points at the git repository
@type branch: string
@param branch: The branch or tag to check out by default. If
a build specifies a different branch, it will
be used instead of this.
@type submodules: boolean
@param submodules: Whether or not to update (and initialize)
git submodules.
@type reference: string
@param reference: The path to a reference repository to obtain
objects from, if any.
@type shallow: boolean
        @param shallow: Use a shallow clone, if possible
@type progress: boolean
@param progress: Pass the --progress option when fetching. This
can solve long fetches getting killed due to
lack of output, but requires Git 1.7.2+.
"""
SlaveSource.__init__(self, **kwargs)
self.repourl = _ComputeRepositoryURL(self, repourl)
self.branch = branch
self.args.update({'submodules': submodules,
'ignore_ignores': ignore_ignores,
'reference': reference,
'shallow': shallow,
'progress': progress,
})
def computeSourceRevision(self, changes):
if not changes:
return None
return changes[-1].revision
def startVC(self, branch, revision, patch):
self.args['branch'] = branch
self.args['repourl'] = self.repourl
self.args['revision'] = revision
self.args['patch'] = patch
# check if there is any patchset we should fetch from Gerrit
if self.build.hasProperty("event.patchSet.ref"):
# GerritChangeSource
self.args['gerrit_branch'] = self.build.getProperty("event.patchSet.ref")
self.updateSourceProperty("gerrit_branch",
self.args['gerrit_branch'])
else:
try:
# forced build
change = self.build.getProperty("gerrit_change", '').split('/')
if len(change) == 2:
self.args['gerrit_branch'] = "refs/changes/%2.2d/%d/%d" \
% (int(change[0]) % 100, int(change[0]), int(change[1]))
self.updateSourceProperty("gerrit_branch",
self.args['gerrit_branch'])
            except (AttributeError, ValueError):
                # not a forced Gerrit build: property missing or malformed
                pass
slavever = self.slaveVersion("git")
if not slavever:
raise BuildSlaveTooOldError("slave is too old, does not know "
"about git")
cmd = RemoteCommand("git", self.args)
self.startCommand(cmd)
class Repo(SlaveSource):
"""Check out a source tree from a repo repository described by manifest."""
name = "repo"
renderables = [ "manifest_url" ]
def __init__(self,
manifest_url=None,
manifest_branch="master",
manifest_file="default.xml",
tarball=None,
jobs=None,
**kwargs):
"""
@type manifest_url: string
@param manifest_url: The URL which points at the repo manifests repository.
@type manifest_branch: string
@param manifest_branch: The manifest branch to check out by default.
@type manifest_file: string
@param manifest_file: The manifest to use for sync.
"""
SlaveSource.__init__(self, **kwargs)
self.manifest_url = _ComputeRepositoryURL(self, manifest_url)
self.args.update({'manifest_branch': manifest_branch,
'manifest_file': manifest_file,
'tarball': tarball,
'manifest_override_url': None,
'jobs': jobs
})
def computeSourceRevision(self, changes):
if not changes:
return None
return changes[-1].revision
def parseDownloadProperty(self, s):
"""
        Try to be lenient about the accepted format. Supports several
        instances of "repo download proj number/patch" (a direct copy/paste
        from the Gerrit web site) or several instances of "proj number/patch"
        (the simpler form). This feature allows an integrator to build with
        several pending interdependent changes.
        Returns the list of repo downloads sent to the buildslave.
"""
import re
        if s is None:
return []
re1 = re.compile("repo download ([^ ]+) ([0-9]+/[0-9]+)")
re2 = re.compile("([^ ]+) ([0-9]+/[0-9]+)")
re3 = re.compile("([^ ]+)/([0-9]+/[0-9]+)")
ret = []
for cur_re in [re1, re2, re3]:
res = cur_re.search(s)
while res:
ret.append("%s %s" % (res.group(1), res.group(2)))
s = s[:res.start(0)] + s[res.end(0):]
res = cur_re.search(s)
return ret
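    # Worked example (illustrative project names): both the Gerrit copy/paste
    # form and the short form normalize to "project change/patchset" entries:
    #
    #   parseDownloadProperty("repo download platform/build 1234/5")
    #       => ["platform/build 1234/5"]
    #   parseDownloadProperty("proj 42/1 other 43/2")
    #       => ["proj 42/1", "other 43/2"]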
def buildDownloadList(self):
"""taken the changesource and forcebuild property,
build the repo download command to send to the slave
making this a defereable allow config to tweak this
in order to e.g. manage dependancies
"""
downloads = self.build.getProperty("repo_downloads", [])
# download patches based on GerritChangeSource events
for change in self.build.allChanges():
if (change.properties.has_key("event.type") and
change.properties["event.type"] == "patchset-created"):
downloads.append("%s %s/%s"% (change.properties["event.change.project"],
change.properties["event.change.number"],
change.properties["event.patchSet.number"]))
# download patches based on web site forced build properties:
# "repo_d", "repo_d0", .., "repo_d9"
# "repo_download", "repo_download0", .., "repo_download9"
for propName in ["repo_d"] + ["repo_d%d" % i for i in xrange(0,10)] + \
["repo_download"] + ["repo_download%d" % i for i in xrange(0,10)]:
s = self.build.getProperty(propName)
if s is not None:
downloads.extend(self.parseDownloadProperty(s))
if downloads:
self.args["repo_downloads"] = downloads
self.updateSourceProperty("repo_downloads", downloads)
return defer.succeed(None)
def startVC(self, branch, revision, patch):
self.args['manifest_url'] = self.manifest_url
# manifest override
self.args['manifest_override_url'] = None
try:
self.args['manifest_override_url'] = self.build.getProperty("manifest_override_url")
except KeyError:
pass
# only master has access to properties, so we must implement this here.
d = self.buildDownloadList()
d.addCallback(self.continueStartVC, branch, revision, patch)
d.addErrback(self.failed)
def continueStartVC(self, ignored, branch, revision, patch):
slavever = self.slaveVersion("repo")
if not slavever:
raise BuildSlaveTooOldError("slave is too old, does not know "
"about repo")
cmd = RemoteCommand("repo", self.args)
self.startCommand(cmd)
def commandComplete(self, cmd):
repo_downloaded = []
if cmd.updates.has_key("repo_downloaded"):
repo_downloaded = cmd.updates["repo_downloaded"][-1]
if repo_downloaded:
self.updateSourceProperty("repo_downloaded",
str(repo_downloaded))
else:
repo_downloaded = []
orig_downloads = self.getProperty("repo_downloads") or []
if len(orig_downloads) != len(repo_downloaded):
self.step_status.setText(["repo download issues"])
class Bzr(SlaveSource):
"""Check out a source tree from a bzr (Bazaar) repository at 'repourl'.
"""
name = "bzr"
renderables = [ 'repourl', 'baseURL' ]
def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
forceSharedRepo=None,
**kwargs):
"""
@type repourl: string
@param repourl: the URL which points at the bzr repository. This
is used as the default branch. Using C{repourl} does
not enable builds of alternate branches: use
C{baseURL} to enable this. Use either C{repourl} or
C{baseURL}, not both.
@param baseURL: if branches are enabled, this is the base URL to
which a branch name will be appended. It should
probably end in a slash. Use exactly one of
C{repourl} and C{baseURL}.
@param defaultBranch: if branches are enabled, this is the branch
to use if the Build does not specify one
explicitly. It will simply be appended to
C{baseURL} and the result handed to the
                              'bzr checkout' or 'bzr pull' command.
@param forceSharedRepo: Boolean, defaults to False. If set to True,
the working directory will be made into a
bzr shared repository if it is not already.
Shared repository greatly reduces the amount
of history data that needs to be downloaded
if not using update/copy mode, or if using
update/copy mode with multiple branches.
"""
self.repourl = _ComputeRepositoryURL(self, repourl)
self.baseURL = _ComputeRepositoryURL(self, baseURL)
self.branch = defaultBranch
SlaveSource.__init__(self, **kwargs)
self.args.update({'forceSharedRepo': forceSharedRepo})
if repourl and baseURL:
raise ValueError("you must provide exactly one of repourl and"
" baseURL")
def computeSourceRevision(self, changes):
if not changes:
return None
lastChange = max([int(c.revision) for c in changes])
return lastChange
def startVC(self, branch, revision, patch):
slavever = self.slaveVersion("bzr")
if not slavever:
m = "slave is too old, does not know about bzr"
raise BuildSlaveTooOldError(m)
if self.repourl:
assert not branch # we need baseURL= to use branches
self.args['repourl'] = self.repourl
else:
self.args['repourl'] = self.baseURL + branch
self.args['revision'] = revision
self.args['patch'] = patch
revstuff = []
if branch is not None and branch != self.branch:
revstuff.append("[" + branch + "]")
if revision is not None:
revstuff.append("r%s" % revision)
self.description.extend(revstuff)
self.descriptionDone.extend(revstuff)
cmd = RemoteCommand("bzr", self.args)
self.startCommand(cmd)
class Mercurial(SlaveSource):
"""Check out a source tree from a mercurial repository 'repourl'."""
name = "hg"
renderables = [ 'repourl', 'baseURL' ]
def __init__(self, repourl=None, baseURL=None, defaultBranch=None,
branchType='dirname', clobberOnBranchChange=True, **kwargs):
"""
@type repourl: string
@param repourl: the URL which points at the Mercurial repository.
This uses the 'default' branch unless defaultBranch is
specified below and the C{branchType} is set to
'inrepo'. It is an error to specify a branch without
setting the C{branchType} to 'inrepo'.
@param baseURL: if 'dirname' branches are enabled, this is the base URL
to which a branch name will be appended. It should
probably end in a slash. Use exactly one of C{repourl}
and C{baseURL}.
@param defaultBranch: if branches are enabled, this is the branch
to use if the Build does not specify one
explicitly.
                              For 'dirname' branches, it will simply be
appended to C{baseURL} and the result handed to
the 'hg update' command.
For 'inrepo' branches, this specifies the named
revision to which the tree will update after a
clone.
@param branchType: either 'dirname' or 'inrepo' depending on whether
the branch name should be appended to the C{baseURL}
or the branch is a mercurial named branch and can be
found within the C{repourl}
@param clobberOnBranchChange: boolean, defaults to True. If set and
                                     using inrepo branches, clobber the tree
at each branch change. Otherwise, just
update to the branch.
"""
self.repourl = _ComputeRepositoryURL(self, repourl)
self.baseURL = _ComputeRepositoryURL(self, baseURL)
self.branch = defaultBranch
self.branchType = branchType
self.clobberOnBranchChange = clobberOnBranchChange
SlaveSource.__init__(self, **kwargs)
if repourl and baseURL:
raise ValueError("you must provide exactly one of repourl and"
" baseURL")
def startVC(self, branch, revision, patch):
slavever = self.slaveVersion("hg")
if not slavever:
raise BuildSlaveTooOldError("slave is too old, does not know "
"about hg")
if self.repourl:
# we need baseURL= to use dirname branches
assert self.branchType == 'inrepo' or not branch
self.args['repourl'] = self.repourl
if branch:
self.args['branch'] = branch
else:
self.args['repourl'] = self.baseURL + (branch or '')
self.args['revision'] = revision
self.args['patch'] = patch
self.args['clobberOnBranchChange'] = self.clobberOnBranchChange
self.args['branchType'] = self.branchType
revstuff = []
if branch is not None and branch != self.branch:
revstuff.append("[branch]")
self.description.extend(revstuff)
self.descriptionDone.extend(revstuff)
cmd = RemoteCommand("hg", self.args)
self.startCommand(cmd)
def computeSourceRevision(self, changes):
if not changes:
return None
# without knowing the revision ancestry graph, we can't sort the
# changes at all. So for now, assume they were given to us in sorted
# order, and just pay attention to the last one. See ticket #103 for
# more details.
if len(changes) > 1:
log.msg("Mercurial.computeSourceRevision: warning: "
"there are %d changes here, assuming the last one is "
"the most recent" % len(changes))
return changes[-1].revision
class P4(SlaveSource):
""" P4 is a class for accessing perforce revision control"""
name = "p4"
renderables = [ 'p4base' ]
def __init__(self, p4base=None, defaultBranch=None, p4port=None, p4user=None,
p4passwd=None, p4extra_views=[], p4line_end='local',
p4client='buildbot_%(slave)s_%(builder)s', **kwargs):
"""
@type p4base: string
@param p4base: A view into a perforce depot, typically
"//depot/proj/"
@type defaultBranch: string
@param defaultBranch: Identify a branch to build by default. Perforce
is a view based branching system. So, the branch
is normally the name after the base. For example,
branch=1.0 is view=//depot/proj/1.0/...
branch=1.1 is view=//depot/proj/1.1/...
@type p4port: string
@param p4port: Specify the perforce server to connection in the format
<host>:<port>. Example "perforce.example.com:1666"
@type p4user: string
@param p4user: The perforce user to run the command as.
@type p4passwd: string
@param p4passwd: The password for the perforce user.
@type p4extra_views: list of tuples
@param p4extra_views: Extra views to be added to
the client that is being used.
@type p4line_end: string
@param p4line_end: value of the LineEnd client specification property
@type p4client: string
@param p4client: The perforce client to use for this buildslave.
"""
self.p4base = _ComputeRepositoryURL(self, p4base)
self.branch = defaultBranch
SlaveSource.__init__(self, **kwargs)
self.args['p4port'] = p4port
self.args['p4user'] = p4user
self.args['p4passwd'] = p4passwd
self.args['p4extra_views'] = p4extra_views
self.args['p4line_end'] = p4line_end
self.p4client = p4client
def setBuild(self, build):
SlaveSource.setBuild(self, build)
self.args['p4client'] = self.p4client % {
'slave': build.slavename,
'builder': build.builder.name,
}
def computeSourceRevision(self, changes):
if not changes:
return None
lastChange = max([int(c.revision) for c in changes])
return lastChange
def startVC(self, branch, revision, patch):
slavever = self.slaveVersion("p4")
assert slavever, "slave is too old, does not know about p4"
args = dict(self.args)
args['p4base'] = self.p4base
args['branch'] = branch or self.branch
args['revision'] = revision
args['patch'] = patch
cmd = RemoteCommand("p4", args)
self.startCommand(cmd)
class Monotone(SlaveSource):
"""Check out a source tree from a monotone repository 'repourl'."""
name = "mtn"
renderables = [ 'repourl' ]
def __init__(self, repourl=None, branch=None, progress=False, **kwargs):
"""
@type repourl: string
@param repourl: the URI which points at the monotone repository.
@type branch: string
@param branch: The branch or tag to check out by default. If
a build specifies a different branch, it will
be used instead of this.
@type progress: boolean
@param progress: Pass the --ticker=dot option when pulling. This
can solve long fetches getting killed due to
lack of output.
"""
SlaveSource.__init__(self, **kwargs)
self.repourl = _ComputeRepositoryURL(self, repourl)
        if not repourl:
            raise ValueError("you must provide a repository uri in 'repourl'")
        if not branch:
            raise ValueError("you must provide a default branch in 'branch'")
self.args.update({'branch': branch,
'progress': progress,
})
def startVC(self, branch, revision, patch):
slavever = self.slaveVersion("mtn")
if not slavever:
raise BuildSlaveTooOldError("slave is too old, does not know "
"about mtn")
self.args['repourl'] = self.repourl
if branch:
self.args['branch'] = branch
self.args['revision'] = revision
self.args['patch'] = patch
cmd = RemoteCommand("mtn", self.args)
self.startCommand(cmd)
def computeSourceRevision(self, changes):
if not changes:
return None
# without knowing the revision ancestry graph, we can't sort the
# changes at all. So for now, assume they were given to us in sorted
# order, and just pay attention to the last one. See ticket #103 for
# more details.
if len(changes) > 1:
log.msg("Monotone.computeSourceRevision: warning: "
"there are %d changes here, assuming the last one is "
"the most recent" % len(changes))
return changes[-1].revision
|
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from grit.format.policy_templates.writers import template_writer
NEWLINE = '\r\n'
def GetWriter(config):
'''Factory method for creating AdmWriter objects.
See the constructor of TemplateWriter for description of
arguments.
'''
return AdmWriter(['win'], config)
class IndentedStringBuilder:
'''Utility class for building text with indented lines.'''
def __init__(self):
self.lines = []
self.indent = ''
def AddLine(self, string='', indent_diff=0):
'''Appends a string with indentation and a linebreak to |self.lines|.
Args:
string: The string to print.
      indent_diff: the difference in indentation of the printed line,
        compared to the next/previous printed line. The increment takes
        effect after printing the line; the decrement takes effect before.
'''
indent_diff *= 2
if indent_diff < 0:
self.indent = self.indent[(-indent_diff):]
if string != '':
self.lines.append(self.indent + string)
else:
self.lines.append('')
if indent_diff > 0:
self.indent += ''.ljust(indent_diff)
def AddLines(self, other):
'''Appends the content of another |IndentedStringBuilder| to |self.lines|.
Indentation of the added lines will be the sum of |self.indent| and
their original indentation.
Args:
other: The buffer from which lines are copied.
'''
for line in other.lines:
self.AddLine(line)
def ToString(self):
'''Returns |self.lines| as text string.'''
return NEWLINE.join(self.lines)
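  # Behavior sketch: a positive indent_diff indents the *following* lines by
  # two spaces each, while a negative one dedents the line itself:
  #
  #   b = IndentedStringBuilder()
  #   b.AddLine('CATEGORY !!foo', 1)
  #   b.AddLine('KEYNAME "bar"')
  #   b.AddLine('END CATEGORY', -1)
  #   b.ToString() == 'CATEGORY !!foo\r\n  KEYNAME "bar"\r\nEND CATEGORY'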
class AdmWriter(template_writer.TemplateWriter):
'''Class for generating policy templates in Windows ADM format.
It is used by PolicyTemplateGenerator to write ADM files.
'''
TYPE_TO_INPUT = {
'string': 'EDITTEXT',
'int': 'NUMERIC',
'string-enum': 'DROPDOWNLIST',
'int-enum': 'DROPDOWNLIST',
'list': 'LISTBOX'
}
def _AddGuiString(self, name, value):
# Escape newlines in the value.
value = value.replace('\n', '\\n')
if name in self.strings_seen:
assert value == self.strings_seen[name]
else:
self.strings_seen[name] = value
line = '%s="%s"' % (name, value)
self.strings.AddLine(line)
def _WriteSupported(self, builder):
builder.AddLine('#if version >= 4', 1)
builder.AddLine('SUPPORTED !!SUPPORTED_WINXPSP2')
builder.AddLine('#endif', -1)
def _WritePart(self, policy, key_name, builder):
'''Writes the PART ... END PART section of a policy.
Args:
policy: The policy to write to the output.
key_name: The registry key backing the policy.
builder: Builder to append lines to.
'''
policy_part_name = policy['name'] + '_Part'
self._AddGuiString(policy_part_name, policy['label'])
# Print the PART ... END PART section:
builder.AddLine()
adm_type = self.TYPE_TO_INPUT[policy['type']]
builder.AddLine('PART !!%s %s' % (policy_part_name, adm_type), 1)
if policy['type'] == 'list':
# Note that the following line causes FullArmor ADMX Migrator to create
# corrupt ADMX files. Please use admx_writer to get ADMX files.
builder.AddLine('KEYNAME "%s\\%s"' % (key_name, policy['name']))
builder.AddLine('VALUEPREFIX ""')
else:
builder.AddLine('VALUENAME "%s"' % policy['name'])
if policy['type'] in ('int-enum', 'string-enum'):
builder.AddLine('ITEMLIST', 1)
for item in policy['items']:
if policy['type'] == 'int-enum':
value_text = 'NUMERIC ' + str(item['value'])
else:
value_text = '"' + item['value'] + '"'
builder.AddLine('NAME !!%s_DropDown VALUE %s' %
(item['name'], value_text))
self._AddGuiString(item['name'] + '_DropDown', item['caption'])
builder.AddLine('END ITEMLIST', -1)
builder.AddLine('END PART', -1)
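  # For illustration, a rough sketch of the ADM emitted for a hypothetical
  # 'string' policy named 'HomepageLocation':
  #
  #   PART !!HomepageLocation_Part EDITTEXT
  #     VALUENAME "HomepageLocation"
  #   END PART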
def _WritePolicy(self, policy, key_name, builder):
self._AddGuiString(policy['name'] + '_Policy', policy['caption'])
builder.AddLine('POLICY !!%s_Policy' % policy['name'], 1)
self._WriteSupported(builder)
policy_explain_name = policy['name'] + '_Explain'
self._AddGuiString(policy_explain_name, policy['desc'])
builder.AddLine('EXPLAIN !!' + policy_explain_name)
if policy['type'] == 'main':
builder.AddLine('VALUENAME "%s"' % policy['name'])
builder.AddLine('VALUEON NUMERIC 1')
builder.AddLine('VALUEOFF NUMERIC 0')
else:
self._WritePart(policy, key_name, builder)
builder.AddLine('END POLICY', -1)
builder.AddLine()
def WritePolicy(self, policy):
self._WritePolicy(policy,
self.config['win_reg_mandatory_key_name'],
self.policies)
def WriteRecommendedPolicy(self, policy):
self._WritePolicy(policy,
self.config['win_reg_recommended_key_name'],
self.recommended_policies)
def BeginPolicyGroup(self, group):
category_name = group['name'] + '_Category'
self._AddGuiString(category_name, group['caption'])
self.policies.AddLine('CATEGORY !!' + category_name, 1)
def EndPolicyGroup(self):
self.policies.AddLine('END CATEGORY', -1)
self.policies.AddLine('')
def BeginRecommendedPolicyGroup(self, group):
category_name = group['name'] + '_Category'
self._AddGuiString(category_name, group['caption'])
self.recommended_policies.AddLine('CATEGORY !!' + category_name, 1)
def EndRecommendedPolicyGroup(self):
self.recommended_policies.AddLine('END CATEGORY', -1)
self.recommended_policies.AddLine('')
def _CreateTemplate(self, category_path, key_name, policies):
'''Creates the whole ADM template except for the [Strings] section, and
returns it as an |IndentedStringBuilder|.
Args:
category_path: List of strings representing the category path.
key_name: Main registry key backing the policies.
policies: ADM code for all the policies in an |IndentedStringBuilder|.
'''
lines = IndentedStringBuilder()
for part in category_path:
lines.AddLine('CATEGORY !!' + part, 1)
lines.AddLine('KEYNAME "%s"' % key_name)
lines.AddLine()
lines.AddLines(policies)
for part in category_path:
lines.AddLine('END CATEGORY', -1)
lines.AddLine()
return lines
def BeginTemplate(self):
self._AddGuiString(self.config['win_supported_os'],
self.messages['win_supported_winxpsp2']['text'])
category_path = self.config['win_mandatory_category_path']
recommended_category_path = self.config['win_recommended_category_path']
recommended_name = '%s (%s)' % \
(self.config['app_name'], self.messages['doc_recommended']['text'])
if self.config['build'] == 'chrome':
self._AddGuiString(category_path[0], 'Google')
self._AddGuiString(category_path[1], self.config['app_name'])
self._AddGuiString(recommended_category_path[1], recommended_name)
elif self.config['build'] == 'chromium':
self._AddGuiString(category_path[0], self.config['app_name'])
self._AddGuiString(recommended_category_path[0], recommended_name)
# All the policies will be written into self.policies.
# The final template text will be assembled into self.lines by
# self.EndTemplate().
def EndTemplate(self):
# Copy policies into self.lines.
policy_class = self.config['win_group_policy_class'].upper()
for class_name in ['MACHINE', 'USER']:
if policy_class != 'BOTH' and policy_class != class_name:
continue
self.lines.AddLine('CLASS ' + class_name, 1)
self.lines.AddLines(self._CreateTemplate(
self.config['win_mandatory_category_path'],
self.config['win_reg_mandatory_key_name'],
self.policies))
self.lines.AddLines(self._CreateTemplate(
self.config['win_recommended_category_path'],
self.config['win_reg_recommended_key_name'],
self.recommended_policies))
self.lines.AddLine('', -1)
# Copy user strings into self.lines.
self.lines.AddLine('[Strings]')
self.lines.AddLines(self.strings)
def Init(self):
# String buffer for building the whole ADM file.
self.lines = IndentedStringBuilder()
# String buffer for building the strings section of the ADM file.
self.strings = IndentedStringBuilder()
# Map of strings seen, to avoid duplicates.
self.strings_seen = {}
# String buffer for building the policies of the ADM file.
self.policies = IndentedStringBuilder()
# String buffer for building the recommended policies of the ADM file.
self.recommended_policies = IndentedStringBuilder()
def GetTemplateText(self):
return self.lines.ToString()
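# Note: AdmWriter reads the following |config| keys (all referenced above):
# 'win_reg_mandatory_key_name', 'win_reg_recommended_key_name',
# 'win_mandatory_category_path', 'win_recommended_category_path',
# 'win_supported_os', 'app_name', 'build' and 'win_group_policy_class'.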
|
|
# Copyright 2019 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
from azure.storage.queue.models import QueueMessage
from azure_common import BaseTest
from c7n_azure import constants
from c7n_azure.container_host.host import Host
from mock import patch, Mock, ANY
class ContainerHostTest(BaseTest):
def test_build_options(self):
with patch.dict(os.environ, {
constants.ENV_CONTAINER_OPTION_OUTPUT_DIR: '/test/dir',
constants.ENV_CONTAINER_OPTION_LOG_GROUP: 'test_log_group',
constants.ENV_CONTAINER_OPTION_METRICS: 'test_metrics'
}, clear=False):
result = Host.build_options()
self.assertEqual('test_log_group', result['log_group'])
self.assertEqual('/test/dir', result['output_dir'])
self.assertEqual('test_metrics', result['metrics'])
@patch('tempfile.mkdtemp', return_value='test_path')
def test_build_options_empty(self, _):
result = Host.build_options()
self.assertEqual(None, result['log_group'])
self.assertEqual('test_path', result['output_dir'])
self.assertEqual(None, result['metrics'])
@patch('c7n_azure.container_host.host.Host.has_required_params', return_value=True)
@patch('c7n_azure.container_host.host.BlockingScheduler.start')
@patch('c7n_azure.container_host.host.Host.prepare_queue_storage')
@patch('c7n_azure.container_host.host.Storage.get_queue_client_by_storage_account')
@patch('tempfile.mkdtemp', return_value='test_path')
def test_init(self, _1, _2, _3, _4, _5):
host = Host()
jobs = host.scheduler.get_jobs()
update_policy_job = [j for j in jobs if j.id == 'update_policies']
poll_queue_job = [j for j in jobs if j.id == 'poll_queue']
self.assertEqual('test_path', host.policy_cache)
self.assertEqual(2, len(jobs))
        self.assertEqual(1, len(update_policy_job))
        self.assertEqual(1, len(poll_queue_job))
@patch('c7n_azure.container_host.host.Host.has_required_params', return_value=True)
@patch('c7n_azure.container_host.host.BlockingScheduler.start')
@patch('c7n_azure.container_host.host.Host.prepare_queue_storage')
@patch('c7n_azure.container_host.host.Storage.get_queue_client_by_storage_account')
@patch('c7n_azure.container_host.host.Storage.get_blob_client_by_uri')
def test_update_policies(self, get_blob_client_mock, _1, _2, _3, _4):
# mock blob list call
client_mock = Mock()
client_mock.list_blobs.return_value = [
ContainerHostTest.get_mock_blob("blob1.yml", "hash1"),
ContainerHostTest.get_mock_blob("blob2.YAML", "hash2"),
ContainerHostTest.get_mock_blob("blob3.md", "hash3")
]
client_mock.get_blob_to_path = self.download_policy_blob
get_blob_client_mock.return_value = (client_mock, None, None)
# init
host = Host()
# cleanup
self.addCleanup(lambda: shutil.rmtree(host.policy_cache))
self.assertEqual({}, host.policies)
# run
host.update_policies()
# both policies were loaded
self.assertEqual(2, len(host.policies.items()))
# jobs were created
jobs = host.scheduler.get_jobs()
self.assertEqual(1, len([j for j in jobs if j.id == 'blob1.yml']))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob2.YAML']))
@patch('c7n_azure.container_host.host.Host.has_required_params', return_value=True)
@patch('c7n_azure.container_host.host.BlockingScheduler.start')
@patch('c7n_azure.container_host.host.Host.prepare_queue_storage')
@patch('c7n_azure.container_host.host.Storage.get_queue_client_by_storage_account')
@patch('c7n_azure.container_host.host.Storage.get_blob_client_by_uri')
def test_update_policies_add_remove(self, get_blob_client_mock, _1, _2, _3, _4):
"""
Run a series of add/update/removal of policy blobs
and verify jobs and caches are updated correctly
"""
# mock blob list call
client_mock = Mock()
client_mock.list_blobs.return_value = [
ContainerHostTest.get_mock_blob("blob1.yml", "hash1")
]
client_mock.get_blob_to_path = self.download_policy_blob
get_blob_client_mock.return_value = (client_mock, None, None)
# init
host = Host()
# cleanup
self.addCleanup(lambda: shutil.rmtree(host.policy_cache))
self.assertEqual({}, host.policies)
# Initial load
host.update_policies()
self.assertEqual(1, len(host.policies.items()))
##################
# Add two policies
##################
client_mock.list_blobs.return_value = [
ContainerHostTest.get_mock_blob("blob1.yml", "hash1"),
ContainerHostTest.get_mock_blob("blob2.yml", "hash2"),
ContainerHostTest.get_mock_blob("blob3.yml", "hash3")
]
host.update_policies()
self.assertEqual(3, len(host.policies.items()))
self.assertIsNotNone(host.policies['blob1.yml'])
self.assertIsNotNone(host.policies['blob2.yml'])
self.assertIsNotNone(host.policies['blob3.yml'])
# jobs were updated
jobs = host.scheduler.get_jobs()
self.assertEqual(3, len([j for j in jobs if j.func == host.run_policy]))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob1.yml']))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob2.yml']))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob3.yml']))
##############################################
# Add one, remove one, update one
##############################################
client_mock.list_blobs.return_value = [
ContainerHostTest.get_mock_blob("blob1.yml", "hash1"),
ContainerHostTest.get_mock_blob("blob4.yml", "hash4"),
ContainerHostTest.get_mock_blob("blob3.yml", "hash3_new")
]
host.update_policies()
self.assertEqual(3, len(host.policies.items()))
self.assertIsNotNone(host.policies['blob1.yml'])
self.assertIsNotNone(host.policies['blob4.yml'])
self.assertIsNotNone(host.policies['blob3.yml'])
self.assertEqual('hash3_new', host.blob_cache['blob3.yml'])
# jobs were updated
jobs = host.scheduler.get_jobs()
self.assertEqual(3, len([j for j in jobs if j.func == host.run_policy]))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob1.yml']))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob4.yml']))
self.assertEqual(1, len([j for j in jobs if j.id == 'blob3.yml']))
############
# remove all
############
client_mock.list_blobs.return_value = [
]
host.update_policies()
self.assertEqual(0, len(host.policies.items()))
# jobs were updated
jobs = host.scheduler.get_jobs()
self.assertEqual(0, len([j for j in jobs if j.func == host.run_policy]))
@patch('c7n_azure.container_host.host.Host.has_required_params', return_value=True)
@patch('c7n_azure.container_host.host.BlockingScheduler.start')
@patch('c7n_azure.container_host.host.Host.prepare_queue_storage')
@patch('c7n_azure.container_host.host.Storage.get_queue_client_by_storage_account')
@patch('c7n_azure.container_host.host.Host.update_policies')
@patch('c7n_azure.container_host.host.AzureEventSubscription')
@patch('c7n_azure.container_host.host.StringInAdvancedFilter')
def test_update_event_subscriptions(self, event_filter_mock, _0, _1, _2, _3, _4, _5):
host = Host()
host.event_queue_name = 'testq'
host.policies = {
'one': {
'policy': ContainerHostTest.get_mock_policy({
'name': 'one',
'mode': {
'type': 'container-event',
'events': ['ResourceGroupWrite', 'VnetWrite']
}
})
},
'two': {
'policy': ContainerHostTest.get_mock_policy({
'name': 'two',
'mode': {
'type': 'container-event',
'events': ['ResourceGroupWrite']
}
})
},
'three': {
'policy': ContainerHostTest.get_mock_policy({
'name': 'three',
'mode': {
'type': 'container-event',
'events': [{
'resourceProvider': 'Microsoft.KeyVault/vaults',
'event': 'write'
}]
}
})
}
}
# Verify we get all three events with no duplicates
host.update_event_subscriptions()
event_filter_mock.assert_called_with(key='Data.OperationName', values={
'Microsoft.KeyVault/vaults/write',
'Microsoft.Network/virtualNetworks/write',
'Microsoft.Resources/subscriptions/resourceGroups/write'})
@patch('c7n_azure.container_host.host.Host.has_required_params', return_value=True)
@patch('c7n_azure.container_host.host.BlockingScheduler.start')
@patch('c7n_azure.container_host.host.Host.prepare_queue_storage')
@patch('c7n_azure.container_host.host.Storage')
@patch('c7n_azure.container_host.host.Host.run_policies_for_event')
def test_poll_queue(self, run_policy_mock, storage_mock, _1, _2, _3):
host = Host()
host.policies = {
'one': {
'policy': ContainerHostTest.get_mock_policy({
'name': 'one',
'mode': {
'type': 'container-event',
'events': ['ResourceGroupWrite', 'VnetWrite']
}
})
}
}
q1 = QueueMessage()
q1.id = 1
q1.dequeue_count = 0
q1.content = """eyAgCiAgICJzdWJqZWN0IjoiL3N1YnNjcmlwdGlvbnMvZWE5ODk3NGItNWQyYS00ZDk4LWE3
OGEtMzgyZjM3MTVkMDdlL3Jlc291cmNlR3JvdXBzL3Rlc3RfY29udGFpbmVyX21vZGUiLAogICAiZXZlbnRUeXBlIj
oiTWljcm9zb2Z0LlJlc291cmNlcy5SZXNvdXJjZVdyaXRlU3VjY2VzcyIsCiAgICJldmVudFRpbWUiOiIyMDE5LTA3
LTE2VDE4OjMwOjQzLjM1OTUyNTVaIiwKICAgImlkIjoiNjE5ZDI2NzQtYjM5Ni00MzU2LTk2MTktNmM1YTUyZmU0ZT
g4IiwKICAgImRhdGEiOnsgICAgICAgIAogICAgICAiY29ycmVsYXRpb25JZCI6IjdkZDVhNDc2LWUwNTItNDBlMi05
OWU0LWJiOTg1MmRjMWY4NiIsCiAgICAgICJyZXNvdXJjZVByb3ZpZGVyIjoiTWljcm9zb2Z0LlJlc291cmNlcyIsCi
AgICAgICJyZXNvdXJjZVVyaSI6Ii9zdWJzY3JpcHRpb25zL2VhOTg5NzRiLTVkMmEtNGQ5OC1hNzhhLTM4MmYzNzE1
ZDA3ZS9yZXNvdXJjZUdyb3Vwcy90ZXN0X2NvbnRhaW5lcl9tb2RlIiwKICAgICAgIm9wZXJhdGlvbk5hbWUiOiJNaW
Nyb3NvZnQuUmVzb3VyY2VzL3N1YnNjcmlwdGlvbnMvcmVzb3VyY2VHcm91cHMvd3JpdGUiLAogICAgICAic3RhdHVz
IjoiU3VjY2VlZGVkIiwKICAgfSwKICAgInRvcGljIjoiL3N1YnNjcmlwdGlvbnMvYWE5ODk3NGItNWQyYS00ZDk4LW
E3OGEtMzgyZjM3MTVkMDdlIgp9"""
q2 = QueueMessage()
q2.id = 2
q2.dequeue_count = 0
q2.content = q1.content
# Return 2 messages on first call, then none
storage_mock.get_queue_messages.side_effect = [[q1, q2], []]
host.poll_queue()
self.assertEqual(2, run_policy_mock.call_count)
run_policy_mock.reset_mock()
# Return 5 messages on first call, then 2, then 0
storage_mock.get_queue_messages.side_effect = [[q1, q1, q1, q1, q1], [q1, q2], []]
host.poll_queue()
self.assertEqual(7, run_policy_mock.call_count)
run_policy_mock.reset_mock()
# High dequeue count
q1.dequeue_count = 100
storage_mock.get_queue_messages.side_effect = [[q1, q2], []]
host.poll_queue()
self.assertEqual(1, run_policy_mock.call_count)
@patch('c7n_azure.container_host.host.Host.has_required_params', return_value=True)
@patch('c7n_azure.container_host.host.Host.prepare_queue_storage')
@patch('c7n_azure.container_host.host.Storage')
@patch('c7n_azure.container_host.host.BlockingScheduler.start')
@patch('c7n_azure.container_host.host.BlockingScheduler.add_job')
def test_run_policy_for_event(self, add_job_mock, _0, _1, _2, _3):
host = Host()
host.policies = {
'one': {
'policy': ContainerHostTest.get_mock_policy({
'name': 'one',
'mode': {
'type': 'container-event',
'events': ['ResourceGroupWrite', 'VnetWrite']
}
})
}
}
message = QueueMessage()
message.id = 1
message.dequeue_count = 0
message.content = \
"""eyAgCiAgICJzdWJqZWN0IjoiL3N1YnNjcmlwdGlvbnMvZWE5ODk3NGItNWQyYS00ZDk4LWE3OGEt
MzgyZjM3MTVkMDdlL3Jlc291cmNlR3JvdXBzL3Rlc3RfY29udGFpbmVyX21vZGUiLAogICAiZXZl
bnRUeXBlIjoiTWljcm9zb2Z0LlJlc291cmNlcy5SZXNvdXJjZVdyaXRlU3VjY2VzcyIsCiAgICJl
dmVudFRpbWUiOiIyMDE5LTA3LTE2VDE4OjMwOjQzLjM1OTUyNTVaIiwKICAgImlkIjoiNjE5ZDI2
NzQtYjM5Ni00MzU2LTk2MTktNmM1YTUyZmU0ZTg4IiwKICAgImRhdGEiOnsgICAgICAgIAogICAg
ICAiY29ycmVsYXRpb25JZCI6IjdkZDVhNDc2LWUwNTItNDBlMi05OWU0LWJiOTg1MmRjMWY4NiIs
CiAgICAgICJyZXNvdXJjZVByb3ZpZGVyIjoiTWljcm9zb2Z0LlJlc291cmNlcyIsCiAgICAgICJy
ZXNvdXJjZVVyaSI6Ii9zdWJzY3JpcHRpb25zL2VhOTg5NzRiLTVkMmEtNGQ5OC1hNzhhLTM4MmYz
NzE1ZDA3ZS9yZXNvdXJjZUdyb3Vwcy90ZXN0X2NvbnRhaW5lcl9tb2RlIiwKICAgICAgIm9wZXJh
dGlvbk5hbWUiOiJNaWNyb3NvZnQuUmVzb3VyY2VzL3N1YnNjcmlwdGlvbnMvcmVzb3VyY2VHcm91
cHMvd3JpdGUiLAogICAgICAic3RhdHVzIjoiU3VjY2VlZGVkIgogICB9LAogICAidG9waWMiOiIv
c3Vic2NyaXB0aW9ucy9hYTk4OTc0Yi01ZDJhLTRkOTgtYTc4YS0zODJmMzcxNWQwN2UiCn0="""
# run with real match
host.run_policies_for_event(message)
add_job_mock.assert_called_with(ANY,
id='one619d2674-b396-4356-9619-6c5a52fe4e88',
name='one',
args=ANY,
misfire_grace_time=ANY)
add_job_mock.reset_mock()
# run with no match
host.policies = {}
host.run_policies_for_event(message)
self.assertFalse(add_job_mock.called)
def test_has_required_params(self):
with patch.dict(os.environ, {
constants.ENV_CONTAINER_POLICY_STORAGE: 'foo',
constants.ENV_CONTAINER_EVENT_QUEUE_NAME: 'foo',
constants.ENV_CONTAINER_EVENT_QUEUE_ID: 'foo'
}, clear=False):
self.assertTrue(Host.has_required_params())
with patch.dict(os.environ, {
constants.ENV_CONTAINER_POLICY_STORAGE: 'foo',
constants.ENV_CONTAINER_EVENT_QUEUE_ID: 'foo'
}, clear=False):
self.assertFalse(Host.has_required_params())
@staticmethod
def download_policy_blob(_, name, path):
policy_string = """
policies:
- name: %s
mode:
type: container-periodic
schedule: '* * * * *'
resource: azure.resourcegroup
"""
with open(path, 'w') as out_file:
out_file.write(policy_string % name)
@staticmethod
def get_mock_blob(name, md5):
new_blob = Mock()
new_blob.name = name
new_blob.properties.content_settings.content_md5 = md5
return new_blob
@staticmethod
def get_mock_policy(policy):
new_policy = Mock()
new_policy.data = policy
return new_policy
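# A minimal sketch (not exercised by the tests) of how the base64 queue
# message payloads above can be decoded for inspection; |content| is assumed
# to be a base64-encoded Event Grid JSON document like the ones used here.
def _decode_event_payload(content):
    import base64
    import json
    # b64decode discards embedded whitespace/newlines before decoding.
    return json.loads(base64.b64decode(content))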
|
|
"""
Testing for grid search module (sklearn.grid_search)
"""
from collections import Iterable, Sized
from sklearn.externals.six.moves import cStringIO as StringIO
from sklearn.externals.six.moves import xrange
from itertools import chain, product
import pickle
import sys
import warnings
import numpy as np
import scipy.sparse as sp
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_false, assert_true
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.mocking import CheckingClassifier, MockDataFrame
from scipy.stats import distributions
from sklearn.externals.six.moves import zip
from sklearn.base import BaseEstimator
from sklearn.datasets import make_classification
from sklearn.datasets import make_blobs
from sklearn.datasets import make_multilabel_classification
from sklearn.grid_search import (GridSearchCV, RandomizedSearchCV,
ParameterGrid, ParameterSampler,
ChangedBehaviorWarning)
from sklearn.svm import LinearSVC, SVC
from sklearn.tree import DecisionTreeRegressor
from sklearn.tree import DecisionTreeClassifier
from sklearn.cluster import KMeans, SpectralClustering
from sklearn.metrics import f1_score
from sklearn.metrics import make_scorer
from sklearn.metrics import roc_auc_score
from sklearn.cross_validation import KFold, StratifiedKFold, FitFailedWarning
from sklearn.preprocessing import Imputer
from sklearn.pipeline import Pipeline
# Neither of the following two estimators inherits from BaseEstimator;
# this lets us test hyperparameter search on user-defined classifiers.
class MockClassifier(object):
"""Dummy classifier to test the cross-validation"""
def __init__(self, foo_param=0):
self.foo_param = foo_param
def fit(self, X, Y):
assert_true(len(X) == len(Y))
return self
def predict(self, T):
return T.shape[0]
predict_proba = predict
decision_function = predict
transform = predict
def score(self, X=None, Y=None):
if self.foo_param > 1:
score = 1.
else:
score = 0.
return score
def get_params(self, deep=False):
return {'foo_param': self.foo_param}
def set_params(self, **params):
self.foo_param = params['foo_param']
return self
class LinearSVCNoScore(LinearSVC):
"""An LinearSVC classifier that has no score method."""
@property
def score(self):
raise AttributeError
X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
y = np.array([1, 1, 2, 2])
def test_parameter_grid():
"""Test basic properties of ParameterGrid."""
params1 = {"foo": [1, 2, 3]}
grid1 = ParameterGrid(params1)
assert_true(isinstance(grid1, Iterable))
assert_true(isinstance(grid1, Sized))
assert_equal(len(grid1), 3)
params2 = {"foo": [4, 2],
"bar": ["ham", "spam", "eggs"]}
grid2 = ParameterGrid(params2)
assert_equal(len(grid2), 6)
# loop to assert we can iterate over the grid multiple times
for i in xrange(2):
# tuple + chain transforms {"a": 1, "b": 2} to ("a", 1, "b", 2)
points = set(tuple(chain(*(sorted(p.items())))) for p in grid2)
assert_equal(points,
set(("bar", x, "foo", y)
for x, y in product(params2["bar"], params2["foo"])))
# Special case: empty grid (useful to get default estimator settings)
empty = ParameterGrid({})
assert_equal(len(empty), 1)
assert_equal(list(empty), [{}])
has_empty = ParameterGrid([{'C': [1, 10]}, {}])
assert_equal(len(has_empty), 3)
assert_equal(list(has_empty), [{'C': 1}, {'C': 10}, {}])
def test_grid_search():
"""Test that the best estimator contains the right value for foo_param"""
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, verbose=3)
# make sure it selects the smallest parameter in case of ties
old_stdout = sys.stdout
sys.stdout = StringIO()
grid_search.fit(X, y)
sys.stdout = old_stdout
assert_equal(grid_search.best_estimator_.foo_param, 2)
for i, foo_i in enumerate([1, 2, 3]):
assert_true(grid_search.grid_scores_[i][0]
== {'foo_param': foo_i})
# Smoke test the score etc:
grid_search.score(X, y)
grid_search.predict_proba(X)
grid_search.decision_function(X)
grid_search.transform(X)
# Test exception handling on scoring
grid_search.scoring = 'sklearn'
assert_raises(ValueError, grid_search.fit, X, y)
@ignore_warnings
def test_grid_search_no_score():
# Test grid-search on classifier that has no score function.
clf = LinearSVC(random_state=0)
X, y = make_blobs(random_state=0, centers=2)
Cs = [.1, 1, 10]
clf_no_score = LinearSVCNoScore(random_state=0)
grid_search = GridSearchCV(clf, {'C': Cs}, scoring='accuracy')
grid_search.fit(X, y)
grid_search_no_score = GridSearchCV(clf_no_score, {'C': Cs},
scoring='accuracy')
# smoketest grid search
grid_search_no_score.fit(X, y)
# check that best params are equal
assert_equal(grid_search_no_score.best_params_, grid_search.best_params_)
# check that we can call score and that it gives the correct result
assert_equal(grid_search.score(X, y), grid_search_no_score.score(X, y))
# giving no scoring function raises an error
grid_search_no_score = GridSearchCV(clf_no_score, {'C': Cs})
assert_raise_message(TypeError, "no scoring", grid_search_no_score.fit,
[[1]])
def test_grid_search_score_method():
X, y = make_classification(n_samples=100, n_classes=2, flip_y=.2,
random_state=0)
clf = LinearSVC(random_state=0)
grid = {'C': [.1]}
search_no_scoring = GridSearchCV(clf, grid, scoring=None).fit(X, y)
search_accuracy = GridSearchCV(clf, grid, scoring='accuracy').fit(X, y)
search_no_score_method_auc = GridSearchCV(LinearSVCNoScore(), grid,
scoring='roc_auc').fit(X, y)
search_auc = GridSearchCV(clf, grid, scoring='roc_auc').fit(X, y)
# Check warning only occurs in situation where behavior changed:
# estimator requires score method to compete with scoring parameter
score_no_scoring = assert_no_warnings(search_no_scoring.score, X, y)
score_accuracy = assert_warns(ChangedBehaviorWarning,
search_accuracy.score, X, y)
score_no_score_auc = assert_no_warnings(search_no_score_method_auc.score,
X, y)
score_auc = assert_warns(ChangedBehaviorWarning,
search_auc.score, X, y)
# ensure the test is sane
assert_true(score_auc < 1.0)
assert_true(score_accuracy < 1.0)
assert_not_equal(score_auc, score_accuracy)
assert_almost_equal(score_accuracy, score_no_scoring)
assert_almost_equal(score_auc, score_no_score_auc)
def test_trivial_grid_scores():
"""Test search over a "grid" with only one point.
Non-regression test: grid_scores_ wouldn't be set by GridSearchCV.
"""
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1]})
grid_search.fit(X, y)
assert_true(hasattr(grid_search, "grid_scores_"))
random_search = RandomizedSearchCV(clf, {'foo_param': [0]})
random_search.fit(X, y)
assert_true(hasattr(random_search, "grid_scores_"))
def test_no_refit():
"""Test that grid search can be used for model selection only"""
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, refit=False)
grid_search.fit(X, y)
assert_true(hasattr(grid_search, "best_params_"))
def test_grid_search_error():
"""Test that grid search will capture errors on data with different
length"""
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
assert_raises(ValueError, cv.fit, X_[:180], y_)
def test_grid_search_iid():
# test the iid parameter
# noise-free simple 2d-data
X, y = make_blobs(centers=[[0, 0], [1, 0], [0, 1], [1, 1]], random_state=0,
cluster_std=0.1, shuffle=False, n_samples=80)
# split dataset into two folds that are not iid
# first one contains data of all 4 blobs, second only from two.
mask = np.ones(X.shape[0], dtype=np.bool)
mask[np.where(y == 1)[0][::2]] = 0
mask[np.where(y == 2)[0][::2]] = 0
# this leads to perfect classification on one fold and a score of 1/3 on
# the other
svm = SVC(kernel='linear')
# create "cv" for splits
cv = [[mask, ~mask], [~mask, mask]]
# once with iid=True (default)
grid_search = GridSearchCV(svm, param_grid={'C': [1, 10]}, cv=cv)
grid_search.fit(X, y)
first = grid_search.grid_scores_[0]
assert_equal(first.parameters['C'], 1)
assert_array_almost_equal(first.cv_validation_scores, [1, 1. / 3.])
# for first split, 1/4 of dataset is in test, for second 3/4.
# take weighted average
assert_almost_equal(first.mean_validation_score,
1 * 1. / 4. + 1. / 3. * 3. / 4.)
# once with iid=False
grid_search = GridSearchCV(svm, param_grid={'C': [1, 10]}, cv=cv,
iid=False)
grid_search.fit(X, y)
first = grid_search.grid_scores_[0]
assert_equal(first.parameters['C'], 1)
# scores are the same as above
assert_array_almost_equal(first.cv_validation_scores, [1, 1. / 3.])
# averaged score is just mean of scores
assert_almost_equal(first.mean_validation_score,
np.mean(first.cv_validation_scores))
def test_grid_search_one_grid_point():
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
param_dict = {"C": [1.0], "kernel": ["rbf"], "gamma": [0.1]}
clf = SVC()
cv = GridSearchCV(clf, param_dict)
cv.fit(X_, y_)
clf = SVC(C=1.0, kernel="rbf", gamma=0.1)
clf.fit(X_, y_)
assert_array_equal(clf.dual_coef_, cv.best_estimator_.dual_coef_)
def test_grid_search_bad_param_grid():
param_dict = {"C": 1.0}
clf = SVC()
assert_raises(ValueError, GridSearchCV, clf, param_dict)
param_dict = {"C": []}
clf = SVC()
assert_raises(ValueError, GridSearchCV, clf, param_dict)
param_dict = {"C": np.ones(6).reshape(3, 2)}
clf = SVC()
assert_raises(ValueError, GridSearchCV, clf, param_dict)
def test_grid_search_sparse():
"""Test that grid search works with both dense and sparse matrices"""
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
cv.fit(X_[:180], y_[:180])
y_pred = cv.predict(X_[180:])
C = cv.best_estimator_.C
X_ = sp.csr_matrix(X_)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
cv.fit(X_[:180].tocoo(), y_[:180])
y_pred2 = cv.predict(X_[180:])
C2 = cv.best_estimator_.C
assert_true(np.mean(y_pred == y_pred2) >= .9)
assert_equal(C, C2)
def test_grid_search_sparse_scoring():
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1")
cv.fit(X_[:180], y_[:180])
y_pred = cv.predict(X_[180:])
C = cv.best_estimator_.C
X_ = sp.csr_matrix(X_)
clf = LinearSVC()
cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1")
cv.fit(X_[:180], y_[:180])
y_pred2 = cv.predict(X_[180:])
C2 = cv.best_estimator_.C
assert_array_equal(y_pred, y_pred2)
assert_equal(C, C2)
# Smoke test the score
#np.testing.assert_allclose(f1_score(cv.predict(X_[:180]), y[:180]),
# cv.score(X_[:180], y[:180]))
# test loss where greater is worse
def f1_loss(y_true_, y_pred_):
return -f1_score(y_true_, y_pred_)
F1Loss = make_scorer(f1_loss, greater_is_better=False)
cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring=F1Loss)
cv.fit(X_[:180], y_[:180])
y_pred3 = cv.predict(X_[180:])
C3 = cv.best_estimator_.C
assert_equal(C, C3)
assert_array_equal(y_pred, y_pred3)
def test_grid_search_precomputed_kernel():
"""Test that grid search works when the input features are given in the
form of a precomputed kernel matrix """
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
# compute the training kernel matrix corresponding to the linear kernel
K_train = np.dot(X_[:180], X_[:180].T)
y_train = y_[:180]
clf = SVC(kernel='precomputed')
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
cv.fit(K_train, y_train)
assert_true(cv.best_score_ >= 0)
# compute the test kernel matrix
K_test = np.dot(X_[180:], X_[:180].T)
y_test = y_[180:]
y_pred = cv.predict(K_test)
assert_true(np.mean(y_pred == y_test) >= 0)
# test error is raised when the precomputed kernel is not array-like
# or sparse
assert_raises(ValueError, cv.fit, K_train.tolist(), y_train)
def test_grid_search_precomputed_kernel_error_nonsquare():
"""Test that grid search returns an error with a non-square precomputed
training kernel matrix"""
K_train = np.zeros((10, 20))
y_train = np.ones((10, ))
clf = SVC(kernel='precomputed')
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
assert_raises(ValueError, cv.fit, K_train, y_train)
def test_grid_search_precomputed_kernel_error_kernel_function():
"""Test that grid search returns an error when using a kernel_function"""
X_, y_ = make_classification(n_samples=200, n_features=100, random_state=0)
kernel_function = lambda x1, x2: np.dot(x1, x2.T)
clf = SVC(kernel=kernel_function)
cv = GridSearchCV(clf, {'C': [0.1, 1.0]})
assert_raises(ValueError, cv.fit, X_, y_)
class BrokenClassifier(BaseEstimator):
"""Broken classifier that cannot be fit twice"""
def __init__(self, parameter=None):
self.parameter = parameter
def fit(self, X, y):
assert_true(not hasattr(self, 'has_been_fit_'))
self.has_been_fit_ = True
def predict(self, X):
return np.zeros(X.shape[0])
def test_refit():
"""Regression test for bug in refitting
Simulates re-fitting a broken estimator; this used to break with
sparse SVMs.
"""
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = GridSearchCV(BrokenClassifier(), [{'parameter': [0, 1]}],
scoring="precision", refit=True)
clf.fit(X, y)
def test_X_as_list():
"""Pass X as list in GridSearchCV"""
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = CheckingClassifier(check_X=lambda x: isinstance(x, list))
cv = KFold(n=len(X), n_folds=3)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=cv)
grid_search.fit(X.tolist(), y).score(X, y)
assert_true(hasattr(grid_search, "grid_scores_"))
def test_y_as_list():
"""Pass y as list in GridSearchCV"""
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
clf = CheckingClassifier(check_y=lambda x: isinstance(x, list))
cv = KFold(n=len(X), n_folds=3)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, cv=cv)
grid_search.fit(X, y.tolist()).score(X, y)
assert_true(hasattr(grid_search, "grid_scores_"))
def test_pandas_input():
# check cross_val_score doesn't destroy pandas dataframe
types = [(MockDataFrame, MockDataFrame)]
try:
from pandas import Series, DataFrame
types.append((DataFrame, Series))
except ImportError:
pass
X = np.arange(100).reshape(10, 10)
y = np.array([0] * 5 + [1] * 5)
for InputFeatureType, TargetType in types:
# X dataframe, y series
X_df, y_ser = InputFeatureType(X), TargetType(y)
check_df = lambda x: isinstance(x, InputFeatureType)
check_series = lambda x: isinstance(x, TargetType)
clf = CheckingClassifier(check_X=check_df, check_y=check_series)
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]})
grid_search.fit(X_df, y_ser).score(X_df, y_ser)
grid_search.predict(X_df)
assert_true(hasattr(grid_search, "grid_scores_"))
def test_unsupervised_grid_search():
# test grid-search with unsupervised estimator
X, y = make_blobs(random_state=0)
km = KMeans(random_state=0)
grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]),
scoring='adjusted_rand_score')
grid_search.fit(X, y)
# ARI can find the right number :)
assert_equal(grid_search.best_params_["n_clusters"], 3)
# Now without a score, and without y
grid_search = GridSearchCV(km, param_grid=dict(n_clusters=[2, 3, 4]))
grid_search.fit(X)
assert_equal(grid_search.best_params_["n_clusters"], 4)
def test_bad_estimator():
# test grid-search with clustering algorithm which doesn't support
# "predict"
sc = SpectralClustering()
grid_search = GridSearchCV(sc, param_grid=dict(gamma=[.1, 1, 10]),
scoring='ari')
assert_raise_message(TypeError, "'score' or a 'predict'", grid_search.fit,
[[1]])
def test_param_sampler():
# test basic properties of param sampler
param_distributions = {"kernel": ["rbf", "linear"],
"C": distributions.uniform(0, 1)}
sampler = ParameterSampler(param_distributions=param_distributions,
n_iter=10, random_state=0)
samples = [x for x in sampler]
assert_equal(len(samples), 10)
for sample in samples:
assert_true(sample["kernel"] in ["rbf", "linear"])
assert_true(0 <= sample["C"] <= 1)
def test_randomized_search_grid_scores():
# Make a dataset with a lot of noise to get various kind of prediction
# errors across CV folds and parameter settings
X, y = make_classification(n_samples=200, n_features=100, n_informative=3,
random_state=0)
# XXX: as of today (scipy 0.12) it's not possible to set the random seed
# of scipy.stats distributions: the assertions in this test should thus
# not depend on the randomization
params = dict(C=distributions.expon(scale=10),
gamma=distributions.expon(scale=0.1))
n_cv_iter = 3
n_search_iter = 30
search = RandomizedSearchCV(SVC(), n_iter=n_search_iter, cv=n_cv_iter,
param_distributions=params, iid=False)
search.fit(X, y)
assert_equal(len(search.grid_scores_), n_search_iter)
# Check consistency of the structure of each cv_score item
for cv_score in search.grid_scores_:
assert_equal(len(cv_score.cv_validation_scores), n_cv_iter)
# Because we set iid to False, the mean_validation score is the
# mean of the fold mean scores instead of the aggregate sample-wise
# mean score
assert_almost_equal(np.mean(cv_score.cv_validation_scores),
cv_score.mean_validation_score)
assert_equal(list(sorted(cv_score.parameters.keys())),
list(sorted(params.keys())))
# Check the consistency with the best_score_ and best_params_ attributes
sorted_grid_scores = list(sorted(search.grid_scores_,
key=lambda x: x.mean_validation_score))
best_score = sorted_grid_scores[-1].mean_validation_score
assert_equal(search.best_score_, best_score)
tied_best_params = [s.parameters for s in sorted_grid_scores
if s.mean_validation_score == best_score]
assert_true(search.best_params_ in tied_best_params,
"best_params_={0} is not part of the"
" tied best models: {1}".format(
search.best_params_, tied_best_params))
def test_grid_search_score_consistency():
# test that correct scores are used
clf = LinearSVC(random_state=0)
X, y = make_blobs(random_state=0, centers=2)
Cs = [.1, 1, 10]
for score in ['f1', 'roc_auc']:
grid_search = GridSearchCV(clf, {'C': Cs}, scoring=score)
grid_search.fit(X, y)
cv = StratifiedKFold(n_folds=3, y=y)
for C, scores in zip(Cs, grid_search.grid_scores_):
clf.set_params(C=C)
scores = scores[2] # get the separate runs from grid scores
i = 0
for train, test in cv:
clf.fit(X[train], y[train])
if score == "f1":
correct_score = f1_score(y[test], clf.predict(X[test]))
elif score == "roc_auc":
dec = clf.decision_function(X[test])
correct_score = roc_auc_score(y[test], dec)
assert_almost_equal(correct_score, scores[i])
i += 1
def test_pickle():
"""Test that a fit search can be pickled"""
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, refit=True)
grid_search.fit(X, y)
pickle.dumps(grid_search) # smoke test
random_search = RandomizedSearchCV(clf, {'foo_param': [1, 2, 3]},
refit=True)
random_search.fit(X, y)
pickle.dumps(random_search) # smoke test
def test_grid_search_with_multioutput_data():
""" Test search with multi-output estimator"""
X, y = make_multilabel_classification(return_indicator=True,
random_state=0)
est_parameters = {"max_depth": [1, 2, 3, 4]}
cv = KFold(y.shape[0], random_state=0)
estimators = [DecisionTreeRegressor(random_state=0),
DecisionTreeClassifier(random_state=0)]
# Test with grid search cv
for est in estimators:
grid_search = GridSearchCV(est, est_parameters, cv=cv)
grid_search.fit(X, y)
for parameters, _, cv_validation_scores in grid_search.grid_scores_:
est.set_params(**parameters)
for i, (train, test) in enumerate(cv):
est.fit(X[train], y[train])
correct_score = est.score(X[test], y[test])
assert_almost_equal(correct_score,
cv_validation_scores[i])
# Test with a randomized search
for est in estimators:
random_search = RandomizedSearchCV(est, est_parameters, cv=cv)
random_search.fit(X, y)
for parameters, _, cv_validation_scores in random_search.grid_scores_:
est.set_params(**parameters)
for i, (train, test) in enumerate(cv):
est.fit(X[train], y[train])
correct_score = est.score(X[test], y[test])
assert_almost_equal(correct_score,
cv_validation_scores[i])
def test_predict_proba_disabled():
"""Test predict_proba when disabled on estimator."""
X = np.arange(20).reshape(5, -1)
y = [0, 0, 1, 1, 1]
clf = SVC(probability=False)
gs = GridSearchCV(clf, {}, cv=2).fit(X, y)
assert_false(hasattr(gs, "predict_proba"))
def test_grid_search_allows_nans():
""" Test GridSearchCV with Imputer """
X = np.arange(20, dtype=np.float64).reshape(5, -1)
X[2, :] = np.nan
y = [0, 0, 1, 1, 1]
p = Pipeline([
('imputer', Imputer(strategy='mean', missing_values='NaN')),
('classifier', MockClassifier()),
])
GridSearchCV(p, {'classifier__foo_param': [1, 2, 3]}, cv=2).fit(X, y)
class FailingClassifier(BaseEstimator):
"""Classifier that raises a ValueError on fit()"""
FAILING_PARAMETER = 2
def __init__(self, parameter=None):
self.parameter = parameter
def fit(self, X, y=None):
if self.parameter == FailingClassifier.FAILING_PARAMETER:
raise ValueError("Failing classifier failed as required")
def predict(self, X):
return np.zeros(X.shape[0])
def test_grid_search_failing_classifier():
"""GridSearchCV with on_error != 'raise'
Ensures that a warning is raised and score reset where appropriate.
"""
X, y = make_classification(n_samples=20, n_features=10, random_state=0)
clf = FailingClassifier()
# refit=False because we only want to check that errors caused by fits
# to individual folds will be caught and warnings raised instead. If
# refit was done, then an exception would be raised on refit and not
# caught by grid_search (expected behavior), and this would cause an
# error in this test.
gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy',
refit=False, error_score=0.0)
assert_warns(FitFailedWarning, gs.fit, X, y)
# Ensure that grid scores were set to zero as required for those fits
# that are expected to fail.
assert all(np.all(this_point.cv_validation_scores == 0.0)
for this_point in gs.grid_scores_
if this_point.parameters['parameter'] ==
FailingClassifier.FAILING_PARAMETER)
gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy',
refit=False, error_score=float('nan'))
assert_warns(FitFailedWarning, gs.fit, X, y)
assert all(np.all(np.isnan(this_point.cv_validation_scores))
for this_point in gs.grid_scores_
if this_point.parameters['parameter'] ==
FailingClassifier.FAILING_PARAMETER)
def test_grid_search_failing_classifier_raise():
"""GridSearchCV with on_error == 'raise' raises the error"""
X, y = make_classification(n_samples=20, n_features=10, random_state=0)
clf = FailingClassifier()
# refit=False because we want to test the behaviour of the grid search part
gs = GridSearchCV(clf, [{'parameter': [0, 1, 2]}], scoring='accuracy',
refit=False, error_score='raise')
# FailingClassifier issues a ValueError so this is what we look for.
assert_raises(ValueError, gs.fit, X, y)
|
|
'''
*File: domain_restriction.py
*Author: Nicholas Mattei (nicholas.mattei@nicta.com.au)
*Date: March 18, 2014
*
* Copyright (c) 2014, Nicholas Mattei and NICTA
* All rights reserved.
*
* Developed by: Nicholas Mattei
* NICTA
* http://www.nickmattei.net
* http://www.preflib.org
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of NICTA nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY NICTA ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NICTA BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
About
--------------------
This file tests a profile for being single-peaked.
'''
import sys
import copy
import glob
from preflibtools import io
from preflibtools import generate_profiles
# Implementation of the single-peaked consistency algorithm detailed in
# B. Escoffier, J. Lang, and M. Ozturk, "Single-peaked consistency and its complexity",
# 2008 European Conference on Artificial Intelligence.
#
# Intuitively, this algorithm finds an axis over the candidates with respect to
# which the given rmaps are single-peaked, or it returns an empty axis (vector).
# This is achieved in time O(|rmaps|*|candmap|).
#
# Note that this algorithm only works for STRICT preferences. If a non-strict
# set of rankmaps is passed in, an error is reported and the program exits.
def is_single_peaked(rmaps, candmap):
for current in rmaps:
if len(current) != len(candmap):
print("is_single_peaked called with non-strict preferences")
exit()
orders = order_vectors(rmaps)
fullorders = order_vectors(rmaps)
#Build the order...
leftside = []
rightside = []
last_cands = last_set(orders)
    # A single last-place candidate imposes no constraints, so keep iterating...
while (len(last_cands) == 1):
if len(leftside) < len(rightside):
leftside.append(last_cands[0])
else:
rightside.insert(0,last_cands[0])
orders = remove_cands(orders, last_cands)
last_cands = last_set(orders)
    # We only break out with != 1 last-place candidates: either we quit, or we put one on each end.
if len(last_cands) > 2:
return []
else:
leftside.append(last_cands[0])
rightside.insert(0,last_cands[1])
orders = remove_cands(orders, last_cands)
    # While there are still unplaced candidates (not yet removed from every vote)
while len(orders[0]) > 0:
last_cands = last_set(orders)
# Should never have more than 2...
if len(last_cands) > 2:
return []
else:
x_i = leftside[len(leftside)-1]
x_j = rightside[0]
#Check Conditions outlined by Lang.
#If L={X}, Case 3
if len(last_cands) == 1:
x = last_cands[0]
# if x_i < x < x_j => leftside + x
if any(o.index(x_j) < o.index(x) and o.index(x) < o.index(x_i) for o in fullorders):
leftside.append(x)
# if x_j < x < x_i => x + right
elif any(o.index(x) < o.index(x_j) and o.index(x_i) < o.index(x) for o in fullorders):
rightside.insert(0, x)
                # Otherwise it doesn't matter and we can put it at either end...
else:
if len(leftside) < len(rightside):
leftside.append(x)
else:
rightside.insert(0,x)
# Restrict...
orders = remove_cands(orders, last_cands)
#if L = {x, y}, Case 2c and 2d
# if x_i < x < x_j < y ==> left+x and y+right
# if x_j < x < x_i < y ==> left+y and x+right
# if both, then contradiction...
# if x_i < x < y < x_j ==> then this must be axis...
# if x_j < y < x < x_i ==> then this must be axis...
elif len(last_cands) == 2:
C1 = False
C2 = False
x = last_cands[0]
y = last_cands[1]
x_i = leftside[len(leftside)-1]
x_j = rightside[0]
                # Iterate over each of the orders and check for the C1/C2 or D1/D2 conditions, then switch on these.
for o in fullorders:
#Condition D1:
if o.index(x_i) > o.index(x) and o.index(x) > o.index(y) and o.index(y) > o.index(x_j):
# The axis is the current voter restricted to the remainder
temp_order = copy.copy(o)
temp_order = remove_cands([temp_order], list(set(leftside + rightside)))[0]
temp_order.reverse() ## Note that this reversed is the "increasing order of voter j"
social_axis = leftside + temp_order + rightside
if verify_orders_single_peaked_axis_strict(social_axis, fullorders):
return social_axis
else:
return []
#Condition D2:
if o.index(x_j) > o.index(y) and o.index(y) > o.index(x) and o.index(x) > o.index(x_i):
# The axis is the current voter restricted to the remainder
temp_order = copy.copy(o)
temp_order = remove_cands([temp_order], list(set(leftside + rightside)))[0]
social_axis = leftside + temp_order + rightside
if verify_orders_single_peaked_axis_strict(social_axis, fullorders):
return social_axis
else:
return []
#Condition C1:
if o.index(x_i) > o.index(x) and o.index(x) > o.index(x_j) and o.index(x_j) > o.index(y):
C1 = True
#Condition C2:
if o.index(x_j) > o.index(x) and o.index(x) > o.index(x_i) and o.index(x_i) > o.index(y):
C2 = True
# Short Circuit if we have C1 and C2 at any point...
if C1 and C2:
return []
# Processing C1 or C2 if necessary:
if C1:
leftside.append(x)
rightside.insert(0,y)
else: # Do C2 or it doesn't matter...
leftside.append(y)
rightside.insert(0,x)
orders = remove_cands(orders, last_cands)
#Leftside + Rightside must be the social axis
social_axis = leftside+rightside
if verify_orders_single_peaked_axis_strict(social_axis, fullorders):
return social_axis
else:
return []
# Helper function to find last place candidates
def last_set(orders):
if len(orders) > 0 and len(orders[0]) > 0:
# Make and return the set of last place candidates
last_cands = set()
for i in orders:
last_cands.add(i[len(i)-1])
return(list(last_cands))
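# For example (a quick sanity check): last_set([[1, 2, 3], [3, 1, 2]]) returns
# the last-place candidates {3, 2} as a list (order not guaranteed).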
# Helper function to compute the result of removing a set of candidates from a list of orders.
def remove_cands(orders, cands_to_remove):
projection = []
for c_vote in orders:
tvote = copy.copy(c_vote)
for c_remove in cands_to_remove:
tvote.remove(c_remove)
projection.append(tvote)
return projection
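# For example (a quick sanity check, not part of the algorithm):
#   remove_cands([[1, 2, 3], [3, 2, 1]], [2])  ->  [[1, 3], [3, 1]]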
# Helper function: given cands --> rank maps, return a vector of vectors in the
# profile that are just the orders of the candidates, with index 0 == most preferred.
def order_vectors(rmaps):
orders = []
rank_to_candidate = io.rankmap_convert_rank_to_candidate(rmaps)
for c_map in rank_to_candidate:
c_vote = []
for i in sorted(c_map.keys()):
c_vote.append(c_map[i])
orders.append(c_vote)
return orders
# Verify that a profile of strict orders is single peaked w.r.t. the passed axis
def verify_orders_single_peaked_axis_strict(axis, orders):
# print("Candidate Axis: " + str(axis))
# print("Orders: " + str(orders))
if len(orders) < 1 or len(axis) != len(orders[0]):
return False
for corder in orders:
        # Peel off the top element
split = axis.index(corder[0])
# Reverse the left side and compare element by element on the restricted set.
left = axis[:split]
left.reverse()
right = axis[split:]
# print("Checking Left Side")
restricted = remove_cands([corder], list(set(axis) - set(left)))
restricted = restricted[0]
#items should match element for element...
if len(left) > 0 and not all(restricted[i] == left[i] for i in range(len(left))):
print("Axis is not compatiable with order: " + str(corder))
return False
# print("Checking Right Side")
restricted = remove_cands([corder], list(set(axis) - set(right)))
restricted = restricted[0]
#items should match element for element...
if not all(restricted[i] == right[i] for i in range(len(right))):
print("Axis is not compatiable with order: " + str(corder))
return False
return True
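# For example, with axis [1, 2, 3] the order [2, 3, 1] is single-peaked
# (preferences fall monotonically on each side of the peak, 2), while
# [1, 3, 2] is not: 3 is preferred to 2 even though 2 lies between the
# peak 1 and 3 on the axis.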
# Generate a random instance and test it for SP -- Output the axis if it is...
if __name__ == '__main__':
ncand = 3
nvoters = 100
candmap = generate_profiles.gen_cand_map(ncand)
#rmaps, rmapscounts = generate_profiles.gen_impartial_culture_strict(nvoters, cmap)
rankmaps, rankmapcounts = generate_profiles.gen_single_peaked_impartial_culture_strict(nvoters, candmap)
io.pp_profile_toscreen(candmap, rankmaps, rankmapcounts)
social_axis = is_single_peaked(rankmaps, candmap)
if social_axis != []:
print("Single Peaked w.r.t " + str(social_axis))
else:
print("Not Single Peaked")
# Test all the SOC's... for fun....
files = glob.glob("./soc/*.soc")
total = 0
totalSP = 0
for cfile in sorted(files):
print("Testing: " + str(cfile))
inf = open(cfile, "r")
candmap, rankmaps, rankmapcounts, numvoters = io.read_election_file(inf)
total += 1
social_axis = is_single_peaked(rankmaps, candmap)
if social_axis != []:
print("Single Peaked w.r.t " + str(social_axis))
totalSP += 1
else:
print("Not Single Peaked")
inf.close()
print("Parsed " + str(total) + " SOC files")
print("Exactly " + str(totalSP) + " were single peaked")
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import url_for
from freezegun import freeze_time
from CTFd.cache import clear_pages
from CTFd.utils import set_config
from CTFd.utils.config.pages import get_pages
from CTFd.utils.encoding import hexencode
from tests.helpers import (
create_ctfd,
destroy_ctfd,
gen_challenge,
gen_file,
gen_page,
login_as_user,
register_user,
)
def test_index():
"""Does the index page return a 200 by default"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get("/")
assert r.status_code == 200
destroy_ctfd(app)
def test_page():
"""Test that users can access pages that are created in the database"""
app = create_ctfd()
with app.app_context():
gen_page(
app.db, title="Title", route="this-is-a-route", content="This is some HTML"
)
with app.test_client() as client:
r = client.get("/this-is-a-route")
assert r.status_code == 200
destroy_ctfd(app)
def test_draft_pages():
"""Test that draft pages can't be seen"""
app = create_ctfd()
with app.app_context():
gen_page(
app.db,
title="Title",
route="this-is-a-route",
content="This is some HTML",
draft=True,
)
with app.test_client() as client:
r = client.get("/this-is-a-route")
assert r.status_code == 404
register_user(app)
client = login_as_user(app)
r = client.get("/this-is-a-route")
assert r.status_code == 404
destroy_ctfd(app)
def test_page_requiring_auth():
"""Test that pages properly require authentication"""
app = create_ctfd()
with app.app_context():
gen_page(
app.db,
title="Title",
route="this-is-a-route",
content="This is some HTML",
auth_required=True,
)
with app.test_client() as client:
r = client.get("/this-is-a-route")
assert r.status_code == 302
assert r.location == "http://localhost/login?next=%2Fthis-is-a-route%3F"
register_user(app)
client = login_as_user(app)
r = client.get("/this-is-a-route")
assert r.status_code == 200
destroy_ctfd(app)
def test_hidden_pages():
"""Test that hidden pages aren't on the navbar but can be loaded"""
app = create_ctfd()
with app.app_context():
page = gen_page(
app.db,
title="HiddenPageTitle",
route="this-is-a-hidden-route",
content="This is some HTML",
hidden=True,
)
clear_pages()
assert page not in get_pages()
with app.test_client() as client:
r = client.get("/")
assert r.status_code == 200
assert "HiddenPageTitle" not in r.get_data(as_text=True)
with app.test_client() as client:
r = client.get("/this-is-a-hidden-route")
assert r.status_code == 200
assert "This is some HTML" in r.get_data(as_text=True)
destroy_ctfd(app)
def test_not_found():
"""Should return a 404 for pages that are not found"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get("/this-should-404")
assert r.status_code == 404
r = client.post("/this-should-404")
assert r.status_code == 404
destroy_ctfd(app)
def test_themes_handler():
"""Test that the themes handler is working properly"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get("/themes/core/static/css/main.min.css")
assert r.status_code == 200
r = client.get("/themes/core/static/css/404_NOT_FOUND")
assert r.status_code == 404
r = client.get("/themes/core/static/%2e%2e/%2e%2e/%2e%2e/utils.py")
assert r.status_code == 404
r = client.get("/themes/core/static/%2e%2e%2f%2e%2e%2f%2e%2e%2futils.py")
assert r.status_code == 404
r = client.get("/themes/core/static/..%2f..%2f..%2futils.py")
assert r.status_code == 404
r = client.get("/themes/core/static/../../../utils.py")
assert r.status_code == 404
destroy_ctfd(app)
def test_pages_routing_and_rendering():
"""Test that pages are routing and rendering"""
app = create_ctfd()
with app.app_context():
html = """## The quick brown fox jumped over the lazy dog"""
route = "test"
title = "Test"
gen_page(app.db, title, route, html)
with app.test_client() as client:
r = client.get("/test")
output = r.get_data(as_text=True)
print(output)
assert "<h2>The quick brown fox jumped over the lazy dog</h2>" in output
destroy_ctfd(app)
def test_user_get_profile():
"""Can a registered user load their private profile (/profile)"""
app = create_ctfd()
with app.app_context():
register_user(app)
client = login_as_user(app)
r = client.get("/profile")
assert r.status_code == 200
destroy_ctfd(app)
def test_user_can_access_files():
app = create_ctfd()
with app.app_context():
from CTFd.utils.uploads import rmdir
chal = gen_challenge(app.db)
chal_id = chal.id
path = app.config.get("UPLOAD_FOLDER")
location = os.path.join(path, "test_file_path", "test.txt")
directory = os.path.dirname(location)
model_path = os.path.join("test_file_path", "test.txt")
try:
os.makedirs(directory)
with open(location, "wb") as obj:
obj.write("testing file load".encode())
gen_file(app.db, location=model_path, challenge_id=chal_id)
url = url_for("views.files", path=model_path)
# Unauthed user should be able to see challenges if challenges are public
set_config("challenge_visibility", "public")
with app.test_client() as client:
r = client.get(url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
# Unauthed user should not be able to see challenges if challenges are private
set_config("challenge_visibility", "private")
with app.test_client() as client:
r = client.get(url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Authed user should be able to see files if challenges are private
register_user(app)
client = login_as_user(app)
r = client.get(url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
with freeze_time("2017-10-5"):
# Friday, October 6, 2017 12:00:00 AM GMT-04:00 DST
set_config("start", "1507262400")
for v in ("public", "private"):
set_config("challenge_visibility", v)
# Unauthed users shouldn't be able to see files if the CTF hasn't started
client = app.test_client()
r = client.get(url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Authed users shouldn't be able to see files if the CTF hasn't started
client = login_as_user(app)
r = client.get(url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Admins should be able to see files if the CTF hasn't started
admin = login_as_user(app, "admin")
r = admin.get(url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
with freeze_time("2017-10-7"):
# Friday, October 6, 2017 12:00:00 AM GMT-04:00 DST
set_config("end", "1507262400")
for v in ("public", "private"):
set_config("challenge_visibility", v)
# Unauthed users shouldn't be able to see files if the CTF has ended
client = app.test_client()
r = client.get(url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Authed users shouldn't be able to see files if the CTF has ended
client = login_as_user(app)
r = client.get(url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Admins should be able to see files if the CTF has ended
admin = login_as_user(app, "admin")
r = admin.get(url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
finally:
rmdir(directory)
destroy_ctfd(app)
def test_user_can_access_files_with_auth_token():
app = create_ctfd()
with app.app_context():
from CTFd.utils.uploads import rmdir
chal = gen_challenge(app.db)
chal_id = chal.id
path = app.config.get("UPLOAD_FOLDER")
md5hash = hexencode(os.urandom(16))
location = os.path.join(path, md5hash, "test.txt")
directory = os.path.dirname(location)
model_path = os.path.join(md5hash, "test.txt")
try:
os.makedirs(directory)
with open(location, "wb") as obj:
obj.write("testing file load".encode())
gen_file(app.db, location=model_path, challenge_id=chal_id)
url = url_for("views.files", path=model_path)
register_user(app)
with login_as_user(app) as client:
req = client.get("/api/v1/challenges/1")
data = req.get_json()
file_url = data["data"]["files"][0]
with app.test_client() as client:
r = client.get(url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
r = client.get(
url_for(
"views.files",
path=model_path,
token="random_token_that_shouldnt_work",
)
)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
r = client.get(file_url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
# Unauthed users shouldn't be able to see files if the CTF is admins only
set_config("challenge_visibility", "admins")
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
set_config("challenge_visibility", "private")
with freeze_time("2017-10-5"):
# Friday, October 6, 2017 12:00:00 AM GMT-04:00 DST
set_config("start", "1507262400")
# Unauthed users shouldn't be able to see files if the CTF hasn't started
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
with freeze_time("2017-10-5"):
# Friday, October 6, 2017 12:00:00 AM GMT-04:00 DST
set_config("start", "1507262400")
for v in ("public", "private"):
set_config("challenge_visibility", v)
# Unauthed users shouldn't be able to see files if the CTF hasn't started
client = app.test_client()
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Authed users shouldn't be able to see files if the CTF hasn't started
client = login_as_user(app)
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Admins should be able to see files if the CTF hasn't started
admin = login_as_user(app, "admin")
r = admin.get(file_url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
with freeze_time("2017-10-7"):
# Friday, October 6, 2017 12:00:00 AM GMT-04:00 DST
set_config("end", "1507262400")
for v in ("public", "private"):
set_config("challenge_visibility", v)
# Unauthed users shouldn't be able to see files if the CTF has ended
client = app.test_client()
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Authed users shouldn't be able to see files if the CTF has ended
client = login_as_user(app)
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
# Admins should be able to see files if the CTF has ended
admin = login_as_user(app, "admin")
r = admin.get(file_url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
finally:
rmdir(directory)
destroy_ctfd(app)
def test_user_can_access_files_if_view_after_ctf():
app = create_ctfd()
with app.app_context():
from CTFd.utils.uploads import rmdir
chal = gen_challenge(app.db)
chal_id = chal.id
path = app.config.get("UPLOAD_FOLDER")
md5hash = hexencode(os.urandom(16))
location = os.path.join(path, md5hash, "test.txt")
directory = os.path.dirname(location)
model_path = os.path.join(md5hash, "test.txt")
try:
os.makedirs(directory)
with open(location, "wb") as obj:
obj.write("testing file load".encode())
gen_file(app.db, location=model_path, challenge_id=chal_id)
register_user(app)
with login_as_user(app) as client:
req = client.get("/api/v1/challenges/1")
data = req.get_json()
file_url = data["data"]["files"][0]
# After ctf end
with freeze_time("2017-10-7"):
# Friday, October 6, 2017 12:00:00 AM GMT-04:00 DST
set_config("end", "1507262400")
r = client.get(file_url)
assert r.status_code == 403
assert r.get_data(as_text=True) != "testing file load"
set_config("view_after_ctf", True)
r = client.get(file_url)
assert r.status_code == 200
assert r.get_data(as_text=True) == "testing file load"
finally:
rmdir(directory)
destroy_ctfd(app)
|
|
"""Manage Treadmill allocations.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import click
import six
from treadmill import cli
from treadmill import restclient
from treadmill import context
from treadmill import admin
_DEFAULT_PRIORITY = 1
_LOGGER = logging.getLogger(__name__)
def _display_tenant(restapi, tenant):
"""Display allocations for the given tenant."""
tenant_url = '/tenant/%s' % tenant
alloc_url = '/allocation/%s' % tenant
tenant_obj = restclient.get(restapi, tenant_url).json()
allocations_obj = restclient.get(restapi, alloc_url).json()
tenant_obj['allocations'] = allocations_obj
tenant_formatter = cli.make_formatter('tenant')
cli.out(tenant_formatter(tenant_obj))
def _check_reserve_usage(empty, memory, cpu, disk):
"""Checks params constraints for reserve verb."""
if empty:
if memory:
raise click.UsageError('Cannot combine --empty and --memory')
if cpu:
raise click.UsageError('Cannot combine --empty and --cpu')
if disk:
raise click.UsageError('Cannot combine --empty and --disk')
def _check_tenant_exists(restapi, allocation):
"""Check if tenant exist."""
tenant_url = '/tenant/{}'.format(allocation)
# Check if tenant exists.
try:
restclient.get(restapi, tenant_url).json()
except restclient.NotFoundError:
raise click.UsageError(
'Allocation not found, '
'run allocation configure {} --systems ...'.format(allocation))
def _make_allocation(restapi, allocation, env):
"""Ensure allocation exists for given environment."""
# Make sure allocation exists for given environment.
alloc_url = '/allocation/{}/{}'.format(allocation, env)
try:
restclient.post(restapi, alloc_url, payload={'environment': env})
except restclient.AlreadyExistsError:
pass
def init():
"""Return top level command handler."""
alloc_formatter = cli.make_formatter('tenant')
ctx = {}
@click.group(name='allocation')
@click.option('--api', required=False, help='API url to use.',
envvar='TREADMILL_RESTAPI')
def allocation_grp(api):
"""Manage Treadmill allocations.
        An allocation is a group of applications that share the same
        capacity. Each allocation is partitioned by environment and cell.
        Given an allocation, cell, and environment, users reserve capacity
        for their apps.

        Allocations form a hierarchy, so that when a reservation is
        underused, extra capacity is offered to sibling apps first (by
        environment), and then up the tree to applications in parent
        allocations.
"""
if api:
ctx['api'] = api
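    # A minimal CLI sketch (tenant, cell, and capacity values are
    # hypothetical, and "treadmill" is the assumed entry point; the actual
    # prefix depends on how this group is mounted). The subcommands below
    # define these options:
    #
    #   treadmill allocation configure my-tenant:my-app --systems 1234
    #   treadmill allocation reserve my-tenant:my-app --env prod --cell my-cell --memory 1G
    #   treadmill allocation assign my-tenant:my-app --env prod --cell my-cell \
    #       --pattern 'proid.my-app.*' --priority 10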
@allocation_grp.command(name='list')
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def _list():
"""List allocations."""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
response = restclient.get(restapi, '/tenant/')
cli.out(alloc_formatter(response.json()))
@allocation_grp.command()
@click.option('-s', '--systems', help='System ID', type=cli.LIST)
@click.argument('allocation', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def configure(allocation, systems):
"""Configure allocation.
Allocation name is global, and is associated with list of systems.
"""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
url = '/tenant/{}'.format(allocation)
if systems:
            # If the tenant exists, update it with the new systems. If the
            # update fails because the resource does not exist, create the
            # tenants from parent to child; those that do not exist are
            # created with the provided systems.
try:
existing = restclient.get(restapi, url).json()
all_systems = set(existing['systems'])
all_systems.update(six.moves.map(int, systems))
restclient.put(
restapi,
url,
payload={'systems': list(all_systems)}
)
except restclient.NotFoundError:
# Create parent tenants recursively.
#
# If parent does not exist, it will be created with the systems
# specified.
parts = allocation.split(':')
for idx in range(1, len(parts) + 1):
url = '/tenant/{}'.format(':'.join(parts[:idx]))
try:
existing = restclient.get(restapi, url).json()
except restclient.NotFoundError:
restclient.post(
restapi,
url,
                            payload={'systems': list(six.moves.map(int, systems))})
_display_tenant(restapi, allocation)
@allocation_grp.command()
@click.option('-e', '--env', help='Environment.', required=True)
@click.option('-c', '--cell', help='Treadmill cell', required=True)
@click.option('-p', '--partition', help='Allocation partition')
@click.option('-r', '--rank', help='Allocation rank', type=int)
@click.option('--rank-adjustment', help='Rank adjustment', type=int)
@click.option('--max-utilization', help='Maximum utilization', type=float)
@click.option('--empty', help='Make empty (zero capacity) reservation.',
is_flag=True, default=False)
@click.option('--memory', help='Memory demand.',
metavar='G|M',
callback=cli.validate_memory)
@click.option('--cpu', help='CPU demand, %.',
metavar='XX%',
callback=cli.validate_cpu)
@click.option('--disk', help='Disk demand.',
metavar='G|M',
callback=cli.validate_disk)
@click.argument('allocation', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
# pylint: disable=R0912
def reserve(allocation, env, cell, partition,
rank, rank_adjustment, max_utilization, empty,
memory, cpu, disk):
"""Reserve capacity on the cell for given environment."""
_check_reserve_usage(empty, memory, cpu, disk)
restapi = context.GLOBAL.admin_api(ctx.get('api'))
_check_tenant_exists(restapi, allocation)
_make_allocation(restapi, allocation, env)
data = {}
if empty:
data['memory'] = '0M'
data['disk'] = '0M'
data['cpu'] = '0%'
if memory:
data['memory'] = memory
if cpu:
data['cpu'] = cpu
if disk:
data['disk'] = disk
if rank is not None:
data['rank'] = rank
if rank_adjustment is not None:
data['rank_adjustment'] = rank_adjustment
if max_utilization is not None:
data['max_utilization'] = max_utilization
if partition:
data['partition'] = partition
if data:
reservation_url = '/allocation/{}/{}/reservation/{}'.format(
allocation, env, cell
)
try:
existing = restclient.get(restapi, reservation_url).json()
                # TODO: need a cleaner way of deleting attributes that are
                # not valid for update. It is a hack.
                # Iterate over a copy of the keys: deleting entries while
                # iterating a dict view raises RuntimeError on Python 3.
                for attr in list(existing.keys()):
                    if attr not in ('memory', 'cpu', 'disk', 'partition'):
                        del existing[attr]
existing.update(data)
restclient.put(restapi, reservation_url, payload=existing)
except restclient.NotFoundError:
# some attributes need default values when creating
if not partition:
data['partition'] = admin.DEFAULT_PARTITION
restclient.post(restapi, reservation_url, payload=data)
_display_tenant(restapi, allocation)
@allocation_grp.command()
@click.option('-e', '--env', help='Environment.', required=True)
@click.option('-c', '--cell', help='Treadmill cell', required=True)
@click.option('--pattern', help='Application pattern.', required=True)
@click.option('--priority', help='Assignment priority.', type=int)
@click.option('--delete', help='Delete assignment.',
is_flag=True, default=False)
@click.argument('allocation', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def assign(allocation, env, cell, pattern, priority, delete):
"""Assign application pattern:priority to the allocation.
        Application patterns must start with <PROID>. and are glob
        expressions. The environment of the proid and the one specified on
        the command line with --env must match.

        Once scheduled, the Treadmill scheduler matches applications against
        all available patterns and assigns them to reserved capacity. All
        applications assigned to a capacity are ordered by priority, from
        high to low.
"""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
_check_tenant_exists(restapi, allocation)
_make_allocation(restapi, allocation, env)
reservation_url = '/allocation/{}/{}/reservation/{}'.format(
allocation, env, cell
)
try:
restclient.get(restapi, reservation_url)
except restclient.NotFoundError:
# TODO: default partition should be resolved in API, not in CLI.
restclient.post(restapi, reservation_url,
payload={'memory': '0M',
'disk': '0M',
'cpu': '0%',
'partition': admin.DEFAULT_PARTITION})
url = '/allocation/{}/{}/assignment/{}/{}'.format(
allocation, env, cell, pattern
)
if delete:
restclient.delete(restapi, url)
else:
default_prio = None
existing = restclient.get(restapi, url).json()
for assignment in existing:
if assignment['pattern'] == pattern:
default_prio = assignment['priority']
if default_prio is None:
default_prio = _DEFAULT_PRIORITY
            # Priority 0 is a valid value, so compare against None rather
            # than relying on truthiness.
            data = {'priority': priority if priority is not None else default_prio}
restclient.put(restapi, url, payload=data)
_display_tenant(restapi, allocation)
@allocation_grp.command()
@click.argument('item', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def delete(item):
"""Delete a tenant/allocation/reservation."""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
path = item.split('/')
if len(path) == 1:
# delete a tenant
url = '/tenant/%s' % item
restclient.delete(restapi, url)
elif len(path) == 2:
# delete an allocation
url = '/allocation/%s' % item
restclient.delete(restapi, url)
elif len(path) == 3:
# delete a reservation
url = '/allocation/%s/%s/reservation/%s' % (path[0],
path[1],
path[2])
restclient.delete(restapi, url)
else:
# error
click.echo('Wrong format: %s' % item, err=True)
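    # The commands above are registered on the group by their decorators;
    # delete the local names so linters don't flag them as unused.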
del assign
del reserve
del configure
del _list
del delete
return allocation_grp
|
|
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.forms.models import ModelForm
from django.template import TemplateSyntaxError
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.encoding import smart_str
from django.utils.translation import gettext as _
from cms.api import create_page, publish_page
from cms.cms_wizards import CMSPageWizard
from cms.constants import TEMPLATE_INHERITANCE_MAGIC
from cms.forms.wizards import CreateCMSPageForm, CreateCMSSubPageForm
from cms.models import Page, PageType, UserSettings
from cms.test_utils.testcases import CMSTestCase, TransactionCMSTestCase
from cms.utils import get_current_site
from cms.utils.conf import get_cms_setting
from cms.wizards.forms import WizardStep2BaseForm, step2_form_factory
from cms.wizards.wizard_base import Wizard
from cms.wizards.wizard_pool import AlreadyRegisteredException, wizard_pool
CreateCMSPageForm = step2_form_factory(
mixin_cls=WizardStep2BaseForm,
entry_form_class=CreateCMSPageForm,
)
CreateCMSSubPageForm = step2_form_factory(
mixin_cls=WizardStep2BaseForm,
entry_form_class=CreateCMSSubPageForm,
)
class WizardForm(forms.Form):
pass
class ModelWizardForm(ModelForm):
class Meta:
model = UserSettings
exclude = []
class BadModelForm(ModelForm):
class Meta:
pass
class WizardTestMixin:
page_wizard = None
title_wizard = None
def assertSequencesEqual(self, seq_a, seq_b):
seq_a = list(seq_a)
seq_b = list(seq_b)
zipped = list(zip(seq_a, seq_b))
if len(zipped) < len(seq_a) or len(zipped) < len(seq_b):
self.fail("Sequence lengths are not the same.")
for idx, (a, b) in enumerate(zipped):
if a != b:
self.fail("Sequences differ at index {0}".format(idx))
@classmethod
def setUpClass(cls):
super(WizardTestMixin, cls).setUpClass()
# This prevents auto-discovery, which would otherwise occur as soon as
# tests start, creating unexpected starting conditions.
wizard_pool._discovered = True
class PageWizard(Wizard):
pass
# This is a basic Wizard
cls.page_wizard = PageWizard(
title=_(u"Page"),
weight=100,
form=WizardForm,
model=Page,
template_name='my_template.html', # This doesn't exist anywhere
)
class SettingsWizard(Wizard):
pass
# This is a Wizard that uses a ModelForm to define the model
cls.user_settings_wizard = SettingsWizard(
title=_(u"UserSettings"),
weight=200,
form=ModelWizardForm,
)
class TitleWizard(Wizard):
pass
# This is a bad wizard definition as it neither defines a model, nor
# uses a ModelForm that has model defined in Meta
cls.title_wizard = TitleWizard(
title=_(u"Page"),
weight=100,
form=BadModelForm,
template_name='my_template.html', # This doesn't exist anywhere
)
class TestWizardBase(WizardTestMixin, TransactionCMSTestCase):
def test_user_has_add_permission(self):
# Test does not have permission
user = self.get_staff_user_with_no_permissions()
self.assertFalse(self.page_wizard.user_has_add_permission(user))
# Test has permission
user = self.get_superuser()
self.assertTrue(self.page_wizard.user_has_add_permission(user))
def test_get_success_url(self):
user = self.get_superuser()
page = create_page(
title="Sample Page",
template=TEMPLATE_INHERITANCE_MAGIC,
language="en",
created_by=smart_str(user),
parent=None,
in_navigation=True,
published=False
)
url = "{0}?edit".format(page.get_absolute_url(language="en"))
self.assertEqual(self.page_wizard.get_success_url(
page, language="en"), url)
# Now again without a language code
url = "{0}?edit".format(page.get_absolute_url())
self.assertEqual(self.page_wizard.get_success_url(page), url)
def test_get_model(self):
self.assertEqual(self.page_wizard.get_model(), Page)
self.assertEqual(self.user_settings_wizard.get_model(), UserSettings)
with self.assertRaises(ImproperlyConfigured):
self.title_wizard.get_model()
def test_endpoint_auth_required(self):
endpoint = reverse('cms_wizard_create')
staff_active = self._create_user("staff-active", is_staff=True, is_superuser=False, is_active=True)
response = self.client.get(endpoint)
self.assertEqual(response.status_code, 403)
with self.login_user_context(staff_active):
response = self.client.get(endpoint)
self.assertEqual(response.status_code, 200)
class TestWizardPool(WizardTestMixin, CMSTestCase):
def test_discover(self):
wizard_pool._reset()
self.assertFalse(wizard_pool._discovered)
self.assertEqual(len(wizard_pool._entries), 0)
wizard_pool._discover()
self.assertTrue(wizard_pool._discovered)
def test_register_unregister_isregistered(self):
wizard_pool._clear()
self.assertEqual(len(wizard_pool._entries), 0)
wizard_pool.register(self.page_wizard)
# Now, try to register the same thing
with self.assertRaises(AlreadyRegisteredException):
wizard_pool.register(self.page_wizard)
self.assertEqual(len(wizard_pool._entries), 1)
self.assertTrue(wizard_pool.is_registered(self.page_wizard))
self.assertTrue(wizard_pool.unregister(self.page_wizard))
self.assertEqual(len(wizard_pool._entries), 0)
# Now, try to unregister something that is not registered
self.assertFalse(wizard_pool.unregister(self.user_settings_wizard))
def test_get_entry(self):
wizard_pool._clear()
wizard_pool.register(self.page_wizard)
entry = wizard_pool.get_entry(self.page_wizard)
self.assertEqual(entry, self.page_wizard)
def test_get_entries(self):
"""
Test that the registered entries are returned in weight-order, no matter
which order they were added.
"""
wizard_pool._clear()
wizard_pool.register(self.page_wizard)
wizard_pool.register(self.user_settings_wizard)
wizards = [self.page_wizard, self.user_settings_wizard]
wizards = sorted(wizards, key=lambda e: getattr(e, 'weight'))
entries = wizard_pool.get_entries()
self.assertSequencesEqual(entries, wizards)
wizard_pool._clear()
wizard_pool.register(self.user_settings_wizard)
wizard_pool.register(self.page_wizard)
wizards = [self.page_wizard, self.user_settings_wizard]
wizards = sorted(wizards, key=lambda e: getattr(e, 'weight'))
entries = wizard_pool.get_entries()
self.assertSequencesEqual(entries, wizards)
class TestPageWizard(WizardTestMixin, CMSTestCase):
def test_str(self):
page_wizard = [
entry for entry in wizard_pool.get_entries()
if isinstance(entry, CMSPageWizard)
][0]
self.assertEqual(str(page_wizard), page_wizard.title)
def test_repr(self):
page_wizard = [
entry for entry in wizard_pool.get_entries()
if isinstance(entry, CMSPageWizard)
][0]
self.assertIn("cms.cms_wizards.CMSPageWizard", repr(page_wizard))
self.assertIn("id={}".format(page_wizard.id), repr(page_wizard))
self.assertIn(hex(id(page_wizard)), repr(page_wizard))
def test_wizard_first_page_published(self):
superuser = self.get_superuser()
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
}
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
page = form.save()
self.assertTrue(page.is_published('en'))
with self.login_user_context(superuser):
url = page.get_absolute_url('en')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_wizard_create_child_page(self):
superuser = self.get_superuser()
parent_page = create_page(
title="Parent",
template=TEMPLATE_INHERITANCE_MAGIC,
language="en",
)
data = {
'title': 'Child',
'slug': 'child',
'page_type': None,
}
form = CreateCMSSubPageForm(
data=data,
wizard_page=parent_page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
child_page = form.save()
self.assertEqual(child_page.node.depth, 2)
self.assertEqual(child_page.parent_page, parent_page)
self.assertEqual(child_page.get_title('en'), 'Child')
self.assertEqual(child_page.get_path('en'), 'parent/child')
def test_wizard_create_child_page_under_page_type(self):
"""
When a user creates a child page through the wizard,
if the parent page is a page-type, the child page should
also be a page-type.
"""
site = get_current_site()
superuser = self.get_superuser()
source_page = create_page(
title="Source",
template=TEMPLATE_INHERITANCE_MAGIC,
language="en",
)
with self.login_user_context(superuser):
self.client.post(
self.get_admin_url(PageType, 'add'),
data={'source': source_page.pk, 'title': 'type1', 'slug': 'type1', '_save': 1},
)
types_root = PageType.get_root_page(site)
parent_page = types_root.get_child_pages()[0]
data = {
'title': 'page-type-child',
'slug': 'page-type-child',
'page_type': None,
}
form = CreateCMSSubPageForm(
data=data,
wizard_page=parent_page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
child_page = form.save()
self.assertTrue(child_page.is_page_type)
self.assertFalse(child_page.in_navigation)
self.assertEqual(child_page.node.depth, 3)
self.assertEqual(child_page.parent_page, parent_page)
self.assertEqual(child_page.get_title('en'), 'page-type-child')
self.assertEqual(child_page.get_path('en'), 'page_types/type1/page-type-child')
def test_wizard_create_atomic(self):
# Ref: https://github.com/divio/django-cms/issues/5652
# We'll simulate a scenario where a user creates a page with an
# invalid template which causes Django to throw an error when the
# template is scanned for placeholders and thus short circuits the
# creation mechanism.
superuser = self.get_superuser()
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
}
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertFalse(Page.objects.filter(template=TEMPLATE_INHERITANCE_MAGIC).exists())
with self.settings(CMS_TEMPLATES=[("col_invalid.html", "notvalid")]):
self.assertRaises(TemplateSyntaxError, form.save)
# The template raised an exception which should cause the database to roll back
# instead of committing a page in a partial state.
self.assertFalse(Page.objects.filter(template=TEMPLATE_INHERITANCE_MAGIC).exists())
def test_wizard_content_placeholder_setting(self):
"""
Tests that the PageWizard respects the
CMS_PAGE_WIZARD_CONTENT_PLACEHOLDER setting.
"""
templates = get_cms_setting('TEMPLATES')
# NOTE, there are 4 placeholders on this template, defined in this
# order: 'header', 'content', 'sub-content', 'footer'.
# 'footer' is a static-placeholder.
templates.append(('page_wizard.html', 'page_wizard.html', ))
settings = {
'CMS_TEMPLATES': templates,
'CMS_PAGE_WIZARD_DEFAULT_TEMPLATE': 'page_wizard.html',
'CMS_PAGE_WIZARD_CONTENT_PLACEHOLDER': 'sub-content',
}
with override_settings(**settings):
superuser = self.get_superuser()
page = create_page("wizard home", "page_wizard.html", "en")
publish_page(page, superuser, "en")
content = '<p>sub-content content.</p>'
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
'content': content,
}
form = CreateCMSPageForm(
data=data,
wizard_page=page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
page = form.save()
page.publish('en')
with self.login_user_context(superuser):
url = page.get_absolute_url('en')
expected = '<div class="sub-content">{0}</div>'.format(content)
unexpected = '<div class="content">{0}</div>'.format(content)
response = self.client.get(url)
self.assertContains(response, expected, status_code=200)
self.assertNotContains(response, unexpected, status_code=200)
def test_wizard_content_placeholder_bad_setting(self):
"""
Tests that the PageWizard won't respect a 'bad' setting such as
targeting a static-placeholder. In this case, will just fail to
add the content (without error).
"""
templates = get_cms_setting('TEMPLATES')
# NOTE, there are 4 placeholders on this template, defined in this
# order: 'header', 'content', 'sub-content', 'footer'.
# 'footer' is a static-placeholder.
templates.append(('page_wizard.html', 'page_wizard.html', ))
settings = {
'CMS_TEMPLATES': templates,
'CMS_PAGE_WIZARD_DEFAULT_TEMPLATE': 'page_wizard.html',
# This is a bad setting.
'CMS_PAGE_WIZARD_CONTENT_PLACEHOLDER': 'footer',
}
with override_settings(**settings):
superuser = self.get_superuser()
page = create_page("wizard home", "page_wizard.html", "en")
publish_page(page, superuser, "en")
content = '<p>footer content.</p>'
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
'content': content,
}
form = CreateCMSPageForm(
data=data,
wizard_page=page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
page = form.save()
page.publish('en')
with self.login_user_context(superuser):
url = page.get_absolute_url('en')
response = self.client.get(url)
self.assertNotContains(response, content, status_code=200)
def test_create_page_with_empty_fields(self):
superuser = self.get_superuser()
data = {
'title': '',
'slug': '',
'page_type': None,
}
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertFalse(form.is_valid())
def test_create_page_with_existing_slug(self):
superuser = self.get_superuser()
data = {
'title': 'page',
'slug': 'page',
'page_type': None,
}
create_page(
'page',
'nav_playground.html',
language='en',
published=True,
slug='page'
)
        # slug -> page-2
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-2'))
        # slug -> page-3
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-3'))
# Now explicitly request the page-2 slug
data['slug'] = 'page-2'
# slug -> page-2-2
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-2-2'))
# slug -> page-2-3
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-2-3'))
|
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
#from future.builtins import *
import os
import re
class Block(object):
_highest_id = 0
def __init__(self, selector, shape, parts_or_spec, is_blocking=False,
help_text="", defaults=[]):
self.shape = str(shape)
"""A string determining the kind of values the block reports.
* ``"command"`` -- Doesn't report a value. (puzzle-piece)
* ``"reporter"`` -- Reports a number. (round ends)
* ``"predicate"`` -- Reports a boolean. (pointy ends)
"""
if selector.startswith("_"):
raise ValueError("names starting with an underscore are reserved")
self.selector = str(selector)
"""Used by the block language to identify the block."""
if isinstance(parts_or_spec, list):
            self.parts = [p if isinstance(p, Input) else str(p)
                          for p in parts_or_spec]
else:
self.parts = parse_spec(parts_or_spec)
for input_, value in zip(self.inputs, defaults):
input_.default = value
self.is_blocking = bool(is_blocking)
"""True if the block language should wait for the block to return."""
self.help_text = str(help_text)
"""Text explaining the block to a Scratch user."""
self.translations = {}
@property
def inputs(self):
return [p for p in self.parts if isinstance(p, Input)]
@property
def defaults(self):
return [x.default for x in self.inputs]
@property
def spec(self):
return generate_spec(self.parts)
def __repr__(self):
return "<Block({spec})>".format(spec=repr(generate_spec(self.parts)))
def __call__(self, func):
func._block = self
Block._highest_id += 1
func._block_id = Block._highest_id
return func
class Input(object):
"""The specification for an argument to a :class:`Block`."""
DEFAULTS = {
"number": 0,
"number-menu": 0,
"readonly-menu": None, # Set in _set_menu_defaults()
"string": "",
"boolean": False,
}
def __init__(self, shape, menu=None):
self.shape = str(shape)
"""A string identifying the kind of values the input accepts.
* ``'number'`` -- number input (round ends)
* ``'string'`` -- string input (square ends)
* ``'boolean'`` -- boolean input (pointy ends)
* ``'readonly-menu'`` -- menu input
* ``'number-menu'`` -- editable number input with menu
* ``'color'`` -- color input with picker
"""
if 'menu' in shape:
assert menu, "Menu is required"
else:
assert not menu, "Menu not allowed"
self.menu = str(menu) if menu else None
"""For menu inputs: the options the drop-down menu contains.
The options come from an earlier :attr:`Extension.menu` call::
ext.add_menu("menuName", ["one", "two", "three", ...])
"""
self.default = Input.DEFAULTS.get(self.shape)
def __repr__(self):
r = "Input({}".format(repr(self.menu))
if self.menu:
r += ", menu={}".format(repr(self.menu))
return r + ")"
def __eq__(self, other):
return (isinstance(other, Input) and self.shape == other.shape
and self.menu == other.menu)
def _set_menu_defaults(self, menus):
if self.default is None:
self.default = ""
if self.shape == "readonly-menu":
try:
options = menus[self.menu]
except KeyError:
raise ValueError(
"menu not found: {}".format(repr(self.menu))
)
self.default = options[0]
INPUT_SPECS = {
"n": "number",
"s": "string",
"b": "boolean",
"m": "readonly-menu",
"d": "number-menu",
"c": "color",
}
def parse_spec(spec):
def generate_parts(spec):
        for part in re.split(r"(%[^ ](?:\.[A-Za-z]+)?)", spec):
            match = re.match(r"^%([^ ])(?:\.([A-Za-z]+))?$", part)
if match:
shape = INPUT_SPECS.get(match.group(1))
if not shape:
raise ValueError("Unknown input shape %s" % part)
part = Input(shape, match.group(2))
else:
part = str(part)
yield part
spec = str(spec)
parts = list(generate_parts(spec))
return parts
def generate_spec(block_parts):
"""A string identifying the labels and inputs to the block.
Words starting with "%" produce input slots. Supported input types are:
* ``%n`` -- number input (round ends)
* ``%s`` -- string input (square ends)
* ``%b`` -- boolean input (pointy ends)
* ``%m.menuName`` -- menu input
* ``%d.menuName`` -- editable number input with menu
The last two input slots produce a drop-down menu. The options come
from an earlier :attr:`Extension.menu` call::
ext.add_menu("menuName", ["one", "two", "three", ...])
"""
def stringify_part(part):
if isinstance(part, Input):
for s, shape in INPUT_SPECS.items():
if shape == part.shape:
break
else:
assert False
r = "%" + s
if part.menu:
r += "." + part.menu
return r
return part
spec = "".join(map(stringify_part, block_parts))
return spec
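# A quick round-trip sketch of the spec grammar (the spec and menu name are
# illustrative):
#
#   parse_spec("set motor %m.direction speed to %n")
#
# yields the parts
#
#   ['set motor ', Input('readonly-menu', menu='direction'),
#    ' speed to ', Input('number'), '']
#
# and generate_spec() on those parts reproduces the original spec string.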
def load_po_files(this_file, relative_folder=None, **language_file_paths):
translations = {}
base = ""
if this_file is not None:
base = os.path.abspath(os.path.dirname(this_file))
if relative_folder:
base = os.path.join(base, relative_folder)
for lang, path in language_file_paths.items():
path = os.path.join(base, path)
with open(path) as f:
translations[lang] = Language.from_po_file(f)
return translations
class Language(object):
def __init__(self, strings):
self._strings = strings
def __getitem__(self, key):
"""Return translation if possible, else untranslated string."""
return self._strings.get(key, key)
get = __getitem__
@classmethod
    def from_po_file(cls, fileobj):
        # PO-file parsing is not implemented yet; fail loudly rather than
        # silently returning None (the original had an unreachable raise
        # after a bare return).
        raise NotImplementedError()
def get_menus(self, menus):
translated_menus = {}
for key, options in menus.items():
translated_menus[key] = list(map(self.get, options))
return translated_menus
class Descriptor(object):
def __init__(self, name, port, blocks, menus=None, translations=None):
self.name = str(name)
"""Human-readable name of the hardware."""
self.port = int(port)
"""Port the extension runs on."""
self.blocks = list(blocks)
"""The list of blocks displayed in the interface."""
menus = menus or {}
menus = dict((str(k), list(map(str, v))) for k, v in menus.items())
self.menus = menus
"""Options for custom drop-down menus."""
translations = translations or {}
if "en" in translations:
raise ValueError("english must be default")
translations["en"] = Language({})
self.translations = translations
"""Translations for block specs and menu options."""
# Set default menu options
for block in self.blocks:
for input_ in block.inputs:
input_._set_menu_defaults(self.menus)
def __repr__(self):
return "<Descriptor(%r, %i)>" % (self.name, self.port)
|
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import base64
import os
import unittest
import zipfile
try:
from io import BytesIO
except ImportError:
from cStringIO import StringIO as BytesIO
try:
unicode
except NameError:
unicode = str
from selenium import webdriver
from selenium.webdriver.common.proxy import Proxy, ProxyType
from selenium.test.selenium.webdriver.common.webserver import SimpleWebServer
class TestFirefoxProfile:
def setup_method(self, method):
self.capabilities = {'marionette': False}
self.driver = webdriver.Firefox(capabilities=self.capabilities)
self.webserver = SimpleWebServer()
self.webserver.start()
def test_that_we_can_accept_a_profile(self):
profile1 = webdriver.FirefoxProfile()
profile1.set_preference("browser.startup.homepage_override.mstone", "")
profile1.set_preference("startup.homepage_welcome_url",
self.webserver.where_is('simpleTest.html'))
profile1.update_preferences()
profile2 = webdriver.FirefoxProfile(profile1.path)
driver = webdriver.Firefox(
capabilities=self.capabilities,
firefox_profile=profile2)
title = driver.title
driver.quit()
assert "Hello WebDriver" == title
def test_that_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference("sample.preference", "hi there")
profile.update_preferences()
assert 'hi there' == profile.default_preferences["sample.preference"]
        encoded = profile.encoded
        # base64.decodestring is deprecated (removed in Python 3.9);
        # b64decode is the equivalent, portable call.
        decoded = base64.b64decode(encoded)
        fp = BytesIO(decoded)
        zf = zipfile.ZipFile(fp, "r")
        for entry in zf.namelist():
            if entry.endswith("user.js"):
                user_js = zf.read(entry)
                for line in user_js.splitlines():
                    if line.startswith(b'user_pref("sample.preference",'):
                        assert line.endswith(b'hi there");')
                # there should be only one user.js
                break
        fp.close()
def test_that_unicode_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference('sample.preference.2', unicode('hi there'))
profile.update_preferences()
assert 'hi there' == profile.default_preferences["sample.preference.2"]
        encoded = profile.encoded
        decoded = base64.b64decode(encoded)
        fp = BytesIO(decoded)
        zf = zipfile.ZipFile(fp, "r")
        for entry in zf.namelist():
            if entry.endswith('user.js'):
                user_js = zf.read(entry)
                for line in user_js.splitlines():
                    if line.startswith(b'user_pref("sample.preference.2",'):
                        assert line.endswith(b'hi there");')
                # there should be only one user.js
                break
        fp.close()
def test_that_integer_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference("sample.int.preference", 12345)
profile.update_preferences()
assert 12345 == profile.default_preferences["sample.int.preference"]
def test_that_boolean_prefs_are_written_in_the_correct_format(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
profile = webdriver.FirefoxProfile()
profile.set_preference("sample.bool.preference", True)
profile.update_preferences()
        assert profile.default_preferences["sample.bool.preference"] is True
def test_that_we_delete_the_profile(self):
path = self.driver.firefox_profile.path
self.driver.quit()
assert not os.path.exists(path)
def test_profiles_do_not_share_preferences(self):
self.profile1 = webdriver.FirefoxProfile()
self.profile1.accept_untrusted_certs = False
self.profile2 = webdriver.FirefoxProfile()
# Default is true. Should remain so.
        assert self.profile2.default_preferences["webdriver_accept_untrusted_certs"] is True
def test_none_proxy_is_set(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
        proxy = None
        try:
            self.profile.set_proxy(proxy)
            assert False, "expected a ValueError when passing a None proxy"
        except ValueError:
            pass
        assert "network.proxy.type" not in self.profile.default_preferences
def test_unspecified_proxy_is_set(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
self.profile.set_proxy(proxy)
assert "network.proxy.type" not in self.profile.default_preferences
def test_manual_proxy_is_set_in_profile(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
proxy.no_proxy = 'localhost, foo.localhost'
proxy.http_proxy = 'some.url:1234'
proxy.ftp_proxy = None
proxy.sslProxy = 'some2.url'
self.profile.set_proxy(proxy)
assert self.profile.default_preferences["network.proxy.type"] == ProxyType.MANUAL['ff_value']
assert self.profile.default_preferences["network.proxy.no_proxies_on"] == 'localhost, foo.localhost'
assert self.profile.default_preferences["network.proxy.http"] == 'some.url'
assert self.profile.default_preferences["network.proxy.http_port"] == 1234
assert self.profile.default_preferences["network.proxy.ssl"] == 'some2.url'
assert "network.proxy.ssl_port" not in self.profile.default_preferences
assert "network.proxy.ftp" not in self.profile.default_preferences
def test_pac_proxy_is_set_in_profile(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
proxy.proxy_autoconfig_url = 'http://some.url:12345/path'
self.profile.set_proxy(proxy)
assert self.profile.default_preferences["network.proxy.type"] == ProxyType.PAC['ff_value']
assert self.profile.default_preferences["network.proxy.autoconfig_url"] == 'http://some.url:12345/path'
def test_autodetect_proxy_is_set_in_profile(self):
        # The setup gave us a browser but we don't need it
self.driver.quit()
self.profile = webdriver.FirefoxProfile()
proxy = Proxy()
proxy.auto_detect = True
self.profile.set_proxy(proxy)
assert self.profile.default_preferences["network.proxy.type"] == ProxyType.AUTODETECT['ff_value']
def teardown_method(self, method):
try:
self.driver.quit()
except:
pass #don't care since we may have killed the browser above
self.webserver.stop()
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
def teardown_module(module):
try:
TestFirefoxProfile.driver.quit()
except:
pass #Don't Care since we may have killed the browser above
|
|
#!/usr/bin/env python
__author__ = 'Stephen P. Henrie'
from os import path
from StringIO import StringIO
from ndg.xacml.parsers.etree.factory import ReaderFactory
from ndg.xacml.core import Identifiers, XACML_1_0_PREFIX
from ndg.xacml.core.attribute import Attribute
from ndg.xacml.core.attributevalue import AttributeValue, AttributeValueClassFactory
from ndg.xacml.core.functions import functionMap
from ndg.xacml.core.context.request import Request
from ndg.xacml.core.context.subject import Subject
from ndg.xacml.core.context.resource import Resource
from ndg.xacml.core.context.action import Action
from ndg.xacml.core.context.environment import Environment
from ndg.xacml.core.context.pdp import PDP
from ndg.xacml.core.context.result import Decision
from pyon.core import (MSG_HEADER_ACTOR, MSG_HEADER_ROLES, MSG_HEADER_OP, MSG_HEADER_FORMAT, MSG_HEADER_USER_CONTEXT_ID,
PROCTYPE_AGENT, PROCTYPE_SERVICE)
from pyon.core.exception import NotFound
from pyon.core.governance import SUPERUSER_ROLE, ANONYMOUS_ACTOR, DECORATOR_OP_VERB
from pyon.core.governance.governance_dispatcher import GovernanceDispatcher
from pyon.core.registry import is_ion_object, message_classes, get_class_decorator_value
from pyon.util.log import log
COMMON_SERVICE_POLICY_RULES = 'common_service_policy_rules'
ROLE_ATTRIBUTE_ID = XACML_1_0_PREFIX + 'subject:subject-role-id'
SENDER_ID = XACML_1_0_PREFIX + 'subject:subject-sender-id'
USER_CONTEXT_ID = XACML_1_0_PREFIX + 'subject:user-context-id'
USER_CONTEXT_DIFFERS = XACML_1_0_PREFIX + 'subject:user-context-differs'
RECEIVER_TYPE = XACML_1_0_PREFIX + 'resource:receiver-type'
ACTION_VERB = XACML_1_0_PREFIX + 'action:action-verb'
ACTION_PARAMETERS = XACML_1_0_PREFIX + 'action:param-dict'
DICT_TYPE_URI = AttributeValue.IDENTIFIER_PREFIX + 'dict'
OBJECT_TYPE_URI = AttributeValue.IDENTIFIER_PREFIX + 'object'
# XACML DATATYPES
attributeValueFactory = AttributeValueClassFactory()
StringAttributeValue = attributeValueFactory(AttributeValue.STRING_TYPE_URI)
IntAttributeValue = attributeValueFactory(AttributeValue.INTEGER_TYPE_URI)
DoubleAttributeValue = attributeValueFactory(AttributeValue.DOUBLE_TYPE_URI)
BooleanAttributeValue = attributeValueFactory(AttributeValue.BOOLEAN_TYPE_URI)
# Policy templates
DEFAULT_POLICY_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<Policy xmlns="urn:oasis:names:tc:xacml:2.0:policy:schema:os"
xmlns:xacml-context="urn:oasis:names:tc:xacml:2.0:context:schema:os"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="urn:oasis:names:tc:xacml:2.0:policy:schema:os http://docs.oasis-open.org/xacml/access_control-xacml-2.0-policy-schema-os.xsd"
xmlns:xf="http://www.w3.org/TR/2002/WD-xquery-operators-20020816/#"
xmlns:md="http:www.med.example.com/schemas/record.xsd"
PolicyId="%s"
RuleCombiningAlgId="%s">
<PolicyDefaults>
<XPathVersion>http://www.w3.org/TR/1999/Rec-xpath-19991116</XPathVersion>
</PolicyDefaults>
%s
</Policy>'''
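# The three %s placeholders above are filled, in order, with the policy id,
# the rule-combining algorithm URI, and the rule XML (see
# create_policy_from_rules below).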
POLICY_RULE_CA_PERMIT_OVERRIDES = "urn:oasis:names:tc:xacml:1.0:rule-combining-algorithm:permit-overrides"
POLICY_RULE_CA_DENY_OVERRIDES = "urn:oasis:names:tc:xacml:1.0:rule-combining-algorithm:deny-overrides"
POLICY_RULE_CA_FIRST_APPLICABLE = "urn:oasis:names:tc:xacml:1.0:rule-combining-algorithm:first-applicable"
EMPTY_POLICY_ID = "urn:oasis:names:tc:xacml:2.0:example:policyid:empty_policy_set"
class PolicyDecisionPointManager(object):
def __init__(self, governance_controller):
self.resource_policy_decision_point = {}
self.service_policy_decision_point = {}
self.empty_pdp = self.get_empty_pdp()
self.set_common_service_policy_rules([])
self.governance_controller = governance_controller
# Create and register an Attribute Value derived class to handle a dict type used for the messages
_className = 'Dict' + AttributeValue.CLASS_NAME_SUFFIX
_classVars = {'TYPE': dict, 'IDENTIFIER': DICT_TYPE_URI}
_attributeValueClass = type(_className, (AttributeValue, ), _classVars)
AttributeValue.register(_attributeValueClass)
attributeValueFactory.addClass(DICT_TYPE_URI, _attributeValueClass)
self.DictAttributeValue = attributeValueFactory(DICT_TYPE_URI)
# Create and register an Attribute Value derived class to handle any object
_className = 'Object' + AttributeValue.CLASS_NAME_SUFFIX
_classVars = {'TYPE': object, 'IDENTIFIER': OBJECT_TYPE_URI}
_attributeValueClass = type(_className, (AttributeValue, ), _classVars)
AttributeValue.register(_attributeValueClass)
attributeValueFactory.addClass(OBJECT_TYPE_URI, _attributeValueClass)
self.ObjectAttributeValue = attributeValueFactory(OBJECT_TYPE_URI)
# Create and add new function for evaluating functions that take the message as a dict
from pyon.core.governance.policy.evaluate import EvaluateCode, EvaluateFunction
functionMap['urn:oasis:names:tc:xacml:1.0:function:evaluate-code'] = EvaluateCode
functionMap['urn:oasis:names:tc:xacml:1.0:function:evaluate-function'] = EvaluateFunction
def _get_default_policy_template(self):
return DEFAULT_POLICY_TEMPLATE
def create_policy_from_rules(self, policy_identifier, rules):
policy = self._get_default_policy_template()
policy_rules = policy % (policy_identifier, POLICY_RULE_CA_FIRST_APPLICABLE, rules)
return policy_rules
def create_resource_policy_from_rules(self, policy_identifier, rules):
policy = self._get_default_policy_template()
policy_rules = policy % (policy_identifier, POLICY_RULE_CA_FIRST_APPLICABLE, rules)
return policy_rules
def get_empty_pdp(self):
policy_set = self.create_policy_from_rules(EMPTY_POLICY_ID, "")
input_source = StringIO(policy_set)
pdp = PDP.fromPolicySource(input_source, ReaderFactory)
return pdp
def get_service_pdp(self, service_name):
"""Return a compiled policy indexed by the specified service name, or default (empty)"""
if service_name in self.service_policy_decision_point:
return self.service_policy_decision_point[service_name]
return self.load_common_service_pdp
def get_resource_pdp(self, resource_key):
"""Return a compiled policy indexed by the specified resource key, or default (empty)"""
if resource_key in self.resource_policy_decision_point:
return self.resource_policy_decision_point[resource_key]
return self.empty_pdp
# --- Policy rule management
def set_common_service_policy_rules(self, policy_list):
rules_text = self._get_rules_text(policy_list)
self.common_service_rules = rules_text
input_source = StringIO(self.create_policy_from_rules(COMMON_SERVICE_POLICY_RULES, rules_text))
self.load_common_service_pdp = PDP.fromPolicySource(input_source, ReaderFactory)
def list_service_policies(self):
return self.service_policy_decision_point.keys()
def has_service_policy(self, service_name):
return service_name in self.service_policy_decision_point
def set_service_policy_rules(self, service_name, policy_list):
log.debug("Loading policies for service: %s" % service_name)
self.clear_service_policy(service_name)
if not policy_list:
self.service_policy_decision_point[service_name] = None
return
# Create a new PDP object for the service
rules_text = self._get_rules_text(policy_list)
input_source = StringIO(self.create_policy_from_rules(service_name, rules_text))
self.service_policy_decision_point[service_name] = PDP.fromPolicySource(input_source, ReaderFactory)
def clear_service_policy(self, service_name):
self.service_policy_decision_point.pop(service_name, None)
def list_resource_policies(self):
return self.resource_policy_decision_point.keys()
def has_resource_policy(self, resource_key):
return resource_key in self.resource_policy_decision_point
def set_resource_policy_rules(self, resource_key, policy_list):
log.debug("Loading policies for resource: %s" % resource_key)
self.clear_resource_policy(resource_key)
if not policy_list:
self.resource_policy_decision_point[resource_key] = None
return
# Create a new PDP object for the resource
rules_text = self._get_rules_text(policy_list)
input_source = StringIO(self.create_resource_policy_from_rules(resource_key, rules_text))
self.resource_policy_decision_point[resource_key] = PDP.fromPolicySource(input_source, ReaderFactory)
def clear_resource_policy(self, resource_key):
self.resource_policy_decision_point.pop(resource_key, None)
def clear_policy_cache(self):
"""Remove all policies"""
self.resource_policy_decision_point.clear()
self.service_policy_decision_point.clear()
self.set_common_service_policy_rules([])
def _get_rules_text(self, policy_list):
if isinstance(policy_list, basestring):
rules_text = policy_list
else:
rules_text = "\n".join(p.definition for p in policy_list)
return rules_text
# --- Policy evaluation
def create_attribute(self, attrib_class, attrib_id, val):
attribute = Attribute()
attribute.attributeId = attrib_id
attribute.dataType = attrib_class.IDENTIFIER
attribute.attributeValues.append(attrib_class())
attribute.attributeValues[-1].value = val
return attribute
def create_string_attribute(self, attrib_id, val):
return self.create_attribute(StringAttributeValue, attrib_id, val)
def create_int_attribute(self, attrib_id, val):
return self.create_attribute(IntAttributeValue, attrib_id, val)
def create_double_attribute(self, attrib_id, val):
return self.create_attribute(DoubleAttributeValue, attrib_id, val)
def create_boolean_attribute(self, attrib_id, val):
return self.create_attribute(BooleanAttributeValue, attrib_id, val)
def create_dict_attribute(self, attrib_id, val):
return self.create_attribute(self.DictAttributeValue, attrib_id, val)
def create_object_attribute(self, attrib_id, val):
return self.create_attribute(self.ObjectAttributeValue, attrib_id, val)
def create_org_role_attribute(self, actor_roles, subject):
attribute = None
for role in actor_roles:
if attribute is None:
attribute = self.create_string_attribute(ROLE_ATTRIBUTE_ID, role)
else:
attribute.attributeValues.append(StringAttributeValue())
attribute.attributeValues[-1].value = role
if attribute is not None:
subject.attributes.append(attribute)
def _create_request_from_message(self, invocation, receiver, receiver_type=PROCTYPE_SERVICE):
sender, sender_type = invocation.get_message_sender()
op = invocation.get_header_value(MSG_HEADER_OP, 'Unknown')
actor_id = invocation.get_header_value(MSG_HEADER_ACTOR, ANONYMOUS_ACTOR)
user_context_id = invocation.get_header_value(MSG_HEADER_USER_CONTEXT_ID, "")
user_context_differs = bool(actor_id and actor_id != ANONYMOUS_ACTOR and user_context_id and actor_id != user_context_id)
actor_roles = invocation.get_header_value(MSG_HEADER_ROLES, {})
message_format = invocation.get_header_value(MSG_HEADER_FORMAT, '')
# if receiver == "agpro_exchange":
# print "### POLICY DECISION rty=%s recv=%s actor=%s context=%s differ:%s" % (receiver_type, receiver, actor_id, user_context_id, user_context_differs)
# print " Headers: %s" % invocation.headers
#log.debug("Checking XACML Request: receiver_type: %s, sender: %s, receiver:%s, op:%s, ion_actor_id:%s, ion_actor_roles:%s", receiver_type, sender, receiver, op, ion_actor_id, actor_roles)
request = Request()
subject = Subject()
subject.attributes.append(self.create_string_attribute(SENDER_ID, sender))
subject.attributes.append(self.create_string_attribute(Identifiers.Subject.SUBJECT_ID, actor_id))
subject.attributes.append(self.create_string_attribute(USER_CONTEXT_ID, user_context_id))
subject.attributes.append(self.create_string_attribute(USER_CONTEXT_DIFFERS, str(user_context_differs)))
# Get the Org name associated with the endpoint process
endpoint_process = invocation.get_arg_value('process', None)
if endpoint_process is not None and hasattr(endpoint_process, 'org_governance_name'):
org_governance_name = endpoint_process.org_governance_name
else:
org_governance_name = self.governance_controller.system_root_org_name
# If this process is not associated with the root Org, then iterate over the roles associated
# with the user only for the Org that this process is associated with otherwise include all roles
# and create attributes for each
if org_governance_name == self.governance_controller.system_root_org_name:
#log.debug("Including roles for all Orgs")
            # If the process Org name is the same as the System Root Org, include all roles to be safe
for org in actor_roles:
self.create_org_role_attribute(actor_roles[org], subject)
else:
if org_governance_name in actor_roles:
log.debug("Org Roles (%s): %s", org_governance_name, ' '.join(actor_roles[org_governance_name]))
self.create_org_role_attribute(actor_roles[org_governance_name], subject)
# Handle the special case for the ION system actor
if self.governance_controller.system_root_org_name in actor_roles:
if SUPERUSER_ROLE in actor_roles[self.governance_controller.system_root_org_name]:
log.debug("Including SUPERUSER role")
self.create_org_role_attribute([SUPERUSER_ROLE], subject)
request.subjects.append(subject)
resource = Resource()
resource.attributes.append(self.create_string_attribute(Identifiers.Resource.RESOURCE_ID, receiver))
resource.attributes.append(self.create_string_attribute(RECEIVER_TYPE, receiver_type))
request.resources.append(resource)
request.action = Action()
request.action.attributes.append(self.create_string_attribute(Identifiers.Action.ACTION_ID, op))
        # Check to see if there is an OperationVerb decorator specifying a verb used with policy
if is_ion_object(message_format):
try:
msg_class = message_classes[message_format]
operation_verb = get_class_decorator_value(msg_class, DECORATOR_OP_VERB)
if operation_verb is not None:
request.action.attributes.append(self.create_string_attribute(ACTION_VERB, operation_verb))
except NotFound:
pass
# Create generic attributes for each of the primitive message parameter types to be available in XACML rules
# and evaluation functions
parameter_dict = {'message': invocation.message,
'headers': invocation.headers,
'annotations': invocation.message_annotations}
if endpoint_process is not None:
parameter_dict['process'] = endpoint_process
request.action.attributes.append(self.create_dict_attribute(ACTION_PARAMETERS, parameter_dict))
return request
def check_agent_request_policies(self, invocation):
process = invocation.get_arg_value('process')
if not process:
raise NotFound('Cannot find process in message')
decision = self.check_resource_request_policies(invocation, process.resource_id)
log.debug("Resource policy Decision: %s", decision)
# TODO: check if its OK to treat everything but Deny as Permit (Ex: NotApplicable)
# Return if agent service policies deny the operation
if decision == Decision.DENY:
return decision
# Else check any policies that might be associated with the resource.
decision = self._check_service_request_policies(invocation, PROCTYPE_AGENT)
return decision
def check_service_request_policies(self, invocation):
decision = self._check_service_request_policies(invocation, PROCTYPE_SERVICE)
return decision
def _check_service_request_policies(self, invocation, receiver_type):
receiver = invocation.get_message_receiver() # The name of service or resource type
if not receiver:
raise NotFound('No receiver for this message')
requestCtx = self._create_request_from_message(invocation, receiver, receiver_type)
pdp = self.get_service_pdp(receiver)
if pdp is None:
return Decision.NOT_APPLICABLE
return self._evaluate_pdp(invocation, pdp, requestCtx)
def check_resource_request_policies(self, invocation, resource_id):
if not resource_id:
raise NotFound('The resource_id is not set')
requestCtx = self._create_request_from_message(invocation, resource_id, 'resource')
pdp = self.get_resource_pdp(resource_id)
if pdp is None:
return Decision.NOT_APPLICABLE
return self._evaluate_pdp(invocation, pdp, requestCtx)
def _evaluate_pdp(self, invocation, pdp, requestCtx):
try:
response = pdp.evaluate(requestCtx)
except Exception as e:
log.error("Error evaluating policies: %s" % e.message)
return Decision.NOT_APPLICABLE
if response is None:
log.warn("response from PDP contains nothing, so not authorized")
return Decision.DENY
if GovernanceDispatcher.POLICY__STATUS_REASON_ANNOTATION in invocation.message_annotations:
return Decision.DENY
for result in response.results:
if result.decision == Decision.DENY:
break
return result.decision
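# A minimal, standalone sketch of the deny-overrides combination performed in
# _evaluate_pdp above. The string constants here are stand-ins for the real
# Decision values; only the combining logic is illustrated. Unlike the loop
# above, it also handles an empty result list by returning NotApplicable.
def _combine_decisions_sketch(decisions, deny='Deny', not_applicable='NotApplicable'):
    """Return deny as soon as any result denies, else the last decision seen."""
    final = not_applicable
    for decision in decisions:
        final = decision
        if decision == deny:
            break
    return final
# _combine_decisions_sketch(['Permit', 'Deny', 'Permit'])  -> 'Deny'
# _combine_decisions_sketch(['Permit', 'Permit'])          -> 'Permit'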
|
|
from numpy import array, mean, median, std, size, arange, percentile,\
asarray, zeros, corrcoef, where, unique, array_equal, delete, \
ravel, logical_not, unravel_index, prod, random, shape, \
dot, outer, expand_dims, ScalarType, ndarray, sqrt, pi, angle, fft, \
roll, polyfit, polyval, ceil, float64, fix
import logging
from itertools import product
from bolt.utils import tupleize
from six import string_types
from ..utils import check_options
from ..base import Data
class Series(Data):
"""
Collection of indexed 1d array data.
Backed by an array-like object, including a numpy array
(for local computation) or a bolt array (for spark computation).
Attributes
----------
values : array-like
numpy array or bolt array
index : array-like or one-dimensional list
Values must be unique, same length as the arrays in the input data.
Defaults to arange(len(data)) if not provided.
labels : array-like
A set of labels, one per series record.
"""
_metadata = Data._metadata
_attributes = Data._attributes + ['index']
def __init__(self, values, index=None, labels=None, mode='local'):
super(Series, self).__init__(values, mode=mode)
self.labels = labels
self._index = None
if index is not None:
self._index = index
@property
def index(self):
if self._index is None:
self._index = arange(self.shape[-1])
return self._index
@index.setter
def index(self, value):
lenself = len(self.index)
if type(value) is str:
value = [value]
try:
value[0]
except:
value = [value]
try:
lenvalue = len(value)
except:
raise TypeError('Index must be an object with a length')
if lenvalue != lenself:
raise ValueError("Length of new index '%g' must match length of original index '%g'"
.format(lenvalue, lenself))
self._index = value
@property
def length(self):
return len(self.index)
@property
def baseaxes(self):
return tuple(range(0, len(self.shape)-1))
@property
def _constructor(self):
return Series
def flatten(self):
"""
Reshape all dimensions but the last into a single dimension
"""
size = prod([s for i, s in enumerate(self.shape) if i in self.baseaxes])
newvalues = self.values.reshape(size, self.shape[-1])
if self.labels is not None:
fullshape = prod(self.labels.shape)
newlabels = self.labels.reshape(size, fullshape // size).squeeze()
else:
newlabels = None
return self._constructor(newvalues, labels=newlabels).__finalize__(self, noprop=('labels',))
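# Illustrative shapes for flatten (a sketch, assuming a local numpy-backed
# Series): values of shape (2, 3, 4) become (6, 4); every base axis is
# collapsed while the trailing index axis is preserved, e.g.
#
#     from numpy import ones
#     Series(ones((2, 3, 4))).flatten().shape   # -> (6, 4)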
def count(self):
"""
Count the number of records.
For lazy or distributed data, will force a computation.
"""
if self.mode == 'local':
return prod(self.shape[:-1])
if self.mode == 'spark':
return self.tordd().count()
def first(self):
"""
Return the first element.
"""
if self.mode == 'local':
return self.values[(0,) * len(self.baseaxes) + (slice(None, None),)]
if self.mode == 'spark':
return self.values.first().toarray()
def tolocal(self):
"""
Convert to local mode.
"""
from thunder.series.readers import fromarray
if self.mode == 'local':
logging.getLogger('thunder').warn('series already in local mode')
pass
return fromarray(self.toarray(), index=self.index, labels=self.labels)
def tospark(self, engine=None):
"""
Convert to spark mode.
"""
from thunder.series.readers import fromarray
if self.mode == 'spark':
logging.getLogger('thunder').warn('series already in spark mode')
pass
if engine is None:
raise ValueError('Must provide SparkContext')
return fromarray(self.toarray(), index=self.index, labels=self.labels, engine=engine)
def sample(self, n=100, seed=None):
"""
Extract random sample of records.
Parameters
----------
n : int, optional, default = 100
The number of data points to sample.
seed : int, optional, default = None
Random seed.
"""
if n < 1:
raise ValueError("Number of samples must be larger than 0, got '%g'" % n)
if seed is None:
seed = random.randint(0, 2 ** 32)
if self.mode == 'spark':
result = asarray(self.values.tordd().values().takeSample(False, n, seed))
else:
basedims = [self.shape[d] for d in self.baseaxes]
inds = [unravel_index(int(k), basedims) for k in random.rand(n) * prod(basedims)]
result = asarray([self.values[tupleize(i) + (slice(None, None),)] for i in inds])
return self._constructor(result, index=self.index)
def map(self, func, index=None, dtype=None, with_keys=False):
"""
Map an array -> array function over each record.
Parameters
----------
func : function
A function of a single record.
index : array-like, optional, default = None
If known, the index to be used following function evaluation.
dtype : numpy.dtype, optional, default = None
If known, the type of the data following function evaluation.
with_keys : boolean, optional, default = False
If true, function should be of both tuple indices and series values.
"""
value_shape = len(index) if index is not None else None
new = self._map(func, axis=self.baseaxes, value_shape=value_shape, dtype=dtype, with_keys=with_keys)
return self._constructor(new.values, index=index, labels=self.labels)
def reduce(self, func):
"""
Reduce a function over records.
Parameters
----------
func : function
A function of two records.
"""
return self._reduce(func, axis=self.baseaxes)
def mean(self):
"""
Compute the mean across records
"""
return self._constructor(self.values.mean(axis=self.baseaxes, keepdims=True))
def var(self):
"""
Compute the variance across records
"""
return self._constructor(self.values.var(axis=self.baseaxes, keepdims=True))
def std(self):
"""
Compute the standard deviation across records.
"""
return self._constructor(self.values.std(axis=self.baseaxes, keepdims=True))
def sum(self):
"""
Compute the sum across records.
"""
return self._constructor(self.values.sum(axis=self.baseaxes, keepdims=True))
def max(self):
"""
Compute the max across records.
"""
return self._constructor(self.values.max(axis=self.baseaxes, keepdims=True))
def min(self):
"""
Compute the min across records.
"""
return self._constructor(self.values.min(axis=self.baseaxes, keepdims=True))
def between(self, left, right):
"""
Select subset of values within the given index range.
Inclusive on the left; exclusive on the right.
Parameters
----------
left : int
Left-most index in the desired range.
right: int
Right-most index in the desired range.
"""
crit = lambda x: left <= x < right
return self.select(crit)
def select(self, crit):
"""
Select subset of values that match a given index criterion.
Parameters
----------
crit : function, list, str, int
Criterion function to map to indices, specific index value,
or list of indices.
"""
import types
# handle lists, strings, and ints
if not isinstance(crit, types.FunctionType):
# set("foo") -> {"f", "o"}; wrap in list to prevent:
if isinstance(crit, string_types):
critlist = set([crit])
else:
try:
critlist = set(crit)
except TypeError:
# typically means crit is not an iterable type; for instance, crit is an int
critlist = set([crit])
crit = lambda x: x in critlist
# if only one index, return it directly or throw an error
index = self.index
if size(index) == 1:
if crit(index[0]):
return self
else:
raise Exception('No indices found matching criterion')
# determine new index and check the result
newindex = [i for i in index if crit(i)]
if len(newindex) == 0:
raise Exception('No indices found matching criterion')
if array(newindex == index).all():
return self
# use fast logical indexing to get the new values
subinds = where([crit(i) for i in index])
new = self.map(lambda x: x[subinds], index=newindex)
# if singleton, need to check whether it's an array or a scalar/int
# if array, recompute a new set of indices
if len(newindex) == 1:
new = new.map(lambda x: x[0], index=newindex)
val = new.first()
if size(val) == 1:
newindex = [newindex[0]]
else:
newindex = arange(0, size(val))
new._index = newindex
return new
def center(self, axis=1):
"""
Subtract the mean either within or across records.
Parameters
----------
axis : int, optional, default = 1
Which axis to center along, within (1) or across (0) records.
"""
if axis == 1:
return self.map(lambda x: x - mean(x))
elif axis == 0:
meanval = self.mean().toarray()
return self.map(lambda x: x - meanval)
else:
raise Exception('Axis must be 0 or 1')
def standardize(self, axis=1):
"""
Divide by standard deviation either within or across records.
Parameters
----------
axis : int, optional, default = 1
Which axis to standardize along, within (1) or across (0) records
"""
if axis == 1:
return self.map(lambda x: x / std(x))
elif axis == 0:
stdval = self.std().toarray()
return self.map(lambda x: x / stdval)
else:
raise Exception('Axis must be 0 or 1')
def zscore(self, axis=1):
"""
Subtract the mean and divide by standard deviation within or across records.
Parameters
----------
axis : int, optional, default = 1
Which axis to zscore along, within (1) or across (0) records
"""
if axis == 1:
return self.map(lambda x: (x - mean(x)) / std(x))
elif axis == 0:
meanval = self.mean().toarray()
stdval = self.std().toarray()
return self.map(lambda x: (x - meanval) / stdval)
else:
raise Exception('Axis must be 0 or 1')
def squelch(self, threshold):
"""
Set all records that do not exceed the given threshold to 0.
Parameters
----------
threshold : scalar
Level below which to set records to zero
"""
func = lambda x: zeros(x.shape) if max(x) < threshold else x
return self.map(func)
def correlate(self, signal):
"""
Correlate records against one or many one-dimensional arrays.
Parameters
----------
signal : array-like
One or more signals to correlate against.
"""
s = asarray(signal)
if s.ndim == 1:
if size(s) != self.shape[-1]:
raise ValueError("Length of signal '%g' does not match record length '%g'"
% (size(s), self.shape[-1]))
return self.map(lambda x: corrcoef(x, s)[0, 1], index=[1])
elif s.ndim == 2:
if s.shape[1] != self.shape[-1]:
raise ValueError("Length of signal '%g' does not match record length '%g'"
% (s.shape[1], self.shape[-1]))
newindex = arange(0, s.shape[0])
return self.map(lambda x: array([corrcoef(x, y)[0, 1] for y in s]), index=newindex)
else:
raise Exception('Signal to correlate with must have 1 or 2 dimensions')
def _check_panel(self, length):
"""
Check that given fixed panel length evenly divides index.
Parameters
----------
length : int
Fixed length with which to subdivide index
"""
n = len(self.index)
if divmod(n, length)[1] != 0:
raise ValueError("Panel length '%g' must evenly divide length of series '%g'"
% (length, n))
if n == length:
raise ValueError("Panel length '%g' cannot be length of series '%g'"
% (length, n))
def mean_by_panel(self, length):
"""
Compute the mean across fixed sized panels of each record.
Splits each record into panels of size `length`,
and then computes the mean across panels.
Panel length must subdivide record exactly.
Parameters
----------
length : int
Fixed length with which to subdivide.
"""
self._check_panel(length)
func = lambda v: v.reshape(-1, length).mean(axis=0)
newindex = arange(length)
return self.map(func, index=newindex)
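# Worked example for mean_by_panel (a sketch): a record [1, 2, 3, 4, 5, 6]
# with length=3 is reshaped to [[1, 2, 3], [4, 5, 6]] and averaged across
# panels, yielding [2.5, 3.5, 4.5], e.g.
#
#     from numpy import array
#     Series(array([[1., 2., 3., 4., 5., 6.]])).mean_by_panel(3).toarray()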
def _makemasks(self, index=None, level=0):
"""
Internal function for generating masks for selecting values based on multi-index values.
As all other multi-index functions will call this function, basic type-checking is also
performed at this stage.
"""
if index is None:
index = self.index
try:
dims = len(array(index).shape)
if dims == 1:
index = array(index, ndmin=2).T
except:
raise TypeError('A multi-index must be convertible to a numpy ndarray')
try:
index = index[:, level]
except:
raise ValueError("Levels must be indices into individual elements of the index")
lenIdx = index.shape[0]
nlevels = index.shape[1]
combs = product(*[unique(index.T[i, :]) for i in range(nlevels)])
combs = array([l for l in combs])
masks = array([[array_equal(index[i], c) for i in range(lenIdx)] for c in combs])
return zip(*[(masks[x], combs[x]) for x in range(len(masks)) if masks[x].any()])
def _map_by_index(self, function, level=0):
"""
An internal function for mapping a function to groups of values based on a multi-index.
Elements of each record are grouped according to unique value combinations of the multi-
index across the given levels of the multi-index. Then the given function is applied
to each of these groups separately. If this function is many-to-one, the result
can be recast as a Series indexed by the unique index values used for grouping.
"""
if type(level) is int:
level = [level]
masks, ind = self._makemasks(index=self.index, level=level)
nMasks = len(masks)
newindex = array(ind)
if len(newindex[0]) == 1:
newindex = ravel(newindex)
return self.map(lambda v: asarray([array(function(v[masks[x]])) for x in range(nMasks)]),
index=newindex)
def select_by_index(self, val, level=0, squeeze=False, filter=False, return_mask=False):
"""
Select or filter elements of the Series by index values (across levels, if multi-index).
The index is a property of a Series object that assigns a value to each position within
the arrays stored in the records of the Series. This function returns a new Series where,
within each record, only the elements indexed by a given value(s) are retained. An index
where each value is a list of a fixed length is referred to as a 'multi-index',
as it provides multiple labels for each index location. Each of the dimensions in these
sublists is a 'level' of the multi-index. If the index of the Series is a multi-index, then
the selection can proceed by first selecting one or more levels, and then selecting one
or more values at each level.
Parameters
----------
val : list of lists
Specifies the selected index values. List must contain one list for each level of the
multi-index used in the selection. For any singleton lists, the list may be replaced
with just the integer.
level : list of ints, optional, default=0
Specifies which levels in the multi-index to use when performing selection. If a single
level is selected, the list can be replaced with an integer. Must be the same length
as val.
squeeze : bool, optional, default=False
If True, the multi-index of the resulting Series will drop any levels that contain
only a single value because of the selection. Useful if indices are used as unique
identifiers.
filter : bool, optional, default=False
If True, selection process is reversed and all index values EXCEPT those specified
are selected.
return_mask : bool, optional, default=False
If True, return the mask used to implement the selection.
"""
try:
level[0]
except:
level = [level]
try:
val[0]
except:
val = [val]
remove = []
if len(level) == 1:
try:
val[0][0]
except:
val = [val]
if squeeze and not filter and len(val) == 1:
remove.append(level[0])
else:
for i in range(len(val)):
try:
val[i][0]
except:
val[i] = [val[i]]
if squeeze and not filter and len(val[i]) == 1:
remove.append(level[i])
if len(level) != len(val):
raise ValueError("List of levels must be same length as list of corresponding values")
p = product(*val)
selected = set([x for x in p])
masks, ind = self._makemasks(index=self.index, level=level)
nmasks = len(masks)
masks = array([masks[x] for x in range(nmasks) if tuple(ind[x]) in selected])
final_mask = masks.any(axis=0)
if filter:
final_mask = logical_not(final_mask)
indFinal = array(self.index)
if len(indFinal.shape) == 1:
indFinal = array(indFinal, ndmin=2).T
indFinal = indFinal[final_mask]
if squeeze:
indFinal = delete(indFinal, remove, axis=1)
if len(indFinal[0]) == 1:
indFinal = ravel(indFinal)
elif len(indFinal[1]) == 0:
indFinal = arange(sum(final_mask))
result = self.map(lambda v: v[final_mask], index=indFinal)
if return_mask:
return result, final_mask
else:
return result
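# Illustrative multi-index selection (a sketch, using a small integer
# multi-index): with index = [[0, 0], [0, 1], [1, 0], [1, 1]],
# select_by_index(0, level=0) keeps the first two elements of every record,
# while select_by_index(val=[[0], [1]], level=[0, 1]) keeps only element 1.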
def aggregate_by_index(self, function, level=0):
"""
Aggregrate data in each record, grouping by index values.
For each unique value of the index, applies a function to the group
indexed by that value. Returns a Series indexed by those unique values.
For the result to be a valid Series object, the aggregating function should
return a simple numeric type. Also allows selection of levels within a
multi-index. See select_by_index for more info on indices and multi-indices.
Parameters
----------
function : function
Aggregating function to map to Series values. Should take a list or ndarray
as input and return a simple numeric value.
level : list of ints, optional, default=0
Specifies the levels of the multi-index to use when determining unique index values.
If only a single level is desired, can be an int.
"""
result = self._map_by_index(function, level=level)
return result.map(lambda v: array(v), index=result.index)
def stat_by_index(self, stat, level=0):
"""
Compute the desired statistic for each unique index value (across levels, if multi-index).
Parameters
----------
stat : string
Statistic to be computed: sum, mean, median, stdev, max, min, count
level : list of ints, optional, default=0
Specifies the levels of the multi-index to use when determining unique index values.
If only a single level is desired, can be an int.
"""
from numpy import sum, min, max
STATS = {
'sum': sum,
'mean': mean,
'median': median,
'stdev': std,
'max': max,
'min': min,
'count': size
}
func = STATS[stat.lower()]
return self.aggregate_by_index(level=level, function=func)
def sum_by_index(self, level=0):
"""
Compute sums for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='sum')
def mean_by_index(self, level=0):
"""
Compute means for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='mean')
def median_by_index(self, level=0):
"""
Compute medians for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='median')
def std_by_index(self, level=0):
"""
Compute standard deviations for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='stdev')
def max_by_index(self, level=0):
"""
Compute maximum values for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='max')
def min_by_index(self, level=0):
"""
Compute minimum values for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='min')
def count_by_index(self, level=0):
"""
Count the number for each unique index value (across levels, if multi-index)
"""
return self.stat_by_index(level=level, stat='count')
def cov(self):
"""
Compute covariance of a distributed matrix.
Takes no parameters; equivalent to centering across records
and scaling the gramian by (shape[0] - 1).
"""
return self.center(axis=0).gramian().times(1.0 / (self.shape[0] - 1))
def gramian(self):
"""
Compute gramian of a distributed matrix.
The gramian is defined as the product of the matrix
with its transpose, i.e. A^T * A.
"""
if self.mode == 'spark':
rdd = self.values.tordd()
from pyspark.accumulators import AccumulatorParam
class MatrixAccumulator(AccumulatorParam):
def zero(self, value):
return zeros(shape(value))
def addInPlace(self, val1, val2):
val1 += val2
return val1
global mat
init = zeros((self.shape[1], self.shape[1]))
mat = rdd.context.accumulator(init, MatrixAccumulator())
def outer_sum(x):
global mat
mat += outer(x, x)
rdd.values().foreach(outer_sum)
return self._constructor(mat.value, index=self.index)
if self.mode == 'local':
return self._constructor(dot(self.values.T, self.values), index=self.index)
def times(self, other):
"""
Multiply a matrix by another one.
Other matrix must be a numpy array, a scalar,
or another matrix in local mode.
Parameters
----------
other : Matrix, scalar, or numpy array
A matrix to multiply with
"""
if isinstance(other, ScalarType):
other = asarray(other)
index = self.index
else:
if isinstance(other, list):
other = asarray(other)
if isinstance(other, ndarray) and other.ndim < 2:
other = expand_dims(other, 1)
if not self.shape[1] == other.shape[0]:
raise ValueError('shapes %s and %s are not aligned' % (self.shape, other.shape))
index = arange(other.shape[1])
if self.mode == 'local' and isinstance(other, Series) and other.mode == 'spark':
raise NotImplementedError
if self.mode == 'spark' and isinstance(other, Series) and other.mode == 'spark':
raise NotImplementedError
if self.mode == 'local' and isinstance(other, (ndarray, ScalarType)):
return self._constructor(dot(self.values, other), index=index)
if self.mode == 'local' and isinstance(other, Series):
return self._constructor(dot(self.values, other.values), index=index)
if self.mode == 'spark' and isinstance(other, (ndarray, ScalarType)):
return self.map(lambda x: dot(x, other), index=index)
if self.mode == 'spark' and isinstance(other, Series):
return self.map(lambda x: dot(x, other.values), index=index)
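# Worked example for times (a sketch): a local Series with values of shape
# (2, 3) multiplied by a (3, 2) numpy array yields values of shape (2, 2)
# and index arange(2); multiplying by a scalar keeps the original index.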
def _makewindows(self, indices, window):
"""
Make masks used by windowing functions
Given a list of indices specifying window centers,
and a window size, construct a list of index arrays,
one per window, that index into the target array
Parameters
----------
indices : array-like
List of times specifying window centers
window : int
Window size
"""
div = divmod(window, 2)
before = div[0]
after = div[0] + div[1]
index = asarray(self.index)
indices = asarray(indices)
if where(index == max(indices))[0][0] + after > len(index):
raise ValueError("Maximum requested index %g, with window %g, exceeds length %g"
% (max(indices), window, len(index)))
if where(index == min(indices))[0][0] - before < 0:
raise ValueError("Minimum requested index %g, with window %g, is less than 0"
% (min(indices), window))
masks = [arange(where(index == i)[0][0]-before, where(index == i)[0][0]+after, dtype='int') for i in indices]
return masks
def mean_by_window(self, indices, window):
"""
Average series across multiple windows specified by their centers.
Parameters
----------
indices : array-like
List of times specifying window centers
window : int
Window size
"""
masks = self._makewindows(indices, window)
newindex = arange(0, len(masks[0]))
return self.map(lambda x: mean([x[m] for m in masks], axis=0), index=newindex)
def subsample(self, sampleFactor=2):
"""
Subsample series by an integer factor.
Parameters
----------
sampleFactor : positive integer, optional, default=2
"""
if sampleFactor < 1:
raise Exception('Factor for subsampling must be a positive integer, got %g' % sampleFactor)
s = slice(0, len(self.index), sampleFactor)
newindex = self.index[s]
return self.map(lambda v: v[s], index=newindex)
def fourier(self, freq=None):
"""
Compute statistics of a Fourier decomposition on series data.
Parameters
----------
freq : int
Digital frequency at which to compute coherence and phase
"""
def get(y, freq):
y = y - mean(y)
nframes = len(y)
ft = fft.fft(y)
ft = ft[0:int(fix(nframes/2))]
ampFt = 2*abs(ft)/nframes
amp = ampFt[freq]
ampSum = sqrt(sum(ampFt**2))
co = amp / ampSum
ph = -(pi/2) - angle(ft[freq])
if ph < 0:
ph += pi * 2
return array([co, ph])
if freq >= int(fix(size(self.index)/2)):
raise Exception('Requested frequency, %g, is too high, '
'must be less than half the series duration' % freq)
index = ['coherence', 'phase']
return self.map(lambda x: get(x, freq), index=index)
def convolve(self, signal, mode='full'):
"""
Convolve series data against another signal.
Parameters
----------
signal : array
Signal to convolve with (must be 1D)
mode : str, optional, default='full'
Mode of convolution, options are 'full', 'same', and 'valid'
"""
from numpy import convolve
s = asarray(signal)
n = size(self.index)
m = size(s)
# use expected lengths to make a new index
if mode == 'same':
newmax = max(n, m)
elif mode == 'valid':
newmax = max(m, n) - min(m, n) + 1
else:
newmax = n+m-1
newindex = arange(0, newmax)
return self.map(lambda x: convolve(x, signal, mode), index=newindex)
def crosscorr(self, signal, lag=0):
"""
Cross correlate series data against another signal.
Parameters
----------
signal : array
Signal to correlate against (must be 1D).
lag : int
Range of lags to consider, will cover (-lag, +lag).
"""
from scipy.linalg import norm
s = asarray(signal)
s = s - mean(s)
s = s / norm(s)
if size(s) != size(self.index):
raise Exception('Size of signal to cross correlate with, %g, '
'does not match size of series' % size(s))
# create a matrix with lagged copies of the signal
if lag != 0:
shifts = range(-lag, lag+1)
d = len(s)
m = len(shifts)
sshifted = zeros((m, d))
for i in range(0, len(shifts)):
tmp = roll(s, shifts[i])
if shifts[i] < 0:
tmp[(d+shifts[i]):] = 0
if shifts[i] > 0:
tmp[:shifts[i]] = 0
sshifted[i, :] = tmp
s = sshifted
else:
shifts = [0]
def get(y, s):
y = y - mean(y)
n = norm(y)
if n == 0:
b = zeros((s.shape[0],))
else:
y /= norm(y)
b = dot(s, y)
return b
return self.map(lambda x: get(x, s), index=shifts)
def detrend(self, method='linear', order=5):
"""
Detrend series data with linear or nonlinear detrending.
Preserve intercept so that subsequent operations can adjust the baseline.
Parameters
----------
method : str, optional, default = 'linear'
Detrending method
order : int, optional, default = 5
Order of polynomial, for non-linear detrending only
"""
check_options(method, ['linear', 'nonlinear'])
if method == 'linear':
order = 1
def func(y):
x = arange(len(y))
p = polyfit(x, y, order)
p[-1] = 0
yy = polyval(p, x)
return y - yy
return self.map(func)
def normalize(self, method='percentile', window=None, perc=20, offset=0.1):
"""
Normalize by subtracting and dividing by a baseline.
Baseline can be derived from a global mean or percentile,
or a smoothed percentile estimated within a rolling window.
Windowed baselines may only be well-defined for
temporal series data.
Parameters
----------
method : str, optional, default = 'percentile'
Quantity to use as the baseline, options are 'mean', 'percentile',
'window', or 'window-exact'.
window : int, optional, default = None
Size of window for baseline estimation,
for 'window' and 'window-exact' baseline only.
perc : int, optional, default = 20
Percentile value to use, for 'percentile',
'window', or 'window-exact' baseline only.
offset : float, optional, default = 0.1
Scalar added to baseline during division to avoid division by 0.
"""
check_options(method, ['mean', 'percentile', 'window', 'window-exact'])
from warnings import warn
if not (method == 'window' or method == 'window-exact') and window is not None:
warn('Setting window without using method "window" has no effect')
if method == 'mean':
baseFunc = mean
if method == 'percentile':
baseFunc = lambda x: percentile(x, perc)
if method == 'window':
from scipy.ndimage.filters import percentile_filter
baseFunc = lambda x: percentile_filter(x.astype(float64), perc, window, mode='nearest')
if method == 'window-exact':
if window & 0x1:
left, right = (ceil(window/2), ceil(window/2) + 1)
else:
left, right = (window/2, window/2)
n = len(self.index)
baseFunc = lambda x: asarray([percentile(x[max(ix-left, 0):min(ix+right+1, n)], perc)
for ix in arange(0, n)])
def get(y):
b = baseFunc(y)
return (y - b) / (b + offset)
return self.map(get)
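# Worked example for normalize (a sketch): with method='mean' and offset=0.1,
# a record [1.0, 2.0, 3.0] has baseline b = 2.0, so the result is
# (y - b) / (b + offset) = [-0.476..., 0.0, 0.476...].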
def toimages(self, size='150'):
"""
Converts Series to Images.
Equivalent to calling series.toblocks(size).toimages()
Parameters
----------
size : str, optional, default = "150M"
String interpreted as memory size.
"""
from thunder.images.images import Images
n = len(self.shape) - 1
if self.mode == 'spark':
return Images(self.values.swap(tuple(range(n)), (0,), size=size))
if self.mode == 'local':
return Images(self.values.transpose((n,) + tuple(range(0, n))))
def tobinary(self, path, prefix='series', overwrite=False, credentials=None):
"""
Write data to binary files.
Parameters
----------
path : string path or URI to directory to be created
Output files will be written underneath path.
Directory will be created as a result of this call.
prefix : str, optional, default = 'series'
String prefix for files.
overwrite : bool
If true, path and all its contents will be deleted and
recreated as part of this call.
"""
from thunder.series.writers import tobinary
tobinary(self, path, prefix=prefix, overwrite=overwrite, credentials=credentials)
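if __name__ == '__main__':
    # A short, self-contained usage sketch for local-mode Series (illustrative
    # only; the data below is made up for the example).
    demo = Series(array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]))
    print(demo.index)                 # -> [0 1 2]
    print(demo.mean().toarray())      # mean across the two records
    print(demo.between(0, 2).index)   # -> [0, 1]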
|
|
# coding=utf-8
from contextlib import contextmanager
import datetime
from django.test import TestCase
from django.utils import timezone
import mock
from cla_common.constants import CASE_SOURCE
from cla_common import call_centre_availability
from core.tests.mommy_utils import make_recipe, make_user
from cla_eventlog import event_registry
from cla_eventlog.models import Log
from legalaid.forms import get_sla_time
from reports.forms import MICB1Extract
from call_centre.tests.test_utils import CallCentreFixedOperatingHours
def _make_datetime(year=None, month=None, day=None, hour=0, minute=0, second=0):
today = datetime.date.today()
year = year if year else today.year
month = month if month else today.month
day = day if day else today.day
dt = datetime.datetime(year, month, day, hour, minute, second)
return timezone.make_aware(dt, timezone.get_current_timezone())
def mock_now(dt):
return dt
@contextmanager
def patch_field(cls, field_name, dt):
field = cls._meta.get_field(field_name)
with mock.patch.object(field, "default", new=mock_now(dt)):
yield
class MiSlaTestCaseBase(CallCentreFixedOperatingHours):
source = None
requires_action_at_minutes_offset = 60
def setUp(self):
super(MiSlaTestCaseBase, self).setUp()
self.current_case = None
self.now = self.get_default_dt()
self.timezone_mock = mock.patch.object(timezone, "now", lambda: self.now)
self.timezone_mock.start()
# Read self.now dynamically so that _move_time_forward also moves this clock
self.cla_common_datetime_mock = mock.patch.object(
call_centre_availability, "current_datetime",
lambda: timezone.make_naive(self.now, timezone.get_current_timezone()),
)
self.cla_common_datetime_mock.start()
def tearDown(self):
super(MiSlaTestCaseBase, self).tearDown()
self.timezone_mock.stop()
self.cla_common_datetime_mock.stop()
def get_default_dt(self):
raise NotImplementedError()
def get_requires_action_at(self):
raise NotImplementedError
def get_sla1_datetime(self):
raise NotImplementedError()
def get_sla2_datetime(self):
raise NotImplementedError()
def move_time_forward_minutes_before_sla1(self, minutes):
return self._move_time_forward(self.get_sla1_datetime(), -minutes)
def move_time_forward_minutes_after_sla1(self, minutes):
return self._move_time_forward(self.get_sla1_datetime(), minutes)
def move_time_forward_minutes_before_sla2(self, minutes):
return self._move_time_forward(self.get_sla2_datetime(), -minutes)
def move_time_forward_minutes_after_sla2(self, minutes):
return self._move_time_forward(self.get_sla2_datetime(), minutes)
def _move_time_forward(self, dt, minutes_forward):
# Both patched callables read self.now dynamically, so updating it here
# is enough to move the mocked clock.
self.now = dt + datetime.timedelta(minutes=minutes_forward)
return self.now
def make_case(self, dt, **kwargs):
with patch_field(Log, "created", dt - datetime.timedelta(minutes=1)):
self.current_case = make_recipe("legalaid.case", source=self.source, **kwargs)
return self.current_case
def schedule_callback(self, case, user, created, requires_action_at=None):
requires_action_at = requires_action_at or created + datetime.timedelta(minutes=35)
sla_base_time = requires_action_at
if case.source in [CASE_SOURCE.SMS, CASE_SOURCE.VOICEMAIL]:
sla_base_time = case.created
event = event_registry.get_event("call_me_back")()
with patch_field(Log, "created", created):
event.get_log_code(case=case)
event.process(
case,
created_by=user,
notes="",
context={
"requires_action_at": requires_action_at,
"sla_15": get_sla_time(sla_base_time, 15),
"sla_30": get_sla_time(sla_base_time, 30),
"sla_120": get_sla_time(sla_base_time, 120),
"sla_480": get_sla_time(sla_base_time, 480),
"sla_72h": get_sla_time(sla_base_time, 4320),
},
)
case.set_requires_action_at(requires_action_at)
def start_call(self, case, user, created):
event = event_registry.get_event("case")()
with patch_field(Log, "created", created):
event.process(case, status="call_started", created_by=user, notes="Call started")
def get_report(self, date_range):
with mock.patch("reports.forms.MICB1Extract.date_range", date_range):
report = MICB1Extract()
qs = report.get_queryset()
headers = report.get_headers()
return {k: v for k, v in zip(headers, qs[0])}
def create_and_get_report(self, callback_minutes_after, case=None):
created = case.created if case else _make_datetime(2015, 1, 2, 9, 1, 0)
case = case or self.make_case(created)
user = make_user()
make_recipe("call_centre.operator", user=user)
requires_action_at = created + datetime.timedelta(minutes=self.requires_action_at_minutes_offset)
self.schedule_callback(case, user, created, requires_action_at)
self.start_call(case, user, requires_action_at + datetime.timedelta(minutes=callback_minutes_after))
date_range = (
created.replace(hour=0, minute=0),
created.replace(hour=0, minute=0) + datetime.timedelta(days=1),
)
return self.get_report(date_range)
def test_current_time_within_sla1(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create a callback that is due 1 hour from now
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute before SLA1
now_tz = self.move_time_forward_minutes_before_sla1(minutes=1)
# Generate report without a callback
date_range = (now_tz - datetime.timedelta(days=2), now_tz + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, now_tz)
# Generate report with a successful callback
date_range = (now_tz - datetime.timedelta(days=2), now_tz + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_current_time_after_sla1(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create a callback that is due 1 hour from now
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute after SLA1
now_tz = self.move_time_forward_minutes_after_sla1(minutes=1)
# Generate report without a callback
date_range = (now_tz - datetime.timedelta(days=2), now_tz + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, now_tz)
# Generate report with a successful callback
date_range = (now_tz - datetime.timedelta(days=2), now_tz + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_current_time_before_sla2(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create a callback that is due 1 hour from now
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute before SLA2
self.move_time_forward_minutes_before_sla2(minutes=1)
# Generate report without a callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_current_time_after_sla2(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create a callback that is due 1 hour from now
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute after SLA2
now_tz = self.move_time_forward_minutes_after_sla2(minutes=1)
# Generate report without a callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertTrue(values["missed_sla_2"])
# Start a call
self.start_call(case, user, now_tz)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertTrue(values["missed_sla_2"])
def test_cb2_current_time_within_sla1(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create CB1
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute before SLA1
self.move_time_forward_minutes_before_sla1(minutes=1)
# Create CB2
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Generate report without a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_cb2_current_time_after_sla1(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create CB1
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute after SLA1
now_tz = self.move_time_forward_minutes_after_sla1(minutes=1)
# Create CB2
self.schedule_callback(case, user, created=now_tz, requires_action_at=self.get_requires_action_at())
# Generate report without a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, now_tz)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_cb2_current_time_before_sla2(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create CB1
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute before SLA2
self.move_time_forward_minutes_before_sla2(minutes=1)
# Create CB2
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Generate report without a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_cb2_current_time_after_sla2(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create CB1
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute after SLA2
self.move_time_forward_minutes_after_sla2(minutes=1)
# Create CB2
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Generate report without a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertTrue(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertTrue(values["missed_sla_2"])
def test_cb3_current_time_within_sla1(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create CB1
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 2 minutes before SLA1
self.move_time_forward_minutes_before_sla1(minutes=2)
# Create CB2
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Move current time to 1 minute before SLA1
self.move_time_forward_minutes_before_sla1(minutes=1)
# Create CB3
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Generate report without a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
class MiSlaTestCaseWeb(MiSlaTestCaseBase, TestCase):
source = CASE_SOURCE.WEB
# fmt: off
"""
CB1
+--------------+-------------------------------+---------------------------------------------+--------------------------------+
| | | | Call answered |
| | | | after 30m |
| | Call answered | Call answered | AND |
| | Within 30m window | before 30m window | current time within 72h window |
+--------------+-------------------------------+---------------------------------------------+--------------------------------+
| SLA 1 missed | FALSE | TRUE | TRUE |
+--------------+-------------------------------+---------------------------------------------+--------------------------------+
| SLA 2 missed | FALSE | FALSE | FALSE |
+--------------+-------------------------------+---------------------------------------------+--------------------------------+
| test | test_current_time_within_sla1 | test_current_time_before_requires_action_at | test_current_time_after_sla1 |
+--------------+-------------------------------+---------------------------------------------+--------------------------------+
CB2
+--------------+-----------------------------------+-------------------------------------------------+----------------------------------+----------------------------------+----------------------------------+-----------------------------------+
| | CB2 Scheduled | CB2 Scheduled | CB2 Scheduled | CB2 Scheduled | Call answered | CB2 Scheduled |
| | within 30m window | before 30m window | after 30m window | after 30m window | after 72 window | within 30m window |
| | AND | AND | AND | AND | | AND |
| | current time within 72h window | current time within 72h window | current time within 72h window | current time after 72h window | | call answered within 72 window |
+--------------+-----------------------------------+-------------------------------------------------+----------------------------------+----------------------------------+----------------------------------+-----------------------------------+
| SLA 1 missed | FALSE | TRUE | TRUE | TRUE | TRUE | FALSE |
+--------------+-----------------------------------+-------------------------------------------------+----------------------------------+----------------------------------+----------------------------------+-----------------------------------+
| SLA 2 missed | FALSE | FALSE | FALSE | TRUE | TRUE | FALSE |
+--------------+-----------------------------------+-------------------------------------------------+----------------------------------+----------------------------------+----------------------------------+-----------------------------------+
| test | test_cb2_current_time_within_sla1 | test_cb2_current_time_before_requires_action_at | test_cb2_current_time_after_sla1 | test_cb2_current_time_after_sla2 | test_cb2_current_time_after_sla2 | test_cb2_current_time_within_sla1 |
+--------------+-----------------------------------+-------------------------------------------------+----------------------------------+----------------------------------+----------------------------------+-----------------------------------+
CB3
+--------------+-----------------------------------+
| | CB3 Scheduled |
| | within 30m window |
| | AND |
| | call answered within 72 window |
+--------------+-----------------------------------+
| SLA 1 missed | FALSE |
+--------------+-----------------------------------+
| SLA 2 missed | FALSE |
+--------------+-----------------------------------+
| test | test_cb3_current_time_within_sla1 |
+--------------+-----------------------------------+
"""
# fmt: on
def get_default_dt(self):
return _make_datetime(year=2020, month=9, day=7, hour=9, minute=0)
def get_requires_action_at(self):
return self.now + datetime.timedelta(hours=1)
def get_sla1_datetime(self):
return self.get_requires_action_at() + datetime.timedelta(minutes=30)
def get_sla2_datetime(self):
return _make_datetime(year=2020, month=9, day=15, hour=12, minute=30)
def test_current_time_before_requires_action_at(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create a callback that is due 1 hour from now
requires_action_at = self.get_requires_action_at()
self.schedule_callback(case, user, created=self.now, requires_action_at=requires_action_at)
# Move current time to 5 minutes before requires_action_at
self._move_time_forward(requires_action_at, minutes_forward=-5)
# Generate report without a callback
date_range = (self.now - datetime.timedelta(days=2), self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertFalse(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (self.now - datetime.timedelta(days=2), self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
# Contacting customer before the requires_action_at will result in a failure to meet SLA
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
def test_cb2_current_time_before_requires_action_at(self):
case = self.make_case(self.now, created=self.now)
user = make_user()
make_recipe("call_centre.operator", user=user)
# Create CB1
requires_action_at = self.get_requires_action_at()
self.schedule_callback(case, user, created=self.now, requires_action_at=requires_action_at)
# Move current time to 5 minutes before requires_action_at
self._move_time_forward(requires_action_at, minutes_forward=-5)
# Create CB2
self.schedule_callback(case, user, created=self.now, requires_action_at=self.get_requires_action_at())
# Generate report without a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
# Start a call
self.start_call(case, user, self.now)
# Generate report with a successful callback
date_range = (case.created, self.now + datetime.timedelta(days=2))
values = self.get_report(date_range)
self.assertTrue(values["missed_sla_1"])
self.assertFalse(values["missed_sla_2"])
class MiSlaTestCasePhone(MiSlaTestCaseWeb):
source = CASE_SOURCE.PHONE
class MiSlaTestCaseSMS(MiSlaTestCaseBase, TestCase):
source = CASE_SOURCE.SMS
# fmt: off
"""
Rules used to determine if SLA1/SLA2 was missed
Note: A callback attempt is when the operator has clicked the start call button after successfully contacting the user
+-----------+--------------+-----------------------------------+--------------------------------------------------------------------+-------------------------------------------------------------------+
| | | Callback attempted within 2 hours | Callback attempted after 2 hours AND current time within 8h window | Callback attempted after 2 hours AND current time after 8h window |
+-----------+--------------+-----------------------------------+--------------------------------------------------------------------+-------------------------------------------------------------------+
| SMS | SLA 1 missed | FALSE | TRUE | TRUE |
+-----------+--------------+-----------------------------------+--------------------------------------------------------------------+-------------------------------------------------------------------+
| | SLA 2 missed | FALSE | FALSE | TRUE |
+-----------+--------------+-----------------------------------+--------------------------------------------------------------------+-------------------------------------------------------------------+
| Voicemail | SLA 1 missed | FALSE | TRUE | TRUE |
+-----------+--------------+-----------------------------------+--------------------------------------------------------------------+-------------------------------------------------------------------+
| | SLA 2 missed | FALSE | FALSE | TRUE |
+-----------+--------------+-----------------------------------+--------------------------------------------------------------------+-------------------------------------------------------------------+
"""
# fmt: on
def get_default_dt(self):
return _make_datetime(year=2020, month=9, day=7, hour=9, minute=0)
def get_requires_action_at(self):
return self.now + datetime.timedelta(hours=1)
def get_sla1_datetime(self):
return _make_datetime(year=2020, month=9, day=7, hour=11, minute=0)
def get_sla2_datetime(self):
return _make_datetime(year=2020, month=9, day=7, hour=17, minute=0)
class MiSlaTestCaseVoiceMail(MiSlaTestCaseSMS):
source = CASE_SOURCE.VOICEMAIL
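# The patch_field helper above works by temporarily swapping a model field's
# ``default`` so records created inside the block get a chosen timestamp. A
# minimal standalone sketch of the same mock.patch.object pattern on a plain
# stand-in object (``_Field`` is hypothetical, not a Django class):
#
#     class _Field(object):
#         default = "old"
#
#     with mock.patch.object(_Field, "default", new="new"):
#         assert _Field.default == "new"
#     assert _Field.default == "old"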
|
|
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import unittest
import tempfile
from configman import converters
from configman import RequiredConfig, Namespace, ConfigurationManager
# the following two classes are used in test_classes_in_namespaces_converter_1
# and need to be declared at module level scope
class Foo(RequiredConfig):
required_config = Namespace()
required_config.add_option('x',
default=17)
required_config.add_option('y',
default=23)
class Bar(RequiredConfig):
required_config = Namespace()
required_config.add_option('x',
default=227)
required_config.add_option('a',
default=11)
# the following two classes are used in test_classes_in_namespaces_converter_2
# and test_classes_in_namespaces_converter_3. They need to be declared at
# module level scope
class Alpha(RequiredConfig):
required_config = Namespace()
required_config.add_option('a',
doc='a',
default=17)
def __init__(self, config):
self.config = config
self.a = config.a
class Beta(RequiredConfig):
required_config = Namespace()
required_config.add_option('b',
doc='b',
default=23)
def __init__(self, config):
self.config = config
self.b = config.b
class TestCase(unittest.TestCase):
def test_str_dict_keys(self):
function = converters.str_dict_keys
result = function({u'name': u'Peter', 'age': 99, 10: 11})
self.assertEqual(result,
{'name': u'Peter', 'age': 99, 10: 11})
for key in result.keys():
if key in ('name', 'age'):
self.assertTrue(not isinstance(key, unicode))
self.assertTrue(isinstance(key, str))
else:
self.assertTrue(isinstance(key, int))
def test_option_value_str(self):
function = converters.option_value_str
class _Option(object):
def __init__(self, value=None, from_string_converter=None):
self.value = value
self.from_string_converter = from_string_converter
opt = _Option()
self.assertEqual(function(opt), '')
opt = _Option(3.14)
self.assertEqual(function(opt), '3.14')
from decimal import Decimal
opt = _Option(Decimal('3.14'))
self.assertEqual(function(opt), '3.14')
# FIXME: need a way to test a value whose 'from_string_converter'
# requires quotes
def test_io_converter(self):
function = converters.io_converter
import sys
import os
self.assertEqual(function(100.0), 100.0)
self.assertEqual(function('stdout'), sys.stdout)
self.assertEqual(function('STDOut'), sys.stdout)
self.assertEqual(function('Stderr'), sys.stderr)
tmp_filename = os.path.join(tempfile.gettempdir(), 'test.anything')
try:
r = function(tmp_filename)
self.assertTrue(hasattr(r, 'write'))
self.assertTrue(hasattr(r, 'close'))
r.write('stuff\n')
r.close()
self.assertEqual(open(tmp_filename).read(), 'stuff\n')
finally:
if os.path.isfile(tmp_filename):
os.remove(tmp_filename)
def test_timedelta_converter(self):
function = converters.timedelta_converter
from datetime import timedelta
self.assertEqual(function('1'), timedelta(seconds=1))
self.assertEqual(function('2:1'), timedelta(minutes=2, seconds=1))
self.assertEqual(function('3:2:1'),
timedelta(hours=3, minutes=2, seconds=1))
self.assertEqual(function('4:3:2:1'),
timedelta(days=4, hours=3, minutes=2, seconds=1))
self.assertRaises(ValueError, function, 'xxx')
self.assertRaises(ValueError, function, 10.1)
def test_class_converter_nothing(self):
function = converters.class_converter
self.assertEqual(function(''), None)
def test_py_obj_to_str(self):
function = converters.py_obj_to_str
self.assertEqual(function(None), '')
from configman import tests as tests_module
self.assertEqual(function(tests_module), 'configman.tests')
self.assertEqual(function(int), 'int')
def test_list_to_str(self):
function = converters.list_to_str
self.assertEqual(function([]), '')
self.assertEqual(function(tuple()), '')
import configman
self.assertEqual(function([configman.tests.test_converters.TestCase]),
'configman.tests.test_converters.TestCase')
self.assertEqual(function([configman.tests, configman]),
'configman.tests, configman')
self.assertEqual(function([int, str, 123, "hello"]),
'int, str, 123, hello')
self.assertEqual(function((configman.tests.test_converters.TestCase,)),
'configman.tests.test_converters.TestCase')
self.assertEqual(function((configman.tests, configman)),
'configman.tests, configman')
self.assertEqual(function((int, str, 123, "hello")),
'int, str, 123, hello')
def test_classes_in_namespaces_converter_1(self):
converter_fn = converters.classes_in_namespaces_converter('HH%d')
class_list_str = ('configman.tests.test_converters.Foo,'
'configman.tests.test_converters.Bar')
result = converter_fn(class_list_str)
self.assertTrue(hasattr(result, 'required_config'))
req = result.required_config
self.assertEqual(len(req), 2)
self.assertTrue('HH0' in req)
self.assertEqual(len(req.HH0), 1)
self.assertTrue('cls' in req.HH0)
self.assertTrue('HH1' in req)
self.assertEqual(len(req.HH1), 1)
self.assertTrue('cls' in req.HH1)
self.assertEqual(
sorted([x.strip() for x in class_list_str.split(',')]),
sorted([x.strip() for x in
converters.py_obj_to_str(result).split(',')]))
def test_classes_in_namespaces_converter_2(self):
converter_fn = converters.classes_in_namespaces_converter('HH%d')
class_sequence = (Foo, Bar)
self.assertRaises(TypeError, converter_fn, class_sequence)
def test_classes_in_namespaces_converter_3(self):
n = Namespace()
n.add_option('kls_list',
default='configman.tests.test_converters.Alpha, '
'configman.tests.test_converters.Alpha, '
'configman.tests.test_converters.Alpha',
from_string_converter=
converters.classes_in_namespaces_converter('kls%d'))
cm = ConfigurationManager(n, argv_source=[])
config = cm.get_config()
self.assertEqual(len(config.kls_list.subordinate_namespace_names), 3)
for x in config.kls_list.subordinate_namespace_names:
self.assertTrue(x in config)
self.assertEqual(config[x].cls, Alpha)
self.assertTrue('cls_instance' not in config[x])
def test_classes_in_namespaces_converter_4(self):
n = Namespace()
n.add_option('kls_list',
default='configman.tests.test_converters.Alpha, '
'configman.tests.test_converters.Alpha, '
'configman.tests.test_converters.Alpha',
from_string_converter=
converters.classes_in_namespaces_converter(
'kls%d',
'kls',
instantiate_classes=True))
cm = ConfigurationManager(
n,
[{'kls_list':'configman.tests.test_converters.Alpha, '
'configman.tests.test_converters.Beta, '
'configman.tests.test_converters.Beta, '
'configman.tests.test_converters.Alpha'}])
config = cm.get_config()
self.assertEqual(len(config.kls_list.subordinate_namespace_names), 4)
for x in config.kls_list.subordinate_namespace_names:
self.assertTrue(x in config)
self.assertTrue('kls_instance' in config[x])
self.assertTrue(isinstance(config[x].kls_instance,
config[x].kls))
|
|
from decimal import Decimal
from functools import partial
import pytz
import unittest
from mock import Mock
from flask_restful.fields import MarshallingException
from flask_restful.utils import OrderedDict
from flask_restful import fields
from datetime import datetime, timedelta, tzinfo
from flask import Flask, Blueprint
#noinspection PyUnresolvedReferences
from nose.tools import assert_equals # needed for the yield-based generator tests below
class Foo(object):
def __init__(self):
self.hey = 3
class Bar(object):
def __marshallable__(self):
return {"hey": 3}
def check_field(expected, field, value):
assert_equals(expected, field.output('a', {'a': value}))
def test_float():
values = [
("-3.13", -3.13),
(str(-3.13), -3.13),
(3, 3.0),
]
for value, expected in values:
yield check_field, expected, fields.Float(), value
def test_boolean():
values = [
(True, True),
(False, False),
({}, False),
("false", True), # These are different from php
("0", True), # Will this be a problem?
]
for value, expected in values:
yield check_field, expected, fields.Boolean(), value
def test_rfc822_datetime_formatters():
dates = [
(datetime(2011, 1, 1), "Sat, 01 Jan 2011 00:00:00 -0000"),
(datetime(2011, 1, 1, 23, 59, 59),
"Sat, 01 Jan 2011 23:59:59 -0000"),
(datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc),
"Sat, 01 Jan 2011 23:59:59 -0000"),
(datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.timezone('CET')),
"Sat, 01 Jan 2011 22:59:59 -0000")
]
for date_obj, expected in dates:
yield assert_equals, fields._rfc822(date_obj), expected
def test_iso8601_datetime_formatters():
dates = [
(datetime(2011, 1, 1), "2011-01-01T00:00:00"),
(datetime(2011, 1, 1, 23, 59, 59),
"2011-01-01T23:59:59"),
(datetime(2011, 1, 1, 23, 59, 59, 1000),
"2011-01-01T23:59:59.001000"),
(datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc),
"2011-01-01T23:59:59+00:00"),
(datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=pytz.utc),
"2011-01-01T23:59:59.001000+00:00"),
(datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.timezone('CET')),
"2011-01-01T23:59:59+01:00")
]
for date_obj, expected in dates:
yield assert_equals, fields._iso8601(date_obj), expected
class FieldsTestCase(unittest.TestCase):
def test_decimal_trash(self):
self.assertRaises(MarshallingException, lambda: fields.Float().output('a', {'a': 'Foo'}))
def test_basic_dictionary(self):
obj = {"foo": 3}
field = fields.String()
self.assertEqual(field.output("foo", obj), "3")
def test_no_attribute(self):
obj = {"bar": 3}
field = fields.String()
self.assertEqual(field.output("foo", obj), None)
def test_date_field_invalid(self):
obj = {"bar": 3}
field = fields.DateTime()
self.assertRaises(MarshallingException, lambda: field.output("bar", obj))
def test_attribute(self):
obj = {"bar": 3}
field = fields.String(attribute="bar")
self.assertEqual(field.output("foo", obj), "3")
def test_formatting_field_none(self):
obj = {}
field = fields.FormattedString("/foo/{0[account_sid]}/{0[sid]}/")
self.assertRaises(MarshallingException, lambda: field.output("foo", obj))
def test_formatting_field_tuple(self):
obj = (3, 4)
field = fields.FormattedString("/foo/{0[account_sid]}/{0[sid]}/")
self.assertRaises(MarshallingException, lambda: field.output("foo", obj))
def test_formatting_field_dict(self):
obj = {
"sid": 3,
"account_sid": 4,
}
field = fields.FormattedString("/foo/{account_sid}/{sid}/")
self.assertEqual(field.output("foo", obj), "/foo/4/3/")
def test_formatting_field(self):
obj = Mock()
obj.sid = 3
obj.account_sid = 4
field = fields.FormattedString("/foo/{account_sid}/{sid}/")
self.assertEqual(field.output("foo", obj), "/foo/4/3/")
def test_basic_field(self):
obj = Mock()
obj.foo = 3
field = fields.Raw()
self.assertEqual(field.output("foo", obj), 3)
def test_raw_field(self):
obj = Mock()
obj.foo = 3
field = fields.Raw()
self.assertEqual(field.output("foo", obj), 3)
def test_nested_raw_field(self):
foo = Mock()
bar = Mock()
bar.value = 3
foo.bar = bar
field = fields.Raw()
self.assertEqual(field.output("bar.value", foo), 3)
def test_formatted_string_invalid_obj(self):
field = fields.FormattedString("{hey}")
self.assertRaises(MarshallingException, lambda: field.output("hey", None))
def test_formatted_string(self):
field = fields.FormattedString("{hey}")
self.assertEqual("3", field.output("hey", Foo()))
def test_string_with_attribute(self):
field = fields.String(attribute="hey")
self.assertEqual("3", field.output("foo", Foo()))
def test_string_with_lambda(self):
field = fields.String(attribute=lambda x: x.hey)
self.assertEqual("3", field.output("foo", Foo()))
def test_string_with_partial(self):
def f(x, suffix):
return "%s-%s" % (x.hey, suffix)
p = partial(f, suffix="whatever")
field = fields.String(attribute=p)
self.assertEqual("3-whatever", field.output("foo", Foo()))
def test_url_invalid_object(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url("foobar")
with app.test_request_context("/"):
self.assertRaises(MarshallingException, lambda: field.output("hey", None))
def test_url(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url("foobar")
with app.test_request_context("/"):
self.assertEqual("/3", field.output("hey", Foo()))
def test_url_absolute(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url("foobar", absolute=True)
with app.test_request_context("/"):
self.assertEqual("http://localhost/3", field.output("hey", Foo()))
def test_url_absolute_scheme(self):
"""Url.scheme should override current_request.scheme"""
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url("foobar", absolute=True, scheme='https')
with app.test_request_context("/", base_url="http://localhost"):
self.assertEqual("https://localhost/3", field.output("hey", Foo()))
def test_url_without_endpoint_invalid_object(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url()
with app.test_request_context("/hey"):
self.assertRaises(MarshallingException, lambda: field.output("hey", None))
def test_url_without_endpoint(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url()
with app.test_request_context("/hey"):
self.assertEqual("/3", field.output("hey", Foo()))
def test_url_without_endpoint_absolute(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url(absolute=True)
with app.test_request_context("/hey"):
self.assertEqual("http://localhost/3", field.output("hey", Foo()))
def test_url_without_endpoint_absolute_scheme(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url(absolute=True, scheme='https')
with app.test_request_context("/hey", base_url="http://localhost"):
self.assertEqual("https://localhost/3", field.output("hey", Foo()))
def test_url_with_blueprint_invalid_object(self):
app = Flask(__name__)
bp = Blueprint("foo", __name__, url_prefix="/foo")
bp.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
app.register_blueprint(bp)
field = fields.Url()
with app.test_request_context("/foo/hey"):
self.assertRaises(MarshallingException, lambda: field.output("hey", None))
def test_url_with_blueprint(self):
app = Flask(__name__)
bp = Blueprint("foo", __name__, url_prefix="/foo")
bp.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
app.register_blueprint(bp)
field = fields.Url()
with app.test_request_context("/foo/hey"):
self.assertEqual("/foo/3", field.output("hey", Foo()))
def test_url_with_blueprint_absolute(self):
app = Flask(__name__)
bp = Blueprint("foo", __name__, url_prefix="/foo")
bp.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
app.register_blueprint(bp)
field = fields.Url(absolute=True)
with app.test_request_context("/foo/hey"):
self.assertEqual("http://localhost/foo/3", field.output("hey", Foo()))
def test_url_with_blueprint_absolute_scheme(self):
app = Flask(__name__)
bp = Blueprint("foo", __name__, url_prefix="/foo")
bp.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
app.register_blueprint(bp)
field = fields.Url(absolute=True, scheme='https')
with app.test_request_context("/foo/hey", base_url="http://localhost"):
self.assertEqual("https://localhost/foo/3", field.output("hey", Foo()))
def test_url_superclass_kwargs(self):
app = Flask(__name__)
app.add_url_rule("/<hey>", "foobar", view_func=lambda x: x)
field = fields.Url(absolute=True, attribute='hey')
with app.test_request_context("/hey"):
self.assertEqual("http://localhost/3", field.output("hey", Foo()))
def test_int(self):
field = fields.Integer()
self.assertEqual(3, field.output("hey", {'hey': 3}))
def test_int_default(self):
field = fields.Integer(default=1)
self.assertEqual(1, field.output("hey", {'hey': None}))
def test_no_int(self):
field = fields.Integer()
self.assertEqual(0, field.output("hey", {'hey': None}))
def test_int_decode_error(self):
field = fields.Integer()
self.assertRaises(MarshallingException, lambda: field.output("hey", {'hey': 'Explode please I am nowhere looking like an int'}))
def test_float(self):
field = fields.Float()
self.assertEqual(3.0, field.output("hey", {'hey': 3.0}))
def test_float_decode_error(self):
field = fields.Float()
self.assertRaises(MarshallingException, lambda: field.output("hey", {'hey': 'Explode!'}))
PI_STR = u'3.14159265358979323846264338327950288419716939937510582097494459230781640628620899862803482534211706798214808651328230664709384460955058223172535940812848111745028410270193852110555964462294895493038196442881097566593344612847564823378678316527120190914564856692346034861'
PI = Decimal(PI_STR)
def test_arbitrary(self):
field = fields.Arbitrary()
self.assertEqual(self.PI_STR, field.output("hey", {'hey': self.PI}))
def test_fixed(self):
field5 = fields.Fixed(5)
field4 = fields.Fixed(4)
self.assertEqual('3.14159', field5.output("hey", {'hey': self.PI}))
self.assertEqual('3.1416', field4.output("hey", {'hey': self.PI}))
self.assertEqual('3.0000', field4.output("hey", {'hey': '3'}))
self.assertEqual('3.0000', field4.output("hey", {'hey': '03'}))
self.assertEqual('3.0000', field4.output("hey", {'hey': '03.0'}))
def test_zero_fixed(self):
field = fields.Fixed()
self.assertEqual('0.00000', field.output('hey', {'hey': 0}))
def test_infinite_fixed(self):
field = fields.Fixed()
self.assertRaises(MarshallingException, lambda: field.output("hey", {'hey': '+inf'}))
self.assertRaises(MarshallingException, lambda: field.output("hey", {'hey': '-inf'}))
def test_advanced_fixed(self):
field = fields.Fixed()
self.assertRaises(MarshallingException, lambda: field.output("hey", {'hey': 'NaN'}))
def test_fixed_with_attribute(self):
field = fields.Fixed(4, attribute="bar")
self.assertEqual('3.0000', field.output("foo", {'bar': '3'}))
def test_string(self):
field = fields.String()
self.assertEqual("3", field.output("hey", Foo()))
def test_string_no_value(self):
field = fields.String()
self.assertEqual(None, field.output("bar", Foo()))
def test_string_none(self):
field = fields.String()
self.assertEqual(None, field.output("empty", {'empty': None}))
def test_rfc822_date_field_without_offset(self):
obj = {"bar": datetime(2011, 8, 22, 20, 58, 45)}
field = fields.DateTime()
self.assertEqual("Mon, 22 Aug 2011 20:58:45 -0000", field.output("bar", obj))
def test_rfc822_date_field_with_offset(self):
obj = {"bar": datetime(2011, 8, 22, 20, 58, 45, tzinfo=pytz.timezone('CET'))}
field = fields.DateTime()
self.assertEqual("Mon, 22 Aug 2011 19:58:45 -0000", field.output("bar", obj))
def test_iso8601_date_field_without_offset(self):
obj = {"bar": datetime(2011, 8, 22, 20, 58, 45)}
field = fields.DateTime(dt_format='iso8601')
self.assertEqual("2011-08-22T20:58:45", field.output("bar", obj))
def test_iso8601_date_field_with_offset(self):
obj = {"bar": datetime(2011, 8, 22, 20, 58, 45, tzinfo=pytz.timezone('CET'))}
field = fields.DateTime(dt_format='iso8601')
self.assertEqual("2011-08-22T20:58:45+01:00", field.output("bar", obj))
def test_unsupported_datetime_format(self):
obj = {"bar": datetime(2011, 8, 22, 20, 58, 45)}
field = fields.DateTime(dt_format='raw')
self.assertRaises(MarshallingException, lambda: field.output('bar', obj))
def test_to_dict(self):
obj = {"hey": 3}
self.assertEqual(obj, fields.to_marshallable_type(obj))
def test_to_dict_obj(self):
obj = {"hey": 3}
self.assertEqual(obj, fields.to_marshallable_type(Foo()))
def test_to_dict_custom_marshal(self):
obj = {"hey": 3}
self.assertEqual(obj, fields.to_marshallable_type(Bar()))
def test_get_value(self):
self.assertEqual(3, fields.get_value("hey", {"hey": 3}))
def test_get_value_no_value(self):
self.assertEqual(None, fields.get_value("foo", {"hey": 3}))
def test_get_value_obj(self):
self.assertEqual(3, fields.get_value("hey", Foo()))
def test_list(self):
obj = {'list': ['a', 'b', 'c']}
field = fields.List(fields.String)
self.assertEqual(['a', 'b', 'c'], field.output('list', obj))
def test_list_from_set(self):
obj = {'list': set(['a', 'b', 'c'])}
field = fields.List(fields.String)
self.assertEqual(set(['a', 'b', 'c']), set(field.output('list', obj)))
def test_list_from_object(self):
class TestObject(object):
def __init__(self, list):
self.list = list
obj = TestObject(['a', 'b', 'c'])
field = fields.List(fields.String)
self.assertEqual(['a', 'b', 'c'], field.output('list', obj))
def test_list_with_attribute(self):
class TestObject(object):
def __init__(self, list):
self.foo = list
obj = TestObject(['a', 'b', 'c'])
field = fields.List(fields.String, attribute='foo')
self.assertEqual(['a', 'b', 'c'], field.output('list', obj))
def test_list_with_scoped_attribute_on_dict_or_obj(self):
class TestObject(object):
def __init__(self, list_):
self.bar = list_
class TestEgg(object):
def __init__(self, val):
self.attrib = val
eggs = [TestEgg(i) for i in ['a', 'b', 'c']]
test_obj = TestObject(eggs)
test_dict = {'bar': [{'attrib': 'a'}, {'attrib':'b'}, {'attrib':'c'}]}
field = fields.List(fields.String(attribute='attrib'), attribute='bar')
self.assertEqual(['a', 'b', 'c'], field.output('bar', test_obj))
self.assertEqual(['a', 'b', 'c'], field.output('bar', test_dict))
def test_null_list(self):
class TestObject(object):
def __init__(self, list):
self.list = list
obj = TestObject(None)
field = fields.List(fields.String)
self.assertEqual(None, field.output('list', obj))
def test_indexable_object(self):
class TestObject(object):
def __init__(self, foo):
self.foo = foo
def __getitem__(self, n):
if type(n) is int:
if n < 3:
return n
raise IndexError
raise TypeError
obj = TestObject("hi")
field = fields.String(attribute="foo")
self.assertEqual("hi", field.output("foo", obj))
def test_list_from_dict_with_attribute(self):
obj = {'list': [{'a': 1, 'b': 1}, {'a': 2, 'b': 1}, {'a': 3, 'b': 1}]}
field = fields.List(fields.Integer(attribute='a'))
self.assertEqual([1, 2, 3], field.output('list', obj))
def test_list_of_nested(self):
obj = {'list': [{'a': 1, 'b': 1}, {'a': 2, 'b': 1}, {'a': 3, 'b': 1}]}
field = fields.List(fields.Nested({'a': fields.Integer}))
self.assertEqual([OrderedDict([('a', 1)]), OrderedDict([('a', 2)]), OrderedDict([('a', 3)])],
field.output('list', obj))
def test_nested_with_default(self):
obj = None
field = fields.Nested({'a': fields.Integer, 'b': fields.String}, default={})
self.assertEqual({}, field.output('a', obj))
def test_list_of_raw(self):
obj = {'list': [{'a': 1, 'b': 1}, {'a': 2, 'b': 1}, {'a': 3, 'b': 1}]}
field = fields.List(fields.Raw)
self.assertEqual([OrderedDict([('a', 1), ('b', 1), ]),
OrderedDict([('a', 2), ('b', 1), ]),
OrderedDict([('a', 3), ('b', 1), ])],
field.output('list', obj))
obj = {'list': [1, 2, 'a']}
field = fields.List(fields.Raw)
self.assertEqual([1, 2, 'a'], field.output('list', obj))
if __name__ == '__main__':
unittest.main()
|
|
import os
import re
import codecs
import numpy as np
import theano
models_path = "./models"
eval_path = "./evaluation"
eval_temp = os.path.join(eval_path, "temp")
eval_script = os.path.join(eval_path, "conlleval")
def get_name(parameters):
"""
Generate a model name from its parameters.
"""
l = []
for k, v in parameters.items():
if type(v) is str and "/" in v:
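# keep only the basename: the text after the last '/'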
l.append((k, v[::-1][:v[::-1].index('/')][::-1]))
else:
l.append((k, v))
name = ",".join(["%s=%s" % (k, str(v).replace(',', '')) for k, v in l])
return "".join(i for i in name if i not in "\/:*?<>|")
def set_values(name, param, pretrained):
"""
Initialize a network parameter with pretrained values.
We check that sizes are compatible.
"""
param_value = param.get_value()
if pretrained.size != param_value.size:
raise Exception(
"Size mismatch for parameter %s. Expected %i, found %i."
% (name, param_value.size, pretrained.size)
)
param.set_value(np.reshape(
pretrained, param_value.shape
).astype(np.float32))
def shared(shape, name):
"""
Create a shared object of a numpy array.
"""
if len(shape) == 1:
value = np.zeros(shape) # biases are initialized with zeros
else:
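# Glorot-style uniform init: range scales with sqrt(6 / (fan_in + fan_out))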
drange = np.sqrt(6. / (np.sum(shape)))
value = drange * np.random.uniform(low=-1.0, high=1.0, size=shape)
return theano.shared(value=value.astype(theano.config.floatX), name=name)
def create_dico(item_list):
"""
Create a dictionary of items from a list of list of items.
"""
assert type(item_list) is list
dico = {}
for items in item_list:
for item in items:
if item not in dico:
dico[item] = 1
else:
dico[item] += 1
return dico
def create_mapping(dico):
"""
Create a mapping (item to ID / ID to item) from a dictionary.
Items are ordered by decreasing frequency.
"""
sorted_items = sorted(dico.items(), key=lambda x: (-x[1], x[0]))
id_to_item = {i: v[0] for i, v in enumerate(sorted_items)}
item_to_id = {v: k for k, v in id_to_item.items()}
return item_to_id, id_to_item
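# A quick illustration of the ordering (most frequent item gets the lowest ID):
# create_mapping({'the': 3, 'cat': 1}) == ({'the': 0, 'cat': 1}, {0: 'the', 1: 'cat'})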
def zero_digits(s):
"""
Replace every digit in a string by a zero.
"""
return re.sub(r'\d', '0', s)
def iob2(tags):
"""
Check that tags have a valid IOB format.
Tags in IOB1 format are converted to IOB2.
"""
for i, tag in enumerate(tags):
if tag == 'O':
continue
split = tag.split('-')
if len(split) != 2 or split[0] not in ['I', 'B']:
return False
if split[0] == 'B':
continue
elif i == 0 or tags[i - 1] == 'O': # conversion IOB1 to IOB2
tags[i] = 'B' + tag[1:]
elif tags[i - 1][1:] == tag[1:]:
continue
else: # conversion IOB1 to IOB2
tags[i] = 'B' + tag[1:]
return True
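# Illustration: IOB1 tags are patched to IOB2 in place, e.g.
# tags = ['I-PER', 'I-PER', 'O', 'I-LOC']
# iob2(tags) # -> True; tags is now ['B-PER', 'I-PER', 'O', 'B-LOC']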
def iob_iobes(tags):
"""
IOB -> IOBES
"""
new_tags = []
for i, tag in enumerate(tags):
if tag == 'O':
new_tags.append(tag)
elif tag.split('-')[0] == 'B':
if i + 1 != len(tags) and \
tags[i + 1].split('-')[0] == 'I':
new_tags.append(tag)
else:
new_tags.append(tag.replace('B-', 'S-'))
elif tag.split('-')[0] == 'I':
if i + 1 < len(tags) and \
tags[i + 1].split('-')[0] == 'I':
new_tags.append(tag)
else:
new_tags.append(tag.replace('I-', 'E-'))
else:
raise Exception('Invalid IOB format!')
return new_tags
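# Illustration:
# iob_iobes(['B-PER', 'I-PER', 'O', 'B-LOC']) # -> ['B-PER', 'E-PER', 'O', 'S-LOC']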
def iobes_iob(tags):
"""
IOBES -> IOB
"""
new_tags = []
for i, tag in enumerate(tags):
if tag.split('-')[0] == 'B':
new_tags.append(tag)
elif tag.split('-')[0] == 'I':
new_tags.append(tag)
elif tag.split('-')[0] == 'S':
new_tags.append(tag.replace('S-', 'B-'))
elif tag.split('-')[0] == 'E':
new_tags.append(tag.replace('E-', 'I-'))
elif tag.split('-')[0] == 'O':
new_tags.append(tag)
else:
raise Exception('Invalid format!')
return new_tags
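# Illustration (inverse of iob_iobes):
# iobes_iob(['B-PER', 'E-PER', 'O', 'S-LOC']) # -> ['B-PER', 'I-PER', 'O', 'B-LOC']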
def insert_singletons(words, singletons, p=0.5):
"""
Replace singletons by the unknown word with a probability p.
"""
new_words = []
for word in words:
if word in singletons and np.random.uniform() < p:
new_words.append(0)
else:
new_words.append(word)
return new_words
def pad_word_chars(words):
"""
Pad the characters of the words in a sentence.
Input:
- list of lists of ints (list of words, a word being a list of char indexes)
Output:
- padded list of lists of ints
- padded list of lists of ints (where chars are reversed)
- list of ints corresponding to the index of the last character of each word
"""
max_length = max([len(word) for word in words])
char_for = []
char_rev = []
char_pos = []
for word in words:
padding = [0] * (max_length - len(word))
char_for.append(word + padding)
char_rev.append(word[::-1] + padding)
char_pos.append(len(word) - 1)
return char_for, char_rev, char_pos
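# Illustration with two words given as lists of char indexes:
# pad_word_chars([[3, 7], [5]])
# -> ([[3, 7], [5, 0]], [[7, 3], [5, 0]], [1, 0])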
def create_input(data, parameters, add_label, singletons=None, wid=None):
"""
Take sentence data and return an input for
the training or the evaluation function.
"""
words = data['words']
chars = data['chars']
if singletons is not None:
words = insert_singletons(words, singletons)
if parameters['cap_dim']:
caps = data['caps']
char_for, char_rev, char_pos = pad_word_chars(chars)
input = []
if parameters['word_dim']:
input.append(words)
if parameters['char_dim']:
input.append(char_for)
if parameters['char_bidirect']:
input.append(char_rev)
input.append(char_pos)
if parameters['cap_dim']:
input.append(caps)
if add_label:
input.append(data['tags'])
if wid is not None:
s_len = len(input[0])
input.append([wid] * s_len)
else:
# insert a dummy worker id
s_len = len(input[0])
input.append([0] * s_len)
return input
def evaluate(parameters, f_eval, raw_sentences, parsed_sentences,
id_to_tag, dictionary_tags):
"""
Evaluate current model using CoNLL script.
"""
n_tags = len(id_to_tag)
predictions = []
count = np.zeros((n_tags, n_tags), dtype=np.int32)
for raw_sentence, data in zip(raw_sentences, parsed_sentences):
input = create_input(data, parameters, False)
if parameters['crf']:
y_preds = np.array(f_eval(*input))[1:-1] # strip the CRF's padded begin/end tags
else:
y_preds = f_eval(*input).argmax(axis=1)
y_reals = np.array(data['tags']).astype(np.int32)
assert len(y_preds) == len(y_reals)
p_tags = [id_to_tag[y_pred] for y_pred in y_preds]
r_tags = [id_to_tag[y_real] for y_real in y_reals]
if parameters['tag_scheme'] == 'iobes':
p_tags = iobes_iob(p_tags)
r_tags = iobes_iob(r_tags)
for i, (y_pred, y_real) in enumerate(zip(y_preds, y_reals)):
new_line = " ".join(raw_sentence[i][:-1] + [r_tags[i], p_tags[i]])
predictions.append(new_line)
count[y_real, y_pred] += 1
predictions.append("")
# Write predictions to disk and run CoNLL script externally
eval_id = np.random.randint(1000000, 2000000)
output_path = os.path.join(eval_temp, "eval.%i.output" % eval_id)
scores_path = os.path.join(eval_temp, "eval.%i.scores" % eval_id)
with codecs.open(output_path, 'w', 'utf8') as f:
f.write("\n".join(predictions))
os.system("%s < %s > %s" % (eval_script, output_path, scores_path))
# CoNLL evaluation results
eval_lines = [l.rstrip() for l in codecs.open(scores_path, 'r', 'utf8')]
for line in eval_lines:
print line
# Remove temp files
os.remove(output_path)
os.remove(scores_path)
# Confusion matrix with accuracy for each tag
print ("{: >2}{: >7}{: >7}%s{: >9}" % ("{: >7}" * n_tags)).format(
"ID", "NE", "Total",
*([id_to_tag[i] for i in xrange(n_tags)] + ["Percent"])
)
for i in xrange(n_tags):
print ("{: >2}{: >7}{: >7}%s{: >9}" % ("{: >7}" * n_tags)).format(
str(i), id_to_tag[i], str(count[i].sum()),
*([count[i][j] for j in xrange(n_tags)] +
["%.3f" % (count[i][i] * 100. / max(1, count[i].sum()))])
)
# Global accuracy
print "%i/%i (%.5f%%)" % (
count.trace(), count.sum(), 100. * count.trace() / max(1, count.sum())
)
# F1 on all entities
return float(eval_lines[1].strip().split()[-1])
|
|
from TimelineView import *
from random import random
from PyQt4 import QtOpenGL
from time import time
import sys # used below for QApplication(sys.argv)
_keyid = 1
class TestKey():
def __init__( self, track ):
global _keyid
_keyid += 1
self.name = 'key - %d' % _keyid
# self.length = random()*500/1000.0
self.length = 1
self.pos = ( random()*1000 + 50 ) /1000.0
self.track = track
self.value = 100
self.mode = TWEEN_MODE_BEZIER
self.preTPValue = (0.5, 0 )
self.postTPValue = (0.5, 0 )
def isResizable( self ):
return False
class TestEventKey( TestKey ):
def isResizable( self ):
return True
_trackId = 0
class TestTrack():
def __init__( self, name, pos = None ):
global _trackId
if pos is None:
pos = _trackId * 25 # stack tracks 25 units apart by default
_trackId += 1
self.name = name
if self.isResizable():
self.keys = [
TestEventKey( self ),
TestEventKey( self ),
TestEventKey( self ),
# TestKey( self )
]
else:
self.keys = [
TestKey( self ),
TestKey( self ),
TestKey( self ),
TestKey( self )
]
self.pos = pos
def isResizable( self ):
return False
def isCurve( self ):
return True
class TestEventTrack( TestTrack ):
def isResizable( self ):
return True
def isCurve( self ):
return False
class TestEvent():
def __init__( self ):
self.name = 'event'
dataset = [
TestTrack( 'track' ),
TestTrack( 'track0' ),
TestEventTrack( 'track1' ),
TestTrack( 'track2' ),
TestTrack( 'track3' ),
TestTrack( 'track1' ),
TestEventTrack( 'track2' ),
TestTrack( 'track3' ),
TestTrack( 'track1' ),
TestEventTrack( 'track2' ),
TestTrack( 'track3' )
]
class TestMarker(object):
pass
class TestTimeline( TimelineView ):
def getTrackNodes( self ):
return dataset
def getKeyNodes( self, trackNode ):
return trackNode.keys
def getKeyParam( self, keyNode ): #pos, length, resizable
return keyNode.pos, keyNode.length, keyNode.isResizable()
def getKeyBezierPoints( self, keyNode ):
( tpx0, tpy0 ) = keyNode.preTPValue
( tpx1, tpy1 ) = keyNode.postTPValue
return tpx0, tpy0, tpx1, tpy1
def getKeyCurveValue( self, keyNode ):
return keyNode.value
def getKeyMode( self, keyNode ):
return keyNode.mode
def getParentTrackNode( self, keyNode ):
return keyNode.track
def updateTrackContent( self, track, trackNode, **option ):
# track.getHeaderItem().setText( trackNode.name )
pass
def updateKeyContent( self, key, keyNode, **option ):
pass
def isTrackVisible( self, track ):
return True
def isCurveTrack( self, track ):
return track.isCurve()
def getTrackPos( self, track ):
return track.pos
def formatPos( self, pos ):
i = int( pos/1000 )
f = int( pos - i*1000 )
return '%d:%02d' % ( i, f/10 )
def getRulerParam( self ):
return dict( zoom = 1 )
# return dict( zoom = 5 )
def createTrackItem( self, node ):
if node.isResizable():
return TimelineEventTrackItem()
else:
return TimelineTrackItem()
class TestFrame( QtGui.QFrame ):
def __init__( self ):
super( TestFrame, self ).__init__()
layout = QtGui.QVBoxLayout( self )
layout.setMargin( 0 )
timeline = TestTimeline()
layout.addWidget( timeline )
timeline.setRange( 0, 3.2 )
timeline.rebuild()
timeline.setTrackSelection( [ dataset[0] ] )
self.testMarker = TestMarker()
timeline.addMarker( self.testMarker )
self.testMarker2 = TestMarker()
mitem = timeline.addMarker( self.testMarker2 )
mitem.setText( 'FightStart')
mitem.setTimePos( 2.3 )
timeline.keyChanged.connect( self.onKeyChanged )
timeline.keyBezierPointChanged.connect( self.onKeyBezierPointChanged )
timeline.keyCurveValueChanged.connect( self.onKeyCurveValueChanged )
timeline.markerChanged.connect( self.onMarkerChanged )
self.timer = QtCore.QTimer( self )
self.timer.timeout.connect( self.onTimer )
self.timer.setInterval( 100 )
self.timer.start()
self.t0 = time()
def onKeyChanged( self, key, pos, length ):
key.pos = pos
key.length = length
def onKeyCurveValueChanged( self, key, value ):
key.value = value
def onKeyBezierPointChanged( self, key, tpx0, tpy0, tpx1, tpy1 ):
key.preTPValue = ( tpx0, tpy0 )
key.postTPValue = ( tpx1, tpy1 )
def onMarkerChanged( self, marker, pos ):
print 'marker changed', marker, pos
def onTimer( self ):
t1 = time()
# print '%.2f' % (t1- self.t0)
self.t0 = t1
app = QtGui.QApplication( sys.argv )
styleSheetName = 'gii.qss'
app.setStyleSheet(
open( '/Users/tommo/prj/gii/data/theme/' + styleSheetName ).read()
)
frame = TestFrame()
frame.resize( 600, 300 )
frame.show()
frame.raise_()
# # timeline.setZoom( 10 )
# # timeline.selectTrack( dataset[1] )
# timeline.selectKey( dataset[1].keys[0] )
app.exec_()
|
|
"""Functions that select points from a point cloud and put them in a new point cloud."""
import math
import os
import shapefile
import shapely
import sys
from shapely.geometry import Point
from shapely.errors import WKTReadingError
from shapely.wkt import loads
from shapely.geometry import box
from shapely.vectorized import contains
import numpy as np
from laserchicken.keys import point
from laserchicken import kd_tree
from laserchicken.utils import copy_point_cloud, add_metadata
def select_equal(point_cloud, attribute, value, return_mask=False):
"""
Return the selection of the input point cloud that contains only points with a given attribute equal to some value.
If a list of values is given, select the points corresponding to any of the provided values.
:param point_cloud: Input point cloud.
:param attribute: The attribute name used for selection
:param value: The value(s) to compare the attribute to
:param return_mask: If true, return the mask corresponding to the selection
:return:
"""
_check_valid_arguments(attribute, point_cloud)
# np.array(value)[..., None] broadcasts k requested values against the n
# attribute values, giving a (k, n) comparison matrix for list-valued input
mask = point_cloud[point][attribute]['data'] == np.array(value)[..., None]
if mask.ndim > 1:
mask = np.any(mask, axis=0) # a point is kept if it matches any of the values
if return_mask:
return mask
point_cloud_filtered = copy_point_cloud(point_cloud, mask)
add_metadata(point_cloud_filtered, sys.modules[__name__],
{'attribute': attribute, 'value': value})
return point_cloud_filtered
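# Minimal usage sketch (the attribute name 'raw_classification' is illustrative):
# ground_and_water = select_equal(pc, 'raw_classification', [2, 9])
# ground_mask = select_equal(pc, 'raw_classification', 2, return_mask=True)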
def select_above(point_cloud, attribute, threshold, return_mask=False):
"""
Return the selection of the input point cloud that contains only points with a given attribute above some value.
:param point_cloud: Input point cloud
:param attribute: The attribute name used for selection
:param threshold: The threshold value used for selection
:param return_mask: If true, return the mask corresponding to the selection
:return:
"""
_check_valid_arguments(attribute, point_cloud)
mask = point_cloud[point][attribute]['data'] > threshold
if return_mask:
return mask
point_cloud_filtered = copy_point_cloud(point_cloud, mask)
add_metadata(point_cloud_filtered, sys.modules[__name__],
{'attribute': attribute, 'threshold': threshold})
return point_cloud_filtered
def select_below(point_cloud, attribute, threshold, return_mask=False):
"""
Return the selection of the input point cloud that contains only points with a given attribute below some value.
:param point_cloud: Input point cloud
:param attribute: The attribute name used for selection
:param threshold: The threshold value used for selection
:param return_mask: If true, return the mask corresponding to the selection
:return:
"""
_check_valid_arguments(attribute, point_cloud)
mask = point_cloud[point][attribute]['data'] < threshold
if return_mask:
return mask
point_cloud_filtered = copy_point_cloud(point_cloud, mask)
add_metadata(point_cloud_filtered, sys.modules[__name__],
{'attribute': attribute, 'threshold': threshold})
return point_cloud_filtered
def _check_valid_arguments(attribute, point_cloud):
"""
Raise if arguments are not valid for select_above/select_below functions.
:param attribute:
:param point_cloud:
:return: None
"""
if point_cloud is None:
raise ValueError('Input point cloud cannot be None.')
if attribute not in point_cloud[point]:
raise ValueError('Attribute key {} for selection not found in point cloud.'.format(attribute))
def select_polygon(point_cloud, polygon_string, read_from_file=False, return_mask=False):
"""
Return the selection of the input point cloud that contains only points within a given polygon.
:param point_cloud: Input point cloud
:param polygon_string: Polygon, either defined in a WKT string or in a file (WKT and ESRI formats supported)
:param read_from_file: if true, polygon is expected to be the name of the file where the polygon is defined
:param return_mask: if true, return a mask of selected points, rather than point cloud
:return:
"""
if point_cloud is None:
raise ValueError('Input point cloud cannot be None.')
if not isinstance(polygon_string, str):
raise ValueError('Polygon (or its filename) should be a string')
if read_from_file:
format = os.path.splitext(polygon_string)[1].lower()
reader = _get_polygon_reader(format)
polygon = reader(polygon_string)
else:
polygon = _load_polygon(polygon_string)
if isinstance(polygon, shapely.geometry.polygon.Polygon):
points_in = _contains(point_cloud, polygon)
elif isinstance(polygon, shapely.geometry.multipolygon.MultiPolygon):
points_in = []
count = 1
for poly in polygon:
if not (count % 200) or count == len(polygon):
print('Checking polygon {}/{}...'.format(count, len(polygon)))
points_in.extend(_contains(point_cloud, poly))
count += 1
print('{} points found in {} polygons.'.format(len(points_in), len(polygon)))
else:
raise ValueError('Geometry is neither a Polygon nor a MultiPolygon.')
if return_mask:
mask = np.zeros(len(point_cloud['vertex']['x']['data']), dtype=bool)
mask[points_in] = True
return mask
else:
point_cloud_filtered = copy_point_cloud(point_cloud, points_in)
add_metadata(point_cloud_filtered, sys.modules[__name__],
{'polygon_string': polygon_string,
'read_from_file': read_from_file})
return point_cloud_filtered
def _read_wkt_file(path):
with open(path) as f:
content = f.readlines()
content = [_load_polygon(x.strip()) for x in content]
geom = shapely.geometry.MultiPolygon(content) if len(content) > 1 else content[0]
return geom
def _read_shp_file(path):
shape = shapefile.Reader(path)
features = shape.shapeRecords()
shp_geoms = [shapely.geometry.shape(feature.shape.__geo_interface__)
for feature in features]
shp_geom = shapely.geometry.MultiPolygon(shp_geoms) if len(shp_geoms) > 1 else shp_geoms[0]
return shp_geom
polygon_readers = {
'.wkt': _read_wkt_file,
'.shp': _read_shp_file
}
def _get_polygon_reader(format):
if format not in polygon_readers:
raise NotImplementedError(
'Polygon file format {} unknown. Implemented formats are: {}'.format(
format, ', '.join(polygon_readers.keys())))
else:
return polygon_readers[format]
def _load_polygon(string):
try:
return loads(string)
except WKTReadingError:
raise ValueError('Polygon is invalid. --> {}'.format(string))
def _contains(pc, polygon):
"""
Return indices of points in the point cloud that are contained by a polygon, i.e. all points strictly within the
polygon's boundaries; points overlapping the boundary are excluded.
:param pc: point cloud in
:param polygon: containing polygon
:return: point indices
"""
x = pc[point]['x']['data']
y = pc[point]['y']['data']
points_in = []
if not polygon.is_valid:
raise ValueError('Invalid polygon in input')
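# Cheap pre-filter: query the kd-tree with the circle circumscribing the
# polygon's bounding box, then run the exact point-in-polygon test only on
# those candidate points.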
mbr = polygon.envelope
point_box = box(np.min(x), np.min(y), np.max(x), np.max(y))
if point_box.intersects(mbr):
(x_min, y_min, x_max, y_max) = mbr.bounds
rad = math.ceil(math.sqrt(math.pow(x_max - x_min, 2) +
math.pow(y_max - y_min, 2)) / 2)
p = [x_min + ((x_max - x_min) / 2), y_min + ((y_max - y_min) / 2)]
tree = kd_tree.get_kdtree_for_pc(pc)
indices = np.sort(tree.query_ball_point(x=p, r=rad))
if len(indices) > 0:
mask = contains(polygon, x[indices], y[indices])
points_in.extend(indices[mask])
return points_in
|
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
from textwrap import dedent
from mock import patch
from pants.backend.jvm.targets.java_tests import JavaTests
from pants.backend.jvm.tasks.junit_run import JUnitRun
from pants.backend.python.targets.python_tests import PythonTests
from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.resources import Resources
from pants.ivy.bootstrapper import Bootstrapper
from pants.ivy.ivy_subsystem import IvySubsystem
from pants.java.distribution.distribution import DistributionLocator
from pants.java.executor import SubprocessExecutor
from pants.util.contextutil import environment_as
from pants.util.dirutil import safe_file_dump
from pants.util.timeout import TimeoutReached
from pants_test.jvm.jvm_tool_task_test_base import JvmToolTaskTestBase
from pants_test.subsystem.subsystem_util import subsystem_instance
class JUnitRunnerTest(JvmToolTaskTestBase):
"""Tests for junit_run._JUnitRunner class"""
@classmethod
def task_type(cls):
return JUnitRun
@property
def alias_groups(self):
return super(JUnitRunnerTest, self).alias_groups.merge(BuildFileAliases(
targets={
'java_tests': JavaTests,
'python_tests': PythonTests,
},
))
def test_junit_runner_success(self):
self.execute_junit_runner(
dedent("""
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class FooTest {
@Test
public void testFoo() {
assertTrue(5 > 3);
}
}
""")
)
def test_junit_runner_failure(self):
with self.assertRaises(TaskError) as cm:
self.execute_junit_runner(
dedent("""
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class FooTest {
@Test
public void testFoo() {
assertTrue(5 < 3);
}
}
""")
)
self.assertEqual([t.name for t in cm.exception.failed_targets], ['foo_test'])
def test_junit_runner_error(self):
with self.assertRaises(TaskError) as cm:
self.execute_junit_runner(
dedent("""
import org.junit.Test;
public class FooTest {
@Test
public void testFoo() {
throw new RuntimeException("test error");
}
}
""")
)
self.assertEqual([t.name for t in cm.exception.failed_targets], ['foo_test'])
def test_junit_runner_timeout_success(self):
"""When we set a timeout and don't force failure, succeed."""
with patch('pants.task.testrunner_task_mixin.Timeout') as mock_timeout:
self.set_options(timeout_default=1)
self.set_options(timeouts=True)
self.execute_junit_runner(
dedent("""
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class FooTest {
@Test
public void testFoo() {
assertTrue(5 > 3);
}
}
""")
)
# Ensures that Timeout is instantiated with a 1 second timeout.
args, kwargs = mock_timeout.call_args
self.assertEqual(args, (1,))
def test_junit_runner_timeout_fail(self):
"""When we set a timeout and force a failure, fail."""
with patch('pants.task.testrunner_task_mixin.Timeout') as mock_timeout:
mock_timeout().__exit__.side_effect = TimeoutReached(1)
self.set_options(timeout_default=1)
self.set_options(timeouts=True)
with self.assertRaises(TaskError) as cm:
self.execute_junit_runner(
dedent("""
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class FooTest {
@Test
public void testFoo() {
assertTrue(5 > 3);
}
}
""")
)
self.assertEqual([t.name for t in cm.exception.failed_targets], ['foo_test'])
# Ensures that Timeout is instantiated with a 1 second timeout.
args, kwargs = mock_timeout.call_args
self.assertEqual(args, (1,))
def execute_junit_runner(self, content, create_some_resources=True, **kwargs):
# Create the temporary base test directory
test_rel_path = 'tests/java/org/pantsbuild/foo'
test_abs_path = self.create_dir(test_rel_path)
# Generate the temporary java test source code.
test_java_file_rel_path = os.path.join(test_rel_path, 'FooTest.java')
test_java_file_abs_path = self.create_file(test_java_file_rel_path, content)
# Create the temporary classes directory under work dir
test_classes_abs_path = self.create_workdir_dir(test_rel_path)
# Invoke ivy to resolve classpath for junit.
classpath_file_abs_path = os.path.join(test_abs_path, 'junit.classpath')
with subsystem_instance(IvySubsystem) as ivy_subsystem:
distribution = DistributionLocator.cached(jdk=True)
ivy = Bootstrapper(ivy_subsystem=ivy_subsystem).ivy()
ivy.execute(args=['-cachepath', classpath_file_abs_path,
'-dependency', 'junit', 'junit-dep', '4.10'],
executor=SubprocessExecutor(distribution=distribution))
with open(classpath_file_abs_path) as fp:
classpath = fp.read()
# Now directly invoking javac to compile the test java code into java class
# so later we can inject the class into products mapping for JUnitRun to execute
# the test on.
javac = distribution.binary('javac')
subprocess.check_call(
[javac, '-d', test_classes_abs_path, '-cp', classpath, test_java_file_abs_path])
# If a target_name is specified, create a target with it, otherwise create a java_tests target.
if 'target_name' in kwargs:
target = self.target(kwargs['target_name'])
else:
target = self.create_library(test_rel_path, 'java_tests', 'foo_test', ['FooTest.java'])
target_roots = []
if create_some_resources:
# Create a synthetic resource target.
target_roots.append(self.make_target('some_resources', Resources))
target_roots.append(target)
# Set the context with the two targets, one java_tests target and
# one synthetic resources target.
# The synthetic resources target is to make sure we won't regress
# in the future with bug like https://github.com/pantsbuild/pants/issues/508. Note
# in that bug, the resources target must be the first one in the list.
context = self.context(target_roots=target_roots)
# Before we run the task, we need to inject the "runtime_classpath" with
# the compiled test java classes that JUnitRun will know which test
# classes to execute. In a normal run, this "runtime_classpath" will be
# populated by java compilation step.
self.populate_runtime_classpath(context=context, classpath=[test_classes_abs_path])
# Finally execute the task.
self.execute(context)
def test_junit_runner_raises_no_error_on_non_junit_target(self):
"""Run pants against a `python_tests` target, but set an option for the `test.junit` task. This
should execute without error.
"""
self.add_to_build_file('foo', dedent('''
python_tests(
name='hello',
sources=['some_file.py'],
)
'''
))
self.set_options(test='#abc')
task = self.create_task(self.context(target_roots=[self.target('foo:hello')]))
task.execute()
def test_empty_sources(self):
self.add_to_build_file('foo', dedent('''
java_tests(
name='empty',
sources=[],
)
'''
))
task = self.create_task(self.context(target_roots=[self.target('foo:empty')]))
with self.assertRaisesRegexp(TargetDefinitionException,
r'must include a non-empty set of sources'):
task.execute()
def test_allow_empty_sources(self):
self.add_to_build_file('foo', dedent('''
java_tests(
name='empty',
sources=[],
)
'''
))
self.set_options(allow_empty_sources=True)
context = self.context(target_roots=[self.target('foo:empty')])
self.populate_runtime_classpath(context=context)
self.create_task(context).execute()
def test_request_classes_by_source(self):
"""`classes_by_source` is expensive to compute: confirm that it is only computed when needed."""
# Class names (with and without a method name) should not trigger.
self.assertFalse(JUnitRun.request_classes_by_source(['com.goo.ber']))
self.assertFalse(JUnitRun.request_classes_by_source(['com.goo.ber#method']))
# Existing files (with and without the method name) should trigger.
srcfile = os.path.join(self.test_workdir, 'this.is.a.source.file.scala')
safe_file_dump(srcfile, 'content!')
self.assertTrue(JUnitRun.request_classes_by_source([srcfile]))
self.assertTrue(JUnitRun.request_classes_by_source(['{}#method'.format(srcfile)]))
def test_junit_runner_extra_jvm_options(self):
self.make_target(
spec='foo:foo_test',
target_type=JavaTests,
sources=['FooTest.java'],
extra_jvm_options=['-Dexample.property=1'],
)
self.execute_junit_runner(dedent("""
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class FooTest {
@Test
public void testFoo() {
String exampleProperty = System.getProperty("example.property");
assertTrue(exampleProperty != null && exampleProperty.equals("1"));
}
}
"""),
target_name='foo:foo_test'
)
def test_junit_runner_multiple_extra_jvm_options(self):
self.make_target(
spec='foo:foo_test',
target_type=JavaTests,
sources=['FooTest.java'],
extra_jvm_options=['-Dexample.property1=1','-Dexample.property2=2'],
)
self.execute_junit_runner(dedent("""
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class FooTest {
@Test
public void testFoo() {
String exampleProperty1 = System.getProperty("example.property1");
assertTrue(exampleProperty1 != null && exampleProperty1.equals("1"));
String exampleProperty2 = System.getProperty("example.property2");
assertTrue(exampleProperty2 != null && exampleProperty2.equals("2"));
String exampleProperty3 = System.getProperty("example.property3");
assertTrue(exampleProperty3 == null);
}
}
"""),
target_name='foo:foo_test'
)
def test_junit_runner_extra_env_vars(self):
self.make_target(
spec='foo:foo_test',
target_type=JavaTests,
sources=['FooTest.java'],
extra_env_vars={
'HELLO': 27,
'THERE': 32,
},
)
self.make_target(
spec='bar:bar_test',
target_type=JavaTests,
sources=['FooTest.java'],
extra_env_vars={
'THE_ANSWER': 42,
'HELLO': 12,
},
)
self.execute_junit_runner(dedent("""
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class FooTest {
@Test
public void testFoo() {
assertEquals("27", System.getenv().get("HELLO"));
assertEquals("32", System.getenv().get("THERE"));
}
}
"""), target_name='foo:foo_test')
# Execute twice in a row to make sure the environment changes aren't sticky.
self.execute_junit_runner(dedent("""
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class FooTest {
@Test
public void testFoo() {
assertEquals("12", System.getenv().get("HELLO"));
assertEquals("42", System.getenv().get("THE_ANSWER"));
assertFalse(System.getenv().containsKey("THERE"));
}
}
"""), target_name='bar:bar_test', create_some_resources=False)
def test_junit_runner_extra_env_vars_none(self):
with environment_as(THIS_VARIABLE="12", THAT_VARIABLE="This is a variable."):
self.make_target(
spec='foo:foo_test',
target_type=JavaTests,
sources=['FooTest.java'],
extra_env_vars={
'HELLO': None,
'THERE': False,
'THIS_VARIABLE': None
},
)
self.execute_junit_runner(dedent("""
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class FooTest {
@Test
public void testFoo() {
assertEquals("False", System.getenv().get("THERE"));
assertEquals("This is a variable.", System.getenv().get("THAT_VARIABLE"));
assertFalse(System.getenv().containsKey("HELLO"));
assertFalse(System.getenv().containsKey("THIS_VARIABLE"));
}
}
"""), target_name='foo:foo_test')
|
|
""" A fast implementation of the Voigt function.
"""
# p2.6+ compatibility
from __future__ import division, print_function, unicode_literals
import numpy as np
from math import sqrt, pi
sqrtpi = sqrt(pi)
# u values corresponding to H table values below
U = np.arange(0, 20, 0.01)
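# H1 (and H3, tabulated further below) appear to be the odd-order coefficients
# of the Harris series expansion of the Voigt function,
# H(a, u) ~ H0(u) + a*H1(u) + a**2*H2(u) + a**3*H3(u),
# where H0(u) = exp(-u**2) and the even-order terms have closed forms.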
# Table of h1 and h3 for use in Voigt function
H1 = np.array(
[-1.128379167096,-1.128153506307,-1.127476704444,-1.126349302806,
-1.124772202831,-1.122746665023,-1.120274307436,-1.117357103734,
-1.113997380833,-1.110197816115,-1.105961434223,-1.101291603457,
-1.096192031753,-1.090666762269,-1.084720168573,-1.078356949458,
-1.071582123367,-1.064401022458,-1.056819286313,-1.048842855295,
-1.040477963566,-1.031731131783,-1.022609159473,-1.013119117112,
-1.003268337903,-0.993064409287,-0.982515164176,-0.971628671947,
-0.960413229188,-0.948877350223,-0.937029757433,-0.924879371370,
-0.912435300708,-0.899706832010,-0.886703419362,-0.873434673861,
-0.859910352995,-0.846140349907,-0.832134682585,-0.817903482971,
-0.803456986018,-0.788805518705,-0.773959489031,-0.758929374994,
-0.743725713581,-0.728359089780,-0.712840125622,-0.697179469285,
-0.681387784257,-0.665475738580,-0.649453994194,-0.633333196387,
-0.617123963365,-0.600836875965,-0.584482467511,-0.568071213835,
-0.551613523468,-0.535119728024,-0.518600072770,-0.502064707417,
-0.485523677115,-0.468986913693,-0.452464227121,-0.435965297228,
-0.419499665677,-0.403076728196,-0.386705727090,-0.370395744018,
-0.354155693073,-0.337994314132,-0.321920166521,-0.305941622964,
-0.290066863852,-0.274303871809,-0.258660426580,-0.243144100224,
-0.227762252632,-0.212522027360,-0.197430347784,-0.182493913569,
-0.167719197466,-0.153112442425,-0.138679659026,-0.124426623230,
-0.110358874442,-0.096481713891,-0.082800203315,-0.069319163960,
-0.056043175874,-0.042976577504,-0.030123465589,-0.017487695339,
-0.005072880900, 0.007117603905, 0.019080624559, 0.030813284570,
0.042312923934, 0.053577117337, 0.064603672120, 0.075390626005,
0.085936244587, 0.096239018606, 0.106297661007, 0.116111103790,
0.125678494668, 0.134999193535, 0.144072768747, 0.152898993242,
0.161477840493, 0.169809480309, 0.177894274488, 0.185732772342,
0.193325706088, 0.200673986130, 0.207778696230, 0.214641088574,
0.221262578761, 0.227644740700, 0.233789301442, 0.239698135944,
0.245373261784, 0.250816833826, 0.256031138848, 0.261018590143,
0.265781722099, 0.270323184764, 0.274645738407, 0.278752248083,
0.282645678207, 0.286329087142, 0.289805621818, 0.293078512380,
0.296151066866, 0.299026665943, 0.301708757680, 0.304200852389,
0.306506517515, 0.308629372606, 0.310573084348, 0.312341361679,
0.313937950984, 0.315366631379, 0.316631210086, 0.317735517896,
0.318683404738, 0.319478735348, 0.320125385037, 0.320627235572,
0.320988171168, 0.321212074587, 0.321302823355, 0.321264286099,
0.321100318992, 0.320814762332, 0.320411437226, 0.319894142361,
0.319266651125, 0.318532708393, 0.317696027886, 0.316760289435,
0.315729136408, 0.314606173263, 0.313394963225, 0.312099026077,
0.310721836083, 0.309266820022, 0.307737355347, 0.306136768458,
0.304468333094, 0.302735268836, 0.300940739729, 0.299087853007,
0.297179657938, 0.295219144761, 0.293209243745, 0.291152824341,
0.289052694434, 0.286911599696, 0.284732223035, 0.282517184136,
0.280269039094, 0.277990280130, 0.275683335398, 0.273350568872,
0.270994280310, 0.268616705302, 0.266220015385, 0.263806318229,
0.261377657901, 0.258936015180, 0.256483307947, 0.254021391629,
0.251552059701, 0.249077044244, 0.246598016547, 0.244116587773,
0.241634309652, 0.239152675232, 0.236673119663, 0.234197021025,
0.231725701178, 0.229260426666, 0.226802409630, 0.224352808763,
0.221912730283, 0.219483228934, 0.217065309004, 0.214659925362,
0.212267984517, 0.209890345679, 0.207527821848, 0.205181180899,
0.202851146686, 0.200538400145, 0.198243580407, 0.195967285910,
0.193710075514, 0.191472469618, 0.189254951272, 0.187057967287,
0.184881929338, 0.182727215069, 0.180594169179, 0.178483104495,
0.176394303104, 0.174328017332, 0.172284470848, 0.170263859698,
0.168266353322, 0.166292095577, 0.164341205726, 0.162413779421,
0.160509889676, 0.158629587805, 0.156772904362, 0.154939850049,
0.153130416616, 0.151344577731, 0.149582289845, 0.147843493024,
0.146128111769, 0.144436055819, 0.142767220922, 0.141121489599,
0.139498731881, 0.137898806029, 0.136321559226, 0.134766828262,
0.133234440187, 0.131724212947, 0.130235956007, 0.128769470940,
0.127324552012, 0.125900986736, 0.124498556410, 0.123117036639,
0.121756197832, 0.120415805687, 0.119095621649, 0.117795403363,
0.116514905091, 0.115253878132, 0.114012071207, 0.112789230837,
0.111585101702, 0.110399426986, 0.109231948698, 0.108082407992,
0.106950545455, 0.105836101395, 0.104738816107, 0.103658430123,
0.102594684457, 0.101547320830, 0.100516081880, 0.099500711373,
0.098500954380, 0.097516557468, 0.096547268856, 0.095592838578,
0.094653018623, 0.093727563075, 0.092816228235, 0.091918772736,
0.091034957656, 0.090164546608, 0.089307305837, 0.088463004296,
0.087631413725, 0.086812308713, 0.086005466764, 0.085210668341,
0.084427696923, 0.083656339036, 0.082896384293, 0.082147625423,
0.081409858291, 0.080682881921, 0.079966498505, 0.079260513420,
0.078564735226, 0.077878975668, 0.077203049679, 0.076536775369,
0.075879974015, 0.075232470052, 0.074594091054, 0.073964667719,
0.073344033845, 0.072732026308, 0.072128485038, 0.071533252989,
0.070946176112, 0.070367103323, 0.069795886469, 0.069232380298,
0.068676442412, 0.068127933258, 0.067586716054, 0.067052656776,
0.066525624108, 0.066005489409, 0.065492126662, 0.064985412445,
0.064485225879, 0.063991448591, 0.063503964673, 0.063022660634,
0.062547425362, 0.062078150081, 0.061614728306, 0.061157055799,
0.060705030534, 0.060258552644, 0.059817524389, 0.059381850104,
0.058951436167, 0.058526190951, 0.058106024786, 0.057690849916,
0.057280580461, 0.056875132379, 0.056474423420, 0.056078373094,
0.055686902597, 0.055299934908, 0.054917394549, 0.054539207682,
0.054165302043, 0.053795606904, 0.053430053038, 0.053068572685,
0.052711099521, 0.052357568621, 0.052007916428, 0.051662080722,
0.051320000589, 0.050981616389, 0.050646869729, 0.050315703428,
0.049988061495, 0.049663889096, 0.049343132530, 0.049025739199,
0.048711657583, 0.048400837216, 0.048093228658, 0.047788783472,
0.047487454201, 0.047189194340, 0.046893958321, 0.046601701483,
0.046312380055, 0.046025951131, 0.045742372654, 0.045461603391,
0.045183602918, 0.044908331595, 0.044635750552, 0.044365821669,
0.044098507559, 0.043833771549, 0.043571577663, 0.043311890609,
0.043054675757, 0.042799899130, 0.042547527383, 0.042297527792,
0.042049868238, 0.041804517192, 0.041561443703, 0.041320617383,
0.041082008396, 0.040845587444, 0.040611325754, 0.040379195067,
0.040149167627, 0.039921216168, 0.039695313903, 0.039471434514,
0.039249552141, 0.039029641375, 0.038811677240, 0.038595635191,
0.038381491103, 0.038169221257, 0.037958802339, 0.037750211423,
0.037543425969, 0.037338423811, 0.037135183150, 0.036933682546,
0.036733900911, 0.036535817501, 0.036339411909, 0.036144664057,
0.035951554189, 0.035760062868, 0.035570170964, 0.035381859650,
0.035195110398, 0.035009904968, 0.034826225408, 0.034644054043,
0.034463373472, 0.034284166563, 0.034106416445, 0.033930106506,
0.033755220385, 0.033581741970, 0.033409655381, 0.033238945006,
0.033069595437, 0.032901591502, 0.032734918257, 0.032569560976,
0.032405505149, 0.032242736480, 0.032081240877, 0.031921004457,
0.031762013533, 0.031604254614, 0.031447714408, 0.031292379807,
0.031138237889, 0.030985275916, 0.030833481330, 0.030682841747,
0.030533344956, 0.030384978917, 0.030237731757, 0.030091591765,
0.029946547392, 0.029802587246, 0.029659700092, 0.029517874848,
0.029377100578, 0.029237366499, 0.029098661968, 0.028960976487,
0.028824299698, 0.028688621380, 0.028553931447, 0.028420219946,
0.028287477146, 0.028155693172, 0.028024858549, 0.027894963834,
0.027765999707, 0.027637956969, 0.027510826538, 0.027384599450,
0.027259266855, 0.027134820015, 0.027011250304, 0.026888549204,
0.026766708306, 0.026645719305, 0.026525574002, 0.026406264299,
0.026287782199, 0.026170119804, 0.026053269315, 0.025937223027,
0.025821973333, 0.025707512715, 0.025593833750, 0.025480929104,
0.025368791533, 0.025257413880, 0.025146789074, 0.025036910129,
0.024927770145, 0.024819362301, 0.024711679860, 0.024604716163,
0.024498464633, 0.024392918767, 0.024288072140, 0.024183918405,
0.024080451285, 0.023977664578, 0.023875552157, 0.023774107962,
0.023673326005, 0.023573200367, 0.023473725197, 0.023374894712,
0.023276703194, 0.023179144990, 0.023082214514, 0.022985906240,
0.022890214705, 0.022795134511, 0.022700660318, 0.022606786845,
0.022513508873, 0.022420821238, 0.022328718838, 0.022237196622,
0.022146249601, 0.022055872835, 0.021966061444, 0.021876810597,
0.021788115520, 0.021699971488, 0.021612373829, 0.021525317922,
0.021438799195, 0.021352813126, 0.021267355243, 0.021182421120,
0.021098006380, 0.021014106692, 0.020930717773, 0.020847835383,
0.020765455329, 0.020683573461, 0.020602185675, 0.020521287909,
0.020440876143, 0.020360946400, 0.020281494745, 0.020202517283,
0.020124010162, 0.020045969566, 0.019968391723, 0.019891272897,
0.019814609391, 0.019738397547, 0.019662633744, 0.019587314399,
0.019512435964, 0.019437994927, 0.019363987815, 0.019290411187,
0.019217261637, 0.019144535794, 0.019072230323, 0.019000341920,
0.018928867313, 0.018857803268, 0.018787146573, 0.018716894064,
0.018647042595, 0.018577589056, 0.018508530367, 0.018439863479,
0.018371585372, 0.018303693057, 0.018236183573, 0.018169053990,
0.018102301405, 0.018035922942, 0.017969915757, 0.017904277029,
0.017839003967, 0.017774093806, 0.017709543809, 0.017645351263,
0.017581513483, 0.017518027809, 0.017454891607, 0.017392102266,
0.017329657201, 0.017267553854, 0.017205789687, 0.017144362190,
0.017083268872, 0.017022507270, 0.016962074942, 0.016901969469,
0.016842188454, 0.016782729524, 0.016723590326, 0.016664768531,
0.016606261831, 0.016548067937, 0.016490184585, 0.016432609529,
0.016375340544, 0.016318375428, 0.016261711995, 0.016205348082,
0.016149281544, 0.016093510257, 0.016038032116, 0.015982845035,
0.015927946945, 0.015873335798, 0.015819009564, 0.015764966230,
0.015711203802, 0.015657720305, 0.015604513780, 0.015551582286,
0.015498923899, 0.015446536712, 0.015394418835, 0.015342568396,
0.015290983537, 0.015239662419, 0.015188603218, 0.015137804125,
0.015087263349, 0.015036979113, 0.014986949655, 0.014937173231,
0.014887648110, 0.014838372576, 0.014789344929, 0.014740563483,
0.014692026566, 0.014643732522, 0.014595679707, 0.014547866494,
0.014500291267, 0.014452952426, 0.014405848383, 0.014358977564,
0.014312338410, 0.014265929373, 0.014219748919, 0.014173795527,
0.014128067689, 0.014082563909, 0.014037282704, 0.013992222604,
0.013947382150, 0.013902759897, 0.013858354410, 0.013814164268,
0.013770188060, 0.013726424388, 0.013682871864, 0.013639529114,
0.013596394772, 0.013553467487, 0.013510745917, 0.013468228730,
0.013425914607, 0.013383802238, 0.013341890327, 0.013300177583,
0.013258662731, 0.013217344504, 0.013176221644, 0.013135292905,
0.013094557051, 0.013054012855, 0.013013659100, 0.012973494580,
0.012933518098, 0.012893728465, 0.012854124505, 0.012814705048,
0.012775468934, 0.012736415014, 0.012697542147, 0.012658849200,
0.012620335051, 0.012581998585, 0.012543838698, 0.012505854291,
0.012468044278, 0.012430407578, 0.012392943121, 0.012355649844,
0.012318526692, 0.012281572619, 0.012244786587, 0.012208167567,
0.012171714535, 0.012135426478, 0.012099302390, 0.012063341272,
0.012027542133, 0.011991903989, 0.011956425866, 0.011921106795,
0.011885945815, 0.011850941973, 0.011816094322, 0.011781401924,
0.011746863846, 0.011712479164, 0.011678246960, 0.011644166323,
0.011610236415, 0.011576456207, 0.011542824873, 0.011509341531,
0.011476005304, 0.011442815321, 0.011409770717, 0.011376870636,
0.011344114226, 0.011311500642, 0.011279029046, 0.011246698605,
0.011214508494, 0.011182457891, 0.011150545983, 0.011118771961,
0.011087135025, 0.011055634376, 0.011024269226, 0.010993038789,
0.010961942287, 0.010930978946, 0.010900147999, 0.010869448684,
0.010838880244, 0.010808441929, 0.010778132993, 0.010747952695,
0.010717900302, 0.010687975084, 0.010658176315, 0.010628503279,
0.010598955260, 0.010569531549, 0.010540231444, 0.010511054245,
0.010481999259, 0.010453065798, 0.010424253175, 0.010395560715,
0.010366987742, 0.010338533587, 0.010310197585, 0.010281979076,
0.010253877405, 0.010225891921, 0.010198021978, 0.010170266935,
0.010142626154, 0.010115099002, 0.010087684852, 0.010060383080,
0.010033193065, 0.010006114194, 0.009979145854, 0.009952287440,
0.009925538348, 0.009898897981, 0.009872365744, 0.009845941047,
0.009819623305, 0.009793411934, 0.009767306357, 0.009741306000,
0.009715410293, 0.009689618670, 0.009663930567, 0.009638345428,
0.009612862696, 0.009587481821, 0.009562202255, 0.009537023456,
0.009511944883, 0.009486966000, 0.009462086274, 0.009437305177,
0.009412622182, 0.009388036769, 0.009363548418, 0.009339156614,
0.009314860847, 0.009290660607, 0.009266555391, 0.009242544697,
0.009218628026, 0.009194804885, 0.009171074781, 0.009147437227,
0.009123891737, 0.009100437830, 0.009077075027, 0.009053802853,
0.009030620835, 0.009007528505, 0.008984525396, 0.008961611045,
0.008938784992, 0.008916046779, 0.008893395954, 0.008870832064,
0.008848354662, 0.008825963303, 0.008803657543, 0.008781436944,
0.008759301068, 0.008737249482, 0.008715281756, 0.008693397459,
0.008671596168, 0.008649877459, 0.008628240913, 0.008606686111,
0.008585212639, 0.008563820085, 0.008542508040, 0.008521276096,
0.008500123850, 0.008479050900, 0.008458056847, 0.008437141293,
0.008416303846, 0.008395544113, 0.008374861705, 0.008354256236,
0.008333727321, 0.008313274579, 0.008292897630, 0.008272596097,
0.008252369605, 0.008232217783, 0.008212140259, 0.008192136667,
0.008172206641, 0.008152349817, 0.008132565835, 0.008112854337,
0.008093214965, 0.008073647366, 0.008054151187, 0.008034726079,
0.008015371695, 0.007996087687, 0.007976873714, 0.007957729433,
0.007938654505, 0.007919648594, 0.007900711364, 0.007881842482,
0.007863041617, 0.007844308441, 0.007825642625, 0.007807043845,
0.007788511779, 0.007770046104, 0.007751646503, 0.007733312657,
0.007715044251, 0.007696840973, 0.007678702509, 0.007660628552,
0.007642618793, 0.007624672926, 0.007606790647, 0.007588971653,
0.007571215645, 0.007553522324, 0.007535891393, 0.007518322556,
0.007500815521, 0.007483369996, 0.007465985690, 0.007448662316,
0.007431399588, 0.007414197219, 0.007397054929, 0.007379972434,
0.007362949455, 0.007345985715, 0.007329080936, 0.007312234844,
0.007295447166, 0.007278717629, 0.007262045965, 0.007245431904,
0.007228875179, 0.007212375525, 0.007195932679, 0.007179546378,
0.007163216361, 0.007146942369, 0.007130724144, 0.007114561431,
0.007098453973, 0.007082401519, 0.007066403816, 0.007050460614,
0.007034571663, 0.007018736717, 0.007002955529, 0.006987227854,
0.006971553450, 0.006955932074, 0.006940363485, 0.006924847446,
0.006909383717, 0.006893972062, 0.006878612247, 0.006863304038,
0.006848047201, 0.006832841507, 0.006817686725, 0.006802582626,
0.006787528984, 0.006772525572, 0.006757572167, 0.006742668544,
0.006727814481, 0.006713009757, 0.006698254153, 0.006683547451,
0.006668889432, 0.006654279882, 0.006639718585, 0.006625205327,
0.006610739897, 0.006596322083, 0.006581951674, 0.006567628463,
0.006553352241, 0.006539122801, 0.006524939939, 0.006510803450,
0.006496713130, 0.006482668778, 0.006468670193, 0.006454717175,
0.006440809524, 0.006426947044, 0.006413129538, 0.006399356810,
0.006385628666, 0.006371944913, 0.006358305357, 0.006344709808,
0.006331158076, 0.006317649971, 0.006304185306, 0.006290763892,
0.006277385544, 0.006264050077, 0.006250757306, 0.006237507049,
0.006224299123, 0.006211133347, 0.006198009541, 0.006184927525,
0.006171887122, 0.006158888154, 0.006145930444, 0.006133013818,
0.006120138100, 0.006107303117, 0.006094508697, 0.006081754667,
0.006069040857, 0.006056367097, 0.006043733219, 0.006031139053,
0.006018584432, 0.006006069192, 0.005993593165, 0.005981156187,
0.005968758095, 0.005956398726, 0.005944077917, 0.005931795508,
0.005919551338, 0.005907345249, 0.005895177080, 0.005883046675,
0.005870953876, 0.005858898528, 0.005846880474, 0.005834899562,
0.005822955635, 0.005811048543, 0.005799178132, 0.005787344252,
0.005775546751, 0.005763785481, 0.005752060291, 0.005740371034,
0.005728717562, 0.005717099730, 0.005705517389, 0.005693970397,
0.005682458607, 0.005670981876, 0.005659540062, 0.005648133022,
0.005636760614, 0.005625422698, 0.005614119134, 0.005602849782,
0.005591614503, 0.005580413160, 0.005569245616, 0.005558111734,
0.005547011377, 0.005535944412, 0.005524910702, 0.005513910116,
0.005502942519, 0.005492007778, 0.005481105763, 0.005470236342,
0.005459399385, 0.005448594761, 0.005437822342, 0.005427081999,
0.005416373605, 0.005405697031, 0.005395052152, 0.005384438841,
0.005373856973, 0.005363306424, 0.005352787069, 0.005342298785,
0.005331841449, 0.005321414939, 0.005311019132, 0.005300653909,
0.005290319149, 0.005280014731, 0.005269740537, 0.005259496447,
0.005249282345, 0.005239098112, 0.005228943631, 0.005218818786,
0.005208723461, 0.005198657585, 0.005188620956, 0.005178613502,
0.005168635112, 0.005158685671, 0.005148765068, 0.005138873190,
0.005129009927, 0.005119175167, 0.005109368800, 0.005099590716,
0.005089840807, 0.005080118963, 0.005070425077, 0.005060759041,
0.005051120748, 0.005041510090, 0.005031926964, 0.005022371261,
0.005012842879, 0.005003341711, 0.004993867654, 0.004984420605,
0.004975000461, 0.004965607118, 0.004956240476, 0.004946900431,
0.004937586884, 0.004928299734, 0.004919038880, 0.004909804223,
0.004900595664, 0.004891413104, 0.004882256445, 0.004873125588,
0.004864020438, 0.004854940896, 0.004845886867, 0.004836858255,
0.004827854964, 0.004818876899, 0.004809923965, 0.004800996069,
0.004792093117, 0.004783215015, 0.004774361670, 0.004765532991,
0.004756728886, 0.004747949262, 0.004739194029, 0.004730463096,
0.004721756373, 0.004713073770, 0.004704415197, 0.004695780566,
0.004687169789, 0.004678582776, 0.004670019440, 0.004661479694,
0.004652963451, 0.004644470625, 0.004636001128, 0.004627554876,
0.004619131783, 0.004610731765, 0.004602354735, 0.004594000612,
0.004585669309, 0.004577360745, 0.004569074837, 0.004560811501,
0.004552570655, 0.004544352217, 0.004536156107, 0.004527982242,
0.004519830542, 0.004511700927, 0.004503593316, 0.004495507630,
0.004487443790, 0.004479401716, 0.004471381331, 0.004463382555,
0.004455405311, 0.004447449521, 0.004439515108, 0.004431601996,
0.004423710108, 0.004415839367, 0.004407989699, 0.004400161027,
0.004392353276, 0.004384566371, 0.004376800239, 0.004369054805,
0.004361329995, 0.004353625735, 0.004345941954, 0.004338278577,
0.004330635532, 0.004323012748, 0.004315410152, 0.004307827672,
0.004300265239, 0.004292722780, 0.004285200226, 0.004277697505,
0.004270214548, 0.004262751286, 0.004255307649, 0.004247883568,
0.004240478974, 0.004233093799, 0.004225727975, 0.004218381434,
0.004211054109, 0.004203745931, 0.004196456836, 0.004189186754,
0.004181935622, 0.004174703371, 0.004167489937, 0.004160295255,
0.004153119258, 0.004145961882, 0.004138823063, 0.004131702735,
0.004124600836, 0.004117517301, 0.004110452067, 0.004103405070,
0.004096376248, 0.004089365537, 0.004082372876, 0.004075398203,
0.004068441455, 0.004061502571, 0.004054581489, 0.004047678149,
0.004040792489, 0.004033924449, 0.004027073970, 0.004020240990,
0.004013425450, 0.004006627290, 0.003999846452, 0.003993082875,
0.003986336503, 0.003979607275, 0.003972895133, 0.003966200021,
0.003959521879, 0.003952860651, 0.003946216278, 0.003939588705,
0.003932977874, 0.003926383729, 0.003919806213, 0.003913245271,
0.003906700846, 0.003900172883, 0.003893661326, 0.003887166121,
0.003880687213, 0.003874224546, 0.003867778066, 0.003861347720,
0.003854933453, 0.003848535212, 0.003842152942, 0.003835786592,
0.003829436106, 0.003823101434, 0.003816782521, 0.003810479316,
0.003804191766, 0.003797919820, 0.003791663426, 0.003785422532,
0.003779197086, 0.003772987038, 0.003766792337, 0.003760612932,
0.003754448773, 0.003748299808, 0.003742165989, 0.003736047266,
0.003729943588, 0.003723854906, 0.003717781171, 0.003711722334,
0.003705678346, 0.003699649159, 0.003693634723, 0.003687634991,
0.003681649915, 0.003675679447, 0.003669723539, 0.003663782143,
0.003657855213, 0.003651942701, 0.003646044561, 0.003640160745,
0.003634291208, 0.003628435902, 0.003622594783, 0.003616767803,
0.003610954917, 0.003605156080, 0.003599371246, 0.003593600370,
0.003587843407, 0.003582100311, 0.003576371040, 0.003570655547,
0.003564953789, 0.003559265722, 0.003553591301, 0.003547930483,
0.003542283225, 0.003536649482, 0.003531029212, 0.003525422371,
0.003519828917, 0.003514248807, 0.003508681999, 0.003503128449,
0.003497588116, 0.003492060959, 0.003486546934, 0.003481046000,
0.003475558116, 0.003470083240, 0.003464621331, 0.003459172348,
0.003453736250, 0.003448312997, 0.003442902548, 0.003437504862,
0.003432119899, 0.003426747620, 0.003421387983, 0.003416040950,
0.003410706481, 0.003405384537, 0.003400075077, 0.003394778063,
0.003389493457, 0.003384221218, 0.003378961309, 0.003373713691,
0.003368478325, 0.003363255173, 0.003358044198, 0.003352845361,
0.003347658624, 0.003342483951, 0.003337321302, 0.003332170642,
0.003327031932, 0.003321905136, 0.003316790217, 0.003311687138,
0.003306595863, 0.003301516354, 0.003296448576, 0.003291392492,
0.003286348067, 0.003281315264, 0.003276294048, 0.003271284383,
0.003266286233, 0.003261299564, 0.003256324339, 0.003251360524,
0.003246408084, 0.003241466984, 0.003236537189, 0.003231618665,
0.003226711377, 0.003221815291, 0.003216930373, 0.003212056589,
0.003207193904, 0.003202342286, 0.003197501700, 0.003192672113,
0.003187853491, 0.003183045802, 0.003178249012, 0.003173463088,
0.003168687997, 0.003163923706, 0.003159170184, 0.003154427396,
0.003149695312, 0.003144973898, 0.003140263123, 0.003135562954,
0.003130873359, 0.003126194308, 0.003121525767, 0.003116867706,
0.003112220093, 0.003107582897, 0.003102956086, 0.003098339630,
0.003093733497, 0.003089137657, 0.003084552079, 0.003079976733,
0.003075411587, 0.003070856612, 0.003066311777, 0.003061777051,
0.003057252406, 0.003052737811, 0.003048233237, 0.003043738652,
0.003039254029, 0.003034779337, 0.003030314547, 0.003025859629,
0.003021414556, 0.003016979297, 0.003012553823, 0.003008138106,
0.003003732117, 0.002999335828, 0.002994949209, 0.002990572233,
0.002986204871, 0.002981847094, 0.002977498876, 0.002973160187,
0.002968831000, 0.002964511287, 0.002960201021, 0.002955900173,
0.002951608717, 0.002947326624, 0.002943053868, 0.002938790421,
0.002934536257, 0.002930291348, 0.002926055667, 0.002921829188,
0.002917611884, 0.002913403728, 0.002909204693, 0.002905014755,
0.002900833885, 0.002896662058, 0.002892499247, 0.002888345427,
0.002884200572, 0.002880064656, 0.002875937653, 0.002871819538,
0.002867710284, 0.002863609866, 0.002859518260, 0.002855435439,
0.002851361379, 0.002847296054, 0.002843239440, 0.002839191511,
0.002835152243, 0.002831121611, 0.002827099590, 0.002823086155,
0.002819081283, 0.002815084948, 0.002811097127, 0.002807117795,
0.002803146928, 0.002799184502, 0.002795230493, 0.002791284877,
0.002787347631, 0.002783418730, 0.002779498151, 0.002775585871,
0.002771681866, 0.002767786112, 0.002763898587, 0.002760019267,
0.002756148128, 0.002752285149, 0.002748430306, 0.002744583575,
0.002740744935, 0.002736914363, 0.002733091835, 0.002729277330,
0.002725470825, 0.002721672297, 0.002717881724, 0.002714099084,
0.002710324354, 0.002706557514, 0.002702798539, 0.002699047410,
0.002695304103, 0.002691568597, 0.002687840870, 0.002684120901,
0.002680408668, 0.002676704149, 0.002673007324, 0.002669318170,
0.002665636667, 0.002661962793, 0.002658296527, 0.002654637849,
0.002650986737, 0.002647343170, 0.002643707128, 0.002640078589,
0.002636457533, 0.002632843940, 0.002629237789, 0.002625639059,
0.002622047730, 0.002618463782, 0.002614887194, 0.002611317947,
0.002607756019, 0.002604201392, 0.002600654045, 0.002597113958,
0.002593581111, 0.002590055485, 0.002586537060, 0.002583025816,
0.002579521733, 0.002576024793, 0.002572534976, 0.002569052261,
0.002565576631, 0.002562108065, 0.002558646545, 0.002555192052,
0.002551744566, 0.002548304068, 0.002544870540, 0.002541443962,
0.002538024316, 0.002534611584, 0.002531205746, 0.002527806784,
0.002524414679, 0.002521029413, 0.002517650967, 0.002514279324,
0.002510914464, 0.002507556370, 0.002504205023, 0.002500860405,
0.002497522499, 0.002494191286, 0.002490866749, 0.002487548869,
0.002484237628, 0.002480933010, 0.002477634995, 0.002474343568,
0.002471058709, 0.002467780402, 0.002464508629, 0.002461243373,
0.002457984616, 0.002454732341, 0.002451486532, 0.002448247169,
0.002445014238, 0.002441787720, 0.002438567598, 0.002435353856,
0.002432146477, 0.002428945444, 0.002425750740, 0.002422562349,
0.002419380253, 0.002416204437, 0.002413034883, 0.002409871575,
0.002406714497, 0.002403563633, 0.002400418965, 0.002397280478,
0.002394148183, 0.002391022009, 0.002387901967, 0.002384788041,
0.002381680216, 0.002378578474, 0.002375482801, 0.002372393180,
0.002369309596, 0.002366232033, 0.002363160475, 0.002360094907,
0.002357035312, 0.002353981676, 0.002350933983, 0.002347892217,
0.002344856364, 0.002341826407, 0.002338802331, 0.002335784122,
0.002332771764, 0.002329765241, 0.002326764540, 0.002323769644,
0.002320780538, 0.002317797209, 0.002314819640, 0.002311847818,
0.002308881726, 0.002305921351, 0.002302966678, 0.002300017692,
0.002297074378, 0.002294136722, 0.002291204709, 0.002288278326,
0.002285357556, 0.002282442387, 0.002279532804, 0.002276628792,
0.002273730337, 0.002270837425, 0.002267950042, 0.002265068174,
0.002262191807, 0.002259320926, 0.002256455517, 0.002253595568,
0.002250741063, 0.002247891990, 0.002245048333, 0.002242210080,
0.002239377217, 0.002236549730, 0.002233727606, 0.002230910830,
0.002228099390, 0.002225293271, 0.002222492461, 0.002219696946,
0.002216906713, 0.002214121748, 0.002211342038, 0.002208567569,
0.002205798330, 0.002203034306, 0.002200275484, 0.002197521851,
0.002194773395, 0.002192030102, 0.002189291959, 0.002186558953,
0.002183831072, 0.002181108303, 0.002178390633, 0.002175678048,
0.002172970537, 0.002170268087, 0.002167570685, 0.002164878319,
0.002162190975, 0.002159508643, 0.002156831308, 0.002154158958,
0.002151491582, 0.002148829167, 0.002146171700, 0.002143519169,
0.002140871562, 0.002138228868, 0.002135591072, 0.002132958164,
0.002130330132, 0.002127706963, 0.002125088645, 0.002122475167,
0.002119866516, 0.002117262680, 0.002114663648, 0.002112069408,
0.002109479948, 0.002106895257, 0.002104315321, 0.002101740131,
0.002099169674, 0.002096603938, 0.002094042912, 0.002091486585,
0.002088934945, 0.002086387980, 0.002083845679, 0.002081308031,
0.002078775024, 0.002076246648, 0.002073722889, 0.002071203738,
0.002068689184, 0.002066179214, 0.002063673818, 0.002061172985,
0.002058676703, 0.002056184961, 0.002053697750, 0.002051215056,
0.002048736871, 0.002046263181, 0.002043793978, 0.002041329250,
0.002038868985, 0.002036413174, 0.002033961805, 0.002031514868,
0.002029072352, 0.002026634246, 0.002024200540, 0.002021771223,
0.002019346285, 0.002016925715, 0.002014509503, 0.002012097637,
0.002009690108, 0.002007286906, 0.002004888019, 0.002002493438,
0.002000103152, 0.001997717151, 0.001995335424, 0.001992957962,
0.001990584754, 0.001988215791, 0.001985851061, 0.001983490555,
0.001981134263, 0.001978782174, 0.001976434279, 0.001974090568,
0.001971751031, 0.001969415658, 0.001967084438, 0.001964757363,
0.001962434422, 0.001960115605, 0.001957800903, 0.001955490306,
0.001953183805, 0.001950881388, 0.001948583048, 0.001946288774,
0.001943998556, 0.001941712386, 0.001939430253, 0.001937152148,
0.001934878062, 0.001932607985, 0.001930341907, 0.001928079819,
0.001925821713, 0.001923567577, 0.001921317404, 0.001919071184,
0.001916828908, 0.001914590565, 0.001912356148, 0.001910125647,
0.001907899053, 0.001905676356, 0.001903457548, 0.001901242619,
0.001899031560, 0.001896824363, 0.001894621019, 0.001892421517,
0.001890225851, 0.001888034009, 0.001885845985, 0.001883661768,
0.001881481350, 0.001879304722, 0.001877131876, 0.001874962802,
0.001872797491, 0.001870635936, 0.001868478127, 0.001866324056,
0.001864173714, 0.001862027093, 0.001859884183, 0.001857744977,
0.001855609465, 0.001853477639, 0.001851349492, 0.001849225013,
0.001847104196, 0.001844987031, 0.001842873509, 0.001840763624,
0.001838657366, 0.001836554726, 0.001834455698, 0.001832360271,
0.001830268439, 0.001828180193, 0.001826095525, 0.001824014426,
0.001821936888, 0.001819862904, 0.001817792465, 0.001815725562,
0.001813662189, 0.001811602337, 0.001809545997, 0.001807493163,
0.001805443825, 0.001803397977, 0.001801355609, 0.001799316715,
0.001797281286, 0.001795249314, 0.001793220792, 0.001791195711,
0.001789174065, 0.001787155845, 0.001785141043, 0.001783129652,
0.001781121664, 0.001779117071, 0.001777115866, 0.001775118040,
0.001773123588, 0.001771132500, 0.001769144769, 0.001767160388,
0.001765179349, 0.001763201645, 0.001761227267, 0.001759256210,
0.001757288464, 0.001755324024, 0.001753362880, 0.001751405027,
0.001749450457, 0.001747499161, 0.001745551134, 0.001743606367,
0.001741664853, 0.001739726586, 0.001737791557, 0.001735859760,
0.001733931188, 0.001732005833, 0.001730083687, 0.001728164745,
0.001726248999, 0.001724336441, 0.001722427065, 0.001720520863,
0.001718617829, 0.001716717955, 0.001714821235, 0.001712927662,
0.001711037227, 0.001709149926, 0.001707265750, 0.001705384693,
0.001703506748, 0.001701631907, 0.001699760165, 0.001697891514,
0.001696025948, 0.001694163459, 0.001692304041, 0.001690447687,
0.001688594391, 0.001686744146, 0.001684896944, 0.001683052780,
0.001681211646, 0.001679373537, 0.001677538444, 0.001675706363,
0.001673877285, 0.001672051206, 0.001670228117, 0.001668408013,
0.001666590887, 0.001664776732, 0.001662965542, 0.001661157311,
0.001659352031, 0.001657549698, 0.001655750303, 0.001653953842,
0.001652160307, 0.001650369692, 0.001648581990, 0.001646797197,
0.001645015304, 0.001643236306, 0.001641460196, 0.001639686969,
0.001637916618, 0.001636149137, 0.001634384519, 0.001632622759,
0.001630863850, 0.001629107786, 0.001627354561, 0.001625604168,
0.001623856603, 0.001622111858, 0.001620369927, 0.001618630805,
0.001616894486, 0.001615160962, 0.001613430230, 0.001611702281,
0.001609977111, 0.001608254714, 0.001606535083, 0.001604818212,
0.001603104096, 0.001601392729, 0.001599684105, 0.001597978218,
0.001596275062, 0.001594574631, 0.001592876920, 0.001591181923,
0.001589489633, 0.001587800046, 0.001586113155, 0.001584428955,
0.001582747439, 0.001581068603, 0.001579392440, 0.001577718945,
0.001576048112, 0.001574379936, 0.001572714411, 0.001571051531,
0.001569391290, 0.001567733684, 0.001566078706, 0.001564426351,
0.001562776613, 0.001561129487, 0.001559484967, 0.001557843049,
0.001556203725, 0.001554566992, 0.001552932843, 0.001551301272,
0.001549672276, 0.001548045847, 0.001546421981, 0.001544800672,
0.001543181915, 0.001541565705, 0.001539952036, 0.001538340903,
0.001536732301, 0.001535126223, 0.001533522666, 0.001531921623,
0.001530323090, 0.001528727061, 0.001527133531, 0.001525542495,
0.001523953947, 0.001522367883, 0.001520784296, 0.001519203183,
0.001517624538, 0.001516048355, 0.001514474629, 0.001512903357,
0.001511334531, 0.001509768148, 0.001508204202, 0.001506642688,
0.001505083601, 0.001503526936, 0.001501972689, 0.001500420853,
0.001498871424, 0.001497324398, 0.001495779768, 0.001494237530,
0.001492697680, 0.001491160212, 0.001489625121, 0.001488092403,
0.001486562052, 0.001485034064, 0.001483508433, 0.001481985156,
0.001480464226, 0.001478945640, 0.001477429393, 0.001475915478,
0.001474403893, 0.001472894632, 0.001471387690, 0.001469883063,
0.001468380745, 0.001466880732, 0.001465383020, 0.001463887603,
0.001462394478, 0.001460903638, 0.001459415080, 0.001457928799,
0.001456444791, 0.001454963050, 0.001453483572, 0.001452006352,
0.001450531386, 0.001449058669, 0.001447588197, 0.001446119965,
0.001444653969, 0.001443190203, 0.001441728664, 0.001440269347,
0.001438812248, 0.001437357361, 0.001435904683, 0.001434454208,
0.001433005933, 0.001431559854, 0.001430115964, 0.001428674261,
0.001427234740, 0.001425797396, 0.001424362225, 0.001422929223,
0.001421498385, 0.001420069706, 0.001418643183, 0.001417218812])
H3 = np.array(
[-0.752252778064,-0.751951907041,-0.751049654958,-0.749547104192,
-0.747446056783,-0.744749031550,-0.741459260066,-0.737580681497,
-0.733117936310,-0.728076358876,-0.722461968963,-0.716281462157,
-0.709542199216,-0.702252194382,-0.694420102672,-0.686055206185,
-0.677167399429,-0.667767173726,-0.657865600697,-0.647474314879,
-0.636605495506,-0.625271847477,-0.613486581562,-0.601263393883,
-0.588616444696,-0.575560336534,-0.562110091745,-0.548281129459,
-0.534089242044,-0.519550571089,-0.504681582955,-0.489499043946,
-0.474019995148,-0.458261726974,-0.442241753478,-0.425977786464,
-0.409487709464,-0.392789551607,-0.375901461443,-0.358841680766,
-0.341628518477,-0.324280324544,-0.306815464089,-0.289252291671,
-0.271609125783,-0.253904223627,-0.236155756202,-0.218381783743,
-0.200600231563,-0.182828866326,-0.165085272797,-0.147386831102,
-0.129750694537,-0.112193767960,-0.094732686795,-0.077383796679,
-0.060163133797,-0.043086405902,-0.026168974085,-0.009425835284,
0.007128394424, 0.023479495729, 0.039613661181, 0.055517509450,
0.071178098729, 0.086582939277, 0.101720005089, 0.116577744673,
0.131145090944, 0.145411470205, 0.159366810241, 0.173001547492,
0.186306633327, 0.199273539414, 0.211894262182, 0.224161326392,
0.236067787813, 0.247607235019, 0.258773790309, 0.269562109768,
0.279967382485, 0.289985328924, 0.299612198492, 0.308844766297,
0.317680329127, 0.326116700665, 0.334152205965, 0.341785675206,
0.349016436756, 0.355844309555, 0.362269594861, 0.368293067366,
0.373915965728, 0.379139982525, 0.383967253684, 0.388400347386,
0.392442252501, 0.396096366567, 0.399366483349, 0.402256780005,
0.404771803894, 0.406916459051, 0.408695992364, 0.410115979481,
0.411182310478, 0.411901175319, 0.412279049136, 0.412322677365,
0.412039060758, 0.411435440304, 0.410519282100, 0.409298262172,
0.407780251309, 0.405973299902, 0.403885622848, 0.401525584513,
0.398901683806, 0.396022539369, 0.392896874922, 0.389533504769,
0.385941319503, 0.382129271921, 0.378106363172, 0.373881629157,
0.369464127198, 0.364862923000, 0.360087077909, 0.355145636493,
0.350047614456, 0.344801986899, 0.339417676935, 0.333903544682,
0.328268376626, 0.322520875384, 0.316669649852, 0.310723205768,
0.304689936683, 0.298578115342, 0.292395885492, 0.286151254111,
0.279852084063, 0.273506087181, 0.267120817773, 0.260703666564,
0.254261855060, 0.247802430334, 0.241332260239, 0.234858029039,
0.228386233449, 0.221923179092, 0.215474977354, 0.209047542641,
0.202646590024, 0.196277633269, 0.189945983241, 0.183656746711,
0.177414825371, 0.171224915479, 0.165091507575, 0.159018886653,
0.153011132637, 0.147072121151, 0.141205524598, 0.135414813519,
0.129703258229, 0.124073930714, 0.118529706783, 0.113073268455,
0.107707106577, 0.102433523653, 0.097254636883, 0.092172381387,
0.087188513608, 0.082304614890, 0.077522095202, 0.072842197009,
0.068265999280, 0.063794421603, 0.059428228425, 0.055168033374,
0.051014303682, 0.046967364672, 0.043027404315, 0.039194477841,
0.035468512397, 0.031849311736, 0.028336560933, 0.024929831117,
0.021628584210, 0.018432177663, 0.015339869187, 0.012350821460,
0.009464106811, 0.006678711873, 0.003993542194, 0.001407426800,
-0.001080877289,-0.003472680608,-0.005769356857,-0.007972338583,
-0.010083112932,-0.012103217485,-0.014034236175,-0.015877795293,
-0.017635559582,-0.019309228431,-0.020900532156,-0.022411228396,
-0.023843098593,-0.025197944596,-0.026477585353,-0.027683853723,
-0.028818593393,-0.029883655903,-0.030880897783,-0.031812177804,
-0.032679354333,-0.033484282807,-0.034228813314,-0.034914788284,
-0.035544040295,-0.036118389978,-0.036639644043,-0.037109593368,
-0.037530011368,-0.037902652131,-0.038229248990,-0.038511513028,
-0.038751131720,-0.038949767669,-0.039109057435,-0.039230610461,
-0.039316008087,-0.039366802653,-0.039384516689,-0.039370642184,
-0.039326639941,-0.039253939009,-0.039153936188,-0.039027995612,
-0.038877448393,-0.038703592350,-0.038507691783,-0.038290977323,
-0.038054645839,-0.037799860397,-0.037527750284,-0.037239411070,
-0.036935904732,-0.036618259817,-0.036287471656,-0.035944502607,
-0.035590282354,-0.035225708231,-0.034851645582,-0.034468928160,
-0.034078358546,-0.033680708602,-0.033276719950,-0.032867104470,
-0.032452544821,-0.032033694986,-0.031611180823,-0.031185600648,
-0.030757525815,-0.030327501320,-0.029896046409,-0.029463655198,
-0.029030797297,-0.028597918444,-0.028165441141,-0.027733765290,
-0.027303268837,-0.026874308413,-0.026447219972,-0.026022319432,
-0.025599903313,-0.025180249365,-0.024763617198,-0.024350248907,
-0.023940369683,-0.023534188425,-0.023131898342,-0.022733677544,
-0.022339689625,-0.021950084244,-0.021564997680,-0.021184553396,
-0.020808862576,-0.020438024658,-0.020072127859,-0.019711249681,
-0.019355457408,-0.019004808592,-0.018659351527,-0.018319125708,
-0.017984162280,-0.017654484470,-0.017330108015,-0.017011041567,
-0.016697287096,-0.016388840267,-0.016085690819,-0.015787822923,
-0.015495215525,-0.015207842688,-0.014925673910,-0.014648674439,
-0.014376805566,-0.014110024919,-0.013848286738,-0.013591542134,
-0.013339739353,-0.013092824008,-0.012850739321,-0.012613426340,
-0.012380824123,-0.012152870052,-0.011929499863,-0.011710647936,
-0.011496247440,-0.011286230502,-0.011080528362,-0.010879071520,
-0.010681789878,-0.010488612873,-0.010299469600,-0.010114288929,
-0.009932999618,-0.009755530417,-0.009581810161,-0.009411767864,
-0.009245332805,-0.009082434602,-0.008923003292,-0.008766969393,
-0.008614263971,-0.008464818695,-0.008318565892,-0.008175438598,
-0.008035370597,-0.007898296464,-0.007764151606,-0.007632872286,
-0.007504395421,-0.007378659557,-0.007255603473,-0.007135167138,
-0.007017291500,-0.006901918498,-0.006788991072,-0.006678453177,
-0.006570249787,-0.006464326900,-0.006360631544,-0.006259111777,
-0.006159716688,-0.006062396393,-0.005967102034,-0.005873785773,
-0.005782400788,-0.005692901265,-0.005605242388,-0.005519380334,
-0.005435272260,-0.005352876292,-0.005272151518,-0.005193057969,
-0.005115556612,-0.005039609333,-0.004965178925,-0.004892229074,
-0.004820724345,-0.004750630162,-0.004681912801,-0.004614539369,
-0.004548477789,-0.004483696787,-0.004420165872,-0.004357855325,
-0.004296736179,-0.004236780206,-0.004177959899,-0.004120248458,
-0.004063619772,-0.004008048406,-0.003953509583,-0.003899979171,
-0.003847433665,-0.003795850176,-0.003745206410,-0.003695480658,
-0.003646651783,-0.003598699199,-0.003551602863,-0.003505343259,
-0.003459901384,-0.003415258734,-0.003371397294,-0.003328299521,
-0.003285948336,-0.003244327106,-0.003203419637,-0.003163210160,
-0.003123683318,-0.003084824158,-0.003046618118,-0.003009051013,
-0.002972109033,-0.002935778724,-0.002900046982,-0.002864901043,
-0.002830328472,-0.002796317156,-0.002762855295,-0.002729931387,
-0.002697534230,-0.002665652905,-0.002634276769,-0.002603395454,
-0.002572998849,-0.002543077100,-0.002513620601,-0.002484619985,
-0.002456066121,-0.002427950100,-0.002400263239,-0.002372997065,
-0.002346143316,-0.002319693929,-0.002293640920,-0.002267976851,
-0.002242694116,-0.002217785407,-0.002193243589,-0.002169061700,
-0.002145232941,-0.002121750674,-0.002098608417,-0.002075799841,
-0.002053318764,-0.002031159126,-0.002009315067,-0.001987780805,
-0.001966550708,-0.001945619272,-0.001924981119,-0.001904630990,
-0.001884563747,-0.001864774365,-0.001845257931,-0.001826009640,
-0.001807024795,-0.001788298800,-0.001769827160,-0.001751605476,
-0.001733629448,-0.001715894863,-0.001698397604,-0.001681133636,
-0.001664099014,-0.001647289874,-0.001630702434,-0.001614332989,
-0.001598179129,-0.001582234872,-0.001566497956,-0.001550964973,
-0.001535632585,-0.001520497524,-0.001505556585,-0.001490806630,
-0.001476244583,-0.001461867430,-0.001447672214,-0.001433656041,
-0.001419816070,-0.001406149518,-0.001392653656,-0.001379325806,
-0.001366163345,-0.001353163699,-0.001340324341,-0.001327642797,
-0.001315116635,-0.001302743474,-0.001290520974,-0.001278446841,
-0.001266518822,-0.001254734709,-0.001243092331,-0.001231589562,
-0.001220224309,-0.001208994523,-0.001197898190,-0.001186933332,
-0.001176098007,-0.001165390310,-0.001154808368,-0.001144350341,
-0.001134014426,-0.001123798847,-0.001113701861,-0.001103721759,
-0.001093856857,-0.001084105504,-0.001074466076,-0.001064936978,
-0.001055516643,-0.001046203530,-0.001036996125,-0.001027892940,
-0.001018892513,-0.001009993405,-0.001001194204,-0.000992493520,
-0.000983889986,-0.000975382261,-0.000966969022,-0.000958648972,
-0.000950420834,-0.000942283351,-0.000934235288,-0.000926275431,
-0.000918402585,-0.000910615574,-0.000902913243,-0.000895294453,
-0.000887758087,-0.000880303042,-0.000872928236,-0.000865632604,
-0.000858415095,-0.000851274680,-0.000844210340,-0.000837221079,
-0.000830305911,-0.000823463869,-0.000816693999,-0.000809995364,
-0.000803367040,-0.000796808118,-0.000790317704,-0.000783894917,
-0.000777538888,-0.000771248764,-0.000765023705,-0.000758862881,
-0.000752765478,-0.000746730693,-0.000740757735,-0.000734845825,
-0.000728994196,-0.000723202092,-0.000717468770,-0.000711793497,
-0.000706175549,-0.000700614217,-0.000695108799,-0.000689658606,
-0.000684262956,-0.000678921179,-0.000673632543,-0.000668396541,
-0.000663212461,-0.000658079671,-0.000652997548,-0.000647965479,
-0.000642982859,-0.000638049094,-0.000633163594,-0.000628325781,
-0.000623535085,-0.000618790943,-0.000614092801,-0.000609440112,
-0.000604832337,-0.000600268946,-0.000595749415,-0.000591273227,
-0.000586839873,-0.000582448852,-0.000578099668,-0.000573791833,
-0.000569524867,-0.000565298294,-0.000561111648,-0.000556964465,
-0.000552856292,-0.000548786679,-0.000544755183,-0.000540761368,
-0.000536804803,-0.000532885063,-0.000529001730,-0.000525154389,
-0.000521342633,-0.000517566059,-0.000513824271,-0.000510116877,
-0.000506443491,-0.000502803731,-0.000499197222,-0.000495623592,
-0.000492082475,-0.000488573510,-0.000485096341,-0.000481650614,
-0.000478235985,-0.000474852108,-0.000471498648,-0.000468175269,
-0.000464881625,-0.000461617425,-0.000458382332,-0.000455176027,
-0.000451998200,-0.000448848541,-0.000445726746,-0.000442632514,
-0.000439565547,-0.000436525555,-0.000433512245,-0.000430525334,
-0.000427564539,-0.000424629581,-0.000421720186,-0.000418836083,
-0.000415977003,-0.000413142681,-0.000410332856,-0.000407547271,
-0.000404785670,-0.000402047802,-0.000399333418,-0.000396642273,
-0.000393974124,-0.000391328732,-0.000388705860,-0.000386105276,
-0.000383526747,-0.000380970048,-0.000378434951,-0.000375921236,
-0.000373428682,-0.000370957073,-0.000368506194,-0.000366075834,
-0.000363665783,-0.000361275835,-0.000358905786,-0.000356555434,
-0.000354224580,-0.000351913028,-0.000349620581,-0.000347347050,
-0.000345092242,-0.000342855972,-0.000340638053,-0.000338438303,
-0.000336256539,-0.000334092584,-0.000331946261,-0.000329817395,
-0.000327705812,-0.000325611343,-0.000323533819,-0.000321473073,
-0.000319428941,-0.000317401259,-0.000315389867,-0.000313394605,
-0.000311415316,-0.000309451846,-0.000307504039,-0.000305571745,
-0.000303654813,-0.000301753094,-0.000299866443,-0.000297994713,
-0.000296137762,-0.000294295448,-0.000292467630,-0.000290654171,
-0.000288854932,-0.000287069779,-0.000285298577,-0.000283541195,
-0.000281797501,-0.000280067365,-0.000278350660,-0.000276647259,
-0.000274957037,-0.000273279870,-0.000271615635,-0.000269964212,
-0.000268325481,-0.000266699323,-0.000265085621,-0.000263484260,
-0.000261895124,-0.000260318101,-0.000258753078,-0.000257199946,
-0.000255658593,-0.000254128911,-0.000252610794,-0.000251104135,
-0.000249610990,-0.000248126920,-0.000246653996,-0.000245192119,
-0.000243741185,-0.000242301097,-0.000240871755,-0.000239453063,
-0.000238044925,-0.000236647244,-0.000235259927,-0.000233882881,
-0.000232516014,-0.000231159234,-0.000229812450,-0.000228475574,
-0.000227148518,-0.000225831193,-0.000224523513,-0.000223225393,
-0.000221936748,-0.000220657494,-0.000219387548,-0.000218126828,
-0.000216875252,-0.000215632741,-0.000214399215,-0.000213174594,
-0.000211958802,-0.000210751761,-0.000209553394,-0.000208363626,
-0.000207182383,-0.000206009589,-0.000204845173,-0.000203689061,
-0.000202541182,-0.000201401464,-0.000200269790,-0.000199146185,
-0.000198030532,-0.000196922764,-0.000195822813,-0.000194730613,
-0.000193646097,-0.000192569199,-0.000191499856,-0.000190438001,
-0.000189383574,-0.000188336509,-0.000187296746,-0.000186264221,
-0.000185238875,-0.000184220647,-0.000183209477,-0.000182205306,
-0.000181208074,-0.000180217725,-0.000179234200,-0.000178257443,
-0.000177287397,-0.000176324006,-0.000175367216,-0.000174416971,
-0.000173473217,-0.000172535901,-0.000171604968,-0.000170680368,
-0.000169762047,-0.000168849954,-0.000167944038,-0.000167044248,
-0.000166150534,-0.000165262847,-0.000164381136,-0.000163505354,
-0.000162635453,-0.000161771383,-0.000160913100,-0.000160060554,
-0.000159213700,-0.000158372493,-0.000157536885,-0.000156706833,
-0.000155882291,-0.000155063216,-0.000154249563,-0.000153441289,
-0.000152638351,-0.000151840707,-0.000151048314,-0.000150261130,
-0.000149479114,-0.000148702224,-0.000147930421,-0.000147163665,
-0.000146401914,-0.000145645130,-0.000144893273,-0.000144146305,
-0.000143404187,-0.000142666880,-0.000141934348,-0.000141206553,
-0.000140483459,-0.000139765027,-0.000139051222,-0.000138342008,
-0.000137637349,-0.000136937210,-0.000136241554,-0.000135550350,
-0.000134863561,-0.000134181153,-0.000133503093,-0.000132829347,
-0.000132159882,-0.000131494665,-0.000130833663,-0.000130176845,
-0.000129524178,-0.000128875630,-0.000128231170,-0.000127590768,
-0.000126954392,-0.000126322010,-0.000125693594,-0.000125069114,
-0.000124448538,-0.000123831839,-0.000123218986,-0.000122609950,
-0.000122004703,-0.000121403217,-0.000120805462,-0.000120211398,
-0.000119621024,-0.000119034299,-0.000118451195,-0.000117871686,
-0.000117295744,-0.000116723344,-0.000116154459,-0.000115589063,
-0.000115027129,-0.000114468633,-0.000113913550,-0.000113361853,
-0.000112813518,-0.000112268520,-0.000111726834,-0.000111188437,
-0.000110653305,-0.000110121412,-0.000109592736,-0.000109067253,
-0.000108544940,-0.000108025773,-0.000107509731,-0.000106996789,
-0.000106486925,-0.000105980118,-0.000105476346,-0.000104975585,
-0.000104477815,-0.000103983013,-0.000103491159,-0.000103002232,
-0.000102516209,-0.000102033071,-0.000101552797,-0.000101075366,
-0.000100600758,-0.000100128953,-0.000099659931,-0.000099193670,
-0.000098730154,-0.000098269362,-0.000097811273,-0.000097355870,
-0.000096903134,-0.000096453045,-0.000096005584,-0.000095560734,
-0.000095118477,-0.000094678791,-0.000094241663,-0.000093807071,
-0.000093375000,-0.000092945431,-0.000092518346,-0.000092093728,
-0.000091671561,-0.000091251828,-0.000090834509,-0.000090419590,
-0.000090007054,-0.000089596884,-0.000089189066,-0.000088783579,
-0.000088380409,-0.000087979542,-0.000087580961,-0.000087184648,
-0.000086790590,-0.000086398772,-0.000086009176,-0.000085621788,
-0.000085236594,-0.000084853579,-0.000084472726,-0.000084094021,
-0.000083717451,-0.000083343000,-0.000082970655,-0.000082600399,
-0.000082232220,-0.000081866104,-0.000081502037,-0.000081140003,
-0.000080779991,-0.000080421987,-0.000080065976,-0.000079711946,
-0.000079359883,-0.000079009773,-0.000078661606,-0.000078315366,
-0.000077971040,-0.000077628617,-0.000077288085,-0.000076949428,
-0.000076612636,-0.000076277697,-0.000075944598,-0.000075613325,
-0.000075283869,-0.000074956216,-0.000074630355,-0.000074306274,
-0.000073983961,-0.000073663404,-0.000073344592,-0.000073027514,
-0.000072712158,-0.000072398512,-0.000072086567,-0.000071776309,
-0.000071467731,-0.000071160818,-0.000070855559,-0.000070551947,
-0.000070249969,-0.000069949614,-0.000069650873,-0.000069353734,
-0.000069058188,-0.000068764223,-0.000068471830,-0.000068181000,
-0.000067891719,-0.000067603982,-0.000067317776,-0.000067033091,
-0.000066749920,-0.000066468249,-0.000066188072,-0.000065909379,
-0.000065632160,-0.000065356404,-0.000065082104,-0.000064809249,
-0.000064537830,-0.000064267841,-0.000063999270,-0.000063732106,
-0.000063466345,-0.000063201975,-0.000062938987,-0.000062677374,
-0.000062417127,-0.000062158236,-0.000061900694,-0.000061644492,
-0.000061389621,-0.000061136073,-0.000060883842,-0.000060632915,
-0.000060383289,-0.000060134953,-0.000059887864,-0.000059642083,
-0.000059397570,-0.000059154315,-0.000058912309,-0.000058671548,
-0.000058432022,-0.000058193723,-0.000057956645,-0.000057720779,
-0.000057486117,-0.000057252655,-0.000057020383,-0.000056789292,
-0.000056559380,-0.000056330634,-0.000056103051,-0.000055876620,
-0.000055651339,-0.000055427196,-0.000055204187,-0.000054982304,
-0.000054761541,-0.000054541891,-0.000054323347,-0.000054105902,
-0.000053889548,-0.000053674282,-0.000053460095,-0.000053246980,
-0.000053034932,-0.000052823945,-0.000052614010,-0.000052405123,
-0.000052197277,-0.000051990464,-0.000051784681,-0.000051579921,
-0.000051376175,-0.000051173441,-0.000050971711,-0.000050770978,
-0.000050571237,-0.000050372484,-0.000050174711,-0.000049977910,
-0.000049782080,-0.000049587212,-0.000049393303,-0.000049200344,
-0.000049008332,-0.000048820422,-0.000048630261,-0.000048441030,
-0.000048252722,-0.000048065332,-0.000047878856,-0.000047693287,
-0.000047508621,-0.000047324852,-0.000047141973,-0.000046959983,
-0.000046778874,-0.000046598641,-0.000046419280,-0.000046240785,
-0.000046063150,-0.000045886371,-0.000045710445,-0.000045535364,
-0.000045361124,-0.000045187721,-0.000045015150,-0.000044843405,
-0.000044672483,-0.000044502378,-0.000044333084,-0.000044164600,
-0.000043996919,-0.000043830035,-0.000043663946,-0.000043498647,
-0.000043334133,-0.000043170399,-0.000043007443,-0.000042845255,
-0.000042683836,-0.000042523182,-0.000042363284,-0.000042204141,
-0.000042045748,-0.000041888100,-0.000041731194,-0.000041575025,
-0.000041419591,-0.000041264882,-0.000041110901,-0.000040957641,
-0.000040805096,-0.000040653264,-0.000040502141,-0.000040351722,
-0.000040202005,-0.000040052984,-0.000039904655,-0.000039757016,
-0.000039610063,-0.000039463789,-0.000039318195,-0.000039173273,
-0.000039029021,-0.000038885436,-0.000038742515,-0.000038600250,
-0.000038458642,-0.000038317685,-0.000038177376,-0.000038037711,
-0.000037898687,-0.000037760301,-0.000037622548,-0.000037485426,
-0.000037348930,-0.000037213058,-0.000037077806,-0.000036943170,
-0.000036809146,-0.000036675734,-0.000036542929,-0.000036410724,
-0.000036279120,-0.000036148115,-0.000036017700,-0.000035887877,
-0.000035758640,-0.000035629986,-0.000035501914,-0.000035374420,
-0.000035247496,-0.000035121149,-0.000034995368,-0.000034870150,
-0.000034745493,-0.000034621399,-0.000034497857,-0.000034374867,
-0.000034252431,-0.000034130540,-0.000034009195,-0.000033888389,
-0.000033768121,-0.000033648386,-0.000033529187,-0.000033410517,
-0.000033292372,-0.000033174751,-0.000033057655,-0.000032941073,
-0.000032825009,-0.000032709445,-0.000032594403,-0.000032479866,
-0.000032365835,-0.000032252309,-0.000032139280,-0.000032026745,
-0.000031914707,-0.000031803162,-0.000031692103,-0.000031581532,
-0.000031471444,-0.000031361837,-0.000031252709,-0.000031144055,
-0.000031035877,-0.000030928170,-0.000030820929,-0.000030714158,
-0.000030607851,-0.000030502003,-0.000030396614,-0.000030291683,
-0.000030187205,-0.000030083179,-0.000029979603,-0.000029876472,
-0.000029773788,-0.000029671547,-0.000029569744,-0.000029468381,
-0.000029367450,-0.000029266957,-0.000029166892,-0.000029067256,
-0.000028968050,-0.000028869268,-0.000028770907,-0.000028672964,
-0.000028575442,-0.000028478335,-0.000028381639,-0.000028285359,
-0.000028189486,-0.000028094022,-0.000027998959,-0.000027904304,
-0.000027810049,-0.000027716193,-0.000027622734,-0.000027529671,
-0.000027436999,-0.000027344717,-0.000027252827,-0.000027161322,
-0.000027070206,-0.000026979468,-0.000026889117,-0.000026799141,
-0.000026709543,-0.000026620320,-0.000026531473,-0.000026442997,
-0.000026354890,-0.000026267149,-0.000026179778,-0.000026092771,
-0.000026006124,-0.000025919839,-0.000025833911,-0.000025748342,
-0.000025663130,-0.000025578271,-0.000025493760,-0.000025409601,
-0.000025325792,-0.000025242328,-0.000025159208,-0.000025076433,
-0.000024993995,-0.000024911902,-0.000024830144,-0.000024748726,
-0.000024667636,-0.000024586884,-0.000024506464,-0.000024426371,
-0.000024346609,-0.000024267170,-0.000024188056,-0.000024109265,
-0.000024030799,-0.000023952651,-0.000023874822,-0.000023797310,
-0.000023720113,-0.000023643230,-0.000023566661,-0.000023490400,
-0.000023414450,-0.000023338806,-0.000023263470,-0.000023188440,
-0.000023113711,-0.000023039284,-0.000022965158,-0.000022891329,
-0.000022817800,-0.000022744566,-0.000022671628,-0.000022598981,
-0.000022526626,-0.000022454564,-0.000022382789,-0.000022311301,
-0.000022240098,-0.000022169181,-0.000022098547,-0.000022028196,
-0.000021958126,-0.000021888335,-0.000021818823,-0.000021749584,
-0.000021680622,-0.000021611936,-0.000021543520,-0.000021475377,
-0.000021407504,-0.000021339898,-0.000021272563,-0.000021205493,
-0.000021138686,-0.000021072146,-0.000021005867,-0.000020939846,
-0.000020874089,-0.000020808589,-0.000020743350,-0.000020678365,
-0.000020613631,-0.000020549154,-0.000020484934,-0.000020420957,
-0.000020357237,-0.000020293765,-0.000020230538,-0.000020167562,
-0.000020104832,-0.000020042343,-0.000019980097,-0.000019918094,
-0.000019856335,-0.000019794814,-0.000019733531,-0.000019672485,
-0.000019611678,-0.000019551105,-0.000019490768,-0.000019430661,
-0.000019370790,-0.000019311147,-0.000019251735,-0.000019192557,
-0.000019133600,-0.000019074873,-0.000019016371,-0.000018958095,
-0.000018900039,-0.000018842210,-0.000018784603,-0.000018727213,
-0.000018670047,-0.000018613097,-0.000018556365,-0.000018499848,
-0.000018443548,-0.000018387465,-0.000018331591,-0.000018275933,
-0.000018220484,-0.000018165250,-0.000018110224,-0.000018055409,
-0.000018000797,-0.000017946368,-0.000017892173,-0.000017838178,
-0.000017784394,-0.000017730808,-0.000017677427,-0.000017624243,
-0.000017571262,-0.000017518482,-0.000017465898,-0.000017413516,
-0.000017361325,-0.000017309333,-0.000017257536,-0.000017205934,
-0.000017154522,-0.000017103307,-0.000017052279,-0.000017001446,
-0.000016950802,-0.000016900344,-0.000016850076,-0.000016799996,
-0.000016750100,-0.000016700391,-0.000016650869,-0.000016601528,
-0.000016552372,-0.000016503396,-0.000016454605,-0.000016405991,
-0.000016357560,-0.000016309309,-0.000016261231,-0.000016213333,
-0.000016165615,-0.000016118065,-0.000016070700,-0.000016023506,
-0.000015976480,-0.000015929631,-0.000015882957,-0.000015836450,
-0.000015790114,-0.000015743950,-0.000015697953,-0.000015652126,
-0.000015606467,-0.000015560973,-0.000015515645,-0.000015470483,
-0.000015425484,-0.000015380653,-0.000015335981,-0.000015291478,
-0.000015247131,-0.000015202943,-0.000015158921,-0.000015115055,
-0.000015071350,-0.000015027801,-0.000014984411,-0.000014941180,
-0.000014898105,-0.000014855180,-0.000014812414,-0.000014769805,
-0.000014727347,-0.000014685040,-0.000014642884,-0.000014600881,
-0.000014559031,-0.000014517328,-0.000014475782,-0.000014434379,
-0.000014393121,-0.000014352019,-0.000014311058,-0.000014270245,
-0.000014229578,-0.000014189055,-0.000014148680,-0.000014108446,
-0.000014068353,-0.000014028407,-0.000013988601,-0.000013948938,
-0.000013909417,-0.000013870032,-0.000013830791,-0.000013791682,
-0.000013752719,-0.000013713888,-0.000013675201,-0.000013636646,
-0.000013598224,-0.000013559943,-0.000013521794,-0.000013483787,
-0.000013445904,-0.000013408159,-0.000013370545,-0.000013333065,
-0.000013295714,-0.000013258497,-0.000013221409,-0.000013184449,
-0.000013147625,-0.000013110920,-0.000013074350,-0.000013037908,
-0.000013001586,-0.000012965401,-0.000012929339,-0.000012893399,
-0.000012857583,-0.000012821894,-0.000012786331,-0.000012750890,
-0.000012715572,-0.000012680376,-0.000012645302,-0.000012610349,
-0.000012575520,-0.000012540810,-0.000012506216,-0.000012471746,
-0.000012437394,-0.000012403161,-0.000012369047,-0.000012335050,
-0.000012301167,-0.000012267404,-0.000012233750,-0.000012200220,
-0.000012166803,-0.000012133497,-0.000012100313,-0.000012067236,
-0.000012034272,-0.000012001421,-0.000011968683,-0.000011936060,
-0.000011903547,-0.000011871144,-0.000011838848,-0.000011806664,
-0.000011774589,-0.000011742629,-0.000011710771,-0.000011679025,
-0.000011647383,-0.000011615852,-0.000011584427,-0.000011553112,
-0.000011521895,-0.000011490790,-0.000011459786,-0.000011428886,
-0.000011398096,-0.000011367410,-0.000011336821,-0.000011306340,
-0.000011275960,-0.000011245680,-0.000011215503,-0.000011185429,
-0.000011155458,-0.000011125582,-0.000011095810,-0.000011066134,
-0.000011036565,-0.000011007086,-0.000010977713,-0.000010948432,
-0.000010919250,-0.000010890170,-0.000010861182,-0.000010832294,
-0.000010803504,-0.000010774803,-0.000010746203,-0.000010717697,
-0.000010689281,-0.000010660963,-0.000010632742,-0.000010604605,
-0.000010576575,-0.000010548631,-0.000010520774,-0.000010493012,
-0.000010465346,-0.000010437768,-0.000010410283,-0.000010382885,
-0.000010355578,-0.000010328369,-0.000010301239,-0.000010274208,
-0.000010251560,-0.000010224675,-0.000010197872,-0.000010171157,
-0.000010144533,-0.000010117994,-0.000010091545,-0.000010065176,
-0.000010038899,-0.000010012705,-0.000009986596,-0.000009960573,
-0.000009934633,-0.000009908783,-0.000009883016,-0.000009857331,
-0.000009831723,-0.000009806208,-0.000009780772,-0.000009755416,
-0.000009730149,-0.000009704965,-0.000009679854,-0.000009654825,
-0.000009629881,-0.000009605015,-0.000009580232,-0.000009555527,
-0.000009530906,-0.000009506359,-0.000009481894,-0.000009457507,
-0.000009433201,-0.000009408968,-0.000009384813,-0.000009360741,
-0.000009336743,-0.000009312822,-0.000009288983,-0.000009265219,
-0.000009241523,-0.000009217903,-0.000009194367,-0.000009170904,
-0.000009147513,-0.000009124185,-0.000009100951,-0.000009077784,
-0.000009054691,-0.000009031668,-0.000009008727,-0.000008985847,
-0.000008963044,-0.000008940323,-0.000008917665,-0.000008895083,
-0.000008872572,-0.000008850132,-0.000008827756,-0.000008805460,
-0.000008783231,-0.000008761077,-0.000008738982,-0.000008716967,
-0.000008695016,-0.000008673142,-0.000008651317,-0.000008629577,
-0.000008607915,-0.000008586302,-0.000008564769,-0.000008543299,
-0.000008521889,-0.000008500552,-0.000008479283,-0.000008458087,
-0.000008436951,-0.000008415879,-0.000008394877,-0.000008373949,
-0.000008353056,-0.000008332256,-0.000008311510,-0.000008290834,
-0.000008270228,-0.000008249672,-0.000008229185,-0.000008208760,
-0.000008188409,-0.000008168114,-0.000008147887,-0.000008127708,
-0.000008107605,-0.000008087561,-0.000008067576,-0.000008047655,
-0.000008027797,-0.000008007992,-0.000007988257,-0.000007968579,
-0.000007948961,-0.000007929423,-0.000007909923,-0.000007890483,
-0.000007871118,-0.000007851794,-0.000007832545,-0.000007813350,
-0.000007794215,-0.000007775134,-0.000007756107,-0.000007737143,
-0.000007718239,-0.000007699398,-0.000007680602,-0.000007661878,
-0.000007643192,-0.000007624581,-0.000007606023,-0.000007587516,
-0.000007569072,-0.000007550684,-0.000007532342,-0.000007514064,
-0.000007495842,-0.000007477669,-0.000007459564,-0.000007441513,
-0.000007423500,-0.000007405565,-0.000007387653,-0.000007369819,
-0.000007352033,-0.000007334302,-0.000007316624,-0.000007298991,
-0.000007281419,-0.000007263898,-0.000007246430,-0.000007229017,
-0.000007211665,-0.000007194341,-0.000007177091,-0.000007159891,
-0.000007142723,-0.000007125624,-0.000007108576,-0.000007091568,
-0.000007074623,-0.000007057725,-0.000007040876,-0.000007024086,
-0.000007007336,-0.000006990642,-0.000006973998,-0.000006957391,
-0.000006940839,-0.000006924342,-0.000006907899,-0.000006891498,
-0.000006875140,-0.000006858837,-0.000006842582,-0.000006826375,
-0.000006810219,-0.000006794105,-0.000006778029,-0.000006762023,
-0.000006746059,-0.000006730128,-0.000006714271,-0.000006698432,
-0.000006682660,-0.000006666918,-0.000006651234,-0.000006635596,
-0.000006620005,-0.000006604454,-0.000006588941,-0.000006573490,
-0.000006558070,-0.000006542702,-0.000006527380,-0.000006512109,
-0.000006496882,-0.000006481687,-0.000006466546,-0.000006451435,
-0.000006436391,-0.000006421375,-0.000006406417,-0.000006391483,
-0.000006376616,-0.000006361774,-0.000006346978,-0.000006332234,
-0.000006317521,-0.000006302863,-0.000006288236,-0.000006273658,
-0.000006259130,-0.000006244639,-0.000006230172,-0.000006215769,
-0.000006201406,-0.000006187056,-0.000006172780,-0.000006158538,
-0.000006144333,-0.000006130175,-0.000006116049,-0.000006101973,
-0.000006087918,-0.000006073934,-0.000006059967,-0.000006046060,
-0.000006032157,-0.000006018329,-0.000006004539,-0.000005990767,
-0.000005977053,-0.000005963368,-0.000005949736,-0.000005936125,
-0.000005922564,-0.000005909033,-0.000005895557,-0.000005882108,
-0.000005868694,-0.000005855330,-0.000005841986,-0.000005828695,
-0.000005815440,-0.000005802219,-0.000005789042,-0.000005775894,
-0.000005762793,-0.000005749713,-0.000005736681,-0.000005723693,
-0.000005710736,-0.000005697801,-0.000005684917,-0.000005672083,
-0.000005659257,-0.000005646489,-0.000005633742,-0.000005621052,
-0.000005608376,-0.000005595751,-0.000005583147,-0.000005570596,
-0.000005558069,-0.000005545574,-0.000005533131,-0.000005520686,
-0.000005508306,-0.000005495958,-0.000005483642,-0.000005471368,
-0.000005459117,-0.000005446911,-0.000005434723,-0.000005422582,
-0.000005410465,-0.000005398405,-0.000005386318,-0.000005374325,
-0.000005362350,-0.000005350395,-0.000005338497,-0.000005326630,
-0.000005314785,-0.000005302970,-0.000005291186,-0.000005279453,
-0.000005267752,-0.000005256061,-0.000005244408,-0.000005232808,
-0.000005221217,-0.000005209672,-0.000005198143,-0.000005186671,
-0.000005175216,-0.000005163788,-0.000005152404,-0.000005141034,
-0.000005129712,-0.000005118411,-0.000005107148,-0.000005095903,
-0.000005084703,-0.000005073544,-0.000005062386,-0.000005051294,
-0.000005040207,-0.000005029153,-0.000005018130,-0.000005007138,
-0.000004996188,-0.000004985255,-0.000004974357,-0.000004963482,
-0.000004952658,-0.000004941822,-0.000004931049,-0.000004920300,
-0.000004909585,-0.000004898883,-0.000004888218,-0.000004877591,
-0.000004866978,-0.000004856408,-0.000004845851,-0.000004835339,
-0.000004824820,-0.000004814384,-0.000004803951,-0.000004793527,
-0.000004783167,-0.000004772816,-0.000004762493,-0.000004752199,
-0.000004741928,-0.000004731692,-0.000004721489,-0.000004711313,
-0.000004701153,-0.000004691028,-0.000004680906,-0.000004670841,
-0.000004660800,-0.000004650784,-0.000004640794,-0.000004630823,
-0.000004620897,-0.000004610976,-0.000004601088,-0.000004591223,
-0.000004581397,-0.000004571577,-0.000004561807,-0.000004552078,
-0.000004542317,-0.000004532634,-0.000004522956,-0.000004513308,
-0.000004503683,-0.000004494084,-0.000004484512,-0.000004474978,
-0.000004465445,-0.000004455950,-0.000004446495,-0.000004437029,
-0.000004427618,-0.000004418215,-0.000004408848,-0.000004399505,
-0.000004390179,-0.000004380881,-0.000004371601,-0.000004362365,
-0.000004353139,-0.000004343950,-0.000004334749,-0.000004325623,
-0.000004316509,-0.000004307391,-0.000004298319,-0.000004289266,
-0.000004280235,-0.000004271233,-0.000004262246,-0.000004253291,
-0.000004244373,-0.000004235455,-0.000004226566,-0.000004217706,
-0.000004208841,-0.000004200037,-0.000004191235,-0.000004182468,
-0.000004173720,-0.000004164993,-0.000004156290,-0.000004147604,
-0.000004138957,-0.000004130317,-0.000004121718,-0.000004113097,
-0.000004104542,-0.000004096010,-0.000004087470,-0.000004078981,
-0.000004070497,-0.000004062048,-0.000004053610,-0.000004045201,
-0.000004036807,-0.000004028443,-0.000004020089,-0.000004011762,
-0.000004003482,-0.000003995175,-0.000003986913,-0.000003978681,
-0.000003970453,-0.000003962262,-0.000003954084,-0.000003945935,
-0.000003937788,-0.000003929692,-0.000003921595,-0.000003913524,
-0.000003905457,-0.000003897431,-0.000003889449,-0.000003881430,
-0.000003873473,-0.000003865523,-0.000003857602,-0.000003849695,
-0.000003841812,-0.000003833951,-0.000003826106,-0.000003818275,
-0.000003810468,-0.000003802692,-0.000003794902,-0.000003787173,
-0.000003779443,-0.000003771744,-0.000003764045,-0.000003756378,
-0.000003748734,-0.000003741099,-0.000003733489,-0.000003725902,
-0.000003718344,-0.000003710764,-0.000003703250,-0.000003695750,
-0.000003688226,-0.000003680780,-0.000003673310,-0.000003665875,
-0.000003658465,-0.000003651067,-0.000003643680,-0.000003636336,
-0.000003628987,-0.000003621648,-0.000003614371,-0.000003607042,
-0.000003599790,-0.000003592538,-0.000003585317,-0.000003578103])
def voigt_wofz(a, u):
    """ Compute the Voigt function using Scipy's wofz().

    Parameters
    ----------
    a: float
      Ratio of Lorentzian to Gaussian linewidths.
    u: array of floats
      The frequency or velocity offsets from the line centre, in units
      of the Gaussian broadening linewidth.

    See the notes for `voigt` for more details.
    """
    try:
        from scipy.special import wofz
    except ImportError:
        # Without scipy the exact calculation is unavailable; fail loudly
        # instead of printing a message and silently returning None.
        raise ImportError(
            "Can't find scipy.special.wofz(); can only calculate the Voigt"
            " function for 0 < a < 0.1 (a=%g)" % a)
    return wofz(u + 1j * a).real
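
# Usage sketch (illustrative, added here; assumes scipy is installed):
# `voigt_wofz` is exact for any a >= 0, so it can serve as a reference
# for the fast approximation `voigt` defined below, e.g.
#
#   >>> H = voigt_wofz(0.5, np.linspace(-5, 5, 11))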


def voigt_slow(a, u):
    """ Calculate the Voigt function to very high accuracy.

    Uses numerical integration, so it is slow. The answer is correct to
    20 significant figures.

    Note this needs `mpmath` or `sympy` to be installed.
    """
    try:
        import mpmath as mp
    except ImportError:
        # Older sympy releases bundled mpmath as a submodule.
        from sympy import mpmath as mp
    with mp.workdps(20):
        z = mp.mpc(u, a)
        result = mp.exp(-z*z) * mp.erfc(-1j*z)
    return result.real
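
# Cross-check sketch (illustrative, added here): for small `a` the fast
# `voigt` below and the exact `voigt_wofz` should agree to the tolerance
# quoted in the `voigt` docstring (note `voigt_slow` accepts a scalar `u`
# only), e.g.
#
#   >>> u = np.array([0.0, 1.0, 5.0])
#   >>> np.allclose(voigt(0.05, u), voigt_wofz(0.05, u), rtol=1e-4)
#   True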
def voigt(a, u):
""" Compute the Voigt function using a fast approximation.
Parameters
----------
a : float
Ratio of Lorentzian to Gaussian linewidths (see below).
u : array of floats, shape (N,)
The frequency or velocity offsets from the line centre, in units
of the FWHM of the Gaussian broadening (see below).
Returns
-------
H : array of floats, shape (N,)
The Voigt function.
Notes
-----
The Voigt function is useful for calculating the optical depth as
function of frequency for an absorption transition associated with
an atom or ion.
The Voigt function H(a, u) is related to the Voigt profile
V(x, sigma, gamma)::
V(x, sigma, gamma) = H(a, u) / (sqrt(2*pi) * sigma)
where::
a = gamma / (sqrt(2) * sigma)
u = x / (sqrt(2) * sigma)
    The Voigt profile is the convolution of a Gaussian profile::
G(x, sigma) = exp(-0.5 * (x / sigma)^2) / (sigma * sqrt(2*pi))
and a Lorentzian profile::
L(x, gamma) = gamma / ((x^2 + gamma^2) * pi)
It is normalised; the integral of V over all x is 1.
    This function uses a Taylor approximation to the Voigt function
    for 0 < a < 0.1 (Harris 1948, ApJ, 108, 112). The relative error
    with respect to `voigt_wofz` is < 10^-4.9 for a < 0.1; for larger
    `a` the exact calculation is delegated to `voigt_wofz`.
"""
a = float(a)
if a > 0.1:
return voigt_wofz(a, u)
elif a < 0:
        raise ValueError('a must be >= 0 (got %f)' % a)
u = np.abs(u)
out = np.empty_like(u)
u2 = u*u
cond = u > 19.99
if cond.any():
# Use asymptotic approximation.
iu2c = 1. / u2[cond]
iu2c2 = iu2c * iu2c
iu2c3 = iu2c2 * iu2c
iu2c4 = iu2c3 * iu2c
a2 = a**2
k2 = 1.5 + a2
k3 = 3.75 + 5 * a2
k4 = 26.25 * a2
out[cond] = a / sqrtpi * (iu2c + k2 * iu2c2 + k3 * iu2c3 + k4 * iu2c4)
# for u values with abs(u) <= 19.99 use lookup tables
notcond = ~cond
u = u[notcond]
u2 = u2[notcond]
expmu2 = np.exp(-u2)
out[notcond] = expmu2 + a*(np.interp(u, U, H1) + a*(
(1. - 2.*u2)*expmu2 + a*(np.interp(u, U, H3) + a*(
0.5 - 2.*u2 + 2./3.*u2*u2)*expmu2)))
return out
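# Illustrative self-check (a sketch, not part of the original module): for
# small `a` the fast approximation in `voigt` should agree with the exact
# wofz() evaluation to better than ~10^-4.9 relative error, as claimed in
# its docstring. Assumes scipy is installed and that `np` and the U/H1/H3
# lookup tables defined earlier in this module are available.
if __name__ == '__main__':
    u_demo = np.linspace(0.0, 30.0, 1001)
    for a_demo in (0.001, 0.01, 0.05):
        h_fast = voigt(a_demo, u_demo)
        h_exact = voigt_wofz(a_demo, u_demo)
        rel_err = np.max(np.abs(h_fast - h_exact) / h_exact)
        print('a=%-6g max relative error vs wofz: %.2e' % (a_demo, rel_err))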
|
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from mock import patch
# External imports
# Bokeh imports
from bokeh.core.properties import Int, String, NumberSpec, List, Override, Either
from bokeh.core.property.descriptors import BasicPropertyDescriptor, DataSpecPropertyDescriptor
# Module under test
import bokeh.core.has_props as hp
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
class Parent(hp.HasProps):
int1 = Int(default=10)
ds1 = NumberSpec()
lst1 = List(String)
class Child(Parent):
int2 = Int()
str2 = String(default="foo")
ds2 = NumberSpec()
lst2 = List(Int, default=[1,2,3])
@property
def str2_proxy(self):
return self.str2
@str2_proxy.setter
def str2_proxy(self, value):
self.str2 = value*2
class OverrideChild(Parent):
int1 = Override(default=20)
def test_HasProps_default_init():
p = Parent()
assert p.int1 == 10
assert p.ds1 == None
assert p.lst1 == []
c = Child()
assert c.int1 == 10
assert c.ds1 == None
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2,3]
def test_HasProps_kw_init():
p = Parent(int1=30, ds1="foo")
assert p.int1 == 30
assert p.ds1 == "foo"
assert p.lst1 == []
c = Child(str2="bar", lst2=[2,3,4], ds2=10)
assert c.int1 == 10
assert c.ds1 == None
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "bar"
assert c.ds2 == 10
assert c.lst2 == [2,3,4]
def test_HasProps_override():
ov = OverrideChild()
assert ov.int1 == 20
assert ov.ds1 == None
assert ov.lst1 == []
def test_HasProps_equals():
p1 = Parent()
p2 = Parent()
assert p1.equals(p2)
p1.int1 = 25
assert not p1.equals(p2)
p2.int1 = 25
assert p1.equals(p2)
def test_HasProps_update():
c = Child()
c.update(**dict(lst2=[1,2], str2="baz", int1=25, ds1=dict(field="foo")))
assert c.int1 == 25
assert c.ds1 == dict(field="foo")
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "baz"
assert c.ds2 == None
assert c.lst2 == [1,2]
def test_HasProps_set_from_json():
c = Child()
c.set_from_json('lst2', [1,2])
assert c.int1 == 10
assert c.ds1 == None
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2]
c.set_from_json('ds1', "foo")
assert c.int1 == 10
assert c.ds1 == "foo"
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2]
c.set_from_json('int2', 100)
assert c.int1 == 10
assert c.ds1 == "foo"
assert c.lst1 == []
assert c.int2 == 100
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2]
def test_HasProps_update_from_json():
c = Child()
c.update_from_json(dict(lst2=[1,2], str2="baz", int1=25, ds1=dict(field="foo")))
assert c.int1 == 25
assert c.ds1 == dict(field="foo")
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "baz"
assert c.ds2 == None
assert c.lst2 == [1,2]
@patch('bokeh.core.has_props.HasProps.set_from_json')
def test_HasProps_update_from_json_passes_models_and_setter(mock_set):
c = Child()
c.update_from_json(dict(lst1=[1,2]), models="foo", setter="bar")
assert mock_set.called
assert mock_set.call_args[0] == ('lst1', [1, 2], 'foo', 'bar')
assert mock_set.call_args[1] == {}
def test_HasProps_set():
c = Child()
c.update(**dict(lst2=[1,2], str2="baz", int1=25, ds1=dict(field="foo")))
assert c.int1 == 25
assert c.ds1 == dict(field="foo")
assert c.lst1 == []
assert c.int2 == None
assert c.str2 == "baz"
assert c.ds2 == None
assert c.lst2 == [1,2]
c.str2_proxy = "some"
assert c.str2 == "somesome"
assert c.str2_proxy == "somesome"
def test_HasProps_set_error():
c = Child()
with pytest.raises(AttributeError) as e:
c.int3 = 10
assert str(e).endswith("unexpected attribute 'int3' to Child, similar attributes are int2 or int1")
with pytest.raises(AttributeError) as e:
c.junkjunk = 10
assert str(e).endswith("unexpected attribute 'junkjunk' to Child, possible attributes are ds1, ds2, int1, int2, lst1, lst2 or str2")
def test_HasProps_lookup():
p = Parent()
d = p.lookup('int1')
assert isinstance(d, BasicPropertyDescriptor)
assert d.name == 'int1'
d = p.lookup('ds1')
assert isinstance(d, DataSpecPropertyDescriptor)
assert d.name == 'ds1'
d = p.lookup('lst1')
assert isinstance(d, BasicPropertyDescriptor)
assert d.name == 'lst1'
def test_HasProps_apply_theme():
c = Child()
theme = dict(int2=10, lst1=["foo", "bar"])
c.apply_theme(theme)
assert c.themed_values() is theme
c.apply_theme(theme)
assert c.themed_values() is theme
assert c.int2 == 10
assert c.lst1 == ["foo", "bar"]
assert c.int1 == 10
assert c.ds1 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2,3]
c.int2 = 25
assert c.int2 == 25
assert c.lst1 == ["foo", "bar"]
assert c.int1 == 10
assert c.ds1 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2,3]
c.ds2 = "foo"
assert c.int2 == 25
assert c.lst1 == ["foo", "bar"]
assert c.int1 == 10
assert c.ds1 == None
assert c.str2 == "foo"
assert c.ds2 == "foo"
assert c.lst2 == [1,2,3]
def test_HasProps_unapply_theme():
c = Child()
theme = dict(int2=10, lst1=["foo", "bar"])
c.apply_theme(theme)
assert c.int2 == 10
assert c.lst1 == ["foo", "bar"]
assert c.int1 == 10
assert c.ds1 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2,3]
c.unapply_theme()
assert c.int2 == None
assert c.lst1 == []
assert c.int1 == 10
assert c.ds1 == None
assert c.str2 == "foo"
assert c.ds2 == None
assert c.lst2 == [1,2,3]
assert c.themed_values() == None
class EitherSimpleDefault(hp.HasProps):
foo = Either(List(Int), Int, default=10)
def test_HasProps_apply_theme_either_simple():
# check applying multiple themes
c = EitherSimpleDefault()
assert c.foo == 10
theme = dict(foo=20)
c.apply_theme(theme)
assert c.foo == 20
theme = dict(foo=30)
c.apply_theme(theme)
assert c.foo == 30
# check user set before theme
c = EitherSimpleDefault()
theme = dict(foo=30)
c.foo = 50
c.apply_theme(theme)
assert c.foo == 50
# check user set after theme
c = EitherSimpleDefault()
theme = dict(foo=30)
c.apply_theme(theme)
c.foo = 50
assert c.foo == 50
# check user set alt type
c = EitherSimpleDefault()
theme = dict(foo=30)
c.foo = [50]
c.apply_theme(theme)
assert c.foo == [50]
# check themed alt type
c = EitherSimpleDefault()
theme = dict(foo=[100])
c.apply_theme(theme)
assert c.foo == [100]
class EitherContainerDefault(hp.HasProps):
foo = Either(List(Int), Int, default=[10])
def test_HasProps_apply_theme_either_container():
# check applying multiple themes
c = EitherContainerDefault()
assert c.foo == [10]
theme = dict(foo=[20])
c.apply_theme(theme)
assert c.foo == [20]
theme = dict(foo=[30])
c.apply_theme(theme)
assert c.foo == [30]
# check user set before theme
c = EitherContainerDefault()
theme = dict(foo=[30])
c.foo = [50]
c.apply_theme(theme)
assert c.foo == [50]
# check user set after theme
c = EitherContainerDefault()
theme = dict(foo=[30])
c.apply_theme(theme)
c.foo = [50]
assert c.foo == [50]
# check user set alt type
c = EitherContainerDefault()
theme = dict(foo=[30])
c.foo = 50
c.apply_theme(theme)
assert c.foo == 50
# check themed alt type
c = EitherContainerDefault()
theme = dict(foo=100)
c.apply_theme(theme)
assert c.foo == 100
class IntFuncDefault(hp.HasProps):
foo = Int(default=lambda: 10)
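# Note: a callable default is invoked to produce the initial value, so
# IntFuncDefault().foo starts at 10 exactly like a literal default would
# (see the first assertion in the test below).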
def test_HasProps_apply_theme_func_default():
# check applying multiple themes
c = IntFuncDefault()
assert c.foo == 10
theme = dict(foo=20)
c.apply_theme(theme)
assert c.foo == 20
theme = dict(foo=30)
c.apply_theme(theme)
assert c.foo == 30
# check user set before theme
c = IntFuncDefault()
theme = dict(foo=30)
c.foo = 50
c.apply_theme(theme)
assert c.foo == 50
# check user set after theme
c = IntFuncDefault()
theme = dict(foo=30)
c.apply_theme(theme)
c.foo = 50
assert c.foo == 50
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
|
# This is a variant of the very old (early 90's) file
# Demo/threads/bug.py. It simply provokes a number of threads into
# trying to import the same module "at the same time".
# There are no pleasant failure modes -- most likely is that Python
# complains several times about module random having no attribute
# randrange, and then Python hangs.
import _imp as imp
import os
import importlib
import sys
import time
import shutil
import threading
import unittest
from unittest import mock
from test.support import (
verbose, run_unittest, TESTFN, reap_threads,
forget, unlink, rmtree, start_threads)
def task(N, done, done_tasks, errors):
try:
# We don't use modulefinder but still import it in order to stress
# importing of different modules from several threads.
if len(done_tasks) % 2:
import modulefinder
import random
else:
import random
import modulefinder
# This will fail if random is not completely initialized
x = random.randrange(1, 3)
except Exception as e:
errors.append(e.with_traceback(None))
finally:
done_tasks.append(threading.get_ident())
finished = len(done_tasks) == N
if finished:
done.set()
def mock_register_at_fork(func):
# bpo-30599: Mock os.register_at_fork() when importing the random module,
# since this function doesn't allow to unregister callbacks and would leak
# memory.
return mock.patch('os.register_at_fork', create=True)(func)
# Create a circular import structure: A -> C -> B -> D -> A
# NOTE: `time` is already loaded and therefore doesn't threaten to deadlock.
circular_imports_modules = {
'A': """if 1:
import time
time.sleep(%(delay)s)
x = 'a'
import C
""",
'B': """if 1:
import time
time.sleep(%(delay)s)
x = 'b'
import D
""",
'C': """import B""",
'D': """import A""",
}
class Finder:
"""A dummy finder to detect concurrent access to its find_spec()
method."""
def __init__(self):
self.numcalls = 0
self.x = 0
self.lock = threading.Lock()
def find_spec(self, name, path=None, target=None):
# Simulate some thread-unsafe behaviour. If calls to find_spec()
# are properly serialized, `x` will end up the same as `numcalls`.
# Otherwise not.
assert imp.lock_held()
with self.lock:
self.numcalls += 1
x = self.x
time.sleep(0.01)
self.x = x + 1
class FlushingFinder:
"""A dummy finder which flushes sys.path_importer_cache when it gets
called."""
def find_spec(self, name, path=None, target=None):
sys.path_importer_cache.clear()
class ThreadedImportTests(unittest.TestCase):
def setUp(self):
self.old_random = sys.modules.pop('random', None)
def tearDown(self):
# If the `random` module was already initialized, we restore the
# old module at the end so that pickling tests don't fail.
# See http://bugs.python.org/issue3657#msg110461
if self.old_random is not None:
sys.modules['random'] = self.old_random
@mock_register_at_fork
def check_parallel_module_init(self, mock_os):
if imp.lock_held():
# This triggers on, e.g., from test import autotest.
raise unittest.SkipTest("can't run when import lock is held")
done = threading.Event()
for N in (20, 50) * 3:
if verbose:
print("Trying", N, "threads ...", end=' ')
# Make sure that random and modulefinder get reimported freshly
for modname in ['random', 'modulefinder']:
try:
del sys.modules[modname]
except KeyError:
pass
errors = []
done_tasks = []
done.clear()
t0 = time.monotonic()
with start_threads(threading.Thread(target=task,
args=(N, done, done_tasks, errors,))
for i in range(N)):
pass
completed = done.wait(10 * 60)
dt = time.monotonic() - t0
if verbose:
print("%.1f ms" % (dt*1e3), flush=True, end=" ")
dbg_info = 'done: %s/%s' % (len(done_tasks), N)
self.assertFalse(errors, dbg_info)
self.assertTrue(completed, dbg_info)
if verbose:
print("OK.")
def test_parallel_module_init(self):
self.check_parallel_module_init()
def test_parallel_meta_path(self):
finder = Finder()
sys.meta_path.insert(0, finder)
try:
self.check_parallel_module_init()
self.assertGreater(finder.numcalls, 0)
self.assertEqual(finder.x, finder.numcalls)
finally:
sys.meta_path.remove(finder)
def test_parallel_path_hooks(self):
# Here the Finder instance is only used to check concurrent calls
# to path_hook().
finder = Finder()
# In order for our path hook to be called at each import, we need
# to flush the path_importer_cache, which we do by registering a
# dedicated meta_path entry.
flushing_finder = FlushingFinder()
def path_hook(path):
finder.find_spec('')
raise ImportError
sys.path_hooks.insert(0, path_hook)
sys.meta_path.append(flushing_finder)
try:
# Flush the cache a first time
flushing_finder.find_spec('')
numtests = self.check_parallel_module_init()
self.assertGreater(finder.numcalls, 0)
self.assertEqual(finder.x, finder.numcalls)
finally:
sys.meta_path.remove(flushing_finder)
sys.path_hooks.remove(path_hook)
def test_import_hangers(self):
# In case this test is run again, make sure the helper module
# gets loaded from scratch again.
try:
del sys.modules['test.threaded_import_hangers']
except KeyError:
pass
import test.threaded_import_hangers
self.assertFalse(test.threaded_import_hangers.errors)
def test_circular_imports(self):
# The goal of this test is to exercise implementations of the import
# lock which use a per-module lock, rather than a global lock.
# In these implementations, there is a possible deadlock with
# circular imports, for example:
# - thread 1 imports A (grabbing the lock for A) which imports B
# - thread 2 imports B (grabbing the lock for B) which imports A
# Such implementations should be able to detect such situations and
# resolve them one way or the other, without freezing.
# NOTE: our test constructs a slightly less trivial import cycle,
# in order to better stress the deadlock avoidance mechanism.
delay = 0.5
os.mkdir(TESTFN)
self.addCleanup(shutil.rmtree, TESTFN)
sys.path.insert(0, TESTFN)
self.addCleanup(sys.path.remove, TESTFN)
for name, contents in circular_imports_modules.items():
contents = contents % {'delay': delay}
with open(os.path.join(TESTFN, name + ".py"), "wb") as f:
f.write(contents.encode('utf-8'))
self.addCleanup(forget, name)
importlib.invalidate_caches()
results = []
def import_ab():
import A
results.append(getattr(A, 'x', None))
def import_ba():
import B
results.append(getattr(B, 'x', None))
t1 = threading.Thread(target=import_ab)
t2 = threading.Thread(target=import_ba)
t1.start()
t2.start()
t1.join()
t2.join()
self.assertEqual(set(results), {'a', 'b'})
@mock_register_at_fork
def test_side_effect_import(self, mock_os):
code = """if 1:
import threading
def target():
import random
t = threading.Thread(target=target)
t.start()
t.join()
t = None"""
sys.path.insert(0, os.curdir)
self.addCleanup(sys.path.remove, os.curdir)
filename = TESTFN + ".py"
with open(filename, "wb") as f:
f.write(code.encode('utf-8'))
self.addCleanup(unlink, filename)
self.addCleanup(forget, TESTFN)
self.addCleanup(rmtree, '__pycache__')
importlib.invalidate_caches()
__import__(TESTFN)
del sys.modules[TESTFN]
@reap_threads
def test_main():
old_switchinterval = None
try:
old_switchinterval = sys.getswitchinterval()
sys.setswitchinterval(1e-5)
except AttributeError:
pass
try:
run_unittest(ThreadedImportTests)
finally:
if old_switchinterval is not None:
sys.setswitchinterval(old_switchinterval)
if __name__ == "__main__":
test_main()
|
|
#!/usr/bin/env python2
import os
import sys
import socket
import select
import enum
import time
import threading
import pprint; pp = pprint.pprint
import socks5
# ----------------------------------------------------------------------
def shuffle(src, dst):
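    # Copy one chunk (up to 64 KiB) from src to dst. Returns the byte
    # count on success, 0 on orderly EOF and False on a socket error;
    # callers only rely on the truthiness of the result.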
try:
data = src.recv(2**16)
if not data:
return 0
dst.sendall(data)
return len(data)
    except socket.error, e:
        print "shuffle error:", e
        return False
class Association(object):
def __init__(self, clientconn, remoteconn=None):
self.client = clientconn
self.remote = remoteconn
self.thread = None
self.closed = False
self.timeout = 1.0
def __hash__(self):
return id(self)
#def __repr__(self): return "<Association #%x>" % (id(self),)
def close(self):
if self.closed:
return
self.closed = True
if hasattr(self, 'server'):
self.server.remove_assoc(self)
print "closing assoc", self
if self.client:
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.client = None
if self.remote:
self.remote.shutdown(socket.SHUT_RDWR)
self.remote.close()
self.remote = None
def authenticate(self):
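        # SOCKS5 method negotiation (RFC 1928/1929): prefer NO_AUTH, fall
        # back to username/password, otherwise reject the client.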
# CLIENT AUTH : GET OFFERED METHODS
offered = socks5.read_methods_offer(self.client)
if socks5.Method.NO_AUTH in offered['methods']:
selected = socks5.Method.NO_AUTH
elif socks5.Method.PASSWORD in offered['methods']:
selected = socks5.Method.PASSWORD
else:
selected = socks5.Method.NONE_ACCEPTED
# CLIENT AUTH : SELECT METHOD
socks5.write_method_selected(self.client, selected)
if selected == socks5.Method.NONE_ACCEPTED:
self.close()
return False
# CLIENT AUTH : AUTHENTICATE...
if selected == socks5.Method.PASSWORD:
            request = socks5.read_userpass_request(self.client)
            print "USER, PASSWORD:", request
            socks5.write_userpass_status(self.client, socks5.Reply.SUCCEEDED)
return True
def accept_command(self):
# RECEIVE COMMAND
request = socks5.read_request(self.client)
if request['command'] != socks5.Command.CONNECT:
socks5.write_reply(self.client, socks5.Reply.CMD_NOT_SUPPORTED)
self.close()
return False
assert request['command'] == socks5.Command.CONNECT
assert request['atyp'] in (socks5.AddressType.IPV4, socks5.AddressType.DOMAINNAME)
print "connection to", repr(request['dst_addr']), "port", request['dst_port']
remoteconn = socket.socket()
if self.remotebind:
remoteconn.bind(self.remotebind)
remoteconn.connect((request['dst_addr'], request['dst_port']))
(paddr, pport) = remoteconn.getsockname()
socks5.write_reply(self.client, socks5.Reply.SUCCEEDED,
socks5.AddressType.IPV4,
paddr, pport
)
self.remote = remoteconn
return True
def run(self):
self.thread = threading.Thread(target=self._thread)
self.thread.start()
def _thread(self):
        # Per-direction state machine:
        #   0: closed
        #   1: wait for the source end to become readable
        #   2: wait for the destination end to become writable
        #   3: shuffle one chunk of data
        # progression: 1 -> 2 -> 3 -> back to 1, or 0 on EOF/error
out_state = 1
in_state = 1
while (out_state > 0) or (in_state > 0):
#print "out", out_state, "in", in_state
if (self.client is None) or (self.remote is None):
break
readables = []
if out_state == 1: readables.append(self.client)
if in_state == 1: readables.append(self.remote)
writables = []
if out_state == 2: writables.append(self.remote)
if in_state == 2: writables.append(self.client)
(rfd,wfd,_) = select.select(readables, writables, [], self.timeout)
if self.client in rfd: out_state = 2
if self.remote in rfd: in_state = 2
if self.client in wfd: in_state = 3
if self.remote in wfd: out_state = 3
#print "-> out", out_state, "in", in_state
if out_state == 3:
res = shuffle(self.client, self.remote)
if not res:
out_state = 0
self.remote.shutdown(socket.SHUT_WR)
else:
out_state = 1
if in_state == 3:
res = shuffle(self.remote, self.client)
if not res:
in_state = 0
self.client.shutdown(socket.SHUT_WR)
else:
in_state = 1
#print "done spooling"
self.close()
class Endpoint(enum.Enum):
CLIENT = 1
REMOTE = 2
class SocksServer(object):
def __init__(self, interface):
self.interface = interface
self.assocs = set()
self.remotebind = None
self.timeout = 1.0
def accept_client(self):
(clientconn, addr) = self.serversock.accept()
print "new connection from", addr
assoc = Association(clientconn)
assoc.remotebind = self.remotebind
        if not assoc.authenticate():
assoc.close()
return
res = assoc.accept_command()
if not res:
assoc.close()
return
assert assoc.remote is not None
assoc.server = self
self.add_assoc(assoc)
assoc.run()
def add_assoc(self, assoc):
self.assocs.add(assoc)
def remove_assoc(self, assoc):
if assoc in self.assocs:
self.assocs.remove(assoc)
def run(self):
self.serversock = socket.socket()
self.serversock.bind(self.interface)
self.serversock.listen(5)
while True:
rfd = [self.serversock]
try:
(rfd,_,_) = select.select(rfd, [], [], self.timeout)
except KeyboardInterrupt:
break
if self.serversock in rfd:
self.accept_client()
for a in list(self.assocs):
a.close()
# ----------------------------------------------------------------------
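# Usage sketch (assumed invocation, mirroring the argument handling below):
#   python2 thisfile.py [listen_addr listen_port [remote_bind_addr]]
# With no arguments the server listens on 127.0.0.1:1080.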
if __name__ == '__main__':
if len(sys.argv) >= 3:
clientaddr = sys.argv[1]
clientport = int(sys.argv[2])
else:
clientaddr = '127.0.0.1'
clientport = 1080
server = SocksServer((clientaddr, clientport))
if len(sys.argv) >= 4:
server.remotebind = (sys.argv[3], 0)
server.run()
|
|
#
# Copyright (C) 2014 Tommy Winther
# http://tommy.winther.nu
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this Program; see the file LICENSE.txt. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import datetime
import threading
import time
import xbmc
import xbmcgui
import xbmcaddon  # used by StreamSetupDialog.onInit below
import source as src
from notification import Notification
from strings import *
import buggalo
import streaming
DEBUG = False
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15
ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
CHANNELS_PER_PAGE = 9
HALF_HOUR = datetime.timedelta(minutes=30)
def debug(s):
if DEBUG: xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
def __init__(self):
self.x = self.y = 0
def __repr__(self):
return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
def __init__(self):
self.top = self.left = self.right = self.bottom = self.width = self.cellHeight = 0
class ControlAndProgram(object):
def __init__(self, control, program):
self.control = control
self.program = program
class TVGuide(xbmcgui.WindowXML):
C_MAIN_DATE = 4000
C_MAIN_TITLE = 4020
C_MAIN_TIME = 4021
C_MAIN_DESCRIPTION = 4022
C_MAIN_IMAGE = 4023
C_MAIN_LOGO = 4024
C_MAIN_TIMEBAR = 4100
C_MAIN_LOADING = 4200
C_MAIN_LOADING_PROGRESS = 4201
C_MAIN_LOADING_TIME_LEFT = 4202
C_MAIN_LOADING_CANCEL = 4203
C_MAIN_MOUSE_CONTROLS = 4300
C_MAIN_MOUSE_HOME = 4301
C_MAIN_MOUSE_LEFT = 4302
C_MAIN_MOUSE_UP = 4303
C_MAIN_MOUSE_DOWN = 4304
C_MAIN_MOUSE_RIGHT = 4305
C_MAIN_MOUSE_EXIT = 4306
C_MAIN_BACKGROUND = 4600
C_MAIN_EPG = 5000
C_MAIN_EPG_VIEW_MARKER = 5001
C_MAIN_OSD = 6000
C_MAIN_OSD_TITLE = 6001
C_MAIN_OSD_TIME = 6002
C_MAIN_OSD_DESCRIPTION = 6003
C_MAIN_OSD_CHANNEL_LOGO = 6004
C_MAIN_OSD_CHANNEL_TITLE = 6005
def __new__(cls):
return super(TVGuide, cls).__new__(cls, 'script-tvguide-main.xml', ADDON.getAddonInfo('path'))
def __init__(self):
super(TVGuide, self).__init__()
self.migrateSettings()
self.notification = None
self.redrawingEPG = False
self.isClosing = False
self.controlAndProgramList = list()
self.ignoreMissingControlIds = list()
self.channelIdx = 0
self.focusPoint = Point()
self.epgView = EPGView()
self.streamingService = streaming.StreamsService()
self.player = xbmc.Player()
self.database = None
self.mode = MODE_EPG
self.currentChannel = None
self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting(
'alternative.playback') != 'true'
self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
self.osdChannel = None
self.osdProgram = None
# find nearest half hour
self.viewStartDate = datetime.datetime.today()
self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
seconds=self.viewStartDate.second)
def migrateSettings(self):
if ADDON.getSetting('source') == 'ONTV.dk':
ADDON.setSetting('source', 'XMLTV')
ADDON.setSetting('xmltv.url', ADDON.getSetting('ontv.url'))
def getControl(self, controlId):
try:
return super(TVGuide, self).getControl(controlId)
except:
if controlId in self.ignoreMissingControlIds:
return None
if not self.isClosing:
xbmcgui.Dialog().ok(buggalo.getRandomHeading(), strings(SKIN_ERROR_LINE1), strings(SKIN_ERROR_LINE2),
strings(SKIN_ERROR_LINE3))
self.close()
return None
def close(self):
if not self.isClosing:
self.isClosing = True
if self.player.isPlaying():
self.player.stop()
if self.database:
self.database.close(super(TVGuide, self).close)
else:
super(TVGuide, self).close()
@buggalo.buggalo_try_except({'method': 'TVGuide.onInit'})
def onInit(self):
self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
self.setFocusId(self.C_MAIN_LOADING_CANCEL)
control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
if control:
left, top = control.getPosition()
self.focusPoint.x = left
self.focusPoint.y = top
self.epgView.left = left
self.epgView.top = top
self.epgView.right = left + control.getWidth()
self.epgView.bottom = top + control.getHeight()
self.epgView.width = control.getWidth()
self.epgView.cellHeight = control.getHeight() / CHANNELS_PER_PAGE
try:
self.database = src.Database()
except src.SourceNotConfiguredException:
self.onSourceNotConfigured()
self.close()
return
self.database.initialize(self.onSourceInitialized, self.isSourceInitializationCancelled)
self.updateTimebar()
@buggalo.buggalo_try_except({'method': 'TVGuide.onAction'})
def onAction(self, action):
debug('Mode is: %s' % self.mode)
if self.mode == MODE_TV:
self.onActionTVMode(action)
elif self.mode == MODE_OSD:
self.onActionOSDMode(action)
elif self.mode == MODE_EPG:
self.onActionEPGMode(action)
def onActionTVMode(self, action):
if action.getId() == ACTION_PAGE_UP:
self._channelUp()
elif action.getId() == ACTION_PAGE_DOWN:
self._channelDown()
elif not self.osdEnabled:
pass # skip the rest of the actions
elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif action.getId() == ACTION_SHOW_INFO:
self._showOsd()
def onActionOSDMode(self, action):
if action.getId() == ACTION_SHOW_INFO:
self._hideOsd()
elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
self._hideOsd()
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif action.getId() == ACTION_SELECT_ITEM:
if self.playChannel(self.osdChannel):
self._hideOsd()
elif action.getId() == ACTION_PAGE_UP:
self._channelUp()
self._showOsd()
elif action.getId() == ACTION_PAGE_DOWN:
self._channelDown()
self._showOsd()
elif action.getId() == ACTION_UP:
self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
self._showOsd()
elif action.getId() == ACTION_DOWN:
self.osdChannel = self.database.getNextChannel(self.osdChannel)
self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
self._showOsd()
elif action.getId() == ACTION_LEFT:
previousProgram = self.database.getPreviousProgram(self.osdProgram)
if previousProgram:
self.osdProgram = previousProgram
self._showOsd()
elif action.getId() == ACTION_RIGHT:
nextProgram = self.database.getNextProgram(self.osdProgram)
if nextProgram:
self.osdProgram = nextProgram
self._showOsd()
def onActionEPGMode(self, action):
if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, ACTION_PREVIOUS_MENU]:
self.close()
return
elif action.getId() == ACTION_MOUSE_MOVE:
self._showControl(self.C_MAIN_MOUSE_CONTROLS)
return
elif action.getId() == KEY_CONTEXT_MENU:
if self.player.isPlaying():
self._hideEpg()
controlInFocus = None
currentFocus = self.focusPoint
try:
controlInFocus = self.getFocus()
if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
(left, top) = controlInFocus.getPosition()
currentFocus = Point()
currentFocus.x = left + (controlInFocus.getWidth() / 2)
currentFocus.y = top + (controlInFocus.getHeight() / 2)
except Exception:
control = self._findControlAt(self.focusPoint)
if control is None and len(self.controlAndProgramList) > 0:
control = self.controlAndProgramList[0].control
if control is not None:
self.setFocus(control)
return
if action.getId() == ACTION_LEFT:
self._left(currentFocus)
elif action.getId() == ACTION_RIGHT:
self._right(currentFocus)
elif action.getId() == ACTION_UP:
self._up(currentFocus)
elif action.getId() == ACTION_DOWN:
self._down(currentFocus)
elif action.getId() == ACTION_NEXT_ITEM:
self._nextDay()
elif action.getId() == ACTION_PREV_ITEM:
self._previousDay()
elif action.getId() == ACTION_PAGE_UP:
self._moveUp(CHANNELS_PER_PAGE)
elif action.getId() == ACTION_PAGE_DOWN:
self._moveDown(CHANNELS_PER_PAGE)
elif action.getId() == ACTION_MOUSE_WHEEL_UP:
self._moveUp(scrollEvent=True)
elif action.getId() == ACTION_MOUSE_WHEEL_DOWN:
self._moveDown(scrollEvent=True)
elif action.getId() == KEY_HOME:
self.viewStartDate = datetime.datetime.today()
self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
seconds=self.viewStartDate.second)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif action.getId() in [KEY_CONTEXT_MENU] and controlInFocus is not None:
program = self._getProgramFromControl(controlInFocus)
if program is not None:
self._showContextMenu(program)
@buggalo.buggalo_try_except({'method': 'TVGuide.onClick'})
def onClick(self, controlId):
if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
self.close()
return
if self.isClosing:
return
if controlId == self.C_MAIN_MOUSE_HOME:
self.viewStartDate = datetime.datetime.today()
self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30, seconds=self.viewStartDate.second)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
return
elif controlId == self.C_MAIN_MOUSE_LEFT:
self.viewStartDate -= datetime.timedelta(hours=2)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
return
elif controlId == self.C_MAIN_MOUSE_UP:
self._moveUp(count=CHANNELS_PER_PAGE)
return
elif controlId == self.C_MAIN_MOUSE_DOWN:
self._moveDown(count=CHANNELS_PER_PAGE)
return
elif controlId == self.C_MAIN_MOUSE_RIGHT:
self.viewStartDate += datetime.timedelta(hours=2)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
return
program = self._getProgramFromControl(self.getControl(controlId))
if program is None:
return
if not self.playChannel(program.channel):
result = self.streamingService.detectStream(program.channel)
if not result:
# could not detect stream, show context menu
self._showContextMenu(program)
        elif isinstance(result, str):
# one single stream detected, save it and start streaming
self.database.setCustomStreamUrl(program.channel, result)
self.playChannel(program.channel)
else:
# multiple matches, let user decide
d = ChooseStreamAddonDialog(result)
d.doModal()
if d.stream is not None:
self.database.setCustomStreamUrl(program.channel, d.stream)
self.playChannel(program.channel)
def _showContextMenu(self, program):
self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
d = PopupMenu(self.database, program, not program.notificationScheduled)
d.doModal()
buttonClicked = d.buttonClicked
del d
if buttonClicked == PopupMenu.C_POPUP_REMIND:
if program.notificationScheduled:
self.notification.removeNotification(program)
else:
self.notification.addNotification(program)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
d = StreamSetupDialog(self.database, program.channel)
d.doModal()
del d
elif buttonClicked == PopupMenu.C_POPUP_PLAY:
self.playChannel(program.channel)
elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
d = ChannelsMenu(self.database)
d.doModal()
del d
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif buttonClicked == PopupMenu.C_POPUP_QUIT:
self.close()
def setFocusId(self, controlId):
control = self.getControl(controlId)
if control:
self.setFocus(control)
def setFocus(self, control):
debug('setFocus %d' % control.getId())
if control in [elem.control for elem in self.controlAndProgramList]:
debug('Focus before %s' % self.focusPoint)
(left, top) = control.getPosition()
if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
self.focusPoint.x = left
self.focusPoint.y = top + (control.getHeight() / 2)
debug('New focus at %s' % self.focusPoint)
super(TVGuide, self).setFocus(control)
@buggalo.buggalo_try_except({'method': 'TVGuide.onFocus'})
def onFocus(self, controlId):
try:
controlInFocus = self.getControl(controlId)
except Exception:
return
program = self._getProgramFromControl(controlInFocus)
if program is None:
return
self.setControlLabel(self.C_MAIN_TITLE, '[B]%s[/B]' % program.title)
if program.startDate or program.endDate:
self.setControlLabel(self.C_MAIN_TIME,
'[B]%s - %s[/B]' % (self.formatTime(program.startDate), self.formatTime(program.endDate)))
else:
self.setControlLabel(self.C_MAIN_TIME, '')
if program.description:
description = program.description
else:
description = strings(NO_DESCRIPTION)
self.setControlText(self.C_MAIN_DESCRIPTION, description)
if program.channel.logo is not None:
self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
if program.imageSmall is not None:
self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
self.setControlImage(self.C_MAIN_BACKGROUND, program.imageLarge)
if not self.osdEnabled and self.player.isPlaying():
self.player.stop()
def _left(self, currentFocus):
control = self._findControlOnLeft(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate -= datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.right
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)
def _right(self, currentFocus):
control = self._findControlOnRight(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate += datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.left
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)
def _up(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlAbove(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlAbove)
def _down(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlBelow(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlBelow)
def _nextDay(self):
self.viewStartDate += datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _previousDay(self):
self.viewStartDate -= datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _moveUp(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction=self._findControlAbove)
def _moveDown(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)
def _channelUp(self):
channel = self.database.getNextChannel(self.currentChannel)
self.playChannel(channel)
def _channelDown(self):
channel = self.database.getPreviousChannel(self.currentChannel)
self.playChannel(channel)
def playChannel(self, channel):
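        # plugin:// URLs are handed off to Kodi built-ins (RunPlugin or
        # PlayMedia); direct stream URLs go through xbmc.Player(), played
        # windowed when the OSD is enabled so the overlay stays usable.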
self.currentChannel = channel
wasPlaying = self.player.isPlaying()
url = self.database.getStreamUrl(channel)
if url:
if url[0:9] == 'plugin://':
if self.alternativePlayback:
xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
elif self.osdEnabled:
xbmc.executebuiltin('PlayMedia(%s,1)' % url)
else:
xbmc.executebuiltin('PlayMedia(%s)' % url)
else:
self.player.play(item=url, windowed=self.osdEnabled)
if not wasPlaying:
self._hideEpg()
threading.Timer(1, self.waitForPlayBackStopped).start()
self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
return url is not None
def waitForPlayBackStopped(self):
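        # Wait up to ~10 seconds for playback to actually start, then poll
        # until it stops (or the addon is closing) before redrawing the EPG.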
for retry in range(0, 100):
time.sleep(0.1)
if self.player.isPlaying():
break
while self.player.isPlaying() and not xbmc.abortRequested and not self.isClosing:
time.sleep(0.5)
self.onPlayBackStopped()
def _showOsd(self):
if not self.osdEnabled:
return
if self.mode != MODE_OSD:
self.osdChannel = self.currentChannel
if self.osdProgram is not None:
self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
if self.osdProgram.startDate or self.osdProgram.endDate:
self.setControlLabel(self.C_MAIN_OSD_TIME, '[B]%s - %s[/B]' % (
self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate)))
else:
self.setControlLabel(self.C_MAIN_OSD_TIME, '')
self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
if self.osdProgram.channel.logo is not None:
self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, self.osdProgram.channel.logo)
else:
self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')
self.mode = MODE_OSD
self._showControl(self.C_MAIN_OSD)
def _hideOsd(self):
self.mode = MODE_TV
self._hideControl(self.C_MAIN_OSD)
def _hideEpg(self):
self._hideControl(self.C_MAIN_EPG)
self.mode = MODE_TV
self._clearEpg()
def onRedrawEPG(self, channelStart, startTime, focusFunction=None):
if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
debug('onRedrawEPG - already redrawing')
return # ignore redraw request while redrawing
debug('onRedrawEPG')
self.redrawingEPG = True
self.mode = MODE_EPG
self._showControl(self.C_MAIN_EPG)
self.updateTimebar(scheduleTimer=False)
# show Loading screen
self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
self._showControl(self.C_MAIN_LOADING)
self.setFocusId(self.C_MAIN_LOADING_CANCEL)
# remove existing controls
self._clearEpg()
try:
self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, self.onSourceProgressUpdate, clearExistingProgramList=False)
except src.SourceException:
self.onEPGLoadError()
return
channelsWithoutPrograms = list(channels)
# date and time row
self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate))
for col in range(1, 5):
self.setControlLabel(4000 + col, self.formatTime(startTime))
startTime += HALF_HOUR
if programs is None:
self.onEPGLoadError()
return
# set channel logo or text
for idx in range(0, CHANNELS_PER_PAGE):
if idx >= len(channels):
self.setControlImage(4110 + idx, ' ')
self.setControlLabel(4010 + idx, ' ')
else:
channel = channels[idx]
self.setControlLabel(4010 + idx, channel.title)
if channel.logo is not None:
self.setControlImage(4110 + idx, channel.logo)
else:
self.setControlImage(4110 + idx, ' ')
for program in programs:
idx = channels.index(program.channel)
if program.channel in channelsWithoutPrograms:
channelsWithoutPrograms.remove(program.channel)
startDelta = program.startDate - self.viewStartDate
stopDelta = program.endDate - self.viewStartDate
cellStart = self._secondsToXposition(startDelta.seconds)
if startDelta.days < 0:
cellStart = self.epgView.left
cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
if cellStart + cellWidth > self.epgView.right:
cellWidth = self.epgView.right - cellStart
if cellWidth > 1:
if program.notificationScheduled:
noFocusTexture = 'tvguide-program-red.png'
focusTexture = 'tvguide-program-red-focus.png'
else:
noFocusTexture = 'tvguide-program-grey.png'
focusTexture = 'tvguide-program-grey-focus.png'
if cellWidth < 25:
title = '' # Text will overflow outside the button if it is too narrow
else:
title = program.title
control = xbmcgui.ControlButton(
cellStart,
self.epgView.top + self.epgView.cellHeight * idx,
cellWidth - 2,
self.epgView.cellHeight - 2,
title,
noFocusTexture=noFocusTexture,
focusTexture=focusTexture
)
self.controlAndProgramList.append(ControlAndProgram(control, program))
for channel in channelsWithoutPrograms:
idx = channels.index(channel)
control = xbmcgui.ControlButton(
self.epgView.left,
self.epgView.top + self.epgView.cellHeight * idx,
(self.epgView.right - self.epgView.left) - 2,
self.epgView.cellHeight - 2,
strings(NO_PROGRAM_AVAILABLE),
noFocusTexture='tvguide-program-grey.png',
focusTexture='tvguide-program-grey-focus.png'
)
program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), None, None, None)
self.controlAndProgramList.append(ControlAndProgram(control, program))
# add program controls
if focusFunction is None:
focusFunction = self._findControlAt
focusControl = focusFunction(self.focusPoint)
controls = [elem.control for elem in self.controlAndProgramList]
self.addControls(controls)
if focusControl is not None:
debug('onRedrawEPG - setFocus %d' % focusControl.getId())
self.setFocus(focusControl)
self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
if focusControl is None and len(self.controlAndProgramList) > 0:
self.setFocus(self.controlAndProgramList[0].control)
self._hideControl(self.C_MAIN_LOADING)
self.redrawingEPG = False
def _clearEpg(self):
controls = [elem.control for elem in self.controlAndProgramList]
try:
self.removeControls(controls)
except RuntimeError:
for elem in self.controlAndProgramList:
try:
self.removeControl(elem.control)
except RuntimeError:
pass # happens if we try to remove a control that doesn't exist
del self.controlAndProgramList[:]
def onEPGLoadError(self):
self.redrawingEPG = False
self._hideControl(self.C_MAIN_LOADING)
xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2))
self.close()
def onSourceNotConfigured(self):
self.redrawingEPG = False
self._hideControl(self.C_MAIN_LOADING)
xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
self.close()
def isSourceInitializationCancelled(self):
return xbmc.abortRequested or self.isClosing
def onSourceInitialized(self, success):
if success:
self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
self.onRedrawEPG(0, self.viewStartDate)
def onSourceProgressUpdate(self, percentageComplete):
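        # Drive the loading progress bar and estimate the time remaining
        # from elapsed time vs. progress; returning False cancels the update.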
control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
if percentageComplete < 1:
if control:
control.setPercent(1)
self.progressStartTime = datetime.datetime.now()
self.progressPreviousPercentage = percentageComplete
elif percentageComplete != self.progressPreviousPercentage:
if control:
control.setPercent(percentageComplete)
self.progressPreviousPercentage = percentageComplete
delta = datetime.datetime.now() - self.progressStartTime
if percentageComplete < 20:
self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
else:
secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
if secondsLeft > 30:
secondsLeft -= secondsLeft % 10
self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)
return not xbmc.abortRequested and not self.isClosing
def onPlayBackStopped(self):
if not self.player.isPlaying() and not self.isClosing:
self._hideControl(self.C_MAIN_OSD)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _secondsToXposition(self, seconds):
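        # The grid shows two hours (7200 seconds); scale an offset in
        # seconds to an x coordinate within the EPG view.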
return self.epgView.left + (seconds * self.epgView.width / 7200)
def _findControlOnRight(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x < x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlOnLeft(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x > x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlBelow(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y < y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] > top):
nearestControl = control
return nearestControl
def _findControlAbove(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y > y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] < top):
nearestControl = control
return nearestControl
def _findControlAt(self, point):
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
bottom = top + control.getHeight()
right = left + control.getWidth()
if left <= point.x <= right and top <= point.y <= bottom:
return control
return None
def _getProgramFromControl(self, control):
for elem in self.controlAndProgramList:
if elem.control == control:
return elem.program
return None
def _hideControl(self, *controlIds):
"""
Visibility is inverted in skin
"""
for controlId in controlIds:
control = self.getControl(controlId)
if control:
control.setVisible(True)
def _showControl(self, *controlIds):
"""
Visibility is inverted in skin
"""
for controlId in controlIds:
control = self.getControl(controlId)
if control:
control.setVisible(False)
    def formatTime(self, timestamp):
        if timestamp:
            fmt = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
            return timestamp.strftime(fmt)
        else:
            return ''
    def formatDate(self, timestamp):
        if timestamp:
            fmt = xbmc.getRegion('dateshort')
            return timestamp.strftime(fmt)
        else:
            return ''
def setControlImage(self, controlId, image):
control = self.getControl(controlId)
if control:
control.setImage(image.encode('utf-8'))
def setControlLabel(self, controlId, label):
control = self.getControl(controlId)
if control and label:
control.setLabel(label)
def setControlText(self, controlId, text):
control = self.getControl(controlId)
if control:
control.setText(text)
def updateTimebar(self, scheduleTimer=True):
try:
# move timebar to current time
timeDelta = datetime.datetime.today() - self.viewStartDate
control = self.getControl(self.C_MAIN_TIMEBAR)
if control:
(x, y) = control.getPosition()
try:
# Sometimes raises:
# exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
control.setVisible(timeDelta.days == 0)
except:
pass
control.setPosition(self._secondsToXposition(timeDelta.seconds), y)
if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
threading.Timer(1, self.updateTimebar).start()
except Exception:
buggalo.onExceptionRaised()
class PopupMenu(xbmcgui.WindowXMLDialog):
C_POPUP_PLAY = 4000
C_POPUP_CHOOSE_STREAM = 4001
C_POPUP_REMIND = 4002
C_POPUP_CHANNELS = 4003
C_POPUP_QUIT = 4004
C_POPUP_CHANNEL_LOGO = 4100
C_POPUP_CHANNEL_TITLE = 4101
C_POPUP_PROGRAM_TITLE = 4102
def __new__(cls, database, program, showRemind):
return super(PopupMenu, cls).__new__(cls, 'script-tvguide-menu.xml', ADDON.getAddonInfo('path'))
def __init__(self, database, program, showRemind):
"""
@type database: source.Database
@param program:
@type program: source.Program
@param showRemind:
"""
super(PopupMenu, self).__init__()
self.database = database
self.program = program
self.showRemind = showRemind
self.buttonClicked = None
@buggalo.buggalo_try_except({'method': 'PopupMenu.onInit'})
def onInit(self):
playControl = self.getControl(self.C_POPUP_PLAY)
remindControl = self.getControl(self.C_POPUP_REMIND)
channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)
playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
if not self.program.channel.isPlayable():
playControl.setEnabled(False)
self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
if self.database.getCustomStreamUrl(self.program.channel):
chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
chooseStrmControl.setLabel(strings(REMOVE_STRM_FILE))
if self.program.channel.logo is not None:
channelLogoControl.setImage(self.program.channel.logo)
channelTitleControl.setVisible(False)
else:
channelTitleControl.setLabel(self.program.channel.title)
channelLogoControl.setVisible(False)
programTitleControl.setLabel(self.program.title)
if self.program.startDate:
remindControl.setEnabled(True)
if self.showRemind:
remindControl.setLabel(strings(REMIND_PROGRAM))
else:
remindControl.setLabel(strings(DONT_REMIND_PROGRAM))
else:
remindControl.setEnabled(False)
@buggalo.buggalo_try_except({'method': 'PopupMenu.onAction'})
def onAction(self, action):
if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
self.close()
return
@buggalo.buggalo_try_except({'method': 'PopupMenu.onClick'})
def onClick(self, controlId):
if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
self.database.deleteCustomStreamUrl(self.program.channel)
chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
chooseStrmControl.setLabel(strings(CHOOSE_STRM_FILE))
if not self.program.channel.isPlayable():
playControl = self.getControl(self.C_POPUP_PLAY)
playControl.setEnabled(False)
else:
self.buttonClicked = controlId
self.close()
def onFocus(self, controlId):
pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
C_CHANNELS_LIST = 6000
C_CHANNELS_SELECTION_VISIBLE = 6001
C_CHANNELS_SELECTION = 6002
C_CHANNELS_SAVE = 6003
C_CHANNELS_CANCEL = 6004
def __new__(cls, database):
return super(ChannelsMenu, cls).__new__(cls, 'script-tvguide-channels.xml', ADDON.getAddonInfo('path'))
def __init__(self, database):
"""
@type database: source.Database
"""
super(ChannelsMenu, self).__init__()
self.database = database
self.channelList = database.getChannelList(onlyVisible=False)
self.swapInProgress = False
@buggalo.buggalo_try_except({'method': 'ChannelsMenu.onInit'})
def onInit(self):
self.updateChannelList()
self.setFocusId(self.C_CHANNELS_LIST)
@buggalo.buggalo_try_except({'method': 'ChannelsMenu.onAction'})
def onAction(self, action):
if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
self.close()
return
if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() == ACTION_LEFT:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
self.setFocusId(self.C_CHANNELS_SELECTION)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
xbmc.sleep(350)
self.setFocusId(self.C_CHANNELS_LIST)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
if idx > 0:
self.swapChannels(idx, idx - 1)
elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
listControl = self.getControl(self.C_CHANNELS_LIST)
idx = listControl.getSelectedPosition()
if idx < listControl.size() - 1:
self.swapChannels(idx, idx + 1)
@buggalo.buggalo_try_except({'method': 'ChannelsMenu.onClick'})
def onClick(self, controlId):
if controlId == self.C_CHANNELS_LIST:
listControl = self.getControl(self.C_CHANNELS_LIST)
item = listControl.getSelectedItem()
channel = self.channelList[int(item.getProperty('idx'))]
channel.visible = not channel.visible
if channel.visible:
iconImage = 'tvguide-channel-visible.png'
else:
iconImage = 'tvguide-channel-hidden.png'
item.setIconImage(iconImage)
elif controlId == self.C_CHANNELS_SAVE:
self.database.saveChannelList(self.close, self.channelList)
elif controlId == self.C_CHANNELS_CANCEL:
self.close()
def onFocus(self, controlId):
pass
def updateChannelList(self):
listControl = self.getControl(self.C_CHANNELS_LIST)
listControl.reset()
for idx, channel in enumerate(self.channelList):
if channel.visible:
iconImage = 'tvguide-channel-visible.png'
else:
iconImage = 'tvguide-channel-hidden.png'
item = xbmcgui.ListItem('%3d. %s' % (idx + 1, channel.title), iconImage=iconImage)
item.setProperty('idx', str(idx))
listControl.addItem(item)
def updateListItem(self, idx, item):
channel = self.channelList[idx]
item.setLabel('%3d. %s' % (idx + 1, channel.title))
if channel.visible:
iconImage = 'tvguide-channel-visible.png'
else:
iconImage = 'tvguide-channel-hidden.png'
item.setIconImage(iconImage)
item.setProperty('idx', str(idx))
def swapChannels(self, fromIdx, toIdx):
if self.swapInProgress:
return
self.swapInProgress = True
c = self.channelList[fromIdx]
self.channelList[fromIdx] = self.channelList[toIdx]
self.channelList[toIdx] = c
# recalculate weight
for idx, channel in enumerate(self.channelList):
channel.weight = idx
listControl = self.getControl(self.C_CHANNELS_LIST)
self.updateListItem(fromIdx, listControl.getListItem(fromIdx))
self.updateListItem(toIdx, listControl.getListItem(toIdx))
listControl.selectItem(toIdx)
xbmc.sleep(50)
self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
C_STREAM_STRM_TAB = 101
C_STREAM_FAVOURITES_TAB = 102
C_STREAM_ADDONS_TAB = 103
C_STREAM_STRM_BROWSE = 1001
C_STREAM_STRM_FILE_LABEL = 1005
C_STREAM_STRM_PREVIEW = 1002
C_STREAM_STRM_OK = 1003
C_STREAM_STRM_CANCEL = 1004
C_STREAM_FAVOURITES = 2001
C_STREAM_FAVOURITES_PREVIEW = 2002
C_STREAM_FAVOURITES_OK = 2003
C_STREAM_FAVOURITES_CANCEL = 2004
C_STREAM_ADDONS = 3001
C_STREAM_ADDONS_STREAMS = 3002
C_STREAM_ADDONS_NAME = 3003
C_STREAM_ADDONS_DESCRIPTION = 3004
C_STREAM_ADDONS_PREVIEW = 3005
C_STREAM_ADDONS_OK = 3006
C_STREAM_ADDONS_CANCEL = 3007
C_STREAM_VISIBILITY_MARKER = 100
VISIBLE_STRM = 'strm'
VISIBLE_FAVOURITES = 'favourites'
VISIBLE_ADDONS = 'addons'
def __new__(cls, database, channel):
return super(StreamSetupDialog, cls).__new__(cls, 'script-tvguide-streamsetup.xml', ADDON.getAddonInfo('path'))
def __init__(self, database, channel):
"""
@type database: source.Database
@type channel: source.Channel
"""
super(StreamSetupDialog, self).__init__()
self.database = database
self.channel = channel
self.player = xbmc.Player()
self.previousAddonId = None
self.strmFile = None
self.streamingService = streaming.StreamsService()
def close(self):
if self.player.isPlaying():
self.player.stop()
super(StreamSetupDialog, self).close()
@buggalo.buggalo_try_except({'method': 'StreamSetupDialog.onInit'})
def onInit(self):
self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
favourites = self.streamingService.loadFavourites()
items = list()
for label, value in favourites:
item = xbmcgui.ListItem(label)
item.setProperty('stream', value)
items.append(item)
listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
listControl.addItems(items)
items = list()
for id in self.streamingService.getAddons():
try:
addon = xbmcaddon.Addon(id) # raises Exception if addon is not installed
item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
item.setProperty('addon_id', id)
items.append(item)
except Exception:
pass
listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
listControl.addItems(items)
self.updateAddonInfo()
@buggalo.buggalo_try_except({'method': 'StreamSetupDialog.onAction'})
def onAction(self, action):
if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
self.close()
return
elif self.getFocusId() == self.C_STREAM_ADDONS:
self.updateAddonInfo()
@buggalo.buggalo_try_except({'method': 'StreamSetupDialog.onClick'})
def onClick(self, controlId):
if controlId == self.C_STREAM_STRM_BROWSE:
stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', '.strm')
if stream:
self.database.setCustomStreamUrl(self.channel, stream)
self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
self.strmFile = stream
elif controlId == self.C_STREAM_ADDONS_OK:
listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
item = listControl.getSelectedItem()
if item:
stream = item.getProperty('stream')
self.database.setCustomStreamUrl(self.channel, stream)
self.close()
elif controlId == self.C_STREAM_FAVOURITES_OK:
listControl = self.getControl(self.C_STREAM_FAVOURITES)
item = listControl.getSelectedItem()
if item:
stream = item.getProperty('stream')
self.database.setCustomStreamUrl(self.channel, stream)
self.close()
elif controlId == self.C_STREAM_STRM_OK:
self.database.setCustomStreamUrl(self.channel, self.strmFile)
self.close()
elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL]:
self.close()
elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW]:
if self.player.isPlaying():
self.player.stop()
self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
return
stream = None
visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
if visible == self.VISIBLE_ADDONS:
listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
item = listControl.getSelectedItem()
if item:
stream = item.getProperty('stream')
elif visible == self.VISIBLE_FAVOURITES:
listControl = self.getControl(self.C_STREAM_FAVOURITES)
item = listControl.getSelectedItem()
if item:
stream = item.getProperty('stream')
elif visible == self.VISIBLE_STRM:
stream = self.strmFile
if stream is not None:
self.player.play(item=stream, windowed=True)
if self.player.isPlaying():
self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))
@buggalo.buggalo_try_except({'method': 'StreamSetupDialog.onFocus'})
def onFocus(self, controlId):
if controlId == self.C_STREAM_STRM_TAB:
self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
elif controlId == self.C_STREAM_FAVOURITES_TAB:
self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_FAVOURITES)
elif controlId == self.C_STREAM_ADDONS_TAB:
self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_ADDONS)
def updateAddonInfo(self):
listControl = self.getControl(self.C_STREAM_ADDONS)
item = listControl.getSelectedItem()
if item is None:
return
if item.getProperty('addon_id') == self.previousAddonId:
return
self.previousAddonId = item.getProperty('addon_id')
addon = xbmcaddon.Addon(id=item.getProperty('addon_id'))
self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
streams = self.streamingService.getAddonStreams(item.getProperty('addon_id'))
items = list()
for (label, stream) in streams:
item = xbmcgui.ListItem(label)
item.setProperty('stream', stream)
items.append(item)
listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
listControl.reset()
listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
C_SELECTION_LIST = 1000
def __new__(cls, addons):
return super(ChooseStreamAddonDialog, cls).__new__(cls, 'script-tvguide-streamaddon.xml',
ADDON.getAddonInfo('path'))
def __init__(self, addons):
super(ChooseStreamAddonDialog, self).__init__()
self.addons = addons
self.stream = None
@buggalo.buggalo_try_except({'method': 'ChooseStreamAddonDialog.onInit'})
def onInit(self):
items = list()
for id, label, url in self.addons:
addon = xbmcaddon.Addon(id)
item = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
item.setProperty('stream', url)
items.append(item)
listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
listControl.addItems(items)
self.setFocus(listControl)
@buggalo.buggalo_try_except({'method': 'ChooseStreamAddonDialog.onAction'})
def onAction(self, action):
if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK]:
self.close()
@buggalo.buggalo_try_except({'method': 'ChooseStreamAddonDialog.onClick'})
def onClick(self, controlId):
if controlId == ChooseStreamAddonDialog.C_SELECTION_LIST:
listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
self.stream = listControl.getSelectedItem().getProperty('stream')
self.close()
@buggalo.buggalo_try_except({'method': 'ChooseStreamAddonDialog.onFocus'})
def onFocus(self, controlId):
pass
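# Illustrative usage (an assumption about the calling code, which lives
# outside this file): WindowXMLDialog subclasses such as the ones above are
# typically driven modally and inspected after they close, e.g.:
#
#     dialog = ChooseStreamAddonDialog(addons)
#     dialog.doModal()
#     if dialog.stream is not None:
#         ...  # use dialog.stream
#     del dialog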
|
|
# -*- coding: utf-8 -*-
"""Parser for Windows EventLog (EVT) files."""
import pyevt
from dfdatetime import posix_time as dfdatetime_posix_time
from dfdatetime import semantic_time as dfdatetime_semantic_time
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.lib import specification
from plaso.parsers import interface
from plaso.parsers import manager
class WinEvtRecordEventData(events.EventData):
"""Windows EventLog (EVT) record event data.
Attributes:
computer_name (str): computer name stored in the event record.
event_category (int): event category.
event_identifier (int): event identifier.
event_type (int): event type.
facility (int): event facility.
message_identifier (int): event message identifier.
offset (int): offset of the EVT record relative to the start of the file,
from which the event data was extracted.
record_number (int): event record number.
recovered (bool): True if the record was recovered.
severity (int): event severity.
source_name (str): name of the event source.
strings (list[str]): event strings.
user_sid (str): user security identifier (SID) stored in the event record.
"""
DATA_TYPE = 'windows:evt:record'
def __init__(self):
"""Initializes event data."""
super(WinEvtRecordEventData, self).__init__(data_type=self.DATA_TYPE)
self.computer_name = None
self.event_category = None
self.event_identifier = None
self.event_type = None
self.facility = None
self.message_identifier = None
self.offset = None
self.record_number = None
self.recovered = None
self.severity = None
self.source_name = None
self.strings = None
self.user_sid = None
class WinEvtParser(interface.FileObjectParser):
"""Parses Windows EventLog (EVT) files."""
_INITIAL_FILE_OFFSET = None
NAME = 'winevt'
DATA_FORMAT = 'Windows EventLog (EVT) file'
@classmethod
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
FormatSpecification: format specification.
"""
format_specification = specification.FormatSpecification(cls.NAME)
format_specification.AddNewSignature(b'LfLe', offset=4)
return format_specification
def _GetEventData(
self, parser_mediator, record_index, evt_record, recovered=False):
"""Retrieves event data from the Windows EventLog (EVT) record.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
record_index (int): event record index.
evt_record (pyevt.record): event record.
recovered (Optional[bool]): True if the record was recovered.
Returns:
WinEvtRecordEventData: event data.
"""
event_data = WinEvtRecordEventData()
try:
event_data.record_number = evt_record.identifier
except OverflowError as exception:
warning_message = (
'unable to read record identifier from event record: {0:d} '
'with error: {1!s}').format(record_index, exception)
if recovered:
parser_mediator.ProduceRecoveryWarning(warning_message)
else:
parser_mediator.ProduceExtractionWarning(warning_message)
try:
event_identifier = evt_record.event_identifier
except OverflowError as exception:
warning_message = (
'unable to read event identifier from event record: {0:d} '
'with error: {1!s}').format(record_index, exception)
if recovered:
parser_mediator.ProduceRecoveryWarning(warning_message)
else:
parser_mediator.ProduceExtractionWarning(warning_message)
event_identifier = None
event_data.offset = evt_record.offset
event_data.recovered = recovered
# We want the event identifier to match that of the EVTX event records.
if event_identifier is not None:
event_data.event_identifier = event_identifier & 0xffff
event_data.facility = (event_identifier >> 16) & 0x0fff
event_data.severity = event_identifier >> 30
event_data.message_identifier = event_identifier
event_data.event_type = evt_record.event_type
event_data.event_category = evt_record.event_category
event_data.source_name = evt_record.source_name
# Computer name is the value stored in the event record and does not
# necessarily correspond with the actual hostname.
event_data.computer_name = evt_record.computer_name
event_data.user_sid = evt_record.user_security_identifier
event_data.strings = list(evt_record.strings)
return event_data
def _ParseRecord(
self, parser_mediator, record_index, evt_record, recovered=False):
"""Parses a Windows EventLog (EVT) record.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
record_index (int): event record index.
evt_record (pyevt.record): event record.
recovered (Optional[bool]): True if the record was recovered.
"""
event_data = self._GetEventData(
parser_mediator, record_index, evt_record, recovered=recovered)
try:
creation_time = evt_record.get_creation_time_as_integer()
except OverflowError as exception:
warning_message = (
'unable to read creation time from event record: {0:d} '
'with error: {1!s}').format(record_index, exception)
if recovered:
parser_mediator.ProduceRecoveryWarning(warning_message)
else:
parser_mediator.ProduceExtractionWarning(warning_message)
creation_time = None
if creation_time:
date_time = dfdatetime_posix_time.PosixTime(timestamp=creation_time)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
try:
written_time = evt_record.get_written_time_as_integer()
except OverflowError as exception:
warning_message = (
'unable to read written time from event record: {0:d} '
'with error: {1!s}').format(record_index, exception)
if recovered:
parser_mediator.ProduceRecoveryWarning(warning_message)
else:
parser_mediator.ProduceExtractionWarning(warning_message)
written_time = None
if written_time:
date_time = dfdatetime_posix_time.PosixTime(timestamp=written_time)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_WRITTEN)
parser_mediator.ProduceEventWithEventData(event, event_data)
if not creation_time and not written_time:
date_time = dfdatetime_semantic_time.NotSet()
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseRecords(self, parser_mediator, evt_file):
"""Parses Windows EventLog (EVT) records.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
evt_file (pyevt.file): Windows EventLog (EVT) file.
"""
# To handle errors when parsing a Windows EventLog (EVT) file in the most
# granular way, the following code iterates over every event record. The
# call to evt_file.get_record() and any access to members of evt_record
# should be wrapped in a try-except.
for record_index in range(evt_file.number_of_records):
if parser_mediator.abort:
break
try:
evt_record = evt_file.get_record(record_index)
self._ParseRecord(parser_mediator, record_index, evt_record)
except IOError as exception:
parser_mediator.ProduceExtractionWarning(
'unable to parse event record: {0:d} with error: {1!s}'.format(
record_index, exception))
for record_index in range(evt_file.number_of_recovered_records):
if parser_mediator.abort:
break
try:
evt_record = evt_file.get_recovered_record(record_index)
self._ParseRecord(
parser_mediator, record_index, evt_record, recovered=True)
except IOError as exception:
parser_mediator.ProduceRecoveryWarning((
'unable to parse recovered event record: {0:d} with error: '
'{1!s}').format(record_index, exception))
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a Windows EventLog (EVT) file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
"""
evt_file = pyevt.file()
evt_file.set_ascii_codepage(parser_mediator.codepage)
try:
evt_file.open_file_object(file_object)
except IOError as exception:
parser_mediator.ProduceExtractionWarning(
'unable to open file with error: {0!s}'.format(exception))
return
try:
self._ParseRecords(parser_mediator, evt_file)
finally:
evt_file.close()
manager.ParsersManager.RegisterParser(WinEvtParser)
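# Illustrative sketch (not part of plaso): a Windows EVT event identifier
# packs several fields into 32 bits; _GetEventData() above extracts them
# with the same masks and shifts used here.
def _example_split_event_identifier(event_identifier):
  """Splits a 32-bit EVT event identifier into its subfields (sketch)."""
  code = event_identifier & 0xffff  # bits 0-15: event code
  facility = (event_identifier >> 16) & 0x0fff  # bits 16-27: facility
  severity = event_identifier >> 30  # bits 30-31: severity
  return code, facility, severity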
|
|
from collections import OrderedDict
from qnet import (
One, Zero, ZeroOperator, IdentityOperator, ZeroSuperOperator,
IdentitySuperOperator, ZeroKet, TrivialKet, FullSpace, TrivialSpace,
CIdentity, CircuitZero, IdxSym, BasisKet, OperatorSymbol, FockIndex,
KetIndexedSum, OperatorIndexedSum, StrLabel, LocalSpace,
IndexOverList, IndexOverFockSpace, IndexOverRange, Sum, ScalarExpression,
QuantumDerivative, OperatorDerivative, Scalar, ScalarTimes, Create,
Destroy)
import sympy
from sympy import IndexedBase, symbols
import pytest
def test_neutral_elements():
"""test the properties of the neutral elements in the quantum algebras.
This tests the resolution of #63
*Only* the scalar neutral elements compare to and hash as 0 and 1. The
neutral elements of all other algebras are "unique" and don't compare to 0
and 1. Elements of a quantum algebra have an is_zero attribute
"""
assert One == 1
assert One is not 1
assert hash(One) == hash(1)
assert Zero == 0
assert Zero is not 0
assert hash(Zero) == hash(0)
assert Zero.is_zero
assert IdentityOperator != 1
assert hash(IdentityOperator) != hash(1)
assert ZeroOperator != 0
assert hash(ZeroOperator) != hash(0)
assert ZeroOperator.is_zero
assert IdentitySuperOperator != 1
assert hash(IdentitySuperOperator) != hash(1)
assert ZeroSuperOperator != 0
assert hash(ZeroSuperOperator) != hash(0)
assert ZeroSuperOperator.is_zero
assert TrivialKet != 1
assert hash(TrivialKet) != hash(1)
assert ZeroKet != 0
assert hash(ZeroKet) != hash(0)
assert ZeroKet.is_zero
# the remainder are not quantum algebra elements, so they don't have
# is_zero
assert FullSpace != 1
assert hash(FullSpace) != hash(1)
assert TrivialSpace != 0
assert hash(TrivialSpace) != hash(0)
assert CIdentity != 1
assert hash(CIdentity) != hash(1)
assert CircuitZero != 0
assert hash(CircuitZero) != hash(0)
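def test_neutral_elements_in_containers():
    """Illustrative addition (not in the original suite): because One and
    Zero hash like 1 and 0 while the other neutral elements do not, only
    the scalar neutral elements collide with plain numbers in hash-based
    containers."""
    assert len({One, 1}) == 1
    assert len({Zero, 0}) == 1
    assert len({IdentityOperator, 1}) == 2
    assert len({ZeroOperator, 0}) == 2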
def test_sum_instantiator():
"""Test use of Sum instantiator"""
i = IdxSym('i')
j = IdxSym('j')
ket_i = BasisKet(FockIndex(i), hs=0)
ket_j = BasisKet(FockIndex(j), hs=0)
A_i = OperatorSymbol(StrLabel(IndexedBase('A')[i]), hs=0)
hs0 = LocalSpace('0')
sum = Sum(i)(ket_i)
ful = KetIndexedSum(ket_i, IndexOverFockSpace(i, hs=hs0))
assert sum == ful
assert sum == Sum(i, hs0)(ket_i)
assert sum == Sum(i, hs=hs0)(ket_i)
sum = Sum(i, 1, 10)(ket_i)
ful = KetIndexedSum(ket_i, IndexOverRange(i, 1, 10))
assert sum == ful
assert sum == Sum(i, 1, 10, 1)(ket_i)
assert sum == Sum(i, 1, to=10, step=1)(ket_i)
assert sum == Sum(i, 1, 10, step=1)(ket_i)
sum = Sum(i, (1, 2, 3))(ket_i)
ful = KetIndexedSum(ket_i, IndexOverList(i, (1, 2, 3)))
assert sum == ful
assert sum == Sum(i, [1, 2, 3])(ket_i)
sum = Sum(i)(Sum(j)(ket_i * ket_j.dag()))
ful = OperatorIndexedSum(
ket_i * ket_j.dag(),
IndexOverFockSpace(i, hs0), IndexOverFockSpace(j, hs0))
assert sum == ful
#sum = Sum(i)(Sum(j)(ket_i.dag() * ket_j)) # TODO
#assert sum == ful
# TODO: sum over A_i
@pytest.fixture
def MyScalarFunc():
class MyScalarDerivative(QuantumDerivative, Scalar):
pass
class ScalarFunc(ScalarExpression):
def __init__(self, name, *sym_args):
self._name = name
self._sym_args = sym_args
super().__init__(name, *sym_args)
def _adjoint(self):
return self
@property
def args(self):
return (self._name, *self._sym_args)
def _diff(self, sym):
return MyScalarDerivative(self, derivs={sym: 1})
return ScalarFunc
def test_quantum_derivative(MyScalarFunc):
"""Test the basic properties of a QuantumDerivative"""
s, t, x = symbols('s, t, x', real=True)
f = MyScalarFunc("f", s, t)
assert f.diff(x) == Zero
fdot = f.diff(s, n=2).diff(t)
with pytest.raises(ValueError):
fdot.__class__(f, derivs={t: 0})
with pytest.raises(TypeError):
f.diff(2)
with pytest.raises(TypeError):
fdot.__class__(f, derivs={2: 1})
assert isinstance(fdot, QuantumDerivative)
assert fdot.kwargs == OrderedDict(
[('derivs', ((s, 2), (t, 1))), ('vals', None)])
assert fdot.minimal_kwargs == {'derivs': ((s, 2), (t, 1))}
assert fdot.derivs == {s: 2, t: 1}
assert isinstance(fdot.derivs, OrderedDict)
assert fdot.syms == {s, t}
assert fdot.vals == dict()
assert isinstance(fdot.vals, OrderedDict)
assert fdot.free_symbols == set([t, s])
assert len(fdot.bound_symbols) == 0
assert fdot.n == 3
assert fdot.adjoint() == fdot
def test_quantum_derivative_evaluated(MyScalarFunc):
"""Test the basic properties of a QuantumDerivative, evaluated at a
point"""
s, t, t0, x = symbols('s, t, t_0, x', real=True)
f = MyScalarFunc("f", s, t)
fdot = f.diff(s, n=2).diff(t)
fdot = fdot.evaluate_at({t: t0})
D = fdot.__class__
with pytest.raises(ValueError):
fdot.evaluate_at({x: t0})
assert fdot == D(f, derivs={s: 2, t: 1}, vals={t: t0})
assert fdot == D.create(f, derivs={s: 2, t: 1}, vals={t: t0})
assert fdot.kwargs == OrderedDict(
[('derivs', ((s, 2), (t, 1))), ('vals', ((t, t0), ))])
assert fdot.minimal_kwargs == fdot.kwargs
assert fdot.derivs == {s: 2, t: 1}
assert fdot.syms == {s, t}
assert fdot.vals == {t: t0}
assert isinstance(fdot.vals, OrderedDict)
assert fdot.free_symbols == set([s, t0])
assert len(fdot.bound_symbols) == 1
assert fdot.bound_symbols == set([t, ])
assert fdot.all_symbols == set([s, t, t0])
assert fdot.n == 3
assert fdot.adjoint() == fdot
assert fdot.diff(t0) == D(fdot, derivs={t0: 1})
assert fdot.diff(t) == Zero
assert fdot._diff(t) == Zero
assert fdot.diff(s) == D(f, derivs={s: 3, t: 1}, vals={t: t0})
with pytest.raises(TypeError):
fdot.diff(2)
with pytest.raises(TypeError):
fdot._diff(2)
def test_quantum_derivative_nonatomic_free_symbols(MyScalarFunc):
"""Test the fee_symbols of an evaluated derivative for non-atomic
symbols"""
s = IndexedBase('s')
t = IndexedBase('t')
i = IdxSym('i')
j = IdxSym('j')
t0 = symbols('t_0', real=True)
f = MyScalarFunc("f", s[i], t[j])
fdot = f.diff(s[i], n=2).diff(t[j]).evaluate_at({t[j]: t0})
assert fdot == fdot.__class__(
f, derivs={s[i]: 2, t[j]: 1}, vals={t[j]: t0})
assert fdot.kwargs == OrderedDict(
[('derivs', ((s[i], 2), (t[j], 1))), ('vals', ((t[j], t0), ))])
assert fdot.derivs == {s[i]: 2, t[j]: 1}
assert fdot.syms == {s[i], t[j]}
assert fdot.vals == {t[j]: t0}
assert fdot.free_symbols == set([s.args[0], i, t0])
assert fdot.bound_symbols == set([t.args[0], j])
assert fdot.all_symbols == set([s.args[0], t.args[0], t0, i, j])
assert fdot.diff(s[i]).n == 4
assert fdot.diff(t[j]) == Zero
f = MyScalarFunc("f", s[i], t[j], j)
fdot = f.diff(s[i], n=2).diff(t[j]).evaluate_at({t[j]: t0})
assert fdot.free_symbols == set([s.args[0], i, j, t0])
assert fdot.bound_symbols == set([t.args[0], j])
def test_abstract_taylor_series(MyScalarFunc):
"""Test a series expansion that is the abstract Taylor series only"""
s = IndexedBase('s')
t = IndexedBase('t')
i = IdxSym('i')
j = IdxSym('j')
t0 = symbols('t_0', real=True)
f = MyScalarFunc("f", s[i], t[j])
series = f.series_expand(t[j], about=0, order=3)
assert isinstance(series[0], MyScalarFunc)
assert isinstance(series[1], QuantumDerivative)
assert isinstance(series[2], ScalarTimes)
D = series[1].__class__
assert series[0] == MyScalarFunc("f", s[i], 0)
assert series[1] == D(f, derivs={t[j]: 1}, vals={t[j]: 0})
assert series[2] == D(f, derivs={t[j]: 2}, vals={t[j]: 0}) / 2
assert series[3] == D(f, derivs={t[j]: 3}, vals={t[j]: 0}) / 6
series = f.series_expand(t[j], about=t0, order=3)
assert series[0] == MyScalarFunc("f", s[i], t0)
assert series[1] == D(f, derivs={t[j]: 1}, vals={t[j]: t0})
assert series[2] == D(f, derivs={t[j]: 2}, vals={t[j]: t0}) / 2
assert series[3] == D(f, derivs={t[j]: 3}, vals={t[j]: t0}) / 6
def test_quantum_symbols_with_symargs():
"""Test properties and behavior of symbols with scalar arguments,
through the example of an OperatorSymbol"""
t = IndexedBase('t')
i = IdxSym('i')
j = IdxSym('j')
alpha, beta = symbols('alpha, beta')
A = OperatorSymbol("A", t[i], (alpha + 1)**2, hs=0)
assert A.label == 'A'
assert len(A.args) == 3
assert A.kwargs == {'hs': LocalSpace('0')}
assert A._get_instance_key(A.args, A.kwargs) == (
OperatorSymbol, 'A', t[i], (alpha + 1)**2, ('hs', A.space))
A_beta = OperatorSymbol("A", beta, (alpha + 1)**2, hs=0)
assert A != A_beta
assert A.substitute({t[i]: beta}) == A_beta
half = sympy.sympify(1) / 2
assert A.sym_args == (t[i], (alpha + 1)**2)
assert A.free_symbols == {symbols('t'), i, alpha}
assert len(A.bound_symbols) == 0
assert A.simplify_scalar(sympy.expand) == OperatorSymbol(
"A", t[i], alpha**2 + 2*alpha + 1, hs=0)
assert A.diff(beta) == ZeroOperator
assert A.diff(t[j]) == ZeroOperator
assert OperatorSymbol("A", t[i], i, j, hs=0).diff(t[j]) == ZeroOperator
assert A.diff(alpha) == OperatorDerivative(A, derivs=((alpha, 1),))
assert A.expand() == A
series = A.series_expand(t[i], about=beta, order=2)
assert len(series) == 3
assert series[0] == OperatorSymbol("A", beta, (alpha + 1)**2, hs=0)
assert series[2] == half * OperatorDerivative(
A, derivs=((t[i], 2),), vals=((t[i], beta),))
def test_quantum_symbols_with_indexedhs():
"""Test the fee_symbols method for objects that have a Hilbert space with a
sybmolic label, for the example of an OperatorSymbol"""
i, j = symbols('i, j', cls=IdxSym)
hs_i = LocalSpace(StrLabel(i))
hs_j = LocalSpace(StrLabel(j))
A = OperatorSymbol("A", hs=hs_i*hs_j)
assert A.free_symbols == {i, j}
expr = (Create(hs=hs_i)*Destroy(hs=hs_i))
assert expr.free_symbols == {i, }
|
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Interfaces for Hadoop filesystem access via HttpFs/WebHDFS
"""
import errno
import logging
import posixpath
import random
import stat
import threading
from django.utils.encoding import smart_str
from desktop.lib.exceptions_renderable import PopupException  # missing import, used by create_home_dir()
from desktop.lib.rest import http_client, resource
from hadoop.fs import normpath, SEEK_SET, SEEK_CUR, SEEK_END
from hadoop.fs.hadoopfs import Hdfs
from hadoop.fs.exceptions import WebHdfsException
from hadoop.fs.webhdfs_types import WebHdfsStat, WebHdfsContentSummary
from hadoop.conf import UPLOAD_CHUNK_SIZE
import hadoop.conf
DEFAULT_HDFS_SUPERUSER = 'hdfs'
# The number of bytes to read if not specified
DEFAULT_READ_SIZE = 1024*1024 # 1MB
LOG = logging.getLogger(__name__)
class WebHdfs(Hdfs):
"""
WebHdfs implements the filesystem interface via the WebHDFS rest protocol.
"""
DEFAULT_USER = 'hue' # This should be the user running Hue
def __init__(self, url,
fs_defaultfs,
hdfs_superuser=None,
security_enabled=False,
temp_dir="/tmp"):
self._url = url
self._superuser = hdfs_superuser
self._security_enabled = security_enabled
self._temp_dir = temp_dir
self._fs_defaultfs = fs_defaultfs
self._client = self._make_client(url, security_enabled)
self._root = resource.Resource(self._client)
# To store user info
self._thread_local = threading.local()
LOG.debug("Initializing Hadoop WebHdfs: %s (security: %s, superuser: %s)" %
(self._url, self._security_enabled, self._superuser))
@classmethod
def from_config(cls, hdfs_config):
fs_defaultfs = hdfs_config.FS_DEFAULTFS.get()
return cls(url=_get_service_url(hdfs_config),
fs_defaultfs=fs_defaultfs,
security_enabled=hdfs_config.SECURITY_ENABLED.get(),
temp_dir=hdfs_config.TEMP_DIR.get())
def __str__(self):
return "WebHdfs at %s" % (self._url,)
def _make_client(self, url, security_enabled):
client = http_client.HttpClient(
url, exc_class=WebHdfsException, logger=LOG)
if security_enabled:
client.set_kerberos_auth()
return client
@property
def uri(self):
return self._url
@property
def fs_defaultfs(self):
return self._fs_defaultfs
@property
def security_enabled(self):
return self._security_enabled
@property
def superuser(self):
if self._superuser is None:
try:
# The owner of '/' is usually the superuser
sb = self.stats('/')
self._superuser = sb.user
except Exception, ex:
LOG.exception('Failed to determine superuser of %s: %s' % (self, ex))
self._superuser = DEFAULT_HDFS_SUPERUSER
return self._superuser
@property
def user(self):
try:
return self._thread_local.user
except AttributeError:
return WebHdfs.DEFAULT_USER
def _getparams(self):
return {
"user.name" : WebHdfs.DEFAULT_USER,
"doas" : self.user
}
def setuser(self, user):
"""Set a new user. Return the current user."""
curr = self.user
self._thread_local.user = user
return curr
def listdir_stats(self, path, glob=None):
"""
listdir_stats(path, glob=None) -> [ WebHdfsStat ]
Get directory listing with stats.
"""
path = Hdfs.normpath(path)
params = self._getparams()
if glob is not None:
params['filter'] = glob
params['op'] = 'LISTSTATUS'
json = self._root.get(path, params)
filestatus_list = json['FileStatuses']['FileStatus']
return [ WebHdfsStat(st, path) for st in filestatus_list ]
def listdir(self, path, glob=None):
"""
listdir(path, glob=None) -> [ entry names ]
Get directory entry names without stats.
"""
dirents = self.listdir_stats(path, glob)
return [Hdfs.basename(x.path) for x in dirents]
def get_content_summary(self, path):
"""
get_content_summary(path) -> WebHdfsContentSummary
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'GETCONTENTSUMMARY'
json = self._root.get(path, params)
return WebHdfsContentSummary(json['ContentSummary'])
def _stats(self, path):
"""This version of stats returns None if the entry is not found"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'GETFILESTATUS'
try:
json = self._root.get(path, params)
return WebHdfsStat(json['FileStatus'], path)
except WebHdfsException, ex:
if ex.server_exc == 'FileNotFoundException' or ex.code == 404:
return None
raise ex
def stats(self, path):
"""
stats(path) -> WebHdfsStat
"""
res = self._stats(path)
if res is not None:
return res
raise IOError(errno.ENOENT, "File %s not found" % (smart_str(path),))
def exists(self, path):
return self._stats(path) is not None
def isdir(self, path):
sb = self._stats(path)
if sb is None:
return False
return sb.isDir
def isfile(self, path):
sb = self._stats(path)
if sb is None:
return False
return not sb.isDir
def _delete(self, path, recursive=False):
"""
_delete(path, recursive=False)
Delete a file or directory.
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'DELETE'
params['recursive'] = recursive and 'true' or 'false'
result = self._root.delete(path, params)
# This part of the API is nonsense.
# The lack of exception should indicate success.
if not result['boolean']:
raise IOError('Delete failed: %s' % (smart_str(path),))
def remove(self, path):
"""Delete a file."""
self._delete(path, recursive=False)
def rmdir(self, path):
"""Delete a file."""
self._delete(path, recursive=False)
def rmtree(self, path):
"""Delete a tree recursively."""
self._delete(path, recursive=True)
def mkdir(self, path, mode=None):
"""
mkdir(path, mode=None)
Creates a directory and any parent directory if necessary.
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'MKDIRS'
if mode is not None:
params['permission'] = safe_octal(mode)
success = self._root.put(path, params)
if not success:
raise IOError("Mkdir failed: %s" % (smart_str(path),))
def mktemp(self, subdir='', prefix='tmp'):
"""
mktemp(subdir, prefix) -> <temp_dir>/subdir/prefix.<rand>
Return a unique temporary filename with prefix in the cluster's temp dir.
"""
RANDOM_BITS = 64
base = self.join(self._temp_dir, subdir)
if not self.isdir(base):
self.mkdir(base)
while True:
name = "%s.%s" % (prefix, random.getrandbits(RANDOM_BITS))
candidate = self.join(base, name)
if not self.exists(candidate):
return candidate
def rename(self, old, new):
"""rename(old, new)"""
old = Hdfs.normpath(old)
if not new.startswith('/'):
new = Hdfs.join(Hdfs.dirname(old), new)
new = Hdfs.normpath(new)
params = self._getparams()
params['op'] = 'RENAME'
# Encode `new' because it's in the params
params['destination'] = smart_str(new)
result = self._root.put(old, params)
if not result['boolean']:
raise IOError("Rename failed: %s -> %s" %
(smart_str(old), smart_str(new)))
def rename_star(self, old_dir, new_dir):
"""Equivalent to `mv old_dir/* new"""
if not self.isdir(old_dir):
raise IOError(errno.ENOTDIR, "'%s' is not a directory" % (old_dir,))
if not self.exists(new_dir):
self.mkdir(new_dir)
elif not self.isdir(new_dir):
raise IOError(errno.ENOTDIR, "'%s' is not a directory" % (new_dir,))
ls = self.listdir(old_dir)
for dirent in ls:
self.rename(Hdfs.join(old_dir, dirent), Hdfs.join(new_dir, dirent))
def _listdir_r(self, path, glob=None):
"""
_listdir_r(path, glob=None) -> [ entry names ]
Get directory entry names without stats, recursively.
"""
paths = [path]
while paths:
path = paths.pop()
if self.isdir(path):
hdfs_paths = self.listdir_stats(path, glob)
paths[:0] = [x.path for x in hdfs_paths]
yield path
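# Illustrative walk (a sketch, not extra code): starting from '/a' which
# contains '/a/b', the generator above yields '/a' first and '/a/b'
# afterwards; directories are expanded as they are popped, so a parent is
# always yielded before its children.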
def chown(self, path, user=None, group=None, recursive=False):
"""chown(path, user=None, group=None, recursive=False)"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'SETOWNER'
if user is not None:
params['owner'] = user
if group is not None:
params['group'] = group
if recursive:
for xpath in self._listdir_r(path):
self._root.put(xpath, params)
else:
self._root.put(path, params)
def chmod(self, path, mode, recursive=False):
"""
chmod(path, mode, recursive=False)
`mode' should be an octal integer or string.
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'SETPERMISSION'
params['permission'] = safe_octal(mode)
if recursive:
for xpath in self._listdir_r(path):
self._root.put(xpath, params)
else:
self._root.put(path, params)
def get_home_dir(self):
"""get_home_dir() -> Home directory for the current user"""
params = self._getparams()
params['op'] = 'GETHOMEDIRECTORY'
res = self._root.get(params=params)
return res['Path']
def read(self, path, offset, length, bufsize=None):
"""
read(path, offset, length[, bufsize]) -> data
Read data from a file.
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'OPEN'
params['offset'] = long(offset)
params['length'] = long(length)
if bufsize is not None:
params['bufsize'] = bufsize
try:
return self._root.get(path, params)
except WebHdfsException, ex:
if "out of the range" in ex.message:
return ""
raise ex
def open(self, path, mode='r'):
"""
DEPRECATED!
open(path, mode='r') -> File object
This exists for legacy support and backwards compatibility only.
Please use read().
"""
return File(self, path, mode)
def create(self, path, overwrite=False, blocksize=None,
replication=None, permission=None, data=None):
"""
create(path, overwrite=False, blocksize=None, replication=None, permission=None)
Creates a file with the specified parameters.
`permission' should be an octal integer or string.
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'CREATE'
params['overwrite'] = overwrite and 'true' or 'false'
if blocksize is not None:
params['blocksize'] = long(blocksize)
if replication is not None:
params['replication'] = int(replication)
if permission is not None:
params['permission'] = safe_octal(permission)
self._invoke_with_redirect('PUT', path, params, data)
def append(self, path, data):
"""
append(path, data)
Append data to a given file.
"""
path = Hdfs.normpath(path)
params = self._getparams()
params['op'] = 'APPEND'
self._invoke_with_redirect('POST', path, params, data)
def copyfile(self, src, dst):
sb = self._stats(src)
if sb is None:
raise IOError(errno.ENOENT, "Copy src '%s' does not exist" % (src,))
if sb.isDir:
raise IOError(errno.INVAL, "Copy src '%s' is a directory" % (src,))
if self.isdir(dst):
raise IOError(errno.INVAL, "Copy dst '%s' is a directory" % (dst,))
offset = 0
while True:
data = self.read(src, offset, UPLOAD_CHUNK_SIZE.get())
if offset == 0:
self.create(dst,
overwrite=True,
blocksize=sb.blockSize,
replication=sb.replication,
permission=oct(stat.S_IMODE(sb.mode)),
data=data)
cnt = len(data)
if cnt == 0:
break
if offset != 0:
self.append(dst, data)
offset += cnt
def copy_remote_dir(self, source, destination, dir_mode=0755, owner=None):
if owner is None:
owner = self.DEFAULT_USER
self.do_as_user(owner, self.mkdir, destination, mode=dir_mode)
self.do_as_user(owner, self.chmod, destination, mode=dir_mode) # To remove after HDFS-3491
for stat in self.listdir_stats(source):
source_file = stat.path
destination_file = posixpath.join(destination, stat.name)
LOG.debug("Copying '%s' to '%s'" % (source_file, destination_file))
if stat.isDir:
self.copy_remote_dir(source_file, destination_file, dir_mode, owner)
else:
self.copyfile(source_file, destination_file)
self.do_as_superuser(self.chown, destination_file, owner, owner)
@staticmethod
def urlsplit(url):
return Hdfs.urlsplit(url)
def get_hdfs_path(self, path):
return posixpath.join(self.fs_defaultfs, path.lstrip('/'))
def create_home_dir(self, home_path=None):
if home_path is None:
home_path = self.get_home_dir()
if not self.exists(home_path):
user = self.user
try:
try:
self.setuser(self.superuser)
self.mkdir(home_path)
self.chmod(home_path, 0755)
self.chown(home_path, user, user)
except IOError, e:
msg = 'Failed to create home dir ("%s") as superuser %s' %\
(home_path, self.superuser)
LOG.exception(msg)
raise PopupException(msg, detail=e)
finally:
self.setuser(user)
def _invoke_with_redirect(self, method, path, params=None, data=None):
"""
Issue a request, and expect a redirect, and then submit the data to
the redirected location. This is used for create, write, etc.
Returns the response from the redirected request.
"""
next_url = None
try:
# Do not pass data in the first leg.
self._root.invoke(method, path, params)
except WebHdfsException, ex:
# This is expected. We get a 307 redirect.
# The following call may throw.
next_url = self._get_redirect_url(ex)
if next_url is None:
raise WebHdfsException(
"Failed to create '%s'. HDFS did not return a redirect" % (path,))
# Now talk to the real thing. The redirect url already includes the params.
client = self._make_client(next_url, self.security_enabled)
headers = {'Content-Type': 'application/octet-stream'}
return resource.Resource(client).invoke(method, data=data, headers=headers)
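# Illustrative flow (per the WebHDFS protocol, summarizing the method above):
#   1. PUT/POST http://<host>/webhdfs/v1/<path>?op=CREATE|APPEND (no body)
#      -> HTTP 307 with a Location header pointing at a datanode
#   2. PUT/POST <Location> with the file data as the request body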
def _get_redirect_url(self, webhdfs_ex):
"""Retrieve the redirect url from an exception object"""
try:
# The actual HttpError (307) is wrapped inside
http_error = webhdfs_ex.get_parent_ex()
if http_error is None:
raise webhdfs_ex
if http_error.code not in (301, 302, 303, 307):
LOG.error("Response is not a redirect: %s" % (webhdfs_ex,))
raise webhdfs_ex
return http_error.headers.getheader('location')
except Exception, ex:
LOG.error("Failed to read redirect from response: %s (%s)" %
(webhdfs_ex, ex))
raise webhdfs_ex
def get_delegation_token(self, renewer):
"""get_delegation_token(user) -> Delegation token"""
params = self._getparams()
params['op'] = 'GETDELEGATIONTOKEN'
params['renewer'] = renewer
res = self._root.get(params=params)
return res['Token']['urlString']
def do_as_user(self, username, fn, *args, **kwargs):
prev_user = self.user
try:
self.setuser(username)
fn(*args, **kwargs)
finally:
self.setuser(prev_user)
def do_as_superuser(self, fn, *args, **kwargs):
self.do_as_user(self.superuser, fn, *args, **kwargs)
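# Illustrative usage (a sketch, not part of the original module):
# impersonation via do_as_user() is scoped, so the previous user is restored
# even if the wrapped call raises.
#
#     fs.do_as_user('alice', fs.mkdir, '/user/alice/data')
#     fs.do_as_superuser(fs.chown, '/user/alice/data', 'alice', 'alice')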
class File(object):
"""
DEPRECATED!
Represent an open file on HDFS. This exists to mirror the old thriftfs
interface, for backwards compatibility only.
"""
def __init__(self, fs, path, mode='r'):
self._fs = fs
self._path = normpath(path)
self._pos = 0
self._mode = mode
try:
self._stat = fs.stats(path)
if self._stat.isDir:
raise IOError(errno.EISDIR, "Is a directory: '%s'" % (smart_str(path),))
except IOError, ex:
if ex.errno == errno.ENOENT and 'w' in self._mode:
self._fs.create(self._path)
self.stat()
else:
raise ex
def seek(self, offset, whence=0):
"""Set the file pointer to the given spot. @see file.seek"""
if whence == SEEK_SET:
self._pos = offset
elif whence == SEEK_CUR:
self._pos += offset
elif whence == SEEK_END:
self.stat()
self._pos = self._fs.stats(self._path).size + offset
else:
raise IOError(errno.EINVAL, "Invalid argument to seek for whence")
def stat(self):
self._stat = self._fs.stats(self._path)
return self._stat
def tell(self):
return self._pos
def read(self, length=DEFAULT_READ_SIZE):
data = self._fs.read(self._path, self._pos, length)
self._pos += len(data)
return data
def write(self, data):
"""Append the data to the end of the file"""
self.append(data)
def append(self, data):
if 'w' not in self._mode:
raise IOError(errno.EINVAL, "File not open for writing")
self._fs.append(self._path, data=data)
def flush(self):
pass
def close(self):
pass
def safe_octal(octal_value):
"""
safe_octal(octal_value) -> octal value in string
This correctly handles octal values specified as a string or as a numeric.
"""
try:
return oct(octal_value)
except TypeError:
return str(octal_value)
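# Illustrative behavior (examples under this module's Python 2 semantics,
# not additional code paths):
#   safe_octal(0755)  -> '0755'  (numeric modes go through oct())
#   safe_octal('755') -> '755'   (oct() raises TypeError on strings, which
#                                 then pass through unchanged)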
def _get_service_url(hdfs_config):
override = hdfs_config.WEBHDFS_URL.get()
if override:
return override
fs_defaultfs = hdfs_config.FS_DEFAULTFS.get()
netloc = Hdfs.urlsplit(fs_defaultfs)[1]
host = netloc.split(':')[0]
port = hadoop.conf.DEFAULT_NN_HTTP_PORT
return "http://%s:%s/webhdfs/v1" % (host, port)
def test_fs_configuration(fs_config):
"""
This is a config validation method. Returns a list of
[ (config_variable, error_message) ]
"""
fs = WebHdfs.from_config(fs_config)
fs.setuser(fs.superuser)
# Access root
try:
statbuf = fs.stats('/')
except Exception, ex:
LOG.info("%s -- Validation error: %s" % (fs, ex))
return [(fs_config.WEBHDFS_URL, 'Failed to access filesystem root')]
# Write a file
tmpname = fs.mktemp(prefix='hue_config_validation')
try:
fs.create(tmpname)
except Exception, ex:
LOG.info("%s -- Validation error: %s" % (fs, ex))
return [(fs_config.WEBHDFS_URL,
'Failed to create temporary file "%s"' % (tmpname,))]
# Check superuser has super power
try: # Finally: delete tmpname
try:
fs.chown(tmpname, fs.superuser)
except Exception, ex:
LOG.info("%s -- Validation error: %s" % (fs, ex))
return [(fs_config.WEBHDFS_URL,
'Failed to chown file. Please make sure that the filesystem root '
'is owned by the cluster superuser ("hdfs" in most cases).')]
finally:
try:
fs.remove(tmpname)
except Exception, ex:
LOG.error("Failed to remove '%s': %s" % (tmpname, ex))
return [(fs_config.WEBHDFS_URL,
'Failed to remove temporary file "%s"' % (tmpname,))]
return [ ]
|
|
"""
Implements a special view to visualize and stage pieces of a project's
current diff.
"""
from collections import namedtuple
from contextlib import contextmanager
import os
import sublime
from sublime_plugin import WindowCommand, TextCommand, EventListener
from . import intra_line_colorizer
from .navigate import GsNavigate
from ..fns import filter_, flatten
from ..parse_diff import SplittedDiff
from ..git_command import GitCommand, GitSavvyError
from ..runtime import enqueue_on_ui, enqueue_on_worker
from ..utils import flash, focus_view, line_indentation
from ..view import replace_view_content, Position
from ...common import util
__all__ = (
"gs_diff",
"gs_diff_refresh",
"gs_diff_toggle_setting",
"gs_diff_toggle_cached_mode",
"gs_diff_zoom",
"gs_diff_stage_or_reset_hunk",
"gs_diff_open_file_at_hunk",
"gs_diff_navigate",
"gs_diff_undo",
"GsDiffFocusEventListener",
)
MYPY = False
if MYPY:
from typing import (
Iterable, Iterator, List, NamedTuple, Optional, Set,
Tuple, TypeVar
)
from ..parse_diff import Hunk, HunkLine
from ..types import LineNo, ColNo
T = TypeVar('T')
Point = int
LineCol = Tuple[LineNo, ColNo]
HunkLineWithB = NamedTuple('HunkLineWithB', [('line', 'HunkLine'), ('b', LineNo)])
else:
HunkLineWithB = namedtuple('HunkLineWithB', 'line b')
DIFF_TITLE = "DIFF: {}"
DIFF_CACHED_TITLE = "DIFF: {} (staged)"
# Clickable lines:
# (A) common/commands/view_manipulation.py | 1 +
# (B) --- a/common/commands/view_manipulation.py
# (C) +++ b/common/commands/view_manipulation.py
# (D) diff --git a/common/commands/view_manipulation.py b/common/commands/view_manipulation.py
FILE_RE = (
r"^(?:\s(?=.*\s+\|\s+\d+\s)|--- a\/|\+{3} b\/|diff .+b\/)"
# ^^^^^^^^^^^^^^^^^^^^^ (A)
# ^ one space, and then somewhere later on the line the pattern ` | 23 `
# ^^^^^^^ (B)
# ^^^^^^^^ (C)
# ^^^^^^^^^^^ (D)
r"(\S[^|]*?)"
# ^ ! lazy to not match the trailing spaces, see below
r"(?:\s+\||$)"
# ^ (B), (C), (D)
# ^^^^^ (A) We must match the spaces here bc Sublime will not rstrip() the
# filename for us.
)
# Clickable line:
# @@ -69,6 +69,7 @@ class GsHandleVintageousCommand(TextCommand):
# ^^ we want the second (current) line offset of the diff
LINE_RE = r"^@@ [^+]*\+(\d+)"
def compute_identifier_for_view(view):
# type: (sublime.View) -> Optional[Tuple]
settings = view.settings()
return (
settings.get('git_savvy.repo_path'),
settings.get('git_savvy.file_path'),
settings.get('git_savvy.diff_view.base_commit'),
settings.get('git_savvy.diff_view.target_commit')
) if settings.get('git_savvy.diff_view') else None
class gs_diff(WindowCommand, GitCommand):
"""
Create a new view to display the difference of `target_commit`
against `base_commit`. If `target_commit` is None, compare
working directory with `base_commit`. If `in_cached_mode` is set,
display a diff of the Git index. Set `disable_stage` to True to
disable Ctrl-Enter in the diff view.
"""
def run(
self,
repo_path=None,
file_path=None,
in_cached_mode=None, # type: Optional[bool]
current_file=False,
base_commit=None,
target_commit=None,
disable_stage=False,
title=None,
ignore_whitespace=False,
context_lines=3
):
# type: (...) -> None
if repo_path is None:
repo_path = self.repo_path
assert repo_path
if current_file:
file_path = self.file_path or file_path
this_id = (
repo_path,
file_path,
base_commit,
target_commit
)
for view in self.window.views():
if compute_identifier_for_view(view) == this_id:
if in_cached_mode is not None:
settings = view.settings()
settings.set("git_savvy.diff_view.in_cached_mode", in_cached_mode)
focus_view(view)
break
else:
diff_view = util.view.get_scratch_view(self, "diff", read_only=True)
show_diffstat = self.savvy_settings.get("show_diffstat", True)
settings = diff_view.settings()
settings.set("git_savvy.repo_path", repo_path)
settings.set("git_savvy.file_path", file_path)
settings.set("git_savvy.diff_view.in_cached_mode", bool(in_cached_mode))
settings.set("git_savvy.diff_view.ignore_whitespace", ignore_whitespace)
settings.set("git_savvy.diff_view.context_lines", context_lines)
settings.set("git_savvy.diff_view.base_commit", base_commit)
settings.set("git_savvy.diff_view.target_commit", target_commit)
settings.set("git_savvy.diff_view.show_diffstat", show_diffstat)
settings.set("git_savvy.diff_view.disable_stage", disable_stage)
settings.set("git_savvy.diff_view.history", [])
settings.set("git_savvy.diff_view.just_hunked", "")
settings.set("result_file_regex", FILE_RE)
settings.set("result_line_regex", LINE_RE)
settings.set("result_base_dir", repo_path)
if not title:
title = (DIFF_CACHED_TITLE if in_cached_mode else DIFF_TITLE).format(
os.path.basename(file_path) if file_path else os.path.basename(repo_path)
)
diff_view.set_name(title)
diff_view.set_syntax_file("Packages/GitSavvy/syntax/diff_view.sublime-syntax")
diff_view.run_command("gs_handle_vintageous")
class gs_diff_refresh(TextCommand, GitCommand):
"""Refresh the diff view with the latest repo state."""
def run(self, edit, sync=True):
if sync:
self.run_impl(sync)
else:
enqueue_on_worker(self.run_impl, sync)
def run_impl(self, runs_on_ui_thread):
if self.view.settings().get("git_savvy.disable_diff"):
return
repo_path = self.view.settings().get("git_savvy.repo_path")
file_path = self.view.settings().get("git_savvy.file_path")
in_cached_mode = self.view.settings().get("git_savvy.diff_view.in_cached_mode")
ignore_whitespace = self.view.settings().get("git_savvy.diff_view.ignore_whitespace")
base_commit = self.view.settings().get("git_savvy.diff_view.base_commit")
target_commit = self.view.settings().get("git_savvy.diff_view.target_commit")
show_diffstat = self.view.settings().get("git_savvy.diff_view.show_diffstat")
disable_stage = self.view.settings().get("git_savvy.diff_view.disable_stage")
context_lines = self.view.settings().get('git_savvy.diff_view.context_lines')
prelude = "\n"
title = ["DIFF:"]
if file_path:
rel_file_path = os.path.relpath(file_path, repo_path)
prelude += " FILE: {}\n".format(rel_file_path)
title += [os.path.basename(file_path)]
elif not disable_stage:
title += [os.path.basename(repo_path)]
if disable_stage:
if in_cached_mode:
prelude += " {}..INDEX\n".format(base_commit or target_commit)
title += ["{}..INDEX".format(base_commit or target_commit)]
else:
if base_commit and target_commit:
prelude += " {}..{}\n".format(base_commit, target_commit)
title += ["{}..{}".format(base_commit, target_commit)]
elif base_commit and "..." in base_commit:
prelude += " {}\n".format(base_commit)
title += [base_commit]
else:
prelude += " {}..WORKING DIR\n".format(base_commit or target_commit)
title += ["{}..WORKING DIR".format(base_commit or target_commit)]
else:
if in_cached_mode:
prelude += " STAGED CHANGES (Will commit)\n"
title += ["(staged)"]
else:
prelude += " UNSTAGED CHANGES\n"
if ignore_whitespace:
prelude += " IGNORING WHITESPACE\n"
try:
diff = self.git(
"diff",
"--ignore-all-space" if ignore_whitespace else None,
"--unified={}".format(context_lines) if context_lines is not None else None,
"--stat" if show_diffstat else None,
"--patch",
"--no-color",
"--cached" if in_cached_mode else None,
base_commit,
target_commit,
"--", file_path)
except GitSavvyError as err:
# When the output of the above Git command fails to correctly parse,
# the expected notification will be displayed to the user. However,
# once the user presses OK, a new refresh event will be triggered on
# the view.
#
# This causes an infinite loop of increasingly frustrating error
# messages, ultimately resulting in psychosis and serious medical
# bills. This is a better, though somewhat kludgy, alternative.
#
if err.args and type(err.args[0]) == UnicodeDecodeError:
self.view.settings().set("git_savvy.disable_diff", True)
return
raise err
old_diff = self.view.settings().get("git_savvy.diff_view.raw_diff")
self.view.settings().set("git_savvy.diff_view.raw_diff", diff)
prelude += "\n--\n"
draw = lambda: _draw(
self.view,
' '.join(title),
prelude,
diff,
navigate=not old_diff
)
if runs_on_ui_thread:
draw()
else:
enqueue_on_ui(draw)
def _draw(view, title, prelude, diff_text, navigate):
# type: (sublime.View, str, str, str, bool) -> None
view.set_name(title)
text = prelude + diff_text
replace_view_content(view, text)
if navigate:
view.run_command("gs_diff_navigate")
intra_line_colorizer.annotate_intra_line_differences(view, diff_text, len(prelude))
class gs_diff_toggle_setting(TextCommand):
"""
Toggle view settings: `ignore_whitespace`.
"""
def run(self, edit, setting):
settings = self.view.settings()
setting_str = "git_savvy.diff_view.{}".format(setting)
current_mode = settings.get(setting_str)
next_mode = not current_mode
settings.set(setting_str, next_mode)
flash(self.view, "{} is now {}".format(setting, next_mode))
self.view.run_command("gs_diff_refresh")
class gs_diff_toggle_cached_mode(TextCommand):
"""
Toggle `in_cached_mode` or flip `base` with `target`.
"""
# NOTE: MUST NOT be async, otherwise `view.show` will not update the view 100%!
def run(self, edit):
settings = self.view.settings()
base_commit = settings.get("git_savvy.diff_view.base_commit")
target_commit = settings.get("git_savvy.diff_view.target_commit")
if base_commit and target_commit:
settings.set("git_savvy.diff_view.base_commit", target_commit)
settings.set("git_savvy.diff_view.target_commit", base_commit)
self.view.run_command("gs_diff_refresh")
return
if base_commit and "..." in base_commit:
a, b = base_commit.split("...")
settings.set("git_savvy.diff_view.base_commit", "{}...{}".format(b, a))
self.view.run_command("gs_diff_refresh")
return
last_cursors = settings.get('git_savvy.diff_view.last_cursors') or []
settings.set('git_savvy.diff_view.last_cursors', pickle_sel(self.view.sel()))
setting_str = "git_savvy.diff_view.{}".format('in_cached_mode')
current_mode = settings.get(setting_str)
next_mode = not current_mode
settings.set(setting_str, next_mode)
flash(self.view, "Showing {} changes".format("staged" if next_mode else "unstaged"))
self.view.run_command("gs_diff_refresh")
just_hunked = self.view.settings().get("git_savvy.diff_view.just_hunked")
# Check for `last_cursors` as well bc it is only falsy on the *first*
# switch. T.i. if the user hunked and then switches to see what will be
# actually comitted, the view starts at the top. Later, the view will
# show the last added hunk.
if just_hunked and last_cursors:
self.view.settings().set("git_savvy.diff_view.just_hunked", "")
region = find_hunk_in_view(self.view, just_hunked)
if region:
set_and_show_cursor(self.view, region.a)
return
if last_cursors:
# The 'flipping' between the two states should be as fast as possible and
# without visual clutter.
with no_animations():
set_and_show_cursor(self.view, unpickle_sel(last_cursors))
class gs_diff_zoom(TextCommand):
"""
Update the number of context lines the diff shows by given `amount`
and refresh the view.
"""
def run(self, edit, amount):
# type: (sublime.Edit, int) -> None
settings = self.view.settings()
current = settings.get('git_savvy.diff_view.context_lines')
next = max(current + amount, 0)
settings.set('git_savvy.diff_view.context_lines', next)
# Getting a meaningful cursor after 'zooming' is the tricky part
# here. We first extract all hunks under the cursors *verbatim*.
diff = SplittedDiff.from_view(self.view)
cur_hunks = [
header.text + hunk.text
for header, hunk in filter_(diff.head_and_hunk_for_pt(s.a) for s in self.view.sel())
]
self.view.run_command("gs_diff_refresh")
# Now, we fuzzy search the new view content for the old hunks.
cursors = {
region.a
for region in (
filter_(find_hunk_in_view(self.view, hunk) for hunk in cur_hunks)
)
}
if cursors:
set_and_show_cursor(self.view, cursors)
class GsDiffFocusEventListener(EventListener):
"""
If the current view is a diff view, refresh the view with latest tree status
when the view regains focus.
"""
def on_activated_async(self, view):
if view.settings().get("git_savvy.diff_view") is True:
view.run_command("gs_diff_refresh", {"sync": False})
class gs_diff_stage_or_reset_hunk(TextCommand, GitCommand):
"""
Depending on whether the user is in cached mode and what action
the user took, either 1) stage, 2) unstage, or 3) reset the
hunk under the user's cursor(s).
"""
# NOTE: The whole command (including the view refresh) must be blocking otherwise
# the view and the repo state get out of sync and e.g. hitting 'h' very fast will
# result in errors.
def run(self, edit, reset=False):
ignore_whitespace = self.view.settings().get("git_savvy.diff_view.ignore_whitespace")
if ignore_whitespace:
sublime.error_message("Staging is not supported while ignoring [w]hitespace is on.")
return None
# Filter out any cursors that are larger than a single point.
cursor_pts = tuple(cursor.a for cursor in self.view.sel() if cursor.a == cursor.b)
diff = SplittedDiff.from_view(self.view)
patches = unique(flatten(filter_(diff.head_and_hunk_for_pt(pt) for pt in cursor_pts)))
patch = ''.join(part.text for part in patches)
if patch:
self.apply_patch(patch, cursor_pts, reset)
else:
window = self.view.window()
if window:
window.status_message('Not within a hunk')
def apply_patch(self, patch, pts, reset):
in_cached_mode = self.view.settings().get("git_savvy.diff_view.in_cached_mode")
context_lines = self.view.settings().get('git_savvy.diff_view.context_lines')
# The three argument combinations below result from the following
# three scenarios:
#
# 1) The user is in non-cached mode and wants to stage a hunk, so
# do NOT apply the patch in reverse, but do apply it only against
# the cached/indexed file (not the working tree).
# 2) The user is in non-cached mode and wants to undo a line/hunk, so
# DO apply the patch in reverse, and do apply it both against the
# index and the working tree.
# 3) The user is in cached mode and wants to undo a line/hunk, so DO
# apply the patch in reverse, but only apply it against the cached/
# indexed file.
#
# NOTE: When in cached mode, no action will be taken when the user
# presses SUPER-BACKSPACE.
args = (
"apply",
"-R" if (reset or in_cached_mode) else None,
"--cached" if (in_cached_mode or not reset) else None,
"--unidiff-zero" if context_lines == 0 else None,
"-",
)
self.git(
*args,
stdin=patch
)
history = self.view.settings().get("git_savvy.diff_view.history")
history.append((args, patch, pts, in_cached_mode))
self.view.settings().set("git_savvy.diff_view.history", history)
self.view.settings().set("git_savvy.diff_view.just_hunked", patch)
self.view.run_command("gs_diff_refresh")
MYPY = False
if MYPY:
from typing import NamedTuple
JumpTo = NamedTuple('JumpTo', [
('commit_hash', Optional[str]),
('filename', str),
('line', LineNo),
('col', ColNo)
])
else:
from collections import namedtuple
JumpTo = namedtuple('JumpTo', 'commit_hash filename line col')
class gs_diff_open_file_at_hunk(TextCommand, GitCommand):
"""
For each cursor in the view, identify the hunk in which the cursor lies,
and open the file at that hunk in a separate view.
"""
def run(self, edit):
# type: (sublime.Edit) -> None
def first_per_file(items):
# type: (Iterator[JumpTo]) -> Iterator[JumpTo]
seen = set() # type: Set[str]
for item in items:
if item.filename not in seen:
seen.add(item.filename)
yield item
diff = SplittedDiff.from_view(self.view)
jump_positions = list(first_per_file(filter_(
jump_position_to_file(self.view, diff, s.begin())
for s in self.view.sel()
)))
if not jump_positions:
flash(self.view, "Not within a hunk")
else:
for jp in jump_positions:
self.load_file_at_line(*jp)
def load_file_at_line(self, commit_hash, filename, line, col):
# type: (Optional[str], str, LineNo, ColNo) -> None
"""
        Open the file at `commit_hash` (or at the view's
        `git_savvy.diff_view.target_commit` setting) if either is set.
        Otherwise, open the working-tree file directly.
"""
target_commit = commit_hash or self.view.settings().get("git_savvy.diff_view.target_commit")
full_path = os.path.join(self.repo_path, filename)
window = self.view.window()
if not window:
return
if target_commit:
window.run_command("gs_show_file_at_commit", {
"commit_hash": target_commit,
"filepath": full_path,
"position": Position(line - 1, col - 1, None),
})
else:
window.open_file(
"{file}:{line}:{col}".format(file=full_path, line=line, col=col),
sublime.ENCODED_POSITION
)
def jump_position_to_file(view, diff, pt):
# type: (sublime.View, SplittedDiff, int) -> Optional[JumpTo]
head_and_hunk = diff.head_and_hunk_for_pt(pt)
if not head_and_hunk:
return None
header, hunk = head_and_hunk
linecol = real_linecol_in_hunk(hunk, *row_offset_and_col_in_hunk(view, hunk, pt))
if not linecol:
return None
line, col = linecol
filename = header.from_filename()
if not filename:
return None
commit_header = diff.commit_for_hunk(hunk)
commit_hash = commit_header.commit_hash() if commit_header else None
return JumpTo(commit_hash, filename, line, col)
def row_offset_and_col_in_hunk(view, hunk, pt):
# type: (sublime.View, Hunk, Point) -> Tuple[int, ColNo]
"""Return row offset of `pt` relative to hunk start and its column
    Note that the column is already 1-based, i.e. a `ColNo`
"""
head_row, _ = view.rowcol(hunk.a)
pt_row, col = view.rowcol(pt)
    # If `col=0` the user is on the meta char ('+', '-' or ' ') which is
    # not present in the source. We pin `col` to 1 because the target API
    # `open_file` expects 1-based row, col offsets.
return pt_row - head_row, max(col, 1)
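# For example, with the hunk header '@@ ..' on view row 100 and the cursor
# on row 103 at col 0 (the '+'/'-'/' ' meta char), this returns (3, 1).
# (Illustrative numbers, not taken from a real buffer.)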
def real_linecol_in_hunk(hunk, row_offset, col):
# type: (Hunk, int, ColNo) -> Optional[LineCol]
"""Translate relative to absolute line, col pair"""
hunk_lines = counted_lines(hunk)
if not hunk_lines:
return None
# If the user is on the header line ('@@ ..') pretend to be on the
# first visible line with some content instead.
if row_offset == 0:
row_offset = next(
(
index
for index, (line, _) in enumerate(hunk_lines, 1)
if not line.is_from_line() and line.content.strip()
),
1
)
col = 1
line, b = hunk_lines[row_offset - 1]
# Happy path since the user is on a present line
if not line.is_from_line():
return b, col
# The user is on a deleted line ('-') we cannot jump to. If possible,
# select the next guaranteed to be available line
for next_line, next_b in hunk_lines[row_offset:]:
if next_line.is_to_line():
return next_b, min(col, len(next_line.content) + 1)
elif next_line.is_context():
# If we only have a contextual line, choose this or the
# previous line, pretty arbitrary, depending on the
# indentation.
next_lines_indentation = line_indentation(next_line.content)
if next_lines_indentation == line_indentation(line.content):
return next_b, next_lines_indentation + 1
else:
return max(1, b - 1), 1
else:
return b, 1
def counted_lines(hunk):
# type: (Hunk) -> Optional[List[HunkLineWithB]]
"""Split a hunk into (first char, line content, line) tuples
Note that rows point to available rows on the b-side.
"""
b = hunk.header().to_line_start()
if b is None:
return None
return list(_recount_lines(hunk.content().lines(), b))
def _recount_lines(lines, b):
# type: (List[HunkLine], int) -> Iterator[HunkLineWithB]
# Be aware that we only consider the b-line numbers, and that we
# always yield a b value, even for deleted lines.
for line in lines:
yield HunkLineWithB(line, b)
if not line.is_from_line():
b += 1
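# Illustration (an assumed hunk '@@ -10,2 +20,2 @@' containing '-a', '+b', ' c'):
# the yielded b values are 20, 20, 21. A deleted ('-') line shares the b
# number of the line that follows it, because only non-'-' lines advance b.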
class gs_diff_navigate(GsNavigate):
"""
Travel between hunks. It is also used by show_commit_view.
"""
offset = 0
def get_available_regions(self):
return self.view.find_by_selector("meta.diff.range.unified, meta.commit-info.header")
class gs_diff_undo(TextCommand, GitCommand):
"""
Undo the last action taken in the diff view, if possible.
"""
# NOTE: MUST NOT be async, otherwise `view.show` will not update the view 100%!
def run(self, edit):
history = self.view.settings().get("git_savvy.diff_view.history")
if not history:
window = self.view.window()
if window:
window.status_message("Undo stack is empty")
return
args, stdin, cursors, in_cached_mode = history.pop()
# Toggle the `--reverse` flag.
args[1] = "-R" if not args[1] else None
self.git(*args, stdin=stdin)
self.view.settings().set("git_savvy.diff_view.history", history)
self.view.settings().set("git_savvy.diff_view.just_hunked", stdin)
self.view.run_command("gs_diff_refresh")
# The cursor is only applicable if we're still in the same cache/stage mode
if self.view.settings().get("git_savvy.diff_view.in_cached_mode") == in_cached_mode:
set_and_show_cursor(self.view, cursors)
def find_hunk_in_view(view, patch):
# type: (sublime.View, str) -> Optional[sublime.Region]
"""Given a patch, search for its first hunk in the view
Returns the region of the first line of the hunk (the one starting
with '@@ ...'), if any.
"""
diff = SplittedDiff.from_string(patch)
try:
hunk = diff.hunks[0]
except IndexError:
return None
return (
view.find(hunk.header().text, 0, sublime.LITERAL)
or fuzzy_search_hunk_content_in_view(view, hunk.content().text.splitlines())
)
def fuzzy_search_hunk_content_in_view(view, lines):
# type: (sublime.View, List[str]) -> Optional[sublime.Region]
"""Fuzzy search the hunk content in the view
    Note that the hunk content no longer includes the starting line (the
    one beginning with '@@ ...').
The fuzzy strategy here is to search for the hunk or parts of it
by reducing the contextual lines symmetrically.
Returns the region of the starting line of the found hunk, if any.
"""
for hunk_content in shrink_list_sym(lines):
region = view.find('\n'.join(hunk_content), 0, sublime.LITERAL)
if region:
diff = SplittedDiff.from_view(view)
head_and_hunk = diff.head_and_hunk_for_pt(region.a)
if head_and_hunk:
_, hunk = head_and_hunk
hunk_header = hunk.header()
return sublime.Region(hunk_header.a, hunk_header.b)
break
return None
def shrink_list_sym(lst):
    # type: (List[T]) -> Iterator[List[T]]
    while lst:
        yield lst
        lst = lst[1:-1]
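# E.g. shrink_list_sym(['a', 'b', 'c', 'd', 'e']) yields
# ['a', 'b', 'c', 'd', 'e'], then ['b', 'c', 'd'], then ['c'].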
def pickle_sel(sel):
return [(s.a, s.b) for s in sel]
def unpickle_sel(pickled_sel):
return [sublime.Region(a, b) for a, b in pickled_sel]
def unique(items):
# type: (Iterable[T]) -> List[T]
"""Remove duplicate entries but remain sorted/ordered."""
rv = [] # type: List[T]
for item in items:
if item not in rv:
rv.append(item)
return rv
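# E.g. unique([3, 1, 3, 2, 1]) == [3, 1, 2]; the first occurrence wins.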
def set_and_show_cursor(view, cursors):
sel = view.sel()
sel.clear()
try:
it = iter(cursors)
except TypeError:
sel.add(cursors)
else:
for c in it:
sel.add(c)
view.show(sel)
@contextmanager
def no_animations():
pref = sublime.load_settings("Preferences.sublime-settings")
current = pref.get("animation_enabled")
pref.set("animation_enabled", False)
try:
yield
finally:
pref.set("animation_enabled", current)
|
|
from .td import Connection
from .td import QueryEngine
from .td import ResultProxy
from .td import StreamingUploader
from .td import _convert_time_column
from .td import _convert_index_column
from .td import _convert_date_format
from pandas_td import connect
from pandas_td import read_td
from pandas_td import read_td_query
from pandas_td import read_td_table
from pandas_td import to_td
import collections
import datetime
import gzip
import io
import os
import msgpack
import tdclient
import numpy as np
import pandas as pd
from unittest import TestCase
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
from nose.tools import ok_, eq_, raises
# mocks
class MockJob(object):
def __init__(self, status='success'):
self._status = status
self.result = [{'c1': i, 'c2': '2001-01-01', 'time': i} for i in range(100)]
self.result_bytes = self._pack_gz(self.result)
self.job_id = 1
self.type = 'presto'
self.result_size = len(self.result_bytes)
self.result_schema = [['c1', 'int'], ['c2', 'string'], ['time', 'int']]
self.debug = {
'cmdout': 'output',
'stderr': 'error',
}
def _pack_gz(self, result):
packer = msgpack.Packer(autoreset=False)
for row in result:
packer.pack(row)
buff = io.BytesIO()
with gzip.GzipFile(fileobj=buff, mode='wb') as f:
f.write(packer.bytes())
return buff.getvalue()
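    # For reference, the gzipped msgpack payload round-trips like this
    # (an illustrative sketch, mirroring what the ResultProxy tests consume):
    #
    #   with gzip.GzipFile(fileobj=io.BytesIO(job.result_bytes)) as f:
    #       rows = list(msgpack.Unpacker(f, encoding='utf-8'))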
def url(self):
return 'https://mock/jobs/1'
def finished(self):
return True
def update(self):
return True
def status(self):
return self._status
def success(self):
return self._status == 'success'
class MockRequest(object):
def __init__(self, job):
self.job = job
self.headers = {
'Content-length': str(self.job.result_size)
}
def iter_content(self, chunksize):
yield self.job.result_bytes
def close(self):
pass
# test cases
class ConnectionConfigurationTestCase(TestCase):
def setUp(self):
self._environ = os.environ.copy()
# clear environment variables
if 'TD_API_KEY' in os.environ:
del os.environ['TD_API_KEY']
def tearDown(self):
# restore environment variables
os.environ.clear()
os.environ.update(self._environ)
@raises(ValueError)
def test_error_without_parameters(self):
Connection()
def test_apikey(self):
# parameter
c1 = Connection(apikey='test-key')
eq_(c1.apikey, 'test-key')
# environment variable
os.environ['TD_API_KEY'] = 'test-key'
        c2 = Connection()
eq_(c2.apikey, 'test-key')
def test_endpoint(self):
os.environ['TD_API_KEY'] = 'test-key'
# default
c1 = Connection()
eq_(c1.endpoint, 'https://api.treasuredata.com/')
# parameter
c2 = Connection(endpoint='http://api/')
eq_(c2.endpoint, 'http://api/')
# no trailing slash
c3 = Connection(endpoint='http://api')
eq_(c3.endpoint, 'http://api/')
class ConnectionTestCase(TestCase):
def setUp(self):
self.connection = Connection('test-key', 'test-endpoint')
def test_empty_databases(self):
client = self.connection.client
client.databases = MagicMock(return_value=[])
d = self.connection.databases()
eq_(len(d), 0)
def test_databases(self):
TestDatabase = collections.namedtuple('TestDatabase',
['name', 'count', 'permission', 'created_at', 'updated_at'])
client = self.connection.client
client.databases = MagicMock(return_value=[
TestDatabase(
name = 'test_db',
count = 0,
permission = 'administrator',
created_at = datetime.datetime(2015, 1, 1, 0, 0, 0),
updated_at = datetime.datetime(2015, 1, 1, 0, 0, 0),
)
])
d = self.connection.databases()
eq_(len(d), 1)
eq_(d.name[0], 'test_db')
def test_empty_tables(self):
client = self.connection.client
client.tables = MagicMock(return_value=[])
d = self.connection.tables('test_db')
eq_(len(d), 0)
def test_tables(self):
TestTable = collections.namedtuple('TestTable',
['name', 'count', 'estimated_storage_size', 'created_at', 'last_log_timestamp'])
client = self.connection.client
client.tables = MagicMock(return_value=[
TestTable(
name = 'test_tbl',
count = 0,
estimated_storage_size = 0,
created_at = datetime.datetime(2015, 1, 1, 0, 0, 0),
last_log_timestamp = datetime.datetime(2015, 1, 1, 0, 0, 0),
)
])
d = self.connection.tables('test_db')
eq_(len(d), 1)
eq_(d.name[0], 'test_tbl')
class QueryEngineTestCase(TestCase):
def setUp(self):
self.connection = Connection('test-key', 'test-endpoint')
def test_execute_ok(self):
# mock
job = MockJob('success')
self.connection.client.query = MagicMock(return_value=job)
# test
engine = QueryEngine(self.connection, 'test_db')
r = engine.execute('select 1')
self.connection.client.query.assert_called_with('test_db', 'select 1')
ok_(isinstance(r, ResultProxy))
eq_(r.engine, engine)
eq_(r.job, job)
@raises(RuntimeError)
def test_execute_error(self):
# mock
job = MockJob('error')
self.connection.client.query = MagicMock(return_value=job)
# test
engine = QueryEngine(self.connection, 'test_db')
        engine.execute('select 1')
class ResultProxyTestCase(TestCase):
def setUp(self):
self.connection = Connection('test-key', 'test-endpoint')
self.engine = QueryEngine(self.connection, 'test_db')
self.job = MockJob()
self.result = ResultProxy(self.engine, self.job)
self.engine._http_get = MagicMock(return_value=MockRequest(self.job))
def test_ok(self):
r = self.result
# attributes
eq_(r.status, self.job.status())
eq_(r.size, self.job.result_size)
eq_(r.description, self.job.result_schema)
# result
rows = list(r)
eq_(len(rows), 100)
eq_(rows[0], self.job.result[0])
def test_to_dataframe(self):
r = self.result
d = r.to_dataframe()
eq_(len(d), 100)
eq_(list(d.columns), ['c1', 'c2', 'time'])
eq_(list(d.c1), list(range(100)))
def test_to_dataframe_index_col(self):
r = self.result
d = r.to_dataframe(index_col='c1')
eq_(d.index.name, 'c1')
eq_(list(d.index.values), list(range(100)))
eq_(list(d.columns), ['c2', 'time'])
def test_to_dataframe_parse_dates(self):
r = self.result
d = r.to_dataframe(parse_dates=['c2'])
eq_(d.c2.dtype, np.dtype('datetime64[ns]'))
def test_to_dataframe_time_series(self):
r = self.result
d = r.to_dataframe(index_col='time', parse_dates={'time': 's'})
eq_(d.index.dtype, np.dtype('datetime64[ns]'))
class StreamingUploaderTestCase(TestCase):
def setUp(self):
self.uploader = StreamingUploader(None, 'test_db', 'test_tbl')
def test_chunk_frame(self):
frame = pd.DataFrame([[1], [2], [3], [4]])
chunks = [chunk for chunk in self.uploader._chunk_frame(frame, 2)]
eq_(len(chunks), 2)
def test_pack(self):
records = [{'x': 'a', 'y': 1}, {'x': 'b', 'y': 2}]
data = self.uploader._pack(pd.DataFrame(records))
for unpacked in msgpack.Unpacker(io.BytesIO(data), encoding='utf-8'):
eq_(unpacked, records[0])
records = records[1:]
eq_(records, [])
def test_drop_nan(self):
records = [{'x': 'a', 'y': np.nan}, {'x': np.nan, 'y': 1.0}]
data = self.uploader._pack(pd.DataFrame(records))
unpacker = msgpack.Unpacker(io.BytesIO(data), encoding='utf-8')
eq_(unpacker.unpack(), {'x': 'a'})
eq_(unpacker.unpack(), {'y': 1.0})
def test_gzip(self):
data = self.uploader._gzip(b'abc')
with gzip.GzipFile(fileobj=io.BytesIO(data)) as f:
eq_(f.read(), b'abc')
class ReadTdQueryTestCase(TestCase):
def setUp(self):
job = MockJob()
self.connection = connect('test-key', 'test-endpoint')
self.connection.client.query = MagicMock(return_value=job)
self.engine = self.connection.query_engine('test_db', type='presto')
self.engine._http_get = MagicMock(return_value=MockRequest(job))
def assert_query(self, query):
self.connection.client.query.assert_called_with('test_db', "-- read_td_query\n" + query, type='presto')
def test_ok(self):
read_td_query('select 1', self.engine)
self.assert_query('select 1')
class ReadTdTableTestCase(TestCase):
def setUp(self):
job = MockJob()
self.connection = connect('test-key', 'test-endpoint')
self.connection.client.query = MagicMock(return_value=job)
self.engine = self.connection.query_engine('test_db', type='presto')
self.engine._http_get = MagicMock(return_value=MockRequest(job))
def assert_query(self, query):
self.connection.client.query.assert_called_with('test_db', "-- read_td_table('test_table')" + query, type='presto')
@raises(ValueError)
def test_invalid_time_range(self):
read_td_table('test_table', self.engine, time_range=(1.0, 2.0))
@raises(ValueError)
def test_invalid_sample_small(self):
read_td_table('test_table', self.engine, sample=-1)
@raises(ValueError)
def test_invalid_sample_large(self):
read_td_table('test_table', self.engine, sample=1.1)
def test_default(self):
read_td_table('test_table', self.engine)
self.assert_query('''
SELECT *
FROM test_table
LIMIT 10000
''')
def test_time_range(self):
time_range_tests = [
[(None, None), "NULL", "NULL"],
[(0, 1000000000), "'1970-01-01 00:00:00'", "'2001-09-09 01:46:40'"],
[('2000-01-01', '2010-01-01'), "'2000-01-01 00:00:00'", "'2010-01-01 00:00:00'"],
[(datetime.date(2000, 1, 1), datetime.datetime(2010, 1, 1, 0, 0, 0)),
"'2000-01-01 00:00:00'", "'2010-01-01 00:00:00'"],
]
for time_range, start, end in time_range_tests:
read_td_table('test_table', self.engine, time_range=time_range)
self.assert_query('''
SELECT *
FROM test_table
WHERE td_time_range(time, {0}, {1})
LIMIT 10000
'''.format(start, end))
def test_with_columns(self):
read_td_table('test_table', self.engine, columns=['c1', 'c2'])
self.assert_query('''
SELECT c1, c2
FROM test_table
LIMIT 10000
''')
def test_with_sample(self):
read_td_table('test_table', self.engine, sample=0.1)
self.assert_query('''
SELECT *
FROM test_table
TABLESAMPLE BERNOULLI (10.0)
LIMIT 10000
''')
def test_without_limit(self):
read_td_table('test_table', self.engine, limit=None)
self.assert_query('''
SELECT *
FROM test_table
''')
class ToTdTestCase(TestCase):
def setUp(self):
self.connection = connect('test-key', 'test-endpoint')
self.frame = pd.DataFrame([[1,2],[3,4]], columns=['x', 'y'])
@raises(ValueError)
def test_invalid_table_name(self):
to_td(self.frame, 'invalid', self.connection)
@raises(TypeError)
def test_datetime_is_not_supported(self):
# mock
client = self.connection.client
client.table = MagicMock(side_effect=tdclient.api.NotFoundError('test_table'))
client.create_log_table = MagicMock()
client.import_data = MagicMock()
# test
frame = pd.DataFrame({'timestamp': [datetime.datetime(2000,1,1)]})
to_td(frame, 'test_db.test_table', self.connection)
# if_exists
@raises(ValueError)
def test_invalid_if_exists(self):
to_td(self.frame, 'test_db.test_table', self.connection, if_exists='invalid')
@raises(RuntimeError)
def test_fail_if_exists(self):
client = self.connection.client
client.table = MagicMock()
to_td(self.frame, 'test_db.test_table', self.connection)
def test_ok_if_not_exists(self):
# mock
client = self.connection.client
client.table = MagicMock(side_effect=tdclient.api.NotFoundError('test_table'))
client.create_log_table = MagicMock()
client.import_data = MagicMock()
# test
to_td(self.frame, 'test_db.test_table', self.connection)
client.table.assert_called_with('test_db', 'test_table')
client.create_log_table.assert_called_with('test_db', 'test_table')
def test_replace_if_exists(self):
# mock
client = self.connection.client
client.table = MagicMock(side_effect=tdclient.api.NotFoundError('test_table'))
client.create_log_table = MagicMock()
client.import_data = MagicMock()
# first call
to_td(self.frame, 'test_db.test_table', self.connection, if_exists='replace')
client.create_log_table.assert_called_with('test_db', 'test_table')
# mock
client.table = MagicMock()
client.delete_table = MagicMock()
# second call
to_td(self.frame, 'test_db.test_table', self.connection, if_exists='replace')
client.delete_table.assert_called_with('test_db', 'test_table')
client.create_log_table.assert_called_with('test_db', 'test_table')
def test_append_if_exists(self):
# mock
client = self.connection.client
client.table = MagicMock(side_effect=tdclient.api.NotFoundError('test_table'))
client.create_log_table = MagicMock()
client.import_data = MagicMock()
# first call
to_td(self.frame, 'test_db.test_table', self.connection, if_exists='append')
# second call
client.table = MagicMock()
to_td(self.frame, 'test_db.test_table', self.connection, if_exists='append')
client.create_log_table.assert_called_once_with('test_db', 'test_table')
# time_col
@raises(ValueError)
def test_error_time_col_and_time_index(self):
_convert_time_column(self.frame, time_col='x', time_index=0)
@raises(ValueError)
def test_error_time_column_already_exists(self):
f1 = pd.DataFrame([[0, 'a', 1], [0, 'b', 2]], columns=['time', 'x', 'y'])
f2 = _convert_time_column(f1)
def test_time_now(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f2 = _convert_time_column(f1)
eq_(list(f2.columns), ['x', 'y', 'time'])
def test_time_col_rename(self):
f1 = pd.DataFrame([[0, 'a', 1], [0, 'b', 2]], columns=['unixtime', 'x', 'y'])
f2 = _convert_time_column(f1, time_col='unixtime')
eq_(list(f2.columns), ['time', 'x', 'y'])
def test_time_col_by_unixtime(self):
f1 = pd.DataFrame([[0, 'a', 1], [0, 'b', 2]], columns=['time', 'x', 'y'])
f2 = _convert_time_column(f1, time_col='time')
eq_(list(f2.columns), ['time', 'x', 'y'])
def test_time_col_by_datetime(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f1['time'] = pd.to_datetime('2001-01-01')
f2 = _convert_time_column(f1, time_col='time')
eq_(list(f2.columns), ['x', 'y', 'time'])
def test_time_col_by_string(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f1['time'] = '2001-01-01'
f2 = _convert_time_column(f1, time_col='time')
eq_(list(f2.columns), ['x', 'y', 'time'])
# time_index
@raises(TypeError)
def test_invalid_arg_time_index(self):
date_range = pd.date_range('2015-01-01', periods=2, freq='d')
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=date_range)
f2 = _convert_time_column(f1, time_index=True)
@raises(IndexError)
def test_invalid_level_time_index(self):
date_range = pd.date_range('2015-01-01', periods=2, freq='d')
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=date_range)
f2 = _convert_time_column(f1, time_index=1)
@raises(TypeError)
def test_invalid_value_time_index(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f2 = _convert_time_column(f1, time_index=0)
def test_time_index(self):
date_range = pd.date_range('2015-01-01', periods=2, freq='d')
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=date_range)
f2 = _convert_time_column(f1, time_index=0)
eq_(list(f2.columns), ['x', 'y', 'time'])
@raises(IndexError)
def test_invalid_level_time_index_multi(self):
date_range = pd.date_range('2015-01-01', periods=2, freq='d')
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=[[0, 1], date_range])
f2 = _convert_time_column(f1, time_index=2)
@raises(TypeError)
def test_invalid_value_time_index_multi(self):
date_range = pd.date_range('2015-01-01', periods=2, freq='d')
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=[[0, 1], date_range])
f2 = _convert_time_column(f1, time_index=0)
def test_time_index_multi(self):
date_range = pd.date_range('2015-01-01', periods=2, freq='d')
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=[[0, 1], date_range])
f2 = _convert_time_column(f1, time_index=1)
eq_(list(f2.columns), ['x', 'y', 'time'])
# index / index_label
@raises(TypeError)
def test_invalid_index_type(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f2 = _convert_index_column(f1, index=0)
def test_no_index(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f2 = _convert_index_column(f1, index=False)
eq_(list(f2.columns), ['x', 'y'])
def test_index(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f2 = _convert_index_column(f1, index=True)
eq_(list(f2.columns), ['x', 'y', 'index'])
def test_index_name(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f1.index.name = 'id'
f2 = _convert_index_column(f1, index=True)
eq_(list(f2.columns), ['x', 'y', 'id'])
def test_index_label(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'])
f2 = _convert_index_column(f1, index=True, index_label='id')
eq_(list(f2.columns), ['x', 'y', 'id'])
def test_multi_index(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=[[0, 1], [0, 1]])
f2 = _convert_index_column(f1, index=True)
eq_(list(f2.columns), ['x', 'y', 'level_0', 'level_1'])
def test_multi_index_name(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=[[0, 1], [0, 1]])
f1.index.names = ['id1', 'id2']
f2 = _convert_index_column(f1, index=True)
eq_(list(f2.columns), ['x', 'y', 'id1', 'id2'])
def test_multi_index_label(self):
f1 = pd.DataFrame([['a', 1], ['b', 2]], columns=['x', 'y'], index=[[0, 1], [0, 1]])
f2 = _convert_index_column(f1, index=True, index_label=['id1', 'id2'])
eq_(list(f2.columns), ['x', 'y', 'id1', 'id2'])
# date_format
def test_date_format(self):
ts = datetime.datetime(2000, 1, 2, 3, 4, 5)
f1 = pd.DataFrame([['a', ts], ['b', ts]], columns=['x', 'y'])
f2 = _convert_date_format(f1, date_format='%Y-%m-%d %T')
eq_(f2['y'].tolist(), ['2000-01-02 03:04:05', '2000-01-02 03:04:05'])
|
|
# coding: utf-8
"""
Cloudbreak API
    Cloudbreak is a powerful left surf that breaks over a coral reef, a mile southwest of the island of Tavarua, Fiji. Cloudbreak is a cloud-agnostic Hadoop as a Service API. It abstracts provisioning and eases the management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing you to spin up Hadoop clusters of arbitrary size on any of the supported cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers' APIs (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class PlatformResourceRequestJson(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'credential_id': 'int',
'credential_name': 'str',
'region': 'str',
'platform_variant': 'str',
'filters': 'dict(str, str)',
'availability_zone': 'str'
}
attribute_map = {
'credential_id': 'credentialId',
'credential_name': 'credentialName',
'region': 'region',
'platform_variant': 'platformVariant',
'filters': 'filters',
'availability_zone': 'availabilityZone'
}
def __init__(self, credential_id=None, credential_name=None, region=None, platform_variant=None, filters=None, availability_zone=None):
"""
PlatformResourceRequestJson - a model defined in Swagger
"""
self._credential_id = None
self._credential_name = None
self._region = None
self._platform_variant = None
self._filters = None
self._availability_zone = None
if credential_id is not None:
self.credential_id = credential_id
if credential_name is not None:
self.credential_name = credential_name
if region is not None:
self.region = region
if platform_variant is not None:
self.platform_variant = platform_variant
if filters is not None:
self.filters = filters
if availability_zone is not None:
self.availability_zone = availability_zone
@property
def credential_id(self):
"""
Gets the credential_id of this PlatformResourceRequestJson.
credential resource id for the request
:return: The credential_id of this PlatformResourceRequestJson.
:rtype: int
"""
return self._credential_id
@credential_id.setter
def credential_id(self, credential_id):
"""
Sets the credential_id of this PlatformResourceRequestJson.
credential resource id for the request
:param credential_id: The credential_id of this PlatformResourceRequestJson.
:type: int
"""
self._credential_id = credential_id
@property
def credential_name(self):
"""
Gets the credential_name of this PlatformResourceRequestJson.
credential resource name for the request
:return: The credential_name of this PlatformResourceRequestJson.
:rtype: str
"""
return self._credential_name
@credential_name.setter
def credential_name(self, credential_name):
"""
Sets the credential_name of this PlatformResourceRequestJson.
credential resource name for the request
:param credential_name: The credential_name of this PlatformResourceRequestJson.
:type: str
"""
self._credential_name = credential_name
@property
def region(self):
"""
Gets the region of this PlatformResourceRequestJson.
Related region
:return: The region of this PlatformResourceRequestJson.
:rtype: str
"""
return self._region
@region.setter
def region(self, region):
"""
Sets the region of this PlatformResourceRequestJson.
Related region
:param region: The region of this PlatformResourceRequestJson.
:type: str
"""
self._region = region
@property
def platform_variant(self):
"""
Gets the platform_variant of this PlatformResourceRequestJson.
cloud provider api variant
:return: The platform_variant of this PlatformResourceRequestJson.
:rtype: str
"""
return self._platform_variant
@platform_variant.setter
def platform_variant(self, platform_variant):
"""
Sets the platform_variant of this PlatformResourceRequestJson.
cloud provider api variant
:param platform_variant: The platform_variant of this PlatformResourceRequestJson.
:type: str
"""
self._platform_variant = platform_variant
@property
def filters(self):
"""
Gets the filters of this PlatformResourceRequestJson.
filter for resources
:return: The filters of this PlatformResourceRequestJson.
:rtype: dict(str, str)
"""
return self._filters
@filters.setter
def filters(self, filters):
"""
Sets the filters of this PlatformResourceRequestJson.
filter for resources
:param filters: The filters of this PlatformResourceRequestJson.
:type: dict(str, str)
"""
self._filters = filters
@property
def availability_zone(self):
"""
Gets the availability_zone of this PlatformResourceRequestJson.
related availability zone
:return: The availability_zone of this PlatformResourceRequestJson.
:rtype: str
"""
return self._availability_zone
@availability_zone.setter
def availability_zone(self, availability_zone):
"""
Sets the availability_zone of this PlatformResourceRequestJson.
related availability zone
:param availability_zone: The availability_zone of this PlatformResourceRequestJson.
:type: str
"""
self._availability_zone = availability_zone
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, PlatformResourceRequestJson):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
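# Illustrative usage (not part of the generated file; field values are
# made up for the example):
#
#   req = PlatformResourceRequestJson(credential_name='my-cred',
#                                     region='us-east-1',
#                                     filters={'vmType': 'large'})
#   req.to_dict()   # -> {'credential_name': 'my-cred', 'region': 'us-east-1',
#                   #     'filters': {'vmType': 'large'}, ...}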
|
|
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import netaddr
from neutron_lib import constants
from neutron_lib.utils import net
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
import testtools
from neutron.agent.linux import ip_lib
from neutron.common import utils
from neutron.conf.agent import common as config
from neutron.tests.common import net_helpers
from neutron.tests.functional import base as functional_base
LOG = logging.getLogger(__name__)
Device = collections.namedtuple('Device',
'name ip_cidrs mac_address namespace')
WRONG_IP = '0.0.0.0'
TEST_IP = '240.0.0.1'
TEST_IP_NEIGH = '240.0.0.2'
class IpLibTestFramework(functional_base.BaseSudoTestCase):
def setUp(self):
super(IpLibTestFramework, self).setUp()
self._configure()
def _configure(self):
config.register_interface_driver_opts_helper(cfg.CONF)
cfg.CONF.set_override(
'interface_driver',
'neutron.agent.linux.interface.OVSInterfaceDriver')
config.register_interface_opts()
self.driver = importutils.import_object(cfg.CONF.interface_driver,
cfg.CONF)
def generate_device_details(self, name=None, ip_cidrs=None,
mac_address=None, namespace=None):
return Device(name or utils.get_rand_name(),
ip_cidrs or ["%s/24" % TEST_IP],
mac_address or
net.get_random_mac('fa:16:3e:00:00:00'.split(':')),
namespace or utils.get_rand_name())
def _safe_delete_device(self, device):
try:
device.link.delete()
except RuntimeError:
LOG.debug('Could not delete %s, was it already deleted?', device)
def manage_device(self, attr):
"""Create a tuntap with the specified attributes.
The device is cleaned up at the end of the test.
:param attr: A Device namedtuple
:return: A tuntap ip_lib.IPDevice
"""
ip = ip_lib.IPWrapper(namespace=attr.namespace)
if attr.namespace:
ip.netns.add(attr.namespace)
self.addCleanup(ip.netns.delete, attr.namespace)
tap_device = ip.add_tuntap(attr.name)
self.addCleanup(self._safe_delete_device, tap_device)
tap_device.link.set_address(attr.mac_address)
self.driver.init_l3(attr.name, attr.ip_cidrs,
namespace=attr.namespace)
tap_device.link.set_up()
return tap_device
class IpLibTestCase(IpLibTestFramework):
def test_device_exists(self):
attr = self.generate_device_details()
self.assertFalse(
ip_lib.device_exists(attr.name, namespace=attr.namespace))
device = self.manage_device(attr)
self.assertTrue(
ip_lib.device_exists(device.name, namespace=attr.namespace))
self.assertFalse(
ip_lib.device_exists(attr.name, namespace='wrong_namespace'))
device.link.delete()
self.assertFalse(
ip_lib.device_exists(attr.name, namespace=attr.namespace))
def test_ipdevice_exists(self):
attr = self.generate_device_details()
device = self.manage_device(attr)
self.assertTrue(device.exists())
device.link.delete()
self.assertFalse(device.exists())
def test_vxlan_exists(self):
attr = self.generate_device_details()
ip = ip_lib.IPWrapper(namespace=attr.namespace)
ip.netns.add(attr.namespace)
self.addCleanup(ip.netns.delete, attr.namespace)
self.assertFalse(ip_lib.vxlan_in_use(9999, namespace=attr.namespace))
device = ip.add_vxlan(attr.name, 9999)
self.addCleanup(self._safe_delete_device, device)
self.assertTrue(ip_lib.vxlan_in_use(9999, namespace=attr.namespace))
device.link.delete()
self.assertFalse(ip_lib.vxlan_in_use(9999, namespace=attr.namespace))
def test_ipwrapper_get_device_by_ip_None(self):
ip_wrapper = ip_lib.IPWrapper(namespace=None)
self.assertIsNone(ip_wrapper.get_device_by_ip(ip=None))
def test_ipwrapper_get_device_by_ip(self):
attr = self.generate_device_details()
self.manage_device(attr)
ip_wrapper = ip_lib.IPWrapper(namespace=attr.namespace)
self.assertEqual(attr.name, ip_wrapper.get_device_by_ip(TEST_IP).name)
self.assertIsNone(ip_wrapper.get_device_by_ip(WRONG_IP))
def test_device_exists_with_ips_and_mac(self):
attr = self.generate_device_details()
device = self.manage_device(attr)
self.assertTrue(
ip_lib.device_exists_with_ips_and_mac(*attr))
wrong_ip_cidr = '10.0.0.1/8'
wrong_mac_address = 'aa:aa:aa:aa:aa:aa'
attr = self.generate_device_details(name='wrong_name')
self.assertFalse(
ip_lib.device_exists_with_ips_and_mac(*attr))
attr = self.generate_device_details(ip_cidrs=[wrong_ip_cidr])
self.assertFalse(ip_lib.device_exists_with_ips_and_mac(*attr))
attr = self.generate_device_details(mac_address=wrong_mac_address)
self.assertFalse(ip_lib.device_exists_with_ips_and_mac(*attr))
attr = self.generate_device_details(namespace='wrong_namespace')
self.assertFalse(ip_lib.device_exists_with_ips_and_mac(*attr))
device.link.delete()
def test_get_device_mac(self):
attr = self.generate_device_details()
device = self.manage_device(attr)
mac_address = ip_lib.get_device_mac(attr.name,
namespace=attr.namespace)
self.assertEqual(attr.mac_address, mac_address)
device.link.delete()
def test_get_device_mac_too_long_name(self):
name = utils.get_rand_name(
max_length=constants.DEVICE_NAME_MAX_LEN + 5)
attr = self.generate_device_details(name=name)
device = self.manage_device(attr)
mac_address = ip_lib.get_device_mac(attr.name,
namespace=attr.namespace)
self.assertEqual(attr.mac_address, mac_address)
device.link.delete()
def test_get_routing_table(self):
attr = self.generate_device_details(
ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
)
device = self.manage_device(attr)
device_ip = attr.ip_cidrs[0].split('/')[0]
destination = '8.8.8.0/24'
device.route.add_route(destination, device_ip)
destination6 = 'fd01::/64'
device.route.add_route(destination6, "fd00::2")
expected_routes = [{'nexthop': device_ip,
'device': attr.name,
'destination': destination,
'scope': 'universe'},
{'nexthop': None,
'device': attr.name,
'destination': str(
netaddr.IPNetwork(attr.ip_cidrs[0]).cidr),
'scope': 'link'}]
routes = ip_lib.get_routing_table(4, namespace=attr.namespace)
self.assertItemsEqual(expected_routes, routes)
self.assertIsInstance(routes, list)
expected_routes6 = [{'nexthop': "fd00::2",
'device': attr.name,
'destination': destination6,
'scope': 'universe'},
{'nexthop': None,
'device': attr.name,
'destination': str(
netaddr.IPNetwork(attr.ip_cidrs[1]).cidr),
'scope': 'universe'}]
routes6 = ip_lib.get_routing_table(6, namespace=attr.namespace)
self.assertItemsEqual(expected_routes6, routes6)
self.assertIsInstance(routes6, list)
def test_get_routing_table_no_namespace(self):
with testtools.ExpectedException(ip_lib.NetworkNamespaceNotFound):
ip_lib.get_routing_table(4, namespace="nonexistent-netns")
def test_get_neigh_entries(self):
attr = self.generate_device_details(
ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
)
mac_address = net.get_random_mac('fa:16:3e:00:00:00'.split(':'))
device = self.manage_device(attr)
device.neigh.add(TEST_IP_NEIGH, mac_address)
expected_neighs = [{'dst': TEST_IP_NEIGH,
'lladdr': mac_address,
'device': attr.name}]
neighs = device.neigh.dump(4)
self.assertItemsEqual(expected_neighs, neighs)
self.assertIsInstance(neighs, list)
device.neigh.delete(TEST_IP_NEIGH, mac_address)
neighs = device.neigh.dump(4, dst=TEST_IP_NEIGH, lladdr=mac_address)
self.assertEqual([], neighs)
def test_get_neigh_entries_no_namespace(self):
with testtools.ExpectedException(ip_lib.NetworkNamespaceNotFound):
ip_lib.dump_neigh_entries(4, namespace="nonexistent-netns")
def test_get_neigh_entries_no_interface(self):
attr = self.generate_device_details(
ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
)
self.manage_device(attr)
with testtools.ExpectedException(ip_lib.NetworkInterfaceNotFound):
ip_lib.dump_neigh_entries(4, device="nosuchdevice",
namespace=attr.namespace)
def test_delete_neigh_entries(self):
attr = self.generate_device_details(
ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
)
mac_address = net.get_random_mac('fa:16:3e:00:00:00'.split(':'))
device = self.manage_device(attr)
# trying to delete a non-existent entry shouldn't raise an error
device.neigh.delete(TEST_IP_NEIGH, mac_address)
def _check_for_device_name(self, ip, name, should_exist):
exist = any(d for d in ip.get_devices() if d.name == name)
self.assertEqual(should_exist, exist)
def test_dummy_exists(self):
namespace = self.useFixture(net_helpers.NamespaceFixture())
dev_name = utils.get_rand_name()
device = namespace.ip_wrapper.add_dummy(dev_name)
self.addCleanup(self._safe_delete_device, device)
self._check_for_device_name(namespace.ip_wrapper, dev_name, True)
device.link.delete()
self._check_for_device_name(namespace.ip_wrapper, dev_name, False)
class TestSetIpNonlocalBind(functional_base.BaseSudoTestCase):
def test_assigned_value(self):
namespace = self.useFixture(net_helpers.NamespaceFixture())
for expected in (0, 1):
failed = ip_lib.set_ip_nonlocal_bind(expected, namespace.name)
try:
observed = ip_lib.get_ip_nonlocal_bind(namespace.name)
except RuntimeError as rte:
stat_message = (
'cannot stat /proc/sys/net/ipv4/ip_nonlocal_bind')
if stat_message in str(rte):
raise self.skipException(
"This kernel doesn't support %s in network "
"namespaces." % ip_lib.IP_NONLOCAL_BIND)
raise
self.assertFalse(failed)
self.assertEqual(expected, observed)
|
|
#!/usr/bin/env python
"""Common utilities to parse external input, e.g. for preload and service gateway"""
__author__ = 'Michael Meisinger, Ian Katz'
import ast
from pyon.public import BadRequest, IonObject, log
from interface import objects
def get_typed_value(value, schema_entry=None, targettype=None, strict=False):
"""
Performs a value type check or conversion according to a schema entry or specified target type.
Supports simplelist and parsedict special type parsing from strings.
    @param strict  if True, raise an error if the type does not match
"""
if not schema_entry and not targettype:
raise BadRequest("Invalid schema or targettype")
targettype = targettype or schema_entry["type"]
if schema_entry and 'enum_type' in schema_entry:
enum_clzz = getattr(objects, schema_entry['enum_type'])
if type(value).__name__ == targettype and value in enum_clzz._str_map:
return value
if isinstance(value, basestring):
if strict and value in enum_clzz._value_map:
return enum_clzz._value_map[value]
elif not strict:
if value in enum_clzz._value_map:
return enum_clzz._value_map[value]
for enum_key, enum_val in enum_clzz._value_map.iteritems():
if enum_key.lower() == value.lower():
return enum_val
raise BadRequest("Value %s is not valid enum value" % value)
elif targettype == 'str':
if type(value) is str:
return value
elif type(value) is unicode:
return value.encode("utf8")
if strict:
raise BadRequest("Value %s is type %s not str" % (value, type(value).__name__))
return str(value)
elif targettype == 'bool':
if type(value) is bool:
return value
if strict:
raise BadRequest("Value %s is type %s not bool" % (value, type(value).__name__))
if value in ('TRUE', 'True', 'true', '1', 1):
return True
elif value in ('FALSE', 'False', 'false', '0', 0, '', None):
return False
raise BadRequest("Value %s cannot be converted to bool" % value)
elif targettype == 'int':
if type(value) in (int, long):
return value
if strict:
raise BadRequest("Value %s is type %s not int" % (value, type(value).__name__))
try:
return int(value)
except Exception:
pass
raise BadRequest("Value %s cannot be converted to int" % value)
elif targettype == 'float':
if type(value) == float:
return value
elif type(value) in (int, long):
return float(value)
if strict:
raise BadRequest("Value %s is type %s not float" % (value, type(value).__name__))
try:
return float(value)
except Exception:
pass
raise BadRequest("Value %s cannot be converted to float" % value)
elif targettype == 'simplelist':
if isinstance(value, basestring):
return parse_list(value)
raise BadRequest("Value %s cannot be converted to list as simplelist" % value)
elif targettype == 'parsedict':
if isinstance(value, basestring):
return parse_dict(value)
raise BadRequest("Value %s cannot be converted to dict as parsedict" % value)
elif targettype == 'list':
if type(value) is list:
return value
if strict:
raise BadRequest("Value %s is type %s not list" % (value, type(value).__name__))
if isinstance(value, (tuple, set)):
return list(value)
elif isinstance(value, basestring):
try:
ret_val = ast.literal_eval(value)
except Exception:
ret_val = None
if isinstance(ret_val, list):
return ret_val
elif isinstance(ret_val, tuple):
return list(ret_val)
if isinstance(value, basestring):
return parse_list(value)
else:
return [value]
elif targettype == 'dict':
if type(value) is dict:
return value
if strict:
raise BadRequest("Value %s is type %s not dict" % (value, type(value).__name__))
if isinstance(value, dict):
return dict(value)
elif isinstance(value, basestring):
try:
ret_val = ast.literal_eval(value)
except Exception:
ret_val = None
if isinstance(ret_val, dict):
return ret_val
return parse_dict(value)
return dict(value=value)
elif targettype == 'NoneType':
if value is None:
return None
if not strict:
if value in ("None", "NONE", "none", "Null", "NULL", "null", ""):
return None
elif isinstance(value, basestring):
return ast.literal_eval(value)
return value
elif targettype == 'ANY':
if isinstance(value, basestring):
return ast.literal_eval(value)
return value
else:
raise BadRequest("Value %s cannot be converted to target type %s" % (value, targettype))
def parse_list(value):
"""
Parse a string to extract a simple list of string values.
Assumes comma separated values optionally within []
"""
if value.startswith('[') and value.endswith(']'):
value = value[1:-1].strip()
elif not value.strip():
return []
return list(value.split(','))
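# Examples (illustrative):
#   parse_list("a,b,c")   -> ['a', 'b', 'c']
#   parse_list("[a, b]")  -> ['a', ' b']   (items are not stripped)
#   parse_list("  ")      -> []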
def parse_dict(text):
"""
Parse a text string to obtain a dictionary of unquoted string keys and values.
The following substitutions are made:
keys with dots ('.') will be split into dictionaries.
booleans "True", "False" will be parsed
    numbers will be parsed as int or float when their canonical string form matches the input exactly (e.g. "007" or "3.10" stay strings)
"{}" will be converted to {}
"[]" will be converted to []
For example, an entry could be this:
PARAMETERS.TXWAVESTATS: False,
PARAMETERS.TXREALTIME: True,
PARAMETERS.TXWAVEBURST: false,
SCHEDULER.ACQUIRE_STATUS: {},
SCHEDULER.CLOCK_SYNC: 48.2
SCHEDULER.VERSION.number: 3.0
which would translate back to:
{ "PARAMETERS": { "TXWAVESTATS": False, "TXREALTIME": True, "TXWAVEBURST": "false" },
"SCHEDULER": { "ACQUIRE_STATUS": {}, "CLOCK_SYNC", 48.2, "VERSION": {"number": "3.0"}}
}
"""
substitutions = {"{}": {}, "[]": [], "True": True, "False": False}
def parse_value(some_val):
some_val = substitutions.get(some_val, some_val)
try:
int_val = int(some_val)
if str(int_val) == some_val:
return int_val
except ValueError:
pass
try:
float_val = float(some_val)
if str(float_val) == some_val:
return float_val
except ValueError:
pass
return some_val
def chomp_key_list(out_dict, keys, value):
"""
turn keys like ['a', 'b', 'c', 'd'] and a value into
out_dict['a']['b']['c']['d'] = value
"""
dict_ptr = out_dict
last_ptr = out_dict
for i, key in enumerate(keys):
last_ptr = dict_ptr
            if key not in dict_ptr:
dict_ptr[key] = {}
else:
if type(dict_ptr[key]) is not dict:
raise BadRequest("Building a dict in %s field, but it exists as %s already" %
(key, type(dict_ptr[key])))
dict_ptr = dict_ptr[key]
last_ptr[keys[-1]] = value
out = {}
if text is None:
return out
pairs = text.split(',') # pairs separated by commas
for pair in pairs:
if pair.count(':') == 0:
continue
fields = pair.split(':', 1) # pair separated by first colon
key = fields[0].strip()
value = fields[1].strip()
keyparts = key.split(".")
chomp_key_list(out, keyparts, parse_value(value))
return out
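# Example (illustrative):
#   parse_dict("a.b: 1, a.c: True, a.d: x")
#   -> {'a': {'b': 1, 'c': True, 'd': 'x'}}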
def parse_phones(text):
if ':' in text:
out = []
        for phone_type, number in parse_dict(text).iteritems():
            out.append(IonObject("Phone", phone_number=number, phone_type=phone_type))
return out
elif text:
return [ IonObject("Phone", phone_number=text.strip(), phone_type='office') ]
else:
return []
|
|
#!/usr/bin/env python
# Copyright 2013 Brett Slatkin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Background worker that screenshots URLs, possibly from a queue."""
import Queue
import json
import logging
import os
import shutil
import tempfile
import time
import urllib2
# Local Libraries
import gflags
FLAGS = gflags.FLAGS
# Local modules
from dpxdt import constants
from dpxdt.client import process_worker
from dpxdt.client import queue_worker
from dpxdt.client import release_worker
from dpxdt.client import utils
from dpxdt.client import workers
DEFAULT_PHANTOMJS_FLAGS = [
'--disk-cache=false',
'--debug=true',
'--ignore-ssl-errors=true',
# https://github.com/ariya/phantomjs/issues/11239
'--ssl-protocol=TLSv1',
]
gflags.DEFINE_integer(
'capture_threads', 5, 'Number of website screenshot threads to run')
gflags.DEFINE_integer(
'capture_task_max_attempts', 3,
'Maximum number of attempts for processing a capture task.')
gflags.DEFINE_integer(
'capture_wait_seconds', 3,
'Wait this many seconds between repeated invocations of capture '
'subprocesses. Can be used to spread out load on the server.')
# DEPRECATED
gflags.DEFINE_string(
'phantomjs_binary', None, 'Path to the phantomjs binary')
# DEPRECATED
gflags.DEFINE_string(
'phantomjs_script',
None,
'Path to the script that drives the phantomjs process')
# DEPRECATED
gflags.DEFINE_integer(
'phantomjs_timeout', None,
'Seconds until giving up on a phantomjs sub-process and trying again.')
# TODO(elsigh): Consider changing default `capture_binary` to `python`
# and `capture_script` to `capture.py` if BrowserStack writes back with
# a free account for testing with dpxdt.
gflags.DEFINE_string(
'capture_format', 'png',
'Screenshot format, e.g. png or bmp')
gflags.DEFINE_string(
'capture_binary', 'phantomjs',
'Path to the capture binary, e.g. python or phantomjs')
gflags.DEFINE_string(
'capture_script',
os.path.join(os.path.dirname(__file__), 'capture.js'),
'Path to the script that drives the capture process')
gflags.DEFINE_integer(
'capture_timeout', 120,
'Seconds until giving up on a capture sub-process and trying again.')
class CaptureFailedError(queue_worker.GiveUpAfterAttemptsError):
"""Capturing a webpage screenshot failed for some reason."""
class CaptureWorkflow(process_worker.ProcessWorkflow):
"""Workflow for capturing a website screenshot using PhantomJs."""
def __init__(self, log_path, config_path, output_path):
"""Initializer.
Args:
log_path: Where to write the verbose logging output.
config_path: Path to the screenshot config file to pass
to PhantomJs.
output_path: Where the output screenshot should be written.
"""
if FLAGS.phantomjs_timeout is not None:
logging.info(
                'Using FLAGS.phantomjs_timeout which is deprecated in favor '
                'of FLAGS.capture_timeout - please update your config')
capture_timeout = FLAGS.phantomjs_timeout
else:
capture_timeout = FLAGS.capture_timeout
process_worker.ProcessWorkflow.__init__(
self, log_path, timeout_seconds=capture_timeout)
self.config_path = config_path
self.output_path = output_path
def get_args(self):
if FLAGS.phantomjs_binary:
logging.info(
                'Using FLAGS.phantomjs_binary which is deprecated in favor '
                'of FLAGS.capture_binary - please update your config')
return [FLAGS.phantomjs_binary] + DEFAULT_PHANTOMJS_FLAGS + [
FLAGS.phantomjs_script,
self.config_path,
self.output_path,
]
else:
args = [FLAGS.capture_binary]
            # Inject some default flags if we think this is phantomjs
if FLAGS.capture_binary.endswith('phantomjs'):
args += DEFAULT_PHANTOMJS_FLAGS
return args + [
FLAGS.capture_script,
self.config_path,
self.output_path,
]
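    # With the default flags, the phantomjs path resolves to roughly
    # (an illustrative sketch):
    #
    #   phantomjs --disk-cache=false --debug=true --ignore-ssl-errors=true \
    #       --ssl-protocol=TLSv1 capture.js <config_path> <output_path>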
class DoCaptureQueueWorkflow(workers.WorkflowItem):
"""Runs a webpage screenshot process from queue parameters.
Args:
build_id: ID of the build.
release_name: Name of the release.
release_number: Number of the release candidate.
run_name: Run to run perceptual diff for.
url: URL of the content to screenshot.
config_sha1sum: Content hash of the config for the new screenshot.
baseline: Optional. When specified and True, this capture is for
the reference baseline of the specified run, not the new capture.
heartbeat: Function to call with progress status.
Raises:
CaptureFailedError if the screenshot process failed.
"""
def run(self, build_id=None, release_name=None, release_number=None,
run_name=None, url=None, config_sha1sum=None, baseline=None,
heartbeat=None):
output_path = tempfile.mkdtemp()
try:
image_path = os.path.join(output_path, 'capture.%s' % FLAGS.capture_format)
log_path = os.path.join(output_path, 'log.txt')
config_path = os.path.join(output_path, 'config.json')
capture_failed = True
failure_reason = None
yield heartbeat('Fetching webpage capture config')
yield release_worker.DownloadArtifactWorkflow(
build_id, config_sha1sum, result_path=config_path)
yield heartbeat('Running webpage capture process')
try:
returncode = yield CaptureWorkflow(
log_path, config_path, image_path)
except (process_worker.TimeoutError, OSError), e:
failure_reason = str(e)
else:
capture_failed = returncode != 0
failure_reason = 'returncode=%s' % returncode
# Don't upload bad captures, but always upload the error log.
if capture_failed:
image_path = None
yield heartbeat('Reporting capture status to server')
yield release_worker.ReportRunWorkflow(
build_id, release_name, release_number, run_name,
image_path=image_path, log_path=log_path, baseline=baseline,
run_failed=capture_failed)
if capture_failed:
raise CaptureFailedError(
FLAGS.capture_task_max_attempts,
failure_reason)
finally:
shutil.rmtree(output_path, True)
def register(coordinator):
"""Registers this module as a worker with the given coordinator."""
if FLAGS.phantomjs_script:
utils.verify_binary('phantomjs_binary', ['--version'])
assert os.path.exists(FLAGS.phantomjs_script)
else:
utils.verify_binary('capture_binary', ['--version'])
assert FLAGS.capture_script
assert os.path.exists(FLAGS.capture_script)
assert FLAGS.capture_threads > 0
assert FLAGS.queue_server_prefix
item = queue_worker.RemoteQueueWorkflow(
constants.CAPTURE_QUEUE_NAME,
DoCaptureQueueWorkflow,
max_tasks=FLAGS.capture_threads,
wait_seconds=FLAGS.capture_wait_seconds)
item.root = True
coordinator.input_queue.put(item)
|
|
#
# ElementTree
# $Id: ElementTree.py 2326 2005-03-17 07:45:21Z fredrik $
#
# light-weight XML support for Python 1.5.2 and later.
#
# history:
# 2001-10-20 fl created (from various sources)
# 2001-11-01 fl return root from parse method
# 2002-02-16 fl sort attributes in lexical order
# 2002-04-06 fl TreeBuilder refactoring, added PythonDoc markup
# 2002-05-01 fl finished TreeBuilder refactoring
# 2002-07-14 fl added basic namespace support to ElementTree.write
# 2002-07-25 fl added QName attribute support
# 2002-10-20 fl fixed encoding in write
# 2002-11-24 fl changed default encoding to ascii; fixed attribute encoding
# 2002-11-27 fl accept file objects or file names for parse/write
# 2002-12-04 fl moved XMLTreeBuilder back to this module
# 2003-01-11 fl fixed entity encoding glitch for us-ascii
# 2003-02-13 fl added XML literal factory
# 2003-02-21 fl added ProcessingInstruction/PI factory
# 2003-05-11 fl added tostring/fromstring helpers
# 2003-05-26 fl added ElementPath support
# 2003-07-05 fl added makeelement factory method
# 2003-07-28 fl added more well-known namespace prefixes
# 2003-08-15 fl fixed typo in ElementTree.findtext (Thomas Dartsch)
# 2003-09-04 fl fall back on emulator if ElementPath is not installed
# 2003-10-31 fl markup updates
# 2003-11-15 fl fixed nested namespace bug
# 2004-03-28 fl added XMLID helper
# 2004-06-02 fl added default support to findtext
# 2004-06-08 fl fixed encoding of non-ascii element/attribute names
# 2004-08-23 fl take advantage of post-2.1 expat features
# 2005-02-01 fl added iterparse implementation
# 2005-03-02 fl fixed iterparse support for pre-2.2 versions
#
# Copyright (c) 1999-2005 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2005 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
__all__ = [
# public symbols
"Comment",
"dump",
"Element", "ElementTree",
"fromstring",
"iselement", "iterparse",
"parse",
"PI", "ProcessingInstruction",
"QName",
"SubElement",
"tostring",
"TreeBuilder",
"VERSION", "XML",
"XMLParser", "XMLTreeBuilder",
]
##
# The <b>Element</b> type is a flexible container object, designed to
# store hierarchical data structures in memory. The type can be
# described as a cross between a list and a dictionary.
# <p>
# Each element has a number of properties associated with it:
# <ul>
# <li>a <i>tag</i>. This is a string identifying what kind of data
# this element represents (the element type, in other words).</li>
# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
# <li>a <i>text</i> string.</li>
# <li>an optional <i>tail</i> string.</li>
# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
# </ul>
#
# To create an element instance, use the {@link #Element} or {@link
# #SubElement} factory functions.
# <p>
# The {@link #ElementTree} class can be used to wrap an element
# structure, and convert it from and to XML.
##
import sys, re
from . import ElementPath
# TODO: add support for custom namespace resolvers/default namespaces
# TODO: add improved support for incremental parsing
VERSION = "1.2.6"
##
# Internal element class. This class defines the Element interface,
# and provides a reference implementation of this interface.
# <p>
# You should not create instances of this class directly. Use the
# appropriate factory functions instead, such as {@link #Element}
# and {@link #SubElement}.
#
# @see Element
# @see SubElement
# @see Comment
# @see ProcessingInstruction
class _ElementInterface:
# <tag attrib>text<child/>...</tag>tail
##
# (Attribute) Element tag.
tag = None
##
# (Attribute) Element attribute dictionary. Where possible, use
# {@link #_ElementInterface.get},
# {@link #_ElementInterface.set},
# {@link #_ElementInterface.keys}, and
# {@link #_ElementInterface.items} to access
# element attributes.
attrib = None
##
# (Attribute) Text before first subelement. This is either a
# string or the value None, if there was no text.
text = None
##
# (Attribute) Text after this element's end tag, but before the
# next sibling element's start tag. This is either a string or
# the value None, if there was no text.
tail = None # text after end tag, if any
def __init__(self, tag, attrib):
self.tag = tag
self.attrib = attrib
self._children = []
def __repr__(self):
return "<Element %s at %x>" % (self.tag, id(self))
##
# Creates a new element object of the same type as this element.
#
# @param tag Element tag.
# @param attrib Element attributes, given as a dictionary.
# @return A new element instance.
def makeelement(self, tag, attrib):
return Element(tag, attrib)
##
# Returns the number of subelements.
#
# @return The number of subelements.
def __len__(self):
return len(self._children)
##
# Returns the given subelement.
#
# @param index What subelement to return.
# @return The given subelement.
# @exception IndexError If the given element does not exist.
def __getitem__(self, index):
return self._children[index]
##
# Replaces the given subelement.
#
# @param index What subelement to replace.
# @param element The new element value.
# @exception IndexError If the given element does not exist.
# @exception AssertionError If element is not a valid object.
def __setitem__(self, index, element):
assert iselement(element)
self._children[index] = element
##
# Deletes the given subelement.
#
# @param index What subelement to delete.
# @exception IndexError If the given element does not exist.
def __delitem__(self, index):
del self._children[index]
##
# Returns a list containing subelements in the given range.
#
# @param start The first subelement to return.
# @param stop The first subelement that shouldn't be returned.
# @return A sequence object containing subelements.
def __getslice__(self, start, stop):
return self._children[start:stop]
##
# Replaces a number of subelements with elements from a sequence.
#
# @param start The first subelement to replace.
# @param stop The first subelement that shouldn't be replaced.
# @param elements A sequence object with zero or more elements.
# @exception AssertionError If a sequence member is not a valid object.
def __setslice__(self, start, stop, elements):
for element in elements:
assert iselement(element)
self._children[start:stop] = list(elements)
##
# Deletes a number of subelements.
#
# @param start The first subelement to delete.
# @param stop The first subelement to leave in there.
def __delslice__(self, start, stop):
del self._children[start:stop]
##
# Adds a subelement to the end of this element.
#
# @param element The element to add.
# @exception AssertionError If a sequence member is not a valid object.
def append(self, element):
assert iselement(element)
self._children.append(element)
##
# Inserts a subelement at the given position in this element.
#
# @param index Where to insert the new subelement.
# @exception AssertionError If the element is not a valid object.
def insert(self, index, element):
assert iselement(element)
self._children.insert(index, element)
##
# Removes a matching subelement. Unlike the <b>find</b> methods,
# this method compares elements based on identity, not on tag
# value or contents.
#
# @param element What element to remove.
# @exception ValueError If a matching element could not be found.
# @exception AssertionError If the element is not a valid object.
def remove(self, element):
assert iselement(element)
self._children.remove(element)
##
# Returns all subelements. The elements are returned in document
# order.
#
# @return A list of subelements.
# @defreturn list of Element instances
def getchildren(self):
return self._children
##
# Finds the first matching subelement, by tag name or path.
#
# @param path What element to look for.
# @return The first matching element, or None if no element was found.
# @defreturn Element or None
def find(self, path):
return ElementPath.find(self, path)
##
# Finds text for the first matching subelement, by tag name or path.
#
# @param path What element to look for.
# @param default What to return if the element was not found.
# @return The text content of the first matching element, or the
# default value if no element was found. Note that if the element
# is found but has no text content, this method returns an
# empty string.
# @defreturn string
def findtext(self, path, default=None):
return ElementPath.findtext(self, path, default)
##
# Finds all matching subelements, by tag name or path.
#
# @param path What element to look for.
# @return A list or iterator containing all matching elements,
# in document order.
# @defreturn list of Element instances
def findall(self, path):
return ElementPath.findall(self, path)
##
# Resets an element. This function removes all subelements, clears
# all attributes, and sets the text and tail attributes to None.
def clear(self):
self.attrib.clear()
self._children = []
self.text = self.tail = None
##
# Gets an element attribute.
#
# @param key What attribute to look for.
# @param default What to return if the attribute was not found.
# @return The attribute value, or the default value, if the
# attribute was not found.
# @defreturn string or None
def get(self, key, default=None):
return self.attrib.get(key, default)
##
# Sets an element attribute.
#
# @param key What attribute to set.
# @param value The attribute value.
def set(self, key, value):
self.attrib[key] = value
##
# Gets a list of attribute names. The names are returned in an
# arbitrary order (just like for an ordinary Python dictionary).
#
# @return A list of element attribute names.
# @defreturn list of strings
def keys(self):
return self.attrib.keys()
##
# Gets element attributes, as a sequence. The attributes are
# returned in an arbitrary order.
#
# @return A list of (name, value) tuples for all attributes.
# @defreturn list of (string, string) tuples
def items(self):
return self.attrib.items()
##
# Creates a tree iterator. The iterator loops over this element
# and all subelements, in document order, and returns all elements
# with a matching tag.
# <p>
# If the tree structure is modified during iteration, the result
# is undefined.
#
# @param tag What tags to look for (default is to return all elements).
# @return A list or iterator containing all the matching elements.
# @defreturn list or iterator
def getiterator(self, tag=None):
nodes = []
if tag == "*":
tag = None
if tag is None or self.tag == tag:
nodes.append(self)
for node in self._children:
nodes.extend(node.getiterator(tag))
return nodes
# compatibility
_Element = _ElementInterface
##
# Element factory. This function returns an object implementing the
# standard Element interface. The exact class or type of that object
# is implementation dependent, but it will always be compatible with
# the {@link #_ElementInterface} class in this module.
# <p>
# The element name, attribute names, and attribute values can be
# either 8-bit ASCII strings or Unicode strings.
#
# @param tag The element name.
# @param attrib An optional dictionary, containing element attributes.
# @param **extra Additional attributes, given as keyword arguments.
# @return An element instance.
# @defreturn Element
def Element(tag, attrib={}, **extra):
attrib = attrib.copy()
attrib.update(extra)
return _ElementInterface(tag, attrib)
##
# Subelement factory. This function creates an element instance, and
# appends it to an existing element.
# <p>
# The element name, attribute names, and attribute values can be
# either 8-bit ASCII strings or Unicode strings.
#
# @param parent The parent element.
# @param tag The subelement name.
# @param attrib An optional dictionary, containing element attributes.
# @param **extra Additional attributes, given as keyword arguments.
# @return An element instance.
# @defreturn Element
def SubElement(parent, tag, attrib={}, **extra):
attrib = attrib.copy()
attrib.update(extra)
element = parent.makeelement(tag, attrib)
parent.append(element)
return element
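# A minimal usage sketch (illustrative, not part of the original module):
#
#     root = Element("config", {"version": "1"})
#     item = SubElement(root, "item", name="demo")
#     item.text = "value"
#     len(root)   # -> 1; root[0] is item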
##
# Comment element factory. This factory function creates a special
# element that will be serialized as an XML comment.
# <p>
# The comment string can be either an 8-bit ASCII string or a Unicode
# string.
#
# @param text A string containing the comment string.
# @return An element instance, representing a comment.
# @defreturn Element
def Comment(text=None):
element = Element(Comment)
element.text = text
return element
##
# PI element factory. This factory function creates a special element
# that will be serialized as an XML processing instruction.
#
# @param target A string containing the PI target.
# @param text A string containing the PI contents, if any.
# @return An element instance, representing a PI.
# @defreturn Element
def ProcessingInstruction(target, text=None):
element = Element(ProcessingInstruction)
element.text = target
if text:
element.text = element.text + " " + text
return element
PI = ProcessingInstruction
##
# QName wrapper. This can be used to wrap a QName attribute value, in
# order to get proper namespace handling on output.
#
# @param text A string containing the QName value, in the form {uri}local,
# or, if the tag argument is given, the URI part of a QName.
# @param tag Optional tag. If given, the first argument is interpreted as
# a URI, and this argument is interpreted as a local name.
# @return An opaque object, representing the QName.
class QName:
def __init__(self, text_or_uri, tag=None):
if tag:
text_or_uri = "{%s}%s" % (text_or_uri, tag)
self.text = text_or_uri
def __str__(self):
return self.text
def __hash__(self):
return hash(self.text)
def __cmp__(self, other):
if isinstance(other, QName):
return cmp(self.text, other.text)
return cmp(self.text, other)
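# Illustrative QName usage (assumed example): both constructor forms yield
# the same decorated tag, which the writer later turns into a prefixed name.
#
#     q1 = QName("{http://www.w3.org/1999/xhtml}body")
#     q2 = QName("http://www.w3.org/1999/xhtml", "body")
#     str(q1) == str(q2)   # -> True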
##
# ElementTree wrapper class. This class represents an entire element
# hierarchy, and adds some extra support for serialization to and from
# standard XML.
#
# @param element Optional root element.
# @keyparam file Optional file handle or name. If given, the
# tree is initialized with the contents of this XML file.
class ElementTree:
def __init__(self, element=None, file=None):
assert element is None or iselement(element)
self._root = element # first node
if file:
self.parse(file)
##
# Gets the root element for this tree.
#
# @return An element instance.
# @defreturn Element
def getroot(self):
return self._root
##
# Replaces the root element for this tree. This discards the
# current contents of the tree, and replaces it with the given
# element. Use with care.
#
# @param element An element instance.
def _setroot(self, element):
assert iselement(element)
self._root = element
##
# Loads an external XML document into this element tree.
#
# @param source A file name or file object.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLTreeBuilder} parser is used.
# @return The document root element.
# @defreturn Element
def parse(self, source, parser=None):
if not hasattr(source, "read"):
source = open(source, "rb")
if not parser:
parser = XMLTreeBuilder()
while 1:
data = source.read(32768)
if not data:
break
parser.feed(data)
self._root = parser.close()
return self._root
##
# Creates a tree iterator for the root element. The iterator loops
# over all elements in this tree, in document order.
#
# @param tag What tags to look for (default is to return all elements)
# @return An iterator.
# @defreturn iterator
def getiterator(self, tag=None):
assert self._root is not None
return self._root.getiterator(tag)
##
# Finds the first toplevel element with given tag.
# Same as getroot().find(path).
#
# @param path What element to look for.
# @return The first matching element, or None if no element was found.
# @defreturn Element or None
def find(self, path):
assert self._root is not None
if path[:1] == "/":
path = "." + path
return self._root.find(path)
##
# Finds the element text for the first toplevel element with given
# tag. Same as getroot().findtext(path).
#
# @param path What toplevel element to look for.
# @param default What to return if the element was not found.
# @return The text content of the first matching element, or the
# default value if no element was found. Note that if the element
# is found but has no text content, this method returns an
# empty string.
# @defreturn string
def findtext(self, path, default=None):
assert self._root is not None
if path[:1] == "/":
path = "." + path
return self._root.findtext(path, default)
##
# Finds all toplevel elements with the given tag.
# Same as getroot().findall(path).
#
# @param path What element to look for.
# @return A list or iterator containing all matching elements,
# in document order.
# @defreturn list of Element instances
def findall(self, path):
assert self._root is not None
if path[:1] == "/":
path = "." + path
return self._root.findall(path)
##
# Writes the element tree to a file, as XML.
#
# @param file A file name, or a file object opened for writing.
# @param encoding Optional output encoding (default is None)
def write(self, file, encoding=None):
assert self._root is not None
if not hasattr(file, "write"):
if encoding:
file = open(file, "wb")
else:
file = open(file, "w")
if encoding and encoding != "utf-8":
file.write(_encode("<?xml version='1.0' encoding='%s'?>\n" % encoding, encoding))
self._write(file, self._root, encoding, {})
def _write(self, file, node, encoding, namespaces):
# write XML to file
tag = node.tag
if tag is Comment:
file.write(_encode("<!-- %s -->" % _escape_cdata(node.text), encoding))
elif tag is ProcessingInstruction:
file.write(_encode("<?%s?>" % _escape_cdata(node.text), encoding))
else:
items = list(node.items())
xmlns_items = [] # new namespaces in this scope
try:
if isinstance(tag, QName) or tag[:1] == "{":
tag, xmlns = fixtag(tag, namespaces)
if xmlns: xmlns_items.append(xmlns)
except TypeError:
_raise_serialization_error(tag)
file.write(_encode("<" + tag, encoding))
if items or xmlns_items:
items.sort() # lexical order
for k, v in items:
try:
if isinstance(k, QName) or k[:1] == "{":
k, xmlns = fixtag(k, namespaces)
if xmlns: xmlns_items.append(xmlns)
except TypeError:
_raise_serialization_error(k)
try:
if isinstance(v, QName):
v, xmlns = fixtag(v, namespaces)
if xmlns: xmlns_items.append(xmlns)
except TypeError:
_raise_serialization_error(v)
file.write(_encode(" %s=\"%s\"" % (k, _escape_attrib(v)), encoding))
for k, v in xmlns_items:
file.write(_encode(" %s=\"%s\"" % (k, _escape_attrib(v)), encoding))
if node.text or len(node):
file.write(_encode(">", encoding))
if node.text:
file.write(_encode(_escape_cdata(node.text), encoding))
for n in node:
self._write(file, n, encoding, namespaces)
file.write(_encode("</" + tag + ">", encoding))
else:
file.write(_encode(" />", encoding))
for k, v in xmlns_items:
del namespaces[v]
if node.tail:
file.write(_encode(_escape_cdata(node.tail), encoding))
# --------------------------------------------------------------------
# helpers
##
# Checks if an object appears to be a valid element object.
#
# @param element An element instance.
# @return A true value if this is an element object.
# @defreturn flag
def iselement(element):
# FIXME: not sure about this; might be a better idea to look
# for tag/attrib/text attributes
return isinstance(element, _ElementInterface) or hasattr(element, "tag")
##
# Writes an element tree or element structure to sys.stdout. This
# function should be used for debugging only.
# <p>
# The exact output format is implementation dependent. In this
# version, it's written as an ordinary XML file.
#
# @param elem An element tree or an individual element.
def dump(elem):
# debugging
if not isinstance(elem, ElementTree):
elem = ElementTree(elem)
elem.write(sys.stdout)
tail = elem.getroot().tail
if not tail or tail[-1] != "\n":
sys.stdout.write("\n")
def _encode(s, encoding):
if encoding:
return s.encode(encoding)
else:
return s
_escape = re.compile(r"[&<>\"\u0080-\uffff]+")
_escape_map = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    '"': "&quot;",
}
_namespace_map = {
# "well-known" namespace prefixes
"http://www.w3.org/XML/1998/namespace": "xml",
"http://www.w3.org/1999/xhtml": "html",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
"http://schemas.xmlsoap.org/wsdl/": "wsdl",
}
def _raise_serialization_error(text):
raise TypeError(
"cannot serialize %r (type %s)" % (text, type(text).__name__)
)
def _encode_entity(text, pattern=_escape):
# map reserved and non-ascii characters to numerical entities
def escape_entities(m, map=_escape_map):
out = []
append = out.append
for char in m.group():
text = map.get(char)
if text is None:
text = "&#%d;" % ord(char)
append(text)
return "".join(out)
try:
return _encode(pattern.sub(escape_entities, text), "ascii")
except TypeError:
_raise_serialization_error(text)
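# For example (illustrative): _encode_entity(u"<a>\u00e9") maps the reserved
# and non-ascii characters to entities, yielding "&lt;a&gt;&#233;".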
#
# the following functions assume an ascii-compatible encoding
# (or "utf-16")
def _escape_cdata(text):
# escape character data
try:
text = text.replace("&", "&")
text = text.replace("<", "<")
text = text.replace(">", ">")
return text
except (TypeError, AttributeError):
_raise_serialization_error(text)
def _escape_attrib(text):
# escape attribute value
try:
text = text.replace("&", "&")
text = text.replace("'", "'") # FIXME: overkill
text = text.replace("\"", """)
text = text.replace("<", "<")
text = text.replace(">", ">")
return text
except (TypeError, AttributeError):
_raise_serialization_error(text)
def fixtag(tag, namespaces):
# given a decorated tag (of the form {uri}tag), return prefixed
# tag and namespace declaration, if any
if isinstance(tag, QName):
tag = tag.text
namespace_uri, tag = tag[1:].split("}", 1)
prefix = namespaces.get(namespace_uri)
if prefix is None:
prefix = _namespace_map.get(namespace_uri)
if prefix is None:
prefix = "ns%d" % len(namespaces)
namespaces[namespace_uri] = prefix
if prefix == "xml":
xmlns = None
else:
xmlns = ("xmlns:%s" % prefix, namespace_uri)
else:
xmlns = None
return "%s:%s" % (prefix, tag), xmlns
##
# Parses an XML document into an element tree.
#
# @param source A filename or file object containing XML data.
# @param parser An optional parser instance. If not given, the
# standard {@link XMLTreeBuilder} parser is used.
# @return An ElementTree instance
def parse(source, parser=None):
tree = ElementTree()
tree.parse(source, parser)
return tree
##
# Parses an XML document into an element tree incrementally, and reports
# what's going on to the user.
#
# @param source A filename or file object containing XML data.
# @param events A list of events to report back. If omitted, only "end"
# events are reported.
# @return An (event, elem) iterator.
class iterparse:
def __init__(self, source, events=None):
if not hasattr(source, "read"):
source = open(source, "rb")
self._file = source
self._events = []
self._index = 0
self.root = self._root = None
self._parser = XMLTreeBuilder()
# wire up the parser for event reporting
parser = self._parser._parser
append = self._events.append
if events is None:
events = ["end"]
for event in events:
if event == "start":
try:
parser.ordered_attributes = 1
parser.specified_attributes = 1
def handler(tag, attrib_in, event=event, append=append,
start=self._parser._start_list):
append((event, start(tag, attrib_in)))
parser.StartElementHandler = handler
except AttributeError:
def handler(tag, attrib_in, event=event, append=append,
start=self._parser._start):
append((event, start(tag, attrib_in)))
parser.StartElementHandler = handler
elif event == "end":
def handler(tag, event=event, append=append,
end=self._parser._end):
append((event, end(tag)))
parser.EndElementHandler = handler
elif event == "start-ns":
def handler(prefix, uri, event=event, append=append):
try:
uri = _encode(uri, "ascii")
except UnicodeError:
pass
append((event, (prefix or "", uri)))
parser.StartNamespaceDeclHandler = handler
elif event == "end-ns":
def handler(prefix, event=event, append=append):
append((event, None))
parser.EndNamespaceDeclHandler = handler
def __next__(self):
while 1:
try:
item = self._events[self._index]
except IndexError:
if self._parser is None:
self.root = self._root
try:
raise StopIteration
except NameError:
raise IndexError
# load event buffer
del self._events[:]
self._index = 0
data = self._file.read(16384)
if data:
self._parser.feed(data)
else:
self._root = self._parser.close()
self._parser = None
else:
self._index = self._index + 1
return item
try:
iter
def __iter__(self):
return self
except NameError:
def __getitem__(self, index):
return self.__next__()
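# Typical usage sketch (the file name and tag are placeholders): stream over
# a document and handle each element as its end tag is seen.
#
#     for event, elem in iterparse("data.xml"):
#         if event == "end" and elem.tag == "record":
#             pass  # process the completed element here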
##
# Parses an XML document from a string constant. This function can
# be used to embed "XML literals" in Python code.
#
# @param source A string containing XML data.
# @return An Element instance.
# @defreturn Element
def XML(text):
parser = XMLTreeBuilder()
parser.feed(text)
return parser.close()
##
# Parses an XML document from a string constant, and also returns
# a dictionary which maps from element id:s to elements.
#
# @param source A string containing XML data.
# @return A tuple containing an Element instance and a dictionary.
# @defreturn (Element, dictionary)
def XMLID(text):
parser = XMLTreeBuilder()
parser.feed(text)
tree = parser.close()
ids = {}
for elem in tree.getiterator():
id = elem.get("id")
if id:
ids[id] = elem
return tree, ids
##
# Parses an XML document from a string constant. Same as {@link #XML}.
#
# @def fromstring(text)
# @param source A string containing XML data.
# @return An Element instance.
# @defreturn Element
fromstring = XML
##
# Generates a string representation of an XML element, including all
# subelements. If encoding is None, the return type is a string;
# otherwise it is a bytes array.
#
# @param element An Element instance.
# @return An (optionally) encoded string containing the XML data.
# @defreturn string
def tostring(element, encoding=None):
class dummy:
pass
data = []
file = dummy()
file.write = data.append
ElementTree(element).write(file, encoding)
if encoding:
return b"".join(data)
else:
return "".join(data)
##
# Generic element structure builder. This builder converts a sequence
# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
# #TreeBuilder.end} method calls to a well-formed element structure.
# <p>
# You can use this class to build an element structure using a custom XML
# parser, or a parser for some other XML-like format.
#
# @param element_factory Optional element factory. This factory
# is called to create new Element instances, as necessary.
class TreeBuilder:
def __init__(self, element_factory=None):
self._data = [] # data collector
self._elem = [] # element stack
self._last = None # last element
self._tail = None # true if we're after an end tag
if element_factory is None:
element_factory = _ElementInterface
self._factory = element_factory
##
# Flushes the parser buffers, and returns the toplevel document
# element.
#
# @return An Element instance.
# @defreturn Element
def close(self):
assert len(self._elem) == 0, "missing end tags"
        assert self._last is not None, "missing toplevel element"
return self._last
def _flush(self):
if self._data:
if self._last is not None:
text = "".join(self._data)
if self._tail:
assert self._last.tail is None, "internal error (tail)"
self._last.tail = text
else:
assert self._last.text is None, "internal error (text)"
self._last.text = text
self._data = []
##
# Adds text to the current element.
#
# @param data A string. This should be either an 8-bit string
# containing ASCII text, or a Unicode string.
def data(self, data):
self._data.append(data)
##
# Opens a new element.
#
# @param tag The element name.
# @param attrib A dictionary containing element attributes.
# @return The opened element.
# @defreturn Element
def start(self, tag, attrs):
self._flush()
self._last = elem = self._factory(tag, attrs)
if self._elem:
self._elem[-1].append(elem)
self._elem.append(elem)
self._tail = 0
return elem
##
# Closes the current element.
#
# @param tag The element name.
# @return The closed element.
# @defreturn Element
def end(self, tag):
self._flush()
self._last = self._elem.pop()
assert self._last.tag == tag,\
"end tag mismatch (expected %s, got %s)" % (
self._last.tag, tag)
self._tail = 1
return self._last
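# Illustrative use of the builder interface (not part of the original
# module): the same start/data/end calls an XML parser would make can be
# issued by hand to assemble a tree.
#
#     b = TreeBuilder()
#     b.start("root", {})
#     b.data("hi")
#     b.end("root")
#     elem = b.close()   # elem.tag == "root", elem.text == "hi"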
##
# Element structure builder for XML source data, based on the
# <b>expat</b> parser.
#
# @keyparam target Target object. If omitted, the builder uses an
# instance of the standard {@link #TreeBuilder} class.
# @keyparam html Predefine HTML entities. This flag is not supported
# by the current implementation.
# @see #ElementTree
# @see #TreeBuilder
class XMLTreeBuilder:
def __init__(self, html=0, target=None):
try:
from xml.parsers import expat
except ImportError:
raise ImportError(
"No module named expat; use SimpleXMLTreeBuilder instead"
)
self._parser = parser = expat.ParserCreate(None, "}")
if target is None:
target = TreeBuilder()
self._target = target
self._names = {} # name memo cache
# callbacks
parser.DefaultHandlerExpand = self._default
parser.StartElementHandler = self._start
parser.EndElementHandler = self._end
parser.CharacterDataHandler = self._data
# let expat do the buffering, if supported
try:
self._parser.buffer_text = 1
except AttributeError:
pass
# use new-style attribute handling, if supported
try:
self._parser.ordered_attributes = 1
self._parser.specified_attributes = 1
parser.StartElementHandler = self._start_list
except AttributeError:
pass
encoding = "utf-8"
# target.xml(encoding, None)
self._doctype = None
self.entity = {}
def _fixname(self, key):
# expand qname, and convert name string to ascii, if possible
try:
name = self._names[key]
except KeyError:
name = key
if "}" in name:
name = "{" + name
self._names[key] = name
return name
def _start(self, tag, attrib_in):
fixname = self._fixname
tag = fixname(tag)
attrib = {}
for key, value in attrib_in.items():
attrib[fixname(key)] = value
return self._target.start(tag, attrib)
def _start_list(self, tag, attrib_in):
fixname = self._fixname
tag = fixname(tag)
attrib = {}
if attrib_in:
for i in range(0, len(attrib_in), 2):
attrib[fixname(attrib_in[i])] = attrib_in[i+1]
return self._target.start(tag, attrib)
def _data(self, text):
return self._target.data(text)
def _end(self, tag):
return self._target.end(self._fixname(tag))
def _default(self, text):
prefix = text[:1]
if prefix == "&":
# deal with undefined entities
try:
self._target.data(self.entity[text[1:-1]])
except KeyError:
from xml.parsers import expat
raise expat.error(
"undefined entity %s: line %d, column %d" %
(text, self._parser.ErrorLineNumber,
self._parser.ErrorColumnNumber)
)
elif prefix == "<" and text[:9] == "<!DOCTYPE":
self._doctype = [] # inside a doctype declaration
elif self._doctype is not None:
# parse doctype contents
if prefix == ">":
self._doctype = None
return
text = text.strip()
if not text:
return
self._doctype.append(text)
n = len(self._doctype)
if n > 2:
type = self._doctype[1]
if type == "PUBLIC" and n == 4:
name, type, pubid, system = self._doctype
elif type == "SYSTEM" and n == 3:
name, type, system = self._doctype
pubid = None
else:
return
if pubid:
pubid = pubid[1:-1]
self.doctype(name, pubid, system[1:-1])
self._doctype = None
##
# Handles a doctype declaration.
#
# @param name Doctype name.
# @param pubid Public identifier.
# @param system System identifier.
def doctype(self, name, pubid, system):
pass
##
# Feeds data to the parser.
#
# @param data Encoded data.
def feed(self, data):
self._parser.Parse(data, 0)
##
# Finishes feeding data to the parser.
#
# @return An element structure.
# @defreturn Element
def close(self):
self._parser.Parse("", 1) # end of data
tree = self._target.close()
del self._target, self._parser # get rid of circular references
return tree
# compatibility
XMLParser = XMLTreeBuilder
|
|
#!/usr/bin/env python
# Copyright 2011 OpenStack, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""vm_vdi_cleaner.py - List or clean orphaned VDIs/instances on XenServer."""
import doctest
import optparse
import sys
import XenAPI
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import utils
from nova.virt import xenapi_conn
flags.DECLARE("resize_confirm_window", "nova.compute.manager")
flags.DECLARE("xenapi_connection_url", "nova.virt.xenapi_conn")
flags.DECLARE("xenapi_connection_username", "nova.virt.xenapi_conn")
flags.DECLARE("xenapi_connection_password", "nova.virt.xenapi_conn")
FLAGS = flags.FLAGS
# NOTE(sirp): Nova futzes with sys.argv in order to provide a default
# flagfile. To isolate this awful practice, we're supplying a dummy
# argument list.
dummy = ["fakearg"]
utils.default_flagfile(args=dummy)
FLAGS(dummy)
class UnrecognizedNameLabel(Exception):
pass
def parse_options():
"""Generate command line options."""
ALLOWED_COMMANDS = ["list-vdis", "clean-vdis", "list-instances",
"clean-instances", "test"]
arg_str = "|".join(ALLOWED_COMMANDS)
parser = optparse.OptionParser("%prog [options] [" + arg_str + "]")
parser.add_option("--verbose", action="store_true")
options, args = parser.parse_args()
if not args:
parser.print_usage()
sys.exit(1)
return options, args
def get_instance_id_from_name_label(name_label, template):
"""In order to derive the instance_id from the name label on the VM, we
take the following steps:
1. We substitute a dummy value in to the instance_name_template so we
can figure out the prefix and the suffix of the template (the
instance_id is between the two)
2. We delete the prefix and suffix from the name_label.
3. What's left *should* be the instance_id which we cast to an int
and return.
>>> get_instance_id_from_name_label("", "instance-%08x")
Traceback (most recent call last):
...
UnrecognizedNameLabel
>>> get_instance_id_from_name_label("instance-00000001", "instance-%08x")
1
>>> get_instance_id_from_name_label("instance-0000000A", "instance-%08x")
10
>>> get_instance_id_from_name_label("instance-42-suffix", \
"instance-%d-suffix")
42
"""
# Interpolate template to figure out where to extract the instance_id from.
# The instance_id may be in hex "%x" or decimal "%d", so try decimal first
# then fall back to hex.
fake_instance_id = 123456789
result = template % fake_instance_id
in_hex = False
base_10 = "%d" % fake_instance_id
try:
prefix, suffix = result.split(base_10)
except ValueError:
base_16 = "%x" % fake_instance_id
prefix, suffix = result.split(base_16)
in_hex = True
if prefix:
name_label = name_label.replace(prefix, '')
if suffix:
name_label = name_label.replace(suffix, '')
try:
if in_hex:
instance_id = int(name_label, 16)
else:
instance_id = int(name_label)
except ValueError:
raise UnrecognizedNameLabel(name_label)
return instance_id
def find_orphaned_instances(session, verbose=False):
"""Find and return a list of orphaned instances."""
ctxt = context.get_admin_context(read_deleted="only")
orphaned_instances = []
for vm_rec in _get_applicable_vm_recs(session):
try:
instance_id = get_instance_id_from_name_label(
vm_rec["name_label"], FLAGS.instance_name_template)
except UnrecognizedNameLabel, exc:
print_xen_object("WARNING: Unrecognized VM", vm_rec,
indent_level=0, verbose=verbose)
continue
try:
instance = db.api.instance_get(ctxt, instance_id)
except exception.InstanceNotFound:
# NOTE(jk0): Err on the side of caution here. If we don't know
# anything about the particular instance, ignore it.
print_xen_object("INFO: Ignoring VM", vm_rec, indent_level=0,
verbose=verbose)
continue
# NOTE(jk0): This would be triggered if a VM was deleted but the
# actual deletion process failed somewhere along the line.
is_active_and_deleting = (instance.vm_state == "active" and
instance.task_state == "deleting")
# NOTE(jk0): A zombie VM is an instance that is not active and hasn't
# been updated in over the specified period.
is_zombie_vm = (instance.vm_state != "active"
and utils.is_older_than(instance.updated_at,
FLAGS.zombie_instance_updated_at_window))
if is_active_and_deleting or is_zombie_vm:
orphaned_instances.append(instance)
return orphaned_instances
def cleanup_instance(session, instance):
"""Delete orphaned instances."""
network_info = None
connection = xenapi_conn.get_connection(_)
connection.destroy(instance, network_info)
def _get_applicable_vm_recs(session):
"""An 'applicable' VM is one that is not a template and not the control
domain.
"""
for vm_ref in session.xenapi.VM.get_all():
vm_rec = session.xenapi.VM.get_record(vm_ref)
if vm_rec["is_a_template"] or vm_rec["is_control_domain"]:
continue
yield vm_rec
def print_xen_object(obj_type, obj, indent_level=0, spaces_per_indent=4,
verbose=False):
"""Pretty-print a Xen object.
Looks like:
VM (abcd-abcd-abcd): 'name label here'
"""
if not verbose:
return
uuid = obj["uuid"]
try:
name_label = obj["name_label"]
except KeyError:
name_label = ""
msg = "%(obj_type)s (%(uuid)s) '%(name_label)s'" % locals()
indent = " " * spaces_per_indent * indent_level
print "".join([indent, msg])
def _find_vdis_connected_to_vm(session, connected_vdi_uuids, verbose=False):
"""Find VDIs which are connected to VBDs which are connected to VMs."""
def _is_null_ref(ref):
return ref == "OpaqueRef:NULL"
def _add_vdi_and_parents_to_connected(vdi_rec, indent_level):
indent_level += 1
vdi_and_parent_uuids = []
cur_vdi_rec = vdi_rec
while True:
cur_vdi_uuid = cur_vdi_rec["uuid"]
print_xen_object("VDI", vdi_rec, indent_level=indent_level,
verbose=verbose)
connected_vdi_uuids.add(cur_vdi_uuid)
vdi_and_parent_uuids.append(cur_vdi_uuid)
try:
                parent_vdi_uuid = cur_vdi_rec["sm_config"]["vhd-parent"]
except KeyError:
parent_vdi_uuid = None
            # NOTE(sirp): VDIs can have themselves as a parent?!
if parent_vdi_uuid and parent_vdi_uuid != cur_vdi_uuid:
indent_level += 1
cur_vdi_ref = session.xenapi.VDI.get_by_uuid(
parent_vdi_uuid)
cur_vdi_rec = session.xenapi.VDI.get_record(
cur_vdi_ref)
else:
break
for vm_rec in _get_applicable_vm_recs(session):
indent_level = 0
print_xen_object("VM", vm_rec, indent_level=indent_level,
verbose=verbose)
vbd_refs = vm_rec["VBDs"]
for vbd_ref in vbd_refs:
vbd_rec = session.xenapi.VBD.get_record(vbd_ref)
indent_level = 1
print_xen_object("VBD", vbd_rec, indent_level=indent_level,
verbose=verbose)
vbd_vdi_ref = vbd_rec["VDI"]
if _is_null_ref(vbd_vdi_ref):
continue
vdi_rec = session.xenapi.VDI.get_record(vbd_vdi_ref)
_add_vdi_and_parents_to_connected(vdi_rec, indent_level)
def _find_all_vdis_and_system_vdis(session, all_vdi_uuids, connected_vdi_uuids,
verbose=False):
"""Collects all VDIs and adds system VDIs to the connected set."""
def _system_owned(vdi_rec):
vdi_name = vdi_rec["name_label"]
return (vdi_name.startswith("USB") or
vdi_name.endswith(".iso") or
vdi_rec["type"] == "system")
for vdi_ref in session.xenapi.VDI.get_all():
vdi_rec = session.xenapi.VDI.get_record(vdi_ref)
vdi_uuid = vdi_rec["uuid"]
all_vdi_uuids.add(vdi_uuid)
# System owned and non-managed VDIs should be considered 'connected'
# for our purposes.
if _system_owned(vdi_rec):
print_xen_object("SYSTEM VDI", vdi_rec, indent_level=0,
verbose=verbose)
connected_vdi_uuids.add(vdi_uuid)
elif not vdi_rec["managed"]:
print_xen_object("UNMANAGED VDI", vdi_rec, indent_level=0,
verbose=verbose)
connected_vdi_uuids.add(vdi_uuid)
def find_orphaned_vdi_uuids(session, verbose=False):
"""Walk VM -> VBD -> VDI change and accumulate connected VDIs."""
connected_vdi_uuids = set()
_find_vdis_connected_to_vm(session, connected_vdi_uuids, verbose=verbose)
all_vdi_uuids = set()
_find_all_vdis_and_system_vdis(session, all_vdi_uuids, connected_vdi_uuids,
verbose=verbose)
orphaned_vdi_uuids = all_vdi_uuids - connected_vdi_uuids
return orphaned_vdi_uuids
def list_orphaned_vdis(vdi_uuids, verbose=False):
"""List orphaned VDIs."""
for vdi_uuid in vdi_uuids:
if verbose:
print "ORPHANED VDI (%s)" % vdi_uuid
else:
print vdi_uuid
def clean_orphaned_vdis(session, vdi_uuids, verbose=False):
"""Clean orphaned VDIs."""
for vdi_uuid in vdi_uuids:
if verbose:
print "CLEANING VDI (%s)" % vdi_uuid
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
try:
session.xenapi.VDI.destroy(vdi_ref)
except XenAPI.Failure, exc:
print >> sys.stderr, "Skipping %s: %s" % (vdi_uuid, exc)
def list_orphaned_instances(orphaned_instances, verbose=False):
"""List orphaned instances."""
for orphaned_instance in orphaned_instances:
if verbose:
print "ORPHANED INSTANCE (%s)" % orphaned_instance.name
else:
print orphaned_instance.name
def clean_orphaned_instances(session, orphaned_instances, verbose=False):
"""Clean orphaned instances."""
for instance in orphaned_instances:
if verbose:
print "CLEANING INSTANCE (%s)" % instance.name
cleanup_instance(session, instance)
def main():
"""Main loop."""
options, args = parse_options()
verbose = options.verbose
command = args[0]
if FLAGS.zombie_instance_updated_at_window < FLAGS.resize_confirm_window:
raise Exception("`zombie_instance_updated_at_window` has to be longer"
" than `resize_confirm_window`.")
session = XenAPI.Session(FLAGS.xenapi_connection_url)
session.xenapi.login_with_password(FLAGS.xenapi_connection_username,
FLAGS.xenapi_connection_password)
if command == "list-vdis":
if verbose:
print "Connected VDIs:\n"
orphaned_vdi_uuids = find_orphaned_vdi_uuids(session, verbose=verbose)
if verbose:
print "\nOprhaned VDIs:\n"
list_orphaned_vdis(orphaned_vdi_uuids, verbose=verbose)
elif command == "clean-vdis":
orphaned_vdi_uuids = find_orphaned_vdi_uuids(session, verbose=verbose)
clean_orphaned_vdis(session, orphaned_vdi_uuids, verbose=verbose)
elif command == "list-instances":
orphaned_instances = find_orphaned_instances(session, verbose=verbose)
list_orphaned_instances(orphaned_instances, verbose=verbose)
elif command == "clean-instances":
orphaned_instances = find_orphaned_instances(session, verbose=verbose)
clean_orphaned_instances(session, orphaned_instances,
verbose=verbose)
elif command == "test":
doctest.testmod()
else:
print "Unknown command '%s'" % command
sys.exit(1)
if __name__ == "__main__":
main()
|
|
#!/usr/bin/env python3
import time
import json
import os
import re
from scanner import downloader, imageboard_info
from scanner.config import DB_FILE, currently_downloading
import sqlite3
import subprocess
import urllib.request
import threading
import http.client
class thread_scanner:
def __init__(self, keywords_file:str, output:str, quota_mb:int, wait_time:int, logger):
"""
        Using the keywords file passed as a parameter to 4scanner,
        thread_scanner will search multiple imageboards and their threads,
        and launch the download of a thread if a keyword is found in its first post.
        Use scan() to start the scan.
        Args:
            keywords_file: path of the JSON file describing which imageboards and keywords to search (see README for more info)
            output: the output directory where the pictures will be downloaded
            quota_mb: stop 4scanner after quota_mb MB have been downloaded
            wait_time: time to wait, in seconds, between scans
            logger: logger object used to report progress and errors
"""
self.keywords_file = keywords_file
self.output = output
self.quota_mb = quota_mb
self.wait_time = wait_time
self.logger = logger
def get_catalog_json(self, board:str, chan:str):
"""
        Get the catalog of a given imageboard's board as JSON.
        Returns:
            the parsed catalog (a list of catalog pages)
"""
chan_base_url = imageboard_info.imageboard_info(chan).base_url
catalog = urllib.request.urlopen(
"{0}{1}/catalog.json".format(chan_base_url, board))
try:
catalog_data = catalog.read()
except http.client.IncompleteRead as err:
catalog_data = err.partial
return json.loads(catalog_data.decode("utf8"))
def scan_thread(self, keyword:str, catalog_json:str, subject_only:str, wildcard:str):
"""
        Check each thread in the catalog; threads that contain the keyword are returned.
        Args:
            keyword: a keyword to search for. Example: "moot"
            catalog_json: the board catalog, as returned by get_catalog_json()
            subject_only: search only within the subject of the thread, as opposed to searching both the subject and the first post
            wildcard: matching mode ("all", "start", or None for whole-word matching)
        Returns:
            a list of thread numbers that matched the keyword
"""
matched_threads = []
        for i in range(len(catalog_json)):
            for thread in catalog_json[i]["threads"]:
                # Build the regex according to the wildcard mode
                if wildcard == "all":
                    regex = r'{0}'.format(keyword)
                elif wildcard == "start":
                    regex = r'\b{0}'.format(keyword)
                else:
                    regex = r'\b{0}\b'.format(keyword)
                # Search thread subject
                if 'sub' in thread:
                    if re.search(regex, str(thread["sub"]), re.IGNORECASE):
                        matched_threads.append(thread["no"])
                if not subject_only:
                    # Search OP's post body
                    if 'com' in thread:
                        if re.search(regex, str(thread["com"]), re.IGNORECASE):
                            matched_threads.append(thread["no"])
return matched_threads
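    # Illustrative effect of the wildcard modes on the generated regex,
    # assuming the keyword "cat":
    #   "all"   -> r'cat'      also matches "concatenate"
    #   "start" -> r'\bcat'    matches "cats" but not "bobcat"
    #   None    -> r'\bcat\b'  matches only the whole word "cat"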
def download_thread(self, thread_id:int, chan:str, board:str, folder:str, output:str, condition:dict, dupe_check:bool, tag_list:list, throttle:int):
"""
        Create a downloader object with the info passed as parameters and start the download in a new thread.
"""
        thread_downloader = downloader.downloader(thread_id, board, chan, output, folder, True, condition, dupe_check, tag_list, throttle, self.logger)
t = threading.Thread(target=thread_downloader.download)
t.daemon = True
t.start()
def dir_size_mb(self, directory):
"""
Check the size of a directory in MB.
Args:
directory: the path to a directory
Returns:
Size of the directory in MB
"""
total_size = 0
for dirpath, dirnames, filenames in os.walk(directory):
for f in filenames:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size / 1000000
def check_quota(self):
"""
        Stop 4scanner if the download quota was reached.
        """
        if int(self.quota_mb) < self.dir_size_mb(os.path.join(self.output, "downloads")):
self.logger.info("Quota limit exceeded. Stopping 4scanner.")
exit(0)
def get_check_duplicate(self, search):
"""
Check whether to activate the check duplicate feature
Returns:
True if we need to activate it, False otherwise
"""
if 'check_duplicate' in search:
if search['check_duplicate']:
return True
else:
return False
# duplicate check is on by default
return True
def get_condition(self, search:dict):
"""
Get all search condition from a search
Returns:
All search conditions as a dict
"""
condition = {}
if 'extension' in search:
condition["ext"] = []
if isinstance(search['extension'], str):
condition["ext"].append(search['extension'])
else:
for extension in search['extension']:
condition["ext"].append(extension)
else:
condition["ext"] = False
if 'filename' in search:
condition["filename"] = []
if isinstance(search['filename'], str):
condition["filename"].append(search['filename'])
else:
for extension in search['filename']:
condition["filename"].append(extension)
else:
condition["filename"] = False
if 'width' in search:
condition["width"] = search['width']
else:
condition["width"] = False
if 'height' in search:
condition["height"] = search['height']
else:
condition["height"] = False
return condition
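    # Illustrative input/output (assumed values): a search entry such as
    #   {"extension": [".webm", ".gif"], "width": ">1000"}
    # yields
    #   {"ext": [".webm", ".gif"], "filename": False, "width": ">1000", "height": False}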
def get_imageboard(self, search:dict):
"""
get imageboard from a search
Returns:
imageboard_info object of an imageboard
"""
if 'imageboard' in search:
chan = search["imageboard"]
# will raise error if not supported
imageboard_info.imageboard_info(chan)
else:
# default
chan = "4chan"
return chan
def get_tag_list(self, search):
"""
get all tags from a search
Returns:
a list containing all tags or None
"""
if 'tag' in search:
tag = search["tag"]
else:
tag = None
return tag
def get_subject_only(self, search):
"""
        Check whether to search only the subject of posts for a given search.
        Returns:
            the value of 'subject_only' from the search, or None if unset
"""
if 'subject_only' in search:
subject_only = search["subject_only"]
else:
subject_only = None
return subject_only
def get_wildcard(self, search):
"""
        Get the wildcard mode for a given search.
        Returns:
            the wildcard mode ("all" or "start"), or None for whole-word matching
"""
if 'wildcard' in search:
wildcard = search["wildcard"]
else:
wildcard = None
return wildcard
def get_keyword(self, search):
"""
get a list of all keywords to use in a search.
Returns:
list of all keywords to search for
"""
if 'keywords' in search:
keywords_array = []
if isinstance(search['keywords'], str):
keywords_array.append(search['keywords'])
else:
for keywords in search['keywords']:
keywords_array.append(keywords)
else:
self.logger.critical("Cannot scan without any keyword...")
exit(1)
return keywords_array
def scan(self):
"""
Start the scanning/download process.
"""
while True:
if self.quota_mb:
self.check_quota()
self.logger.info("Searching threads...")
            try:
                with open(self.keywords_file) as keywords_f:
                    json_file = json.load(keywords_f)
            except ValueError:
                self.logger.critical("Your JSON file is malformed. Quitting.")
                exit(1)
for search in json_file["searches"]:
# Getting imageboard to search
chan = self.get_imageboard(search)
# Checking conditions
condition = self.get_condition(search)
# Check if we need to check for duplicate when downloading
dupe_check = self.get_check_duplicate(search)
# Getting output folder name
folder_name = search["folder_name"]
# Get tag list (if any)
tag_list = self.get_tag_list(search)
# Get throttle
throttle = int(search['throttle']) if 'throttle' in search else 2
# if this is true we will search only the subject field
subject_only = self.get_subject_only(search)
wildcard = self.get_wildcard(search)
board = search["board"]
keywords = self.get_keyword(search)
try:
catalog_json = self.get_catalog_json(board, chan)
for keyword in keywords:
threads_id = self.scan_thread(keyword, catalog_json, subject_only, wildcard)
for thread_id in list(set(threads_id)):
if thread_id not in currently_downloading:
self.download_thread(thread_id, chan, board,
folder_name, self.output,
condition, dupe_check,
tag_list, throttle)
# Used to keep track of what is currently downloading
currently_downloading.append(thread_id)
            except urllib.error.HTTPError:
                self.logger.warning("Error while opening {0} catalog page. "
                                    "Retrying during next scan.".format(board))
active_downloads = threading.active_count()-1
self.logger.info("{0} threads currently downloading.".format(active_downloads))
self.logger.info("Searching again in {0} minutes!".format(str(int(self.wait_time / 60))))
time.sleep(self.wait_time)
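# Minimal wiring sketch (hypothetical values; the real entry point lives
# elsewhere in 4scanner):
#
#     import logging
#     scanner = thread_scanner("keywords.json", "/tmp/out", 1000, 600,
#                              logging.getLogger("4scanner"))
#     scanner.scan()   # blocks, rescanning every wait_time seconds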
|
|
from __future__ import absolute_import
import pytest
from sentry.utils.compat.mock import patch
from sentry.tasks.relay import schedule_update_config_cache
from sentry.relay.projectconfig_cache.redis import RedisProjectConfigCache
from sentry.relay.projectconfig_debounce_cache.redis import RedisProjectConfigDebounceCache
from sentry.models import ProjectKey, ProjectOption
def _cache_keys_for_project(project):
yield project.id
for key in ProjectKey.objects.filter(project_id=project.id):
yield key.public_key
@pytest.fixture
def redis_cache(monkeypatch):
monkeypatch.setattr(
"django.conf.settings.SENTRY_RELAY_PROJECTCONFIG_CACHE",
"sentry.relay.projectconfig_cache.redis.RedisProjectConfigCache",
)
cache = RedisProjectConfigCache()
monkeypatch.setattr("sentry.relay.projectconfig_cache.set_many", cache.set_many)
monkeypatch.setattr("sentry.relay.projectconfig_cache.delete_many", cache.delete_many)
monkeypatch.setattr("sentry.relay.projectconfig_cache.get", cache.get)
monkeypatch.setattr(
"django.conf.settings.SENTRY_RELAY_PROJECTCONFIG_DEBOUNCE_CACHE",
"sentry.relay.projectconfig_debounce_cache.redis.RedisProjectConfigDebounceCache",
)
debounce_cache = RedisProjectConfigDebounceCache()
monkeypatch.setattr(
"sentry.relay.projectconfig_debounce_cache.mark_task_done", debounce_cache.mark_task_done
)
monkeypatch.setattr(
"sentry.relay.projectconfig_debounce_cache.check_is_debounced",
debounce_cache.check_is_debounced,
)
return cache
@pytest.mark.django_db
def test_no_cache(monkeypatch, default_project):
def apply_async(*a, **kw):
assert False
monkeypatch.setattr("sentry.tasks.relay.update_config_cache.apply_async", apply_async)
schedule_update_config_cache(generate=True, project_id=default_project.id)
@pytest.mark.django_db
def test_debounce(monkeypatch, default_project, default_organization, redis_cache):
tasks = []
def apply_async(args, kwargs):
assert not args
tasks.append(kwargs)
monkeypatch.setattr("sentry.tasks.relay.update_config_cache.apply_async", apply_async)
schedule_update_config_cache(generate=True, project_id=default_project.id)
schedule_update_config_cache(generate=False, project_id=default_project.id)
schedule_update_config_cache(generate=True, organization_id=default_organization.id)
schedule_update_config_cache(generate=False, organization_id=default_organization.id)
assert tasks == [
{
"generate": True,
"project_id": default_project.id,
"organization_id": None,
"update_reason": None,
},
{
"generate": True,
"project_id": None,
"organization_id": default_organization.id,
"update_reason": None,
},
]
@pytest.mark.django_db
@pytest.mark.parametrize("entire_organization", (True, False))
def test_generate(
monkeypatch,
default_project,
default_organization,
default_projectkey,
task_runner,
entire_organization,
redis_cache,
):
assert not redis_cache.get(default_project.id)
if not entire_organization:
kwargs = {"project_id": default_project.id}
else:
kwargs = {"organization_id": default_organization.id}
with task_runner():
schedule_update_config_cache(generate=True, **kwargs)
cfg = redis_cache.get(default_project.id)
assert cfg["organizationId"] == default_organization.id
assert cfg["projectId"] == default_project.id
assert cfg["publicKeys"] == [
{
"publicKey": default_projectkey.public_key,
"isEnabled": True,
"numericId": default_projectkey.id,
"quotas": [],
}
]
@pytest.mark.django_db
@pytest.mark.parametrize("entire_organization", (True, False))
def test_invalidate(
monkeypatch,
default_project,
default_organization,
task_runner,
entire_organization,
redis_cache,
):
cfg = {"foo": "bar"}
redis_cache.set_many({default_project.id: cfg})
assert redis_cache.get(default_project.id) == cfg
if not entire_organization:
kwargs = {"project_id": default_project.id}
else:
kwargs = {"organization_id": default_organization.id}
with task_runner():
schedule_update_config_cache(generate=False, **kwargs)
for cache_key in _cache_keys_for_project(default_project):
assert not redis_cache.get(cache_key)
@pytest.mark.django_db
def test_project_update_option(default_project, task_runner, redis_cache):
with task_runner():
default_project.update_option(
"sentry:relay_pii_config", '{"applications": {"$string": ["@creditcard:mask"]}}'
)
assert redis_cache.get(default_project.id)["config"]["piiConfig"] == {
"applications": {"$string": ["@creditcard:mask"]}
}
with task_runner():
default_project.organization.update_option(
"sentry:relay_pii_config", '{"applications": {"$string": ["@creditcard:mask"]}}'
)
for cache_key in _cache_keys_for_project(default_project):
assert redis_cache.get(cache_key) is None
@pytest.mark.django_db
def test_project_delete_option(default_project, task_runner, redis_cache):
with task_runner():
default_project.delete_option("sentry:relay_pii_config")
for cache_key in _cache_keys_for_project(default_project):
assert redis_cache.get(cache_key)["config"]["piiConfig"] == {}
@pytest.mark.django_db
def test_project_get_option_does_not_reload(default_project, task_runner, monkeypatch):
ProjectOption.objects._option_cache.clear()
with task_runner():
with patch("sentry.models.projectoption.cache.get", return_value=None):
with patch(
"sentry.models.projectoption.schedule_update_config_cache"
) as update_config_cache:
default_project.get_option(
"sentry:relay_pii_config", '{"applications": {"$string": ["@creditcard:mask"]}}'
)
update_config_cache.assert_not_called() # noqa
@pytest.mark.django_db
def test_projectkeys(default_project, task_runner, redis_cache):
with task_runner():
deleted_pks = list(ProjectKey.objects.filter(project=default_project))
for key in deleted_pks:
key.delete()
pk = ProjectKey(project=default_project)
pk.save()
for key in deleted_pks:
# XXX: Ideally we would write `{"disabled": False}` into Redis, however
# it's fine if we don't and instead Relay starts hitting the endpoint
# which will write this for us.
assert not redis_cache.get(key.public_key)
for cache_key in (default_project.id, pk.public_key):
(pk_json,) = redis_cache.get(cache_key)["publicKeys"]
assert pk_json["publicKey"] == pk.public_key
assert pk_json["isEnabled"]
with task_runner():
pk.delete()
assert not redis_cache.get(default_project.id)["publicKeys"]
for key in ProjectKey.objects.filter(project_id=default_project.id):
assert not redis_cache.get(default_project.id)
|
|
"""init
Revision ID: 184ed1055383
Revises: None
Create Date: 2013-04-20 10:15:06.179822
"""
# revision identifiers, used by Alembic.
revision = '184ed1055383'
down_revision = None
from alembic import op
import sqlalchemy as sa
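# A migration like this is applied with the Alembic CLI (illustrative
# invocation; alembic.ini must point at the target database):
#
#     alembic upgrade 184ed1055383    # or: alembic upgrade head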
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('userid', sa.String(length=22), nullable=False),
sa.Column('fullname', sa.Unicode(length=80), nullable=False),
sa.Column('username', sa.Unicode(length=80), nullable=True),
sa.Column('pw_hash', sa.String(length=80), nullable=True),
sa.Column('timezone', sa.Unicode(length=40), nullable=True),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('userid'),
sa.UniqueConstraint('username')
)
op.create_table('organization',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('owners_id', sa.Integer(), nullable=True),
sa.Column('userid', sa.String(length=22), nullable=False),
sa.Column('name', sa.Unicode(length=80), nullable=True),
sa.Column('title', sa.Unicode(length=80), nullable=False),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.ForeignKeyConstraint(['owners_id'], ['team.id'], name='fk_organization_owners_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'),
sa.UniqueConstraint('userid')
)
op.create_table('smsmessage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('phone_number', sa.String(length=15), nullable=False),
sa.Column('transaction_id', sa.Unicode(length=40), nullable=True),
sa.Column('message', sa.UnicodeText(), nullable=False),
sa.Column('status', sa.Integer(), nullable=False),
sa.Column('status_at', sa.DateTime(), nullable=True),
sa.Column('fail_reason', sa.Unicode(length=25), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('transaction_id')
)
op.create_table('client',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('org_id', sa.Integer(), nullable=True),
sa.Column('title', sa.Unicode(length=250), nullable=False),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.Column('website', sa.Unicode(length=250), nullable=False),
sa.Column('redirect_uri', sa.Unicode(length=250), nullable=True),
sa.Column('notification_uri', sa.Unicode(length=250), nullable=True),
sa.Column('iframe_uri', sa.Unicode(length=250), nullable=True),
sa.Column('resource_uri', sa.Unicode(length=250), nullable=True),
sa.Column('active', sa.Boolean(), nullable=False),
sa.Column('allow_any_login', sa.Boolean(), nullable=False),
sa.Column('team_access', sa.Boolean(), nullable=False),
sa.Column('key', sa.String(length=22), nullable=False),
sa.Column('secret', sa.String(length=44), nullable=False),
sa.Column('trusted', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['org_id'], ['organization.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('key')
)
op.create_table('permission',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('org_id', sa.Integer(), nullable=True),
sa.Column('name', sa.Unicode(length=80), nullable=False),
sa.Column('title', sa.Unicode(length=250), nullable=False),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.Column('allusers', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['org_id'], ['organization.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('noticetype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('name', sa.Unicode(length=80), nullable=False),
sa.Column('title', sa.Unicode(length=250), nullable=False),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.Column('allusers', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('useremailclaim',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('email', sa.Unicode(length=80), nullable=True),
sa.Column('verification_code', sa.String(length=44), nullable=False),
sa.Column('md5sum', sa.String(length=32), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('useroldid',
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('userid', sa.String(length=22), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('userid')
)
op.create_table('userphoneclaim',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('phone', sa.Unicode(length=80), nullable=False),
sa.Column('gets_text', sa.Boolean(), nullable=False),
sa.Column('verification_code', sa.Unicode(length=4), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('useremail',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('email', sa.Unicode(length=80), nullable=False),
sa.Column('md5sum', sa.String(length=32), nullable=False),
sa.Column('primary', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('md5sum')
)
op.create_table('userphone',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('primary', sa.Boolean(), nullable=False),
sa.Column('phone', sa.Unicode(length=80), nullable=False),
sa.Column('gets_text', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('phone')
)
op.create_table('team',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('userid', sa.String(length=22), nullable=False),
sa.Column('title', sa.Unicode(length=250), nullable=False),
sa.Column('org_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['org_id'], ['organization.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('userid')
)
op.create_table('userexternalid',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('service', sa.String(length=20), nullable=False),
sa.Column('userid', sa.String(length=250), nullable=False),
sa.Column('username', sa.Unicode(length=80), nullable=True),
sa.Column('oauth_token', sa.String(length=250), nullable=True),
sa.Column('oauth_token_secret', sa.String(length=250), nullable=True),
sa.Column('oauth_token_type', sa.String(length=250), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('service','userid')
)
op.create_table('passwordresetrequest',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('reset_code', sa.String(length=44), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('userflashmessage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('seq', sa.Integer(), nullable=False),
sa.Column('category', sa.Unicode(length=20), nullable=False),
sa.Column('message', sa.Unicode(length=250), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('authcode',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=44), nullable=False),
sa.Column('scope', sa.Unicode(length=250), nullable=False),
sa.Column('redirect_uri', sa.Unicode(length=1024), nullable=False),
sa.Column('used', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('team_membership',
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint()
)
op.create_table('userclientpermissions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.Integer(), nullable=False),
sa.Column('permissions', sa.Unicode(length=250), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id','client_id')
)
op.create_table('authtoken',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('client_id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=22), nullable=False),
sa.Column('token_type', sa.String(length=250), nullable=False),
sa.Column('secret', sa.String(length=44), nullable=True),
sa.Column('algorithm', sa.String(length=20), nullable=True),
sa.Column('scope', sa.Unicode(length=250), nullable=False),
sa.Column('validity', sa.Integer(), nullable=False),
sa.Column('refresh_token', sa.String(length=22), nullable=True),
sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('refresh_token'),
sa.UniqueConstraint('token'),
sa.UniqueConstraint('user_id','client_id')
)
op.create_table('teamclientpermissions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.Integer(), nullable=False),
sa.Column('permissions', sa.Unicode(length=250), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('team_id','client_id')
)
op.create_table('resource',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('name', sa.Unicode(length=20), nullable=False),
sa.Column('client_id', sa.Integer(), nullable=False),
sa.Column('title', sa.Unicode(length=250), nullable=False),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.Column('siteresource', sa.Boolean(), nullable=False),
sa.Column('trusted', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('clientteamaccess',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('org_id', sa.Integer(), nullable=True),
sa.Column('client_id', sa.Integer(), nullable=False),
sa.Column('access_level', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
sa.ForeignKeyConstraint(['org_id'], ['organization.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('resourceaction',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('name', sa.Unicode(length=20), nullable=False),
sa.Column('resource_id', sa.Integer(), nullable=False),
sa.Column('title', sa.Unicode(length=250), nullable=False),
sa.Column('description', sa.UnicodeText(), nullable=False),
sa.ForeignKeyConstraint(['resource_id'], ['resource.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name','resource_id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('resourceaction')
op.drop_table('clientteamaccess')
op.drop_table('resource')
op.drop_table('teamclientpermissions')
op.drop_table('authtoken')
op.drop_table('userclientpermissions')
op.drop_table('team_membership')
op.drop_table('authcode')
op.drop_table('userflashmessage')
op.drop_table('passwordresetrequest')
op.drop_table('userexternalid')
op.drop_table('team')
op.drop_table('userphone')
op.drop_table('useremail')
op.drop_table('userphoneclaim')
op.drop_table('useroldid')
op.drop_table('useremailclaim')
op.drop_table('noticetype')
op.drop_table('permission')
op.drop_table('client')
op.drop_table('smsmessage')
op.drop_table('organization')
op.drop_table('user')
### end Alembic commands ###
|
|
#
# Copyright 2017 CNIT - Consorzio Nazionale Interuniversitario per le Telecomunicazioni
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
import jsonfield
from StringIO import StringIO
import zipfile
import json
import yaml
from lib.util import Util
from model_utils.managers import InheritanceManager
import logging
import os, shutil, git
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('models.py')
project_types = {}
class Project(models.Model):
""" Base class for project types
data_project stores a validated JSON representation of the project
get_dataproject() method returns the python dict representation of the project
"""
owner = models.ForeignKey('sf_user.CustomUser', db_column='owner')
name = models.CharField(max_length=20, default='')
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(default=timezone.now, blank=True, null=True)
info = models.TextField(default='No info')
    data_project = jsonfield.JSONField(default=dict)
"""Stores a validated JSON representation of the project"""
validated = models.BooleanField(default=False)
#InheritanceManager
objects = InheritanceManager()
@classmethod
def get_project_types(cls):
global project_types
return project_types
@classmethod
def add_project_type(cls, type, my_class):
global project_types
        project_types[type] = my_class
@classmethod
def create_project(cls, name, user, validated, info, data_project):
        project = cls.objects.create(name=name, owner=user, validated=validated,
                                     info=info, data_project=data_project)
return project
@classmethod
def get_graph_model(cls, file_path):
"""Returns the model of the graph of the project type as a yaml object
Returns an empty dict if there is no file with the model
"""
# file_path = GRAPH_MODEL_FULL_NAME
graph_model = {}
try:
graph_model = Util.loadyamlfile(file_path)
except Exception as e:
log.exception(e)
return graph_model
def get_type(self):
return "Base"
#@classmethod
def get_dataproject(self):
""" Return the python dict representation of the project data
"""
#current_data = json.loads(self.data_project)
current_data = Util.json_loads_byteified(self.data_project)
return current_data
#@classmethod
def get_overview_data(self):
result = {
'owner': self.owner,
'name': self.name,
'updated_date': self.updated_date,
'info': self.info,
'validated': self.validated
}
return result
def set_data_project(self, new_data, validated):
self.data_project = new_data
self.set_validated(validated)
self.update()
def update(self):
self.updated_date = timezone.now()
self.save()
def __str__(self):
return self.name
def edit_graph_positions(self, positions):
# print positions
try:
current_data = json.loads(self.data_project)
if 'positions' not in current_data:
current_data['positions'] = {}
if 'vertices' not in current_data['positions']:
current_data['positions']['vertices'] = {}
if 'vertices' in positions:
current_data['positions']['vertices'].update(positions['vertices'])
self.data_project = current_data
self.update()
result = True
except Exception as e:
log.debug(e)
result = False
return result
def get_descriptors(self, type_descriptor):
"""Returns all descriptors of a given type"""
try:
current_data = json.loads(self.data_project)
result = current_data[type_descriptor]
except Exception as e:
log.debug(e)
result = {}
return result
def get_descriptor(self, descriptor_id, type_descriptor):
"""Returns a specific descriptor"""
try:
current_data = json.loads(self.data_project)
result = current_data[type_descriptor][descriptor_id]
            log.debug('%s %s %s', descriptor_id, type_descriptor, result)
except Exception as e:
log.debug(e)
result = {}
return result
def delete_descriptor(self, type_descriptor, descriptor_id):
try:
log.debug('delete descriptor'+ descriptor_id + ' ' + type_descriptor)
current_data = json.loads(self.data_project)
del (current_data[type_descriptor][descriptor_id])
self.data_project = current_data
self.update()
result = True
except Exception as e:
log.debug(e)
result = False
return result
def clone_descriptor(self, type_descriptor, descriptor_id, new_id):
try:
current_data = json.loads(self.data_project)
descriptor = current_data[type_descriptor][descriptor_id]
new_descriptor = self.get_clone_descriptor(descriptor, type_descriptor, new_id)
current_data[type_descriptor][new_id] = new_descriptor
self.data_project = current_data
self.update()
result = True
except Exception as e:
log.debug(e)
result = False
return result
def edit_descriptor(self, type_descriptor, descriptor_id, new_data, data_type):
try:
            ##FIXME this part needs a complete rewrite; it has several gaps
#log.info('editing ',+ descriptor_id + ' ' + type_descriptor + ' ' + data_type)
current_data = json.loads(self.data_project)
new_descriptor = new_data
if data_type == 'json':
new_descriptor = json.loads(new_data)
elif data_type == 'yaml':
                yaml_object = yaml.safe_load(new_data)  # safe_load avoids constructing arbitrary objects
new_descriptor = json.loads(Util.yaml2json(yaml_object))
            if type_descriptor not in ('click', 'oshi', 'cran', 'srv6_net_prog'):
reference_schema = self.get_json_schema_by_type(type_descriptor)
Util.validate_json_schema(reference_schema, new_descriptor)
current_data[type_descriptor][descriptor_id] = new_descriptor
self.data_project = current_data
self.update()
result = True
except Exception as e:
log.debug(e)
result = False
return result
def get_zip_archive(self):
in_memory = StringIO()
try:
current_data = json.loads(self.data_project)
zip = zipfile.ZipFile(in_memory, "w", zipfile.ZIP_DEFLATED)
for desc_type in current_data:
for current_desc in current_data[desc_type]:
zip.writestr(current_desc + '.json', json.dumps(current_data[desc_type][current_desc]))
zip.close()
except Exception as e:
log.debug(e)
in_memory.flush()
return in_memory
def get_positions(self):
"""Returns the positions of nodes"""
        positions = {}
        try:
            current_data = json.loads(self.data_project)
            if 'positions' in current_data:
                positions = current_data['positions']
except Exception as e:
log.debug(e)
return positions
def get_deployment_descriptor(self, **kwargs):
"""Returns the deployment descriptor"""
raise NotImplementedError
def get_node_overview(self, **kwargs):
"""Returns the node overview"""
raise NotImplementedError
def get_all_ns_descriptors(self, nsd_id):
raise NotImplementedError
def translate_push_ns_on_repository(self, translator, nsd_id, repository, **kwargs):
raise NotImplementedError
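# --- Illustrative usage sketch (added; not part of the original module) ---
# A minimal, hypothetical use of the Project API defined above; all names
# and values below are placeholders, not data from this project.
#
# project = Project.create_project(name='demo', user=some_user, validated=False,
#                                  info='example project', data_project={'nsd': {}})
# data = project.get_dataproject()           # python dict of the project JSON
# project.set_data_project(data, validated=True)  # store and mark validated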
class ProjectStateless(Project):
def get_descriptors(self, type_descriptor):
"""Returns all descriptors of a given type"""
raise NotImplementedError
def delete_descriptor(self, type_descriptor, descriptor_id):
raise NotImplementedError
def get_all_ns_descriptors(self, nsd_id):
pass
def translate_push_ns_on_repository(self, translator, nsd_id, repository, **kwargs):
pass
def get_deployment_descriptor(self, **kwargs):
pass
def get_node_overview(self, **kwargs):
pass
def get_dataproject(self):
raise NotImplementedError
def get_overview_data(self):
raise NotImplementedError
class Repository(models.Model):
""" Repository
"""
name = models.CharField(max_length=20, default='')
base_url = models.TextField(default='')
last_update = models.DateTimeField(default=timezone.now)
DIR_NAME = "/tmp/git_repo/"
def fetch_repository(self):
"""
:return: git.remote.FetchInfo object
"""
if os.path.isdir(self.DIR_NAME):
shutil.rmtree(self.DIR_NAME)
os.mkdir(self.DIR_NAME)
repo = git.Repo.init(self.DIR_NAME)
origin = repo.create_remote('origin', self.base_url)
origin.fetch()
fetch_info = origin.pull('master')[0]
return fetch_info
def push_repository(self, msg=None):
"""
:param msg: Commit message
:return: git.remote.PushInfo object
"""
repo = git.Repo.init(self.DIR_NAME)
origin = repo.remote('origin')
repo.git.add('--all')
        repo.git.commit('-m', '[RDCL3D commit] ' + (msg or ''))
push_info = origin.push('master')[0]
return push_info
def to_json(self):
"""
:return: JSON data of object
"""
return {
'name': self.name,
            'base_url': self.base_url.rstrip('/'),
'last_update': self.last_update
}
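# --- Illustrative usage sketch (added; not part of the original module) ---
# A minimal, hypothetical round trip with the Repository model above. The
# URL is a placeholder, not a value from this project.
#
# repo = Repository(name='demo', base_url='https://example.com/repo.git')
# repo.fetch_repository()                    # clone/pull master into DIR_NAME
# # ... write or edit descriptor files under Repository.DIR_NAME ...
# repo.push_repository(msg='update descriptors')  # commit --all, push master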
|
|
# -*- coding: utf-8 -*-
# uft.py --- Unitary fourier transform
# Taken from skimage.restoration and converted for use with pyfftw
# b72a960 on Dec 16, 2014
"""Function of unitary fourier transform and utilities
This module implements the unitary fourier transform, also known as
the ortho-normal transform. It is especially useful for convolution
[1], as it respects the Parseval equality. The value of the null
frequency is equal to
.. math:: \frac{1}{\sqrt{n}} \sum_i x_i
so the Fourier transform has the same energy as the original image
(see ``image_quad_norm`` function). The transform is applied from the
last axis for performance (assuming a C-order array input).
References
----------
.. [1] B. R. Hunt "A matrix theory proof of the discrete convolution
theorem", IEEE Trans. on Audio and Electroacoustics,
vol. au-19, no. 4, pp. 285-288, dec. 1971
"""
from __future__ import division, print_function
import numpy as np
try:
from pyfftw.interfaces.numpy_fft import fftn, ifftn, rfftn, irfftn
import pyfftw
# Turn on the cache for optimum performance
pyfftw.interfaces.cache.enable()
except ImportError:
from numpy.fft import fftn, ifftn, rfftn, irfftn
__keywords__ = "fft, Fourier Transform, orthonormal, unitary"
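def _parseval_demo():
    """Minimal sketch (added for illustration; not in the original module)
    of the Parseval property stated in the module docstring: the unitary
    transform preserves the quadratic norm (energy) of the input.
    """
    x = np.random.rand(8, 8)
    fx = ufftn(x)  # defined below; resolved at call time
    assert np.allclose(np.sum(np.abs(x) ** 2), np.sum(np.abs(fx) ** 2))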
def ufftn(inarray, dim=None):
"""N-dimensional unitary Fourier transform.
Parameters
----------
inarray : ndarray
The array to transform.
dim : int, optional
The last axis along which to compute the transform. All
axes by default.
Returns
-------
    outarray : ndarray (same shape as inarray)
The unitary N-D Fourier transform of ``inarray``.
Examples
--------
>>> input = np.ones((3, 3, 3))
>>> output = ufftn(input)
>>> np.allclose(np.sum(input) / np.sqrt(input.size), output[0, 0, 0])
True
>>> output.shape
(3, 3, 3)
"""
if dim is None:
dim = inarray.ndim
outarray = fftn(inarray, axes=range(-dim, 0))
return outarray / np.sqrt(np.prod(inarray.shape[-dim:]))
def uifftn(inarray, dim=None):
"""N-dimensional unitary inverse Fourier transform.
Parameters
----------
inarray : ndarray
The array to transform.
dim : int, optional
The last axis along which to compute the transform. All
axes by default.
Returns
-------
    outarray : ndarray (same shape as inarray)
The unitary inverse N-D Fourier transform of ``inarray``.
Examples
--------
>>> input = np.ones((3, 3, 3))
>>> output = uifftn(input)
>>> np.allclose(np.sum(input) / np.sqrt(input.size), output[0, 0, 0])
True
>>> output.shape
(3, 3, 3)
"""
if dim is None:
dim = inarray.ndim
outarray = ifftn(inarray, axes=range(-dim, 0))
return outarray * np.sqrt(np.prod(inarray.shape[-dim:]))
def urfftn(inarray, dim=None):
"""N-dimensional real unitary Fourier transform.
This transform considers the Hermitian property of the transform on
real-valued input.
Parameters
----------
inarray : ndarray, shape (M, N, ..., P)
The array to transform.
dim : int, optional
The last axis along which to compute the transform. All
axes by default.
Returns
-------
outarray : ndarray, shape (M, N, ..., P / 2 + 1)
The unitary N-D real Fourier transform of ``inarray``.
Notes
-----
The ``urfft`` functions assume an input array of real
values. Consequently, the output has a Hermitian property and
redundant values are not computed or returned.
Examples
--------
>>> input = np.ones((5, 5, 5))
>>> output = urfftn(input)
>>> np.allclose(np.sum(input) / np.sqrt(input.size), output[0, 0, 0])
True
>>> output.shape
(5, 5, 3)
"""
if dim is None:
dim = inarray.ndim
outarray = rfftn(inarray, axes=range(-dim, 0))
return outarray / np.sqrt(np.prod(inarray.shape[-dim:]))
def uirfftn(inarray, dim=None, shape=None):
"""N-dimensional inverse real unitary Fourier transform.
This transform considers the Hermitian property of the transform
from complex to real input.
Parameters
----------
inarray : ndarray
The array to transform.
dim : int, optional
The last axis along which to compute the transform. All
axes by default.
shape : tuple of int, optional
The shape of the output. The shape of ``rfft`` is ambiguous in
case of odd-valued input shape. In this case, this parameter
should be provided. See ``irfftn``.
Returns
-------
outarray : ndarray
The unitary N-D inverse real Fourier transform of ``inarray``.
Notes
-----
The ``uirfft`` function assumes that the output array is
real-valued. Consequently, the input is assumed to have a Hermitian
property and redundant values are implicit.
Examples
--------
>>> input = np.ones((5, 5, 5))
>>> output = uirfftn(urfftn(input), shape=input.shape)
>>> np.allclose(input, output)
True
>>> output.shape
(5, 5, 5)
"""
if dim is None:
dim = inarray.ndim
outarray = irfftn(inarray, shape, axes=range(-dim, 0))
return outarray * np.sqrt(np.prod(outarray.shape[-dim:]))
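def _odd_shape_demo():
    """Sketch (added for illustration, assuming NumPy semantics for
    ``irfftn``) of the shape ambiguity noted in ``uirfftn``: a half
    spectrum of length 3 can come from a signal of length 4 or 5, so the
    default guess 2 * (3 - 1) = 4 is wrong for odd input and ``shape``
    must be given explicitly.
    """
    x = np.ones(5)
    half = rfftn(x)                          # length 5 // 2 + 1 == 3
    assert irfftn(half).shape == (4,)        # default assumes even input
    assert irfftn(half, (5,)).shape == (5,)  # explicit shape recovers 5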
def ufft2(inarray):
"""2-dimensional unitary Fourier transform.
Compute the Fourier transform on the last 2 axes.
Parameters
----------
inarray : ndarray
The array to transform.
Returns
-------
outarray : ndarray (same shape as inarray)
The unitary 2-D Fourier transform of ``inarray``.
See Also
--------
uifft2, ufftn, urfftn
Examples
--------
>>> input = np.ones((10, 128, 128))
>>> output = ufft2(input)
>>> np.allclose(np.sum(input[1, ...]) / np.sqrt(input[1, ...].size),
... output[1, 0, 0])
True
>>> output.shape
(10, 128, 128)
"""
return ufftn(inarray, 2)
def uifft2(inarray):
"""2-dimensional inverse unitary Fourier transform.
Compute the inverse Fourier transform on the last 2 axes.
Parameters
----------
inarray : ndarray
The array to transform.
Returns
-------
outarray : ndarray (same shape as inarray)
The unitary 2-D inverse Fourier transform of ``inarray``.
See Also
--------
    ufft2, uifftn, uirfftn
Examples
--------
>>> input = np.ones((10, 128, 128))
>>> output = uifft2(input)
>>> np.allclose(np.sum(input[1, ...]) / np.sqrt(input[1, ...].size),
... output[0, 0, 0])
True
>>> output.shape
(10, 128, 128)
"""
return uifftn(inarray, 2)
def urfft2(inarray):
"""2-dimensional real unitary Fourier transform
Compute the real Fourier transform on the last 2 axes. This
transform considers the Hermitian property of the transform from
complex to real-valued input.
Parameters
----------
inarray : ndarray, shape (M, N, ..., P)
The array to transform.
Returns
-------
    outarray : ndarray, shape (M, N, ..., P / 2 + 1)
The unitary 2-D real Fourier transform of ``inarray``.
See Also
--------
ufft2, ufftn, urfftn
Examples
--------
>>> input = np.ones((10, 128, 128))
>>> output = urfft2(input)
>>> np.allclose(np.sum(input[1,...]) / np.sqrt(input[1,...].size),
... output[1, 0, 0])
True
>>> output.shape
(10, 128, 65)
"""
return urfftn(inarray, 2)
def uirfft2(inarray, shape=None):
"""2-dimensional inverse real unitary Fourier transform.
Compute the real inverse Fourier transform on the last 2 axes.
This transform considers the Hermitian property of the transform
from complex to real-valued input.
Parameters
----------
    inarray : ndarray, shape (M, N, ..., P)
        The array to transform.
    shape : tuple of int, optional
        The shape of the output (last two axes); required to disambiguate
        odd-valued input shapes. See ``uirfftn``.
Returns
-------
outarray : ndarray, shape (M, N, ..., 2 * (P - 1))
The unitary 2-D inverse real Fourier transform of ``inarray``.
See Also
--------
urfft2, uifftn, uirfftn
Examples
--------
>>> input = np.ones((10, 128, 128))
    >>> output = uirfft2(urfft2(input), shape=input.shape[-2:])
>>> np.allclose(input, output)
True
>>> output.shape
(10, 128, 128)
"""
return uirfftn(inarray, 2, shape=shape)
def image_quad_norm(inarray):
"""Return the quadratic norm of images in Fourier space.
This function detects whether the input image satisfies the
Hermitian property.
Parameters
----------
inarray : ndarray
Input image. The image data should reside in the final two
axes.
Returns
-------
norm : float
The quadratic norm of ``inarray``.
Examples
--------
>>> input = np.ones((5, 5))
>>> image_quad_norm(ufft2(input)) == np.sum(np.abs(input)**2)
True
>>> image_quad_norm(ufft2(input)) == image_quad_norm(urfft2(input))
True
"""
# If there is a Hermitian symmetry
if inarray.shape[-1] != inarray.shape[-2]:
return 2 * np.sum(np.sum(np.abs(inarray) ** 2, axis=-1), axis=-1) - np.sum(
np.abs(inarray[..., 0]) ** 2, axis=-1
)
else:
return np.sum(np.sum(np.abs(inarray) ** 2, axis=-1), axis=-1)
def ir2tf(imp_resp, shape, dim=None, is_real=True):
"""Compute the transfer function of an impulse response (IR).
    This function applies the necessary zero-padding, zero convention,
    correct FFT, etc., to compute the transfer function of the IR. It is
    meant to be used with a unitary Fourier transform for the signal
    (``ufftn`` or equivalent).
Parameters
----------
imp_resp : ndarray
The impulse responses.
shape : tuple of int
        A tuple of integers corresponding to the target shape of the
transfer function.
dim : int, optional
The last axis along which to compute the transform. All
axes by default.
is_real : boolean (optional, default True)
        If True, imp_resp is assumed to be real-valued and the Hermitian
        property is used with the rfftn Fourier transform.
Returns
-------
y : complex ndarray
The transfer function of shape ``shape``.
See Also
--------
ufftn, uifftn, urfftn, uirfftn
Examples
--------
>>> np.all(np.array([[4, 0], [0, 0]]) == ir2tf(np.ones((2, 2)), (2, 2)))
True
>>> ir2tf(np.ones((2, 2)), (512, 512)).shape == (512, 257)
True
>>> ir2tf(np.ones((2, 2)), (512, 512), is_real=False).shape == (512, 512)
True
Notes
-----
    The input array can be composed of multidimensional IRs, with an
    arbitrary number of IRs. The individual IRs must be accessed through
    the first axes. The last ``dim`` axes contain the space definition.
"""
if not dim:
dim = imp_resp.ndim
# Zero padding and fill
irpadded = np.zeros(shape)
irpadded[tuple([slice(0, s) for s in imp_resp.shape])] = imp_resp
# Roll for zero convention of the fft to avoid the phase
# problem. Work with odd and even size.
for axis, axis_size in enumerate(imp_resp.shape):
if axis >= imp_resp.ndim - dim:
irpadded = np.roll(irpadded, shift=-int(np.floor(axis_size / 2)), axis=axis)
if is_real:
return rfftn(irpadded, axes=range(-dim, 0))
else:
return fftn(irpadded, axes=range(-dim, 0))
def laplacian(ndim, shape, is_real=True):
"""Return the transfer function of the Laplacian.
Laplacian is the second order difference, on row and column.
Parameters
----------
ndim : int
The dimension of the Laplacian.
shape : tuple, shape
The support on which to compute the transfer function
is_real : boolean (optional, default True)
If True, imp_resp is assumed to be real-valued and
the Hermitian property is used with rfftn Fourier transform
to return the transfer function.
Returns
-------
tf : array_like, complex
The transfer function.
impr : array_like, real
The Laplacian.
Examples
--------
>>> tf, ir = laplacian(2, (32, 32))
>>> np.all(ir == np.array([[0, -1, 0], [-1, 4, -1], [0, -1, 0]]))
True
>>> np.all(tf == ir2tf(ir, (32, 32)))
True
"""
impr = np.zeros([3] * ndim)
for dim in range(ndim):
idx = tuple([slice(1, 2)] * dim + [slice(None)] + [slice(1, 2)] * (ndim - dim - 1))
impr[idx] = np.array([-1.0, 0.0, -1.0]).reshape(
[-1 if i == dim else 1 for i in range(ndim)]
)
    impr[tuple([slice(1, 2)] * ndim)] = 2.0 * ndim
return ir2tf(impr, shape, is_real=is_real), impr
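def _regularized_tf_demo():
    """Minimal usage sketch (added; not from the original module) combining
    ``ir2tf`` and ``laplacian`` as a Wiener-style deconvolution would: the
    blur transfer function and the Laplacian regularizer are computed on
    the same grid so they can be combined elementwise. The 3x3 mean filter
    is an arbitrary example PSF.
    """
    psf = np.ones((3, 3)) / 9.0
    blur_tf = ir2tf(psf, (64, 64))      # transfer function of the blur
    reg_tf, _ = laplacian(2, (64, 64))  # Laplacian transfer function
    assert blur_tf.shape == reg_tf.shape == (64, 33)  # rfftn half spectrum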
|
|
# (C) Copyright 2017 Inova Development Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Implementation of the explorer cmd group.
"""
from __future__ import print_function, absolute_import
import click
from smipyping._explore import Explorer
from smipyping._common import StrList, fold_cell
from smipyping._logging import AUDIT_LOGGER_NAME, get_logger
from .smicli import cli, CMD_OPTS_TXT
from ._click_common import print_table, get_multiple_target_ids, \
validate_target_ids
@cli.group('explorer', options_metavar=CMD_OPTS_TXT)
def explorer_group():
"""
Command group to explore providers.
    This group of commands provides the tools for a general explore of all
    providers defined in the database.
    The explore operation queries the providers and generates information on
    their state and status, including whether they are active, their
    namespaces, profiles, etc.
    It also normally generates a log of all activity.
    This information is generated by accessing the provider itself.
    These subcommands automatically validate selected data from the server
    against the database and create an audit log entry for any changes. The
fields currently tested are:
* SMIVersion
"""
pass # pylint: disable=unnecessary-pass
@explorer_group.command('all', options_metavar=CMD_OPTS_TXT)
@click.option('--ping/--no-ping', default=True,
              help='Ping the provider as an initial step in the test. '
'Default: ping')
@click.option('--thread/--no-thread', default=True,
              help='Run test multithreaded. Much faster. This option is only '
                   'here to aid debugging if issues occur with multithreading. '
                   'Default: thread')
@click.option('-i', '--include-disabled', is_flag=True, default=False,
help='Include hosts marked disabled in the targets table.')
@click.option('-d', '--detail', type=click.Choice(['full', 'brief', 'all']),
default='full',
help='Generate full or brief (fewer columns) report. Full '
'report includes namespaces, SMI_profiles, etc. '
'(Default: full).')
@click.pass_obj
def explore_all(context, **options):
"""
Explore all targets in database.
Execute the general explore operation on some or all the providers in the
database and generate a report on the results.
This command explores the general characteristics of the server including:
* Company - From the targets database
    * Product - From the targets database
* SMI Profiles - As defined by the server itself
    * Interop Namespace - As defined by the server
* Status - General status (i.e. CIMPing status)
* Time - Time to execute the tests
General Server information
    It executes the server requests in parallel mode (multi-threaded) or, by
    setting a command line option, in a single thread (if for some reason
    there is an issue with the multithreading).
    It generates a report to the defined output as a table with the
    formatting defined by the global format option. The default is to thread
    the requests, which speeds up the explore significantly.
There is an option to ping the server before executing the
    explore, simply to speed up the process for servers that are completely
    unavailable. The default is to ping as the first step.
ex: smicli explore all
"""
context.execute_cmd(lambda: cmd_explore_all(context, **options))
@explorer_group.command('ids', options_metavar=CMD_OPTS_TXT)
@click.argument('target-ids', type=str, metavar='TargetIDs', required=True,
nargs=-1)
@click.option('--ping/--no-ping', default=True,
              help='Ping the provider as an initial step in the test. '
'Default: ping')
@click.option('--thread/--no-thread', default=True,
help='Run test multithreaded. Much faster. '
'Default: thread')
@click.option('-d', '--detail', type=click.Choice(['full', 'brief', 'all']),
default='full',
              help='Generate all or brief (fewer columns) report '
                   '(Default: full).')
@click.pass_obj
def explore_ids(context, target_ids, **options):
"""
Explore a list of target IDs.
Execute the explorer on the providers defined by id. Multiple
ids may be supplied (ex. id 5 6 7)
ex: smicli explorer ids 6 7 8
smicli explorer ids ?
"""
context.execute_cmd(lambda: cmd_explore_ids(context, target_ids, **options))
######################################################################
#
# Common functions for this group
#
######################################################################
######################################################################
#
# Action functions
#
######################################################################
def cmd_explore_all(context, **options):
"""Explore all of the providers defined in the current database and
report results.
"""
# TODO fix the log_level processing.
explorer = Explorer('smicli', context.targets_tbl,
logfile=context.log_file,
log_level=None,
verbose=context.verbose,
ping=options['ping'],
threaded=options['thread'],
output_format=context.output_format)
if options['include_disabled']:
targets = context.targets_tbl.keys()
else:
targets = context.targets_tbl.get_enabled_targetids()
servers = explorer.explore_servers(targets)
validate_servers(servers, context.targets_tbl)
# print results
# TODO make this part of normal print services
context.spinner.stop()
report_server_info(servers, context.targets_tbl, context.output_format,
report=options['detail'])
def cmd_explore_ids(context, target_ids, **options):
"""
Explore the wbem server defined by the Id provided
"""
target_ids = get_multiple_target_ids(context, target_ids, options)
if target_ids is None:
return
validate_target_ids(context, target_ids)
explorer = Explorer('smicli', context.targets_tbl,
verbose=context.verbose,
ping=options['ping'],
threaded=options['thread'],
logfile=context.log_file,
log_level=context.log_level,
output_format=context.output_format)
servers = explorer.explore_servers(target_ids)
validate_servers(servers, context.targets_tbl)
context.spinner.stop()
report_server_info(servers, context.targets_tbl,
context.output_format,
report=options['detail'])
def validate_servers(servers, targets_tbl):
"""
Validate the fields in the targetid against the data received from the
server. This allows updating the following fields from data received
from the server:
* SMIVERSION
* interop namespace
"""
# TODO this should be in explorer, not in the cmd_processor.
for server_tuple in servers:
server = server_tuple.server
status = server_tuple.status
target_id = server_tuple.target_id
target = targets_tbl.get_target(target_id)
if server is not None and status == 'OK':
try:
svr_profile_list = smi_versions(server_tuple)
except TypeError:
# ignore this server.
continue
            svr_profile_list = sorted(svr_profile_list)  # sorted() returns a new list
target_smi_profiles = target['SMIVersion']
regex = r'^[0-9.]*$'
server_smi_profiles = StrList(svr_profile_list, match=regex)
target_smi_profiles = StrList(target_smi_profiles, match=regex)
if not server_smi_profiles.equal(target_smi_profiles):
changes = {"SMIVersion": server_smi_profiles.str_by_sep("/")}
try:
targets_tbl.update_fields(target_id, changes)
change_str = ""
for key, value in changes.items():
change_str += "%s:%s " % (key, value)
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.info('Targets Table TargetID %s, updated '
'fields %s', target_id, change_str)
click.echo('Updated targetid=%s updated fields %s' %
(target_id, change_str))
except Exception as ex:
raise click.ClickException('Targets DB update failed '
'targetid=%s changes=%r. '
'Exception=%s' %
(target_id, changes, ex))
##############################################################
#
# Table generation functions
#
############################################################
def report_server_info(servers, targets_tbl, output_format,
table_format='table',
columns=None, report='full'):
""" Display a table of info from the server scan
"""
rows = []
if report == 'full':
headers = ['Id', 'Url', 'Company', 'Product', 'Vers',
'SMI Profiles', 'Interop_ns', 'Status', 'time']
elif report == 'all':
headers = targets_tbl.fields
else:
headers = ['Id', 'Url', 'Company', 'Product',
'Status', 'time']
servers.sort(key=lambda tup: int(tup.target_id))
for server_tuple in servers:
url = server_tuple.url
server = server_tuple.server
status = server_tuple.status
target_id = server_tuple.target_id
target = targets_tbl.get_target(target_id)
version = ''
interop_ns = ''
smi_profiles = ''
if server is not None and status == 'OK':
version = server.version
interop_ns = server.interop_ns
try:
smi_profile_list = smi_versions(server_tuple)
except TypeError:
smi_profile_list = []
            if smi_profile_list is not None:
                cell_str = ", ".join(sorted(smi_profile_list))
smi_profiles = fold_cell(cell_str, 14)
disp_time = None
if server_tuple.time <= 60:
disp_time = "%.2fs" % (round(server_tuple.time, 1))
else:
disp_time = "%.2fm" % (server_tuple.time / 60)
row = []
if 'Id' in headers:
row.append(target_id)
if 'Url' in headers:
row.append(url)
if 'Company' in headers:
            row.append(fold_cell(target['CompanyName'], 11))
if 'Product' in headers:
            row.append(fold_cell(target['Product'], 8))
if 'Vers' in headers:
row.append(version)
if 'SMI Profiles' in headers:
row.append(smi_profiles)
if 'Interop_ns' in headers:
row.append(interop_ns)
if 'Status' in headers:
row.append(server_tuple.status)
if 'time' in headers:
row.append(disp_time)
rows.append(row)
print_table(rows, headers=headers,
title='Server Explorer Report:',
table_format=output_format)
def smi_versions(server_tuple):
"""
Get the smi version used by this server from the SNIA profile
information on the server. Uses pywbem server.get_selected_profiles to get
the complete list of profiles.
This code accounts for the issue that some profile instances may be
incorrectly defined and may generate an error in the process.
If the SMI-S profile cannot be found in the registered profiles an
exception is generated (TypeError)
Parameters:
server_tuple (named tuple ServerInfoTuple):
Named tuple that defines the target id and server object. The
server object is used to get the profiles from the server
Returns:
        List of the property RegisteredVersion for all profiles that have
        registered org 'SNIA' and registered name 'SMI-S'.
    Raises:
        TypeError: if get_selected_profiles raises a TypeError exception
"""
server = server_tuple.server
try:
snia_server_profiles = server.get_selected_profiles(
registered_org='SNIA', registered_name='SMI-S')
except TypeError as te:
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.error('Invalid profile definition caused exception '
'for targetid=%s, url=%s. exception %s: %s',
server_tuple.target_id,
server.conn.url, te.__class__.__name__, te)
click.echo('ERROR: Invalid profile definition caused exception for '
'targetid=%s url=%s. '
'exception: %s: %s' % (server_tuple.target_id,
server.conn.url,
te.__class__.__name__, te))
raise te
versions = [inst['RegisteredVersion'] for inst in snia_server_profiles]
return versions
def print_smi_profile_info(servers, user_data, table_format):
"""
Generates a table of smi profile information listing the smi profiles
Parameters:
servers: list of ServerInfoTuple entries
"""
table_data = []
table_hdr = [' Id', 'Url', 'Company', 'Product', 'SMI Profiles']
table_data.append(table_hdr)
for server_tuple in servers:
if server_tuple.status == 'OK':
target_id = server_tuple.target_id
target = user_data.get_target(target_id)
try:
                versions = smi_versions(server_tuple)
# Uses very broad exception because smi_versions can generate
# some strange errors because of bad definitions in the server.
except Exception as ex: # pylint: disable=broad-except
audit_logger = get_logger(AUDIT_LOGGER_NAME)
audit_logger.error('Exception %s in smi_version %s. Ignored',
ex, server_tuple)
versions = []
line = [target['TargetID'],
server_tuple.url,
target['CompanyName'],
target['Product']]
if versions is not None:
cell_str = ", ". join(sorted(versions))
line.append(fold_cell(cell_str, 14))
table_data.append(line)
print_table(table_data, headers=table_hdr,
title='Display SMI Profile Information',
table_format=table_format)
|
|
# Tests of streamgapdf implementation, impulse tests moved to
# test_streamgapdf_impulse.py
import numpy
from scipy import integrate
import pytest
sdf_sanders15= None #so we can set this up and then use in other tests
sdf_sanders15_unp= None #so we can set this up and then use in other tests
sdfl_sanders15= None #so we can set this up and then use in other tests
sdfl_sanders15_unp= None #so we can set this up and then use in other tests
# Put seed in first function, so the seed gets set even if other test files
# were run first
def test_setupimpact_error():
numpy.random.seed(1)
#Imports
from galpy.df import streamgapdf
from galpy.orbit import Orbit
from galpy.potential import LogarithmicHaloPotential
from galpy.actionAngle import actionAngleIsochroneApprox
from galpy.util import conversion #for unit conversions
lp= LogarithmicHaloPotential(normalize=1.,q=0.9)
aAI= actionAngleIsochroneApprox(pot=lp,b=0.8)
prog_unp_peri= Orbit([2.6556151742081835,
0.2183747276300308,
0.67876510797240575,
-2.0143395648974671,
-0.3273737682604374,
0.24218273922966019])
V0, R0= 220., 8.
sigv= 0.365*(10./2.)**(1./3.) # km/s
with pytest.raises(IOError) as excinfo:
dum= streamgapdf(sigv/V0,progenitor=prog_unp_peri,pot=lp,aA=aAI,
leading=False,nTrackChunks=26,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0,
impactb=0.,
subhalovel=numpy.array([6.82200571,132.7700529,
149.4174464])/V0,
timpact=0.88/conversion.time_in_Gyr(V0,R0),
impact_angle=-2.34)
# Should be including these:
# GM=10.**-2.\
# /conversion.mass_in_1010msol(V0,R0),
# rs=0.625/R0)
return None
def test_leadingwtrailingimpact_error():
#Imports
from galpy.df import streamgapdf
from galpy.orbit import Orbit
from galpy.potential import LogarithmicHaloPotential
from galpy.actionAngle import actionAngleIsochroneApprox
from galpy.util import conversion #for unit conversions
lp= LogarithmicHaloPotential(normalize=1.,q=0.9)
aAI= actionAngleIsochroneApprox(pot=lp,b=0.8)
prog_unp_peri= Orbit([2.6556151742081835,
0.2183747276300308,
0.67876510797240575,
-2.0143395648974671,
-0.3273737682604374,
0.24218273922966019])
V0, R0= 220., 8.
sigv= 0.365*(10./2.)**(1./3.) # km/s
with pytest.raises(ValueError) as excinfo:
dum= streamgapdf(sigv/V0,progenitor=prog_unp_peri,pot=lp,aA=aAI,
leading=True,nTrackChunks=26,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0,
impactb=0.,
subhalovel=numpy.array([6.82200571,132.7700529,
149.4174464])/V0,
timpact=0.88/conversion.time_in_Gyr(V0,R0),
impact_angle=-2.34,
GM=10.**-2.\
/conversion.mass_in_1010msol(V0,R0),
rs=0.625/R0)
return None
def test_trailingwleadingimpact_error():
#Imports
from galpy.df import streamgapdf
from galpy.orbit import Orbit
from galpy.potential import LogarithmicHaloPotential
from galpy.actionAngle import actionAngleIsochroneApprox
from galpy.util import conversion #for unit conversions
lp= LogarithmicHaloPotential(normalize=1.,q=0.9)
aAI= actionAngleIsochroneApprox(pot=lp,b=0.8)
prog_unp_peri= Orbit([2.6556151742081835,
0.2183747276300308,
0.67876510797240575,
-2.0143395648974671,
-0.3273737682604374,
0.24218273922966019])
V0, R0= 220., 8.
sigv= 0.365*(10./2.)**(1./3.) # km/s
with pytest.raises(ValueError) as excinfo:
dum= streamgapdf(sigv/V0,progenitor=prog_unp_peri,pot=lp,aA=aAI,
leading=False,nTrackChunks=26,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0,
impactb=0.,
subhalovel=numpy.array([6.82200571,132.7700529,
149.4174464])/V0,
timpact=0.88/conversion.time_in_Gyr(V0,R0),
impact_angle=2.34,
GM=10.**-2.\
/conversion.mass_in_1010msol(V0,R0),
rs=0.625/R0)
return None
#Exact setup from Section 5 of Sanders, Bovy, and Erkal (2015); should reproduce those results (which have been checked against a simulation)
def test_sanders15_setup():
#Imports
from galpy.df import streamdf, streamgapdf
from galpy.orbit import Orbit
from galpy.potential import LogarithmicHaloPotential
from galpy.actionAngle import actionAngleIsochroneApprox
from galpy.util import conversion #for unit conversions
lp= LogarithmicHaloPotential(normalize=1.,q=0.9)
aAI= actionAngleIsochroneApprox(pot=lp,b=0.8)
prog_unp_peri= Orbit([2.6556151742081835,
0.2183747276300308,
0.67876510797240575,
-2.0143395648974671,
-0.3273737682604374,
0.24218273922966019])
global sdf_sanders15
V0, R0= 220., 8.
sigv= 0.365*(10./2.)**(1./3.) # km/s
sdf_sanders15= streamgapdf(sigv/V0,progenitor=prog_unp_peri,pot=lp,aA=aAI,
leading=False,nTrackChunks=26,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0,
impactb=0.,
subhalovel=numpy.array([6.82200571,132.7700529,
149.4174464])/V0,
timpact=0.88/conversion.time_in_Gyr(V0,R0),
impact_angle=-2.34,
GM=10.**-2.\
/conversion.mass_in_1010msol(V0,R0),
rs=0.625/R0)
    assert sdf_sanders15 is not None, 'sanders15 streamgapdf setup did not work'
# Also setup the unperturbed model
global sdf_sanders15_unp
sdf_sanders15_unp= streamdf(sigv/V0,progenitor=prog_unp_peri,pot=lp,aA=aAI,
leading=False,nTrackChunks=26,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0)
    assert sdf_sanders15_unp is not None, \
'sanders15 unperturbed streamdf setup did not work'
return None
def test_sanders15_leading_setup():
#Imports
from galpy.df import streamdf, streamgapdf
from galpy.orbit import Orbit
from galpy.potential import LogarithmicHaloPotential, PlummerPotential
from galpy.actionAngle import actionAngleIsochroneApprox
from galpy.util import conversion #for unit conversions
lp= LogarithmicHaloPotential(normalize=1.,q=0.9)
aAI= actionAngleIsochroneApprox(pot=lp,b=0.8)
prog_unp_peri= Orbit([2.6556151742081835,
0.2183747276300308,
0.67876510797240575,
-2.0143395648974671,
-0.3273737682604374,
0.24218273922966019])
global sdfl_sanders15
V0, R0= 220., 8.
sigv= 0.365*(10./2.)**(1./3.) # km/s
# Use a Potential object for the impact
pp= PlummerPotential(amp=10.**-2.\
/conversion.mass_in_1010msol(V0,R0),
b=0.625/R0)
import warnings
from galpy.util import galpyWarning
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always",galpyWarning)
sdfl_sanders15= streamgapdf(sigv/V0,progenitor=prog_unp_peri,
pot=lp,aA=aAI,
leading=True,nTrackChunks=26,
nTrackChunksImpact=29,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0,
impactb=0.,
subhalovel=numpy.array([49.447319,
116.179436,
155.104156])/V0,
timpact=0.88/conversion.time_in_Gyr(V0,R0),
impact_angle=2.09,
subhalopot=pp,
nKickPoints=290,
deltaAngleTrackImpact=4.5,
multi=True) # test multi
    # Should raise a warning because of deltaAngleTrackImpact; might raise others
raisedWarning= False
for wa in w:
raisedWarning= (str(wa.message) == "WARNING: deltaAngleTrackImpact angle range large compared to plausible value")
if raisedWarning: break
assert raisedWarning, 'deltaAngleTrackImpact warning not raised when it should have been'
    assert sdfl_sanders15 is not None, 'sanders15 leading streamgapdf setup did not work'
# Also setup the unperturbed model
global sdfl_sanders15_unp
sdfl_sanders15_unp= streamdf(sigv/V0,progenitor=prog_unp_peri,
pot=lp,aA=aAI,
leading=True,nTrackChunks=26,
nTrackIterations=1,
sigMeanOffset=4.5,
tdisrupt=10.88\
/conversion.time_in_Gyr(V0,R0),
Vnorm=V0,Rnorm=R0)
    assert sdfl_sanders15_unp is not None, \
'sanders15 unperturbed streamdf setup did not work'
return None
# Some very basic tests
def test_nTrackIterations():
assert sdf_sanders15.nTrackIterations == 1, 'nTrackIterations should have been 1'
return None
def test_nTrackChunks():
assert sdf_sanders15._nTrackChunks == 26, 'nTrackChunks should have been 26'
return None
def test_deltaAngleTrackImpact():
assert numpy.fabs(sdf_sanders15._deltaAngleTrackImpact-4.31) < 0.01, 'deltaAngleTrackImpact should have been ~4.31'
return None
# Tests of the track near the impact
def test_trackNearImpact():
# Sanity checks against numbers taken from plots of the simulation
# Make sure we're near 14.5
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[14,0]*sdf_sanders15._ro
-14.5) < 0.2, '14th point along track near the impact is not near 14.5 kpc'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[14,1]*sdf_sanders15._vo
-80) < 3., 'Point along the track near impact near R=14.5 does not have the correct radial velocity'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[14,2]*sdf_sanders15._vo
-220.) < 3., 'Point along the track near impact near R=14.5 does not have the correct tangential velocity'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[14,3]*sdf_sanders15._ro
-0.) < 1., 'Point along the track near impact near R=14.5 does not have the correct vertical height'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[14,4]*sdf_sanders15._vo
-200.) < 5., 'Point along the track near impact near R=14.5 does not have the correct vertical velocity'
# Another one!
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[27,0]*sdf_sanders15._ro
-16.25) < 0.2, '27th point along track near the impact is not near 16.25 kpc'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[27,1]*sdf_sanders15._vo
+130) < 3., 'Point along the track near impact near R=16.25 does not have the correct radial velocity'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[27,2]*sdf_sanders15._vo
-200.) < 3., 'Point along the track near impact near R=16.25 does not have the correct tangential velocity'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[27,3]*sdf_sanders15._ro
+12.) < 1., 'Point along the track near impact near R=16.25 does not have the correct vertical height'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[27,4]*sdf_sanders15._vo
-25.) < 5., 'Point along the track near impact near R=16.25 does not have the correct vertical velocity'
assert numpy.fabs(sdf_sanders15._gap_ObsTrack[27,5]-1.2) < .2, 'Point along the track near impact near R=16.25 does not have the correct azimuth'
return None
def test_interpolatedTrackNearImpact():
# Sanity checks against numbers taken from plots of the simulation
# Make sure we're near X=-10.9
theta= 2.7
assert numpy.fabs(sdf_sanders15._kick_interpTrackX(theta)*sdf_sanders15._ro
+10.9) < 0.2, 'Point along track near the impact at theta=2.7 is not near X=-10.9 kpc'
assert numpy.fabs(sdf_sanders15._kick_interpTrackY(theta)*sdf_sanders15._ro
-6.) < 0.5, 'Point along track near the impact at theta=2.7 is not near Y=6. kpc'
assert numpy.fabs(sdf_sanders15._kick_interpTrackZ(theta)*sdf_sanders15._ro
                      +5.) < 0.5, 'Point along track near the impact at theta=2.7 is not near Z=-5. kpc'
assert numpy.fabs(sdf_sanders15._kick_interpTrackvX(theta)*sdf_sanders15._vo
+180.) < 5, 'Point along track near the impact at theta=2.7 is not near vX=-180 km/s'
assert numpy.fabs(sdf_sanders15._kick_interpTrackvY(theta)*sdf_sanders15._vo
                      +190.) < 5., 'Point along track near the impact at theta=2.7 is not near vY=-190 km/s'
assert numpy.fabs(sdf_sanders15._kick_interpTrackvZ(theta)*sdf_sanders15._vo
-170.) < 5., 'Point along track near the impact at theta=2.7 is not near vZ=170 km/s'
return None
# Test the calculation of the kicks in dv
def test_kickdv():
# Closest one to the impact point, should be close to zero
tIndx= numpy.argmin(numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack\
-sdf_sanders15._impact_angle))
assert numpy.all(numpy.fabs(sdf_sanders15._kick_deltav[tIndx]*sdf_sanders15._vo) < 0.3), 'Kick near the impact point not close to zero'
# The peak, size and location
assert numpy.fabs(numpy.amax(numpy.fabs(sdf_sanders15._kick_deltav[:,0]*sdf_sanders15._vo))-0.35) < 0.06, 'Peak dvx incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(sdf_sanders15._kick_deltav[:,0]*sdf_sanders15._vo)]-sdf_sanders15._impact_angle < 0., 'Location of peak dvx incorrect'
assert numpy.fabs(numpy.amax(numpy.fabs(sdf_sanders15._kick_deltav[:,1]*sdf_sanders15._vo))-0.35) < 0.06, 'Peak dvy incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(sdf_sanders15._kick_deltav[:,1]*sdf_sanders15._vo)]-sdf_sanders15._impact_angle > 0., 'Location of peak dvy incorrect'
assert numpy.fabs(numpy.amax(numpy.fabs(sdf_sanders15._kick_deltav[:,2]*sdf_sanders15._vo))-1.8) < 0.06, 'Peak dvz incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(sdf_sanders15._kick_deltav[:,2]*sdf_sanders15._vo)]-sdf_sanders15._impact_angle > 0., 'Location of peak dvz incorrect'
# Close to zero far from impact point
tIndx= numpy.argmin(numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack\
-sdf_sanders15._impact_angle-1.5))
    assert numpy.all(numpy.fabs(sdf_sanders15._kick_deltav[tIndx]*sdf_sanders15._vo) < 0.3), 'Kick far from the impact point not close to zero'
return None
# Test the calculation of the kicks in dO
def test_kickdO():
from galpy.util import conversion
# Closest one to the impact point, should be close to zero
tIndx= numpy.argmin(numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack\
-sdf_sanders15._impact_angle))
assert numpy.all(numpy.fabs(sdf_sanders15._kick_dOap[tIndx,:3]*conversion.freq_in_Gyr(sdf_sanders15._vo,sdf_sanders15._ro)) < 0.03), 'Kick near the impact point not close to zero'
# The peak, size and location
assert numpy.fabs(numpy.amax(numpy.fabs(sdf_sanders15._kick_dOap[:,0]*conversion.freq_in_Gyr(sdf_sanders15._vo,sdf_sanders15._ro)))-0.085) < 0.01, 'Peak dOR incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(sdf_sanders15._kick_dOap[:,0])]-sdf_sanders15._impact_angle < 0., 'Location of peak dOR incorrect'
assert numpy.fabs(numpy.amax(numpy.fabs(sdf_sanders15._kick_dOap[:,1]*conversion.freq_in_Gyr(sdf_sanders15._vo,sdf_sanders15._ro)))-0.07) < 0.01, 'Peak dOp incorrect'
    assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(sdf_sanders15._kick_dOap[:,1])]-sdf_sanders15._impact_angle < 0., 'Location of peak dOp incorrect'
assert numpy.fabs(numpy.amax(numpy.fabs(sdf_sanders15._kick_dOap[:,2]*conversion.freq_in_Gyr(sdf_sanders15._vo,sdf_sanders15._ro)))-0.075) < 0.01, 'Peak dOz incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(sdf_sanders15._kick_dOap[:,2])]-sdf_sanders15._impact_angle < 0., 'Location of peak dOz incorrect'
# Close to zero far from impact point
tIndx= numpy.argmin(numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack\
-sdf_sanders15._impact_angle-1.5))
    assert numpy.all(numpy.fabs(sdf_sanders15._kick_dOap[tIndx,:3]*conversion.freq_in_Gyr(sdf_sanders15._vo,sdf_sanders15._ro)) < 0.03), 'Kick far from the impact point not close to zero'
return None
def test_kickda():
    # All angle kicks should be small; just test that they are smaller than dO/O close to the impact
nIndx= numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack-sdf_sanders15._impact_angle) < 0.75
assert numpy.all(numpy.fabs(sdf_sanders15._kick_dOap[nIndx,3:]) < 2.*(numpy.fabs(sdf_sanders15._kick_dOap[nIndx,:3]/sdf_sanders15._progenitor_Omega))), 'angle kicks not smaller than the frequency kicks'
return None
# Test the interpolation of the kicks
def test_interpKickdO():
from galpy.util import conversion
freqConv= conversion.freq_in_Gyr(sdf_sanders15._vo,sdf_sanders15._ro)
# Bunch of spot checks at some interesting angles
# Impact angle
theta= sdf_sanders15._impact_angle
assert numpy.fabs(sdf_sanders15._kick_interpdOpar(theta)*freqConv) < 10.**-4., 'Frequency kick at the impact point is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOperp0(theta)*freqConv) < 10.**-4., 'Frequency kick at the impact point is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOperp1(theta)*freqConv) < 10.**-4., 'Frequency kick at the impact point is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOr(theta)*freqConv) < 10.**-4., 'Frequency kick at the impact point is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOp(theta)*freqConv) < 10.**-4., 'Frequency kick at the impact point is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOz(theta)*freqConv) < 10.**-4., 'Frequency kick at the impact point is not zero'
# random one
theta= sdf_sanders15._impact_angle-0.25
    assert numpy.fabs(sdf_sanders15._kick_interpdOpar(theta)*freqConv+0.07) < 0.002, 'Frequency kick near the impact point does not have the expected value'
assert numpy.fabs(sdf_sanders15._kick_interpdOperp0(theta)*freqConv) < 0.002, 'Frequency kick near the impact point is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOperp1(theta)*freqConv) < 0.003, 'Frequency kick near the impact point is not zero'
    assert numpy.fabs(sdf_sanders15._kick_interpdOr(theta)*freqConv-0.05) < 0.01, 'Frequency kick near the impact point does not have the expected value'
    assert numpy.fabs(sdf_sanders15._kick_interpdOp(theta)*freqConv-0.035) < 0.01, 'Frequency kick near the impact point does not have the expected value'
    assert numpy.fabs(sdf_sanders15._kick_interpdOz(theta)*freqConv-0.04) < 0.01, 'Frequency kick near the impact point does not have the expected value'
# One beyond ._deltaAngleTrackImpact
theta= sdf_sanders15._deltaAngleTrackImpact+0.1
assert numpy.fabs(sdf_sanders15._kick_interpdOpar(theta)*freqConv) < 10.**-16., 'Frequency kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOperp0(theta)*freqConv) < 10.**-16., 'Frequency kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOperp1(theta)*freqConv) < 10.**-16., 'Frequency kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOr(theta)*freqConv) < 10.**-16., 'Frequency kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOp(theta)*freqConv) < 10.**-16., 'Frequency kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdOz(theta)*freqConv) < 10.**-16., 'Frequency kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdar(theta)) < 10.**-16., 'Angle kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdap(theta)) < 10.**-16., 'Angle kick beyond ._deltaAngleTrackImpact is not zero'
assert numpy.fabs(sdf_sanders15._kick_interpdaz(theta)) < 10.**-16., 'Angle kick beyond ._deltaAngleTrackImpact is not zero'
return None
def test_interpKickda():
thetas= numpy.linspace(-0.75,0.75,10)+sdf_sanders15._impact_angle
assert numpy.all(numpy.fabs(sdf_sanders15._kick_interpdar(thetas)) \
< 2.*numpy.fabs(sdf_sanders15._kick_interpdOr(thetas)/sdf_sanders15._progenitor_Omegar)), 'Interpolated angle kick not everywhere smaller than the frequency kick after one period'
return None
# Test the sampling of present-day perturbed points based on the model
def test_sample():
# Sample stars from the model and compare them to the stream
xv_mock_per= sdf_sanders15.sample(n=100000,xy=True).T
# Rough gap-density check
ingap= numpy.sum((xv_mock_per[:,0]*sdf_sanders15._ro > 4.)\
*(xv_mock_per[:,0]*sdf_sanders15._ro < 5.))
edgegap= numpy.sum((xv_mock_per[:,0]*sdf_sanders15._ro > 1.)\
*(xv_mock_per[:,0]*sdf_sanders15._ro < 2.))
outgap= numpy.sum((xv_mock_per[:,0]*sdf_sanders15._ro > -2.5)\
*(xv_mock_per[:,0]*sdf_sanders15._ro < -1.5))
    assert numpy.fabs(ingap/float(edgegap)-0.015/0.05) < 0.05, 'gap density versus edge of the gap is incorrect'
    assert numpy.fabs(ingap/float(outgap)-0.015/0.02) < 0.2, 'gap density versus outside of the gap is incorrect'
# Test track of the stream
tIndx= (xv_mock_per[:,0]*sdf_sanders15._ro > 4.)\
*(xv_mock_per[:,0]*sdf_sanders15._ro < 5.)\
*(xv_mock_per[:,1]*sdf_sanders15._ro < 5.)
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,1])*sdf_sanders15._ro+12.25) < 0.1, 'Location of mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,2])*sdf_sanders15._ro-3.8) < 0.1, 'Location of mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,3])*sdf_sanders15._vo-255.) < 2., 'Location of mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,4])*sdf_sanders15._vo-20.) < 2., 'Location of mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,5])*sdf_sanders15._vo+185.) < 2., 'Location of mock track is incorrect near the gap'
return None
# Test the sampling of present-day perturbed-unperturbed points
# (like in the paper)
def test_sample_offset():
# Sample stars from the model and compare them to the stream
numpy.random.seed(1)
xv_mock_per= sdf_sanders15.sample(n=100000,xy=True).T
numpy.random.seed(1) # should give same points
xv_mock_unp= sdf_sanders15_unp.sample(n=100000,xy=True).T
# Test perturbation as a function of unperturbed X
tIndx= (xv_mock_unp[:,0]*sdf_sanders15._ro > 0.)\
*(xv_mock_unp[:,0]*sdf_sanders15._ro < 1.)\
*(xv_mock_unp[:,1]*sdf_sanders15._ro < 5.)
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,0]-xv_mock_unp[tIndx,0])*sdf_sanders15._ro+0.65) < 0.1, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,1]-xv_mock_unp[tIndx,1])*sdf_sanders15._ro-0.1) < 0.1, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,2]-xv_mock_unp[tIndx,2])*sdf_sanders15._ro-0.4) < 0.1, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,3]-xv_mock_unp[tIndx,3])*sdf_sanders15._vo) < 0.5, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,4]-xv_mock_unp[tIndx,4])*sdf_sanders15._vo+7.) < 0.5, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,5]-xv_mock_unp[tIndx,5])*sdf_sanders15._vo-4.) < 0.5, 'Location of perturbed mock track is incorrect near the gap'
return None
# Test the sampling of present-day perturbed-unperturbed points
# (like in the paper, but for the leading stream impact)
def test_sample_offset_leading():
# Sample stars from the model and compare them to the stream
numpy.random.seed(1)
xv_mock_per= sdfl_sanders15.sample(n=100000,xy=True).T
numpy.random.seed(1) # should give same points
xv_mock_unp= sdfl_sanders15_unp.sample(n=100000,xy=True).T
# Test perturbation as a function of unperturbed X
tIndx= (xv_mock_unp[:,0]*sdfl_sanders15._ro > 13.)\
*(xv_mock_unp[:,0]*sdfl_sanders15._ro < 14.)\
*(xv_mock_unp[:,1]*sdfl_sanders15._ro > 5.)
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,0]-xv_mock_unp[tIndx,0])*sdfl_sanders15._ro+0.5) < 0.1, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,1]-xv_mock_unp[tIndx,1])*sdfl_sanders15._ro-0.3) < 0.1, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,2]-xv_mock_unp[tIndx,2])*sdfl_sanders15._ro-0.45) < 0.1, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,3]-xv_mock_unp[tIndx,3])*sdfl_sanders15._vo+2.) < 0.5, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,4]-xv_mock_unp[tIndx,4])*sdfl_sanders15._vo+7.) < 0.5, 'Location of perturbed mock track is incorrect near the gap'
assert numpy.fabs(numpy.median(xv_mock_per[tIndx,5]-xv_mock_unp[tIndx,5])*sdfl_sanders15._vo-6.) < 0.5, 'Location of perturbed mock track is incorrect near the gap'
return None
# Tests of the density and meanOmega functions
def test_pOparapar():
#Test that integrating pOparapar gives density_par
dens_frompOpar_close=\
integrate.quad(lambda x: sdf_sanders15.pOparapar(x,0.3),
sdf_sanders15._meandO\
-10.*numpy.sqrt(sdf_sanders15._sortedSigOEig[2]),
sdf_sanders15._meandO\
+10.*numpy.sqrt(sdf_sanders15._sortedSigOEig[2]))[0]
# This is actually in the gap!
dens_fromOpar_half=\
integrate.quad(lambda x: sdf_sanders15.pOparapar(x,2.6),
sdf_sanders15._meandO\
-10.*numpy.sqrt(sdf_sanders15._sortedSigOEig[2]),
sdf_sanders15._meandO\
+10.*numpy.sqrt(sdf_sanders15._sortedSigOEig[2]))[0]
assert numpy.fabs(dens_fromOpar_half/dens_frompOpar_close-sdf_sanders15.density_par(2.6)/sdf_sanders15.density_par(0.3)) < 10.**-4., 'density from integrating pOparapar not equal to that from density_par for Sanders15 stream'
return None
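# The identity exercised above, spelled out: density_par(apar) is
# proportional to the integral of pOparapar(Opar,apar) over Opar, so the
# ratio of the two integrals at apar=2.6 and apar=0.3 must match the ratio
# of density_par at the same angles; the overall normalization cancels.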
def test_density_apar_approx():
# Test that the approximate density agrees with the direct integration
# Need to do this relatively to another density, because there is an
# overall offset
apar= 2.6
assert numpy.fabs(sdf_sanders15.density_par(apar,approx=False)/sdf_sanders15.density_par(apar,approx=True)/sdf_sanders15.density_par(0.3,approx=False)*sdf_sanders15.density_par(0.3,approx=True)-1.) < 10.**-3., 'Approximate density does not agree with direct integration'
apar= 2.3
assert numpy.fabs(sdf_sanders15.density_par(apar,approx=False)/sdf_sanders15.density_par(apar,approx=True)/sdf_sanders15.density_par(0.3,approx=False)*sdf_sanders15.density_par(0.3,approx=True)-1.) < 10.**-3., 'Approximate density does not agree with direct integration'
return None
def test_density_apar_approx_higherorder():
# Test that the approximate density agrees with the direct integration
# Need to do this relatively to another density, because there is an
# overall offset
apar= 2.6
assert numpy.fabs(sdf_sanders15.density_par(apar,approx=False)/sdf_sanders15.density_par(apar,approx=True,higherorder=True)/sdf_sanders15.density_par(0.3,approx=False)*sdf_sanders15.density_par(0.3,approx=True,higherorder=True)-1.) < 10.**-3., 'Approximate density does not agree with direct integration'
apar= 2.3
assert numpy.fabs(sdf_sanders15.density_par(apar,approx=False)/sdf_sanders15.density_par(apar,approx=True,higherorder=True)/sdf_sanders15.density_par(0.3,approx=False)*sdf_sanders15.density_par(0.3,approx=True,higherorder=True)-1.) < 10.**-3., 'Approximate density does not agree with direct integration'
return None
def test_minOpar():
# Test that for Opar < minOpar, p(Opar,apar) is in fact zero!
apar= 0.3
dO= 10.**-4.
assert numpy.fabs(sdf_sanders15.pOparapar(sdf_sanders15.minOpar(apar)-dO,
apar)) < 10.**-16., 'Probability for Opar < minOpar is not zero'
apar= 2.6
dO= 10.**-4.
assert numpy.fabs(sdf_sanders15.pOparapar(sdf_sanders15.minOpar(apar)-dO,
apar)) < 10.**-16., 'Probability for Opar < minOpar is not zero'
return None
def test_meanOmega_approx():
# Test that the approximate meanOmega agrees with the direct integration
# Need to do this relatively to another density, because there is an
# overall offset
apar= 2.6
assert numpy.fabs(sdf_sanders15.meanOmega(apar,approx=False,oned=True)/sdf_sanders15.meanOmega(apar,approx=True,oned=True)-1.) < 10.**-3., 'Approximate meanOmega does not agree with direct integration'
apar= 2.3
assert numpy.fabs(sdf_sanders15.meanOmega(apar,approx=False,oned=True)/sdf_sanders15.meanOmega(apar,approx=True,oned=True)-1.) < 10.**-3., 'Approximate meanOmega does not agree with direct integration'
return None
def test_meanOmega_approx_higherorder():
# Test that the approximate meanOmega agrees with the direct integration
# Need to do this relatively to another density, because there is an
# overall offset
apar= 2.6
assert numpy.fabs(sdf_sanders15.meanOmega(apar,approx=False,oned=True)/sdf_sanders15.meanOmega(apar,approx=True,higherorder=True,oned=True)-1.) < 10.**-3., 'Approximate meanOmega does not agree with direct integration'
apar= 2.3
assert numpy.fabs(sdf_sanders15.meanOmega(apar,approx=False,oned=True)/sdf_sanders15.meanOmega(apar,approx=True,higherorder=True,oned=True)-1.) < 10.**-3., 'Approximate meanOmega does not agree with direct integration'
return None
def test_hernquist():
# Test that Hernquist kicks are similar to Plummer kicks, but are
# different in understood ways (...)
from galpy.util import conversion
# Switch to Hernquist
V0, R0= 220., 8.
impactb=0.
subhalovel=numpy.array([6.82200571,132.7700529,
149.4174464])/V0
impact_angle=-2.34
GM=10.**-2./conversion.mass_in_1010msol(V0,R0)
rs=0.625/R0
sdf_sanders15._determine_deltav_kick(impact_angle,impactb,subhalovel,
GM,rs,None,
3,True)
hernquist_kicks= sdf_sanders15._kick_deltav
# Back to Plummer
sdf_sanders15._determine_deltav_kick(impact_angle,impactb,subhalovel,
GM,rs,None,
3,False)
# Repeat some of the deltav tests from above
# Closest one to the impact point, should be close to zero
tIndx= numpy.argmin(numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack\
-sdf_sanders15._impact_angle))
assert numpy.all(numpy.fabs(hernquist_kicks[tIndx]*sdf_sanders15._vo) < 0.4), 'Kick near the impact point not close to zero for Hernquist'
# The peak, size and location
# Peak should be slightly less (guessed these correct!)
assert numpy.fabs(numpy.amax(numpy.fabs(hernquist_kicks[:,0]*sdf_sanders15._vo))-0.25) < 0.06, 'Peak dvx incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(hernquist_kicks[:,0]*sdf_sanders15._vo)]-sdf_sanders15._impact_angle < 0., 'Location of peak dvx incorrect'
assert numpy.fabs(numpy.amax(numpy.fabs(hernquist_kicks[:,1]*sdf_sanders15._vo))-0.25) < 0.06, 'Peak dvy incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(hernquist_kicks[:,1]*sdf_sanders15._vo)]-sdf_sanders15._impact_angle > 0., 'Location of peak dvy incorrect'
assert numpy.fabs(numpy.amax(numpy.fabs(hernquist_kicks[:,2]*sdf_sanders15._vo))-1.3) < 0.06, 'Peak dvz incorrect'
assert sdf_sanders15._kick_interpolatedThetasTrack[numpy.argmax(hernquist_kicks[:,2]*sdf_sanders15._vo)]-sdf_sanders15._impact_angle > 0., 'Location of peak dvz incorrect'
# Close to zero far from impact point
tIndx= numpy.argmin(numpy.fabs(sdf_sanders15._kick_interpolatedThetasTrack\
-sdf_sanders15._impact_angle-1.5))
    assert numpy.all(numpy.fabs(hernquist_kicks[tIndx]*sdf_sanders15._vo) < 0.3), 'Kick far from the impact point not close to zero'
return None
def test_determine_deltav_valueerror():
# Test that modeling leading (trailing) impact for trailing (leading) arm
# raises a ValueError when using _determine_deltav_kick
from galpy.util import conversion
# Switch to Hernquist
V0, R0= 220., 8.
impactb=0.
subhalovel=numpy.array([6.82200571,132.7700529,
149.4174464])/V0
impact_angle=-2.34
GM=10.**-2./conversion.mass_in_1010msol(V0,R0)
rs=0.625/R0
# Can't do minus impact angle!
with pytest.raises(ValueError) as excinfo:
sdf_sanders15._determine_deltav_kick(-impact_angle,impactb,subhalovel,
GM,rs,None,
3,True)
return None
# Test the routine that rotates vectors to an arbitrary vector
def test_rotate_to_arbitrary_vector():
from galpy.df.streamgapdf import _rotate_to_arbitrary_vector
tol= -10.
v= numpy.array([[1.,0.,0.]])
# Rotate to 90 deg off
ma= _rotate_to_arbitrary_vector(v,[0,1.,0])
assert numpy.fabs(ma[0,0,1]+1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,0]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,2]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
# Rotate to 90 deg off
ma= _rotate_to_arbitrary_vector(v,[0,0,1.])
assert numpy.fabs(ma[0,0,2]+1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,0]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,1]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
# Rotate to same should be unit matrix
ma= _rotate_to_arbitrary_vector(v,v[0])
assert numpy.all(numpy.fabs(numpy.diag(ma[0])-1.) < 10.**tol), \
'Rotation matrix to same vector is not unity'
assert numpy.fabs(numpy.sum(ma**2.)-3.)< 10.**tol, \
'Rotation matrix to same vector is not unity'
# Rotate to -same should be -unit matrix
ma= _rotate_to_arbitrary_vector(v,-v[0])
assert numpy.all(numpy.fabs(numpy.diag(ma[0])+1.) < 10.**tol), \
'Rotation matrix to minus same vector is not minus unity'
assert numpy.fabs(numpy.sum(ma**2.)-3.)< 10.**tol, \
'Rotation matrix to minus same vector is not minus unity'
return None
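# For reference, a minimal numpy-only sketch (an illustrative helper, not
# galpy's implementation) of the Rodrigues construction these tests probe:
# a matrix R with R.dot(a) = b for unit vectors a and b, falling back to
# +/- identity when a and b are (anti-)parallel, which matches the
# unity/minus-unity cases asserted above.
def _example_rotation_to_vector(a, b):
    a = a / numpy.sqrt(numpy.sum(a ** 2.))
    b = b / numpy.sqrt(numpy.sum(b ** 2.))
    v = numpy.cross(a, b)
    c = numpy.dot(a, b)
    if numpy.allclose(v, 0.):
        # (anti-)parallel case: +identity or -identity
        return numpy.sign(c) * numpy.eye(3)
    # Cross-product (skew-symmetric) matrix of v
    vx = numpy.array([[0., -v[2], v[1]],
                      [v[2], 0., -v[0]],
                      [-v[1], v[0], 0.]])
    return numpy.eye(3) + vx + vx.dot(vx) * (1. - c) / numpy.sum(v ** 2.)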
# Test that the rotation routine works for multiple vectors
def test_rotate_to_arbitrary_vector_multi():
from galpy.df.streamgapdf import _rotate_to_arbitrary_vector
tol= -10.
v= numpy.array([[1.,0.,0.],[0.,1.,0.]])
# Rotate to 90 deg off
ma= _rotate_to_arbitrary_vector(v,[0,0,1.])
assert numpy.fabs(ma[0,0,2]+1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,0]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,1]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
# 2nd
assert numpy.fabs(ma[1,1,2]+1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,2,1]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,0,0]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,0,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,0,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,1,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,1,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,2,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[1,2,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
return None
# Test the inverse of the routine that rotates vectors to an arbitrary vector
def test_rotate_to_arbitrary_vector_inverse():
from galpy.df.streamgapdf import _rotate_to_arbitrary_vector
tol= -10.
v= numpy.array([[1.,0.,0.]])
# Rotate to random vector and back
a= numpy.random.uniform(size=3)
a/= numpy.sqrt(numpy.sum(a**2.))
ma= _rotate_to_arbitrary_vector(v,a)
ma_inv= _rotate_to_arbitrary_vector(v,a,inv=True)
ma= numpy.dot(ma[0],ma_inv[0])
assert numpy.all(numpy.fabs(ma-numpy.eye(3)) < 10.**tol), 'Inverse rotation matrix incorrect'
return None
# Test that rotating to vy in particular works as expected
def test_rotation_vy():
from galpy.df.streamgapdf import _rotation_vy
tol= -10.
v= numpy.array([[1.,0.,0.]])
# Rotate to 90 deg off
ma= _rotation_vy(v)
assert numpy.fabs(ma[0,0,1]+1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,0]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,2]-1.) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,0,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,1,2]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,0]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
assert numpy.fabs(ma[0,2,1]) < 10.**tol, 'Rotation matrix to 90 deg off incorrect'
|
|
"""Convert H.264 mkv files to mp4 files playable on the PS3, and "correct" the
MPEG4/ISO/AVC profile for use on the PS3."""
try:
from .version import __version__
except ImportError:
__version__ = 'unknown'
from . import info
import sys
import os
import re
import getopt
import subprocess as sp
import struct
import pipes
import simplemkv.info
simple_usage = 'usage: mkvtomp4 [options] [--] <file>'
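# Typical invocation (file names are illustrative):
#   mkvtomp4 --use-mp4box -o movie.mp4 movie.mkv
# which extracts the H.264 and audio tracks, patches the AVC profile byte,
# converts non-AAC audio with ffmpeg, and remuxes everything into an mp4.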
def exit_if(bbool, value=0):
if bbool:
sys.exit(value)
class Kwargs(object):
def __init__(self, f, **kwargs):
self.f = f
self.kwargs = kwargs
def __call__(self, *args):
return self.f(*args, **self.kwargs)
def prin(*args, **kwargs):
fobj = kwargs.get('fobj', None)
if fobj is None:
fobj = sys.stdout
sep = kwargs.get('sep', ' ')
end = kwargs.get('end', '\n')
if len(args) > 0:
fobj.write(args[0])
if len(args) > 1:
for arg in args[1:]:
fobj.write(sep + arg)
fobj.write(end)
def eprint(*args, **kwargs):
kwargs['fobj'] = sys.stderr
prin("error:", *args, **kwargs)
def die(*args, **kwargs):
eprint(*args, **kwargs)
sys.exit(1)
def wprint(*args, **kwargs):
kwargs['fobj'] = sys.stderr
prin("warning:", *args, **kwargs)
_verbosity = 0
def vprint(level, *args, **kwargs):
global _verbosity
local = kwargs.get('verbosity', 0)
if _verbosity >= level or local >= level:
prin('verbose:', *args, **kwargs)
def onlykeys(d, keys):
newd = {}
for k in keys:
newd[k] = d[k]
return newd
def __sq(one):
if one == '':
return "''"
return pipes.quote(str(one))
def sq(args):
return " ".join([__sq(x) for x in args])
def command(cmd, **kwargs):
verbose_kwargs = {}
verbosity = kwargs.get('verbosity', None)
if verbosity is not None:
verbose_kwargs['verbosity'] = verbosity
spopts = kwargs.get('spopts', {})
vprint(1, 'command: %s' % str(cmd), **verbose_kwargs)
if spopts:
vprint(1, 'command: options: %s' % str(spopts), **verbose_kwargs)
try:
proc = sp.Popen(
cmd, stdout=sp.PIPE, stderr=sp.PIPE, close_fds=True, **spopts
)
    except OSError as e:
die('command failed:', str(e), ':', sq(cmd))
chout, cherr = proc.communicate()
vprint(1, 'command: stdout:', chout, '\ncommand: stderr:', cherr)
if proc.returncode != 0:
die('failure: %s' % cherr, end='')
return chout
def dry_command(cmd, **opts):
if opts['dry_run']:
prin(sq(cmd))
else:
command(cmd, **opts)
def dry_system(cmd, **opts):
quoted = sq(cmd)
if opts['dry_run']:
prin(quoted)
else:
os.system(quoted)
def default_options(argv0):
return {
'argv0': argv0,
'verbosity': 0,
'a_bitrate': '328',
'a_channels': '5.1',
'a_codec': 'libfaac',
'a_delay': None,
'output': None,
'video_track': None,
'audio_track': None,
'keep_temp_files': False,
'dry_run': False,
'correct_prof_only': False,
'stop_v_ex': False,
'stop_correct': False,
'stop_a_ex': False,
'stop_a_conv': False,
'stop_v_mp4': False,
'stop_hint_mp4': False,
'stop_a_mp4': False,
'mp4': 'mp4creator',
'mp4creator': 'mp4creator',
'mp4box': 'MP4Box',
'summary': True,
}
def mp4_add_audio_optimize_cmd(mp4file, audio, **opts):
if opts['mp4'] == 'mp4creator':
return [
opts.get('mp4creator', 'mp4creator'),
'-c', audio, '-interleave', '-optimize', mp4file
]
elif opts['mp4'] == 'mp4box':
delay = opts.get('a_delay', None)
if delay is not None:
delay = ':delay=' + delay
else:
delay = ''
return [
opts.get('mp4box', 'MP4Box'),
'-add', audio + '#audio:trackID=2' + delay, mp4file
]
def mp4_add_hint_cmd(mp4file, **opts):
if opts['mp4'] == 'mp4creator':
return [opts.get('mp4creator', 'mp4creator'), '-hint=1', mp4file]
elif opts['mp4'] == 'mp4box':
return None
def mp4_add_video_cmd(mp4file, video, fps, **opts):
if opts['mp4'] == 'mp4creator':
return [
opts.get('mp4creator', 'mp4creator'),
'-c', video, '-rate', str(fps), mp4file
]
elif opts['mp4'] == 'mp4box':
return [
opts.get('mp4box', 'MP4Box'), '-add',
video + '#video:trackID=1', '-hint', '-fps', str(fps), mp4file,
]
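# Sketch of the commands the two backends generate (file names illustrative):
#   mp4creator backend: mp4creator -c video.h264 -rate 23.976 out.mp4
#   mp4box backend:     MP4Box -add video.h264#video:trackID=1 -hint
#                       -fps 23.976 out.mp4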
def ffmpeg_convert_audio_cmd(old, new, **opts):
    # Read the a_* keys that default_options()/parseopts() actually set;
    # the previous 'bitrate'/'channels'/'codec' keys silently ignored the
    # --audio-* command-line options.
    bitrate = opts.get('a_bitrate', '328')
    channels = opts.get('a_channels', '5.1')
    codec = opts.get('a_codec', 'libfaac')
verbosity = opts.get('verbosity', 0)
if str(channels) == '5.1':
channels = '6'
if verbosity > 1:
cmd = ['ffmpeg', '-v', str(verbosity - 1)]
else:
cmd = ['ffmpeg']
return cmd + [
'-i', old, '-ac', str(channels), '-acodec', codec,
'-ab', str(bitrate) + 'k', new
]
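# E.g. with the defaults above this builds (names illustrative):
#   ffmpeg -i in.ac3 -ac 6 -acodec libfaac -ab 328k out.aac
# Note that '5.1' is mapped to 6 discrete channels before calling ffmpeg.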
def pretend_correct_rawmp4_profile(rawmp4, argv0):
prin(sq([argv0, '--correct-profile-only', rawmp4]))
def correct_rawmp4_profile(rawmp4):
level_string = struct.pack('b', int('29', 16))
f = open(rawmp4, 'r+b')
try:
f.seek(7)
vprint(1, 'correcting profile:', rawmp4)
f.write(level_string)
finally:
f.close()
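# A minimal companion sketch (a hypothetical helper, assuming a stream of at
# least 8 bytes) for reading back the AVC level byte that
# correct_rawmp4_profile() overwrites; 0x29 corresponds to H.264 level 4.1.
def read_rawmp4_level(rawmp4):
    f = open(rawmp4, 'rb')
    try:
        f.seek(7)
        return ord(f.read(1))
    finally:
        f.close()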
def dry_correct_rawmp4_profile(rawmp4, **opts):
if opts['dry_run']:
pretend_correct_rawmp4_profile(rawmp4, opts['argv0'])
else:
correct_rawmp4_profile(rawmp4)
def mkv_extract_track_cmd(mkv, out, track, verbosely=False, mkvextract=None):
v = ['-v'] if verbosely else []
if not mkvextract: mkvextract = 'mkvextract'
return [mkvextract, 'tracks', mkv] + v + [str(track) + ':' + out]
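# E.g. mkv_extract_track_cmd('in.mkv', 'in.mkv.h264', 1, verbosely=True)
# yields ['mkvextract', 'tracks', 'in.mkv', '-v', '1:in.mkv.h264'].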
def real_main(mkvfile, **opts):
mkvinfo = opts.get('mkvinfo', None)
infostr = simplemkv.info.infostring(mkvfile, arguments=['--ui-language', 'en_US'], mkvinfo=mkvinfo)
info = simplemkv.info.infodict(infostr.split('\n'))
tracks = info['tracks']
def get_track(typ, codec_re):
number = opts.get(typ + '_track', None)
if number is not None:
try:
track = tracks[number]
except IndexError:
die('track %d not found: %s' % (number, str(tracks)))
if not codec_re.search(track['codec']):
die('track %d has incorrect codec: %s' % (number, str(track)))
else:
types = [
t for t in tracks
if t['type'] == typ # and codec_re.search(t['codec'])
]
if not types:
die('appropriate %s track not found: %s' % (typ, str(tracks)))
return types[0]
    # Non-capturing groups: accept codecs with or without the V_/A_ prefix.
    videotrack = get_track('video', re.compile(r'^(?:V_)?MPEG4/ISO/AVC\b'))
    audiotrack = get_track('audio', re.compile(r'^(?:A_)?(?:DTS|AAC|AC3)\b'))
tempfiles = []
succeeded = False
try:
# Extract video
video = mkvfile + '.h264'
exit_if(opts['stop_v_ex'])
extract_cmd = mkv_extract_track_cmd(
mkvfile, out=video, track=videotrack['number'],
verbosely=(opts['verbosity'] > 0),
mkvextract=opts.get('mkvextract', None),
)
tempfiles.append(video)
dry_command(extract_cmd, **opts)
exit_if(opts['stop_correct'])
# Correct profile
dry_correct_rawmp4_profile(video, **opts)
a_codec = audiotrack['codec']
audio = mkvfile + '.' + a_codec.lower()
exit_if(opts['stop_a_ex'])
# Extract audio
extract_cmd = mkv_extract_track_cmd(
mkvfile, out=audio, track=audiotrack['number'],
verbosely=(opts['verbosity'] > 0),
mkvextract=opts.get('mkvextract', None),
)
tempfiles.append(audio)
dry_command(extract_cmd, **opts)
exit_if(opts['stop_a_conv'])
# Convert audio
        if str(a_codec).lower() != 'aac':
            aacaudio, oldaudio = audio + '.aac', audio
            audio_cmd = ffmpeg_convert_audio_cmd(oldaudio, aacaudio, **opts)
            tempfiles.append(aacaudio)
            dry_system(audio_cmd, **opts)
        else:
            # Already AAC: mux the extracted track as-is (previously
            # aacaudio was unbound here, raising a NameError below).
            aacaudio = audio
if opts['output'] is None:
opts['output'] = os.path.splitext(mkvfile)[0] + '.mp4'
exit_if(opts['stop_v_mp4'])
# Create mp4 container with video
opts['fps'] = videotrack['fps']
mp4video_cmd = mp4_add_video_cmd(
opts['output'], video,
**opts
)
dry_command(mp4video_cmd, **opts)
exit_if(opts['stop_hint_mp4'])
# Hint mp4 container
mp4hint_cmd = mp4_add_hint_cmd(opts['output'], **opts)
if mp4hint_cmd is not None:
dry_command(mp4hint_cmd, **opts)
exit_if(opts['stop_a_mp4'])
# Add audio to mp4 container and optimize
mp4opt_cmd = mp4_add_audio_optimize_cmd(
opts['output'], aacaudio,
**opts
)
dry_command(mp4opt_cmd, **opts)
succeeded = True
finally:
if not succeeded:
eprint('keeping temp files since we failed.')
return
if opts['dry_run']:
prin(sq(['rm', '-f'] + tempfiles))
elif not opts['keep_temp_files']:
for f in tempfiles:
try:
os.remove(f)
except OSError:
pass
def usage(**kwargs):
p = Kwargs(prin, **kwargs)
p(simple_usage)
p('options:')
p(' -h|--help:')
p(' Print this help message.')
p(' --usage:')
p(' Print a short help message.')
p(' -v|--verbose:')
p(' Print info about what is happening.')
p(' --use-mp4box:')
p(' Use mp4box when packaging the mp4.')
p(' --use-mp4creator:')
p(' Use mp4creator when packaging the mp4.')
p(' --mp4box=<mp4box>:')
p(' Use this <mp4box> command.')
p(' --mp4creator=<mp4creator>:')
p(' Use this <mp4creator> command.')
p(' --mkvinfo=<mkvinfo>:')
p(' Use this <mkvinfo> command.')
p(' --mkvextract=<mkvextract>:')
p(' Use this <mkvextract> command.')
p(' --video-track=<video-track>:')
p(' Use this video track number.')
p(' --audio-track=<audio-track>:')
p(' Use this audio track number.')
p(' --audio-delay-ms=<audio-delay-ms>:')
p(' Use this many milliseconds of audio delay.')
p(' --audio-bitrate=<audio-bitrate>:')
p(' Use this audio bitrate.')
p(' --audio-channels=<audio-channels>:')
    p(' Use this many audio channels.')
p(' --audio-codec=<audio-codec>:')
p(' Use this audio codec.')
p(' -o <output>|--output=<output>:')
p(' Write the mp4 to this file.')
p(' --keep-temp-files:')
p(' Keep all temporary files generated.')
p(' -n|--dry-run:')
p(' Don\'t actually run any commands.')
p(' --correct-profile-only:')
p(' Only correct the mp4 profile.')
p(' --stop-before-extract-video:')
p(' Don\'t do anything after extracting video.')
p(' --stop-before-correct-profile:')
p(' Don\'t do anything after correcting the mp4 profile.')
p(' --stop-before-extract-audio:')
p(' Don\'t do anything after extracting audio.')
p(' --stop-before-convert-audio:')
    p(' Don\'t do anything after converting audio.')
p(' --stop-before-video-mp4:')
p(' Don\'t do anything after adding video to mp4.')
p(' --stop-before-hinting-mp4:')
p(' Don\'t do anything after hinting the mp4.')
p(' --stop-before-audio-mp4:')
p(' Don\'t do anything after adding audio to mp4.')
p(' --no-summary:')
p(' Don\'t provide a summary.')
def parseopts(argv=None):
opts = default_options(argv[0])
try:
options, arguments = getopt.gnu_getopt(
argv[1:],
'hvo:n',
[
'help', 'usage', 'version', 'verbose',
'mp4box=', 'mp4creator=', 'mkvinfo=', 'mkvextract=',
'use-mp4box', 'use-mp4creator',
'video-track=', 'audio-track=',
'audio-delay-ms=', 'audio-bitrate=', 'audio-channels=',
'audio-codec=',
'output=', 'keep-temp-files', 'dry-run',
'correct-profile-only',
'stop-before-extract-video', 'stop-before-correct-profile',
'stop-before-extract-audio', 'stop-before-convert-audio',
'stop-before-video-mp4', 'stop-before-hinting-mp4',
'stop-before-audio-mp4',
'no-summary',
]
)
    except getopt.GetoptError as err:
die(str(err))
for opt, optarg in options:
if opt in ('-h', '--help'):
usage()
sys.exit(0)
elif opt == '--usage':
prin(simple_usage)
sys.exit(0)
elif opt == '--version':
prin(__version__)
sys.exit(0)
elif opt in ('-v', '--verbose'):
opts['verbosity'] = opts['verbosity'] + 1
elif opt == '--mp4creator':
opts['mp4creator'] = optarg
elif opt == '--mp4box':
opts['mp4box'] = optarg
elif opt == '--mkvinfo':
opts['mkvinfo'] = optarg
elif opt == '--mkvextract':
opts['mkvextract'] = optarg
elif opt == '--use-mp4creator':
opts['mp4'] = 'mp4creator'
elif opt == '--use-mp4box':
opts['mp4'] = 'mp4box'
elif opt == '--video-track':
opts['video_track'] = optarg
elif opt == '--audio-track':
opts['audio_track'] = optarg
elif opt == '--audio-delay-ms':
opts['a_delay'] = optarg
elif opt == '--audio-bitrate':
opts['a_bitrate'] = optarg
elif opt == '--audio-channels':
opts['a_channels'] = optarg
elif opt == '--audio-codec':
opts['a_codec'] = optarg
elif opt in ('-o', '--output'):
opts['output'] = optarg
elif opt == '--keep-temp-files':
opts['keep_temp_files'] = True
elif opt in ('-n', '--dry-run'):
opts['dry_run'] = True
elif opt == '--correct-profile-only':
opts['correct_prof_only'] = True
elif opt == '--stop-before-extract-video':
opts['stop_v_ex'] = True
elif opt == '--stop-before-correct-profile':
opts['stop_correct'] = True
elif opt == '--stop-before-extract-audio':
opts['stop_a_ex'] = True
elif opt == '--stop-before-convert-audio':
opts['stop_a_conv'] = True
elif opt == '--stop-before-video-mp4':
opts['stop_v_mp4'] = True
elif opt == '--stop-before-hinting-mp4':
opts['stop_hint_mp4'] = True
elif opt == '--stop-before-audio-mp4':
opts['stop_a_mp4'] = True
elif opt == '--no-summary':
opts['summary'] = False
return opts, arguments
def main(argv=None):
if argv is None:
argv = sys.argv
opts, args = parseopts(argv)
if len(args) != 1:
die(simple_usage)
if opts['a_delay'] is not None and opts['mp4'] == 'mp4creator':
die("Cannot use --audio-delay-ms with mp4creator. Try --use-mp4box")
if opts['correct_prof_only']:
dry_correct_rawmp4_profile(args[0], **opts)
else:
if opts['summary'] and not opts['dry_run']:
keep, dry_run = opts['keep_temp_files'], opts['dry_run']
opts['keep_temp_files'], opts['dry_run'] = True, True
real_main(args[0], **opts)
opts['keep_temp_files'], opts['dry_run'] = keep, dry_run
real_main(args[0], **opts)
|
|
# Copyright 2014 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Stack object."""
from oslo_log import log as logging
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common import identifier
from heat.db.sqlalchemy import api as db_api
from heat.objects import base as heat_base
from heat.objects import fields as heat_fields
from heat.objects import raw_template
from heat.objects import stack_tag
LOG = logging.getLogger(__name__)
class Stack(
heat_base.HeatObject,
base.VersionedObjectDictCompat,
base.ComparableVersionedObject,
):
fields = {
'id': fields.StringField(),
'name': fields.StringField(),
'raw_template_id': fields.IntegerField(),
'backup': fields.BooleanField(),
'created_at': fields.DateTimeField(read_only=True),
'deleted_at': fields.DateTimeField(nullable=True),
'disable_rollback': fields.BooleanField(),
'nested_depth': fields.IntegerField(),
'owner_id': fields.StringField(nullable=True),
'stack_user_project_id': fields.StringField(nullable=True),
'tenant': fields.StringField(nullable=True),
'timeout': fields.IntegerField(nullable=True),
'updated_at': fields.DateTimeField(nullable=True),
'user_creds_id': fields.StringField(nullable=True),
'username': fields.StringField(nullable=True),
'action': fields.StringField(nullable=True),
'status': fields.StringField(nullable=True),
'status_reason': fields.StringField(nullable=True),
'raw_template_obj': fields.ObjectField('RawTemplate'),
'convergence': fields.BooleanField(),
'current_traversal': fields.StringField(),
'current_deps': heat_fields.JsonField(),
'prev_raw_template_id': fields.IntegerField(),
'prev_raw_template': fields.ObjectField('RawTemplate'),
'parent_resource_name': fields.StringField(nullable=True),
}
@staticmethod
def _from_db_object(context, stack, db_stack):
for field in stack.fields:
if field == 'raw_template_obj':
raw_template_obj = db_stack.__dict__.get('raw_template')
if raw_template_obj is not None:
# Object is already lazy loaded
raw_template_obj = (
raw_template.RawTemplate.from_db_object(
context,
raw_template.RawTemplate(),
raw_template_obj))
stack._raw_template = raw_template_obj
else:
stack[field] = db_stack.__dict__.get(field)
stack._context = context
stack.obj_reset_changes()
return stack
@property
def raw_template(self):
if hasattr(self, '_raw_template'):
return self._raw_template
        LOG.warning('Loading a raw_template that should have been '
                    'eagerly loaded for stack id %s', self.id)
self._raw_template = raw_template.RawTemplate.get_by_id(
self._context,
self['raw_template_id'])
return self._raw_template
@raw_template.setter
def raw_template(self, value):
self['raw_template_obj'] = value
self._raw_template = value
@classmethod
def get_root_id(cls, context, stack_id):
return db_api.stack_get_root_id(context, stack_id)
@classmethod
def get_by_id(cls, context, stack_id, **kwargs):
db_stack = db_api.stack_get(context, stack_id, **kwargs)
if not db_stack:
return None
stack = cls._from_db_object(context, cls(context), db_stack)
return stack
@classmethod
def get_by_name_and_owner_id(cls, context, stack_name, owner_id):
db_stack = db_api.stack_get_by_name_and_owner_id(
context,
six.text_type(stack_name),
owner_id
)
if not db_stack:
return None
stack = cls._from_db_object(context, cls(context), db_stack)
return stack
@classmethod
def get_by_name(cls, context, stack_name):
db_stack = db_api.stack_get_by_name(context, six.text_type(stack_name))
if not db_stack:
return None
stack = cls._from_db_object(context, cls(context), db_stack)
return stack
@classmethod
def get_all(cls, context, limit=None, sort_keys=None, marker=None,
sort_dir=None, filters=None,
show_deleted=False, show_nested=False, show_hidden=False,
tags=None, tags_any=None, not_tags=None,
not_tags_any=None, eager_load=False):
db_stacks = db_api.stack_get_all(
context,
limit=limit,
sort_keys=sort_keys,
marker=marker,
sort_dir=sort_dir,
filters=filters,
show_deleted=show_deleted,
show_nested=show_nested,
show_hidden=show_hidden,
tags=tags,
tags_any=tags_any,
not_tags=not_tags,
not_tags_any=not_tags_any,
eager_load=eager_load)
for db_stack in db_stacks:
try:
yield cls._from_db_object(context, cls(context), db_stack)
except exception.NotFound:
pass
@classmethod
def get_all_by_owner_id(cls, context, owner_id):
db_stacks = db_api.stack_get_all_by_owner_id(context, owner_id)
for db_stack in db_stacks:
try:
yield cls._from_db_object(context, cls(context), db_stack)
except exception.NotFound:
pass
@classmethod
def get_all_by_root_owner_id(cls, context, root_owner_id):
db_stacks = db_api.stack_get_all_by_root_owner_id(context,
root_owner_id)
for db_stack in db_stacks:
try:
yield cls._from_db_object(context, cls(context), db_stack)
except exception.NotFound:
pass
@classmethod
def count_all(cls, context, **kwargs):
return db_api.stack_count_all(context, **kwargs)
@classmethod
def count_total_resources(cls, context, stack_id):
return db_api.stack_count_total_resources(context, stack_id)
@classmethod
def create(cls, context, values):
return cls._from_db_object(context, cls(context),
db_api.stack_create(context, values))
@classmethod
def update_by_id(cls, context, stack_id, values):
"""Update and return (boolean) if it was updated.
Note: the underlying stack_update filters by current_traversal
and stack_id.
"""
return db_api.stack_update(context, stack_id, values)
@classmethod
def select_and_update(cls, context, stack_id, values, exp_trvsl=None):
"""Update the stack by selecting on traversal ID.
Uses UPDATE ... WHERE (compare and swap) to catch any concurrent
update problem.
        If the stack is found with the given traversal, it is updated.
        If a race occurs while updating, only one writer succeeds and the
        others get a return value of False.
"""
return db_api.stack_update(context, stack_id, values,
exp_trvsl=exp_trvsl)
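    # Usage sketch (illustrative names): callers race on the traversal ID,
    # and only the one whose expected traversal still matches the row wins
    # the compare-and-swap:
    #
    #     updated = Stack.select_and_update(
    #         ctx, stack.id, {'status': 'IN_PROGRESS'},
    #         exp_trvsl=stack.current_traversal)
    #     if not updated:
    #         ...  # lost the race; another engine updated the stack first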
@classmethod
def persist_state_and_release_lock(cls, context, stack_id,
engine_id, values):
return db_api.persist_state_and_release_lock(context, stack_id,
engine_id, values)
@classmethod
def delete(cls, context, stack_id):
db_api.stack_delete(context, stack_id)
def update_and_save(self, values):
has_updated = self.__class__.update_by_id(self._context,
self.id, values)
if not has_updated:
raise exception.NotFound(_('Attempt to update a stack with id: '
'%(id)s %(traversal)s %(msg)s') % {
'id': self.id,
'traversal': self.current_traversal,
'msg': 'that does not exist'})
def __eq__(self, another):
self.refresh() # to make test object comparison work well
return super(Stack, self).__eq__(another)
def __ne__(self, other):
return not self.__eq__(other)
def refresh(self):
db_stack = db_api.stack_get(
self._context, self.id, show_deleted=True)
if db_stack is None:
message = _('No stack exists with id "%s"') % str(self.id)
raise exception.NotFound(message)
return self.__class__._from_db_object(
self._context,
self,
db_stack
)
@classmethod
def encrypt_hidden_parameters(cls, tmpl):
raw_template.RawTemplate.encrypt_hidden_parameters(tmpl)
@classmethod
def get_status(cls, context, stack_id):
"""Return action and status for the given stack."""
return db_api.stack_get_status(context, stack_id)
def identifier(self):
"""Return an identifier for this stack."""
return identifier.HeatIdentifier(self.tenant, self.name, self.id)
@property
def tags(self):
return stack_tag.StackTagList.get(self._context, self.id)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# fsfs-reshard.py REPOS_PATH MAX_FILES_PER_SHARD
#
# Perform an offline conversion of an FSFS repository between linear (format
# 2, usable by Subversion 1.4+) and sharded (format 3, usable by Subversion
# 1.5+) layouts.
#
# The MAX_FILES_PER_SHARD argument specifies the maximum number of files
# that will be stored in each shard (directory), or zero to specify a linear
# layout. Subversion 1.5 uses a default value of 1000 files per shard.
#
# As the repository will not be valid while the conversion is in progress,
# the repository administrator must ensure that access to the repository is
# blocked for the duration of the conversion.
#
# In the event that the conversion is interrupted, the repository will be in
# an inconsistent state. The repository administrator should then re-run
# this tool to completion.
#
#
# Note that, currently, resharding from one sharded layout to another is
# likely to be an extremely slow process. To reshard, we convert from a
# sharded to linear layout and then to the new sharded layout. The problem
# is that the initial conversion to the linear layout triggers exactly the
# same 'large number of files in a directory' problem that sharding is
# intended to solve.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ====================================================================
#
# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/server-side/fsfs-reshard.py $
# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $
# $LastChangedBy: hwright $
# $LastChangedRevision: 880911 $
import os, stat, sys
from errno import EEXIST
def usage():
"""Print a usage message and exit."""
print("""usage: %s REPOS_PATH MAX_FILES_PER_SHARD [START END]
Perform an offline conversion of an FSFS repository between linear
(readable by Subversion 1.4 or later) and sharded (readable by
Subversion 1.5 or later) layouts.
The MAX_FILES_PER_SHARD argument specifies the maximum number of
files that will be stored in each shard (directory), or zero to
specify a linear layout. Subversion 1.5 uses a default value of
1000 files per shard.
Convert revisions START through END inclusive if specified, or all
revisions if unspecified.
""" % sys.argv[0])
sys.exit(1)
def incompatible_repos_format(repos_path, format):
"""Print an error saying that REPOS_PATH is a repository with an
incompatible repository format FORMAT, then exit."""
sys.stderr.write("""error: unable to convert repository '%s'.
This repository is not compatible with this tool. Valid
repository formats are '3' or '5'; this repository is
format '%s'.
""" % (repos_path, format))
sys.stderr.flush()
sys.exit(1)
def incompatible_fs_format(repos_path, format):
"""Print an error saying that REPOS_PATH is a repository with an
incompatible filesystem format FORMAT, then exit."""
sys.stderr.write("""error: unable to convert repository '%s'.
This repository contains a filesystem that is not compatible with
this tool. Valid filesystem formats are '1', '2', or '3'; this
repository contains a filesystem with format '%s'.
""" % (repos_path, format))
sys.stderr.flush()
sys.exit(1)
def unexpected_fs_format_options(repos_path):
"""Print an error saying that REPOS_PATH is a repository with
unexpected filesystem format options, then exit."""
sys.stderr.write("""error: unable to convert repository '%s'.
This repository contains a filesystem that appears to be invalid -
there is unexpected data after the filesystem format number.
""" % repos_path)
sys.stderr.flush()
sys.exit(1)
def incompatible_fs_format_option(repos_path, option):
"""Print an error saying that REPOS_PATH is a repository with an
incompatible filesystem format option OPTION, then exit."""
sys.stderr.write("""error: unable to convert repository '%s'.
This repository contains a filesystem that is not compatible with
this tool. This tool recognises the 'layout' option but the
filesystem uses the '%s' option.
""" % (repos_path, option))
sys.stderr.flush()
sys.exit(1)
def warn_about_fs_format_1(repos_path, format_path):
"""Print a warning saying that REPOS_PATH contains a format 1 FSFS
filesystem that we can't reconstruct, then exit."""
sys.stderr.write("""warning: conversion of '%s' will be one-way.
This repository is currently readable by Subversion 1.1 or later.
This tool can convert this repository to one that is readable by
either Subversion 1.4 (or later) or Subversion 1.5 (or later),
but it is not able to convert it back to the original format - a
separate dump/load step would be required.
If you would like to upgrade this repository anyway, delete the
file '%s' and re-run this tool.
""" % (repos_path, format_path))
sys.stderr.flush()
sys.exit(1)
def check_repos_format(repos_path):
"""Check that REPOS_PATH contains a repository with a suitable format;
print a message and exit if not."""
format_path = os.path.join(repos_path, 'format')
try:
format_file = open(format_path)
format = format_file.readline()
if not format.endswith('\n'):
incompatible_repos_format(repos_path, format + ' <missing newline>')
format = format.rstrip('\n')
if format == '3' or format == '5':
pass
else:
incompatible_repos_format(repos_path, format)
except IOError:
# In all likelihood, the file doesn't exist.
incompatible_repos_format(repos_path, '<unreadable>')
def check_fs_format(repos_path):
"""Check that REPOS_PATH contains a filesystem with a suitable format,
or that it contains no format file; print a message and exit if neither
is true. Return bool whether the filesystem is sharded."""
sharded = False
db_path = os.path.join(repos_path, 'db')
format_path = os.path.join(db_path, 'format')
try:
format_file = open(format_path)
format = format_file.readline()
if not format.endswith('\n'):
incompatible_fs_format(repos_path, format + ' <missing newline>')
format = format.rstrip('\n')
if format == '1':
# This is a format 1 (svndiff0 only) filesystem. We can upgrade it,
# but we can't downgrade again (since we can't uncompress any of the
# svndiff1 deltas that may have been written). Warn the user and exit.
warn_about_fs_format_1(repos_path, format_path)
if format == '2':
pass
elif format == '3':
pass
else:
incompatible_fs_format(repos_path, format)
for line in format_file:
if format == '2':
unexpected_fs_format_options(repos_path)
line = line.rstrip('\n')
if line == 'layout linear':
pass
elif line.startswith('layout sharded '):
sharded = True
else:
incompatible_fs_format_option(repos_path, line)
format_file.close()
except IOError:
# The format file might not exist if we've previously been interrupted,
# or if the user is following our advice about upgrading a format 1
# repository. In both cases, we'll just assume the format was
# compatible.
pass
return sharded
def current_file(repos_path):
"""Return triple of (revision, next_node_id, next_copy_id) from
REPOS_PATH/db/current ."""
return open(os.path.join(repos_path, 'db', 'current')).readline().split()
def remove_fs_format(repos_path):
"""Remove the filesystem format file for repository REPOS_PATH.
Do not raise an error if the file is already missing."""
format_path = os.path.join(repos_path, 'db', 'format')
try:
statinfo = os.stat(format_path)
except OSError:
# The file probably doesn't exist.
return
# On Windows, we need to ensure the file is writable before we can
# remove it.
os.chmod(format_path, statinfo.st_mode | stat.S_IWUSR)
os.remove(format_path)
def write_fs_format(repos_path, contents):
"""Write a new filesystem format file for repository REPOS_PATH containing
CONTENTS."""
format_path = os.path.join(repos_path, 'db', 'format')
f = open(format_path, 'wb')
f.write(contents)
f.close()
os.chmod(format_path, stat.S_IRUSR | stat.S_IRGRP)
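# For reference, the two format files main() writes through this helper are
# "2\n" for a linear layout and, for a sharded layout with e.g. 1000 files
# per shard, "3\nlayout sharded 1000\n".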
def linearise(path):
"""Move all the files in subdirectories of PATH into PATH, and remove the
subdirectories. Handle conflicts between subdirectory names and files
contained in subdirectories by ensuring subdirectories have a '.shard'
  suffix prior to moving (the files are assumed not to have this suffix).
Abort if a subdirectory is found to contain another subdirectory."""
# First enumerate all subdirectories of DIR and rename where necessary
# to include a .shard suffix.
for name in os.listdir(path):
if name.endswith('.shard'):
continue
subdir_path = os.path.join(path, name)
if not os.path.isdir(subdir_path):
continue
os.rename(subdir_path, subdir_path + '.shard')
# Now move all the subdirectory contents into the parent and remove
# the subdirectories.
for root_path, dirnames, filenames in os.walk(path):
if root_path == path:
continue
if len(dirnames) > 0:
sys.stderr.write("error: directory '%s' contains other unexpected directories.\n" \
% root_path)
sys.stderr.flush()
sys.exit(1)
for name in filenames:
from_path = os.path.join(root_path, name)
to_path = os.path.join(path, name)
os.rename(from_path, to_path)
os.rmdir(root_path)
def shard(path, max_files_per_shard, start, end):
"""Move the files for revisions START to END inclusive in PATH into
subdirectories of PATH named such that subdirectory '0' contains at most
  MAX_FILES_PER_SHARD files, namely those revisions numbered [0, MAX_FILES_PER_SHARD). Abort if
PATH is found to contain any entries with non-numeric names."""
tmp = path + '.reshard'
try:
os.mkdir(tmp)
  except OSError as e:
if e.errno != EEXIST:
raise
# Move all entries into shards named N.shard.
for rev in range(start, end + 1):
name = str(rev)
shard = rev // max_files_per_shard
shard_name = str(shard) + '.shard'
from_path = os.path.join(path, name)
to_path = os.path.join(tmp, shard_name, name)
try:
os.rename(from_path, to_path)
except OSError:
# The most likely explanation is that the shard directory doesn't
# exist. Let's create it and retry the rename.
os.mkdir(os.path.join(tmp, shard_name))
os.rename(from_path, to_path)
# Now rename all the shards to remove the suffix.
skipped = 0
for name in os.listdir(tmp):
if not name.endswith('.shard'):
sys.stderr.write("warning: ignoring unexpected subdirectory '%s'.\n" \
% os.path.join(tmp, name))
sys.stderr.flush()
skipped += 1
continue
from_path = os.path.join(tmp, name)
to_path = os.path.join(path, os.path.basename(from_path)[:-6])
os.rename(from_path, to_path)
  if skipped == 0:
    os.rmdir(tmp)
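# Shard assignment above is plain integer division: with
# max_files_per_shard=1000, revisions 0..999 land in shard '0' and revision
# 1234 lands in shard '1', since 1234 // 1000 == 1.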
def main():
if len(sys.argv) < 3:
usage()
repos_path = sys.argv[1]
max_files_per_shard = sys.argv[2]
try:
start = int(sys.argv[3])
end = int(sys.argv[4])
except IndexError:
start = 0
end = int(current_file(repos_path)[0])
# Validate the command-line arguments.
db_path = os.path.join(repos_path, 'db')
current_path = os.path.join(db_path, 'current')
if not os.path.exists(current_path):
sys.stderr.write("error: '%s' doesn't appear to be a Subversion FSFS repository.\n" \
% repos_path)
sys.stderr.flush()
sys.exit(1)
try:
max_files_per_shard = int(max_files_per_shard)
  except (ValueError, OverflowError):
sys.stderr.write("error: maximum files per shard ('%s') is not a valid number.\n" \
% max_files_per_shard)
sys.stderr.flush()
sys.exit(1)
if max_files_per_shard < 0:
sys.stderr.write("error: maximum files per shard ('%d') must not be negative.\n" \
% max_files_per_shard)
sys.stderr.flush()
sys.exit(1)
# Check the format of the repository.
check_repos_format(repos_path)
sharded = check_fs_format(repos_path)
# Let the user know what's going on.
if max_files_per_shard > 0:
print("Converting '%s' to a sharded structure with %d files per directory" \
% (repos_path, max_files_per_shard))
if sharded:
print('(will convert to a linear structure first)')
else:
print("Converting '%s' to a linear structure" % repos_path)
# Prevent access to the repository for the duration of the conversion.
# There's no clean way to do this, but since the format of the repository
# is indeterminate, let's remove the format file while we're converting.
print('- marking the repository as invalid')
remove_fs_format(repos_path)
# First, convert to a linear scheme (this makes recovery easier because
# it's easier to reason about the behaviour on restart).
if sharded:
print('- linearising db/revs')
linearise(os.path.join(repos_path, 'db', 'revs'))
print('- linearising db/revprops')
linearise(os.path.join(repos_path, 'db', 'revprops'))
if max_files_per_shard == 0:
# We're done. Stamp the filesystem with a format 2 db/format file.
print('- marking the repository as a valid linear repository')
write_fs_format(repos_path, '2\n')
else:
print('- sharding db/revs')
shard(os.path.join(repos_path, 'db', 'revs'), max_files_per_shard,
start, end)
print('- sharding db/revprops')
shard(os.path.join(repos_path, 'db', 'revprops'), max_files_per_shard,
start, end)
# We're done. Stamp the filesystem with a format 3 db/format file.
print('- marking the repository as a valid sharded repository')
write_fs_format(repos_path, '3\nlayout sharded %d\n' % max_files_per_shard)
print('- done.')
sys.exit(0)
if __name__ == '__main__':
raise Exception("""This script is unfinished and not ready to be used on live data.
Trust us.""")
main()
|
|
from __future__ import absolute_import, division, print_function, with_statement
import contextlib
import datetime
import functools
import sys
import textwrap
import time
import weakref
from tornado.concurrent import return_future, Future
from tornado.escape import url_escape
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop
from tornado.log import app_log
from tornado import stack_context
from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
from tornado.test.util import unittest, skipOnTravis, skipBefore33, skipBefore35, skipNotCPython, exec_test
from tornado.web import Application, RequestHandler, asynchronous, HTTPError
from tornado import gen
try:
from concurrent import futures
except ImportError:
futures = None
class GenEngineTest(AsyncTestCase):
def setUp(self):
super(GenEngineTest, self).setUp()
self.named_contexts = []
def named_context(self, name):
@contextlib.contextmanager
def context():
self.named_contexts.append(name)
try:
yield
finally:
self.assertEqual(self.named_contexts.pop(), name)
return context
def run_gen(self, f):
f()
return self.wait()
def delay_callback(self, iterations, callback, arg):
"""Runs callback(arg) after a number of IOLoop iterations."""
if iterations == 0:
callback(arg)
else:
self.io_loop.add_callback(functools.partial(
self.delay_callback, iterations - 1, callback, arg))
@return_future
def async_future(self, result, callback):
self.io_loop.add_callback(callback, result)
@gen.coroutine
def async_exception(self, e):
yield gen.moment
raise e
def test_no_yield(self):
@gen.engine
def f():
self.stop()
self.run_gen(f)
def test_inline_cb(self):
@gen.engine
def f():
(yield gen.Callback("k1"))()
res = yield gen.Wait("k1")
self.assertTrue(res is None)
self.stop()
self.run_gen(f)
def test_ioloop_cb(self):
@gen.engine
def f():
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.stop()
self.run_gen(f)
def test_exception_phase1(self):
@gen.engine
def f():
1 / 0
self.assertRaises(ZeroDivisionError, self.run_gen, f)
def test_exception_phase2(self):
@gen.engine
def f():
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
1 / 0
self.assertRaises(ZeroDivisionError, self.run_gen, f)
def test_exception_in_task_phase1(self):
def fail_task(callback):
1 / 0
@gen.engine
def f():
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.stop()
self.run_gen(f)
def test_exception_in_task_phase2(self):
# This is the case that requires the use of stack_context in gen.engine
def fail_task(callback):
self.io_loop.add_callback(lambda: 1 / 0)
@gen.engine
def f():
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.stop()
self.run_gen(f)
def test_with_arg(self):
@gen.engine
def f():
(yield gen.Callback("k1"))(42)
res = yield gen.Wait("k1")
self.assertEqual(42, res)
self.stop()
self.run_gen(f)
def test_with_arg_tuple(self):
@gen.engine
def f():
(yield gen.Callback((1, 2)))((3, 4))
res = yield gen.Wait((1, 2))
self.assertEqual((3, 4), res)
self.stop()
self.run_gen(f)
def test_key_reuse(self):
@gen.engine
def f():
yield gen.Callback("k1")
yield gen.Callback("k1")
self.stop()
self.assertRaises(gen.KeyReuseError, self.run_gen, f)
def test_key_reuse_tuple(self):
@gen.engine
def f():
yield gen.Callback((1, 2))
yield gen.Callback((1, 2))
self.stop()
self.assertRaises(gen.KeyReuseError, self.run_gen, f)
def test_key_mismatch(self):
@gen.engine
def f():
yield gen.Callback("k1")
yield gen.Wait("k2")
self.stop()
self.assertRaises(gen.UnknownKeyError, self.run_gen, f)
def test_key_mismatch_tuple(self):
@gen.engine
def f():
yield gen.Callback((1, 2))
yield gen.Wait((2, 3))
self.stop()
self.assertRaises(gen.UnknownKeyError, self.run_gen, f)
def test_leaked_callback(self):
@gen.engine
def f():
yield gen.Callback("k1")
self.stop()
self.assertRaises(gen.LeakedCallbackError, self.run_gen, f)
def test_leaked_callback_tuple(self):
@gen.engine
def f():
yield gen.Callback((1, 2))
self.stop()
self.assertRaises(gen.LeakedCallbackError, self.run_gen, f)
def test_parallel_callback(self):
@gen.engine
def f():
for k in range(3):
self.io_loop.add_callback((yield gen.Callback(k)))
yield gen.Wait(1)
self.io_loop.add_callback((yield gen.Callback(3)))
yield gen.Wait(0)
yield gen.Wait(3)
yield gen.Wait(2)
self.stop()
self.run_gen(f)
def test_bogus_yield(self):
@gen.engine
def f():
yield 42
self.assertRaises(gen.BadYieldError, self.run_gen, f)
def test_bogus_yield_tuple(self):
@gen.engine
def f():
yield (1, 2)
self.assertRaises(gen.BadYieldError, self.run_gen, f)
def test_reuse(self):
@gen.engine
def f():
self.io_loop.add_callback((yield gen.Callback(0)))
yield gen.Wait(0)
self.stop()
self.run_gen(f)
self.run_gen(f)
def test_task(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
self.stop()
self.run_gen(f)
def test_wait_all(self):
@gen.engine
def f():
(yield gen.Callback("k1"))("v1")
(yield gen.Callback("k2"))("v2")
results = yield gen.WaitAll(["k1", "k2"])
self.assertEqual(results, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_exception_in_yield(self):
@gen.engine
def f():
try:
yield gen.Wait("k1")
raise Exception("did not get expected exception")
except gen.UnknownKeyError:
pass
self.stop()
self.run_gen(f)
def test_resume_after_exception_in_yield(self):
@gen.engine
def f():
try:
yield gen.Wait("k1")
raise Exception("did not get expected exception")
except gen.UnknownKeyError:
pass
(yield gen.Callback("k2"))("v2")
self.assertEqual((yield gen.Wait("k2")), "v2")
self.stop()
self.run_gen(f)
def test_orphaned_callback(self):
@gen.engine
def f():
self.orphaned_callback = yield gen.Callback(1)
try:
self.run_gen(f)
raise Exception("did not get expected exception")
except gen.LeakedCallbackError:
pass
self.orphaned_callback()
def test_multi(self):
@gen.engine
def f():
(yield gen.Callback("k1"))("v1")
(yield gen.Callback("k2"))("v2")
results = yield [gen.Wait("k1"), gen.Wait("k2")]
self.assertEqual(results, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_multi_dict(self):
@gen.engine
def f():
(yield gen.Callback("k1"))("v1")
(yield gen.Callback("k2"))("v2")
results = yield dict(foo=gen.Wait("k1"), bar=gen.Wait("k2"))
self.assertEqual(results, dict(foo="v1", bar="v2"))
self.stop()
self.run_gen(f)
# The following tests explicitly run with both gen.Multi
# and gen.multi_future (Task returns a Future, so it can be used
# with either).
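    # Illustrative sketch only (cb1 and cb2 stand for hypothetical
    # callback-style functions); the two equivalent spellings are:
    #   results = yield gen.Multi([gen.Task(cb1), gen.Task(cb2)])
    #   results = yield gen.multi_future([gen.Task(cb1), gen.Task(cb2)])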
def test_multi_yieldpoint_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.Multi([
gen.Task(self.delay_callback, 3, arg="v1"),
gen.Task(self.delay_callback, 1, arg="v2"),
])
self.assertEqual(responses, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_multi_yieldpoint_dict_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.Multi(dict(
foo=gen.Task(self.delay_callback, 3, arg="v1"),
bar=gen.Task(self.delay_callback, 1, arg="v2"),
))
self.assertEqual(responses, dict(foo="v1", bar="v2"))
self.stop()
self.run_gen(f)
def test_multi_future_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.multi_future([
gen.Task(self.delay_callback, 3, arg="v1"),
gen.Task(self.delay_callback, 1, arg="v2"),
])
self.assertEqual(responses, ["v1", "v2"])
self.stop()
self.run_gen(f)
def test_multi_future_dict_delayed(self):
@gen.engine
def f():
# callbacks run at different times
responses = yield gen.multi_future(dict(
foo=gen.Task(self.delay_callback, 3, arg="v1"),
bar=gen.Task(self.delay_callback, 1, arg="v2"),
))
self.assertEqual(responses, dict(foo="v1", bar="v2"))
self.stop()
self.run_gen(f)
@skipOnTravis
@gen_test
def test_multi_performance(self):
# Yielding a list used to have quadratic performance; make
# sure a large list stays reasonable. On my laptop a list of
# 2000 used to take 1.8s, now it takes 0.12.
start = time.time()
yield [gen.Task(self.io_loop.add_callback) for i in range(2000)]
end = time.time()
self.assertLess(end - start, 1.0)
@gen_test
def test_multi_empty(self):
# Empty lists or dicts should return the same type.
x = yield []
self.assertTrue(isinstance(x, list))
y = yield {}
self.assertTrue(isinstance(y, dict))
@gen_test
def test_multi_mixed_types(self):
# A YieldPoint (Wait) and Future (Task) can be combined
# (and use the YieldPoint codepath)
(yield gen.Callback("k1"))("v1")
responses = yield [gen.Wait("k1"),
gen.Task(self.delay_callback, 3, arg="v2")]
self.assertEqual(responses, ["v1", "v2"])
@gen_test
def test_future(self):
result = yield self.async_future(1)
self.assertEqual(result, 1)
@gen_test
def test_multi_future(self):
results = yield [self.async_future(1), self.async_future(2)]
self.assertEqual(results, [1, 2])
@gen_test
def test_multi_future_duplicate(self):
f = self.async_future(2)
results = yield [self.async_future(1), f, self.async_future(3), f]
self.assertEqual(results, [1, 2, 3, 2])
@gen_test
def test_multi_dict_future(self):
results = yield dict(foo=self.async_future(1), bar=self.async_future(2))
self.assertEqual(results, dict(foo=1, bar=2))
@gen_test
def test_multi_exceptions(self):
with ExpectLog(app_log, "Multiple exceptions in yield list"):
with self.assertRaises(RuntimeError) as cm:
yield gen.Multi([self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))])
self.assertEqual(str(cm.exception), "error 1")
# With only one exception, no error is logged.
with self.assertRaises(RuntimeError):
yield gen.Multi([self.async_exception(RuntimeError("error 1")),
self.async_future(2)])
# Exception logging may be explicitly quieted.
with self.assertRaises(RuntimeError):
yield gen.Multi([self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))],
quiet_exceptions=RuntimeError)
@gen_test
def test_multi_future_exceptions(self):
with ExpectLog(app_log, "Multiple exceptions in yield list"):
with self.assertRaises(RuntimeError) as cm:
yield [self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))]
self.assertEqual(str(cm.exception), "error 1")
# With only one exception, no error is logged.
with self.assertRaises(RuntimeError):
yield [self.async_exception(RuntimeError("error 1")),
self.async_future(2)]
# Exception logging may be explicitly quieted.
with self.assertRaises(RuntimeError):
yield gen.multi_future(
[self.async_exception(RuntimeError("error 1")),
self.async_exception(RuntimeError("error 2"))],
quiet_exceptions=RuntimeError)
def test_arguments(self):
@gen.engine
def f():
(yield gen.Callback("noargs"))()
self.assertEqual((yield gen.Wait("noargs")), None)
(yield gen.Callback("1arg"))(42)
self.assertEqual((yield gen.Wait("1arg")), 42)
(yield gen.Callback("kwargs"))(value=42)
result = yield gen.Wait("kwargs")
self.assertTrue(isinstance(result, gen.Arguments))
self.assertEqual(((), dict(value=42)), result)
self.assertEqual(dict(value=42), result.kwargs)
(yield gen.Callback("2args"))(42, 43)
result = yield gen.Wait("2args")
self.assertTrue(isinstance(result, gen.Arguments))
self.assertEqual(((42, 43), {}), result)
self.assertEqual((42, 43), result.args)
def task_func(callback):
callback(None, error="foo")
result = yield gen.Task(task_func)
self.assertTrue(isinstance(result, gen.Arguments))
self.assertEqual(((None,), dict(error="foo")), result)
self.stop()
self.run_gen(f)
def test_stack_context_leak(self):
# regression test: repeated invocations of a gen-based
# function should not result in accumulated stack_contexts
def _stack_depth():
head = stack_context._state.contexts[1]
length = 0
while head is not None:
length += 1
head = head.old_contexts[1]
return length
@gen.engine
def inner(callback):
yield gen.Task(self.io_loop.add_callback)
callback()
@gen.engine
def outer():
for i in range(10):
yield gen.Task(inner)
stack_increase = _stack_depth() - initial_stack_depth
self.assertTrue(stack_increase <= 2)
self.stop()
initial_stack_depth = _stack_depth()
self.run_gen(outer)
def test_stack_context_leak_exception(self):
# same as previous, but with a function that exits with an exception
@gen.engine
def inner(callback):
yield gen.Task(self.io_loop.add_callback)
1 / 0
@gen.engine
def outer():
for i in range(10):
try:
yield gen.Task(inner)
except ZeroDivisionError:
pass
stack_increase = len(stack_context._state.contexts) - initial_stack_depth
self.assertTrue(stack_increase <= 2)
self.stop()
initial_stack_depth = len(stack_context._state.contexts)
self.run_gen(outer)
def function_with_stack_context(self, callback):
# Technically this function should stack_context.wrap its callback
# upon entry. However, it is very common for this step to be
# omitted.
def step2():
self.assertEqual(self.named_contexts, ['a'])
self.io_loop.add_callback(callback)
with stack_context.StackContext(self.named_context('a')):
self.io_loop.add_callback(step2)
@gen_test
def test_wait_transfer_stack_context(self):
# Wait should not pick up contexts from where callback was invoked,
# even if that function improperly fails to wrap its callback.
cb = yield gen.Callback('k1')
self.function_with_stack_context(cb)
self.assertEqual(self.named_contexts, [])
yield gen.Wait('k1')
self.assertEqual(self.named_contexts, [])
@gen_test
def test_task_transfer_stack_context(self):
yield gen.Task(self.function_with_stack_context)
self.assertEqual(self.named_contexts, [])
def test_raise_after_stop(self):
# This pattern will be used in the following tests so make sure
# the exception propagates as expected.
@gen.engine
def f():
self.stop()
1 / 0
with self.assertRaises(ZeroDivisionError):
self.run_gen(f)
def test_sync_raise_return(self):
# gen.Return is allowed in @gen.engine, but it may not be used
# to return a value.
@gen.engine
def f():
self.stop(42)
raise gen.Return()
result = self.run_gen(f)
self.assertEqual(result, 42)
def test_async_raise_return(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
self.stop(42)
raise gen.Return()
result = self.run_gen(f)
self.assertEqual(result, 42)
def test_sync_raise_return_value(self):
@gen.engine
def f():
raise gen.Return(42)
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_sync_raise_return_value_tuple(self):
@gen.engine
def f():
raise gen.Return((1, 2))
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_async_raise_return_value(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return(42)
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_async_raise_return_value_tuple(self):
@gen.engine
def f():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return((1, 2))
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_return_value(self):
# It is an error to apply @gen.engine to a function that returns
# a value.
@gen.engine
def f():
return 42
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
def test_return_value_tuple(self):
# It is an error to apply @gen.engine to a function that returns
# a value.
@gen.engine
def f():
return (1, 2)
with self.assertRaises(gen.ReturnValueIgnoredError):
self.run_gen(f)
@skipNotCPython
def test_task_refcounting(self):
# On CPython, tasks and their arguments should be released immediately
# without waiting for garbage collection.
@gen.engine
def f():
class Foo(object):
pass
arg = Foo()
self.arg_ref = weakref.ref(arg)
task = gen.Task(self.io_loop.add_callback, arg=arg)
self.task_ref = weakref.ref(task)
yield task
self.stop()
self.run_gen(f)
self.assertIs(self.arg_ref(), None)
self.assertIs(self.task_ref(), None)
class GenCoroutineTest(AsyncTestCase):
def setUp(self):
# Stray StopIteration exceptions can lead to tests exiting prematurely,
# so we need explicit checks here to make sure the tests run all
# the way through.
self.finished = False
super(GenCoroutineTest, self).setUp()
def tearDown(self):
super(GenCoroutineTest, self).tearDown()
assert self.finished
@gen_test
def test_sync_gen_return(self):
@gen.coroutine
def f():
raise gen.Return(42)
result = yield f()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_async_gen_return(self):
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return(42)
result = yield f()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_sync_return(self):
@gen.coroutine
def f():
return 42
result = yield f()
self.assertEqual(result, 42)
self.finished = True
@skipBefore33
@gen_test
def test_async_return(self):
namespace = exec_test(globals(), locals(), """
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
return 42
""")
result = yield namespace['f']()
self.assertEqual(result, 42)
self.finished = True
@skipBefore33
@gen_test
def test_async_early_return(self):
# A yield statement exists but is not executed, which means
# this function "returns" via an exception. This exception
# doesn't happen before the exception handling is set up.
namespace = exec_test(globals(), locals(), """
@gen.coroutine
def f():
if True:
return 42
yield gen.Task(self.io_loop.add_callback)
""")
result = yield namespace['f']()
self.assertEqual(result, 42)
self.finished = True
@skipBefore35
@gen_test
def test_async_await(self):
# This test verifies that an async function can await a
# yield-based gen.coroutine, and that a gen.coroutine
# (the test method itself) can yield an async function.
namespace = exec_test(globals(), locals(), """
async def f():
await gen.Task(self.io_loop.add_callback)
return 42
""")
result = yield namespace['f']()
self.assertEqual(result, 42)
self.finished = True
@skipBefore35
@gen_test
def test_async_await_mixed_multi(self):
namespace = exec_test(globals(), locals(), """
async def f1():
await gen.Task(self.io_loop.add_callback)
return 42
""")
@gen.coroutine
def f2():
yield gen.Task(self.io_loop.add_callback)
raise gen.Return(43)
results = yield [namespace['f1'](), f2()]
self.assertEqual(results, [42, 43])
self.finished = True
@gen_test
def test_sync_return_no_value(self):
@gen.coroutine
def f():
return
result = yield f()
self.assertEqual(result, None)
self.finished = True
@gen_test
def test_async_return_no_value(self):
# Without a return value we don't need python 3.3.
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
return
result = yield f()
self.assertEqual(result, None)
self.finished = True
@gen_test
def test_sync_raise(self):
@gen.coroutine
def f():
1 / 0
        # The exception is raised when the future is yielded
        # (or equivalently when its result method is called),
        # not when the function itself is called.
future = f()
with self.assertRaises(ZeroDivisionError):
yield future
self.finished = True
@gen_test
def test_async_raise(self):
@gen.coroutine
def f():
yield gen.Task(self.io_loop.add_callback)
1 / 0
future = f()
with self.assertRaises(ZeroDivisionError):
yield future
self.finished = True
@gen_test
def test_pass_callback(self):
@gen.coroutine
def f():
raise gen.Return(42)
result = yield gen.Task(f)
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_replace_yieldpoint_exception(self):
# Test exception handling: a coroutine can catch one exception
# raised by a yield point and raise a different one.
@gen.coroutine
def f1():
1 / 0
@gen.coroutine
def f2():
try:
yield f1()
except ZeroDivisionError:
raise KeyError()
future = f2()
with self.assertRaises(KeyError):
yield future
self.finished = True
@gen_test
def test_swallow_yieldpoint_exception(self):
# Test exception handling: a coroutine can catch an exception
# raised by a yield point and not raise a different one.
@gen.coroutine
def f1():
1 / 0
@gen.coroutine
def f2():
try:
yield f1()
except ZeroDivisionError:
raise gen.Return(42)
result = yield f2()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_replace_context_exception(self):
# Test exception handling: exceptions thrown into the stack context
# can be caught and replaced.
# Note that this test and the following are for behavior that is
# not really supported any more: coroutines no longer create a
# stack context automatically; but one is created after the first
# YieldPoint (i.e. not a Future).
@gen.coroutine
def f2():
(yield gen.Callback(1))()
yield gen.Wait(1)
self.io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(self.io_loop.add_timeout,
self.io_loop.time() + 10)
except ZeroDivisionError:
raise KeyError()
future = f2()
with self.assertRaises(KeyError):
yield future
self.finished = True
@gen_test
def test_swallow_context_exception(self):
# Test exception handling: exceptions thrown into the stack context
# can be caught and ignored.
@gen.coroutine
def f2():
(yield gen.Callback(1))()
yield gen.Wait(1)
self.io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(self.io_loop.add_timeout,
self.io_loop.time() + 10)
except ZeroDivisionError:
raise gen.Return(42)
result = yield f2()
self.assertEqual(result, 42)
self.finished = True
@gen_test
def test_moment(self):
calls = []
@gen.coroutine
def f(name, yieldable):
for i in range(5):
calls.append(name)
yield yieldable
# First, confirm the behavior without moment: each coroutine
# monopolizes the event loop until it finishes.
immediate = Future()
immediate.set_result(None)
yield [f('a', immediate), f('b', immediate)]
self.assertEqual(''.join(calls), 'aaaaabbbbb')
# With moment, they take turns.
calls = []
yield [f('a', gen.moment), f('b', gen.moment)]
self.assertEqual(''.join(calls), 'ababababab')
self.finished = True
calls = []
yield [f('a', gen.moment), f('b', immediate)]
self.assertEqual(''.join(calls), 'abbbbbaaaa')
@gen_test
def test_sleep(self):
yield gen.sleep(0.01)
self.finished = True
@skipBefore33
@gen_test
def test_py3_leak_exception_context(self):
class LeakedException(Exception):
pass
@gen.coroutine
def inner(iteration):
raise LeakedException(iteration)
try:
yield inner(1)
except LeakedException as e:
self.assertEqual(str(e), "1")
self.assertIsNone(e.__context__)
try:
yield inner(2)
except LeakedException as e:
self.assertEqual(str(e), "2")
self.assertIsNone(e.__context__)
self.finished = True
class GenSequenceHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
self.io_loop = self.request.connection.stream.io_loop
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.write("1")
self.io_loop.add_callback((yield gen.Callback("k2")))
yield gen.Wait("k2")
self.write("2")
# reuse an old key
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.finish("3")
class GenCoroutineSequenceHandler(RequestHandler):
@gen.coroutine
def get(self):
self.io_loop = self.request.connection.stream.io_loop
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.write("1")
self.io_loop.add_callback((yield gen.Callback("k2")))
yield gen.Wait("k2")
self.write("2")
# reuse an old key
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.finish("3")
class GenCoroutineUnfinishedSequenceHandler(RequestHandler):
@asynchronous
@gen.coroutine
def get(self):
self.io_loop = self.request.connection.stream.io_loop
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
self.write("1")
self.io_loop.add_callback((yield gen.Callback("k2")))
yield gen.Wait("k2")
self.write("2")
# reuse an old key
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
# just write, don't finish
self.write("3")
class GenTaskHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
io_loop = self.request.connection.stream.io_loop
client = AsyncHTTPClient(io_loop=io_loop)
response = yield gen.Task(client.fetch, self.get_argument('url'))
response.rethrow()
self.finish(b"got response: " + response.body)
class GenExceptionHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
# This test depends on the order of the two decorators.
io_loop = self.request.connection.stream.io_loop
yield gen.Task(io_loop.add_callback)
raise Exception("oops")
class GenCoroutineExceptionHandler(RequestHandler):
@gen.coroutine
def get(self):
# This test depends on the order of the two decorators.
io_loop = self.request.connection.stream.io_loop
yield gen.Task(io_loop.add_callback)
raise Exception("oops")
class GenYieldExceptionHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
io_loop = self.request.connection.stream.io_loop
# Test the interaction of the two stack_contexts.
def fail_task(callback):
io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.finish('ok')
class UndecoratedCoroutinesHandler(RequestHandler):
@gen.coroutine
def prepare(self):
self.chunks = []
yield gen.Task(IOLoop.current().add_callback)
self.chunks.append('1')
@gen.coroutine
def get(self):
self.chunks.append('2')
yield gen.Task(IOLoop.current().add_callback)
self.chunks.append('3')
yield gen.Task(IOLoop.current().add_callback)
self.write(''.join(self.chunks))
class AsyncPrepareErrorHandler(RequestHandler):
@gen.coroutine
def prepare(self):
yield gen.Task(IOLoop.current().add_callback)
raise HTTPError(403)
def get(self):
self.finish('ok')
class NativeCoroutineHandler(RequestHandler):
if sys.version_info > (3, 5):
exec(textwrap.dedent("""
async def get(self):
await gen.Task(IOLoop.current().add_callback)
self.write("ok")
"""))
class GenWebTest(AsyncHTTPTestCase):
def get_app(self):
return Application([
('/sequence', GenSequenceHandler),
('/coroutine_sequence', GenCoroutineSequenceHandler),
('/coroutine_unfinished_sequence',
GenCoroutineUnfinishedSequenceHandler),
('/task', GenTaskHandler),
('/exception', GenExceptionHandler),
('/coroutine_exception', GenCoroutineExceptionHandler),
('/yield_exception', GenYieldExceptionHandler),
('/undecorated_coroutine', UndecoratedCoroutinesHandler),
('/async_prepare_error', AsyncPrepareErrorHandler),
('/native_coroutine', NativeCoroutineHandler),
])
def test_sequence_handler(self):
response = self.fetch('/sequence')
self.assertEqual(response.body, b"123")
def test_coroutine_sequence_handler(self):
response = self.fetch('/coroutine_sequence')
self.assertEqual(response.body, b"123")
def test_coroutine_unfinished_sequence_handler(self):
response = self.fetch('/coroutine_unfinished_sequence')
self.assertEqual(response.body, b"123")
def test_task_handler(self):
response = self.fetch('/task?url=%s' % url_escape(self.get_url('/sequence')))
self.assertEqual(response.body, b"got response: 123")
def test_exception_handler(self):
# Make sure we get an error and not a timeout
with ExpectLog(app_log, "Uncaught exception GET /exception"):
response = self.fetch('/exception')
self.assertEqual(500, response.code)
def test_coroutine_exception_handler(self):
# Make sure we get an error and not a timeout
with ExpectLog(app_log, "Uncaught exception GET /coroutine_exception"):
response = self.fetch('/coroutine_exception')
self.assertEqual(500, response.code)
def test_yield_exception_handler(self):
response = self.fetch('/yield_exception')
self.assertEqual(response.body, b'ok')
def test_undecorated_coroutines(self):
response = self.fetch('/undecorated_coroutine')
self.assertEqual(response.body, b'123')
def test_async_prepare_error_handler(self):
response = self.fetch('/async_prepare_error')
self.assertEqual(response.code, 403)
@skipBefore35
def test_native_coroutine_handler(self):
response = self.fetch('/native_coroutine')
self.assertEqual(response.code, 200)
self.assertEqual(response.body, b'ok')
class WithTimeoutTest(AsyncTestCase):
@gen_test
def test_timeout(self):
with self.assertRaises(gen.TimeoutError):
yield gen.with_timeout(datetime.timedelta(seconds=0.1),
Future())
@gen_test
def test_completes_before_timeout(self):
future = Future()
self.io_loop.add_timeout(datetime.timedelta(seconds=0.1),
lambda: future.set_result('asdf'))
result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
future, io_loop=self.io_loop)
self.assertEqual(result, 'asdf')
@gen_test
def test_fails_before_timeout(self):
future = Future()
self.io_loop.add_timeout(
datetime.timedelta(seconds=0.1),
lambda: future.set_exception(ZeroDivisionError()))
with self.assertRaises(ZeroDivisionError):
yield gen.with_timeout(datetime.timedelta(seconds=3600),
future, io_loop=self.io_loop)
@gen_test
def test_already_resolved(self):
future = Future()
future.set_result('asdf')
result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
future, io_loop=self.io_loop)
self.assertEqual(result, 'asdf')
@unittest.skipIf(futures is None, 'futures module not present')
@gen_test
def test_timeout_concurrent_future(self):
with futures.ThreadPoolExecutor(1) as executor:
with self.assertRaises(gen.TimeoutError):
yield gen.with_timeout(self.io_loop.time(),
executor.submit(time.sleep, 0.1))
@unittest.skipIf(futures is None, 'futures module not present')
@gen_test
def test_completed_concurrent_future(self):
with futures.ThreadPoolExecutor(1) as executor:
yield gen.with_timeout(datetime.timedelta(seconds=3600),
executor.submit(lambda: None))
class WaitIteratorTest(AsyncTestCase):
@gen_test
def test_empty_iterator(self):
g = gen.WaitIterator()
self.assertTrue(g.done(), 'empty generator iterated')
with self.assertRaises(ValueError):
g = gen.WaitIterator(False, bar=False)
self.assertEqual(g.current_index, None, "bad nil current index")
self.assertEqual(g.current_future, None, "bad nil current future")
@gen_test
def test_already_done(self):
f1 = Future()
f2 = Future()
f3 = Future()
f1.set_result(24)
f2.set_result(42)
f3.set_result(84)
g = gen.WaitIterator(f1, f2, f3)
i = 0
while not g.done():
r = yield g.next()
# Order is not guaranteed, but the current implementation
# preserves ordering of already-done Futures.
if i == 0:
self.assertEqual(g.current_index, 0)
self.assertIs(g.current_future, f1)
self.assertEqual(r, 24)
elif i == 1:
self.assertEqual(g.current_index, 1)
self.assertIs(g.current_future, f2)
self.assertEqual(r, 42)
elif i == 2:
self.assertEqual(g.current_index, 2)
self.assertIs(g.current_future, f3)
self.assertEqual(r, 84)
i += 1
self.assertEqual(g.current_index, None, "bad nil current index")
self.assertEqual(g.current_future, None, "bad nil current future")
dg = gen.WaitIterator(f1=f1, f2=f2)
while not dg.done():
dr = yield dg.next()
if dg.current_index == "f1":
self.assertTrue(dg.current_future == f1 and dr == 24,
"WaitIterator dict status incorrect")
elif dg.current_index == "f2":
self.assertTrue(dg.current_future == f2 and dr == 42,
"WaitIterator dict status incorrect")
else:
self.fail("got bad WaitIterator index {}".format(
dg.current_index))
i += 1
self.assertEqual(dg.current_index, None, "bad nil current index")
self.assertEqual(dg.current_future, None, "bad nil current future")
def finish_coroutines(self, iteration, futures):
if iteration == 3:
futures[2].set_result(24)
elif iteration == 5:
futures[0].set_exception(ZeroDivisionError())
elif iteration == 8:
futures[1].set_result(42)
futures[3].set_result(84)
if iteration < 8:
self.io_loop.add_callback(self.finish_coroutines, iteration + 1, futures)
@gen_test
def test_iterator(self):
futures = [Future(), Future(), Future(), Future()]
self.finish_coroutines(0, futures)
g = gen.WaitIterator(*futures)
i = 0
while not g.done():
try:
r = yield g.next()
except ZeroDivisionError:
self.assertIs(g.current_future, futures[0],
'exception future invalid')
else:
if i == 0:
self.assertEqual(r, 24, 'iterator value incorrect')
self.assertEqual(g.current_index, 2, 'wrong index')
elif i == 2:
self.assertEqual(r, 42, 'iterator value incorrect')
self.assertEqual(g.current_index, 1, 'wrong index')
elif i == 3:
self.assertEqual(r, 84, 'iterator value incorrect')
self.assertEqual(g.current_index, 3, 'wrong index')
i += 1
@skipBefore35
@gen_test
def test_iterator_async_await(self):
# Recreate the previous test with py35 syntax. It's a little clunky
# because of the way the previous test handles an exception on
# a single iteration.
futures = [Future(), Future(), Future(), Future()]
self.finish_coroutines(0, futures)
self.finished = False
namespace = exec_test(globals(), locals(), """
async def f():
i = 0
g = gen.WaitIterator(*futures)
try:
async for r in g:
if i == 0:
self.assertEqual(r, 24, 'iterator value incorrect')
self.assertEqual(g.current_index, 2, 'wrong index')
else:
raise Exception("expected exception on iteration 1")
i += 1
except ZeroDivisionError:
i += 1
async for r in g:
if i == 2:
self.assertEqual(r, 42, 'iterator value incorrect')
self.assertEqual(g.current_index, 1, 'wrong index')
elif i == 3:
self.assertEqual(r, 84, 'iterator value incorrect')
self.assertEqual(g.current_index, 3, 'wrong index')
else:
raise Exception("didn't expect iteration %d" % i)
i += 1
self.finished = True
""")
yield namespace['f']()
self.assertTrue(self.finished)
@gen_test
def test_no_ref(self):
# In this usage, there is no direct hard reference to the
# WaitIterator itself, only the Future it returns. Since
# WaitIterator uses weak references internally to improve GC
# performance, this used to cause problems.
yield gen.with_timeout(datetime.timedelta(seconds=0.1),
gen.WaitIterator(gen.sleep(0)).next())
if __name__ == '__main__':
unittest.main()
|
|
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for `multistep.py`."""
import functools
from absl.testing import absltest
from absl.testing import parameterized
import chex
import jax
import jax.numpy as jnp
import numpy as np
from rlax._src import multistep
class LambdaReturnsTest(parameterized.TestCase):
def setUp(self):
super().setUp()
self.lambda_ = 0.75
self.r_t = np.array(
[[1.0, 0.0, -1.0, 0.0, 1.0], [0.5, 0.8, -0.7, 0.0, 2.1]])
self.discount_t = np.array(
[[0.5, 0.9, 1.0, 0.5, 0.8], [0.9, 0.5, 0.3, 0.8, 0.7]])
self.v_t = np.array(
[[3.0, 1.0, 5.0, -5.0, 3.0], [-1.7, 1.2, 2.3, 2.2, 2.7]])
self.expected = np.array(
[[1.6460547, 0.72281253, 0.7375001, 0.6500001, 3.4],
[0.7866317, 0.9913063, 0.1101501, 2.834, 3.99]],
dtype=np.float32)
@chex.all_variants()
def test_lambda_returns_batch(self):
"""Tests for a full batch."""
lambda_returns = self.variant(jax.vmap(functools.partial(
multistep.lambda_returns, lambda_=self.lambda_)))
# Compute lambda return in batch.
actual = lambda_returns(self.r_t, self.discount_t, self.v_t)
# Test return estimate.
np.testing.assert_allclose(self.expected, actual, rtol=1e-5)
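# A minimal NumPy sketch (for reference only; not the rlax implementation) of
# the recursion that reproduces the expected values above, with v_t[i] read as
# the value estimate for step i + 1:
#   G_t = r_t + discount_t * ((1 - lambda_) * v_t + lambda_ * G_{t+1})
def _reference_lambda_returns(r_t, discount_t, v_t, lambda_):
  # Seed with the final value estimate so the last step reduces to
  # r_T + discount_T * v_T regardless of lambda_.
  g = v_t[-1]
  out = np.zeros_like(r_t)
  for t in reversed(range(len(r_t))):
    g = r_t[t] + discount_t[t] * ((1. - lambda_) * v_t[t] + lambda_ * g)
    out[t] = g
  return out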
class DiscountedReturnsTest(parameterized.TestCase):
def setUp(self):
super().setUp()
self.r_t = np.array(
[[1.0, 0.0, -1.0, 0.0, 1.0], [0.5, 0.8, -0.7, 0.0, 2.1]])
self.discount_t = np.array(
[[0.5, 0.9, 1.0, 0.5, 0.8], [0.9, 0.5, 0.3, 0.8, 0.7]])
self.v_t = np.array(
[[3.0, 1.0, 5.0, -5.0, 3.0], [-1.7, 1.2, 2.3, 2.2, 2.7]])
self.bootstrap_v = np.array([v[-1] for v in self.v_t])
self.expected = np.array(
[[1.315, 0.63000005, 0.70000005, 1.7, 3.4],
[1.33592, 0.9288, 0.2576, 3.192, 3.9899998]],
dtype=np.float32)
@chex.all_variants()
def test_discounted_returns_batch(self):
"""Tests for a single element."""
discounted_returns = self.variant(jax.vmap(multistep.discounted_returns))
# Compute discounted return.
actual_scalar = discounted_returns(self.r_t, self.discount_t,
self.bootstrap_v)
actual_vector = discounted_returns(self.r_t, self.discount_t, self.v_t)
# Test output.
np.testing.assert_allclose(self.expected, actual_scalar, rtol=1e-5)
np.testing.assert_allclose(self.expected, actual_vector, rtol=1e-5)
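# For reference, the expected values above follow the plain discounted-return
# recursion (a lambda-return with lambda_ = 1, so only the final bootstrap
# value matters):
#   G_t = r_t + discount_t * G_{t+1},  seeded with the bootstrap value beyond
#   the last step.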
class NStepBootstrappedReturnsTest(parameterized.TestCase):
def setUp(self):
super().setUp()
self.r_t = np.array(
[[1.0, 0.0, -1.0, 0.0, 1.0], [0.5, 0.8, -0.7, 0.0, 2.1]])
self.discount_t = np.array(
[[0.5, 0.9, 1.0, 0.5, 0.8], [0.9, 0.5, 0.3, 0.8, 0.7]])
self.v_t = np.array(
[[3.0, 1.0, 5.0, -5.0, 3.0], [-1.7, 1.2, 2.3, 2.2, 2.7]])
# Different expected results for different values of n.
self.expected = {}
self.expected[3] = np.array(
[[2.8, -3.15, 0.7, 1.7, 3.4], [1.2155, 0.714, 0.2576, 3.192, 3.99]],
dtype=np.float32)
self.expected[5] = np.array(
[[1.315, 0.63, 0.7, 1.7, 3.4], [1.33592, 0.9288, 0.2576, 3.192, 3.99]],
dtype=np.float32)
self.expected[7] = np.array(
[[1.315, 0.63, 0.7, 1.7, 3.4], [1.33592, 0.9288, 0.2576, 3.192, 3.99]],
dtype=np.float32)
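  # For reference (with v_t[i] read as the value estimate for step i + 1), the
  # expected values follow the truncated n-step recursion
  #   G_t^(1) = r_t + discount_t * v_t
  #   G_t^(n) = r_t + discount_t * G_{t+1}^(n-1)
  # where n shrinks to fit when fewer than n steps remain, which is why the
  # n = 5 and n = 7 expectations coincide for these length-5 sequences.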
@chex.all_variants()
@parameterized.named_parameters(
('smaller_n', 3,), ('equal_n', 5,), ('bigger_n', 7,))
def test_n_step_sequence_returns_batch(self, n):
"""Tests for a full batch."""
n_step_returns = self.variant(jax.vmap(functools.partial(
multistep.n_step_bootstrapped_returns, n=n)))
# Compute n-step return in batch.
actual = n_step_returns(self.r_t, self.discount_t, self.v_t)
# Test return estimate.
np.testing.assert_allclose(self.expected[n], actual, rtol=1e-5)
def test_reduces_to_lambda_returns(self):
"""Test function is the same as lambda_returns when n is sequence length."""
lambda_t = 0.75
n = len(self.r_t[0])
expected = multistep.lambda_returns(self.r_t[0], self.discount_t[0],
self.v_t[0], lambda_t)
actual = multistep.n_step_bootstrapped_returns(self.r_t[0],
self.discount_t[0],
self.v_t[0], n, lambda_t)
np.testing.assert_allclose(expected, actual, rtol=1e-5)
class TDErrorTest(parameterized.TestCase):
def setUp(self):
super().setUp()
self.r_t = np.array(
[[1.0, 0.0, -1.0, 0.0, 1.0], [0.5, 0.8, -0.7, 0.0, 2.1]])
self.discount_t = np.array(
[[0.5, 0.9, 1.0, 0.5, 0.8], [0.9, 0.5, 0.3, 0.8, 0.7]])
self.rho_tm1 = np.array(
[[0.5, 0.9, 1.3, 0.2, 0.8], [2., 0.1, 1., 0.4, 1.7]])
self.values = np.array(
[[3.0, 1.0, 5.0, -5.0, 3.0, 1.], [-1.7, 1.2, 2.3, 2.2, 2.7, 2.]])
@chex.all_variants()
def test_importance_corrected_td_errors_batch(self):
"""Tests equivalence to computing the error from a the lambda-return."""
# Vmap and optionally compile.
lambda_returns = self.variant(jax.vmap(multistep.lambda_returns))
td_errors = self.variant(jax.vmap(multistep.importance_corrected_td_errors))
# Compute multistep td-error with recursion on deltas.
td_direct = td_errors(self.r_t, self.discount_t, self.rho_tm1,
np.ones_like(self.discount_t), self.values)
# Compute off-policy corrected return, and derive td-error from it.
ls_ = np.concatenate((self.rho_tm1[:, 1:], [[1.], [1.]]), axis=1)
td_from_returns = self.rho_tm1 * (
lambda_returns(self.r_t, self.discount_t, self.values[:, 1:], ls_) -
self.values[:, :-1])
# Check equivalence.
np.testing.assert_allclose(td_direct, td_from_returns, rtol=1e-5)
class TruncatedGeneralizedAdvantageEstimationTest(parameterized.TestCase):
def setUp(self):
super().setUp()
self.r_t = jnp.array([[0., 0., 1., 0., -0.5],
[0., 0., 0., 0., 1.]])
self.v_t = jnp.array([[1., 4., -3., -2., -1., -1.],
[-3., -2., -1., 0.0, 5., -1.]])
self.discount_t = jnp.array([[0.99, 0.99, 0.99, 0.99, 0.99],
[0.9, 0.9, 0.9, 0.0, 0.9]])
self.dummy_rho_tm1 = jnp.array([[1., 1., 1., 1., 1],
[1., 1., 1., 1., 1.]])
self.array_lambda = jnp.array([[0.9, 0.9, 0.9, 0.9, 0.9],
[0.9, 0.9, 0.9, 0.9, 0.9]])
# Different expected results for different values of lambda.
self.expected = {}
self.expected[1.] = np.array(
[[-1.45118, -4.4557, 2.5396, 0.5249, -0.49],
[3., 2., 1., 0., -4.9]],
dtype=np.float32)
self.expected[0.7] = np.array(
[[-0.676979, -5.248167, 2.4846, 0.6704, -0.49],
[2.2899, 1.73, 1., 0., -4.9]],
dtype=np.float32)
self.expected[0.4] = np.array(
[[0.56731, -6.042, 2.3431, 0.815, -0.49],
[1.725, 1.46, 1., 0., -4.9]],
dtype=np.float32)
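  # For reference, the expected advantages above follow the usual truncated
  # GAE recursion (v_t has one more entry than r_t, covering steps 0..T):
  #   delta_t = r_t + discount_t * v_{t+1} - v_t
  #   A_t = delta_t + discount_t * lambda_ * A_{t+1}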
@chex.all_variants()
@parameterized.named_parameters(
('lambda1', 1.0),
('lambda0.7', 0.7),
('lambda0.4', 0.4))
def test_truncated_gae(self, lambda_):
"""Tests truncated GAE for a full batch."""
batched_advantage_fn_variant = self.variant(jax.vmap(
multistep.truncated_generalized_advantage_estimation,
in_axes=(0, 0, None, 0), out_axes=0))
actual = batched_advantage_fn_variant(
self.r_t, self.discount_t, lambda_, self.v_t)
np.testing.assert_allclose(self.expected[lambda_], actual, atol=1e-3)
@chex.all_variants()
def test_array_lambda(self):
"""Tests that truncated GAE is consistent with scalar or array lambda_."""
scalar_lambda_fn = self.variant(jax.vmap(
multistep.truncated_generalized_advantage_estimation,
in_axes=(0, 0, None, 0), out_axes=0))
array_lambda_fn = self.variant(jax.vmap(
multistep.truncated_generalized_advantage_estimation))
scalar_lambda_result = scalar_lambda_fn(
self.r_t, self.discount_t, 0.9, self.v_t)
array_lambda_result = array_lambda_fn(
self.r_t, self.discount_t, self.array_lambda, self.v_t)
np.testing.assert_allclose(scalar_lambda_result, array_lambda_result,
atol=1e-3)
@chex.all_variants()
@parameterized.named_parameters(
('lambda_1', 1.0),
('lambda_0.7', 0.7),
('lambda_0.4', 0.4))
def test_gae_as_special_case_of_importance_corrected_td_errors(self, lambda_):
"""Tests truncated GAE.
Tests that truncated GAE yields same output as importance corrected
td errors with dummy ratios.
Args:
lambda_: a lambda to use in GAE.
"""
batched_gae_fn_variant = self.variant(jax.vmap(
multistep.truncated_generalized_advantage_estimation,
in_axes=(0, 0, None, 0), out_axes=0))
gae_result = batched_gae_fn_variant(
self.r_t, self.discount_t, lambda_, self.v_t)
batched_ictd_errors_fn_variant = self.variant(jax.vmap(
multistep.importance_corrected_td_errors))
ictd_errors_result = batched_ictd_errors_fn_variant(
self.r_t,
self.discount_t,
self.dummy_rho_tm1,
jnp.ones_like(self.discount_t) * lambda_,
self.v_t)
np.testing.assert_allclose(gae_result, ictd_errors_result, atol=1e-3)
if __name__ == '__main__':
jax.config.update('jax_numpy_rank_promotion', 'raise')
absltest.main()
|
|
# Copyright (c) 2015, Malte Schwarzkopf
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the project, the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Simple script which takes files each containing one Naiad barrier latency
# (in milliseconds) per line and plots their cumulative distributions (CDFs).
import os, sys, re
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import pylab
from utils import *
#-----------
def serialize_hist(n, bins, outdir):
  nf = open(outdir + "/binvals.txt", "w")
  for val in n:
    nf.write(str(val) + "\n")
  nf.close()
  bf = open(outdir + "/bins.txt", "w")
  for val in bins:
    bf.write(str(val) + "\n")
  bf.close()
#-----------
if len(sys.argv) < 2:
  print "usage: plot_naiad_latency_cdfs.py <input file 1> <label 1> ... " \
        "<input file n> <label n> [output file]"
  sys.exit(1)
paper_mode = True
if (len(sys.argv) - 1) % 2 != 0:
outname = sys.argv[-1]
del sys.argv[-1]
else:
outname = "naiad_latency"
fnames = []
labels = []
for i in range(0, len(sys.argv) - 1, 2):
fnames.append(sys.argv[1 + i])
labels.append(sys.argv[2 + i])
if paper_mode:
fig = plt.figure(figsize=(2.33,1.55))
set_paper_rcs()
else:
fig = plt.figure()
set_rcs()
if paper_mode:
colors = paper_colors
# colors[2] = paper_colors[1]
# colors[1] = paper_colors[3]
else:
colors = ['b', 'r', 'g', 'c', 'm', 'y', 'k', '0.5']
i = 0
outliers_ignored = 0
for f in fnames:
# initial info
print "Analyzing %s:" % (f)
# parsing
j = 0
values = []
for line in open(f).readlines():
delay = float(line.strip()) * 1000
if delay < 90000: # 90ms
values.append(delay)
else:
outliers_ignored += 1
j += 1
# info output
print "--------------------------------------"
print "%s (%s)" % (labels[i], f)
print "--------------------------------------"
print "%d total samples" % (j)
print "%d outliers ignored" % (outliers_ignored)
print "--------------------------------------"
avg = np.mean(values)
print "AVG: %f" % (avg)
median = np.median(values)
print "MEDIAN: %f" % (median)
  min_val = np.min(values)
  print "MIN: %f" % (min_val)
  max_val = np.max(values)
  print "MAX: %f" % (max_val)
stddev = np.std(values)
print "STDEV: %f" % (stddev)
print "PERCENTILES:"
  perc1 = np.percentile(values, 1)
  print "  1st: %f" % (perc1)
  perc10 = np.percentile(values, 10)
  print " 10th: %f" % (perc10)
  perc25 = np.percentile(values, 25)
  print " 25th: %f" % (perc25)
  perc50 = np.percentile(values, 50)
  print " 50th: %f" % (perc50)
  perc75 = np.percentile(values, 75)
  print " 75th: %f" % (perc75)
  perc90 = np.percentile(values, 90)
  print " 90th: %f" % (perc90)
  perc99 = np.percentile(values, 99)
  print " 99th: %f" % (perc99)
# print "COPYABLE:"
# print avg
# print stddev
# print max_val
# print min_val
# print perc1
# print perc10
# print perc25
# print perc50
# print perc75
# print perc90
# print perc99
# figure out number of bins based on range
  bin_width = 1  # 1us bins (latency values are in microseconds at this point)
  bin_range = max_val - min_val
  num_bins = int(min(100000, bin_range / bin_width))
print "Binning into %d bins and plotting..." % (num_bins)
# plotting
if paper_mode:
# plt.rc("font", size=8.0)
if i % 3 == 0:
style = 'solid'
elif i % 3 == 1:
style = 'dashed'
else:
style = 'dotted'
(n, bins, patches) = plt.hist(values, bins=num_bins, log=False, normed=True,
cumulative=True, histtype="step",
color=paper_colors[i % len(paper_colors)],
linestyle=style)
# hack to remove vertical bar
patches[0].set_xy(patches[0].get_xy()[:-1])
# hack to add line to legend
plt.plot([-100], [-100], label=labels[i],
color=paper_colors[i % len(paper_colors)],
linestyle=style, lw=1.0)
# serialize_hist(n, bins, os.path.dirname(outname))
else:
(n, bins, patches) = plt.hist(values, bins=num_bins, log=False, normed=True,
cumulative=True, histtype="step",
label=labels[i])
# hack to remove vertical bar
patches[0].set_xy(patches[0].get_xy()[:-1])
# serialize_hist(n, bins, os.path.dirname(outname))
i += 1
#plt.xticks(rotation=45)
# plt.xscale("log")
plt.xticks(range(0, 2001, 500), [str(x) for x in range(0, 2001, 500)])
plt.yticks(np.arange(0.0, 1.01, 0.2), [str(x) for x in np.arange(0.0, 1.01, 0.2)])
plt.xlim(0, 2250)
plt.ylim(0, 1.0)
plt.xlabel("Barrier sync.~latency [$\mu$s]")
#plt.ylabel("Cumulative distribution of latency")
#print n
#print bins
plt.legend(loc=4, frameon=False, borderaxespad=0.5, handlelength=2.5,
handletextpad=0.2)
#plt.legend(loc=8)
#plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.savefig("%s.pdf" % outname, format="pdf", bbox_inches="tight")
plt.ylim(0.90, 1.0)
#plt.legend(bbox_to_anchor=(-0.2, 1.02, 1.3, .102), loc=3, ncol=3, mode="expand",
# borderaxespad=0., handlelength=2.5, handletextpad=0.2)
plt.legend(loc='lower right', frameon=False, borderaxespad=0.2,
handlelength=2.5, handletextpad=0.2)
leg = plt.gca().get_legend()
frame = leg.get_frame()
frame.set_edgecolor('1.0')
frame.set_alpha(0.0)
plt.yticks(np.arange(0.9, 1.01, 0.02),
[str(x) for x in np.arange(0.9, 1.01, 0.02)])
#plt.axhline(0.999, ls='--', color='k')
plt.savefig("%s-99th.pdf" % outname, format="pdf", bbox_inches="tight")
|
|
#!/usr/bin/env python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Schema processing for discovery based APIs
Schemas holds an API's discovery schemas. It can return those schemas as
deserialized JSON objects, or pretty print them as prototype objects that
conform to the schema.
For example, given the schema:
schema = \"\"\"{
"Foo": {
"type": "object",
"properties": {
"etag": {
"type": "string",
"description": "ETag of the collection."
},
"kind": {
"type": "string",
"description": "Type of the collection ('calendar#acl').",
"default": "calendar#acl"
},
"nextPageToken": {
"type": "string",
"description": "Token used to access the next
page of this result. Omitted if no further results are available."
}
}
}
}\"\"\"
s = Schemas(schema)
print s.prettyPrintByName('Foo')
Produces the following output:
{
"nextPageToken": "A String", # Token used to access the
# next page of this result. Omitted if no further results are available.
"kind": "A String", # Type of the collection ('calendar#acl').
"etag": "A String", # ETag of the collection.
},
The constructor takes a discovery document in which to look up named schema.
"""
# TODO(user) support format, enum, minimum, maximum
import copy
from oauth2client import util
class Schemas(object):
"""Schemas for an API."""
def __init__(self, discovery):
"""Constructor.
Args:
discovery: object, Deserialized discovery document from which we pull
out the named schema.
"""
self.schemas = discovery.get('schemas', {})
# Cache of pretty printed schemas.
self.pretty = {}
@util.positional(2)
def _prettyPrintByName(self, name, seen=None, dent=0):
"""Get pretty printed object prototype from the schema name.
Args:
name: string, Name of schema in the discovery document.
      seen: list of string, Names of schema already seen. Used to handle
        recursive definitions.
      dent: int, Initial indentation depth.
Returns:
string, A string that contains a prototype object with
comments that conforms to the given schema.
"""
if seen is None:
seen = []
if name in seen:
# Do not fall into an infinite loop over recursive definitions.
return '# Object with schema name: %s' % name
seen.append(name)
if name not in self.pretty:
self.pretty[name] = _SchemaToStruct(self.schemas[name],
seen, dent=dent).to_str(self._prettyPrintByName)
seen.pop()
return self.pretty[name]
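  # Illustrative note (hypothetical schema): given a self-referential schema
  # such as {"Tree": {"type": "object", "properties": {"child": {"$ref":
  # "Tree"}}}}, the 'seen' list above makes the nested occurrence render as
  # '# Object with schema name: Tree' instead of recursing forever.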
def prettyPrintByName(self, name):
"""Get pretty printed object prototype from the schema name.
Args:
name: string, Name of schema in the discovery document.
Returns:
string, A string that contains a prototype object with
comments that conforms to the given schema.
"""
# Return with trailing comma and newline removed.
return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
@util.positional(2)
def _prettyPrintSchema(self, schema, seen=None, dent=0):
"""Get pretty printed object prototype of schema.
Args:
schema: object, Parsed JSON schema.
      seen: list of string, Names of schema already seen. Used to handle
        recursive definitions.
      dent: int, Initial indentation depth.
Returns:
string, A string that contains a prototype object with
comments that conforms to the given schema.
"""
if seen is None:
seen = []
return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
def prettyPrintSchema(self, schema):
"""Get pretty printed object prototype of schema.
Args:
schema: object, Parsed JSON schema.
Returns:
string, A string that contains a prototype object with
comments that conforms to the given schema.
"""
# Return with trailing comma and newline removed.
return self._prettyPrintSchema(schema, dent=1)[:-2]
def get(self, name):
"""Get deserialized JSON schema from the schema name.
Args:
name: string, Schema name.
"""
return self.schemas[name]
class _SchemaToStruct(object):
"""Convert schema to a prototype object."""
@util.positional(3)
def __init__(self, schema, seen, dent=0):
"""Constructor.
Args:
schema: object, Parsed JSON schema.
seen: list, List of names of schema already seen while parsing. Used to
handle recursive definitions.
dent: int, Initial indentation depth.
"""
# The result of this parsing kept as list of strings.
self.value = []
# The final value of the parsing.
self.string = None
# The parsed JSON schema.
self.schema = schema
# Indentation level.
self.dent = dent
# Method that when called returns a prototype object for the schema with
# the given name.
self.from_cache = None
# List of names of schema already seen while parsing.
self.seen = seen
def emit(self, text):
"""Add text as a line to the output.
Args:
text: string, Text to output.
"""
self.value.extend([" " * self.dent, text, '\n'])
def emitBegin(self, text):
"""Add text to the output, but with no line terminator.
Args:
text: string, Text to output.
"""
self.value.extend([" " * self.dent, text])
def emitEnd(self, text, comment):
"""Add text and comment to the output with line terminator.
Args:
text: string, Text to output.
comment: string, Python comment.
"""
if comment:
divider = '\n' + ' ' * (self.dent + 2) + '# '
lines = comment.splitlines()
lines = [x.rstrip() for x in lines]
comment = divider.join(lines)
self.value.extend([text, ' # ', comment, '\n'])
else:
self.value.extend([text, '\n'])
def indent(self):
"""Increase indentation level."""
self.dent += 1
def undent(self):
"""Decrease indentation level."""
self.dent -= 1
def _to_str_impl(self, schema):
"""Prototype object based on the schema, in Python code with comments.
Args:
schema: object, Parsed JSON schema file.
Returns:
Prototype object based on the schema, in Python code with comments.
"""
stype = schema.get('type')
if stype == 'object':
self.emitEnd('{', schema.get('description', ''))
self.indent()
if 'properties' in schema:
for pname, pschema in schema.get('properties', {}).iteritems():
self.emitBegin('"%s": ' % pname)
self._to_str_impl(pschema)
elif 'additionalProperties' in schema:
self.emitBegin('"a_key": ')
self._to_str_impl(schema['additionalProperties'])
self.undent()
self.emit('},')
elif '$ref' in schema:
schemaName = schema['$ref']
description = schema.get('description', '')
s = self.from_cache(schemaName, seen=self.seen)
parts = s.splitlines()
self.emitEnd(parts[0], description)
for line in parts[1:]:
self.emit(line.rstrip())
elif stype == 'boolean':
value = schema.get('default', 'True or False')
self.emitEnd('%s,' % str(value), schema.get('description', ''))
elif stype == 'string':
value = schema.get('default', 'A String')
self.emitEnd('"%s",' % str(value), schema.get('description', ''))
elif stype == 'integer':
value = schema.get('default', '42')
self.emitEnd('%s,' % str(value), schema.get('description', ''))
elif stype == 'number':
value = schema.get('default', '3.14')
self.emitEnd('%s,' % str(value), schema.get('description', ''))
elif stype == 'null':
self.emitEnd('None,', schema.get('description', ''))
elif stype == 'any':
self.emitEnd('"",', schema.get('description', ''))
elif stype == 'array':
      self.emitEnd('[', schema.get('description', ''))
self.indent()
self.emitBegin('')
self._to_str_impl(schema['items'])
self.undent()
self.emit('],')
else:
self.emit('Unknown type! %s' % stype)
self.emitEnd('', '')
self.string = ''.join(self.value)
return self.string
def to_str(self, from_cache):
"""Prototype object based on the schema, in Python code with comments.
Args:
from_cache: callable(name, seen), Callable that retrieves an object
prototype for a schema with the given name. Seen is a list of schema
names already seen as we recursively descend the schema definition.
Returns:
Prototype object based on the schema, in Python code with comments.
The lines of the code will all be properly indented.
"""
self.from_cache = from_cache
return self._to_str_impl(self.schema)
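# Illustrative usage sketch (not part of the library): render a prototype for
# a small inline schema. The schema literal and the function below are made up
# for this example; ``from_cache`` is only consulted for '$ref' schemas, so a
# stub suffices here. ``dent`` must be passed by keyword because of
# @util.positional(3) on the constructor.
def _example_schema_to_struct():
  example = {
      'type': 'object',
      'description': 'A toy resource.',
      'properties': {
          'name': {'type': 'string', 'description': 'Display name.'},
          'size': {'type': 'integer'},
      },
  }
  s2s = _SchemaToStruct(example, [], dent=0)
  return s2s.to_str(lambda name, seen=None: '{},\n')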
|
|
# Django settings
import os
import json
from boto.s3.connection import S3Connection
from boto.s3.key import Key
PROJECT_PATH = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# read secrets from json
print "PROJECT_PATH: " + str(PROJECT_PATH)
SECRETS_PATH = os.path.join(PROJECT_PATH, "secret.json")
LOCAL = os.environ.get("LOCAL")
if LOCAL:
    with open(SECRETS_PATH, "r") as secrets_file:
        SECRETS_DICT = json.loads(secrets_file.read())
else:
SECRETS_ENV = os.environ.get("SECRETS")
SECRETS_DICT = json.loads(SECRETS_ENV)
def getS3Credentials():
aws_access_key_id = SECRETS_DICT["AWS_ACCESS_KEY_ID"]
aws_secret_access_key = SECRETS_DICT["AWS_SECRET_ACCESS_KEY"]
return aws_access_key_id, aws_secret_access_key
def getS3Connection():
aws_access_key_id, aws_secret_access_key = getS3Credentials()
conn = S3Connection(aws_access_key_id, aws_secret_access_key)
return conn
def getTrueSpeakBucket():
conn = getS3Connection()
return conn.get_bucket("truespeak")
def getOrCreateS3Key(key_name):
bucket = getTrueSpeakBucket()
    # bucket.get_key returns None for a missing key, so the AttributeError
    # from get_contents_as_string lands us in the creation branch below.
    try:
        key = bucket.get_key(key_name)
        key_json = key.get_contents_as_string()
    except Exception:
        key = Key(bucket)
        key.key = key_name
        key_json = json.dumps({})
    try:
        key_dict = json.loads(key_json)
    except ValueError:
        key_dict = {}
return key, key_dict
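# Illustrative sketch (not part of these settings): getOrCreateS3Key returns
# both the boto Key and its decoded JSON dict, so callers can mutate the dict
# and write it back. The key name below is made up for this example.
def exampleUpdateS3Counter():
    key, key_dict = getOrCreateS3Key("example_counter.json")
    key_dict["count"] = key_dict.get("count", 0) + 1
    key.set_contents_from_string(json.dumps(key_dict))
    return key_dict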
DEBUG = True
APPEND_SLASH = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
ADMIN_EMAILS = [
"maxhfowler@gmail.com"
]
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': '', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
if LOCAL:
STATIC_ROOT = os.path.join(PROJECT_PATH, "static")
else:
STATIC_ROOT = os.path.join(PROJECT_PATH, "staticfiles")
print "STATIC_ROOT: " + str(STATIC_ROOT)
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_PATH, "django_yaba/media"),
os.path.join(PROJECT_PATH, "mhf/static"),
os.path.join(PROJECT_PATH, "greenlightning/static"),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '&66fite8%&_#6v-_j+eg_d2@^^8+r3gcvufp-aee0%g$hd85@x'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'wsgi.application'
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, 'mhf/templates'),
os.path.join(PROJECT_PATH, 'greenlightning/templates'),
os.path.join(PROJECT_PATH, 'truespeak/templates')
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'mhf',
'tagging',
'disqus',
'django_yaba',
'south',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
#### DJANGO YABA SETTINGS django_yaba
# GitHub UserName for sidebar GitHub List - Leave blank if you don't want to use it
GITHUB_USERNAME = 'maximusfowler'
# Twitter UserName for sidebar Twitter List and Automatic Tweets
TWITTER_USERNAME = 'MaxFowler2'
TWITTER_PASSWORD = SECRETS_DICT['TWITTER_PASSWORD']
# Blog Name
BLOG_NAME = 'mhfowler'
# Blog URL
if LOCAL:
ROOT_BLOG_URL = "http://127.0.0.1:8000/blog/"
else:
ROOT_BLOG_URL = 'http://mhfowler.com/blog/'
# Root system path
PROJECT_DIR = PROJECT_PATH
# Disqus Settings
DISQUS_API_KEY = SECRETS_DICT['DISQUS_API_KEY']
DISQUS_WEBSITE_SHORTNAME = "mhfowler"
# If you want to use contrib.comments set the following to True
DJANGO_COMMENTS = False
# EMAIL SETTINGS
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = SECRETS_DICT["GMAIL_USER"]
EMAIL_HOST_PASSWORD = SECRETS_DICT["GMAIL_PASSWORD"]
DEFAULT_FROM_EMAIL = SECRETS_DICT["GMAIL_USER"]
|
|
"""Sensitive variant calling using VarDict.
Defaults to using the faster, equally sensitive Java port:
https://github.com/AstraZeneca-NGS/VarDictJava
if 'vardict' or 'vardict-java' is specified in the configuration. To use the
VarDict perl version:
https://github.com/AstraZeneca-NGS/VarDict
specify 'vardict-perl'.
"""
import os
import itertools
import sys
import toolz as tz
import pybedtools
from bcbio import broad, utils
from bcbio.bam import highdepth
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import config_utils, shared
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do
from bcbio.variation import annotation, bamprep, vcfutils
def _is_bed_file(target):
return target and isinstance(target, basestring) and os.path.isfile(target)
def _vardict_options_from_config(items, config, out_file, target=None):
opts = ["-c 1", "-S 2", "-E 3", "-g 4"]
# ["-z", "-F", "-c", "1", "-S", "2", "-E", "3", "-g", "4", "-x", "0",
# "-k", "3", "-r", "4", "-m", "8"]
# remove low mapping quality reads
opts += ["-Q", "10"]
resources = config_utils.get_resources("vardict", config)
if resources.get("options"):
opts += resources["options"]
assert _is_bed_file(target)
if any(tz.get_in(["config", "algorithm", "coverage_interval"], x, "").lower() == "genome"
for x in items):
target = shared.remove_highdepth_regions(target, items)
target = shared.remove_lcr_regions(target, items)
target = _enforce_max_region_size(target, items[0])
opts += [target] # this must be the last option
return opts
def _enforce_max_region_size(in_file, data):
"""Ensure we don't have any chunks in the region greater than 1Mb.
Larger sections have high memory usage on VarDictJava and failures
on VarDict. This creates minimum windows from the input BED file
to avoid these issues. Downstream VarDict merging sorts out any
variants across windows.
"""
max_size = 1e6
overlap_size = 250
def _has_larger_regions(f):
return any(r.stop - r.start > max_size for r in pybedtools.BedTool(f))
out_file = "%s-regionlimit%s" % utils.splitext_plus(in_file)
if not utils.file_exists(out_file):
if _has_larger_regions(in_file):
with file_transaction(data, out_file) as tx_out_file:
pybedtools.BedTool().window_maker(w=max_size,
s=max_size - overlap_size,
b=pybedtools.BedTool(in_file)).saveas(tx_out_file)
else:
utils.symlink_plus(in_file, out_file)
return out_file
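# Illustrative sketch (not part of bcbio): a pure-Python approximation of the
# window coordinates bedtools produces above, to show how the 250bp overlap
# falls out of w and s. The real coordinates come from pybedtools; this helper
# exists only for this example.
def _example_window_coords(region_size=2500000, w=1000000, s=1000000 - 250):
    """Toy window maker: start a window every s bases, each w wide."""
    return [(start, min(start + w, region_size))
            for start in range(0, region_size, s)]
# _example_window_coords() => [(0, 1000000), (999750, 1999750), (1999500, 2500000)]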
def run_vardict(align_bams, items, ref_file, assoc_files, region=None,
out_file=None):
"""Run VarDict variant calling.
"""
if vcfutils.is_paired_analysis(align_bams, items):
call_file = _run_vardict_paired(align_bams, items, ref_file,
assoc_files, region, out_file)
else:
vcfutils.check_paired_problems(items)
call_file = _run_vardict_caller(align_bams, items, ref_file,
assoc_files, region, out_file)
return call_file
def _get_jvm_opts(data, out_file):
"""Retrieve JVM options when running the Java version of VarDict.
"""
if get_vardict_command(data) == "vardict-java":
resources = config_utils.get_resources("vardict", data["config"])
jvm_opts = resources.get("jvm_opts", ["-Xms750m", "-Xmx4g"])
jvm_opts += broad.get_default_jvm_opts(os.path.dirname(out_file))
return "export VAR_DICT_OPTS='%s' && " % " ".join(jvm_opts)
else:
return ""
def _run_vardict_caller(align_bams, items, ref_file, assoc_files,
region=None, out_file=None):
"""Detect SNPs and indels with VarDict.
"""
config = items[0]["config"]
if out_file is None:
out_file = "%s-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
if not utils.file_exists(out_file):
with file_transaction(items[0], out_file) as tx_out_file:
target = shared.subset_variant_regions(dd.get_variant_regions(items[0]), region,
out_file, do_merge=False)
num_bams = len(align_bams)
sample_vcf_names = [] # for individual sample names, given batch calling may be required
for bamfile, item in itertools.izip(align_bams, items):
# prepare commands
sample = dd.get_sample_name(item)
vardict = get_vardict_command(items[0])
strandbias = "teststrandbias.R"
var2vcf = "var2vcf_valid.pl"
opts = (" ".join(_vardict_options_from_config(items, config, out_file, target))
if _is_bed_file(target) else "")
vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
freq = float(utils.get_in(config, ("algorithm", "min_allele_fraction"), 10)) / 100.0
coverage_interval = utils.get_in(config, ("algorithm", "coverage_interval"), "exome")
                # for deep targeted panels, require at least 50 reads supporting a variant
var2vcf_opts = " -v 50 " if highdepth.get_median_coverage(items[0]) > 5000 else ""
fix_ambig = vcfutils.fix_ambiguous_cl()
remove_dup = vcfutils.remove_dup_cl()
jvm_opts = _get_jvm_opts(items[0], tx_out_file)
r_setup = "unset R_HOME && export PATH=%s:$PATH && " % os.path.dirname(utils.Rscript_cmd())
cmd = ("{r_setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} "
"-N {sample} -b {bamfile} {opts} "
"| {strandbias}"
"| {var2vcf} -N {sample} -E -f {freq} {var2vcf_opts} "
"| {fix_ambig} | {remove_dup} | {vcfstreamsort} {compress_cmd}")
if num_bams > 1:
temp_file_prefix = out_file.replace(".gz", "").replace(".vcf", "") + item["name"][1]
tmp_out = temp_file_prefix + ".temp.vcf"
tmp_out += ".gz" if out_file.endswith("gz") else ""
sample_vcf_names.append(tmp_out)
with file_transaction(item, tmp_out) as tx_tmp_file:
if not _is_bed_file(target):
vcfutils.write_empty_vcf(tx_tmp_file, config, samples=[sample])
else:
cmd += " > {tx_tmp_file}"
do.run(cmd.format(**locals()), "Genotyping with VarDict: Inference", {})
else:
if not _is_bed_file(target):
vcfutils.write_empty_vcf(tx_out_file, config, samples=[sample])
else:
cmd += " > {tx_out_file}"
do.run(cmd.format(**locals()), "Genotyping with VarDict: Inference", {})
if num_bams > 1:
# N.B. merge_variant_files wants region in 1-based end-inclusive
# coordinates. Thus use bamprep.region_to_gatk
vcfutils.merge_variant_files(orig_files=sample_vcf_names,
out_file=tx_out_file, ref_file=ref_file,
config=config, region=bamprep.region_to_gatk(region))
out_file = (annotation.add_dbsnp(out_file, assoc_files["dbsnp"], config)
if assoc_files.get("dbsnp") else out_file)
return out_file
def _safe_to_float(x):
if x is None:
return None
else:
try:
return float(x)
except ValueError:
return None
def depth_freq_filter(line, tumor_index, aligner):
"""Command line to filter VarDict calls based on depth, frequency and quality.
Looks at regions with low depth for allele frequency (AF * DP < 6, the equivalent
of < 13bp for heterogygote calls, but generalized. Within these calls filters if a
calls has:
- Low mapping quality and multiple mismatches in a read (NM)
For bwa only: MQ < 55.0 and NM > 1.0 or MQ < 60.0 and NM > 2.0
- Low depth (DP < 10)
- Low QUAL (QUAL < 45)
Also filters in low allele frequency regions with poor quality, if all of these are
true:
- Allele frequency < 0.2
- Quality < 55
- P-value (SSF) > 0.06
"""
if line.startswith("#CHROM"):
headers = [('##FILTER=<ID=LowAlleleDepth,Description="Low depth per allele frequency '
'along with poor depth, quality, mapping quality and read mismatches.">'),
('##FILTER=<ID=LowFreqQuality,Description="Low frequency read with '
'poor quality and p-value (SSF).">')]
return "\n".join(headers) + "\n" + line
elif line.startswith("#"):
return line
else:
parts = line.split("\t")
sample_ft = {a: v for (a, v) in zip(parts[8].split(":"), parts[9 + tumor_index].split(":"))}
qual = _safe_to_float(parts[5])
dp = _safe_to_float(sample_ft.get("DP"))
af = _safe_to_float(sample_ft.get("AF"))
nm = _safe_to_float(sample_ft.get("NM"))
mq = _safe_to_float(sample_ft.get("MQ"))
ssfs = [x for x in parts[7].split(";") if x.startswith("SSF=")]
pval = _safe_to_float(ssfs[0].split("=")[-1] if ssfs else None)
fname = None
if dp is not None and af is not None:
if dp * af < 6:
if aligner == "bwa" and nm is not None and mq is not None:
if (mq < 55.0 and nm > 1.0) or (mq < 60.0 and nm > 2.0):
fname = "LowAlleleDepth"
if dp < 10:
fname = "LowAlleleDepth"
if qual is not None and qual < 45:
fname = "LowAlleleDepth"
if af is not None and qual is not None and pval is not None:
if af < 0.2 and qual < 55 and pval > 0.06:
fname = "LowFreqQuality"
if fname:
if parts[6] in set([".", "PASS"]):
parts[6] = fname
else:
parts[6] += ";%s" % fname
line = "\t".join(parts)
return line
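# Illustrative usage sketch (not part of bcbio): depth_freq_filter operates on
# single VCF lines, so it can annotate FILTER fields in a stream. The file
# names and the tumor_index/aligner values are assumptions for this example.
def _example_filter_vcf(in_vcf, out_vcf, tumor_index=0, aligner="bwa"):
    with open(in_vcf) as in_handle, open(out_vcf, "w") as out_handle:
        for line in in_handle:
            out_handle.write(depth_freq_filter(line, tumor_index, aligner))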
def _run_vardict_paired(align_bams, items, ref_file, assoc_files,
region=None, out_file=None):
"""Detect variants with Vardict.
This is used for paired tumor / normal samples.
"""
config = items[0]["config"]
if out_file is None:
out_file = "%s-paired-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
if not utils.file_exists(out_file):
with file_transaction(items[0], out_file) as tx_out_file:
target = shared.subset_variant_regions(dd.get_variant_regions(items[0]), region,
out_file, do_merge=True)
paired = vcfutils.get_paired_bams(align_bams, items)
if not _is_bed_file(target):
vcfutils.write_empty_vcf(tx_out_file, config,
samples=[x for x in [paired.tumor_name, paired.normal_name] if x])
else:
if not paired.normal_bam:
ann_file = _run_vardict_caller(align_bams, items, ref_file,
assoc_files, region, out_file)
return ann_file
vcffilter = config_utils.get_program("vcffilter", config)
vardict = get_vardict_command(items[0])
vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
strandbias = "testsomatic.R"
var2vcf = "var2vcf_paired.pl"
compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
freq = float(utils.get_in(config, ("algorithm", "min_allele_fraction"), 10)) / 100.0
# merge bed file regions as amplicon VarDict is only supported in single sample mode
opts = " ".join(_vardict_options_from_config(items, config, out_file, target))
coverage_interval = utils.get_in(config, ("algorithm", "coverage_interval"), "exome")
                # for deep targeted panels, require at least 50 reads supporting a variant
var2vcf_opts = " -v 50 " if highdepth.get_median_coverage(items[0]) > 5000 else ""
fix_ambig = vcfutils.fix_ambiguous_cl()
remove_dup = vcfutils.remove_dup_cl()
if any("vardict_somatic_filter" in tz.get_in(("config", "algorithm", "tools_off"), data, [])
for data in items):
somatic_filter = ""
freq_filter = ""
else:
var2vcf_opts += " -M " # this makes VarDict soft filter non-differential variants
somatic_filter = ("| sed 's/\\\\.*Somatic\\\\/Somatic/' "
"| sed 's/REJECT,Description=\".*\">/REJECT,Description=\"Not Somatic via VarDict\">/' "
"| %s -x 'bcbio.variation.freebayes.call_somatic(x)'" %
os.path.join(os.path.dirname(sys.executable), "py"))
freq_filter = ("| bcftools filter -m '+' -s 'REJECT' -e 'STATUS !~ \".*Somatic\"' 2> /dev/null "
"| %s -x 'bcbio.variation.vardict.depth_freq_filter(x, %s, \"%s\")'" %
(os.path.join(os.path.dirname(sys.executable), "py"),
0, dd.get_aligner(paired.tumor_data)))
jvm_opts = _get_jvm_opts(items[0], tx_out_file)
r_setup = "unset R_HOME && export PATH=%s:$PATH && " % os.path.dirname(utils.Rscript_cmd())
cmd = ("{r_setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} "
"-N {paired.tumor_name} -b \"{paired.tumor_bam}|{paired.normal_bam}\" {opts} "
"| {strandbias} "
"| {var2vcf} -P 0.9 -m 4.25 -f {freq} {var2vcf_opts} "
"-N \"{paired.tumor_name}|{paired.normal_name}\" "
"{freq_filter} "
"{somatic_filter} | {fix_ambig} | {remove_dup} | {vcfstreamsort} "
"{compress_cmd} > {tx_out_file}")
do.run(cmd.format(**locals()), "Genotyping with VarDict: Inference", {})
out_file = (annotation.add_dbsnp(out_file, assoc_files["dbsnp"], config)
if assoc_files.get("dbsnp") else out_file)
return out_file
def get_vardict_command(data):
"""
convert variantcaller specification to proper vardict command, handling
string or list specification
"""
vcaller = dd.get_variantcaller(data)
if isinstance(vcaller, list):
vardict = [x for x in vcaller if "vardict" in x]
if not vardict:
return None
vardict = vardict[0]
elif not vcaller:
return None
else:
vardict = vcaller
vardict = "vardict-java" if not vardict.endswith("-perl") else "vardict"
return vardict
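# Illustrative note (not part of bcbio): the mapping above sends any plain
# "vardict" or "vardict-java" specification to the Java port and only an
# explicit "vardict-perl" specification to the original Perl implementation;
# list specifications pick the first entry containing "vardict".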
|
|
from django.views.generic import CreateView, DetailView, UpdateView
from django import http
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import redirect
from django.core.exceptions import PermissionDenied
from rest_framework.renderers import JSONRenderer
from message_coding.apps.api import serializers
from message_coding.apps.project import models, forms
from message_coding.apps.coding import models as coding_models
from message_coding.apps.base.views import ProjectViewMixin, LoginRequiredMixin
class CreateProjectView(LoginRequiredMixin, CreateView):
"""View for creating new projects"""
form_class = forms.ProjectForm
template_name = "project/project_create.html"
def form_valid(self, form):
"""What to do when a project is created?"""
# The user comes from the session
form.instance.owner = self.request.user
return super(CreateProjectView, self).form_valid(form)
class UpdateProjectView(LoginRequiredMixin, UpdateView):
"""View for editing projects"""
model = models.Project
form_class = forms.ProjectForm
slug_url_kwarg = 'project_slug'
template_name = "project/project_update.html"
class ProjectDetailView(LoginRequiredMixin, DetailView):
"""View for viewing projects"""
model = models.Project
template_name = 'project/project_detail.html'
prefetch_related = ['datasets']
slug_url_kwarg = 'project_slug'
class TaskDetailView(LoginRequiredMixin, ProjectViewMixin, DetailView):
"""View for viewing tasks"""
model = models.Task
template_name = 'project/task_detail.html'
pk_url_kwarg = 'task_pk'
def get_context_data(self, **kwargs):
context = super(TaskDetailView, self).get_context_data(**kwargs)
context['msgs'] = context['task'].get_messages()
return context
class TaskReviewView(LoginRequiredMixin, ProjectViewMixin, DetailView):
"""View for viewing tasks"""
model = models.Task
template_name = 'project/task_review.html'
pk_url_kwarg = 'task_pk'
def get_context_data(self, **kwargs):
context = super(TaskReviewView, self).get_context_data(**kwargs)
context['msgs'] = context['task'].get_messages()
task = context['task']
examples = task.get_examples()
frequency = task.get_frequency()
context['summary'] = task.get_coding_summary()
context['diff_summary'] = task.get_diff_summary()
code_info = {}
        for code, count in frequency.iteritems():
            code_info[code] = {
                'count': count,
                'examples': examples[code]
            }
context['code_info'] = code_info
return context
class EditTaskView(LoginRequiredMixin, ProjectViewMixin, UpdateView):
"""View for editing new tasks"""
model = models.Task
pk_url_kwarg = 'task_pk'
# Let Django autogenerate the form for now
fields = ['name', 'description', 'scheme', ]
template_name = "project/task_edit.html"
CODING_BATCH_SIZE = 1
class CodingView(LoginRequiredMixin, ProjectViewMixin, DetailView):
"""
View for working on a coding task.
This is implemented as a detailview for coding tasks.
"""
model = models.Task
template_name = "project/coding.html"
pk_url_kwarg = 'task_pk'
def get_object(self, queryset=None):
obj = super(CodingView, self).get_object(queryset)
# Ensure the user is assigned to code this
if not obj.is_assigned_to(self.request.user):
raise PermissionDenied("You are not assigned to that task.")
return obj
def get_messages(self):
task = self.object
user = self.request.user
# Ensure the user is assigned to code this
if not task.is_assigned_to(user):
raise PermissionDenied("You are not assigned to that task.")
try:
self.page = int(self.kwargs.get('page', 1))
except ValueError:
self.page = 1
# Get the messages to code
self.paginator = Paginator(task.get_messages(), CODING_BATCH_SIZE)
try:
self.page = self.paginator.validate_number(self.page)
except PageNotAnInteger:
# If page is not an integer, go to the first page.
self.page = 1
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
self.page = self.paginator.num_pages
return self.paginator.page(self.page)
def get_context_data(self, **kwargs):
# Add some serialized json for bootstrapping the client-side app
renderer = JSONRenderer()
task = self.object
kwargs['project_json'] = renderer.render(serializers.ProjectSerializer(self.get_project()).data)
kwargs['task_json'] = renderer.render(serializers.TaskSerializer(self.object).data)
kwargs['user_json'] = renderer.render(serializers.UserSerializer(self.request.user).data)
kwargs['code_scheme_json'] = renderer.render(serializers.SchemeSerializer(task.scheme).data)
return super(CodingView, self).get_context_data(**kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
task = self.object
# The messages we expect to be coding
msgs = self.get_messages()
# Get all the legal code ids for this task's scheme
all_code_ids = coding_models.Code.objects \
.filter(code_group__in=task.scheme.code_groups.all()) \
.values_list('pk', flat=True)
all_code_ids = set(all_code_ids) # for fast lookup
# Extract all the new code ids for each message from the POST params
message_code_ids = {}
for msg in msgs:
key = "messages[%d]" % msg.pk
message_code_ids[msg.pk] = set()
            for code_id in self.request.POST.getlist(key):
                try:
                    code_id = int(code_id)  # POST params arrive as strings
                except ValueError:
                    return http.HttpResponseBadRequest("Code id %r is not an integer" % code_id)
                if code_id not in all_code_ids:
                    return http.HttpResponseBadRequest("Code id %d not allowed" % code_id)
message_code_ids[msg.pk].add(code_id)
# Make sure all the messages match up
if not message_code_ids or len(message_code_ids) != len(msgs):
return http.HttpResponseBadRequest("Invalid set of messages coded")
# Now create/delete instances as needed
to_create = []
to_delete = []
for msg in msgs:
current_instances = models.CodeInstance.objects \
.filter(owner=self.request.user,
task=task,
message=msg) \
.only('pk', 'code_id')
new_code_ids = message_code_ids[msg.pk]
for inst in current_instances:
if inst.code_id not in new_code_ids:
# it has been un-checked
to_delete.append(inst)
else:
# it is in both
new_code_ids.remove(inst.code_id)
for code_id in new_code_ids:
# any left over must be created
to_create.append(models.CodeInstance(owner=self.request.user,
task=task,
message=msg,
code_id=code_id))
models.CodeInstance.objects \
.filter(pk__in=[inst.pk for inst in to_delete]) \
.delete()
models.CodeInstance.objects.bulk_create(to_create)
next_page = self.page + 1
return redirect('coding_page', project_slug=task.project.slug, task_pk=task.pk, page=next_page)
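# Illustrative note (not part of the app): the create/delete logic above is a
# set reconciliation -- submitted codes missing from the DB are created, DB
# codes missing from the submission are deleted. A standalone sketch of the
# same idea, with made-up inputs:
def _example_reconcile(existing_ids, submitted_ids):
    """Return (to_create, to_delete) as id sets (illustrative only)."""
    existing, submitted = set(existing_ids), set(submitted_ids)
    return submitted - existing, existing - submitted
# _example_reconcile([1, 2, 3], [2, 3, 4]) => ({4}, {1})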
|
|
import argparse
import os
import sys
import warnings
from gettext import gettext
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
import _pytest._io
from _pytest.compat import final
from _pytest.config.exceptions import UsageError
from _pytest.deprecated import ARGUMENT_PERCENT_DEFAULT
from _pytest.deprecated import ARGUMENT_TYPE_STR
from _pytest.deprecated import ARGUMENT_TYPE_STR_CHOICE
if TYPE_CHECKING:
from typing import NoReturn
from typing_extensions import Literal
FILE_OR_DIR = "file_or_dir"
@final
class Parser:
"""Parser for command line arguments and ini-file values.
:ivar extra_info: Dict of generic param -> value to display in case
there's an error processing the command line arguments.
"""
prog: Optional[str] = None
def __init__(
self,
usage: Optional[str] = None,
processopt: Optional[Callable[["Argument"], None]] = None,
) -> None:
self._anonymous = OptionGroup("custom options", parser=self)
self._groups: List[OptionGroup] = []
self._processopt = processopt
self._usage = usage
self._inidict: Dict[str, Tuple[str, Optional[str], Any]] = {}
self._ininames: List[str] = []
self.extra_info: Dict[str, Any] = {}
def processoption(self, option: "Argument") -> None:
if self._processopt:
if option.dest:
self._processopt(option)
def getgroup(
self, name: str, description: str = "", after: Optional[str] = None
) -> "OptionGroup":
"""Get (or create) a named option Group.
:name: Name of the option group.
:description: Long description for --help output.
:after: Name of another group, used for ordering --help output.
The returned group object has an ``addoption`` method with the same
signature as :py:func:`parser.addoption
<_pytest.config.argparsing.Parser.addoption>` but will be shown in the
        respective group in the output of ``pytest --help``.
"""
for group in self._groups:
if group.name == name:
return group
        group = OptionGroup(name, description, parser=self)
        # Insert after the group named by ``after``; if no such group
        # exists, ``i`` ends at the last index and the group goes last.
        i = 0
        for i, grp in enumerate(self._groups):
            if grp.name == after:
                break
        self._groups.insert(i + 1, group)
return group
def addoption(self, *opts: str, **attrs: Any) -> None:
"""Register a command line option.
:opts: Option names, can be short or long options.
:attrs: Same attributes which the ``add_argument()`` function of the
`argparse library <https://docs.python.org/library/argparse.html>`_
accepts.
After command line parsing, options are available on the pytest config
object via ``config.option.NAME`` where ``NAME`` is usually set
by passing a ``dest`` attribute, for example
``addoption("--long", dest="NAME", ...)``.
"""
self._anonymous.addoption(*opts, **attrs)
def parse(
self,
args: Sequence[Union[str, "os.PathLike[str]"]],
namespace: Optional[argparse.Namespace] = None,
) -> argparse.Namespace:
from _pytest._argcomplete import try_argcomplete
self.optparser = self._getparser()
try_argcomplete(self.optparser)
strargs = [os.fspath(x) for x in args]
return self.optparser.parse_args(strargs, namespace=namespace)
def _getparser(self) -> "MyOptionParser":
from _pytest._argcomplete import filescompleter
optparser = MyOptionParser(self, self.extra_info, prog=self.prog)
groups = self._groups + [self._anonymous]
for group in groups:
if group.options:
desc = group.description or group.name
arggroup = optparser.add_argument_group(desc)
for option in group.options:
n = option.names()
a = option.attrs()
arggroup.add_argument(*n, **a)
file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs="*")
# bash like autocompletion for dirs (appending '/')
# Type ignored because typeshed doesn't know about argcomplete.
file_or_dir_arg.completer = filescompleter # type: ignore
return optparser
def parse_setoption(
self,
args: Sequence[Union[str, "os.PathLike[str]"]],
option: argparse.Namespace,
namespace: Optional[argparse.Namespace] = None,
) -> List[str]:
parsedoption = self.parse(args, namespace=namespace)
for name, value in parsedoption.__dict__.items():
setattr(option, name, value)
return cast(List[str], getattr(parsedoption, FILE_OR_DIR))
def parse_known_args(
self,
args: Sequence[Union[str, "os.PathLike[str]"]],
namespace: Optional[argparse.Namespace] = None,
) -> argparse.Namespace:
"""Parse and return a namespace object with known arguments at this point."""
return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
def parse_known_and_unknown_args(
self,
args: Sequence[Union[str, "os.PathLike[str]"]],
namespace: Optional[argparse.Namespace] = None,
) -> Tuple[argparse.Namespace, List[str]]:
"""Parse and return a namespace object with known arguments, and
the remaining arguments unknown at this point."""
optparser = self._getparser()
strargs = [os.fspath(x) for x in args]
return optparser.parse_known_args(strargs, namespace=namespace)
def addini(
self,
name: str,
help: str,
type: Optional[
"Literal['string', 'pathlist', 'args', 'linelist', 'bool']"
] = None,
default=None,
) -> None:
"""Register an ini-file option.
:name: Name of the ini-variable.
:type: Type of the variable, can be ``string``, ``pathlist``, ``args``,
``linelist`` or ``bool``. Defaults to ``string`` if ``None`` or
not passed.
:default: Default value if no ini-file option exists but is queried.
The value of ini-variables can be retrieved via a call to
:py:func:`config.getini(name) <_pytest.config.Config.getini>`.
"""
assert type in (None, "string", "pathlist", "args", "linelist", "bool")
self._inidict[name] = (help, type, default)
self._ininames.append(name)
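# Illustrative usage sketch (not part of pytest): how a plugin's
# ``pytest_addoption`` hook might drive this Parser. The option and ini names
# below are made up for this example.
def _example_register_options(parser: Parser) -> None:
    group = parser.getgroup("example", "example plugin options")
    group.addoption(
        "--example-flag",
        action="store_true",
        default=False,
        help="Enable example behaviour.",
    )
    parser.addini("example_setting", help="Example ini value.", type="string", default="demo")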
class ArgumentError(Exception):
"""Raised if an Argument instance is created with invalid or
inconsistent arguments."""
def __init__(self, msg: str, option: Union["Argument", str]) -> None:
self.msg = msg
self.option_id = str(option)
def __str__(self) -> str:
if self.option_id:
return f"option {self.option_id}: {self.msg}"
else:
return self.msg
class Argument:
"""Class that mimics the necessary behaviour of optparse.Option.
    It's currently a least-effort implementation that ignores choices
    and integer prefixes.
https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
"""
_typ_map = {"int": int, "string": str, "float": float, "complex": complex}
def __init__(self, *names: str, **attrs: Any) -> None:
"""Store parms in private vars for use in add_argument."""
self._attrs = attrs
self._short_opts: List[str] = []
self._long_opts: List[str] = []
if "%default" in (attrs.get("help") or ""):
warnings.warn(ARGUMENT_PERCENT_DEFAULT, stacklevel=3)
try:
typ = attrs["type"]
except KeyError:
pass
else:
# This might raise a keyerror as well, don't want to catch that.
if isinstance(typ, str):
if typ == "choice":
warnings.warn(
ARGUMENT_TYPE_STR_CHOICE.format(typ=typ, names=names),
stacklevel=4,
)
                    # argparse expects a type here; take it from
                    # the type of the first element.
attrs["type"] = type(attrs["choices"][0])
else:
warnings.warn(
ARGUMENT_TYPE_STR.format(typ=typ, names=names), stacklevel=4
)
attrs["type"] = Argument._typ_map[typ]
# Used in test_parseopt -> test_parse_defaultgetter.
self.type = attrs["type"]
else:
self.type = typ
try:
# Attribute existence is tested in Config._processopt.
self.default = attrs["default"]
except KeyError:
pass
self._set_opt_strings(names)
dest: Optional[str] = attrs.get("dest")
if dest:
self.dest = dest
elif self._long_opts:
self.dest = self._long_opts[0][2:].replace("-", "_")
else:
try:
self.dest = self._short_opts[0][1:]
except IndexError as e:
self.dest = "???" # Needed for the error repr.
raise ArgumentError("need a long or short option", self) from e
def names(self) -> List[str]:
return self._short_opts + self._long_opts
def attrs(self) -> Mapping[str, Any]:
# Update any attributes set by processopt.
attrs = "default dest help".split()
attrs.append(self.dest)
for attr in attrs:
try:
self._attrs[attr] = getattr(self, attr)
except AttributeError:
pass
if self._attrs.get("help"):
a = self._attrs["help"]
a = a.replace("%default", "%(default)s")
# a = a.replace('%prog', '%(prog)s')
self._attrs["help"] = a
return self._attrs
def _set_opt_strings(self, opts: Sequence[str]) -> None:
"""Directly from optparse.
Might not be necessary as this is passed to argparse later on.
"""
for opt in opts:
if len(opt) < 2:
raise ArgumentError(
"invalid option string %r: "
"must be at least two characters long" % opt,
self,
)
elif len(opt) == 2:
if not (opt[0] == "-" and opt[1] != "-"):
raise ArgumentError(
"invalid short option string %r: "
"must be of the form -x, (x any non-dash char)" % opt,
self,
)
self._short_opts.append(opt)
else:
if not (opt[0:2] == "--" and opt[2] != "-"):
raise ArgumentError(
"invalid long option string %r: "
"must start with --, followed by non-dash" % opt,
self,
)
self._long_opts.append(opt)
def __repr__(self) -> str:
args: List[str] = []
if self._short_opts:
args += ["_short_opts: " + repr(self._short_opts)]
if self._long_opts:
args += ["_long_opts: " + repr(self._long_opts)]
args += ["dest: " + repr(self.dest)]
if hasattr(self, "type"):
args += ["type: " + repr(self.type)]
if hasattr(self, "default"):
args += ["default: " + repr(self.default)]
return "Argument({})".format(", ".join(args))
class OptionGroup:
def __init__(
self, name: str, description: str = "", parser: Optional[Parser] = None
) -> None:
self.name = name
self.description = description
self.options: List[Argument] = []
self.parser = parser
def addoption(self, *optnames: str, **attrs: Any) -> None:
"""Add an option to this group.
If a shortened version of a long option is specified, it will
be suppressed in the help. addoption('--twowords', '--two-words')
results in help showing '--two-words' only, but --twowords gets
accepted **and** the automatic destination is in args.twowords.
"""
conflict = set(optnames).intersection(
name for opt in self.options for name in opt.names()
)
if conflict:
raise ValueError("option names %s already added" % conflict)
option = Argument(*optnames, **attrs)
self._addoption_instance(option, shortupper=False)
def _addoption(self, *optnames: str, **attrs: Any) -> None:
option = Argument(*optnames, **attrs)
self._addoption_instance(option, shortupper=True)
def _addoption_instance(self, option: "Argument", shortupper: bool = False) -> None:
if not shortupper:
for opt in option._short_opts:
if opt[0] == "-" and opt[1].islower():
raise ValueError("lowercase shortoptions reserved")
if self.parser:
self.parser.processoption(option)
self.options.append(option)
class MyOptionParser(argparse.ArgumentParser):
def __init__(
self,
parser: Parser,
extra_info: Optional[Dict[str, Any]] = None,
prog: Optional[str] = None,
) -> None:
self._parser = parser
argparse.ArgumentParser.__init__(
self,
prog=prog,
usage=parser._usage,
add_help=False,
formatter_class=DropShorterLongHelpFormatter,
allow_abbrev=False,
)
# extra_info is a dict of (param -> value) to display if there's
        # a usage error to provide more contextual information to the user.
self.extra_info = extra_info if extra_info else {}
def error(self, message: str) -> "NoReturn":
"""Transform argparse error message into UsageError."""
msg = f"{self.prog}: error: {message}"
if hasattr(self._parser, "_config_source_hint"):
# Type ignored because the attribute is set dynamically.
msg = f"{msg} ({self._parser._config_source_hint})" # type: ignore
raise UsageError(self.format_usage() + msg)
# Type ignored because typeshed has a very complex type in the superclass.
def parse_args( # type: ignore
self,
args: Optional[Sequence[str]] = None,
namespace: Optional[argparse.Namespace] = None,
) -> argparse.Namespace:
"""Allow splitting of positional arguments."""
parsed, unrecognized = self.parse_known_args(args, namespace)
if unrecognized:
for arg in unrecognized:
if arg and arg[0] == "-":
lines = ["unrecognized arguments: %s" % (" ".join(unrecognized))]
for k, v in sorted(self.extra_info.items()):
lines.append(f" {k}: {v}")
self.error("\n".join(lines))
getattr(parsed, FILE_OR_DIR).extend(unrecognized)
return parsed
if sys.version_info[:2] < (3, 9): # pragma: no cover
# Backport of https://github.com/python/cpython/pull/14316 so we can
# disable long --argument abbreviations without breaking short flags.
def _parse_optional(
self, arg_string: str
) -> Optional[Tuple[Optional[argparse.Action], str, Optional[str]]]:
if not arg_string:
return None
if not arg_string[0] in self.prefix_chars:
return None
if arg_string in self._option_string_actions:
action = self._option_string_actions[arg_string]
return action, arg_string, None
if len(arg_string) == 1:
return None
if "=" in arg_string:
option_string, explicit_arg = arg_string.split("=", 1)
if option_string in self._option_string_actions:
action = self._option_string_actions[option_string]
return action, option_string, explicit_arg
if self.allow_abbrev or not arg_string.startswith("--"):
option_tuples = self._get_option_tuples(arg_string)
if len(option_tuples) > 1:
msg = gettext(
"ambiguous option: %(option)s could match %(matches)s"
)
options = ", ".join(option for _, option, _ in option_tuples)
self.error(msg % {"option": arg_string, "matches": options})
elif len(option_tuples) == 1:
(option_tuple,) = option_tuples
return option_tuple
if self._negative_number_matcher.match(arg_string):
if not self._has_negative_number_optionals:
return None
if " " in arg_string:
return None
return None, arg_string, None
class DropShorterLongHelpFormatter(argparse.HelpFormatter):
"""Shorten help for long options that differ only in extra hyphens.
- Collapse **long** options that are the same except for extra hyphens.
- Shortcut if there are only two options and one of them is a short one.
- Cache result on the action object as this is called at least 2 times.
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
# Use more accurate terminal width.
if "width" not in kwargs:
kwargs["width"] = _pytest._io.get_terminal_width()
super().__init__(*args, **kwargs)
def _format_action_invocation(self, action: argparse.Action) -> str:
orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
if orgstr and orgstr[0] != "-": # only optional arguments
return orgstr
res: Optional[str] = getattr(action, "_formatted_action_invocation", None)
if res:
return res
options = orgstr.split(", ")
if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
# a shortcut for '-h, --help' or '--abc', '-a'
action._formatted_action_invocation = orgstr # type: ignore
return orgstr
return_list = []
short_long: Dict[str, str] = {}
for option in options:
if len(option) == 2 or option[2] == " ":
continue
if not option.startswith("--"):
raise ArgumentError(
'long optional argument without "--": [%s]' % (option), option
)
xxoption = option[2:]
shortened = xxoption.replace("-", "")
if shortened not in short_long or len(short_long[shortened]) < len(
xxoption
):
short_long[shortened] = xxoption
# now short_long has been filled out to the longest with dashes
# **and** we keep the right option ordering from add_argument
for option in options:
if len(option) == 2 or option[2] == " ":
return_list.append(option)
if option[2:] == short_long.get(option.replace("-", "")):
return_list.append(option.replace(" ", "=", 1))
formatted_action_invocation = ", ".join(return_list)
action._formatted_action_invocation = formatted_action_invocation # type: ignore
return formatted_action_invocation
def _split_lines(self, text, width):
"""Wrap lines after splitting on original newlines.
        This allows having explicit line breaks in the help text.
"""
import textwrap
lines = []
for line in text.splitlines():
lines.extend(textwrap.wrap(line.strip(), width))
return lines
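# Illustrative sketch (not part of pytest): the dash-free key used above to
# collapse duplicate long options, extracted for a made-up pair of spellings.
# Both reduce to the same key, so only the longer dashed form survives in
# --help output.
def _example_collapse_keys() -> Dict[str, str]:
    short_long: Dict[str, str] = {}
    for option in ("--funcargs", "--func-args"):
        xxoption = option[2:]
        shortened = xxoption.replace("-", "")
        if shortened not in short_long or len(short_long[shortened]) < len(xxoption):
            short_long[shortened] = xxoption
    return short_long  # {'funcargs': 'func-args'}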
|
|
from __future__ import unicode_literals
import re
import tempfile
from django.contrib.gis import gdal
from django.contrib.gis.geos import HAS_GEOS
from django.core.management import call_command
from django.db import connection
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils import six
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango21Warning,
)
from ..utils import no_oracle, oracle, postgis, spatialite
if HAS_GEOS:
from django.contrib.gis.db.models import Extent, MakeLine, Union
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
Point, LineString, LinearRing, Polygon, GeometryCollection)
from .models import Country, City, PennsylvaniaCity, State, Track, NonConcreteModel, Feature, MinusOneSRID
def postgis_bug_version():
spatial_version = getattr(connection.ops, "spatial_version", (0, 0, 0))
return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
@skipUnlessDBFeature("gis_enabled")
class GeoModelTest(TestCase):
fixtures = ['initial']
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
# Testing on a Point
pnt = Point(0, 0)
nullcity = City(name='NullCity', point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
try:
nullcity.point = bad
except TypeError:
pass
else:
self.fail('Should throw a TypeError')
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name='NullCity').point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.save()
self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.delete()
# Testing on a Polygon
shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name='NullState', poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name='NullState')
self.assertEqual(ply, ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
if gdal.HAS_GDAL:
self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
self.assertEqual('WGS 84', ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(ply, State.objects.get(name='NullState').poly)
ns.delete()
@skipUnlessDBFeature("supports_transform")
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = 'POINT (-98.493183 29.424170)'
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# Oracle doesn't have SRID 3084, using 41157.
if oracle:
# San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
# Used the following Oracle SQL to get this value:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(
# SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157))
# )
# FROM DUAL;
nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
nad_srid = 41157
else:
# San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
# Used ogr.py in gdal 1.4.1 for this transform
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)'
nad_srid = 3084
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
nad_pnt = fromstr(nad_wkt, srid=nad_srid)
if oracle:
tx = Country.objects.get(mpoly__contains=nad_pnt)
else:
tx = Country.objects.get(mpoly__intersects=nad_pnt)
self.assertEqual('Texas', tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name='San Antonio', point=nad_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name='San Antonio')
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
if spatialite and connection.ops.spatial_version < (3, 0, 0):
# SpatiaLite < 3 does not support missing SRID values.
return
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertEqual(c.point, None)
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name='Point', geom=Point(1, 1)).save()
Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
Feature(name='GeometryCollection',
geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
f_1 = Feature.objects.get(name='Point')
self.assertIsInstance(f_1.geom, Point)
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name='LineString')
self.assertIsInstance(f_2.geom, LineString)
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name='Polygon')
self.assertIsInstance(f_3.geom, Polygon)
f_4 = Feature.objects.get(name='GeometryCollection')
self.assertIsInstance(f_4.geom, GeometryCollection)
self.assertEqual(f_3.geom, f_4.geom[2])
@skipUnlessDBFeature("supports_transform")
def test_inherited_geofields(self):
"Test GeoQuerySet methods on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.transform(32128)
self.assertEqual(1, qs.count())
for pc in qs:
self.assertEqual(32128, pc.point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
# Only PostGIS would support a 'select *' query because of its recognized
# HEXEWKB format for geometry fields
as_text = 'ST_AsText(%s)' if postgis else connection.ops.select
cities2 = City.objects.raw(
'select id, name, %s from geoapp_city' % as_text % 'point'
)
self.assertEqual(len(cities1), len(list(cities2)))
self.assertIsInstance(cities2[0].point, Point)
def test_dumpdata_loaddata_cycle(self):
"""
Test a dumpdata/loaddata cycle with geographic data.
"""
out = six.StringIO()
original_data = list(City.objects.all().order_by('name'))
call_command('dumpdata', 'geoapp.City', stdout=out)
result = out.getvalue()
houston = City.objects.get(name='Houston')
self.assertIn('"point": "%s"' % houston.point.ewkt, result)
# Reload now dumped data
with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as tmp:
tmp.write(result)
tmp.seek(0)
call_command('loaddata', tmp.name, verbosity=0)
self.assertListEqual(original_data, list(City.objects.all().order_by('name')))
@skipUnlessDBFeature("gis_enabled")
class GeoLookupTest(TestCase):
fixtures = ['initial']
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name='Pueblo')
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
if connection.features.supports_real_shape_operations:
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual('Kansas', qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name='Texas')
# Seeing what cities are in Texas, should get Houston and Dallas,
# and Oklahoma City because 'contained' only checks on the
# _bounding box_ of the Geometries.
if connection.features.supports_contained_lookup:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ['Houston', 'Dallas', 'Oklahoma City']
for c in qs:
self.assertIn(c.name, cities)
# Pulling out some cities.
houston = City.objects.get(name='Houston')
wellington = City.objects.get(name='Wellington')
pueblo = City.objects.get(name='Pueblo')
okcity = City.objects.get(name='Oklahoma City')
lawrence = City.objects.get(name='Lawrence')
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX
self.assertEqual('Texas', tx.name)
self.assertEqual('New Zealand', nz.name)
# Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry).
if not (spatialite and connection.ops.spatial_version < (3, 0, 0)):
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual('Kansas', ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
# are not contained in Texas or New Zealand.
self.assertEqual(len(Country.objects.filter(mpoly__contains=pueblo.point)), 0) # Query w/GEOSGeometry object
self.assertEqual(len(Country.objects.filter(mpoly__contains=okcity.point.wkt)),
0 if connection.features.supports_real_shape_operations else 1) # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if connection.features.supports_bbcontains_lookup:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual('Texas', qs[0].name)
@skipUnlessDBFeature("supports_crosses_lookup")
def test_crosses_lookup(self):
Track.objects.create(
name='Line1',
line=LineString([(-95, 29), (-60, 0)])
)
self.assertEqual(
Track.objects.filter(line__crosses=LineString([(-95, 0), (-60, 29)])).count(),
1
)
self.assertEqual(
Track.objects.filter(line__crosses=LineString([(-95, 30), (0, 30)])).count(),
0
)
@skipUnlessDBFeature("supports_left_right_lookups")
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
# The left/right lookup tests are known failures on PostGIS 2.0/2.0.1
# http://trac.osgeo.org/postgis/ticket/2035
if postgis_bug_version():
self.skipTest("PostGIS 2.0/2.0.1 left and right lookups are known to be buggy.")
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name='Colorado').poly
ks_border = State.objects.get(name='Kansas').poly
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
# These cities should be strictly to the right of the CO border.
cities = ['Houston', 'Dallas', 'Oklahoma City',
'Lawrence', 'Chicago', 'Wellington']
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# These cities should be strictly to the right of the KS border.
cities = ['Chicago', 'Wellington']
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual('Victoria', vic.name)
cities = ['Pueblo', 'Victoria']
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]:
self.assertEqual('Houston', c.name)
@skipUnlessDBFeature("supports_null_geometries")
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name='Puerto Rico')
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual('Puerto Rico', nullqs[0].name)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertIn('Colorado', state_names)
self.assertIn('Kansas', state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
self.assertEqual(nmi.poly, None)
# Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
nmi.save()
State.objects.filter(name='Northern Mariana Islands').update(poly=None)
self.assertIsNone(State.objects.get(name='Northern Mariana Islands').poly)
@skipUnlessDBFeature("supports_relate_lookup")
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
        # Not passing in a geometry as the first param should
        # raise a ValueError when initializing the GeoQuerySet.
self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
qs = Country.objects.filter(mpoly__relate=bad_args)
self.assertRaises(e, qs.count)
# Relate works differently for the different backends.
if postgis or spatialite:
contains_mask = 'T*T***FF*'
within_mask = 'T*F**F***'
intersects_mask = 'T********'
elif oracle:
contains_mask = 'contains'
within_mask = 'inside'
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = 'overlapbdyintersect'
# Testing contains relation mask.
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)
# Testing within relation mask.
ks = State.objects.get(name='Kansas')
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)
# Testing intersection relation mask.
if not oracle:
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
@skipUnlessDBFeature("gis_enabled")
@ignore_warnings(category=RemovedInDjango21Warning)
class GeoQuerySetTest(TestCase):
fixtures = ['initial']
    # Please keep the tests in alphabetical order of GeoQuerySet method names.
@skipUnlessDBFeature("has_centroid_method")
def test_centroid(self):
"Testing the `centroid` GeoQuerySet method."
qs = State.objects.exclude(poly__isnull=True).centroid()
if oracle:
tol = 0.1
elif spatialite:
tol = 0.000001
else:
tol = 0.000000001
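        # The database-computed centroid should match the GEOS-computed
        # centroid of the same polygon, within the backend-specific tolerance.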
for s in qs:
self.assertTrue(s.poly.centroid.equals_exact(s.centroid, tol))
@skipUnlessDBFeature(
"has_difference_method", "has_intersection_method",
"has_sym_difference_method", "has_union_method")
def test_diff_intersection_union(self):
"Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods."
geom = Point(5, 23)
qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom)
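        # Each chained method runs in the database and attaches its result as
        # a same-named attribute on the returned models (c.difference,
        # c.sym_difference, c.union and, below, c.intersection).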
# XXX For some reason SpatiaLite does something screwy with the Texas geometry here. Also,
# XXX it doesn't like the null intersection.
if spatialite:
qs = qs.exclude(name='Texas')
else:
qs = qs.intersection(geom)
for c in qs:
if oracle:
# Should be able to execute the queries; however, they won't be the same
# as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
# SpatiaLite).
pass
else:
self.assertEqual(c.mpoly.difference(geom), c.difference)
if not spatialite:
self.assertEqual(c.mpoly.intersection(geom), c.intersection)
# Ordering might differ in collections
self.assertSetEqual(set(g.wkt for g in c.mpoly.sym_difference(geom)),
set(g.wkt for g in c.sym_difference))
self.assertSetEqual(set(g.wkt for g in c.mpoly.union(geom)),
set(g.wkt for g in c.union))
@skipUnlessDBFeature("has_envelope_method")
def test_envelope(self):
"Testing the `envelope` GeoQuerySet method."
countries = Country.objects.all().envelope()
for country in countries:
self.assertIsInstance(country.envelope, Polygon)
@skipUnlessDBFeature("supports_extent_aggr")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_extent(self):
"""
Testing the (deprecated) `extent` GeoQuerySet method and the Extent
aggregate.
"""
# Reference query:
# `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
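        # Extents are returned as a 4-tuple in (xmin, ymin, xmax, ymax) order,
        # matching the BOX(...) output above.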
        expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.7820587158203)
qs = City.objects.filter(name__in=('Houston', 'Dallas'))
extent1 = qs.extent()
extent2 = qs.aggregate(Extent('point'))['point__extent']
for extent in (extent1, extent2):
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
        self.assertIsNone(City.objects.filter(name='Smalltown').extent())
        self.assertIsNone(City.objects.filter(name='Smalltown').aggregate(Extent('point'))['point__extent'])
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent_with_limit(self):
"""
Testing if extent supports limit.
"""
extent1 = City.objects.all().aggregate(Extent('point'))['point__extent']
extent2 = City.objects.all()[:3].aggregate(Extent('point'))['point__extent']
self.assertNotEqual(extent1, extent2)
@skipUnlessDBFeature("has_force_rhr_method")
def test_force_rhr(self):
"Testing GeoQuerySet.force_rhr()."
rings = (
((0, 0), (5, 0), (0, 5), (0, 0)),
((1, 1), (1, 3), (3, 1), (1, 1)),
)
rhr_rings = (
((0, 0), (0, 5), (5, 0), (0, 0)),
((1, 1), (3, 1), (1, 3), (1, 1)),
)
State.objects.create(name='Foo', poly=Polygon(*rings))
s = State.objects.force_rhr().get(name='Foo')
self.assertEqual(rhr_rings, s.force_rhr.coords)
@skipUnlessDBFeature("has_geohash_method")
def test_geohash(self):
"Testing GeoQuerySet.geohash()."
# Reference query:
# SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
# SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
ref_hash = '9vk1mfq8jx0c8e0386z6'
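        # Geohashes nest by prefix: truncating a hash yields the enclosing,
        # coarser cell, so the precision-5 hash is just the first five
        # characters of the full-precision hash.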
h1 = City.objects.geohash().get(name='Houston')
h2 = City.objects.geohash(precision=5).get(name='Houston')
self.assertEqual(ref_hash, h1.geohash)
self.assertEqual(ref_hash[:5], h2.geohash)
def test_geojson(self):
"Testing GeoJSON output from the database using GeoQuerySet.geojson()."
# Only PostGIS and SpatiaLite 3.0+ support GeoJSON.
if not connection.ops.geojson:
self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly')
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = (
'{"type":"Point","crs":{"type":"name","properties":'
'{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
)
victoria_json = (
'{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],'
'"coordinates":[-123.305196,48.462611]}'
)
chicago_json = (
'{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},'
'"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
)
if spatialite:
victoria_json = (
'{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],'
'"coordinates":[-123.305196,48.462611]}'
)
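        # In the ST_AsGeoJson reference queries below, the third argument is
        # an options bitmask: 1 adds a bbox, 2 adds a short CRS, 3 adds both.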
        # The precision argument must be an integer.
self.assertRaises(TypeError, City.objects.geojson, precision='foo')
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0)
# FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)
# SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
self.assertEqual(
chicago_json,
City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson
)
@skipUnlessDBFeature("has_gml_method")
def test_gml(self):
"Testing GML output from the database using GeoQuerySet.gml()."
# Should throw a TypeError when trying to obtain GML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.gml, field_name='name')
ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.gml(precision=9).get(name='Pueblo')
if oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(
r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml">'
r'<gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ '
r'</gml:coordinates></gml:Point>'
)
elif spatialite and connection.ops.spatial_version < (3, 0, 0):
            # SpatiaLite before 3.0 emits an extra colon in the srsName value.
gml_regex = re.compile(
r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." '
r'cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>'
)
else:
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
r'-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>'
)
for ptown in [ptown1, ptown2]:
self.assertTrue(gml_regex.match(ptown.gml))
if postgis:
self.assertIn('<gml:pos srsDimension="2">', City.objects.gml(version=3).get(name='Pueblo').gml)
@skipUnlessDBFeature("has_kml_method")
def test_kml(self):
"Testing KML output from the database using GeoQuerySet.kml()."
# Should throw a TypeError when trying to obtain KML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.kml, 'name')
# Ensuring the KML is as expected.
ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.kml(precision=9).get(name='Pueblo')
for ptown in [ptown1, ptown2]:
self.assertEqual('<Point><coordinates>-104.609252,38.255001</coordinates></Point>', ptown.kml)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_make_line(self):
"""
Testing the (deprecated) `make_line` GeoQuerySet method and the MakeLine
aggregate.
"""
if not connection.features.supports_make_line_aggr:
# Only PostGIS has support for the MakeLine aggregate. For other
# backends, test that NotImplementedError is raised
self.assertRaises(
NotImplementedError,
City.objects.all().aggregate, MakeLine('point')
)
return
# Ensuring that a `TypeError` is raised on models without PointFields.
self.assertRaises(TypeError, State.objects.make_line)
self.assertRaises(TypeError, Country.objects.make_line)
        # MakeLine on an inappropriate field simply returns None.
self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline'])
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
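        # ST_MakeLine connects the point of every row, in the order the rows
        # are returned, into a single LINESTRING, so the reference line
        # follows the table's insertion order.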
ref_line = GEOSGeometry(
'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,'
'-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,'
'-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)',
srid=4326
)
        # Check for equality with a tolerance of 10e-5, which is a lower bound
        # on the precision of the ref_line coordinates.
line1 = City.objects.make_line()
line2 = City.objects.aggregate(MakeLine('point'))['point__makeline']
for line in (line1, line2):
self.assertTrue(ref_line.equals_exact(line, tolerance=10e-5),
"%s != %s" % (ref_line, line))
@skipUnlessDBFeature("has_num_geom_method")
def test_num_geom(self):
"Testing the `num_geom` GeoQuerySet method."
# Both 'countries' only have two geometries.
for c in Country.objects.num_geom():
self.assertEqual(2, c.num_geom)
for c in City.objects.filter(point__isnull=False).num_geom():
            # PostGIS < 2.0.0 returns None for the number of geometries on
            # non-collections, whereas the other backends (Oracle, SpatiaLite,
            # PostGIS 2.0+) return 1.
if postgis and connection.ops.spatial_version < (2, 0, 0):
self.assertIsNone(c.num_geom)
else:
self.assertEqual(1, c.num_geom)
@skipUnlessDBFeature("supports_num_points_poly")
def test_num_points(self):
"Testing the `num_points` GeoQuerySet method."
for c in Country.objects.num_points():
self.assertEqual(c.mpoly.num_points, c.num_points)
if not oracle:
# Oracle cannot count vertices in Point geometries.
for c in City.objects.num_points():
self.assertEqual(1, c.num_points)
@skipUnlessDBFeature("has_point_on_surface_method")
def test_point_on_surface(self):
"Testing the `point_on_surface` GeoQuerySet method."
# Reference values.
if oracle:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05))
# FROM GEOAPP_COUNTRY;
ref = {'New Zealand': fromstr('POINT (174.616364 -36.100861)', srid=4326),
'Texas': fromstr('POINT (-103.002434 36.500397)', srid=4326),
}
else:
# Using GEOSGeometry to compute the reference point on surface values
# -- since PostGIS also uses GEOS these should be the same.
ref = {'New Zealand': Country.objects.get(name='New Zealand').mpoly.point_on_surface,
'Texas': Country.objects.get(name='Texas').mpoly.point_on_surface
}
for c in Country.objects.point_on_surface():
if spatialite:
# XXX This seems to be a WKT-translation-related precision issue?
tol = 0.00001
else:
tol = 0.000000001
self.assertTrue(ref[c.name].equals_exact(c.point_on_surface, tol))
@skipUnlessDBFeature("has_reverse_method")
def test_reverse_geom(self):
"Testing GeoQuerySet.reverse_geom()."
coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
Track.objects.create(name='Foo', line=LineString(coords))
t = Track.objects.reverse_geom().get(name='Foo')
coords.reverse()
self.assertEqual(tuple(coords), t.reverse_geom.coords)
if oracle:
self.assertRaises(TypeError, State.objects.reverse_geom)
@skipUnlessDBFeature("has_scale_method")
def test_scale(self):
"Testing the `scale` GeoQuerySet method."
xfac, yfac = 2, 3
tol = 5 # XXX The low precision tolerance is for SpatiaLite
qs = Country.objects.scale(xfac, yfac, model_att='scaled')
for c in qs:
for p1, p2 in zip(c.mpoly, c.scaled):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
@skipUnlessDBFeature("has_snap_to_grid_method")
def test_snap_to_grid(self):
"Testing GeoQuerySet.snap_to_grid()."
# Let's try and break snap_to_grid() with bad combinations of arguments.
for bad_args in ((), range(3), range(5)):
self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args)
for bad_args in (('1.0',), (1.0, None), tuple(map(six.text_type, range(4)))):
self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args)
# Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
# from the world borders dataset he provides.
wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
'12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
'12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
'12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
'12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
'12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
'12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
'12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
# Because floating-point arithmetic isn't exact, we set a tolerance
# to pass into GEOS `equals_exact`.
tol = 0.000000001
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
self.assertTrue(
ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr(
'MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))'
)
self.assertTrue(
ref.equals_exact(
Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid,
tol
)
)
@skipUnlessDBFeature("has_svg_method")
def test_svg(self):
"Testing SVG output using GeoQuerySet.svg()."
self.assertRaises(TypeError, City.objects.svg, precision='foo')
# SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
svg1 = 'cx="-104.609252" cy="-38.255001"'
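        # Note the negated 'cy': AsSVG flips the y coordinate because the SVG
        # y-axis points downward.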
        # Even in relative mode there is only one point, so the output is
        # practically the same except for the 'c' prefix on the x,y attribute
        # names.
svg2 = svg1.replace('c', '')
self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg)
self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg)
@skipUnlessDBFeature("has_transform_method")
def test_transform(self):
"Testing the transform() GeoQuerySet method."
# Pre-transformed points for Houston and Pueblo.
htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084)
ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
prec = 3 # Precision is low due to version variations in PROJ and GDAL.
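        # transform() reprojects in the database (ST_Transform on PostGIS,
        # SDO_CS.TRANSFORM on Oracle) rather than in Python.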
# Asserting the result of the transform operation with the values in
# the pre-transformed points. Oracle does not have the 3084 SRID.
if not oracle:
h = City.objects.transform(htown.srid).get(name='Houston')
self.assertEqual(3084, h.point.srid)
self.assertAlmostEqual(htown.x, h.point.x, prec)
self.assertAlmostEqual(htown.y, h.point.y, prec)
p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo')
p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo')
for p in [p1, p2]:
self.assertEqual(2774, p.point.srid)
self.assertAlmostEqual(ptown.x, p.point.x, prec)
self.assertAlmostEqual(ptown.y, p.point.y, prec)
@skipUnlessDBFeature("has_translate_method")
def test_translate(self):
"Testing the `translate` GeoQuerySet method."
xfac, yfac = 5, -23
qs = Country.objects.translate(xfac, yfac, model_att='translated')
for c in qs:
for p1, p2 in zip(c.mpoly, c.translated):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
# XXX The low precision is for SpatiaLite
self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
# TODO: Oracle can be made to pass if
# union1 = union2 = fromstr('POINT (-97.5211570000000023 34.4646419999999978)')
# but this seems unexpected and should be investigated to determine the cause.
@skipUnlessDBFeature("has_unionagg_method")
@no_oracle
@ignore_warnings(category=RemovedInDjango20Warning)
def test_unionagg(self):
"""
Testing the (deprecated) `unionagg` (aggregate union) GeoQuerySet method
and the Union aggregate.
"""
tx = Country.objects.get(name='Texas').mpoly
# Houston, Dallas -- Ordering may differ depending on backend or GEOS version.
union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
union2 = fromstr('MULTIPOINT(-95.363151 29.763374,-96.801611 32.782057)')
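        # The aggregate union of the two city points is a MULTIPOINT; member
        # ordering is backend-dependent, so either ordering is accepted below.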
qs = City.objects.filter(point__within=tx)
self.assertRaises(TypeError, qs.unionagg, 'name')
self.assertRaises(ValueError, qs.aggregate, Union('name'))
# Using `field_name` keyword argument in one query and specifying an
# order in the other (which should not be used because this is
# an aggregate method on a spatial column)
u1 = qs.unionagg(field_name='point')
u2 = qs.order_by('name').unionagg()
u3 = qs.aggregate(Union('point'))['point__union']
u4 = qs.order_by('name').aggregate(Union('point'))['point__union']
tol = 0.00001
self.assertTrue(union1.equals_exact(u1, tol) or union2.equals_exact(u1, tol))
self.assertTrue(union1.equals_exact(u2, tol) or union2.equals_exact(u2, tol))
self.assertTrue(union1.equals_exact(u3, tol) or union2.equals_exact(u3, tol))
self.assertTrue(union1.equals_exact(u4, tol) or union2.equals_exact(u4, tol))
qs = City.objects.filter(name='NotACity')
self.assertIsNone(qs.unionagg(field_name='point'))
self.assertIsNone(qs.aggregate(Union('point'))['point__union'])
def test_within_subquery(self):
"""
        Using a queryset inside a geo lookup should work via a subquery
        (#14483).
"""
tex_cities = City.objects.filter(
point__within=Country.objects.filter(name='Texas').values('mpoly')).order_by('name')
expected = ['Dallas', 'Houston']
if not connection.features.supports_real_shape_operations:
expected.append('Oklahoma City')
self.assertEqual(
list(tex_cities.values_list('name', flat=True)),
expected
)
def test_non_concrete_field(self):
NonConcreteModel.objects.create(point=Point(0, 0), name='name')
list(NonConcreteModel.objects.all())